Mirror of https://github.com/RoboSats/robosats.git (synced 2024-12-13 10:56:24 +00:00)

Commit 5d18d040fe: Merge branch 'main' into fix-verify-signed-message
@@ -175,3 +175,6 @@ SLASHED_BOND_REWARD_SPLIT = 0.5
# Username for HTLCs escrows
ESCROW_USERNAME = 'admin'

+#Social
+NOSTR_NSEC = 'nsec1vxhs2zc4kqe0dhz4z2gfrdyjsrwf8pg3neeqx6w4nl8djfzdp0dqwd6rxh'
+
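The new NOSTR_NSEC value is read at runtime by the new api/nostr.py shown further down in this commit; as a minimal sketch of that guard (variable names here are illustrative, assuming python-decouple and nostr_sdk exactly as that file uses them):

    from decouple import config
    from nostr_sdk import Keys

    # Publishing Nostr order events is optional: do nothing when no key is configured.
    nsec = config("NOSTR_NSEC", cast=str, default="")
    if nsec != "":
        # Coordinator identity used to sign the kind-38383 order events.
        keys = Keys.parse(nsec)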

.github/workflows/coordinator-image.yml (26 changed lines)

@@ -21,31 +21,15 @@ jobs:
        with:
          workflow: frontend-build.yml
          workflow_conclusion: success
-         name: web-main-js
-         path: frontend/static/frontend/
+         name: django-main-static
+         path: frontend

      - name: 'Download Basic main.js Artifact for a release'
        if: inputs.semver != '' # Only if fired as job in release.yml
        uses: actions/download-artifact@v4
        with:
-         name: web-main-js
-         path: frontend/static/frontend/
+         name: django-main-static
+         path: frontend

-     - name: 'Download pro.js Artifact'
-       if: inputs.semver == '' # Only if workflow fired from frontend-build.yml
-       uses: dawidd6/action-download-artifact@v6
-       with:
-         workflow: frontend-build.yml
-         workflow_conclusion: success
-         name: web-pro-js
-         path: frontend/static/frontend/
-
-     - name: 'Download pro.js Artifact for a release'
-       if: inputs.semver != '' # Only if fired as job in release.yml
-       uses: actions/download-artifact@v4
-       with:
-         name: web-pro-js
-         path: frontend/static/frontend/

      - name: 'Log in to Docker Hub'
        uses: docker/login-action@v3

@@ -75,7 +59,7 @@ jobs:
          echo ${{ steps.commit.outputs.long }}>"commit_sha"

      - name: 'Build and push Docker image'
-       uses: docker/build-push-action@v5
+       uses: docker/build-push-action@v6
        with:
          context: .
          push: true

.github/workflows/desktop-build.yml (new file, 128 lines)

name: "Build: Desktop"
on:
  workflow_dispatch:
  workflow_call:
    inputs:
      semver:
        required: true
        type: string
  push:
    branches: [ "main" ]
    paths: [ "desktopApp", "frontend" ]
  pull_request:
    branches: [ "main" ]
    paths: [ "desktopApp", "frontend" ]

jobs:
  build-desktop:
    permissions: write-all
    runs-on: ubuntu-latest

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Set up Node.js
        uses: actions/setup-node@v4
        with:
          node-version: '16'

      - name: 'Download Basic main.js Artifact'
        if: inputs.semver == '' # Only if workflow fired from frontend-build.yml
        uses: dawidd6/action-download-artifact@v6
        with:
          workflow: frontend-build.yml
          workflow_conclusion: success
          name: desktop-main-static
          path: desktopApp

      - name: 'Download Basic main.js Artifact for a release'
        if: inputs.semver != '' # Only if fired as job in release.yml
        uses: actions/download-artifact@v4
        with:
          name: desktop-main-static
          path: desktopApp

      - name: Install dependencies
        run: |
          cd desktopApp
          npm install

      - name: Build for macOS
        run: |
          cd desktopApp
          npm run package-mac

      - name: Build for Windows
        run: |
          cd desktopApp
          npm run package-win

      - name: Build for Linux
        run: |
          cd desktopApp
          npm run package-linux

      - name: 'Get Commit Hash'
        id: commit
        uses: pr-mpt/actions-commit-hash@v3

      - name: Print semver
        run: echo The semver is ${{ github.event.inputs.semver }}

      - name: Upload macOS Build Artifact
        if: inputs.semver != ''
        uses: actions/upload-artifact@v4
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          path: desktopApp/release-builds/Robosats-darwin-x64
          name: robosats-desktop-${{ inputs.semver }}-mac-darwin-x64.zip

      - name: Upload Windows Build Artifact
        if: inputs.semver != ''
        uses: actions/upload-artifact@v4
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          path: desktopApp/release-builds/Robosats-win32-ia32
          name: robosats-desktop-${{ inputs.semver }}-win32-ia32.zip

      - name: Upload Linux Build Artifact
        if: inputs.semver != ''
        uses: actions/upload-artifact@v4
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          path: desktopApp/release-builds/Robosats-linux-x64
          name: robosats-desktop-${{ inputs.semver }}-linux-x64.zip

      - name: Upload macOS Build Artifact
        id: upload-release-mac-zip-asset
        if: inputs.semver == ''
        uses: actions/upload-artifact@v4
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          path: desktopApp/release-builds/Robosats-darwin-x64
          name: robosats-desktop-${{ steps.commit.outputs.short }}-mac-darwin-x64.zip

      - name: Upload Windows Build Artifact
        id: upload-release-win-zip-asset
        if: inputs.semver == ''
        uses: actions/upload-artifact@v4
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          path: desktopApp/release-builds/Robosats-win32-ia32
          name: robosats-desktop-${{ steps.commit.outputs.short }}-win32-ia32.zip

      - name: Upload Linux Build Artifact
        id: upload-release-linux-zip-asset
        if: inputs.semver == ''
        uses: actions/upload-artifact@v4
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          path: desktopApp/release-builds/Robosats-linux-x64
          name: robosats-desktop-${{ steps.commit.outputs.short }}-linux-x64.zip

.github/workflows/frontend-build.yml (32 changed lines)

@@ -50,34 +50,34 @@ jobs:
          export NODE_OPTIONS="--max-old-space-size=4096"
          cd frontend
          npm run build
-     - name: 'Archive Web Basic Build Results'
+     - name: 'Archive Django Static Build Results'
        uses: actions/upload-artifact@v4
        with:
-         name: web-main-js
+         name: django-main-static
          path: |
-           frontend/static/frontend/*main.js
-           frontend/static/frontend/*.wasm
+           frontend/static
+           frontend/templates/frontend/*.html
-     - name: 'Archive Web Basic Selfhosted Build Results'
+     - name: 'Archive Node App Static Build Results'
        uses: actions/upload-artifact@v4
        with:
-         name: web-basic-selfhosted-js
+         name: nodeapp-main-static
          path: |
-           frontend/static/frontend/*basic.selfhosted.js
-           frontend/static/frontend/*.wasm
+           nodeapp/static
+           nodeapp/*.html
-     - name: 'Archive Web PRO Build Results'
+     - name: 'Archive Desktop App Static Build Results'
        uses: actions/upload-artifact@v4
        with:
-         name: web-pro-js
+         name: desktop-main-static
          path: |
-           frontend/static/frontend/*pro.js
-           frontend/static/frontend/*.wasm
+           desktopApp/static
+           desktopApp/*.html
-     - name: 'Archive Web PRO SelhostedBuild Results'
+     - name: 'Archive Django Static Build Results'
        uses: actions/upload-artifact@v4
        with:
-         name: web-pro-selfhosted-js
+         name: web-main-static
          path: |
-           frontend/static/frontend/*pro.selfhosted.js
-           frontend/static/frontend/*.wasm
+           web/static
+           web/*.html
      - name: 'Archive Mobile Build Results'
        uses: actions/upload-artifact@v4
        with:

.github/workflows/integration-tests.yml (23 changed lines)

@@ -17,18 +17,27 @@ concurrency:
jobs:
  test:
    runs-on: ubuntu-latest
+   timeout-minutes: 30
    strategy:
      max-parallel: 2
      matrix:
-       python-tag: ['3.12.3-slim-bookworm', '3.13-rc-slim-bookworm']
-       lnd-version: ['v0.17.4-beta']
-       cln-version: ['v24.05']
+       python-tag: ['3.12.3-slim-bookworm']
+       lnd-version: ['v0.18.2-beta']
+       cln-version: ['v24.08']
        ln-vendor: ['LND', 'CLN']

    steps:
      - name: 'Checkout'
        uses: actions/checkout@v4

+     - name: 'Download static files Artifact'
+       uses: dawidd6/action-download-artifact@v6
+       with:
+         workflow: frontend-build.yml
+         workflow_conclusion: success
+         name: django-main-static
+         path: frontend
+
      - name: Patch Dockerfile and .env-sample
        run: |
          sed -i "1s/FROM python:.*/FROM python:${{ matrix.python-tag }}/" Dockerfile

@@ -60,16 +69,16 @@ jobs:

      - name: Wait for coordinator (django server)
        run: |
-         while [ "$(docker inspect --format "{{.State.Health.Status}}" coordinator)" != "healthy" ]; do
+         while [ "$(docker inspect --format "{{.State.Health.Status}}" test-coordinator)" != "healthy" ]; do
            echo "Waiting for coordinator to be healthy..."
            sleep 5
          done

      - name: 'Run tests with coverage'
        run: |
-         docker exec coordinator coverage run manage.py test
-         docker exec coordinator coverage report
-         docker exec coordinator coverage html
+         docker exec test-coordinator coverage run manage.py test
+         docker exec test-coordinator coverage report
+         docker exec test-coordinator coverage html
        env:
          LNVENDOR: ${{ matrix.ln-vendor }}
          DEVELOPMENT: True

.github/workflows/release.yml (62 changed lines)

@@ -74,9 +74,16 @@ jobs:
    secrets: inherit
    with:
      semver: ${{ needs.check-versions.outputs.semver }}

+ desktop-build:
+   uses: RoboSats/robosats/.github/workflows/desktop-build.yml@main
+   needs: [frontend-build, check-versions]
+   secrets: inherit
+   with:
+     semver: ${{ needs.check-versions.outputs.semver }}
+
  release:
-   needs: [check-versions, integration-tests, coordinator-image, selfhosted-client-image, web-client-image, android-build]
+   needs: [check-versions, integration-tests, coordinator-image, selfhosted-client-image, web-client-image, android-build, desktop-build]
    runs-on: ubuntu-latest
    steps:
      - name: Checkout

@@ -171,4 +178,55 @@ jobs:
          upload_url: ${{ steps.create-release.outputs.upload_url }}
          asset_path: app-x86-release.apk
          asset_name: robosats-${{ needs.check-versions.outputs.semver }}-x86.apk
          asset_content_type: application/apk
+
+     - name: 'Download macOS Build Artifact'
+       uses: actions/download-artifact@v4
+       with:
+         name: robosats-desktop-${{ needs.check-versions.outputs.semver }}-mac-darwin-x64.zip
+         path: .
+
+     - name: 'Upload macOS Build Artifact'
+       id: upload-release-mac-zip-asset
+       uses: actions/upload-release-asset@v1
+       env:
+         GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+       with:
+         upload_url: ${{ steps.create-release.outputs.upload_url }}
+         asset_path: .
+         asset_name: robosats-desktop-${{ needs.check-versions.outputs.semver }}-mac-darwin-x64.zip
+         asset_content_type: application/zip
+
+     - name: 'Download linux Build Artifact'
+       uses: actions/download-artifact@v4
+       with:
+         name: robosats-desktop-${{ needs.check-versions.outputs.semver }}-linux-x64.zip
+         path: .
+
+     - name: 'Upload linux Build Artifact'
+       id: upload-release-linux-zip-asset
+       uses: actions/upload-release-asset@v1
+       env:
+         GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+       with:
+         upload_url: ${{ steps.create-release.outputs.upload_url }}
+         asset_path: robosats-desktop-${{ needs.check-versions.outputs.semver }}-linux-x64.zip
+         asset_name: robosats-desktop-${{ needs.check-versions.outputs.semver }}-linux-x64.zip
+         asset_content_type: application/zip
+
+     - name: 'Download window Build Artifact'
+       uses: actions/download-artifact@v4
+       with:
+         name: robosats-desktop-${{ needs.check-versions.outputs.semver }}-win32-ia32.zip
+         path: .
+
+     - name: 'Upload macOS Build Artifact'
+       id: upload-release-win-zip-asset
+       uses: actions/upload-release-asset@v1
+       env:
+         GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+       with:
+         upload_url: ${{ steps.create-release.outputs.upload_url }}
+         asset_path: robosats-desktop-${{ needs.check-versions.outputs.semver }}-win32-ia32.zip
+         asset_name: robosats-desktop-${{ needs.check-versions.outputs.semver }}-win32-ia32.zip
+         asset_content_type: application/zip

.github/workflows/selfhosted-client-image.yml (33 changed lines)

@@ -21,40 +21,21 @@ jobs:
    steps:
      - uses: actions/checkout@v4

-     - name: 'Copy Static' # Needed since Github actions does not support symlinks
-       run: cp -r frontend/static nodeapp/static
+     - name: 'Download Basic main.js Artifact'

-     - name: 'Download basic.selfhosted.js Artifact'
        if: inputs.semver == '' # Only if workflow fired from frontend-build.yml
        uses: dawidd6/action-download-artifact@v6
        with:
          workflow: frontend-build.yml
          workflow_conclusion: success
-         name: web-basic-selfhosted-js
-         path: nodeapp/static/frontend/
+         name: nodeapp-main-static
+         path: nodeapp

-     - name: 'Download main.js Artifact for a release'
+     - name: 'Download Basic main.js Artifact for a release'
        if: inputs.semver != '' # Only if fired as job in release.yml
        uses: actions/download-artifact@v4
        with:
-         name: web-basic-selfhosted-js
-         path: nodeapp/static/frontend/
+         name: nodeapp-main-static
+         path: nodeapp

-     - name: 'Download pro.selfhosted.js Artifact'
-       if: inputs.semver == '' # Only if workflow fired from frontend-build.yml
-       uses: dawidd6/action-download-artifact@v6
-       with:
-         workflow: frontend-build.yml
-         workflow_conclusion: success
-         name: web-pro-selfhosted-js
-         path: nodeapp/static/frontend/
-
-     - name: 'Download pro.js Artifact for a release'
-       if: inputs.semver != '' # Only if fired as job in release.yml
-       uses: actions/download-artifact@v4
-       with:
-         name: web-pro-selfhosted-js
-         path: nodeapp/static/frontend/

      - name: 'Log in to Docker Hub'
        uses: docker/login-action@v3

@@ -85,7 +66,7 @@ jobs:
        uses: docker/setup-buildx-action@v3

      - name: 'Build and push Docker image'
-       uses: docker/build-push-action@v5
+       uses: docker/build-push-action@v6
        with:
          context: ./nodeapp
          platforms: linux/amd64,linux/arm64

.github/workflows/web-client-image.yml (33 changed lines)

@@ -21,40 +21,21 @@ jobs:
    steps:
      - uses: actions/checkout@v4

-     - name: 'Copy Static' # Needed since Github actions does not support symlinks
-       run: cp -r frontend/static web/static
+     - name: 'Download Basic main.js Artifact'

-     - name: 'Download main.js Artifact'
        if: inputs.semver == '' # Only if workflow fired from frontend-build.yml
        uses: dawidd6/action-download-artifact@v6
        with:
          workflow: frontend-build.yml
          workflow_conclusion: success
-         name: web-main-js
-         path: web/static/frontend/
+         name: web-main-static
+         path: web

-     - name: 'Download main.js Artifact for a release'
+     - name: 'Download Basic main.js Artifact for a release'
        if: inputs.semver != '' # Only if fired as job in release.yml
        uses: actions/download-artifact@v4
        with:
-         name: web-main-js
-         path: web/static/frontend/
+         name: web-main-static
+         path: web

-     - name: 'Download pro.js Artifact'
-       if: inputs.semver == '' # Only if workflow fired from frontend-build.yml
-       uses: dawidd6/action-download-artifact@v6
-       with:
-         workflow: frontend-build.yml
-         workflow_conclusion: success
-         name: web-pro-js
-         path: web/static/frontend/
-
-     - name: 'Download pro.js Artifact for a release'
-       if: inputs.semver != '' # Only if fired as job in release.yml
-       uses: actions/download-artifact@v4
-       with:
-         name: web-pro-js
-         path: web/static/frontend/

      - name: 'Log in to Docker Hub'
        uses: docker/login-action@v3

@@ -85,7 +66,7 @@ jobs:
        uses: docker/setup-buildx-action@v3

      - name: 'Build and push Docker image'
-       uses: docker/build-push-action@v5
+       uses: docker/build-push-action@v6
        with:
          context: ./web
          platforms: linux/amd64,linux/arm64

.gitignore (17 changed lines)

@@ -634,6 +634,7 @@ frontend/static/assets/avatars*
api/lightning/*_grpc.py
api/lightning/*_pb2.py
api/lightning/pymp*
+api/lightning/pip*
frontend/static/locales/collected_phrases.json
frontend/static/admin*
frontend/static/rest_framework*

@@ -641,15 +642,21 @@ frontend/static/import_export*
frontend/static/drf_spectacular_sidecar/
frontend/src/components/PaymentMethods/Icons/code*
frontend/src/components/PaymentMethods/Icons/webp*
-frontend/static/frontend/**
+frontend/static/frontend
docs/.jekyll-cache*
docs/_site*
node
+desktopApp/release-builds

-# mobile frontend statics
-mobile/html/Web.bundle/js*
-mobile/html/Web.bundle/css*
-mobile/html/Web.bundle/assets*
+# frontend statics
+frontend/templates/frontend/*.html
+mobile/html/Web.bundle
+desktopApp/static
+desktopApp/*.html
+web/static
+web/*.html
+nodeapp/static
+nodeapp/*.html

# Protocol Buffers
api/lightning/*.proto

@@ -11,7 +11,7 @@ repos:
          - '--fix=lf'
      - id: trailing-whitespace
      - id: pretty-format-json
-       exclude: ^frontend/
+       exclude: ^frontend/|^mobile/
        args:
          - '--autofix'
          - '--no-sort-keys'

@@ -7,7 +7,7 @@ This contributing guide is based on the [Bisq contributing guide](https://github

## Communication Channels

-Most communication about development takes place on our [Matrix Development group](https://matrix.to/#/#robosats:matrix.org).
+Most communication about development takes place on our [SimpleX Development Group](https://simplex.chat/contact#/?v=2-7&smp=smp%3A%2F%2F6iIcWT_dF2zN_w5xzZEY7HI2Prbh3ldP07YTyDexPjE%3D%40smp10.simplex.im%2FKEkNLMlgM8vrrU3xjBt5emS7EsP0c4s1%23%2F%3Fv%3D1-3%26dh%3DMCowBQYDK2VuAyEABehx7Tgefl_vvOGOe2SThJCGACKRgSU2wiUdIJ5bQHw%253D%26srv%3Drb2pbttocvnbrngnwziclp2f4ckjq65kebafws6g4hy22cdaiv5dwjqd.onion&data=%7B%22type%22%3A%22group%22%2C%22groupLinkId%22%3A%22gFi-9hvL3XgXXTgnlZPyJw%3D%3D%22%7D).

Discussion about code changes happens in GitHub issues and pull requests.


@@ -1,4 +1,4 @@
-FROM python:3.12.3-slim-bookworm
+FROM python:3.13.0-slim-bookworm
ARG DEBIAN_FRONTEND=noninteractive
ARG DEVELOPMENT=False

@@ -210,6 +210,7 @@ class OrderAdmin(AdminChangeLinksMixin, admin.ModelAdmin):
                f"Dispute of order {order.id} solved successfully on favor of the maker",
                messages.SUCCESS,
            )
+           send_notification.delay(order_id=order.id, message="dispute_closed")

        else:
            self.message_user(

@@ -248,6 +249,7 @@ class OrderAdmin(AdminChangeLinksMixin, admin.ModelAdmin):
                f"Dispute of order {order.id} solved successfully on favor of the taker",
                messages.SUCCESS,
            )
+           send_notification.delay(order_id=order.id, message="dispute_closed")

        else:
            self.message_user(

@@ -449,7 +449,7 @@ class CLNNode:
            # If the cheapest possible private route is more expensive than what RoboSats is willing to pay
            if min(routes_cost) >= max_routing_fee_sats:
                payout["context"] = {
-                   "bad_invoice": "The invoice hinted private routes are not payable within the submitted routing budget."
+                   "bad_invoice": "The invoice hinted private routes are not payable within the submitted routing budget. This can be adjusted with Advanced Options enabled."
                }
                return payout

@@ -424,7 +424,7 @@ class LNDNode:
            # If the cheapest possible private route is more expensive than what RoboSats is willing to pay
            if min(routes_cost) >= max_routing_fee_sats:
                payout["context"] = {
-                   "bad_invoice": "The invoice hinted private routes are not payable within the submitted routing budget."
+                   "bad_invoice": "The invoice hinted private routes are not payable within the submitted routing budget. This can be adjusted with Advanced Options enabled."
                }
                return payout

@@ -478,6 +478,7 @@ class LNDNode:
            payment_request=lnpayment.invoice,
            fee_limit_sat=fee_limit_sat,
            timeout_seconds=timeout_seconds,
+           amp=True,
        )

        routerstub = router_pb2_grpc.RouterStub(cls.channel)

@@ -536,6 +537,7 @@ class LNDNode:
            fee_limit_sat=fee_limit_sat,
            timeout_seconds=timeout_seconds,
            allow_self_payment=True,
+           amp=True,
        )

        order = lnpayment.order_paid_LN
@@ -8,7 +8,7 @@ from django.utils import timezone

from api.lightning.node import LNNode
from api.models import Currency, LNPayment, MarketTick, OnchainPayment, Order
-from api.tasks import send_devfund_donation, send_notification
+from api.tasks import send_devfund_donation, send_notification, nostr_send_order_event
from api.utils import get_minning_fee, validate_onchain_address, location_country
from chat.models import Message

@@ -704,9 +704,9 @@ class Logics:

        if context["invoice_amount"] < MIN_SWAP_AMOUNT:
            context["swap_allowed"] = False
-           context[
-               "swap_failure_reason"
-           ] = f"Order amount is smaller than the minimum swap available of {MIN_SWAP_AMOUNT} Sats"
+           context["swap_failure_reason"] = (
+               f"Order amount is smaller than the minimum swap available of {MIN_SWAP_AMOUNT} Sats"
+           )
            order.log(
                f"Onchain payment option was not offered: amount is smaller than the minimum swap available of {MIN_SWAP_AMOUNT} Sats",
                level="WARN",

@@ -714,9 +714,9 @@ class Logics:
            return True, context
        elif context["invoice_amount"] > MAX_SWAP_AMOUNT:
            context["swap_allowed"] = False
-           context[
-               "swap_failure_reason"
-           ] = f"Order amount is bigger than the maximum swap available of {MAX_SWAP_AMOUNT} Sats"
+           context["swap_failure_reason"] = (
+               f"Order amount is bigger than the maximum swap available of {MAX_SWAP_AMOUNT} Sats"
+           )
            order.log(
                f"Onchain payment option was not offered: amount is bigger than the maximum swap available of {MAX_SWAP_AMOUNT} Sats",
                level="WARN",

@@ -741,9 +741,9 @@ class Logics:
            )
        if not valid:
            context["swap_allowed"] = False
-           context[
-               "swap_failure_reason"
-           ] = "Not enough onchain liquidity available to offer a swap"
+           context["swap_failure_reason"] = (
+               "Not enough onchain liquidity available to offer a swap"
+           )
            order.log(
                "Onchain payment option was not offered: onchain liquidity available to offer a swap",
                level="WARN",

@@ -1019,6 +1019,8 @@ class Logics:
            order.log("Order expired while waiting for maker bond")
            order.log("Maker bond was cancelled")

+           nostr_send_order_event.delay(order_id=order.id)
+
            return True, None

        # 2.a) When maker cancels after bond

@@ -1039,6 +1041,8 @@ class Logics:
            order.log("Order cancelled by maker while public or paused")
            order.log("Maker bond was <b>unlocked</b>")

+           nostr_send_order_event.delay(order_id=order.id)
+
            return True, None

        # 2.b) When maker cancels after bond and before taker bond is locked

@@ -1058,6 +1062,8 @@ class Logics:
            order.log("Maker bond was <b>unlocked</b>")
            order.log("Taker bond was <b>cancelled</b>")

+           nostr_send_order_event.delay(order_id=order.id)
+
            return True, None

        # 3) When taker cancels before bond

@@ -1070,6 +1076,8 @@ class Logics:

            order.log("Taker cancelled before locking the bond")

+           nostr_send_order_event.delay(order_id=order.id)
+
            return True, None

        # 4) When taker or maker cancel after bond (before escrow)

@@ -1099,6 +1107,8 @@ class Logics:
            order.log("Maker bond was <b>settled</b>")
            order.log("Taker bond was <b>unlocked</b>")

+           nostr_send_order_event.delay(order_id=order.id)
+
            return True, None

        # 4.b) When taker cancel after bond (before escrow)

@@ -1121,6 +1131,8 @@ class Logics:
            order.log("Taker bond was <b>settled</b>")
            order.log("Maker bond was <b>unlocked</b>")

+           nostr_send_order_event.delay(order_id=order.id)
+
            return True, None

        # 5) When trade collateral has been posted (after escrow)

@@ -1136,6 +1148,9 @@ class Logics:
            order.log(
                f"Taker Robot({user.robot.id},{user.username}) accepted the collaborative cancellation"
            )

+           nostr_send_order_event.delay(order_id=order.id)
+
            return True, None

        # if the taker had asked, and now the maker does: cancel order, return everything

@@ -1144,6 +1159,9 @@ class Logics:
            order.log(
                f"Maker Robot({user.robot.id},{user.username}) accepted the collaborative cancellation"
            )

+           nostr_send_order_event.delay(order_id=order.id)
+
            return True, None

        # Otherwise just make true the asked for cancel flags

@@ -1181,6 +1199,8 @@ class Logics:
        order.update_status(Order.Status.CCA)
        send_notification.delay(order_id=order.id, message="collaborative_cancelled")

+       nostr_send_order_event.delay(order_id=order.id)
+
        order.log("Order was collaboratively cancelled")
        order.log("Maker bond was <b>unlocked</b>")
        order.log("Taker bond was <b>unlocked</b>")

@@ -1208,6 +1228,8 @@ class Logics:

        order.save() # update all fields

+       nostr_send_order_event.delay(order_id=order.id)
+
        order.log(f"Order({order.id},{str(order)}) is public in the order book")
        return

@@ -1255,9 +1277,9 @@ class Logics:
        bond_satoshis = int(order.last_satoshis * order.bond_size / 100)

        if user.robot.wants_stealth:
-           description = f"Payment reference: {order.reference}. This payment WILL FREEZE IN YOUR WALLET, check on RoboSats if the lock was successful. It will be unlocked (fail) unless you cheat or cancel unilaterally."
+           description = f"{config("NODE_ALIAS")} - Payment reference: {order.reference}. This payment WILL FREEZE IN YOUR WALLET, check on RoboSats if the lock was successful. It will be unlocked (fail) unless you cheat or cancel unilaterally."
        else:
-           description = f"RoboSats - Publishing '{str(order)}' - Maker bond - This payment WILL FREEZE IN YOUR WALLET, check on RoboSats if the lock was successful. It will be unlocked (fail) unless you cheat or cancel unilaterally."
+           description = f"{config("NODE_ALIAS")} - Publishing '{str(order)}' - Maker bond - This payment WILL FREEZE IN YOUR WALLET, check on RoboSats if the lock was successful. It will be unlocked (fail) unless you cheat or cancel unilaterally."

        # Gen hold Invoice
        try:

@@ -1350,6 +1372,9 @@ class Logics:
        except Exception:
            pass
        send_notification.delay(order_id=order.id, message="order_taken_confirmed")

+       nostr_send_order_event.delay(order_id=order.id)
+
        order.log(
            f"<b>Contract formalized.</b> Maker: Robot({order.maker.robot.id},{order.maker}). Taker: Robot({order.taker.robot.id},{order.taker}). API median price {order.currency.exchange_rate} {dict(Currency.currency_choices)[order.currency.currency]}/BTC. Premium is {order.premium}%. Contract size {order.last_satoshis} Sats"
        )

@@ -1377,10 +1402,10 @@ class Logics:
        bond_satoshis = int(order.last_satoshis * order.bond_size / 100)
        pos_text = "Buying" if cls.is_buyer(order, user) else "Selling"
        if user.robot.wants_stealth:
-           description = f"Payment reference: {order.reference}. This payment WILL FREEZE IN YOUR WALLET, check on RoboSats if the lock was successful. It will be unlocked (fail) unless you cheat or cancel unilaterally."
+           description = f"{config("NODE_ALIAS")} - Payment reference: {order.reference}. This payment WILL FREEZE IN YOUR WALLET, check on RoboSats if the lock was successful. It will be unlocked (fail) unless you cheat or cancel unilaterally."
        else:
            description = (
-               f"RoboSats - Taking 'Order {order.id}' {pos_text} BTC for {str(float(order.amount)) + Currency.currency_dict[str(order.currency.currency)]}"
+               f"{config("NODE_ALIAS")} - Taking 'Order {order.id}' {pos_text} BTC for {str(float(order.amount)) + Currency.currency_dict[str(order.currency.currency)]}"
                + " - Taker bond - This payment WILL FREEZE IN YOUR WALLET, check on RoboSats if the lock was successful. It will be unlocked (fail) unless you cheat or cancel unilaterally."
            )

@@ -1476,9 +1501,9 @@ class Logics:
        order.log(f"Escrow invoice amount is calculated as {escrow_satoshis} Sats")

        if user.robot.wants_stealth:
-           description = f"Payment reference: {order.reference}. This payment WILL FREEZE IN YOUR WALLET, check on RoboSats if the lock was successful. It will be unlocked (fail) unless you cheat or cancel unilaterally."
+           description = f"{config("NODE_ALIAS")} - Payment reference: {order.reference}. This payment WILL FREEZE IN YOUR WALLET, check on RoboSats if the lock was successful. It will be unlocked (fail) unless you cheat or cancel unilaterally."
        else:
-           description = f"RoboSats - Escrow amount for '{str(order)}' - It WILL FREEZE IN YOUR WALLET. It will be released to the buyer once you confirm you received the fiat. It will automatically return if buyer does not confirm the payment."
+           description = f"{config("NODE_ALIAS")} - Escrow amount for '{str(order)}' - It WILL FREEZE IN YOUR WALLET. It will be released to the buyer once you confirm you received the fiat. It will automatically return if buyer does not confirm the payment."

        # Gen hold Invoice
        try:

@@ -1741,11 +1766,15 @@ class Logics:
            order.log(
                f"Robot({user.robot.id},{user.username}) paused the public order"
            )

+           nostr_send_order_event.delay(order_id=order.id)
        elif order.status == Order.Status.PAU:
            order.update_status(Order.Status.PUB)
            order.log(
                f"Robot({user.robot.id},{user.username}) made public the paused order"
            )

+           nostr_send_order_event.delay(order_id=order.id)
        else:
            order.log(
                f"Robot({user.robot.id},{user.username}) tried to pause/unpause an order that was not public or paused",
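The nostr_send_order_event task imported above lives in api.tasks, which is not part of this excerpt. As a hedged sketch of how such a task could bridge from Celery into the async Nostr.send_order_event (only the task name, the .delay(order_id=...) call pattern and the Nostr class come from this diff; the body is an assumption):

    import asyncio

    from celery import shared_task


    @shared_task(name="nostr_send_order_event", ignore_result=True)
    def nostr_send_order_event(order_id):
        # Hypothetical wrapper: fetch the order and drive the async publisher to completion.
        from api.models import Order
        from api.nostr import Nostr

        order = Order.objects.get(id=order_id)
        asyncio.run(Nostr().send_order_event(order))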

@@ -6,7 +6,7 @@ from django.core.management.base import BaseCommand
from django.db import transaction

from api.models import Robot
-from api.notifications import Telegram
+from api.notifications import Notifications
from api.utils import get_session


@@ -17,7 +17,7 @@ class Command(BaseCommand):
    bot_token = config("TELEGRAM_TOKEN")
    updates_url = f"https://api.telegram.org/bot{bot_token}/getUpdates"
    session = get_session()
-   telegram = Telegram()
+   notifications = Notifications()

    def handle(self, *args, **options):
        offset = 0

@@ -49,17 +49,17 @@ class Command(BaseCommand):
                    continue
                parts = message.split(" ")
                if len(parts) < 2:
-                   self.telegram.send_message(
-                       chat_id=result["message"]["from"]["id"],
-                       text='You must enable the notifications bot using the RoboSats client. Click on your "Robot robot" -> "Enable Telegram" and follow the link or scan the QR code.',
+                   self.notifications.send_telegram_message(
+                       result["message"]["from"]["id"],
+                       'You must enable the notifications bot using the RoboSats client. Click on your "Robot robot" -> "Enable Telegram" and follow the link or scan the QR code.',
                    )
                    continue
                token = parts[-1]
                robot = Robot.objects.filter(telegram_token=token).first()
                if not robot:
-                   self.telegram.send_message(
-                       chat_id=result["message"]["from"]["id"],
-                       text=f'Wops, invalid token! There is no Robot with telegram chat token "{token}"',
+                   self.notifications.send_telegram_message(
+                       result["message"]["from"]["id"],
+                       f'Wops, invalid token! There is no Robot with telegram chat token "{token}"',
                    )
                    continue

@@ -71,7 +71,7 @@ class Command(BaseCommand):
                robot.telegram_lang_code = result["message"]["from"][
                    "language_code"
                ]
-               self.telegram.welcome(robot.user)
+               self.notifications.welcome(robot.user)
                robot.telegram_enabled = True
                robot.save(
                    update_fields=[

api/migrations/0047_notification.py (new file, 26 lines)

# Generated by Django 5.0.6 on 2024-06-14 18:31

import django.db.models.deletion
import django.utils.timezone
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('api', '0046_alter_currency_currency'),
    ]

    operations = [
        migrations.CreateModel(
            name='Notification',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created_at', models.DateTimeField(default=django.utils.timezone.now)),
                ('title', models.CharField(default=None, max_length=240)),
                ('description', models.CharField(blank=True, default=None, max_length=240)),
                ('order', models.ForeignKey(default=None, on_delete=django.db.models.deletion.CASCADE, to='api.order')),
                ('robot', models.ForeignKey(default=None, on_delete=django.db.models.deletion.CASCADE, to='api.robot')),
            ],
        ),
    ]

api/migrations/0048_alter_order_reference.py (new file, 19 lines)

# Generated by Django 5.0.6 on 2024-06-29 14:07

import api.models.order
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('api', '0047_notification'),
    ]

    operations = [
        migrations.AlterField(
            model_name='order',
            name='reference',
            field=models.UUIDField(default=api.models.order.custom_uuid, editable=False),
        ),
    ]

api/migrations/0049_alter_currency_currency.py (new file, 18 lines)

# Generated by Django 5.0.8 on 2024-08-15 18:06

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('api', '0048_alter_order_reference'),
    ]

    operations = [
        migrations.AlterField(
            model_name='currency',
            name='currency',
            field=models.PositiveSmallIntegerField(choices=[(1, 'USD'), (2, 'EUR'), (3, 'JPY'), (4, 'GBP'), (5, 'AUD'), (6, 'CAD'), (7, 'CHF'), (8, 'CNY'), (9, 'HKD'), (10, 'NZD'), (11, 'SEK'), (12, 'KRW'), (13, 'SGD'), (14, 'NOK'), (15, 'MXN'), (16, 'BYN'), (17, 'RUB'), (18, 'ZAR'), (19, 'TRY'), (20, 'BRL'), (21, 'CLP'), (22, 'CZK'), (23, 'DKK'), (24, 'HRK'), (25, 'HUF'), (26, 'INR'), (27, 'ISK'), (28, 'PLN'), (29, 'RON'), (30, 'ARS'), (31, 'VES'), (32, 'COP'), (33, 'PEN'), (34, 'UYU'), (35, 'PYG'), (36, 'BOB'), (37, 'IDR'), (38, 'ANG'), (39, 'CRC'), (40, 'CUP'), (41, 'DOP'), (42, 'GHS'), (43, 'GTQ'), (44, 'ILS'), (45, 'JMD'), (46, 'KES'), (47, 'KZT'), (48, 'MYR'), (49, 'NAD'), (50, 'NGN'), (51, 'AZN'), (52, 'PAB'), (53, 'PHP'), (54, 'PKR'), (55, 'QAR'), (56, 'SAR'), (57, 'THB'), (58, 'TTD'), (59, 'VND'), (60, 'XOF'), (61, 'TWD'), (62, 'TZS'), (63, 'XAF'), (64, 'UAH'), (65, 'EGP'), (66, 'LKR'), (67, 'MAD'), (68, 'AED'), (69, 'TND'), (70, 'ETB'), (71, 'GEL'), (72, 'UGX'), (73, 'RSD'), (74, 'IRT'), (75, 'BDT'), (76, 'ALL'), (77, 'DZD'), (300, 'XAU'), (1000, 'BTC')], unique=True),
        ),
    ]

api/migrations/0050_alter_order_status.py (new file, 18 lines)

# Generated by Django 5.0.8 on 2024-08-22 08:30

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('api', '0049_alter_currency_currency'),
    ]

    operations = [
        migrations.AlterField(
            model_name='order',
            name='status',
            field=models.PositiveSmallIntegerField(choices=[(0, 'Waiting for maker bond'), (1, 'Public'), (2, 'Paused'), (3, 'Waiting for taker bond'), (4, 'Cancelled'), (5, 'Expired'), (6, 'Waiting for trade collateral and buyer invoice'), (7, 'Waiting only for seller trade collateral'), (8, 'Waiting only for buyer invoice'), (9, 'Sending fiat - In chatroom'), (10, 'Fiat sent - In chatroom'), (11, 'In dispute'), (12, 'Collaboratively cancelled'), (13, 'Sending satoshis to buyer'), (14, 'Successful trade'), (15, 'Failed lightning network routing'), (16, 'Wait for dispute resolution'), (17, 'Maker lost dispute'), (18, 'Taker lost dispute')], default=0),
        ),
    ]

@@ -4,5 +4,14 @@ from .market_tick import MarketTick
from .onchain_payment import OnchainPayment
from .order import Order
from .robot import Robot
+from .notification import Notification

-__all__ = ["Currency", "LNPayment", "MarketTick", "OnchainPayment", "Order", "Robot"]
+__all__ = [
+    "Currency",
+    "LNPayment",
+    "MarketTick",
+    "OnchainPayment",
+    "Order",
+    "Robot",
+    "Notification",
+]

api/models/notification.py (new file, 35 lines)

# We use custom seeded UUID generation during testing
import uuid

from decouple import config
from api.models import Order, Robot
from django.db import models
from django.utils import timezone

if config("TESTING", cast=bool, default=False):
    import random
    import string

    random.seed(1)
    chars = string.ascii_lowercase + string.digits

    def custom_uuid():
        return uuid.uuid5(uuid.NAMESPACE_DNS, "".join(random.choices(chars, k=20)))

else:
    custom_uuid = uuid.uuid4


class Notification(models.Model):
    # notification info
    created_at = models.DateTimeField(default=timezone.now)

    robot = models.ForeignKey(Robot, on_delete=models.CASCADE, default=None)
    order = models.ForeignKey(Order, on_delete=models.CASCADE, default=None)

    # notification details
    title = models.CharField(max_length=240, null=False, default=None)
    description = models.CharField(max_length=240, default=None, blank=True)

    def __str__(self):
        return f"{self.title} {self.description}"
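As a usage sketch only (not part of this commit), a notification row for the model above could be persisted with the standard Django ORM; the order id and the strings below are illustrative:

    from api.models import Notification, Order

    order = Order.objects.get(id=123)  # illustrative order id
    Notification.objects.create(
        robot=order.maker.robot,
        order=order,
        title="Order taken",
        description=f"Order {order.id} was taken by another robot",
    )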

@@ -10,6 +10,7 @@ from django.db import models
from django.db.models.signals import pre_delete
from django.dispatch import receiver
from django.utils import timezone
+from api.tasks import send_notification

if config("TESTING", cast=bool, default=False):
    import random

@@ -45,7 +46,7 @@ class Order(models.Model):
        DIS = 11, "In dispute"
        CCA = 12, "Collaboratively cancelled"
        PAY = 13, "Sending satoshis to buyer"
-       SUC = 14, "Sucessful trade"
+       SUC = 14, "Successful trade"
        FAI = 15, "Failed lightning network routing"
        WFR = 16, "Wait for dispute resolution"
        MLD = 17, "Maker lost dispute"

@@ -91,10 +92,7 @@ class Order(models.Model):
        decimal_places=2,
        default=0,
        null=True,
-       validators=[
-           MinValueValidator(Decimal(-100)),
-           MaxValueValidator(Decimal(999))
-       ],
+       validators=[MinValueValidator(Decimal(-100)), MaxValueValidator(Decimal(999))],
        blank=True,
    )
    # explicit

@@ -352,6 +350,8 @@ class Order(models.Model):
        self.log(
            f"Order state went from {old_status}: <i>{Order.Status(old_status).label}</i> to {new_status}: <i>{Order.Status(new_status).label}</i>"
        )
+       if new_status == Order.Status.FAI:
+           send_notification.delay(order_id=self.id, message="lightning_failed")


@receiver(pre_delete, sender=Order)

@@ -1,12 +1,8 @@
-from pathlib import Path
-
-from django.conf import settings
from django.contrib.auth.models import User
from django.core.validators import validate_comma_separated_integer_list
from django.db import models
-from django.db.models.signals import post_save, pre_delete
+from django.db.models.signals import post_save
from django.dispatch import receiver
-from django.utils.html import mark_safe


class Robot(models.Model):

@@ -88,25 +84,5 @@ class Robot(models.Model):
    def save_user_robot(sender, instance, **kwargs):
        instance.robot.save()

-   @receiver(pre_delete, sender=User)
-   def del_avatar_from_disk(sender, instance, **kwargs):
-       try:
-           avatar_file = Path(
-               settings.AVATAR_ROOT + instance.robot.avatar.url.split("/")[-1]
-           )
-           avatar_file.unlink()
-       except Exception:
-           pass
-
    def __str__(self):
        return self.user.username
-
-   # to display avatars in admin panel
-   def get_avatar(self):
-       if not self.avatar:
-           return settings.STATIC_ROOT + "unknown_avatar.png"
-       return self.avatar.url
-
-   # method to create a fake table field in read only mode
-   def avatar_tag(self):
-       return mark_safe('<img src="%s" width="50" height="50" />' % self.get_avatar())

@@ -1,7 +1,10 @@
import hashlib
-import time

-from .utils import human_format
+# UNUSED
+# import time
+
+# UNUSED
+# from .utils import human_format


class NickGenerator:

@@ -94,7 +97,7 @@ class NickGenerator:
            # if self.verbose:
            #     print(f"Adverb: {adv}, id {adv_id}.")
        else:
-           adv_id, adv, remainder = 0, "", nick_id
+           adv, remainder = "", nick_id

        # Compute adjective id
        if self.use_adj:
114 api/nostr.py (new file)
@@ -0,0 +1,114 @@
+import pygeohash
+import hashlib
+import uuid
+
+from asgiref.sync import sync_to_async
+from nostr_sdk import Keys, Client, EventBuilder, NostrSigner, Kind, Tag
+from api.models import Order
+from decouple import config
+
+
+class Nostr:
+    """Simple nostr events manager to be used as a cache system for clients"""
+
+    async def send_order_event(self, order):
+        """Creates the event and sends it to the coordinator relay"""
+
+        if config("NOSTR_NSEC", cast=str, default="") == "":
+            return
+
+        print("Sending nostr event")
+
+        # Initialize with coordinator Keys
+        keys = Keys.parse(config("NOSTR_NSEC", cast=str))
+        signer = NostrSigner.keys(keys)
+        client = Client(signer)
+
+        # Add relays and connect
+        await client.add_relays(["ws://localhost:7777"])
+        await client.connect()
+
+        robot_name = await self.get_robot_name(order)
+        robot_hash_id = await self.get_robot_hash_id(order)
+        currency = await self.get_robot_currency(order)
+
+        event = EventBuilder(
+            Kind(38383),
+            "",
+            self.generate_tags(order, robot_name, robot_hash_id, currency),
+        ).to_event(keys)
+        await client.send_event(event)
+        print(f"Nostr event sent: {event.as_json()}")
+
+    @sync_to_async
+    def get_robot_name(self, order):
+        return order.maker.username
+
+    @sync_to_async
+    def get_robot_hash_id(self, order):
+        return order.maker.robot.hash_id
+
+    @sync_to_async
+    def get_robot_currency(self, order):
+        return str(order.currency)
+
+    def generate_tags(self, order, robot_name, robot_hash_id, currency):
+        hashed_id = hashlib.md5(
+            f"{config("COORDINATOR_ALIAS", cast=str)}{order.id}".encode("utf-8")
+        ).hexdigest()
+
+        tags = [
+            Tag.parse(["d", str(uuid.UUID(hashed_id))]),
+            Tag.parse(["name", robot_name, robot_hash_id]),
+            Tag.parse(["k", "sell" if order.type == Order.Types.SELL else "buy"]),
+            Tag.parse(["f", currency]),
+            Tag.parse(["s", self.get_status_tag(order)]),
+            Tag.parse(["amt", "0"]),
+            Tag.parse(
+                ["fa"]
+                + (
+                    [str(order.amount)]
+                    if not order.has_range
+                    else [str(order.min_amount), str(order.max_amount)]
+                )
+            ),
+            Tag.parse(["pm"] + order.payment_method.split(" ")),
+            Tag.parse(["premium", str(order.premium)]),
+            Tag.parse(
+                [
+                    "source",
+                    f"http://{config("HOST_NAME")}/order/{config("COORDINATOR_ALIAS", cast=str).lower()}/{order.id}",
+                ]
+            ),
+            Tag.parse(
+                [
+                    "expiration",
+                    str(int(order.expires_at.timestamp())),
+                    str(order.escrow_duration),
+                ]
+            ),
+            Tag.parse(["y", "robosats", config("COORDINATOR_ALIAS", cast=str).lower()]),
+            Tag.parse(["n", str(config("NETWORK"))]),
+            Tag.parse(["layer"] + self.get_layer_tag(order)),
+            Tag.parse(["bond", str(order.bond_size)]),
+            Tag.parse(["z", "order"]),
+        ]
+
+        if order.latitude and order.longitude:
+            tags.extend(
+                [Tag.parse(["g", pygeohash.encode(order.latitude, order.longitude)])]
+            )
+
+        return tags
+
+    def get_status_tag(self, order):
+        if order.status == Order.Status.PUB:
+            return "pending"
+        else:
+            return "success"
+
+    def get_layer_tag(self, order):
+        if order.type == Order.Types.SELL:
+            return ["onchain", "lightning"]
+        else:
+            return ["lightning"]
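The `d` tag above gives each coordinator order a deterministic, replaceable-event identifier. A small sketch of how a client or test could recompute it from the public order id and coordinator alias, using only the standard library (the alias and order id below are illustrative placeholders):

```python
import hashlib
import uuid


def order_event_d_tag(coordinator_alias: str, order_id: int) -> str:
    """Recompute the 'd' tag exactly as generate_tags() does:
    md5(alias + order id), rendered as a UUID string."""
    hashed_id = hashlib.md5(f"{coordinator_alias}{order_id}".encode("utf-8")).hexdigest()
    return str(uuid.UUID(hashed_id))


# Example with a hypothetical alias and order id:
print(order_event_d_tag("ExampleCoordinator", 1234))
```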
@@ -1,12 +1,14 @@
 from secrets import token_urlsafe
 
 from decouple import config
-from api.models import Order
+from api.models import (
+    Order,
+    Notification,
+)
 from api.utils import get_session
 
 
-class Telegram:
+class Notifications:
     """Simple telegram messages using TG's API"""
 
     session = get_session()
@@ -29,13 +31,24 @@ class Telegram:
 
         return context
 
-    def send_message(self, chat_id, text):
+    def send_message(self, order, robot, title, description=""):
+        """Save a message for a user and sends it to Telegram"""
+        self.save_message(order, robot, title, description)
+        if robot.telegram_enabled:
+            self.send_telegram_message(robot.telegram_chat_id, title, description)
+
+    def save_message(self, order, robot, title, description=""):
+        """Save a message for a user"""
+        Notification.objects.create(
+            title=title, description=description, robot=robot, order=order
+        )
+
+    def send_telegram_message(self, chat_id, title, description=""):
         """sends a message to a user with telegram notifications enabled"""
 
         bot_token = config("TELEGRAM_TOKEN")
 
+        text = f"{title} {description}"
         message_url = f"https://api.telegram.org/bot{bot_token}/sendMessage?chat_id={chat_id}&text={text}"
 
         # if it fails, it should keep trying
         while True:
             try:
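After this refactor every notification is persisted as a `Notification` row and only optionally mirrored to Telegram. The model itself is not part of this hunk; a rough sketch of the fields implied by `save_message()` and the serializer further below (field types, lengths and `on_delete` options are assumptions, not the actual migration):

```python
# Hypothetical reconstruction of api.models.Notification based on the fields
# used in save_message() and ListNotificationSerializer; not the real model.
from django.db import models


class Notification(models.Model):
    robot = models.ForeignKey("api.Robot", on_delete=models.CASCADE)  # assumed FK / on_delete
    order = models.ForeignKey("api.Order", on_delete=models.CASCADE)  # assumed FK / on_delete
    title = models.CharField(max_length=240)                          # length assumed
    description = models.CharField(max_length=240, default="", blank=True)
    created_at = models.DateTimeField(auto_now_add=True)              # assumed
```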
@@ -49,119 +62,127 @@ class Telegram:
             lang = user.robot.telegram_lang_code
 
             if lang == "es":
-                text = f"🔔 Hola {user.username}, te enviaré notificaciones sobre tus órdenes en RoboSats."
+                title = f"🔔 Hola {user.username}, te enviaré notificaciones sobre tus órdenes en RoboSats."
             else:
-                text = f"🔔 Hey {user.username}, I will send you notifications about your RoboSats orders."
-            self.send_message(user.robot.telegram_chat_id, text)
+                title = f"🔔 Hey {user.username}, I will send you notifications about your RoboSats orders."
+            self.send_telegram_message(user.robot.telegram_chat_id, title)
             user.robot.telegram_welcomed = True
             user.robot.save(update_fields=["telegram_welcomed"])
         return
 
     def order_taken_confirmed(self, order):
-        if order.maker.robot.telegram_enabled:
-            lang = order.maker.robot.telegram_lang_code
-            if lang == "es":
-                text = f"✅ Hey {order.maker.username} ¡Tu orden con ID {order.id} ha sido tomada por {order.taker.username}!🥳 Visita http://{self.site}/order/{order.id} para continuar."
-            else:
-                text = f"✅ Hey {order.maker.username}, your order was taken by {order.taker.username}!🥳 Visit http://{self.site}/order/{order.id} to proceed with the trade."
-            self.send_message(order.maker.robot.telegram_chat_id, text)
+        lang = order.maker.robot.telegram_lang_code
+        if lang == "es":
+            title = f"✅ Hey {order.maker.username} ¡Tu orden con ID {order.id} ha sido tomada por {order.taker.username}!🥳"
+            description = f"Visita http://{self.site}/order/{order.id} para continuar."
+        else:
+            title = f"✅ Hey {order.maker.username}, your order was taken by {order.taker.username}!🥳"
+            description = (
+                f"Visit http://{self.site}/order/{order.id} to proceed with the trade."
+            )
+        self.send_message(order, order.maker.robot, title, description)
 
-        if order.taker.robot.telegram_enabled:
-            lang = order.taker.robot.telegram_lang_code
-            if lang == "es":
-                text = f"✅ Hey {order.taker.username}, acabas de tomar la orden con ID {order.id}."
-            else:
-                text = f"✅ Hey {order.taker.username}, you just took the order with ID {order.id}."
-            self.send_message(order.taker.robot.telegram_chat_id, text)
+        lang = order.taker.robot.telegram_lang_code
+        if lang == "es":
+            title = f"✅ Hey {order.taker.username}, acabas de tomar la orden con ID {order.id}."
+        else:
+            title = f"✅ Hey {order.taker.username}, you just took the order with ID {order.id}."
+        self.send_message(order, order.taker.robot, title)
 
         return
 
     def fiat_exchange_starts(self, order):
         for user in [order.maker, order.taker]:
-            if user.robot.telegram_enabled:
-                lang = user.robot.telegram_lang_code
-                if lang == "es":
-                    text = f"✅ Hey {user.username}, el depósito de garantía y el recibo del comprador han sido recibidos. Es hora de enviar el dinero fiat. Visita http://{self.site}/order/{order.id} para hablar con tu contraparte."
-                else:
-                    text = f"✅ Hey {user.username}, the escrow and invoice have been submitted. The fiat exchange starts now via the platform chat. Visit http://{self.site}/order/{order.id} to talk with your counterpart."
-                self.send_message(user.robot.telegram_chat_id, text)
+            lang = user.robot.telegram_lang_code
+            if lang == "es":
+                title = f"✅ Hey {user.username}, el depósito de garantía y el recibo del comprador han sido recibidos. Es hora de enviar el dinero fiat."
+                description = f"Visita http://{self.site}/order/{order.id} para hablar con tu contraparte."
+            else:
+                title = f"✅ Hey {user.username}, the escrow and invoice have been submitted. The fiat exchange starts now via the platform chat."
+                description = f"Visit http://{self.site}/order/{order.id} to talk with your counterpart."
+            self.send_message(order, user.robot, title, description)
         return
 
     def order_expired_untaken(self, order):
-        if order.maker.robot.telegram_enabled:
-            lang = order.maker.robot.telegram_lang_code
-            if lang == "es":
-                text = f"😪 Hey {order.maker.username}, tu orden con ID {order.id} ha expirado sin ser tomada por ningún robot. Visita http://{self.site}/order/{order.id} para renovarla."
-            else:
-                text = f"😪 Hey {order.maker.username}, your order with ID {order.id} has expired without a taker. Visit http://{self.site}/order/{order.id} to renew it."
-            self.send_message(order.maker.robot.telegram_chat_id, text)
+        lang = order.maker.robot.telegram_lang_code
+        if lang == "es":
+            title = f"😪 Hey {order.maker.username}, tu orden con ID {order.id} ha expirado sin ser tomada por ningún robot."
+            description = f"Visita http://{self.site}/order/{order.id} para renovarla."
+        else:
+            title = f"😪 Hey {order.maker.username}, your order with ID {order.id} has expired without a taker."
+            description = f"Visit http://{self.site}/order/{order.id} to renew it."
+        self.send_message(order, order.maker.robot, title, description)
         return
 
     def trade_successful(self, order):
         for user in [order.maker, order.taker]:
-            if user.robot.telegram_enabled:
-                lang = user.robot.telegram_lang_code
-                if lang == "es":
-                    text = f"🥳 ¡Tu orden con ID {order.id} ha finalizado exitosamente!⚡ Únete a nosotros en @robosats_es y ayúdanos a mejorar."
-                else:
-                    text = f"🥳 Your order with ID {order.id} has finished successfully!⚡ Join us @robosats and help us improve."
-                self.send_message(user.robot.telegram_chat_id, text)
+            lang = user.robot.telegram_lang_code
+            if lang == "es":
+                title = f"🥳 ¡Tu orden con ID {order.id} ha finalizado exitosamente!"
+                description = (
+                    "⚡ Únete a nosotros en @robosats_es y ayúdanos a mejorar."
+                )
+            else:
+                title = f"🥳 Your order with ID {order.id} has finished successfully!"
+                description = "⚡ Join us @robosats and help us improve."
+            self.send_message(order, user.robot, title, description)
         return
 
     def public_order_cancelled(self, order):
-        if order.maker.robot.telegram_enabled:
-            lang = order.maker.robot.telegram_lang_code
-            if lang == "es":
-                text = f"❌ Hey {order.maker.username}, has cancelado tu orden pública con ID {order.id}."
-            else:
-                text = f"❌ Hey {order.maker.username}, you have cancelled your public order with ID {order.id}."
-            self.send_message(order.maker.robot.telegram_chat_id, text)
+        lang = order.maker.robot.telegram_lang_code
+        if lang == "es":
+            title = f"❌ Hey {order.maker.username}, has cancelado tu orden pública con ID {order.id}."
+        else:
+            title = f"❌ Hey {order.maker.username}, you have cancelled your public order with ID {order.id}."
+        self.send_message(order, order.maker.robot, title)
         return
 
     def collaborative_cancelled(self, order):
         for user in [order.maker, order.taker]:
-            if user.robot.telegram_enabled:
-                lang = user.robot.telegram_lang_code
-                if lang == "es":
-                    text = f"❌ Hey {user.username}, tu orden con ID {str(order.id)} fue cancelada colaborativamente."
-                else:
-                    text = f"❌ Hey {user.username}, your order with ID {str(order.id)} has been collaboratively cancelled."
-                self.send_message(user.robot.telegram_chat_id, text)
+            lang = user.robot.telegram_lang_code
+            if lang == "es":
+                title = f"❌ Hey {user.username}, tu orden con ID {str(order.id)} fue cancelada colaborativamente."
+            else:
+                title = f"❌ Hey {user.username}, your order with ID {str(order.id)} has been collaboratively cancelled."
+            self.send_message(order, user.robot, title)
         return
 
     def dispute_opened(self, order):
         for user in [order.maker, order.taker]:
-            if user.robot.telegram_enabled:
-                lang = user.robot.telegram_lang_code
-                if lang == "es":
-                    text = f"⚖️ Hey {user.username}, la orden con ID {str(order.id)} ha entrado en disputa."
-                else:
-                    text = f"⚖️ Hey {user.username}, a dispute has been opened on your order with ID {str(order.id)}."
-                self.send_message(user.robot.telegram_chat_id, text)
+            lang = user.robot.telegram_lang_code
+            if lang == "es":
+                title = f"⚖️ Hey {user.username}, la orden con ID {str(order.id)} ha entrado en disputa."
+            else:
+                title = f"⚖️ Hey {user.username}, a dispute has been opened on your order with ID {str(order.id)}."
+            self.send_message(order, user.robot, title)
 
         admin_chat_id = config("TELEGRAM_COORDINATOR_CHAT_ID")
 
         if len(admin_chat_id) == 0:
             return
 
-        coordinator_text = f"There is a new dispute opened for the order with ID {str(order.id)}. Visit http://{self.site}/coordinator/api/order/{str(order.id)}/change to proceed."
-        self.send_message(admin_chat_id, coordinator_text)
+        coordinator_text = (
+            f"There is a new dispute opened for the order with ID {str(order.id)}."
+        )
+        coordinator_description = f"Visit http://{self.site}/coordinator/api/order/{str(order.id)}/change to proceed."
+        self.send_telegram_message(
+            admin_chat_id, coordinator_text, coordinator_description
+        )
 
         return
 
     def order_published(self, order):
-        if order.maker.robot.telegram_enabled:
-            lang = order.maker.robot.telegram_lang_code
-            # In weird cases the order cannot be found (e.g. it is cancelled)
-            queryset = Order.objects.filter(maker=order.maker)
-            if len(queryset) == 0:
-                return
-            order = queryset.last()
-            if lang == "es":
-                text = f"✅ Hey {order.maker.username}, tu orden con ID {str(order.id)} es pública en el libro de ordenes."
-            else:
-                text = f"✅ Hey {order.maker.username}, your order with ID {str(order.id)} is public in the order book."
-            self.send_message(order.maker.robot.telegram_chat_id, text)
+        lang = order.maker.robot.telegram_lang_code
+        # In weird cases the order cannot be found (e.g. it is cancelled)
+        queryset = Order.objects.filter(maker=order.maker)
+        if len(queryset) == 0:
+            return
+        order = queryset.last()
+        if lang == "es":
+            title = f"✅ Hey {order.maker.username}, tu orden con ID {str(order.id)} es pública en el libro de ordenes."
+        else:
+            title = f"✅ Hey {order.maker.username}, your order with ID {str(order.id)} is public in the order book."
+        self.send_message(order, order.maker.robot, title)
         return
 
     def new_chat_message(self, order, chat_message):
@@ -189,14 +210,56 @@ class Telegram:
             notification_reason = f"(You receive this notification because this was the first in-chat message. You will only be notified again if there is a gap bigger than {TIMEGAP} minutes between messages)"
 
         user = chat_message.receiver
-        if user.robot.telegram_enabled:
-            text = f"💬 Hey {user.username}, a new chat message in-app was sent to you by {chat_message.sender.username} for order ID {str(order.id)}. {notification_reason}"
-            self.send_message(user.robot.telegram_chat_id, text)
+        title = f"💬 Hey {user.username}, a new chat message in-app was sent to you by {chat_message.sender.username} for order ID {str(order.id)}."
+        self.send_message(order, user.robot, title, notification_reason)
 
         return
 
     def coordinator_cancelled(self, order):
-        if order.maker.robot.telegram_enabled:
-            text = f"🛠️ Your order with ID {order.id} has been cancelled by the coordinator {config('COORDINATOR_ALIAS', cast=str, default='NoAlias')} for the upcoming maintenance stop."
-            self.send_message(order.maker.robot.telegram_chat_id, text)
+        title = f"🛠️ Your order with ID {order.id} has been cancelled by the coordinator {config('COORDINATOR_ALIAS', cast=str, default='NoAlias')} for the upcoming maintenance stop."
+        self.send_message(order, order.maker.robot, title)
+        return
+
+    def dispute_closed(self, order):
+        lang = order.maker.robot.telegram_lang_code
+        if order.status == Order.Status.MLD:
+            # Maker lost dispute
+            looser = order.maker
+            winner = order.taker
+        elif order.status == Order.Status.TLD:
+            # Taker lost dispute
+            looser = order.taker
+            winner = order.maker
+
+        lang = looser.robot.telegram_lang_code
+        if lang == "es":
+            title = f"⚖️ Hey {looser.username}, has perdido la disputa en la orden con ID {str(order.id)}."
+        else:
+            title = f"⚖️ Hey {looser.username}, you lost the dispute on your order with ID {str(order.id)}."
+        self.send_message(order, looser.robot, title)
+
+        lang = winner.robot.telegram_lang_code
+        if lang == "es":
+            title = f"⚖️ Hey {winner.username}, has ganado la disputa en la orden con ID {str(order.id)}."
+        else:
+            title = f"⚖️ Hey {winner.username}, you won the dispute on your order with ID {str(order.id)}."
+        self.send_message(order, winner.robot, title)
+
+        return
+
+    def lightning_failed(self, order):
+        lang = order.maker.robot.telegram_lang_code
+        if order.type == Order.Types.BUY:
+            buyer = order.maker
+        else:
+            buyer = order.taker
+
+        if lang == "es":
+            title = f"⚡❌ Hey {buyer.username}, el pago lightning en la order con ID {str(order.id)} ha fallado."
+            description = "Intentalo de nuevo con una nueva factura o con otra wallet."
+        else:
+            title = f"⚡❌ Hey {buyer.username}, the lightning payment on your order with ID {str(order.id)} failed."
+            description = "Try again with a new invoice or from another wallet."
+
+        self.send_message(order, buyer.robot, title, description)
         return
@@ -112,7 +112,7 @@ class OrderViewSchema:
         - `11` "In dispute"
         - `12` "Collaboratively cancelled"
         - `13` "Sending satoshis to buyer"
-        - `14` "Sucessful trade"
+        - `14` "Successful trade"
        - `15` "Failed lightning network routing"
         - `16` "Wait for dispute resolution"
         - `17` "Maker lost dispute"
@@ -378,6 +378,21 @@ class BookViewSchema:
     }
 
 
+class NotificationSchema:
+    get = {
+        "summary": "Get robot notifications",
+        "description": "Get a list of notifications sent to the robot.",
+        "parameters": [
+            OpenApiParameter(
+                name="created_at",
+                location=OpenApiParameter.QUERY,
+                description=("Shows notifications created AFTER this date."),
+                type=str,
+            ),
+        ],
+    }
+
+
 class RobotViewSchema:
     get = {
         "summary": "Get robot info",
@@ -2,7 +2,7 @@ from decouple import config
 from decimal import Decimal
 from rest_framework import serializers
 
-from .models import MarketTick, Order
+from .models import MarketTick, Order, Notification
 
 RETRY_TIME = int(config("RETRY_TIME"))
 
@@ -490,6 +490,19 @@ class OrderDetailSerializer(serializers.ModelSerializer):
     )
 
 
+class ListNotificationSerializer(serializers.ModelSerializer):
+    status = serializers.SerializerMethodField(
+        help_text="The `status` of the order when the notification was trigered",
+    )
+
+    class Meta:
+        model = Notification
+        fields = ("title", "description", "order_id", "status", "created_at")
+
+    def get_status(self, notification) -> int:
+        return notification.order.status
+
+
 class OrderPublicSerializer(serializers.ModelSerializer):
     maker_nick = serializers.CharField(required=False)
     maker_hash_id = serializers.CharField(required=False)
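For reference, a sketch of the payload shape this serializer would yield for one notification, once the view further down injects `order_id`; all concrete values below are invented for illustration:

```python
# Illustrative only: example of one serialized notification as returned by
# GET /api/notifications/ after NotificationsView adds "order_id".
example_notification = {
    "title": "✅ Hey HeavyFastRobot419, your order was taken by SillySlowRobot771!🥳",
    "description": "Visit http://<coordinator>/order/1234 to proceed with the trade.",
    "order_id": 1234,                       # injected by NotificationsView
    "status": 6,                            # Order.status at notification time (integer choice)
    "created_at": "2024-06-01T12:00:00Z",
}
```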
53 api/tasks.py
@@ -1,3 +1,4 @@
+from asgiref.sync import async_to_sync
 from celery import shared_task
 from celery.exceptions import SoftTimeLimitExceeded
 
@@ -251,6 +252,20 @@ def cache_market():
     return
 
 
+@shared_task(name="", ignore_result=True, time_limit=120)
+def nostr_send_order_event(order_id=None):
+    if order_id:
+        from api.models import Order
+        from api.nostr import Nostr
+
+        order = Order.objects.get(id=order_id)
+
+        nostr = Nostr()
+        async_to_sync(nostr.send_order_event)(order)
+
+    return
+
+
 @shared_task(name="send_notification", ignore_result=True, time_limit=120)
 def send_notification(order_id=None, chat_message_id=None, message=None):
     if order_id:
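A minimal sketch of how this task could be enqueued from synchronous Django code once an order is worth announcing, assuming a saved `Order` with primary key `order.id` (the call site itself is not part of this hunk):

```python
# Sketch of a call site, not shown in this diff: enqueue the Nostr event task
# asynchronously so the request/worker thread never blocks on relay I/O.
from api.tasks import nostr_send_order_event


def publish_order_to_nostr(order) -> None:
    # Celery .delay() schedules nostr_send_order_event(order_id=...) on a worker.
    nostr_send_order_event.delay(order_id=order.id)
```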
@@ -263,48 +278,50 @@ def send_notification(order_id=None, chat_message_id=None, message=None):
         chat_message = Message.objects.get(id=chat_message_id)
         order = chat_message.order
 
-    taker_enabled = False if order.taker is None else order.taker.robot.telegram_enabled
-    if not (order.maker.robot.telegram_enabled or taker_enabled):
-        return
-
-    from api.notifications import Telegram
+    from api.notifications import Notifications
 
-    telegram = Telegram()
+    notifications = Notifications()
 
     if message == "welcome":
-        telegram.welcome(order)
+        notifications.welcome(order)
 
     elif message == "order_expired_untaken":
-        telegram.order_expired_untaken(order)
+        notifications.order_expired_untaken(order)
 
     elif message == "trade_successful":
-        telegram.trade_successful(order)
+        notifications.trade_successful(order)
 
     elif message == "public_order_cancelled":
-        telegram.public_order_cancelled(order)
+        notifications.public_order_cancelled(order)
 
     elif message == "taker_expired_b4bond":
-        telegram.taker_expired_b4bond(order)
+        notifications.taker_expired_b4bond(order)
 
     elif message == "order_published":
-        telegram.order_published(order)
+        notifications.order_published(order)
 
     elif message == "order_taken_confirmed":
-        telegram.order_taken_confirmed(order)
+        notifications.order_taken_confirmed(order)
 
     elif message == "fiat_exchange_starts":
-        telegram.fiat_exchange_starts(order)
+        notifications.fiat_exchange_starts(order)
 
     elif message == "dispute_opened":
-        telegram.dispute_opened(order)
+        notifications.dispute_opened(order)
 
     elif message == "collaborative_cancelled":
-        telegram.collaborative_cancelled(order)
+        notifications.collaborative_cancelled(order)
 
     elif message == "new_chat_message":
-        telegram.new_chat_message(order, chat_message)
+        notifications.new_chat_message(order, chat_message)
 
     elif message == "coordinator_cancelled":
-        telegram.coordinator_cancelled(order)
+        notifications.coordinator_cancelled(order)
 
+    elif message == "dispute_closed":
+        notifications.dispute_closed(order)
+
+    elif message == "lightning_failed":
+        notifications.lightning_failed(order)
+
     return
@@ -15,6 +15,7 @@ from .views import (
     RobotView,
     StealthView,
     TickView,
+    NotificationsView,
 )
 
 urlpatterns = [
@@ -36,4 +37,5 @@ urlpatterns = [
     path("ticks/", TickView.as_view(), name="ticks"),
     path("stealth/", StealthView.as_view(), name="stealth"),
     path("chat/", ChatView.as_view({"get": "get", "post": "post"}), name="chat"),
+    path("notifications/", NotificationsView.as_view(), name="notifications"),
 ]
@@ -141,7 +141,7 @@ def get_devfund_pubkey(network: str) -> str:
     """
 
     session = get_session()
-    url = "https://raw.githubusercontent.com/RoboSats/robosats/main/devfund_pubey.json"
+    url = "https://raw.githubusercontent.com/RoboSats/robosats/main/devfund_pubkey.json"
 
     try:
         response = session.get(url)
@@ -188,8 +188,7 @@ def get_exchange_rates(currencies):
                 blockchain_rates.append(
                     float(blockchain_prices[currency]["last"])
                 )
-            except Exception as e:
-                print(e)
+            except Exception:
                 blockchain_rates.append(np.nan)
         api_rates.append(blockchain_rates)
 
43 api/views.py
@@ -5,6 +5,8 @@ from django.conf import settings
 from django.contrib.auth.models import User
 from django.db.models import Q, Sum
 from django.utils import timezone
+from django.utils.dateparse import parse_datetime
+from django.http import HttpResponseBadRequest
 from drf_spectacular.utils import extend_schema
 from rest_framework import status, viewsets
 from rest_framework.authentication import TokenAuthentication
@@ -14,8 +16,15 @@ from rest_framework.response import Response
 from rest_framework.views import APIView
 
 from api.logics import Logics
-from api.models import Currency, LNPayment, MarketTick, OnchainPayment, Order
-from api.notifications import Telegram
+from api.models import (
+    Currency,
+    LNPayment,
+    MarketTick,
+    OnchainPayment,
+    Order,
+    Notification,
+)
+from api.notifications import Notifications
 from api.oas_schemas import (
     BookViewSchema,
     HistoricalViewSchema,
@@ -28,6 +37,7 @@ from api.oas_schemas import (
     RobotViewSchema,
     StealthViewSchema,
     TickViewSchema,
+    NotificationSchema,
 )
 from api.serializers import (
     ClaimRewardSerializer,
@@ -39,6 +49,7 @@ from api.serializers import (
     StealthSerializer,
     TickSerializer,
     UpdateOrderSerializer,
+    ListNotificationSerializer,
 )
 from api.utils import (
     compute_avg_premium,
@@ -659,7 +670,7 @@ class RobotView(APIView):
         context["last_login"] = user.last_login
 
         # Adds/generate telegram token and whether it is enabled
-        context = {**context, **Telegram.get_context(user)}
+        context = {**context, **Notifications.get_context(user)}
 
         # return active order or last made order if any
         has_no_active_order, _, order = Logics.validate_already_maker_or_taker(
@@ -730,6 +741,32 @@ class BookView(ListAPIView):
         return Response(book_data, status=status.HTTP_200_OK)
 
 
+class NotificationsView(ListAPIView):
+    authentication_classes = [TokenAuthentication]
+    permission_classes = [IsAuthenticated]
+    serializer_class = ListNotificationSerializer
+
+    @extend_schema(**NotificationSchema.get)
+    def get(self, request, format=None):
+        robot = request.user.robot
+        queryset = Notification.objects.filter(robot=robot).order_by("-created_at")
+        created_at = request.GET.get("created_at")
+
+        if created_at:
+            created_at = parse_datetime(created_at)
+            if not created_at:
+                return HttpResponseBadRequest("Invalid date format")
+            queryset = queryset.filter(created_at__gte=created_at)
+
+        notification_data = []
+        for notification in queryset:
+            data = self.serializer_class(notification).data
+            data["order_id"] = notification.order.id
+            notification_data.append(data)
+
+        return Response(notification_data, status=status.HTTP_200_OK)
+
+
 class InfoView(viewsets.ViewSet):
     serializer_class = InfoSerializer
 
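A short sketch of how a robot client could poll the new endpoint, authenticating with its DRF token and using the `created_at` filter to fetch only notifications newer than the last poll; the host, token and timestamp below are placeholders:

```python
# Hypothetical client-side poll of the new /api/notifications/ endpoint.
import requests

BASE = "http://<coordinator-host>/api"               # placeholder host
HEADERS = {"Authorization": "Token <robot-token>"}   # DRF TokenAuthentication header


def fetch_new_notifications(since_iso: str):
    # e.g. since_iso = "2024-06-01T12:00:00", parsed server-side by parse_datetime()
    resp = requests.get(
        f"{BASE}/notifications/",
        headers=HEADERS,
        params={"created_at": since_iso},
        timeout=30,
    )
    resp.raise_for_status()
    # list of {"title", "description", "order_id", "status", "created_at"}
    return resp.json()
```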
41 desktopApp/Readme.md (new file)
@@ -0,0 +1,41 @@
+# RoboSats Desktop App
+
+RoboSats desktop app serves the RoboSats frontend app directly and redirects all API requests to RoboSats P2P market coordinator through your TOR proxy.
+
+## How to Use
+
+### Step 1: Clone the Repository
+
+First, clone the repository to your local machine:
+
+```bash
+git clone https://github.com/RoboSats/robosats.git
+cd robosats
+```
+
+### Step 2: Install Dependencies
+```bash
+cd desktopApp
+npm install
+```
+
+### Step 3: Run the App Locally
+```bash
+npm run start
+```
+
+### Step 4: Package the App
+
+To package the app for different platforms (Linux, Windows, macOS), use the corresponding npm commands:
+
+```bash
+npm run package-linux
+npm run package-win
+npm run package-mac
+```
+
+### Additional Information
+This desktop app ensures all API requests are redirected through a TOR proxy to maintain privacy and anonymity while accessing the RoboSats P2P market coordinator.
BIN desktopApp/assets/icon/Robosats.icns (new binary file, not shown)
BIN desktopApp/assets/icon/Robosats.ico (new binary file, not shown; 4.2 KiB)
133 desktopApp/assets/icon/Robosats.svg (new file)
@@ -0,0 +1,133 @@
+[Robosats.svg: 256×256 vector icon, Adobe Illustrator export; a light gradient background (#CCCCCC → #E1E1E1 → #FFFFFF) behind the RoboSats logo paths, each filled with linear gradients running #1976D2 → #2E69CC → #6548BE → #9C27B0; full SVG markup (133 lines, 9.1 KiB) not reproduced here]
94 desktopApp/index.js (new file)
@@ -0,0 +1,94 @@
+// Transpiled CommonJS build of desktopApp/index.ts (see the source file below):
+// spawns the bundled Tor binary for the current platform, creates the BrowserWindow,
+// redirects file:///static/* requests, routes the session through the
+// socks://localhost:9050 proxy, and kills the Tor process on window-all-closed.
+// (94 generated lines not reproduced here; the logic is identical to index.ts.)
+//# sourceMappingURL=index.js.map
1 desktopApp/index.js.map (new file)
@@ -0,0 +1 @@
+[single-line source map generated by tsc for index.js; mappings not reproduced here]
106
desktopApp/index.ts
Normal file
106
desktopApp/index.ts
Normal file
@ -0,0 +1,106 @@
|
|||||||
|
// Modules to control application life and create native browser window
|
||||||
|
import { app, BrowserWindow, session, protocol, net } from 'electron';
|
||||||
|
import { spawn, ChildProcessWithoutNullStreams } from 'child_process';
|
||||||
|
import * as path from 'path';
|
||||||
|
import * as os from "os";
|
||||||
|
|
||||||
|
let tor: ChildProcessWithoutNullStreams | null = null;
|
||||||
|
|
||||||
|
// Function to determine the current OS and find the appropriate Tor binary
|
||||||
|
|
||||||
|
function checkPlatformAndRunTor(): void {
|
||||||
|
const platform = os.platform();
|
||||||
|
|
||||||
|
switch (platform) {
|
||||||
|
case 'win32':
|
||||||
|
tor = spawn(path.join(__dirname, '/tor/tor-win/tor/tor.exe'));
|
||||||
|
break;
|
||||||
|
case 'darwin':
|
||||||
|
tor = spawn(path.join(__dirname, '/tor/tor-mac/tor/tor'));
|
||||||
|
break;
|
||||||
|
case 'linux':
|
||||||
|
tor = spawn(path.join(__dirname, '/tor/tor-linux/tor/tor'));
|
||||||
|
break;
|
||||||
|
default:
|
||||||
|
throw new Error(`Unsupported platform: ${platform}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Function to start Tor process
|
||||||
|
checkPlatformAndRunTor()
|
||||||
|
|
||||||
|
|
||||||
|
// Listen for Tor process stdout data
|
||||||
|
tor.stdout.on("data", (data: Buffer) => {
|
||||||
|
const message = data.toString();
|
||||||
|
console.log(`Data received: ${message}`);
|
||||||
|
});
|
||||||
|
|
||||||
|
// Listen for Tor process stderr data
|
||||||
|
tor.stderr.on("data", (data: Buffer) => {
|
||||||
|
console.error(`Error received: ${data.toString()}`);
|
||||||
|
app.exit(1); // Exit the app if there's an error in the Tor process
|
||||||
|
});
|
||||||
|
|
||||||
|
// Function to create the main application window
|
||||||
|
function createWindow(): void {
|
||||||
|
// Create the browser window with specific dimensions
|
||||||
|
const mainWindow = new BrowserWindow({
|
||||||
|
width: 1200,
|
||||||
|
height: 800,
|
||||||
|
icon:path.join(__dirname, '/static/assets/images/favicon-32x32.png'),
|
||||||
|
webPreferences: {
|
||||||
|
nodeIntegration: false, // Disable Node.js integration in the renderer
|
||||||
|
contextIsolation: true, // Enable context isolation for security
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
// Load the index.html file from the app directory
|
||||||
|
mainWindow.loadURL(`file://${path.resolve(__dirname, 'index.html#/garage')}`, {
|
||||||
|
extraHeaders: "pragma: no-cache\n" // Prevent caching of the loaded file
|
||||||
|
});
|
||||||
|
|
||||||
|
// Handle failed load attempts by reloading the file
|
||||||
|
mainWindow.webContents.on("did-fail-load", () => {
|
||||||
|
console.log("Failed to load the page, retrying...");
|
||||||
|
mainWindow.loadURL(`file://${__dirname}/index.html#/garage`);
|
||||||
|
});
|
||||||
|
|
||||||
|
// Uncomment the following line to open the DevTools
|
||||||
|
// mainWindow.webContents.openDevTools();
|
||||||
|
}
|
||||||
|
|
||||||
|
// This method is called when Electron has finished initialization
|
||||||
|
app.whenReady().then(() => {
|
||||||
|
// Create the window after the app is ready
|
||||||
|
createWindow();
|
||||||
|
|
||||||
|
// Re-create a window if the app is activated and there are no other windows open (MacOS specific behavior)
|
||||||
|
app.on("activate", () => {
|
||||||
|
if (BrowserWindow.getAllWindows().length === 0) createWindow();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
// Setup the app session when Electron is ready
|
||||||
|
app.on("ready", () => {
|
||||||
|
// Redirect requests to static files
|
||||||
|
session.defaultSession.webRequest.onBeforeRequest({ urls: ['file:///static/*'] }, (details, callback) => {
|
||||||
|
const url = details.url;
|
||||||
|
const modifiedUrl = url.slice(7);
|
||||||
|
const staticFilePath = path.join(__dirname, modifiedUrl);
|
||||||
|
callback({ redirectURL: `file://${staticFilePath}` });
|
||||||
|
});
|
||||||
|
|
||||||
|
// Set the proxy for the session to route through Tor
|
||||||
|
session.defaultSession.setProxy({
|
||||||
|
proxyRules: "socks://localhost:9050",
|
||||||
|
proxyBypassRules: "<local>",
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
// Handle all windows closed event except on macOS
|
||||||
|
app.on("window-all-closed", () => {
|
||||||
|
// Terminate the Tor process if it exists
|
||||||
|
tor?.kill();
|
||||||
|
if (process.platform !== "darwin") app.quit();
|
||||||
|
});
|
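The main process above points Electron's session at `socks://localhost:9050`, the default SOCKS port of the bundled Tor it spawns. A quick way to confirm that proxy is actually reachable is the check below; the command and endpoint are an assumption outside this diff, not part of the app.
```shell
# Assumes the desktop app (or the bundled tor binary) is already running locally.
# check.torproject.org reports whether the request arrived over Tor.
curl --socks5-hostname localhost:9050 https://check.torproject.org/api/ip
```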
2834 desktopApp/package-lock.json generated Normal file
File diff suppressed because it is too large
48 desktopApp/package.json Normal file
@ -0,0 +1,48 @@
{
  "name": "desktop-app",
  "version": "0.7.1",
  "description": "",
  "main": "index.js",
  "scripts": {
    "start": "electron .",
    "compile": "./node_modules/.bin/tsc",
    "test": "echo \"Error: no test specified\" && exit 1",
    "package-linux": "npx @electron/packager . Robosats --platform=linux --arch=x64 --icon=./assets/icon/Robosats.svg --overwrite --out=release-builds",
    "package-win": "npx @electron/packager . Robosats --platform=win32 --arch=ia32 --icon=./assets/icon/Robosats.ico --overwrite --out=release-builds",
    "package-mac": "npx @electron/packager . Robosats --platform=darwin --arch=x64 --icon=./assets/icon/Robosats.icns --overwrite --out=release-builds"
  },
  "author": "",
  "license": "ISC",
  "devDependencies": {
    "@electron/packager": "^18.3.2",
    "electron": "^30.0.3",
    "typescript": "^5.4.5"
  },
  "dependencies": {
    "cors": "^2.8.5",
    "express": "^4.21.1"
  },
  "build": {
    "appId": "com.electron.robosats",
    "productName": "RobosatsApp",
    "directories": {
      "output": "dist"
    },
    "win": {
      "target": [
        "NSIS"
      ]
    },
    "mac": {
      "target": [
        "dmg"
      ]
    },
    "linux": {
      "target": [
        "AppImage",
        "deb"
      ]
    }
  }
}
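Taken together with the `scripts` above, a plausible local workflow for the desktop app looks like the following; the exact sequence is an assumption, and it presumes the compiled web frontend has already been placed next to `index.js` as `index.html` plus `static/` assets.
```shell
cd desktopApp
npm install            # pulls electron, typescript and @electron/packager
npm run compile        # tsc: index.ts -> index.js (tsconfig.json sets outDir ".")
npm start              # electron . — spawns the bundled tor and opens the window
npm run package-linux  # optional: build a release with @electron/packager
```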
28 desktopApp/tor/README.CONJURE.md Normal file
@ -0,0 +1,28 @@
# Conjure

[Conjure](https://jhalderm.com/pub/papers/conjure-ccs19.pdf) is an anti-censorship tool in the refraction networking (a.k.a. decoy routing) lineage of circumvention systems. The key innovation of Conjure is to turn the unused IP address space of deploying ISPs into a large pool of **phantom** proxies that users can connect to. Due to the size of unused IPv6 address space and the potential for collateral damage against real websites hosted by the deploying ISPs, Conjure provides an effective solution to the problem of censors enumerating deployed bridges or proxies.

Conjure is currently deployed on the University of Colorado network and a small-to-mid-size ISP in Michigan.

# Conjure Pluggable Transport for Tor

This repository is an implementation of both the client and bridge side of a Tor pluggable transport that uses the deployed Conjure network to allow users to connect to the Tor network. The client side calls the [`gotapdance` library](https://github.com/refraction-networking/gotapdance) to communicate with deployed Conjure stations and route client traffic through the phantom proxies assigned by the station. The bridge side receives [haproxy](https://www.haproxy.org/download/1.8/doc/proxy-protocol.txt) connections from the Conjure station that wrap the proxied client traffic.

# Deployment details

We currently have deployed a low capacity Conjure bridge named [Haunt](https://metrics.torproject.org/rs.html#details/A84C946BF4E14E63A3C92E140532A4594F2C24CD). To connect through this bridge, use the `torrc` file in the `client/` directory as follows:

```
cd client/
tor -f torrc
```

# Warnings

This tool and its deployment are still under active development. We are still working on securing the connection between the deployed Conjure stations and the Conjure bridge. We are also working on improving the censorship resistance of the registration connection between the client and the station. Do not expect this to work out of the box in all areas.

The Conjure station sometimes suffers from a heavy load of users. When this happens, connections will fail. If you are testing this out, try waiting a while and trying again later.

# Conjure development

Due to the complex nature of the Conjure deployment, it can be difficult to set up a local development environment. Check out [phantombox](https://gitlab.torproject.org/cohosh/phantombox) for an automated libvirt-based setup that works on Linux.
109 desktopApp/tor/README.SNOWFLAKE.md Normal file
@ -0,0 +1,109 @@
# Snowflake

[![Build Status](https://travis-ci.org/keroserene/snowflake.svg?branch=master)](https://travis-ci.org/keroserene/snowflake)

Pluggable Transport using WebRTC, inspired by Flashproxy.

<!-- START doctoc generated TOC please keep comment here to allow auto update -->
<!-- DON'T EDIT THIS SECTION, INSTEAD RE-RUN doctoc TO UPDATE -->
**Table of Contents**

- [Structure of this Repository](#structure-of-this-repository)
- [Usage](#usage)
  - [Using Snowflake with Tor](#using-snowflake-with-tor)
  - [Running a Snowflake Proxy](#running-a-snowflake-proxy)
  - [Using the Snowflake Library with Other Applications](#using-the-snowflake-library-with-other-applications)
- [Test Environment](#test-environment)
- [FAQ](#faq)
- [More info and links](#more-info-and-links)

<!-- END doctoc generated TOC please keep comment here to allow auto update -->

### Structure of this Repository

- `broker/` contains code for the Snowflake broker
- `doc/` contains Snowflake documentation and manpages
- `client/` contains the Tor pluggable transport client and client library code
- `common/` contains generic libraries used by multiple pieces of Snowflake
- `proxy/` contains code for the Go standalone Snowflake proxy
- `probetest/` contains code for a NAT probetesting service
- `server/` contains the Tor pluggable transport server and server library code

### Usage

Snowflake is currently deployed as a pluggable transport for Tor.

#### Using Snowflake with Tor

To use the Snowflake client with Tor, you will need to add the appropriate `Bridge` and `ClientTransportPlugin` lines to your [torrc](https://2019.www.torproject.org/docs/tor-manual.html.en) file. See the [client README](client) for more information on building and running the Snowflake client.

#### Running a Snowflake Proxy

You can contribute to Snowflake by running a Snowflake proxy. We have the option to run a proxy in your browser or as a standalone Go program. See our [community documentation](https://community.torproject.org/relay/setup/snowflake/) for more details.

#### Using the Snowflake Library with Other Applications

Snowflake can be used as a Go API, and adheres to the [v2.1 pluggable transports specification](). For more information on using the Snowflake Go library, see the [Snowflake library documentation](doc/using-the-snowflake-library.md).

### Test Environment

There is a Docker-based test environment at https://github.com/cohosh/snowbox.

### FAQ

**Q: How does it work?**

In the Tor use-case:

1. Volunteers visit websites which host the "snowflake" proxy (just like flashproxy).
2. Tor clients automatically find available browser proxies via the Broker (the domain-fronted signaling channel).
3. Tor client and browser proxy establish a WebRTC peer connection.
4. Proxy connects to some relay.
5. Tor occurs.

More detailed information about how clients, snowflake proxies, and the Broker fit together is on the way...

**Q: What are the benefits of this PT compared with other PTs?**

Snowflake combines the advantages of flashproxy and meek. Primarily:

- It has the convenience of Meek, but can support magnitudes more users with negligible CDN costs. (Domain fronting is only used for brief signalling / NAT-piercing to set up the P2P WebRTC DataChannels which handle the actual traffic.)

- Arbitrarily high numbers of volunteer proxies are possible like in flashproxy, but NATs are no longer a usability barrier - no need for manual port forwarding!

**Q: Why is this called Snowflake?**

It utilizes the "ICE" negotiation via WebRTC, and also involves a great abundance of ephemeral and short-lived (and special!) volunteer proxies...

### More info and links

We have more documentation in the [Snowflake wiki](https://gitlab.torproject.org/tpo/anti-censorship/pluggable-transports/snowflake/-/wikis/home) and at https://snowflake.torproject.org/.

##### -- Android AAR Reproducible Build Setup --

Using `gomobile` it is possible to build snowflake as shared libraries for all the architectures supported by Android. This is in the _.gitlab-ci.yml_, which runs in GitLab CI. It is also possible to run this setup in a Virtual Machine using [vagrant](https://www.vagrantup.com/). Just run `vagrant up` and it will create and provision the VM. `vagrant ssh` to get into the VM to use it as a development environment.

##### uTLS Settings

Snowflake communicates with a broker that serves as its signaling server over a TLS-based domain-fronted connection, which may be identified by its use of the Go TLS stack.

uTLS is a software library designed to imitate the TLS Client Hello fingerprint of browsers or other popular software's TLS stack, to evade censorship based on TLS Client Hello fingerprinting; enable it with `-utls-imitate`. You can use `-version` to see a list of supported values.

Depending on client and server configuration, it may not always work as expected, as not all extensions are correctly implemented.

You can also remove SNI (Server Name Indication) from the Client Hello to evade censorship with `-utls-nosni`; not all servers support this.
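The proxy section above defers to the community documentation; as a rough sketch only (directory layout taken from the repository structure listed earlier, Go toolchain assumed), the standalone Go proxy can be built and run from `proxy/`:
```shell
cd proxy/
go build      # produces a ./proxy binary named after the directory
./proxy       # starts a standalone Snowflake proxy with default settings
```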
263 desktopApp/tor/README.WEBTUNNEL.md Normal file
@ -0,0 +1,263 @@
# WebTunnel

Pluggable Transport based on HTTP Upgrade (HTTPT)

WebTunnel is a pluggable transport that attempts to imitate web browsing activity, based on [HTTPT](https://censorbib.nymity.ch/#Frolov2020b).

## Client Usage
Connect to a WebTunnel server with a Tor configuration file like:
```
UseBridges 1
DataDirectory datadir

ClientTransportPlugin webtunnel exec ./client

Bridge webtunnel 192.0.2.3:1 url=https://akbwadp9lc5fyyz0cj4d76z643pxgbfh6oyc-167-71-71-157.sslip.io/5m9yq0j4ghkz0fz7qmuw58cvbjon0ebnrsp0

SocksPort auto

Log info
```
## Server Setup

#### Install Tor
On a Debian system, first install tor normally with
```
apt install apt-transport-https
lsb_release -c
nano /etc/apt/sources.list.d/tor.list
wget -qO- https://deb.torproject.org/torproject.org/A3C4F0F979CAA22CDBA8F512EE8CBC9E886DDD89.asc | gpg --dearmor | tee /usr/share/keyrings/tor-archive-keyring.gpg >/dev/null
apt update
apt install tor deb.torproject.org-keyring
```

### Disable default instance
The default Tor configuration is not useful for this setup, so the next step is to disable it.
```
systemctl stop tor@default.service
systemctl mask tor@default.service
```

### Get Environment Ready
```
#copy server file to server
scp server root@$SERVER_ADDRESS:/var/lib/torwebtunnel/webtunnel
```

then create the server torrc at `/var/lib/torwebtunnel/torrc`
```
BridgeRelay 1

ORPort 10000

ServerTransportPlugin webtunnel exec /var/lib/torwebtunnel/webtunnel

ServerTransportListenAddr webtunnel 127.0.0.1:11000

ExtORPort auto

ContactInfo WebTunnel email: tor.relay.email@torproject.net ciissversion:2

Nickname WebTunnelTest

PublishServerDescriptor 1
BridgeDistribution none

DataDirectory /var/lib/torwebtunnel/tor-data
CacheDirectory /tmp/tor-tmp-torwebtunnel

SocksPort 0
```

#### Configure service unit file
Create a service unit file as follows:
```
[Unit]
Description=Tor Web Tunnel

[Service]
Type=simple
DynamicUser=yes
PrivateUsers=true
PrivateMounts=true
ProtectSystem=strict
PrivateTmp=true
PrivateDevices=true
ProtectClock=true
NoNewPrivileges=true
ProtectHome=tmpfs
ProtectKernelModules=true
ProtectKernelLogs=true

StateDirectory=torwebtunnel

ExecStart=/usr/bin/tor -f /var/lib/torwebtunnel/torrc --RunAsDaemon 0

[Install]
WantedBy=default.target
```

#### Obtain Certificate
WebTunnel requires a valid TLS certificate; to obtain one:
```
curl https://get.acme.sh | sh -s email=my@example.com
~/.acme.sh/acme.sh --issue --standalone --domain $SERVER_ADDRESS
```

#### Install & Configure Nginx
To coexist with other content at a single port, it is necessary to install a reverse proxy like nginx:
```
apt install nginx
```

And then configure HTTP Upgrade forwarding at /etc/nginx/nginx.conf.
```
--- a/before.conf
+++ b/after.conf
@@ -60,6 +60,13 @@ http {

	include /etc/nginx/conf.d/*.conf;
	include /etc/nginx/sites-enabled/*;
+
+	#WebSocket Support
+	map $http_upgrade $connection_upgrade {
+		default upgrade;
+		'' close;
+	}
+
 }
```

Finally, add the HTTP forwarding settings to a new file at /etc/nginx/sites-enabled.
```
server {
    listen [::]:443 ssl http2;
    listen 443 ssl http2;
    server_name $SERVER_ADDRESS;
    #ssl on;

    # certs sent to the client in SERVER HELLO are concatenated in ssl_certificate
    ssl_certificate /etc/nginx/ssl/fullchain.cer;
    ssl_certificate_key /etc/nginx/ssl/key.key;

    ssl_session_timeout 15m;

    ssl_protocols TLSv1.2 TLSv1.3;

    ssl_ciphers ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:DHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384;

    ssl_prefer_server_ciphers off;

    ssl_session_cache shared:MozSSL:50m;
    #ssl_ecdh_curve secp521r1,prime256v1,secp384r1;
    ssl_session_tickets off;

    add_header Strict-Transport-Security "max-age=63072000" always;

    location /$PATH {
        proxy_pass http://127.0.0.1:11000;
        proxy_http_version 1.1;

        ###Set WebSocket headers ####
        proxy_set_header Upgrade $http_upgrade;
        proxy_set_header Connection $connection_upgrade;

        ### Set Proxy headers ####
        proxy_set_header Accept-Encoding "";
        proxy_set_header Host $host;
        proxy_set_header X-Real-IP $remote_addr;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
        proxy_set_header X-Forwarded-Proto $scheme;
        add_header Front-End-Https on;

        proxy_redirect off;
    }

}
```

## Docker Setup

WebTunnel is a new pluggable transport available for bridge operators.

### Prerequisites
An existing website using an nginx load balancer to handle traffic (other load balancers are currently untested).

Traffic handled directly, without a CDN (CDN passthrough is currently untested).

A container runtime like Docker.

### Configure nginx Forwarding
If you haven't already, configure WebSocket forwarding support in nginx by configuring HTTP Upgrade forwarding at /etc/nginx/nginx.conf:
```
--- a/before.conf
+++ b/after.conf
@@ -60,6 +60,13 @@ http {

	include /etc/nginx/conf.d/*.conf;
	include /etc/nginx/sites-enabled/*;
+
+	#WebSocket Support
+	map $http_upgrade $connection_upgrade {
+		default upgrade;
+		'' close;
+	}
+
 }
```
And add a forwarded path under one of the served domains, typically defined in files within `/etc/nginx/sites-enabled/`; replace $PATH with a random string (which you could generate with `echo $(cat /dev/urandom | tr -cd "qwertyuiopasdfghjklzxcvbnmMNBVCXZLKJHGFDSAQWERTUIOP0987654321"|head -c 24)`):
```
location /$PATH {
    proxy_pass http://127.0.0.1:11000;
    proxy_http_version 1.1;

    ###Set WebSocket headers ####
    proxy_set_header Upgrade $http_upgrade;
    proxy_set_header Connection $connection_upgrade;

    ### Set Proxy headers ####
    proxy_set_header Accept-Encoding "";
    proxy_set_header Host $host;
    proxy_set_header X-Real-IP $remote_addr;
    proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
    proxy_set_header X-Forwarded-Proto $scheme;
    add_header Front-End-Https on;

    proxy_redirect off;
}
```

### Install Docker Runtime (if necessary)
```
apt install curl sudo
curl -fsSL https://get.docker.com -o get-docker.sh
sudo sh ./get-docker.sh
```

### Run Dockerized WebTunnel Server
Replace `URL` with your domain and path, and `OPERATOR_EMAIL` with your email address, then run:
```
truncate --size 0 .env
echo "URL=https://yourdomain/and/path" >> .env
echo "OPERATOR_EMAIL=your@email.org" >> .env
echo "BRIDGE_NICKNAME=WTBr$(cat /dev/urandom | tr -cd 'qwertyuiopasdfghjklzxcvbnmMNBVCXZLKJHGFDSAQWERTUIOP0987654321'|head -c 10)" >> .env
echo "GENEDORPORT=4$(cat /dev/urandom | tr -cd '0987654321'|head -c 4)" >> .env
```
This will create an environment file for the configuration of the webtunnel bridge.

After creating the configuration file, download the webtunnel docker compose file and instantiate it.
````shell
curl https://gitlab.torproject.org/tpo/anti-censorship/pluggable-transports/webtunnel/-/raw/main/release/container/docker-compose.yml?inline=false > docker-compose.yml
docker compose up -d
````
It includes auto-update by default, and will update the webtunnel bridge server without any further action. Remove `watchtower` to disable this behavior.

### Get Bridge Line and Check it is Running
You can obtain the bridge line and verify that it is working by running
```shell
docker compose exec webtunnel-bridge get-bridge-line.sh
```
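Beyond `get-bridge-line.sh`, two hedged sanity checks; the service name comes from the command above, while the behaviour of a plain GET against the PT path is an assumption:
```shell
# Follow the bridge container's logs to see it start and register
docker compose logs -f webtunnel-bridge

# A plain GET against the forwarded path should be answered by the PT
# (an error status is expected; a timeout usually means the proxy_pass is wrong)
curl -i https://yourdomain/and/path
```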
359517 desktopApp/tor/geoip Normal file
File diff suppressed because it is too large
155241 desktopApp/tor/geoip6 Normal file
File diff suppressed because it is too large
1 desktopApp/tor/tor-linux/data/geoip Symbolic link
@ -0,0 +1 @@
../../geoip
1 desktopApp/tor/tor-linux/data/geoip6 Symbolic link
@ -0,0 +1 @@
../../geoip6
BIN desktopApp/tor/tor-linux/tor/libcrypto.so.3 Executable file
Binary file not shown.
BIN desktopApp/tor/tor-linux/tor/libevent-2.1.so.7 Executable file
Binary file not shown.
BIN desktopApp/tor/tor-linux/tor/libssl.so.3 Executable file
Binary file not shown.
BIN desktopApp/tor/tor-linux/tor/libstdc++.so.6 Executable file
Binary file not shown.
@ -0,0 +1 @@
../../../README.CONJURE.md
@ -0,0 +1 @@
../../../README.SNOWFLAKE.md
@ -0,0 +1 @@
../../../README.WEBTUNNEL.md
BIN desktopApp/tor/tor-linux/tor/pluggable_transports/conjure-client Executable file
Binary file not shown.
BIN desktopApp/tor/tor-linux/tor/pluggable_transports/lyrebird Executable file
Binary file not shown.
@ -0,0 +1,32 @@
{
  "recommendedDefault" : "obfs4",
  "pluggableTransports" : {
    "lyrebird" : "ClientTransportPlugin meek_lite,obfs2,obfs3,obfs4,scramblesuit exec ${pt_path}lyrebird",
    "snowflake" : "ClientTransportPlugin snowflake exec ${pt_path}snowflake-client",
    "webtunnel" : "ClientTransportPlugin webtunnel exec ${pt_path}webtunnel-client",
    "conjure" : "ClientTransportPlugin conjure exec ${pt_path}conjure-client -registerURL https://registration.refraction.network/api"
  },
  "bridges" : {
    "meek-azure" : [
      "meek_lite 192.0.2.18:80 BE776A53492E1E044A26F17306E1BC46A55A1625 url=https://meek.azureedge.net/ front=ajax.aspnetcdn.com"
    ],
    "obfs4" : [
      "obfs4 192.95.36.142:443 CDF2E852BF539B82BD10E27E9115A31734E378C2 cert=qUVQ0srL1JI/vO6V6m/24anYXiJD3QP2HgzUKQtQ7GRqqUvs7P+tG43RtAqdhLOALP7DJQ iat-mode=1",
      "obfs4 37.218.245.14:38224 D9A82D2F9C2F65A18407B1D2B764F130847F8B5D cert=bjRaMrr1BRiAW8IE9U5z27fQaYgOhX1UCmOpg2pFpoMvo6ZgQMzLsaTzzQNTlm7hNcb+Sg iat-mode=0",
      "obfs4 85.31.186.98:443 011F2599C0E9B27EE74B353155E244813763C3E5 cert=ayq0XzCwhpdysn5o0EyDUbmSOx3X/oTEbzDMvczHOdBJKlvIdHHLJGkZARtT4dcBFArPPg iat-mode=0",
      "obfs4 85.31.186.26:443 91A6354697E6B02A386312F68D82CF86824D3606 cert=PBwr+S8JTVZo6MPdHnkTwXJPILWADLqfMGoVvhZClMq/Urndyd42BwX9YFJHZnBB3H0XCw iat-mode=0",
      "obfs4 193.11.166.194:27015 2D82C2E354D531A68469ADF7F878FA6060C6BACA cert=4TLQPJrTSaDffMK7Nbao6LC7G9OW/NHkUwIdjLSS3KYf0Nv4/nQiiI8dY2TcsQx01NniOg iat-mode=0",
      "obfs4 193.11.166.194:27020 86AC7B8D430DAC4117E9F42C9EAED18133863AAF cert=0LDeJH4JzMDtkJJrFphJCiPqKx7loozKN7VNfuukMGfHO0Z8OGdzHVkhVAOfo1mUdv9cMg iat-mode=0",
      "obfs4 193.11.166.194:27025 1AE2C08904527FEA90C4C4F8C1083EA59FBC6FAF cert=ItvYZzW5tn6v3G4UnQa6Qz04Npro6e81AP70YujmK/KXwDFPTs3aHXcHp4n8Vt6w/bv8cA iat-mode=0",
      "obfs4 209.148.46.65:443 74FAD13168806246602538555B5521A0383A1875 cert=ssH+9rP8dG2NLDN2XuFw63hIO/9MNNinLmxQDpVa+7kTOa9/m+tGWT1SmSYpQ9uTBGa6Hw iat-mode=0",
      "obfs4 146.57.248.225:22 10A6CD36A537FCE513A322361547444B393989F0 cert=K1gDtDAIcUfeLqbstggjIw2rtgIKqdIhUlHp82XRqNSq/mtAjp1BIC9vHKJ2FAEpGssTPw iat-mode=0",
      "obfs4 45.145.95.6:27015 C5B7CD6946FF10C5B3E89691A7D3F2C122D2117C cert=TD7PbUO0/0k6xYHMPW3vJxICfkMZNdkRrb63Zhl5j9dW3iRGiCx0A7mPhe5T2EDzQ35+Zw iat-mode=0",
      "obfs4 51.222.13.177:80 5EDAC3B810E12B01F6FD8050D2FD3E277B289A08 cert=2uplIpLQ0q9+0qMFrK5pkaYRDOe460LL9WHBvatgkuRr/SL31wBOEupaMMJ6koRE6Ld0ew iat-mode=0"
    ],
    "snowflake" : [
      "snowflake 192.0.2.3:80 2B280B23E1107BB62ABFC40DDCC8824814F80A72 fingerprint=2B280B23E1107BB62ABFC40DDCC8824814F80A72 url=https://1098762253.rsc.cdn77.org/ fronts=www.cdn77.com,www.phpmyadmin.net ice=stun:stun.l.google.com:19302,stun:stun.antisip.com:3478,stun:stun.bluesip.net:3478,stun:stun.dus.net:3478,stun:stun.epygi.com:3478,stun:stun.sonetel.com:3478,stun:stun.uls.co.za:3478,stun:stun.voipgate.com:3478,stun:stun.voys.nl:3478 utls-imitate=hellorandomizedalpn",
      "snowflake 192.0.2.4:80 8838024498816A039FCBBAB14E6F40A0843051FA fingerprint=8838024498816A039FCBBAB14E6F40A0843051FA url=https://1098762253.rsc.cdn77.org/ fronts=www.cdn77.com,www.phpmyadmin.net ice=stun:stun.l.google.com:19302,stun:stun.antisip.com:3478,stun:stun.bluesip.net:3478,stun:stun.dus.net:3478,stun:stun.epygi.com:3478,stun:stun.sonetel.net:3478,stun:stun.uls.co.za:3478,stun:stun.voipgate.com:3478,stun:stun.voys.nl:3478 utls-imitate=hellorandomizedalpn"
    ]
  }
}
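For illustration only: the `${pt_path}` placeholder in the transport templates above is presumably substituted with the bundled `pluggable_transports/` directory before the line reaches tor, yielding ordinary torrc directives; the expansion below is a hypothetical example, not code from this diff.
```shell
PT_PATH="desktopApp/tor/tor-linux/tor/pluggable_transports/"   # hypothetical resolved path
cat <<EOF
UseBridges 1
ClientTransportPlugin snowflake exec ${PT_PATH}snowflake-client
# followed by one of the "snowflake" bridge lines listed in the JSON above
EOF
```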
BIN desktopApp/tor/tor-linux/tor/pluggable_transports/snowflake-client Executable file
Binary file not shown.
BIN desktopApp/tor/tor-linux/tor/pluggable_transports/webtunnel-client Executable file
Binary file not shown.
BIN desktopApp/tor/tor-linux/tor/tor Executable file
Binary file not shown.
1 desktopApp/tor/tor-mac/data/geoip Symbolic link
@ -0,0 +1 @@
../../geoip
1 desktopApp/tor/tor-mac/data/geoip6 Symbolic link
@ -0,0 +1 @@
../../geoip6
BIN desktopApp/tor/tor-mac/tor/libevent-2.1.7.dylib Executable file
Binary file not shown.
@ -0,0 +1 @@
../../../README.CONJURE.md
@ -0,0 +1 @@
../../../README.SNOWFLAKE.md
@ -0,0 +1 @@
../../../README.WEBTUNNEL.md
BIN desktopApp/tor/tor-mac/tor/pluggable_transports/conjure-client Executable file
Binary file not shown.
BIN desktopApp/tor/tor-mac/tor/pluggable_transports/lyrebird Executable file
Binary file not shown.
@ -0,0 +1,32 @@
{
  "recommendedDefault" : "obfs4",
  "pluggableTransports" : {
    "lyrebird" : "ClientTransportPlugin meek_lite,obfs2,obfs3,obfs4,scramblesuit exec ${pt_path}lyrebird",
    "snowflake" : "ClientTransportPlugin snowflake exec ${pt_path}snowflake-client",
    "webtunnel" : "ClientTransportPlugin webtunnel exec ${pt_path}webtunnel-client",
    "conjure" : "ClientTransportPlugin conjure exec ${pt_path}conjure-client -registerURL https://registration.refraction.network/api"
  },
  "bridges" : {
    "meek-azure" : [
      "meek_lite 192.0.2.18:80 BE776A53492E1E044A26F17306E1BC46A55A1625 url=https://meek.azureedge.net/ front=ajax.aspnetcdn.com"
    ],
    "obfs4" : [
      "obfs4 192.95.36.142:443 CDF2E852BF539B82BD10E27E9115A31734E378C2 cert=qUVQ0srL1JI/vO6V6m/24anYXiJD3QP2HgzUKQtQ7GRqqUvs7P+tG43RtAqdhLOALP7DJQ iat-mode=1",
      "obfs4 37.218.245.14:38224 D9A82D2F9C2F65A18407B1D2B764F130847F8B5D cert=bjRaMrr1BRiAW8IE9U5z27fQaYgOhX1UCmOpg2pFpoMvo6ZgQMzLsaTzzQNTlm7hNcb+Sg iat-mode=0",
      "obfs4 85.31.186.98:443 011F2599C0E9B27EE74B353155E244813763C3E5 cert=ayq0XzCwhpdysn5o0EyDUbmSOx3X/oTEbzDMvczHOdBJKlvIdHHLJGkZARtT4dcBFArPPg iat-mode=0",
      "obfs4 85.31.186.26:443 91A6354697E6B02A386312F68D82CF86824D3606 cert=PBwr+S8JTVZo6MPdHnkTwXJPILWADLqfMGoVvhZClMq/Urndyd42BwX9YFJHZnBB3H0XCw iat-mode=0",
      "obfs4 193.11.166.194:27015 2D82C2E354D531A68469ADF7F878FA6060C6BACA cert=4TLQPJrTSaDffMK7Nbao6LC7G9OW/NHkUwIdjLSS3KYf0Nv4/nQiiI8dY2TcsQx01NniOg iat-mode=0",
      "obfs4 193.11.166.194:27020 86AC7B8D430DAC4117E9F42C9EAED18133863AAF cert=0LDeJH4JzMDtkJJrFphJCiPqKx7loozKN7VNfuukMGfHO0Z8OGdzHVkhVAOfo1mUdv9cMg iat-mode=0",
      "obfs4 193.11.166.194:27025 1AE2C08904527FEA90C4C4F8C1083EA59FBC6FAF cert=ItvYZzW5tn6v3G4UnQa6Qz04Npro6e81AP70YujmK/KXwDFPTs3aHXcHp4n8Vt6w/bv8cA iat-mode=0",
      "obfs4 209.148.46.65:443 74FAD13168806246602538555B5521A0383A1875 cert=ssH+9rP8dG2NLDN2XuFw63hIO/9MNNinLmxQDpVa+7kTOa9/m+tGWT1SmSYpQ9uTBGa6Hw iat-mode=0",
      "obfs4 146.57.248.225:22 10A6CD36A537FCE513A322361547444B393989F0 cert=K1gDtDAIcUfeLqbstggjIw2rtgIKqdIhUlHp82XRqNSq/mtAjp1BIC9vHKJ2FAEpGssTPw iat-mode=0",
      "obfs4 45.145.95.6:27015 C5B7CD6946FF10C5B3E89691A7D3F2C122D2117C cert=TD7PbUO0/0k6xYHMPW3vJxICfkMZNdkRrb63Zhl5j9dW3iRGiCx0A7mPhe5T2EDzQ35+Zw iat-mode=0",
      "obfs4 51.222.13.177:80 5EDAC3B810E12B01F6FD8050D2FD3E277B289A08 cert=2uplIpLQ0q9+0qMFrK5pkaYRDOe460LL9WHBvatgkuRr/SL31wBOEupaMMJ6koRE6Ld0ew iat-mode=0"
    ],
    "snowflake" : [
      "snowflake 192.0.2.3:80 2B280B23E1107BB62ABFC40DDCC8824814F80A72 fingerprint=2B280B23E1107BB62ABFC40DDCC8824814F80A72 url=https://1098762253.rsc.cdn77.org/ fronts=www.cdn77.com,www.phpmyadmin.net ice=stun:stun.l.google.com:19302,stun:stun.antisip.com:3478,stun:stun.bluesip.net:3478,stun:stun.dus.net:3478,stun:stun.epygi.com:3478,stun:stun.sonetel.com:3478,stun:stun.uls.co.za:3478,stun:stun.voipgate.com:3478,stun:stun.voys.nl:3478 utls-imitate=hellorandomizedalpn",
      "snowflake 192.0.2.4:80 8838024498816A039FCBBAB14E6F40A0843051FA fingerprint=8838024498816A039FCBBAB14E6F40A0843051FA url=https://1098762253.rsc.cdn77.org/ fronts=www.cdn77.com,www.phpmyadmin.net ice=stun:stun.l.google.com:19302,stun:stun.antisip.com:3478,stun:stun.bluesip.net:3478,stun:stun.dus.net:3478,stun:stun.epygi.com:3478,stun:stun.sonetel.net:3478,stun:stun.uls.co.za:3478,stun:stun.voipgate.com:3478,stun:stun.voys.nl:3478 utls-imitate=hellorandomizedalpn"
    ]
  }
}
BIN desktopApp/tor/tor-mac/tor/pluggable_transports/snowflake-client Executable file
Binary file not shown.
BIN desktopApp/tor/tor-mac/tor/pluggable_transports/webtunnel-client Executable file
Binary file not shown.
BIN desktopApp/tor/tor-mac/tor/tor Executable file
Binary file not shown.
1 desktopApp/tor/tor-win/data/geoip Symbolic link
@ -0,0 +1 @@
../../geoip
1 desktopApp/tor/tor-win/data/geoip6 Symbolic link
@ -0,0 +1 @@
../../geoip6
@ -0,0 +1 @@
../../../README.CONJURE.md
@ -0,0 +1 @@
../../../README.SNOWFLAKE.md
@ -0,0 +1 @@
../../../README.WEBTUNNEL.md
BIN desktopApp/tor/tor-win/tor/pluggable_transports/conjure-client.exe Executable file
Binary file not shown.
BIN desktopApp/tor/tor-win/tor/pluggable_transports/lyrebird.exe Executable file
Binary file not shown.
@ -0,0 +1,32 @@
{
  "recommendedDefault" : "obfs4",
  "pluggableTransports" : {
    "lyrebird" : "ClientTransportPlugin meek_lite,obfs2,obfs3,obfs4,scramblesuit exec ${pt_path}lyrebird.exe",
    "snowflake" : "ClientTransportPlugin snowflake exec ${pt_path}snowflake-client.exe",
    "webtunnel" : "ClientTransportPlugin webtunnel exec ${pt_path}webtunnel-client.exe",
    "conjure" : "ClientTransportPlugin conjure exec ${pt_path}conjure-client.exe -registerURL https://registration.refraction.network/api"
  },
  "bridges" : {
    "meek-azure" : [
      "meek_lite 192.0.2.18:80 BE776A53492E1E044A26F17306E1BC46A55A1625 url=https://meek.azureedge.net/ front=ajax.aspnetcdn.com"
    ],
    "obfs4" : [
      "obfs4 192.95.36.142:443 CDF2E852BF539B82BD10E27E9115A31734E378C2 cert=qUVQ0srL1JI/vO6V6m/24anYXiJD3QP2HgzUKQtQ7GRqqUvs7P+tG43RtAqdhLOALP7DJQ iat-mode=1",
      "obfs4 37.218.245.14:38224 D9A82D2F9C2F65A18407B1D2B764F130847F8B5D cert=bjRaMrr1BRiAW8IE9U5z27fQaYgOhX1UCmOpg2pFpoMvo6ZgQMzLsaTzzQNTlm7hNcb+Sg iat-mode=0",
      "obfs4 85.31.186.98:443 011F2599C0E9B27EE74B353155E244813763C3E5 cert=ayq0XzCwhpdysn5o0EyDUbmSOx3X/oTEbzDMvczHOdBJKlvIdHHLJGkZARtT4dcBFArPPg iat-mode=0",
      "obfs4 85.31.186.26:443 91A6354697E6B02A386312F68D82CF86824D3606 cert=PBwr+S8JTVZo6MPdHnkTwXJPILWADLqfMGoVvhZClMq/Urndyd42BwX9YFJHZnBB3H0XCw iat-mode=0",
      "obfs4 193.11.166.194:27015 2D82C2E354D531A68469ADF7F878FA6060C6BACA cert=4TLQPJrTSaDffMK7Nbao6LC7G9OW/NHkUwIdjLSS3KYf0Nv4/nQiiI8dY2TcsQx01NniOg iat-mode=0",
      "obfs4 193.11.166.194:27020 86AC7B8D430DAC4117E9F42C9EAED18133863AAF cert=0LDeJH4JzMDtkJJrFphJCiPqKx7loozKN7VNfuukMGfHO0Z8OGdzHVkhVAOfo1mUdv9cMg iat-mode=0",
      "obfs4 193.11.166.194:27025 1AE2C08904527FEA90C4C4F8C1083EA59FBC6FAF cert=ItvYZzW5tn6v3G4UnQa6Qz04Npro6e81AP70YujmK/KXwDFPTs3aHXcHp4n8Vt6w/bv8cA iat-mode=0",
      "obfs4 209.148.46.65:443 74FAD13168806246602538555B5521A0383A1875 cert=ssH+9rP8dG2NLDN2XuFw63hIO/9MNNinLmxQDpVa+7kTOa9/m+tGWT1SmSYpQ9uTBGa6Hw iat-mode=0",
      "obfs4 146.57.248.225:22 10A6CD36A537FCE513A322361547444B393989F0 cert=K1gDtDAIcUfeLqbstggjIw2rtgIKqdIhUlHp82XRqNSq/mtAjp1BIC9vHKJ2FAEpGssTPw iat-mode=0",
      "obfs4 45.145.95.6:27015 C5B7CD6946FF10C5B3E89691A7D3F2C122D2117C cert=TD7PbUO0/0k6xYHMPW3vJxICfkMZNdkRrb63Zhl5j9dW3iRGiCx0A7mPhe5T2EDzQ35+Zw iat-mode=0",
      "obfs4 51.222.13.177:80 5EDAC3B810E12B01F6FD8050D2FD3E277B289A08 cert=2uplIpLQ0q9+0qMFrK5pkaYRDOe460LL9WHBvatgkuRr/SL31wBOEupaMMJ6koRE6Ld0ew iat-mode=0"
    ],
    "snowflake" : [
      "snowflake 192.0.2.3:80 2B280B23E1107BB62ABFC40DDCC8824814F80A72 fingerprint=2B280B23E1107BB62ABFC40DDCC8824814F80A72 url=https://1098762253.rsc.cdn77.org/ fronts=www.cdn77.com,www.phpmyadmin.net ice=stun:stun.l.google.com:19302,stun:stun.antisip.com:3478,stun:stun.bluesip.net:3478,stun:stun.dus.net:3478,stun:stun.epygi.com:3478,stun:stun.sonetel.com:3478,stun:stun.uls.co.za:3478,stun:stun.voipgate.com:3478,stun:stun.voys.nl:3478 utls-imitate=hellorandomizedalpn",
      "snowflake 192.0.2.4:80 8838024498816A039FCBBAB14E6F40A0843051FA fingerprint=8838024498816A039FCBBAB14E6F40A0843051FA url=https://1098762253.rsc.cdn77.org/ fronts=www.cdn77.com,www.phpmyadmin.net ice=stun:stun.l.google.com:19302,stun:stun.antisip.com:3478,stun:stun.bluesip.net:3478,stun:stun.dus.net:3478,stun:stun.epygi.com:3478,stun:stun.sonetel.net:3478,stun:stun.uls.co.za:3478,stun:stun.voipgate.com:3478,stun:stun.voys.nl:3478 utls-imitate=hellorandomizedalpn"
    ]
  }
}
BIN desktopApp/tor/tor-win/tor/pluggable_transports/snowflake-client.exe Executable file
Binary file not shown.
BIN desktopApp/tor/tor-win/tor/pluggable_transports/webtunnel-client.exe Executable file
Binary file not shown.
BIN desktopApp/tor/tor-win/tor/tor-gencert.exe Executable file
Binary file not shown.
BIN desktopApp/tor/tor-win/tor/tor.exe Executable file
Binary file not shown.
14 desktopApp/tsconfig.json Normal file
@ -0,0 +1,14 @@
{
  "compilerOptions": {
    "target": "es5",
    "module": "commonjs",
    "moduleResolution": "node",
    "sourceMap": true,
    "emitDecoratorMetadata": true,
    "experimentalDecorators": true,
    "removeComments": false,
    "noImplicitAny": false,
    "outDir": "."
  },
  "exclude": [ "node_modules" ]
}
@ -1,14 +1,9 @@
 version: '3.9'
 services:
   jekyll:
-    image: jekyll/jekyll:4.0
     container_name: pages
+    build: ./docs
     restart: always
-    volumes:
-      - .:/usr/src/robosats
-    command: bash -c "cd /usr/src/robosats/docs/
-      && bundle install
-      && bundle exec jekyll serve --force_polling -H 0.0.0.0 -P 4000"
     ports:
       - 4000:4000
 
@ -226,6 +221,16 @@ services:
     volumes:
       - ./node/db:/var/lib/postgresql/data
+
+  strfry:
+    build: ./docker/strfry
+    container_name: strfry-dev
+    restart: unless-stopped
+    volumes:
+      - ./docker/strfry/strfry.conf:/etc/strfry.conf:ro
+      - ./docker/strfry/onion_urls.txt:/app/onion_urls.txt:ro
+      - ./node/strfry/db:/app/strfry-db:rw
+    network_mode: service:tor
 
 #  # Postgresql for CLN
 #  postgres-cln:
 #    image: postgres:14.2-alpine
|
@ -15,7 +15,7 @@ version: '3.9'
|
|||||||
services:
|
services:
|
||||||
bitcoind:
|
bitcoind:
|
||||||
image: ruimarinho/bitcoin-core:${BITCOIND_VERSION:-24.0.1}-alpine
|
image: ruimarinho/bitcoin-core:${BITCOIND_VERSION:-24.0.1}-alpine
|
||||||
container_name: btc
|
container_name: test-btc
|
||||||
restart: always
|
restart: always
|
||||||
ports:
|
ports:
|
||||||
- "8000:8000"
|
- "8000:8000"
|
||||||
@ -27,6 +27,7 @@ services:
|
|||||||
- "9998:9998"
|
- "9998:9998"
|
||||||
- "5432:5432"
|
- "5432:5432"
|
||||||
- "6379:6379"
|
- "6379:6379"
|
||||||
|
- "7777:7777"
|
||||||
volumes:
|
volumes:
|
||||||
- bitcoin:/bitcoin/.bitcoin/
|
- bitcoin:/bitcoin/.bitcoin/
|
||||||
- ./tests/bitcoind/entrypoint.sh:/entrypoint.sh
|
- ./tests/bitcoind/entrypoint.sh:/entrypoint.sh
|
||||||
@ -50,7 +51,7 @@ services:
|
|||||||
|
|
||||||
coordinator-LND:
|
coordinator-LND:
|
||||||
image: lightninglabs/lnd:${LND_VERSION:-v0.17.0-beta}
|
image: lightninglabs/lnd:${LND_VERSION:-v0.17.0-beta}
|
||||||
container_name: coordinator-LND
|
container_name: test-coordinator-LND
|
||||||
restart: always
|
restart: always
|
||||||
volumes:
|
volumes:
|
||||||
- bitcoin:/root/.bitcoin/
|
- bitcoin:/root/.bitcoin/
|
||||||
@ -83,7 +84,7 @@ services:
|
|||||||
coordinator-CLN:
|
coordinator-CLN:
|
||||||
image: elementsproject/lightningd:${CLN_VERSION:-v24.05}
|
image: elementsproject/lightningd:${CLN_VERSION:-v24.05}
|
||||||
restart: always
|
restart: always
|
||||||
container_name: coordinator-CLN
|
container_name: test-coordinator-CLN
|
||||||
environment:
|
environment:
|
||||||
LIGHTNINGD_NETWORK: 'regtest'
|
LIGHTNINGD_NETWORK: 'regtest'
|
||||||
volumes:
|
volumes:
|
||||||
@ -97,7 +98,7 @@ services:
|
|||||||
|
|
||||||
robot-LND:
|
robot-LND:
|
||||||
image: lightninglabs/lnd:${LND_VERSION:-v0.17.0-beta}
|
image: lightninglabs/lnd:${LND_VERSION:-v0.17.0-beta}
|
||||||
container_name: robot-LND
|
container_name: test-robot-LND
|
||||||
restart: always
|
restart: always
|
||||||
volumes:
|
volumes:
|
||||||
- bitcoin:/root/.bitcoin/
|
- bitcoin:/root/.bitcoin/
|
||||||
@ -129,7 +130,7 @@ services:
|
|||||||
|
|
||||||
redis:
|
redis:
|
||||||
image: redis:${REDIS_VERSION:-7.2.1}-alpine
|
image: redis:${REDIS_VERSION:-7.2.1}-alpine
|
||||||
container_name: redis
|
container_name: test-redis
|
||||||
restart: always
|
restart: always
|
||||||
volumes:
|
volumes:
|
||||||
- redisdata:/data
|
- redisdata:/data
|
||||||
@ -141,7 +142,7 @@ services:
|
|||||||
args:
|
args:
|
||||||
DEVELOPMENT: True
|
DEVELOPMENT: True
|
||||||
image: backend-image
|
image: backend-image
|
||||||
container_name: coordinator
|
container_name: test-coordinator
|
||||||
restart: always
|
restart: always
|
||||||
environment:
|
environment:
|
||||||
DEVELOPMENT: True
|
DEVELOPMENT: True
|
||||||
@ -171,7 +172,7 @@ services:
|
|||||||
|
|
||||||
postgres:
|
postgres:
|
||||||
image: postgres:${POSTGRES_VERSION:-14.2}-alpine
|
image: postgres:${POSTGRES_VERSION:-14.2}-alpine
|
||||||
container_name: sql
|
container_name: test-sql
|
||||||
restart: always
|
restart: always
|
||||||
environment:
|
environment:
|
||||||
POSTGRES_PASSWORD: 'example'
|
POSTGRES_PASSWORD: 'example'
|
||||||
@ -182,7 +183,7 @@ services:
|
|||||||
# celery-worker:
|
# celery-worker:
|
||||||
# image: backend-image
|
# image: backend-image
|
||||||
# pull_policy: never
|
# pull_policy: never
|
||||||
# container_name: celery-worker
|
# container_name: test-celery-worker
|
||||||
# restart: always
|
# restart: always
|
||||||
# environment:
|
# environment:
|
||||||
# DEVELOPMENT: True
|
# DEVELOPMENT: True
|
||||||
|
41 docker/strfry/Dockerfile Normal file
@ -0,0 +1,41 @@
FROM ubuntu:jammy
ENV TZ=Europe/London

RUN apt update && apt install -y --no-install-recommends \
    git g++ make pkg-config libtool ca-certificates \
    libssl-dev zlib1g-dev liblmdb-dev libflatbuffers-dev \
    libsecp256k1-dev libzstd-dev

# setup app
RUN git clone https://github.com/KoalaSat/strfry /app

WORKDIR /app

RUN git submodule update --init
RUN make setup-golpe
RUN make clean
RUN make -j4

RUN apt update && apt install -y --no-install-recommends \
    liblmdb0 libflatbuffers1 libsecp256k1-0 libb2-1 libzstd1 torsocks cron \
    && rm -rf /var/lib/apt/lists/*

RUN echo "TorAddress 127.0.0.1" >> /etc/tor/torsocks.conf
RUN echo "TorPort 9050" >> /etc/tor/torsocks.conf

# Setting up crontab
COPY crontab /etc/cron.d/crontab
RUN chmod 0644 /etc/cron.d/crontab
RUN crontab /etc/cron.d/crontab

# Setting up entrypoints
COPY sync.sh /etc/strfry/sync.sh
COPY entrypoint.sh /etc/strfry/entrypoint.sh

RUN chmod +x /etc/strfry/entrypoint.sh
RUN chmod +x /etc/strfry/sync.sh

# Setting up logs
RUN touch /var/log/cron.log && chmod 0644 /var/log/cron.log

ENTRYPOINT ["/etc/strfry/entrypoint.sh"]
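docker-compose.yml builds this image as the `strfry` service (see the compose change earlier in this diff); building and running it standalone would look roughly like the sketch below, with the bind mounts copied from that service definition. Note that the compose service also shares the tor container's network (`network_mode: service:tor`), which a standalone run does not.
```shell
docker build -t robosats-strfry docker/strfry
docker run --rm \
  -v "$(pwd)/docker/strfry/strfry.conf:/etc/strfry.conf:ro" \
  -v "$(pwd)/docker/strfry/onion_urls.txt:/app/onion_urls.txt:ro" \
  -v "$(pwd)/node/strfry/db:/app/strfry-db:rw" \
  robosats-strfry
```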
24 docker/strfry/crontab Normal file
@ -0,0 +1,24 @@
# Edit this file to introduce tasks to be run by cron.
#
# Each task to run has to be defined through a single line
# indicating with different fields when the task will be run
# and what command to run for the task
#
# To define the time you can provide concrete values for
# minute (m), hour (h), day of month (dom), month (mon),
# and day of week (dow) or use '*' in these fields (for 'any').
#
# Notice that tasks will be started based on the cron's system
# daemon's notion of time and timezones.
#
# Output of the crontab jobs (including errors) is sent through
# email to the user the crontab file belongs to (unless redirected).
#
# For example, you can run a backup of all your user accounts
# at 5 a.m every week with:
# 0 5 * * 1 tar -zcf /var/backups/home.tgz /home/
#
# For more information see the manual pages of crontab(5) and cron(8)
#
# m h  dom mon dow   command
*/1 * * * * torsocks /etc/strfry/sync.sh >> /var/log/cron.log 2>&1
3 docker/strfry/entrypoint.sh Executable file
@ -0,0 +1,3 @@
#!/bin/sh

cron -f -l 8 & tail -f /var/log/cron.log & /app/strfry relay
4 docker/strfry/onion_urls.txt Normal file
@ -0,0 +1,4 @@
ws://testraliar7xkhos2gipv2k65obykofb4jqzl5l4danfryacifi4t7qd.onion/nostr
ws://jpp3w5tpxtyg6lifonisdszpriiapszzem4wod2zsdweyfenlsxeoxid.onion/nostr
ws://ghbtv7lhoyhomyir4xvxaeyqgx4ylxksia343jaat3njqqlkqpdjqcyd.onion/nostr
ws://wsjyhbashc4zrrex6vijpryujggbka5plry2o62dxqoz3pxinblnj4ad.onion/nostr
138 docker/strfry/strfry.conf Normal file
@ -0,0 +1,138 @@
##
## Default strfry config
##

# Directory that contains the strfry LMDB database (restart required)
db = "/app/strfry-db/"

dbParams {
    # Maximum number of threads/processes that can simultaneously have LMDB transactions open (restart required)
    maxreaders = 256

    # Size of mmap() to use when loading LMDB (default is 10TB, does *not* correspond to disk-space used) (restart required)
    mapsize = 10995116277760

    # Disables read-ahead when accessing the LMDB mapping. Reduces IO activity when DB size is larger than RAM. (restart required)
    noReadAhead = false
}

events {
    # Maximum size of normalised JSON, in bytes
    maxEventSize = 65536

    # Events newer than this will be rejected
    rejectEventsNewerThanSeconds = 900

    # Events older than this will be rejected
    rejectEventsOlderThanSeconds = 94608000

    # Ephemeral events older than this will be rejected
    rejectEphemeralEventsOlderThanSeconds = 60

    # Ephemeral events will be deleted from the DB when older than this
    ephemeralEventsLifetimeSeconds = 300

    # Maximum number of tags allowed
    maxNumTags = 2000

    # Maximum size for tag values, in bytes
    maxTagValSize = 1024
}

relay {
    # Interface to listen on. Use 0.0.0.0 to listen on all interfaces (restart required)
    bind = "0.0.0.0"

    # Port to open for the nostr websocket protocol (restart required)
    port = 7777

    # Set OS-limit on maximum number of open files/sockets (if 0, don't attempt to set) (restart required)
    nofiles = 1000000

    # HTTP header that contains the client's real IP, before reverse proxying (ie x-real-ip) (MUST be all lower-case)
    realIpHeader = ""

    info {
        # NIP-11: Name of this server. Short/descriptive (< 30 characters)
        name = "Robosats"

        # NIP-11: Detailed information about relay, free-form
        description = "Federation cache system."

        # NIP-11: Administrative nostr pubkey, for contact purposes
        pubkey = ""

        # NIP-11: Alternative administrative contact (email, website, etc)
        contact = ""
    }

    # Maximum accepted incoming websocket frame size (should be larger than max event) (restart required)
    maxWebsocketPayloadSize = 131072

    # Websocket-level PING message frequency (should be less than any reverse proxy idle timeouts) (restart required)
    autoPingSeconds = 55

    # If TCP keep-alive should be enabled (detect dropped connections to upstream reverse proxy)
    enableTcpKeepalive = false

    # How much uninterrupted CPU time a REQ query should get during its DB scan
    queryTimesliceBudgetMicroseconds = 10000

    # Maximum records that can be returned per filter
    maxFilterLimit = 500

    # Maximum number of subscriptions (concurrent REQs) a connection can have open at any time
    maxSubsPerConnection = 3

    writePolicy {
        # If non-empty, path to an executable script that implements the writePolicy plugin logic
        plugin = ""
    }

    compression {
        # Use permessage-deflate compression if supported by client. Reduces bandwidth, but slight increase in CPU (restart required)
        enabled = true

        # Maintain a sliding window buffer for each connection. Improves compression, but uses more memory (restart required)
        slidingWindow = false
    }

    logging {
        # Dump all incoming messages
        dumpInAll = false

        # Dump all incoming EVENT messages
        dumpInEvents = false

        # Dump all incoming REQ/CLOSE messages
        dumpInReqs = false

        # Log performance metrics for initial REQ database scans
        dbScanPerf = false

        # Log reason for invalid event rejection? Can be disabled to silence excessive logging
        invalidEvents = true
    }

    numThreads {
        # Ingester threads: route incoming requests, validate events/sigs (restart required)
        ingester = 3

        # reqWorker threads: Handle initial DB scan for events (restart required)
        reqWorker = 3

        # reqMonitor threads: Handle filtering of new events (restart required)
        reqMonitor = 3

        # negentropy threads: Handle negentropy protocol messages (restart required)
        negentropy = 2
    }

    negentropy {
        # Support negentropy protocol messages
        enabled = true

        # Maximum records that sync will process before returning an error
        maxSyncEvents = 1000000
    }
}
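With `port = 7777` and the NIP-11 `info` block above, the relay's information document can be fetched once the container is up; strfry answering NIP-11 requests over plain HTTP on that port is an assumption here.
```shell
curl -H 'Accept: application/nostr+json' http://localhost:7777/
```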
7 docker/strfry/sync.sh Executable file
@ -0,0 +1,7 @@
#!/bin/sh

filters='{"kinds":[38383]}'

while IFS= read -r line; do
  /app/strfry --config /etc/strfry.conf sync ${line} --filter "$filters" --dir both
done < /app/onion_urls.txt
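Expanded by hand, one iteration of that loop (with the first URL from onion_urls.txt, and torsocks supplied by the crontab entry that invokes this script) is:
```shell
torsocks /app/strfry --config /etc/strfry.conf \
  sync ws://testraliar7xkhos2gipv2k65obykofb4jqzl5l4danfryacifi4t7qd.onion/nostr \
  --filter '{"kinds":[38383]}' --dir both
```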
12 docs/Dockerfile Normal file
@ -0,0 +1,12 @@
FROM jekyll/jekyll:4.0

WORKDIR /usr/src/docs

COPY . .

RUN chown -R jekyll:jekyll /usr/src/docs
RUN chmod -R 755 /usr/src/docs

RUN bundle install

CMD ["bundle", "exec", "jekyll", "serve", "--force_polling", "-H", "0.0.0.0", "-P", "4000"]
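With the compose change earlier in this diff (the `jekyll` service now uses `build: ./docs` and maps port 4000), the docs site can be served locally; the service name and port are taken from that compose file, the rest is an assumed workflow.
```shell
docker compose up -d jekyll
# then browse http://localhost:4000
```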
@ -7,13 +7,15 @@ source "https://rubygems.org"
 #
 # This will help ensure the proper Jekyll version is running.
 # Happy Jekylling!
+
+gem "activesupport", ">= 6.1.7.5"
 gem "minimal-mistakes-jekyll"
 gem "jekyll-include-cache"
 gem "webrick"
 gem "breakpoint"
 # If you want to use GitHub Pages, remove the "gem "jekyll"" above and
 # uncomment the line below. To upgrade, run `bundle update github-pages`.
-gem "github-pages", "~> 231", group: :jekyll_plugins
+gem "github-pages", "~> 232", group: :jekyll_plugins
 # If you have any plugins, put them here!
 group :jekyll_plugins do
   gem "jekyll-feed", "~> 0.17"
@ -1,286 +0,0 @@
GEM
  remote: https://rubygems.org/
  specs:
    activesupport (6.0.6.1)
      concurrent-ruby (~> 1.0, >= 1.0.2)
      i18n (>= 0.7, < 2)
      minitest (~> 5.1)
      tzinfo (~> 1.1)
      zeitwerk (~> 2.2, >= 2.2.2)
    addressable (2.8.6)
      public_suffix (>= 2.0.2, < 6.0)
    breakpoint (2.7.1)
      sass (~> 3.3)
      sassy-maps (< 1.0.0)
    coffee-script (2.4.1)
      coffee-script-source
      execjs
    coffee-script-source (1.12.2)
    colorator (1.1.0)
    commonmarker (0.23.10)
    concurrent-ruby (1.2.3)
    dnsruby (1.70.0)
      simpleidn (~> 0.2.1)
    em-websocket (0.5.3)
      eventmachine (>= 0.12.9)
      http_parser.rb (~> 0)
    ethon (0.16.0)
      ffi (>= 1.15.0)
    eventmachine (1.2.7)
    execjs (2.9.1)
    faraday (2.9.0)
      faraday-net_http (>= 2.0, < 3.2)
    faraday-net_http (3.1.0)
      net-http
    ffi (1.16.3)
    forwardable-extended (2.6.0)
    gemoji (4.1.0)
    github-pages (231)
      github-pages-health-check (= 1.18.2)
      jekyll (= 3.9.5)
      jekyll-avatar (= 0.8.0)
      jekyll-coffeescript (= 1.2.2)
      jekyll-commonmark-ghpages (= 0.4.0)
      jekyll-default-layout (= 0.1.5)
      jekyll-feed (= 0.17.0)
      jekyll-gist (= 1.5.0)
      jekyll-github-metadata (= 2.16.1)
      jekyll-include-cache (= 0.2.1)
      jekyll-mentions (= 1.6.0)
      jekyll-optional-front-matter (= 0.3.2)
      jekyll-paginate (= 1.1.0)
      jekyll-readme-index (= 0.3.0)
      jekyll-redirect-from (= 0.16.0)
      jekyll-relative-links (= 0.6.1)
      jekyll-remote-theme (= 0.4.3)
      jekyll-sass-converter (= 1.5.2)
      jekyll-seo-tag (= 2.8.0)
      jekyll-sitemap (= 1.4.0)
      jekyll-swiss (= 1.0.0)
      jekyll-theme-architect (= 0.2.0)
      jekyll-theme-cayman (= 0.2.0)
      jekyll-theme-dinky (= 0.2.0)
      jekyll-theme-hacker (= 0.2.0)
      jekyll-theme-leap-day (= 0.2.0)
      jekyll-theme-merlot (= 0.2.0)
      jekyll-theme-midnight (= 0.2.0)
      jekyll-theme-minimal (= 0.2.0)
      jekyll-theme-modernist (= 0.2.0)
      jekyll-theme-primer (= 0.6.0)
      jekyll-theme-slate (= 0.2.0)
      jekyll-theme-tactile (= 0.2.0)
      jekyll-theme-time-machine (= 0.2.0)
      jekyll-titles-from-headings (= 0.5.3)
      jemoji (= 0.13.0)
      kramdown (= 2.4.0)
      kramdown-parser-gfm (= 1.1.0)
      liquid (= 4.0.4)
      mercenary (~> 0.3)
      minima (= 2.5.1)
      nokogiri (>= 1.13.6, < 2.0)
      rouge (= 3.30.0)
      terminal-table (~> 1.4)
    github-pages-health-check (1.18.2)
      addressable (~> 2.3)
      dnsruby (~> 1.60)
      octokit (>= 4, < 8)
      public_suffix (>= 3.0, < 6.0)
      typhoeus (~> 1.3)
    html-pipeline (2.14.3)
      activesupport (>= 2)
      nokogiri (>= 1.4)
    http_parser.rb (0.8.0)
    i18n (1.14.4)
      concurrent-ruby (~> 1.0)
    jekyll (3.9.5)
      addressable (~> 2.4)
      colorator (~> 1.0)
      em-websocket (~> 0.5)
      i18n (>= 0.7, < 2)
      jekyll-sass-converter (~> 1.0)
      jekyll-watch (~> 2.0)
      kramdown (>= 1.17, < 3)
      liquid (~> 4.0)
      mercenary (~> 0.3.3)
      pathutil (~> 0.9)
      rouge (>= 1.7, < 4)
      safe_yaml (~> 1.0)
    jekyll-avatar (0.8.0)
      jekyll (>= 3.0, < 5.0)
    jekyll-coffeescript (1.2.2)
      coffee-script (~> 2.2)
      coffee-script-source (~> 1.12)
    jekyll-commonmark (1.4.0)
      commonmarker (~> 0.22)
    jekyll-commonmark-ghpages (0.4.0)
      commonmarker (~> 0.23.7)
      jekyll (~> 3.9.0)
      jekyll-commonmark (~> 1.4.0)
      rouge (>= 2.0, < 5.0)
    jekyll-default-layout (0.1.5)
      jekyll (>= 3.0, < 5.0)
    jekyll-feed (0.17.0)
      jekyll (>= 3.7, < 5.0)
    jekyll-gist (1.5.0)
      octokit (~> 4.2)
    jekyll-github-metadata (2.16.1)
      jekyll (>= 3.4, < 5.0)
      octokit (>= 4, < 7, != 4.4.0)
    jekyll-include-cache (0.2.1)
      jekyll (>= 3.7, < 5.0)
    jekyll-mentions (1.6.0)
      html-pipeline (~> 2.3)
      jekyll (>= 3.7, < 5.0)
    jekyll-optional-front-matter (0.3.2)
      jekyll (>= 3.0, < 5.0)
    jekyll-paginate (1.1.0)
    jekyll-readme-index (0.3.0)
      jekyll (>= 3.0, < 5.0)
    jekyll-redirect-from (0.16.0)
      jekyll (>= 3.3, < 5.0)
    jekyll-relative-links (0.6.1)
      jekyll (>= 3.3, < 5.0)
    jekyll-remote-theme (0.4.3)
      addressable (~> 2.0)
      jekyll (>= 3.5, < 5.0)
      jekyll-sass-converter (>= 1.0, <= 3.0.0, != 2.0.0)
      rubyzip (>= 1.3.0, < 3.0)
    jekyll-sass-converter (1.5.2)
      sass (~> 3.4)
    jekyll-seo-tag (2.8.0)
      jekyll (>= 3.8, < 5.0)
    jekyll-sitemap (1.4.0)
      jekyll (>= 3.7, < 5.0)
    jekyll-swiss (1.0.0)
    jekyll-theme-architect (0.2.0)
      jekyll (> 3.5, < 5.0)
      jekyll-seo-tag (~> 2.0)
    jekyll-theme-cayman (0.2.0)
      jekyll (> 3.5, < 5.0)
      jekyll-seo-tag (~> 2.0)
    jekyll-theme-dinky (0.2.0)
      jekyll (> 3.5, < 5.0)
      jekyll-seo-tag (~> 2.0)
    jekyll-theme-hacker (0.2.0)
      jekyll (> 3.5, < 5.0)
      jekyll-seo-tag (~> 2.0)
    jekyll-theme-leap-day (0.2.0)
      jekyll (> 3.5, < 5.0)
      jekyll-seo-tag (~> 2.0)
    jekyll-theme-merlot (0.2.0)
      jekyll (> 3.5, < 5.0)
      jekyll-seo-tag (~> 2.0)
    jekyll-theme-midnight (0.2.0)
      jekyll (> 3.5, < 5.0)
      jekyll-seo-tag (~> 2.0)
    jekyll-theme-minimal (0.2.0)
      jekyll (> 3.5, < 5.0)
      jekyll-seo-tag (~> 2.0)
    jekyll-theme-modernist (0.2.0)
      jekyll (> 3.5, < 5.0)
      jekyll-seo-tag (~> 2.0)
    jekyll-theme-primer (0.6.0)
      jekyll (> 3.5, < 5.0)
      jekyll-github-metadata (~> 2.9)
      jekyll-seo-tag (~> 2.0)
    jekyll-theme-slate (0.2.0)
      jekyll (> 3.5, < 5.0)
      jekyll-seo-tag (~> 2.0)
    jekyll-theme-tactile (0.2.0)
      jekyll (> 3.5, < 5.0)
      jekyll-seo-tag (~> 2.0)
    jekyll-theme-time-machine (0.2.0)
      jekyll (> 3.5, < 5.0)
      jekyll-seo-tag (~> 2.0)
    jekyll-titles-from-headings (0.5.3)
      jekyll (>= 3.3, < 5.0)
    jekyll-watch (2.2.1)
      listen (~> 3.0)
    jemoji (0.13.0)
      gemoji (>= 3, < 5)
      html-pipeline (~> 2.2)
      jekyll (>= 3.0, < 5.0)
    kramdown (2.4.0)
      rexml
    kramdown-parser-gfm (1.1.0)
      kramdown (~> 2.0)
    liquid (4.0.4)
    listen (3.9.0)
      rb-fsevent (~> 0.10, >= 0.10.3)
      rb-inotify (~> 0.9, >= 0.9.10)
    mercenary (0.3.6)
    minima (2.5.1)
      jekyll (>= 3.5, < 5.0)
      jekyll-feed (~> 0.9)
      jekyll-seo-tag (~> 2.1)
    minimal-mistakes-jekyll (4.25.1)
      jekyll (>= 3.7, < 5.0)
      jekyll-feed (~> 0.1)
      jekyll-gist (~> 1.5)
      jekyll-include-cache (~> 0.1)
      jekyll-paginate (~> 1.1)
      jekyll-sitemap (~> 1.3)
    minitest (5.22.2)
    net-http (0.4.1)
      uri
    nokogiri (1.16.2-x86_64-linux)
      racc (~> 1.4)
    octokit (4.25.1)
      faraday (>= 1, < 3)
      sawyer (~> 0.9)
    pathutil (0.16.2)
      forwardable-extended (~> 2.6)
    public_suffix (5.0.5)
    racc (1.7.3)
    rb-fsevent (0.11.2)
    rb-inotify (0.10.1)
      ffi (~> 1.0)
    rexml (3.2.6)
    rouge (3.30.0)
    rubyzip (2.3.2)
    safe_yaml (1.0.5)
    sass (3.7.4)
      sass-listen (~> 4.0.0)
    sass-listen (4.0.0)
      rb-fsevent (~> 0.9, >= 0.9.4)
      rb-inotify (~> 0.9, >= 0.9.7)
    sassy-maps (0.4.0)
      sass (~> 3.3)
    sawyer (0.9.2)
      addressable (>= 2.3.5)
      faraday (>= 0.17.3, < 3)
    simpleidn (0.2.1)
      unf (~> 0.1.4)
    terminal-table (1.8.0)
      unicode-display_width (~> 1.1, >= 1.1.1)
    thread_safe (0.3.6)
    typhoeus (1.4.1)
      ethon (>= 0.9.0)
    tzinfo (1.2.10)
      thread_safe (~> 0.1)
    unf (0.1.4)
      unf_ext
    unf_ext (0.0.9.1)
    unicode-display_width (1.8.0)
    uri (0.13.0)
    webrick (1.8.1)
    zeitwerk (2.6.13)

PLATFORMS
  x86_64-linux
  x86_64-linux-musl

DEPENDENCIES
  breakpoint
  github-pages (~> 231)
  http_parser.rb (~> 0.6.0)
  jekyll-feed (~> 0.17)
  jekyll-include-cache
  minimal-mistakes-jekyll
  tzinfo (~> 1.2)
  tzinfo-data
  wdm (~> 0.1.1)
  webrick

BUNDLED WITH
   2.3.13
@ -18,11 +18,11 @@ This contributing guide is based on the [Bisq contributing guide](https://github
 *!!! Beware of scammers impersonating RoboSats admins. Admins will NEVER privately message/call you.*

-- **Simplex:** [RoboSats Main Group](https://simplex.chat/contact#/?v=1-2&smp=smp%3A%2F%2F0YuTwO05YJWS8rkjn9eLJDjQhFKvIYd8d4xG8X1blIU%3D%40smp8.simplex.im%2FyEX_vdhWew_FkovCQC3mRYRWZB1j_cBq%23%2F%3Fv%3D1-2%26dh%3DMCowBQYDK2VuAyEAnrf9Jw3Ajdp4EQw71kqA64VgsIIzw8YNn68WjF09jFY%253D%26srv%3Dbeccx4yfxxbvyhqypaavemqurytl6hozr47wfc7uuecacjqdvwpw2xid.onion&data=%7B%22type%22%3A%22group%22%2C%22groupLinkId%22%3A%22hWnMVPnJl-KT3-virDk0JA%3D%3D%22%7D). Got questions or a problem? Find community-driven support in the public SimpleX group chat. If you want to hang out with other cool robots and learn more about RoboSats, those discussions happen in the SimpleX, Nostr, and Matrix group chats.
+- **Simplex:**
+  - [RoboSats Main Group](https://simplex.chat/contact#/?v=1-2&smp=smp%3A%2F%2F0YuTwO05YJWS8rkjn9eLJDjQhFKvIYd8d4xG8X1blIU%3D%40smp8.simplex.im%2FyEX_vdhWew_FkovCQC3mRYRWZB1j_cBq%23%2F%3Fv%3D1-2%26dh%3DMCowBQYDK2VuAyEAnrf9Jw3Ajdp4EQw71kqA64VgsIIzw8YNn68WjF09jFY%253D%26srv%3Dbeccx4yfxxbvyhqypaavemqurytl6hozr47wfc7uuecacjqdvwpw2xid.onion&data=%7B%22type%22%3A%22group%22%2C%22groupLinkId%22%3A%22hWnMVPnJl-KT3-virDk0JA%3D%3D%22%7D). Got questions or a problem? Find community-driven support in the public SimpleX group chat. If you want to hang out with other cool robots and learn more about RoboSats, those discussions happen in the SimpleX, Nostr, and Matrix group chats.
+  - [RoboSats Development Group](https://simplex.chat/contact#/?v=2-7&smp=smp%3A%2F%2F6iIcWT_dF2zN_w5xzZEY7HI2Prbh3ldP07YTyDexPjE%3D%40smp10.simplex.im%2FKEkNLMlgM8vrrU3xjBt5emS7EsP0c4s1%23%2F%3Fv%3D1-3%26dh%3DMCowBQYDK2VuAyEABehx7Tgefl_vvOGOe2SThJCGACKRgSU2wiUdIJ5bQHw%253D%26srv%3Drb2pbttocvnbrngnwziclp2f4ckjq65kebafws6g4hy22cdaiv5dwjqd.onion&data=%7B%22type%22%3A%22group%22%2C%22groupLinkId%22%3A%22gFi-9hvL3XgXXTgnlZPyJw%3D%3D%22%7D). Main developer communication group chat where open and technical discussions about development take place. Discussion about code changes happens in GitHub issues and pull requests (PRs).

-- **Nostr:** [RoboSats General Group](https://snort.social/e/note1tfwvglg8xz8420pfgav0dc9mqekv02nkpck2axefklrema7lk6wszmwxdy). Hang out with other cool robots and do not hesitate to ask questions about RoboSats! Also, the [RoboSats Nostr account](https://snort.social/p/npub1p2psats79rypr8lpnl9t5qdekfp700x660qsgw284xvq4s09lqrqqk3m82) (npub1p2psats79rypr8lpnl9t5qdekfp700x660qsgw284xvq4s09lqrqqk3m82) provides important project updates, tips and tricks of using RoboSats, and other privacy-centric commentary. Questions and engagement are welcome. Keep in mind: problems requiring RoboSats staff support should be directed to the main SimpleX group chat instead, where responses are quicker and staff can further investigate your problem.
+- **Nostr:** [RoboSats General Group](nostr:naddr1qpqrjv34vgckzcfjxp3kgvtzxcuxgepevycrzvesv5en2wps8pjrvd3hxuexvefs8qexxe3nvcun2v3ex3jxgdfhx56kxdm9vyck2ep48yq3wamnwvaz7tm8wfhh2urn9cc8scmgv96zucm0d5pqqqcyqqqfskq06gxuf). Hang out with other cool robots and do not hesitate to ask questions about RoboSats! Also, the [RoboSats Nostr account](https://snort.social/p/npub1p2psats79rypr8lpnl9t5qdekfp700x660qsgw284xvq4s09lqrqqk3m82) (npub1p2psats79rypr8lpnl9t5qdekfp700x660qsgw284xvq4s09lqrqqk3m82) provides important project updates, tips and tricks of using RoboSats, and other privacy-centric commentary. Questions and engagement are welcome. Keep in mind: problems requiring RoboSats staff support should be directed to the main SimpleX group chat instead, where responses are quicker and staff can further investigate your problem.

 - **Matrix:** [RoboSats Development Group](https://matrix.to/#/#robosats:matrix.org). Main developer communication group chat where open and technical discussions about development take place. Discussion about code changes happens in GitHub issues and pull requests (PRs).

 - **Twitter:** [RoboSats](https://twitter.com/RoboSats). Announces project updates, shares tips and tricks of using RoboSats, and discusses other privacy-related topics. Feel free to ask questions and to share your experience using RoboSats! The pseudonymous "Robotoshi" character, a privacy-loving and sometimes sassy robot, runs the RoboSats Twitter and tries his best to answer questions! Keep in mind that the aforementioned public SimpleX group chat may be more effective for seeking technical help, and users seeking dispute help will be directed to the SimpleX chat.
Some files were not shown because too many files have changed in this diff.