wip
This commit is contained in:
parent
8bcb3a7e85
commit
de2755f8ed
173 changed files with 47169 additions and 20113 deletions
10
.babelrc
10
.babelrc
|
|
@ -1,10 +0,0 @@
|
|||
{
|
||||
"env": {
|
||||
"test": {
|
||||
"plugins": [
|
||||
"transform-es2015-modules-commonjs",
|
||||
"@babel/plugin-proposal-class-properties"
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -1,3 +0,0 @@
|
|||
**/.git
|
||||
**/node_modules
|
||||
**/radata
|
||||
20
.env.example
20
.env.example
|
|
@ -1,20 +0,0 @@
|
|||
# Gun db storage
|
||||
DATA_FILE_NAME=radata2
|
||||
# Gun peer
|
||||
PEERS=["https://gun.shock.network/gun","https://gun-eu.shock.network/gun"]
|
||||
# API Device Token
|
||||
MS_TO_TOKEN_EXPIRATION=4500000
|
||||
# E2EE
|
||||
SHOCK_ENCRYPTION_ECC=true
|
||||
CACHE_HEADERS_MANDATORY=true
|
||||
SHOCK_CACHE=true
|
||||
# SSH Tunnel Provider
|
||||
LOCAL_TUNNEL_SERVER=https://tunnel.rip
|
||||
# Default content to your own seed server
|
||||
TORRENT_SEED_URL=https://webtorrent.shock.network
|
||||
# Admin token for your own seed server
|
||||
TORRENT_SEED_TOKEN=jibberish
|
||||
# "default" or "hosting"
|
||||
DEPLOYMENT_TYPE=hosting
|
||||
# allow to create a user with unlocked lnd
|
||||
ALLOW_UNLOCKED_LND=false
|
||||
|
|
@ -1,2 +0,0 @@
|
|||
*.ts
|
||||
/public/*.min.js
|
||||
105
.eslintrc.json
105
.eslintrc.json
|
|
@ -1,105 +0,0 @@
|
|||
{
|
||||
"extends": ["eslint:all", "prettier", "plugin:mocha/recommended"],
|
||||
"plugins": ["prettier", "mocha", "babel"],
|
||||
"rules": {
|
||||
"prettier/prettier": "error",
|
||||
"strict": "off",
|
||||
|
||||
"mocha/no-mocha-arrows": "off",
|
||||
|
||||
"max-statements-per-line": "off",
|
||||
|
||||
"no-empty-function": "off",
|
||||
|
||||
"no-console": "off",
|
||||
|
||||
"max-statements": "off",
|
||||
|
||||
"global-require": "off",
|
||||
|
||||
"new-cap": "off",
|
||||
|
||||
"one-var": "off",
|
||||
|
||||
"max-lines-per-function": "off",
|
||||
|
||||
"no-underscore-dangle": "off",
|
||||
|
||||
"no-implicit-coercion": "off",
|
||||
|
||||
"no-magic-numbers": "off",
|
||||
|
||||
"no-negated-condition": "off",
|
||||
|
||||
"capitalized-comments": "off",
|
||||
|
||||
"max-params": "off",
|
||||
|
||||
"multiline-comment-style": "off",
|
||||
|
||||
"spaced-comment": "off",
|
||||
|
||||
"no-inline-comments": "off",
|
||||
|
||||
"sort-keys": "off",
|
||||
|
||||
"max-lines": "off",
|
||||
|
||||
"prefer-template": "off",
|
||||
|
||||
"callback-return": "off",
|
||||
|
||||
"no-ternary": "off",
|
||||
|
||||
"no-invalid-this": "off",
|
||||
|
||||
"babel/no-invalid-this": "error",
|
||||
|
||||
"complexity": "off",
|
||||
|
||||
"yoda": "off",
|
||||
|
||||
"prefer-promise-reject-errors": "off",
|
||||
|
||||
"camelcase": "off",
|
||||
|
||||
"consistent-return": "off",
|
||||
|
||||
"no-shadow": "off",
|
||||
// We're usually throwing objects throughout the API to allow for more detailed error messages
|
||||
"no-throw-literal": "off",
|
||||
|
||||
// lightning has sync methods and this rule bans them
|
||||
"no-sync": "off",
|
||||
|
||||
"id-length": "off",
|
||||
|
||||
// typescript does this
|
||||
"no-unused-vars": "off",
|
||||
|
||||
// https://github.com/prettier/eslint-config-prettier/issues/132
|
||||
"line-comment-position": "off",
|
||||
|
||||
// if someone does this it's probably intentional
|
||||
"no-useless-concat": "off",
|
||||
|
||||
"no-plusplus": "off",
|
||||
|
||||
"no-undefined": "off",
|
||||
|
||||
"no-process-env": "off",
|
||||
|
||||
// I am now convinced TODO comments closer to the relevant code are better
|
||||
// than GH issues. Especially when it only concerns a single function /
|
||||
// routine.
|
||||
"no-warning-comments": "off",
|
||||
|
||||
// broken
|
||||
"sort-imports": "off"
|
||||
},
|
||||
"parser": "babel-eslint",
|
||||
"env": {
|
||||
"node": true,
|
||||
"es6": true
|
||||
}
|
||||
}
|
||||
2
.gitattributes
vendored
2
.gitattributes
vendored
|
|
@ -1,2 +0,0 @@
|
|||
# Auto detect text files and perform LF normalization
|
||||
* text=auto
|
||||
4
.github/FUNDING.yml
vendored
4
.github/FUNDING.yml
vendored
|
|
@ -1,4 +0,0 @@
|
|||
# These are supported funding model platforms
|
||||
|
||||
github: [shocknet,]
|
||||
custom: ['https://lightning.page/tcUUzRkyzXYhIZQbmopiCLREyZ_kQJqQ-C4XesecOm4.GX1Dv-eGcfKuOPobBK9Q-Sc-o697XgVCQzOCfqfimIo',]
|
||||
30
.github/dependabot.yml
vendored
30
.github/dependabot.yml
vendored
|
|
@ -1,30 +0,0 @@
|
|||
version: 2
|
||||
updates:
|
||||
- package-ecosystem: npm
|
||||
directory: "/"
|
||||
schedule:
|
||||
interval: daily
|
||||
open-pull-requests-limit: 10
|
||||
ignore:
|
||||
- dependency-name: bitcore-lib
|
||||
versions:
|
||||
- 8.24.2
|
||||
- 8.25.0
|
||||
- 8.25.2
|
||||
- 8.25.3
|
||||
- 8.25.4
|
||||
- 8.25.7
|
||||
- 9.0.0
|
||||
- dependency-name: socket.io
|
||||
versions:
|
||||
- 3.1.0
|
||||
- dependency-name: commander
|
||||
versions:
|
||||
- 7.0.0
|
||||
- 7.1.0
|
||||
- dependency-name: lint-staged
|
||||
versions:
|
||||
- 10.5.3
|
||||
- dependency-name: eslint-plugin-prettier
|
||||
versions:
|
||||
- 3.3.1
|
||||
40
.github/workflows/dockerhub.yml
vendored
40
.github/workflows/dockerhub.yml
vendored
|
|
@ -1,40 +0,0 @@
|
|||
name: Publish Docker image
|
||||
|
||||
on:
|
||||
release:
|
||||
types: [published]
|
||||
|
||||
jobs:
|
||||
push_to_registry:
|
||||
name: Push Docker image to Docker Hub
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Check out the repo
|
||||
uses: actions/checkout@v2
|
||||
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v1
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v1
|
||||
|
||||
- name: Log in to Docker Hub
|
||||
uses: docker/login-action@f054a8b539a109f9f41c372932f1ae047eff08c9
|
||||
with:
|
||||
username: ${{ secrets.DOCKER_USERNAME }}
|
||||
password: ${{ secrets.DOCKER_PASSWORD }}
|
||||
|
||||
- name: Extract metadata (tags, labels) for Docker
|
||||
id: meta
|
||||
uses: docker/metadata-action@98669ae865ea3cffbcbaa878cf57c20bbf1c6c38
|
||||
with:
|
||||
images: shockwallet/api
|
||||
|
||||
- name: Build and push Docker image
|
||||
uses: docker/build-push-action@ad44023a93711e3deb337508980b4b5e9bcdc5dc
|
||||
with:
|
||||
context: .
|
||||
platforms: linux/amd64,linux/arm64
|
||||
push: true
|
||||
tags: ${{ steps.meta.outputs.tags }}
|
||||
labels: ${{ steps.meta.outputs.labels }}
|
||||
40
.github/workflows/main.yml
vendored
40
.github/workflows/main.yml
vendored
|
|
@ -1,40 +0,0 @@
|
|||
name: Update Wizard
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [ 'master' ]
|
||||
pull_request:
|
||||
branches: [ 'master' ]
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
name:
|
||||
description: 'Bump Wizard Version'
|
||||
required: false
|
||||
default: 'yes'
|
||||
|
||||
jobs:
|
||||
dispatch:
|
||||
strategy:
|
||||
matrix:
|
||||
repo: ['shocknet/Wizard']
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: 🛎️ Checkout
|
||||
uses: actions/checkout@v2.3.1
|
||||
with:
|
||||
persist-credentials: false
|
||||
ref: ${{ github.ref }}
|
||||
|
||||
- name: ⚙️ Install Dependencies
|
||||
run: yarn install
|
||||
|
||||
- name: 📝 Run Tests
|
||||
run: yarn test
|
||||
|
||||
- name: 📯 Repository Dispatch
|
||||
uses: peter-evans/repository-dispatch@v1
|
||||
with:
|
||||
token: ${{ secrets.REPO_ACCESS_TOKEN }}
|
||||
repository: ${{ matrix.repo }}
|
||||
event-type: api-update
|
||||
client-payload: '{"ref": "${{ github.ref }}", "sha": "${{ github.sha }}"}'
|
||||
36
.github/workflows/version.yml
vendored
36
.github/workflows/version.yml
vendored
|
|
@ -1,36 +0,0 @@
|
|||
name: Bump "package.json" Version
|
||||
|
||||
on:
|
||||
release:
|
||||
types: [prereleased, released]
|
||||
|
||||
jobs:
|
||||
version-bump:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@master
|
||||
with:
|
||||
persist-credentials: false
|
||||
fetch-depth: 0 # otherwise, you will failed to push refs to dest repo
|
||||
- name: Get the last API TAG and current version in package.json
|
||||
run: |
|
||||
export RELEASE_TAG=$(git describe --tags --abbrev=0) && \
|
||||
echo "VERSION=${RELEASE_TAG}" >> $GITHUB_ENV
|
||||
|
||||
export API_TAG=$(cat ./package.json | jq -r '.version')
|
||||
|
||||
echo $(if [ "$API_TAG" = "$RELEASE_TAG" ]; then echo "UPGRADEABLE=false"; else echo "UPGRADEABLE=true"; fi) >> $GITHUB_ENV
|
||||
|
||||
- name: Update and Commit files
|
||||
if: ${{ env.UPGRADEABLE == 'true' }}
|
||||
run: |
|
||||
cat ./package.json | jq -r --arg API_TAG "${{ env.VERSION }}" '.version = $API_TAG' | tee a.json && mv a.json package.json
|
||||
git config --local user.email "actions@shock.network"
|
||||
git config --local user.name "Version Update Action"
|
||||
git commit -m "version upgraded to ${{ env.VERSION }}" -a
|
||||
- name: Push changes
|
||||
if: ${{ env.UPGRADEABLE == 'true' }}
|
||||
uses: ad-m/github-push-action@master
|
||||
with:
|
||||
github_token: ${{ secrets.GITHUB_TOKEN }}
|
||||
branch: master
|
||||
25
.gitignore
vendored
25
.gitignore
vendored
|
|
@ -1,25 +1,2 @@
|
|||
node_modules
|
||||
.storage
|
||||
services/auth/secrets.json
|
||||
.env
|
||||
*.log
|
||||
# New logger date format
|
||||
*.log.*
|
||||
.directory
|
||||
.DS_Store
|
||||
|
||||
test-radata/
|
||||
radata/
|
||||
radata-*.tmp
|
||||
*.cert
|
||||
*.key
|
||||
|
||||
*-audit.json
|
||||
# Yarn v2
|
||||
.pnp.*
|
||||
.yarn/*
|
||||
!.yarn/patches
|
||||
!.yarn/plugins
|
||||
!.yarn/releases
|
||||
!.yarn/sdks
|
||||
!.yarn/versions
|
||||
.env
|
||||
1
.npmrc
1
.npmrc
|
|
@ -1 +0,0 @@
|
|||
engine-strict = true
|
||||
1
.nvmrc
1
.nvmrc
|
|
@ -1 +0,0 @@
|
|||
v14.18.3
|
||||
|
|
@ -1,6 +0,0 @@
|
|||
{
|
||||
"requirePragma": true,
|
||||
"semi": false,
|
||||
"singleQuote": true,
|
||||
"endOfLine": "auto"
|
||||
}
|
||||
77
.vscode/launch.json
vendored
77
.vscode/launch.json
vendored
|
|
@ -1,77 +0,0 @@
|
|||
{
|
||||
"configurations": [
|
||||
{
|
||||
"name": "Attach",
|
||||
"port": 9229,
|
||||
"request": "attach",
|
||||
"skipFiles": ["<node_internals>/**"],
|
||||
"type": "pwa-node"
|
||||
},
|
||||
{
|
||||
"name": "Nodemon",
|
||||
"program": "${workspaceFolder}/main.js",
|
||||
"args": ["--", "-h", "0.0.0.0", "-c"],
|
||||
"request": "launch",
|
||||
"skipFiles": ["<node_internals>/**"],
|
||||
"type": "node",
|
||||
"envFile": "${workspaceFolder}/.env",
|
||||
"outputCapture": "std",
|
||||
|
||||
// https://code.visualstudio.com/docs/nodejs/nodejs-debugging#_restarting-debug-sessions-automatically-when-source-is-edited
|
||||
// Tip: Pressing the Stop button stops the debug session and disconnects
|
||||
// from Node.js, but nodemon (and Node.js) will continue to run. To stop
|
||||
// nodemon, you will have to kill it from the command line (which is
|
||||
// easily possible if you use the integratedTerminal as shown above).
|
||||
|
||||
// Tip: In case of syntax errors, nodemon will not be able to start
|
||||
// Node.js successfully until the error has been fixed. In this case, VS
|
||||
// Code will continue trying to attach to Node.js but eventually give up
|
||||
// (after 10 seconds). To avoid this, you can increase the timeout by
|
||||
// adding a timeout attribute with a larger value (in milliseconds).
|
||||
|
||||
"runtimeExecutable": "${workspaceFolder}/node_modules/nodemon/bin/nodemon.js",
|
||||
"console": "integratedTerminal",
|
||||
"internalConsoleOptions": "neverOpen",
|
||||
"restart": true
|
||||
},
|
||||
{
|
||||
"name": "Nodemon+Polar",
|
||||
"program": "${workspaceFolder}/main.js",
|
||||
"args": [
|
||||
"--",
|
||||
"-h",
|
||||
"0.0.0.0",
|
||||
"--trace-warnings",
|
||||
"--max-old-space-size=4096",
|
||||
"-c",
|
||||
"-d",
|
||||
"C:\\Users\\Predator\\AppData\\Local\\Lnd\\tls.cert",
|
||||
"-m",
|
||||
"C:\\Users\\Predator\\AppData\\Local\\Lnd\\data\\chain\\bitcoin\\mainnet\\admin.macaroon",
|
||||
"--tunnel"
|
||||
],
|
||||
"request": "launch",
|
||||
"skipFiles": ["<node_internals>/**"],
|
||||
"type": "node",
|
||||
"envFile": "${workspaceFolder}/.env",
|
||||
"outputCapture": "std",
|
||||
|
||||
// https://code.visualstudio.com/docs/nodejs/nodejs-debugging#_restarting-debug-sessions-automatically-when-source-is-edited
|
||||
// Tip: Pressing the Stop button stops the debug session and disconnects
|
||||
// from Node.js, but nodemon (and Node.js) will continue to run. To stop
|
||||
// nodemon, you will have to kill it from the command line (which is
|
||||
// easily possible if you use the integratedTerminal as shown above).
|
||||
|
||||
// Tip: In case of syntax errors, nodemon will not be able to start
|
||||
// Node.js successfully until the error has been fixed. In this case, VS
|
||||
// Code will continue trying to attach to Node.js but eventually give up
|
||||
// (after 10 seconds). To avoid this, you can increase the timeout by
|
||||
// adding a timeout attribute with a larger value (in milliseconds).
|
||||
|
||||
"runtimeExecutable": "${workspaceFolder}/node_modules/nodemon/bin/nodemon.js",
|
||||
"console": "integratedTerminal",
|
||||
"internalConsoleOptions": "neverOpen",
|
||||
"restart": true
|
||||
}
|
||||
]
|
||||
}
|
||||
83
.vscode/settings.json
vendored
83
.vscode/settings.json
vendored
|
|
@ -1,83 +0,0 @@
|
|||
{
|
||||
"eslint.enable": true,
|
||||
"typescript.tsdk": "node_modules/typescript/lib",
|
||||
"debug.node.autoAttach": "on",
|
||||
"editor.formatOnSave": true,
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode",
|
||||
"cSpell.words": [
|
||||
"acked",
|
||||
"addinvoice",
|
||||
"Authing",
|
||||
"channelbalance",
|
||||
"ciphertext",
|
||||
"closechannel",
|
||||
"closedchannels",
|
||||
"Cltv",
|
||||
"connectpeer",
|
||||
"disconnectpeer",
|
||||
"eccrypto",
|
||||
"endregion",
|
||||
"ephem",
|
||||
"epriv",
|
||||
"Epub",
|
||||
"estimatefee",
|
||||
"estimateroutefee",
|
||||
"exportallchanbackups",
|
||||
"exportchanbackup",
|
||||
"falsey",
|
||||
"forwardinghistory",
|
||||
"getchaninfo",
|
||||
"getinfo",
|
||||
"getnetworkinfo",
|
||||
"getnodeinfo",
|
||||
"GUNRPC",
|
||||
"Healthz",
|
||||
"initwall",
|
||||
"ISEA",
|
||||
"keysend",
|
||||
"kubernetes",
|
||||
"listchannels",
|
||||
"listinvoices",
|
||||
"listpayments",
|
||||
"listpeers",
|
||||
"listunspent",
|
||||
"lndchanbackups",
|
||||
"LNDRPC",
|
||||
"lndstreaming",
|
||||
"lnrpc",
|
||||
"lres",
|
||||
"msgpack",
|
||||
"newaddress",
|
||||
"openchannel",
|
||||
"otheruser",
|
||||
"payreq",
|
||||
"pendingchannels",
|
||||
"preimage",
|
||||
"PUBKEY",
|
||||
"qrcode",
|
||||
"queryroute",
|
||||
"radata",
|
||||
"Reqs",
|
||||
"resave",
|
||||
"satoshis",
|
||||
"sendcoins",
|
||||
"sendmany",
|
||||
"sendpayment",
|
||||
"sendtoroute",
|
||||
"serverhost",
|
||||
"serverport",
|
||||
"shockping",
|
||||
"SHOCKWALLET",
|
||||
"signmessage",
|
||||
"thenables",
|
||||
"trackpayment",
|
||||
"txid",
|
||||
"unfollow",
|
||||
"Unlocker",
|
||||
"unsubscription",
|
||||
"utxos",
|
||||
"uuidv",
|
||||
"verifymessage",
|
||||
"walletbalance"
|
||||
]
|
||||
}
|
||||
33
.vscode/snippets.code-snippets
vendored
33
.vscode/snippets.code-snippets
vendored
|
|
@ -1,33 +0,0 @@
|
|||
{
|
||||
// Place your api workspace snippets here. Each snippet is defined under a
|
||||
// snippet name and has a scope, prefix, body and description. Add comma
|
||||
// separated ids of the languages where the snippet is applicable in the scope
|
||||
// field. If scope is left empty or omitted, the snippet gets applied to all
|
||||
// languages. The prefix is what is used to trigger the snippet and the body
|
||||
// will be expanded and inserted. Possible variables are: $1, $2 for tab
|
||||
// stops, $0 for the final cursor position, and ${1:label}, ${2:another} for
|
||||
// placeholders. Placeholders with the same ids are connected. Example: "Print
|
||||
// to console": {"scope": "javascript,typescript", "prefix": "log", "body":
|
||||
// ["console.log('$1');", "$2"
|
||||
// ],
|
||||
// "description": "Log output to console"
|
||||
// }
|
||||
|
||||
"Route Body": {
|
||||
"body": [
|
||||
"try {",
|
||||
" return res.json({",
|
||||
"",
|
||||
" })",
|
||||
"} catch (e) {",
|
||||
" console.log(e)",
|
||||
" return res.status(500).json({",
|
||||
" errorMessage: e.message",
|
||||
" })",
|
||||
"}"
|
||||
],
|
||||
"description": "Route Body",
|
||||
"prefix": "rbody",
|
||||
"scope": "javascript"
|
||||
}
|
||||
}
|
||||
18
Dockerfile
18
Dockerfile
|
|
@ -1,18 +0,0 @@
|
|||
FROM node:14-buster-slim
|
||||
|
||||
EXPOSE 9835
|
||||
|
||||
VOLUME [ "/root/.lnd", "/data" ]
|
||||
RUN apt-get update && apt-get install -y apt-transport-https git
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
|
||||
ADD ./package.json /app/package.json
|
||||
ADD ./yarn.lock /app/yarn.lock
|
||||
|
||||
RUN yarn
|
||||
|
||||
ADD . /app
|
||||
|
||||
ENTRYPOINT [ "node", "main.js" ]
|
||||
688
LICENSE
688
LICENSE
|
|
@ -1,688 +0,0 @@
|
|||
“Commons Clause” License Condition v1.0
|
||||
|
||||
The Software is provided to you by the Licensor under the License, as defined below, subject to the following condition.
|
||||
|
||||
Without limiting other conditions in the License, the grant of rights under the License will not include, and the License does not grant to you, right to Sell the Software.
|
||||
|
||||
For purposes of the foregoing, “Sell” means practicing any or all of the rights granted to you under the License to provide to third parties, for a fee or other consideration (including without limitation fees for hosting or consulting/ support services related to the Software), a product or service whose value derives, entirely or substantially, from the functionality of the Software. Any license notice or attribution required by the License must also include this Commons Cause License Condition notice.
|
||||
|
||||
Software: shocknet api
|
||||
License: GPL3
|
||||
Licensor: Shock Network, Inc.
|
||||
|
||||
|
||||
|
||||
GNU GENERAL PUBLIC LICENSE
|
||||
Version 3, 29 June 2007
|
||||
|
||||
Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
|
||||
Everyone is permitted to copy and distribute verbatim copies
|
||||
of this license document, but changing it is not allowed.
|
||||
|
||||
Preamble
|
||||
|
||||
The GNU General Public License is a free, copyleft license for
|
||||
software and other kinds of works.
|
||||
|
||||
The licenses for most software and other practical works are designed
|
||||
to take away your freedom to share and change the works. By contrast,
|
||||
the GNU General Public License is intended to guarantee your freedom to
|
||||
share and change all versions of a program--to make sure it remains free
|
||||
software for all its users. We, the Free Software Foundation, use the
|
||||
GNU General Public License for most of our software; it applies also to
|
||||
any other work released this way by its authors. You can apply it to
|
||||
your programs, too.
|
||||
|
||||
When we speak of free software, we are referring to freedom, not
|
||||
price. Our General Public Licenses are designed to make sure that you
|
||||
have the freedom to distribute copies of free software (and charge for
|
||||
them if you wish), that you receive source code or can get it if you
|
||||
want it, that you can change the software or use pieces of it in new
|
||||
free programs, and that you know you can do these things.
|
||||
|
||||
To protect your rights, we need to prevent others from denying you
|
||||
these rights or asking you to surrender the rights. Therefore, you have
|
||||
certain responsibilities if you distribute copies of the software, or if
|
||||
you modify it: responsibilities to respect the freedom of others.
|
||||
|
||||
For example, if you distribute copies of such a program, whether
|
||||
gratis or for a fee, you must pass on to the recipients the same
|
||||
freedoms that you received. You must make sure that they, too, receive
|
||||
or can get the source code. And you must show them these terms so they
|
||||
know their rights.
|
||||
|
||||
Developers that use the GNU GPL protect your rights with two steps:
|
||||
(1) assert copyright on the software, and (2) offer you this License
|
||||
giving you legal permission to copy, distribute and/or modify it.
|
||||
|
||||
For the developers' and authors' protection, the GPL clearly explains
|
||||
that there is no warranty for this free software. For both users' and
|
||||
authors' sake, the GPL requires that modified versions be marked as
|
||||
changed, so that their problems will not be attributed erroneously to
|
||||
authors of previous versions.
|
||||
|
||||
Some devices are designed to deny users access to install or run
|
||||
modified versions of the software inside them, although the manufacturer
|
||||
can do so. This is fundamentally incompatible with the aim of
|
||||
protecting users' freedom to change the software. The systematic
|
||||
pattern of such abuse occurs in the area of products for individuals to
|
||||
use, which is precisely where it is most unacceptable. Therefore, we
|
||||
have designed this version of the GPL to prohibit the practice for those
|
||||
products. If such problems arise substantially in other domains, we
|
||||
stand ready to extend this provision to those domains in future versions
|
||||
of the GPL, as needed to protect the freedom of users.
|
||||
|
||||
Finally, every program is threatened constantly by software patents.
|
||||
States should not allow patents to restrict development and use of
|
||||
software on general-purpose computers, but in those that do, we wish to
|
||||
avoid the special danger that patents applied to a free program could
|
||||
make it effectively proprietary. To prevent this, the GPL assures that
|
||||
patents cannot be used to render the program non-free.
|
||||
|
||||
The precise terms and conditions for copying, distribution and
|
||||
modification follow.
|
||||
|
||||
TERMS AND CONDITIONS
|
||||
|
||||
0. Definitions.
|
||||
|
||||
"This License" refers to version 3 of the GNU General Public License.
|
||||
|
||||
"Copyright" also means copyright-like laws that apply to other kinds of
|
||||
works, such as semiconductor masks.
|
||||
|
||||
"The Program" refers to any copyrightable work licensed under this
|
||||
License. Each licensee is addressed as "you". "Licensees" and
|
||||
"recipients" may be individuals or organizations.
|
||||
|
||||
To "modify" a work means to copy from or adapt all or part of the work
|
||||
in a fashion requiring copyright permission, other than the making of an
|
||||
exact copy. The resulting work is called a "modified version" of the
|
||||
earlier work or a work "based on" the earlier work.
|
||||
|
||||
A "covered work" means either the unmodified Program or a work based
|
||||
on the Program.
|
||||
|
||||
To "propagate" a work means to do anything with it that, without
|
||||
permission, would make you directly or secondarily liable for
|
||||
infringement under applicable copyright law, except executing it on a
|
||||
computer or modifying a private copy. Propagation includes copying,
|
||||
distribution (with or without modification), making available to the
|
||||
public, and in some countries other activities as well.
|
||||
|
||||
To "convey" a work means any kind of propagation that enables other
|
||||
parties to make or receive copies. Mere interaction with a user through
|
||||
a computer network, with no transfer of a copy, is not conveying.
|
||||
|
||||
An interactive user interface displays "Appropriate Legal Notices"
|
||||
to the extent that it includes a convenient and prominently visible
|
||||
feature that (1) displays an appropriate copyright notice, and (2)
|
||||
tells the user that there is no warranty for the work (except to the
|
||||
extent that warranties are provided), that licensees may convey the
|
||||
work under this License, and how to view a copy of this License. If
|
||||
the interface presents a list of user commands or options, such as a
|
||||
menu, a prominent item in the list meets this criterion.
|
||||
|
||||
1. Source Code.
|
||||
|
||||
The "source code" for a work means the preferred form of the work
|
||||
for making modifications to it. "Object code" means any non-source
|
||||
form of a work.
|
||||
|
||||
A "Standard Interface" means an interface that either is an official
|
||||
standard defined by a recognized standards body, or, in the case of
|
||||
interfaces specified for a particular programming language, one that
|
||||
is widely used among developers working in that language.
|
||||
|
||||
The "System Libraries" of an executable work include anything, other
|
||||
than the work as a whole, that (a) is included in the normal form of
|
||||
packaging a Major Component, but which is not part of that Major
|
||||
Component, and (b) serves only to enable use of the work with that
|
||||
Major Component, or to implement a Standard Interface for which an
|
||||
implementation is available to the public in source code form. A
|
||||
"Major Component", in this context, means a major essential component
|
||||
(kernel, window system, and so on) of the specific operating system
|
||||
(if any) on which the executable work runs, or a compiler used to
|
||||
produce the work, or an object code interpreter used to run it.
|
||||
|
||||
The "Corresponding Source" for a work in object code form means all
|
||||
the source code needed to generate, install, and (for an executable
|
||||
work) run the object code and to modify the work, including scripts to
|
||||
control those activities. However, it does not include the work's
|
||||
System Libraries, or general-purpose tools or generally available free
|
||||
programs which are used unmodified in performing those activities but
|
||||
which are not part of the work. For example, Corresponding Source
|
||||
includes interface definition files associated with source files for
|
||||
the work, and the source code for shared libraries and dynamically
|
||||
linked subprograms that the work is specifically designed to require,
|
||||
such as by intimate data communication or control flow between those
|
||||
subprograms and other parts of the work.
|
||||
|
||||
The Corresponding Source need not include anything that users
|
||||
can regenerate automatically from other parts of the Corresponding
|
||||
Source.
|
||||
|
||||
The Corresponding Source for a work in source code form is that
|
||||
same work.
|
||||
|
||||
2. Basic Permissions.
|
||||
|
||||
All rights granted under this License are granted for the term of
|
||||
copyright on the Program, and are irrevocable provided the stated
|
||||
conditions are met. This License explicitly affirms your unlimited
|
||||
permission to run the unmodified Program. The output from running a
|
||||
covered work is covered by this License only if the output, given its
|
||||
content, constitutes a covered work. This License acknowledges your
|
||||
rights of fair use or other equivalent, as provided by copyright law.
|
||||
|
||||
You may make, run and propagate covered works that you do not
|
||||
convey, without conditions so long as your license otherwise remains
|
||||
in force. You may convey covered works to others for the sole purpose
|
||||
of having them make modifications exclusively for you, or provide you
|
||||
with facilities for running those works, provided that you comply with
|
||||
the terms of this License in conveying all material for which you do
|
||||
not control copyright. Those thus making or running the covered works
|
||||
for you must do so exclusively on your behalf, under your direction
|
||||
and control, on terms that prohibit them from making any copies of
|
||||
your copyrighted material outside their relationship with you.
|
||||
|
||||
Conveying under any other circumstances is permitted solely under
|
||||
the conditions stated below. Sublicensing is not allowed; section 10
|
||||
makes it unnecessary.
|
||||
|
||||
3. Protecting Users' Legal Rights From Anti-Circumvention Law.
|
||||
|
||||
No covered work shall be deemed part of an effective technological
|
||||
measure under any applicable law fulfilling obligations under article
|
||||
11 of the WIPO copyright treaty adopted on 20 December 1996, or
|
||||
similar laws prohibiting or restricting circumvention of such
|
||||
measures.
|
||||
|
||||
When you convey a covered work, you waive any legal power to forbid
|
||||
circumvention of technological measures to the extent such circumvention
|
||||
is effected by exercising rights under this License with respect to
|
||||
the covered work, and you disclaim any intention to limit operation or
|
||||
modification of the work as a means of enforcing, against the work's
|
||||
users, your or third parties' legal rights to forbid circumvention of
|
||||
technological measures.
|
||||
|
||||
4. Conveying Verbatim Copies.
|
||||
|
||||
You may convey verbatim copies of the Program's source code as you
|
||||
receive it, in any medium, provided that you conspicuously and
|
||||
appropriately publish on each copy an appropriate copyright notice;
|
||||
keep intact all notices stating that this License and any
|
||||
non-permissive terms added in accord with section 7 apply to the code;
|
||||
keep intact all notices of the absence of any warranty; and give all
|
||||
recipients a copy of this License along with the Program.
|
||||
|
||||
You may charge any price or no price for each copy that you convey,
|
||||
and you may offer support or warranty protection for a fee.
|
||||
|
||||
5. Conveying Modified Source Versions.
|
||||
|
||||
You may convey a work based on the Program, or the modifications to
|
||||
produce it from the Program, in the form of source code under the
|
||||
terms of section 4, provided that you also meet all of these conditions:
|
||||
|
||||
a) The work must carry prominent notices stating that you modified
|
||||
it, and giving a relevant date.
|
||||
|
||||
b) The work must carry prominent notices stating that it is
|
||||
released under this License and any conditions added under section
|
||||
7. This requirement modifies the requirement in section 4 to
|
||||
"keep intact all notices".
|
||||
|
||||
c) You must license the entire work, as a whole, under this
|
||||
License to anyone who comes into possession of a copy. This
|
||||
License will therefore apply, along with any applicable section 7
|
||||
additional terms, to the whole of the work, and all its parts,
|
||||
regardless of how they are packaged. This License gives no
|
||||
permission to license the work in any other way, but it does not
|
||||
invalidate such permission if you have separately received it.
|
||||
|
||||
d) If the work has interactive user interfaces, each must display
|
||||
Appropriate Legal Notices; however, if the Program has interactive
|
||||
interfaces that do not display Appropriate Legal Notices, your
|
||||
work need not make them do so.
|
||||
|
||||
A compilation of a covered work with other separate and independent
|
||||
works, which are not by their nature extensions of the covered work,
|
||||
and which are not combined with it such as to form a larger program,
|
||||
in or on a volume of a storage or distribution medium, is called an
|
||||
"aggregate" if the compilation and its resulting copyright are not
|
||||
used to limit the access or legal rights of the compilation's users
|
||||
beyond what the individual works permit. Inclusion of a covered work
|
||||
in an aggregate does not cause this License to apply to the other
|
||||
parts of the aggregate.
|
||||
|
||||
6. Conveying Non-Source Forms.
|
||||
|
||||
You may convey a covered work in object code form under the terms
|
||||
of sections 4 and 5, provided that you also convey the
|
||||
machine-readable Corresponding Source under the terms of this License,
|
||||
in one of these ways:
|
||||
|
||||
a) Convey the object code in, or embodied in, a physical product
|
||||
(including a physical distribution medium), accompanied by the
|
||||
Corresponding Source fixed on a durable physical medium
|
||||
customarily used for software interchange.
|
||||
|
||||
b) Convey the object code in, or embodied in, a physical product
|
||||
(including a physical distribution medium), accompanied by a
|
||||
written offer, valid for at least three years and valid for as
|
||||
long as you offer spare parts or customer support for that product
|
||||
model, to give anyone who possesses the object code either (1) a
|
||||
copy of the Corresponding Source for all the software in the
|
||||
product that is covered by this License, on a durable physical
|
||||
medium customarily used for software interchange, for a price no
|
||||
more than your reasonable cost of physically performing this
|
||||
conveying of source, or (2) access to copy the
|
||||
Corresponding Source from a network server at no charge.
|
||||
|
||||
c) Convey individual copies of the object code with a copy of the
|
||||
written offer to provide the Corresponding Source. This
|
||||
alternative is allowed only occasionally and noncommercially, and
|
||||
only if you received the object code with such an offer, in accord
|
||||
with subsection 6b.
|
||||
|
||||
d) Convey the object code by offering access from a designated
|
||||
place (gratis or for a charge), and offer equivalent access to the
|
||||
Corresponding Source in the same way through the same place at no
|
||||
further charge. You need not require recipients to copy the
|
||||
Corresponding Source along with the object code. If the place to
|
||||
copy the object code is a network server, the Corresponding Source
|
||||
may be on a different server (operated by you or a third party)
|
||||
that supports equivalent copying facilities, provided you maintain
|
||||
clear directions next to the object code saying where to find the
|
||||
Corresponding Source. Regardless of what server hosts the
|
||||
Corresponding Source, you remain obligated to ensure that it is
|
||||
available for as long as needed to satisfy these requirements.
|
||||
|
||||
e) Convey the object code using peer-to-peer transmission, provided
|
||||
you inform other peers where the object code and Corresponding
|
||||
Source of the work are being offered to the general public at no
|
||||
charge under subsection 6d.
|
||||
|
||||
A separable portion of the object code, whose source code is excluded
|
||||
from the Corresponding Source as a System Library, need not be
|
||||
included in conveying the object code work.
|
||||
|
||||
A "User Product" is either (1) a "consumer product", which means any
|
||||
tangible personal property which is normally used for personal, family,
|
||||
or household purposes, or (2) anything designed or sold for incorporation
|
||||
into a dwelling. In determining whether a product is a consumer product,
|
||||
doubtful cases shall be resolved in favor of coverage. For a particular
|
||||
product received by a particular user, "normally used" refers to a
|
||||
typical or common use of that class of product, regardless of the status
|
||||
of the particular user or of the way in which the particular user
|
||||
actually uses, or expects or is expected to use, the product. A product
|
||||
is a consumer product regardless of whether the product has substantial
|
||||
commercial, industrial or non-consumer uses, unless such uses represent
|
||||
the only significant mode of use of the product.
|
||||
|
||||
"Installation Information" for a User Product means any methods,
|
||||
procedures, authorization keys, or other information required to install
|
||||
and execute modified versions of a covered work in that User Product from
|
||||
a modified version of its Corresponding Source. The information must
|
||||
suffice to ensure that the continued functioning of the modified object
|
||||
code is in no case prevented or interfered with solely because
|
||||
modification has been made.
|
||||
|
||||
If you convey an object code work under this section in, or with, or
|
||||
specifically for use in, a User Product, and the conveying occurs as
|
||||
part of a transaction in which the right of possession and use of the
|
||||
User Product is transferred to the recipient in perpetuity or for a
|
||||
fixed term (regardless of how the transaction is characterized), the
|
||||
Corresponding Source conveyed under this section must be accompanied
|
||||
by the Installation Information. But this requirement does not apply
|
||||
if neither you nor any third party retains the ability to install
|
||||
modified object code on the User Product (for example, the work has
|
||||
been installed in ROM).
|
||||
|
||||
The requirement to provide Installation Information does not include a
|
||||
requirement to continue to provide support service, warranty, or updates
|
||||
for a work that has been modified or installed by the recipient, or for
|
||||
the User Product in which it has been modified or installed. Access to a
|
||||
network may be denied when the modification itself materially and
|
||||
adversely affects the operation of the network or violates the rules and
|
||||
protocols for communication across the network.
|
||||
|
||||
Corresponding Source conveyed, and Installation Information provided,
|
||||
in accord with this section must be in a format that is publicly
|
||||
documented (and with an implementation available to the public in
|
||||
source code form), and must require no special password or key for
|
||||
unpacking, reading or copying.
|
||||
|
||||
7. Additional Terms.
|
||||
|
||||
"Additional permissions" are terms that supplement the terms of this
|
||||
License by making exceptions from one or more of its conditions.
|
||||
Additional permissions that are applicable to the entire Program shall
|
||||
be treated as though they were included in this License, to the extent
|
||||
that they are valid under applicable law. If additional permissions
|
||||
apply only to part of the Program, that part may be used separately
|
||||
under those permissions, but the entire Program remains governed by
|
||||
this License without regard to the additional permissions.
|
||||
|
||||
When you convey a copy of a covered work, you may at your option
|
||||
remove any additional permissions from that copy, or from any part of
|
||||
it. (Additional permissions may be written to require their own
|
||||
removal in certain cases when you modify the work.) You may place
|
||||
additional permissions on material, added by you to a covered work,
|
||||
for which you have or can give appropriate copyright permission.
|
||||
|
||||
Notwithstanding any other provision of this License, for material you
|
||||
add to a covered work, you may (if authorized by the copyright holders of
|
||||
that material) supplement the terms of this License with terms:
|
||||
|
||||
a) Disclaiming warranty or limiting liability differently from the
|
||||
terms of sections 15 and 16 of this License; or
|
||||
|
||||
b) Requiring preservation of specified reasonable legal notices or
|
||||
author attributions in that material or in the Appropriate Legal
|
||||
Notices displayed by works containing it; or
|
||||
|
||||
c) Prohibiting misrepresentation of the origin of that material, or
|
||||
requiring that modified versions of such material be marked in
|
||||
reasonable ways as different from the original version; or
|
||||
|
||||
d) Limiting the use for publicity purposes of names of licensors or
|
||||
authors of the material; or
|
||||
|
||||
e) Declining to grant rights under trademark law for use of some
|
||||
trade names, trademarks, or service marks; or
|
||||
|
||||
f) Requiring indemnification of licensors and authors of that
|
||||
material by anyone who conveys the material (or modified versions of
|
||||
it) with contractual assumptions of liability to the recipient, for
|
||||
any liability that these contractual assumptions directly impose on
|
||||
those licensors and authors.
|
||||
|
||||
All other non-permissive additional terms are considered "further
|
||||
restrictions" within the meaning of section 10. If the Program as you
|
||||
received it, or any part of it, contains a notice stating that it is
|
||||
governed by this License along with a term that is a further
|
||||
restriction, you may remove that term. If a license document contains
|
||||
a further restriction but permits relicensing or conveying under this
|
||||
License, you may add to a covered work material governed by the terms
|
||||
of that license document, provided that the further restriction does
|
||||
not survive such relicensing or conveying.
|
||||
|
||||
If you add terms to a covered work in accord with this section, you
|
||||
must place, in the relevant source files, a statement of the
|
||||
additional terms that apply to those files, or a notice indicating
|
||||
where to find the applicable terms.
|
||||
|
||||
Additional terms, permissive or non-permissive, may be stated in the
|
||||
form of a separately written license, or stated as exceptions;
|
||||
the above requirements apply either way.
|
||||
|
||||
8. Termination.
|
||||
|
||||
You may not propagate or modify a covered work except as expressly
|
||||
provided under this License. Any attempt otherwise to propagate or
|
||||
modify it is void, and will automatically terminate your rights under
|
||||
this License (including any patent licenses granted under the third
|
||||
paragraph of section 11).
|
||||
|
||||
However, if you cease all violation of this License, then your
|
||||
license from a particular copyright holder is reinstated (a)
|
||||
provisionally, unless and until the copyright holder explicitly and
|
||||
finally terminates your license, and (b) permanently, if the copyright
|
||||
holder fails to notify you of the violation by some reasonable means
|
||||
prior to 60 days after the cessation.
|
||||
|
||||
Moreover, your license from a particular copyright holder is
|
||||
reinstated permanently if the copyright holder notifies you of the
|
||||
violation by some reasonable means, this is the first time you have
|
||||
received notice of violation of this License (for any work) from that
|
||||
copyright holder, and you cure the violation prior to 30 days after
|
||||
your receipt of the notice.
|
||||
|
||||
Termination of your rights under this section does not terminate the
|
||||
licenses of parties who have received copies or rights from you under
|
||||
this License. If your rights have been terminated and not permanently
|
||||
reinstated, you do not qualify to receive new licenses for the same
|
||||
material under section 10.
|
||||
|
||||
9. Acceptance Not Required for Having Copies.
|
||||
|
||||
You are not required to accept this License in order to receive or
|
||||
run a copy of the Program. Ancillary propagation of a covered work
|
||||
occurring solely as a consequence of using peer-to-peer transmission
|
||||
to receive a copy likewise does not require acceptance. However,
|
||||
nothing other than this License grants you permission to propagate or
|
||||
modify any covered work. These actions infringe copyright if you do
|
||||
not accept this License. Therefore, by modifying or propagating a
|
||||
covered work, you indicate your acceptance of this License to do so.
|
||||
|
||||
10. Automatic Licensing of Downstream Recipients.
|
||||
|
||||
Each time you convey a covered work, the recipient automatically
|
||||
receives a license from the original licensors, to run, modify and
|
||||
propagate that work, subject to this License. You are not responsible
|
||||
for enforcing compliance by third parties with this License.
|
||||
|
||||
An "entity transaction" is a transaction transferring control of an
|
||||
organization, or substantially all assets of one, or subdividing an
|
||||
organization, or merging organizations. If propagation of a covered
|
||||
work results from an entity transaction, each party to that
|
||||
transaction who receives a copy of the work also receives whatever
|
||||
licenses to the work the party's predecessor in interest had or could
|
||||
give under the previous paragraph, plus a right to possession of the
|
||||
Corresponding Source of the work from the predecessor in interest, if
|
||||
the predecessor has it or can get it with reasonable efforts.
|
||||
|
||||
You may not impose any further restrictions on the exercise of the
|
||||
rights granted or affirmed under this License. For example, you may
|
||||
not impose a license fee, royalty, or other charge for exercise of
|
||||
rights granted under this License, and you may not initiate litigation
|
||||
(including a cross-claim or counterclaim in a lawsuit) alleging that
|
||||
any patent claim is infringed by making, using, selling, offering for
|
||||
sale, or importing the Program or any portion of it.
|
||||
|
||||
11. Patents.
|
||||
|
||||
A "contributor" is a copyright holder who authorizes use under this
|
||||
License of the Program or a work on which the Program is based. The
|
||||
work thus licensed is called the contributor's "contributor version".
|
||||
|
||||
A contributor's "essential patent claims" are all patent claims
|
||||
owned or controlled by the contributor, whether already acquired or
|
||||
hereafter acquired, that would be infringed by some manner, permitted
|
||||
by this License, of making, using, or selling its contributor version,
|
||||
but do not include claims that would be infringed only as a
|
||||
consequence of further modification of the contributor version. For
|
||||
purposes of this definition, "control" includes the right to grant
|
||||
patent sublicenses in a manner consistent with the requirements of
|
||||
this License.
|
||||
|
||||
Each contributor grants you a non-exclusive, worldwide, royalty-free
|
||||
patent license under the contributor's essential patent claims, to
|
||||
make, use, sell, offer for sale, import and otherwise run, modify and
|
||||
propagate the contents of its contributor version.
|
||||
|
||||
In the following three paragraphs, a "patent license" is any express
|
||||
agreement or commitment, however denominated, not to enforce a patent
|
||||
(such as an express permission to practice a patent or covenant not to
|
||||
sue for patent infringement). To "grant" such a patent license to a
|
||||
party means to make such an agreement or commitment not to enforce a
|
||||
patent against the party.
|
||||
|
||||
If you convey a covered work, knowingly relying on a patent license,
|
||||
and the Corresponding Source of the work is not available for anyone
|
||||
to copy, free of charge and under the terms of this License, through a
|
||||
publicly available network server or other readily accessible means,
|
||||
then you must either (1) cause the Corresponding Source to be so
|
||||
available, or (2) arrange to deprive yourself of the benefit of the
|
||||
patent license for this particular work, or (3) arrange, in a manner
|
||||
consistent with the requirements of this License, to extend the patent
|
||||
license to downstream recipients. "Knowingly relying" means you have
|
||||
actual knowledge that, but for the patent license, your conveying the
|
||||
covered work in a country, or your recipient's use of the covered work
|
||||
in a country, would infringe one or more identifiable patents in that
|
||||
country that you have reason to believe are valid.
|
||||
|
||||
If, pursuant to or in connection with a single transaction or
|
||||
arrangement, you convey, or propagate by procuring conveyance of, a
|
||||
covered work, and grant a patent license to some of the parties
|
||||
receiving the covered work authorizing them to use, propagate, modify
|
||||
or convey a specific copy of the covered work, then the patent license
|
||||
you grant is automatically extended to all recipients of the covered
|
||||
work and works based on it.
|
||||
|
||||
A patent license is "discriminatory" if it does not include within
|
||||
the scope of its coverage, prohibits the exercise of, or is
|
||||
conditioned on the non-exercise of one or more of the rights that are
|
||||
specifically granted under this License. You may not convey a covered
|
||||
work if you are a party to an arrangement with a third party that is
|
||||
in the business of distributing software, under which you make payment
|
||||
to the third party based on the extent of your activity of conveying
|
||||
the work, and under which the third party grants, to any of the
|
||||
parties who would receive the covered work from you, a discriminatory
|
||||
patent license (a) in connection with copies of the covered work
|
||||
conveyed by you (or copies made from those copies), or (b) primarily
|
||||
for and in connection with specific products or compilations that
|
||||
contain the covered work, unless you entered into that arrangement,
|
||||
or that patent license was granted, prior to 28 March 2007.
|
||||
|
||||
Nothing in this License shall be construed as excluding or limiting
|
||||
any implied license or other defenses to infringement that may
|
||||
otherwise be available to you under applicable patent law.
|
||||
|
||||
12. No Surrender of Others' Freedom.
|
||||
|
||||
If conditions are imposed on you (whether by court order, agreement or
|
||||
otherwise) that contradict the conditions of this License, they do not
|
||||
excuse you from the conditions of this License. If you cannot convey a
|
||||
covered work so as to satisfy simultaneously your obligations under this
|
||||
License and any other pertinent obligations, then as a consequence you may
|
||||
not convey it at all. For example, if you agree to terms that obligate you
|
||||
to collect a royalty for further conveying from those to whom you convey
|
||||
the Program, the only way you could satisfy both those terms and this
|
||||
License would be to refrain entirely from conveying the Program.
|
||||
|
||||
13. Use with the GNU Affero General Public License.
|
||||
|
||||
Notwithstanding any other provision of this License, you have
|
||||
permission to link or combine any covered work with a work licensed
|
||||
under version 3 of the GNU Affero General Public License into a single
|
||||
combined work, and to convey the resulting work. The terms of this
|
||||
License will continue to apply to the part which is the covered work,
|
||||
but the special requirements of the GNU Affero General Public License,
|
||||
section 13, concerning interaction through a network will apply to the
|
||||
combination as such.
|
||||
|
||||
14. Revised Versions of this License.
|
||||
|
||||
The Free Software Foundation may publish revised and/or new versions of
|
||||
the GNU General Public License from time to time. Such new versions will
|
||||
be similar in spirit to the present version, but may differ in detail to
|
||||
address new problems or concerns.
|
||||
|
||||
Each version is given a distinguishing version number. If the
|
||||
Program specifies that a certain numbered version of the GNU General
|
||||
Public License "or any later version" applies to it, you have the
|
||||
option of following the terms and conditions either of that numbered
|
||||
version or of any later version published by the Free Software
|
||||
Foundation. If the Program does not specify a version number of the
|
||||
GNU General Public License, you may choose any version ever published
|
||||
by the Free Software Foundation.
|
||||
|
||||
If the Program specifies that a proxy can decide which future
|
||||
versions of the GNU General Public License can be used, that proxy's
|
||||
public statement of acceptance of a version permanently authorizes you
|
||||
to choose that version for the Program.
|
||||
|
||||
Later license versions may give you additional or different
|
||||
permissions. However, no additional obligations are imposed on any
|
||||
author or copyright holder as a result of your choosing to follow a
|
||||
later version.
|
||||
|
||||
15. Disclaimer of Warranty.
|
||||
|
||||
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
|
||||
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
|
||||
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
|
||||
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
|
||||
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
|
||||
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
|
||||
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
|
||||
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
|
||||
|
||||
16. Limitation of Liability.
|
||||
|
||||
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
|
||||
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
|
||||
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
|
||||
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
|
||||
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
|
||||
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
|
||||
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
|
||||
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
|
||||
SUCH DAMAGES.
|
||||
|
||||
17. Interpretation of Sections 15 and 16.
|
||||
|
||||
If the disclaimer of warranty and limitation of liability provided
|
||||
above cannot be given local legal effect according to their terms,
|
||||
reviewing courts shall apply local law that most closely approximates
|
||||
an absolute waiver of all civil liability in connection with the
|
||||
Program, unless a warranty or assumption of liability accompanies a
|
||||
copy of the Program in return for a fee.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
How to Apply These Terms to Your New Programs
|
||||
|
||||
If you develop a new program, and you want it to be of the greatest
|
||||
possible use to the public, the best way to achieve this is to make it
|
||||
free software which everyone can redistribute and change under these terms.
|
||||
|
||||
To do so, attach the following notices to the program. It is safest
|
||||
to attach them to the start of each source file to most effectively
|
||||
state the exclusion of warranty; and each file should have at least
|
||||
the "copyright" line and a pointer to where the full notice is found.
|
||||
|
||||
<one line to give the program's name and a brief idea of what it does.>
|
||||
Copyright (C) <year> <name of author>
|
||||
|
||||
This program is free software: you can redistribute it and/or modify
|
||||
it under the terms of the GNU General Public License as published by
|
||||
the Free Software Foundation, either version 3 of the License, or
|
||||
(at your option) any later version.
|
||||
|
||||
This program is distributed in the hope that it will be useful,
|
||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
GNU General Public License for more details.
|
||||
|
||||
You should have received a copy of the GNU General Public License
|
||||
along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
Also add information on how to contact you by electronic and paper mail.
|
||||
|
||||
If the program does terminal interaction, make it output a short
|
||||
notice like this when it starts in an interactive mode:
|
||||
|
||||
<program> Copyright (C) <year> <name of author>
|
||||
This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
|
||||
This is free software, and you are welcome to redistribute it
|
||||
under certain conditions; type `show c' for details.
|
||||
|
||||
The hypothetical commands `show w' and `show c' should show the appropriate
|
||||
parts of the General Public License. Of course, your program's commands
|
||||
might be different; for a GUI interface, you would use an "about box".
|
||||
|
||||
You should also get your employer (if you work as a programmer) or school,
|
||||
if any, to sign a "copyright disclaimer" for the program, if necessary.
|
||||
For more information on this, and how to apply and follow the GNU GPL, see
|
||||
<https://www.gnu.org/licenses/>.
|
||||
|
||||
The GNU General Public License does not permit incorporating your program
|
||||
into proprietary programs. If your program is a subroutine library, you
|
||||
may consider it more useful to permit linking proprietary applications with
|
||||
the library. If this is what you want to do, use the GNU Lesser General
|
||||
Public License instead of this License. But first, please read
|
||||
<https://www.gnu.org/licenses/why-not-lgpl.html>.
|
||||
77
README.md
77
README.md
|
|
@ -1,77 +0,0 @@
|
|||
<h1>Lightning.Pub</h1>
|
||||
|
||||

|
||||
[](http://makeapullrequest.com)
|
||||
[](https://t.me/LightningPage)
|
||||
[](https://twitter.com/ShockBTC)
|
||||
|
||||
<p></p>
|
||||
|
||||
This is an alpha release of the Lightning.Page backend service, providing a wrapper for [LND](https://github.com/shocknet/lnd/releases) and more stuff to be announced later.<br>
|
||||
|
||||
Run this service on your Lightning node and connect with a mobile device or desktop browser.
|
||||
|
||||
---
|
||||
- [Easy Installation](#easy-installation)
|
||||
- [Manual Installation](#manual-installation)
|
||||
- [Docker Usage](#docker-usage)
|
||||
- [Node Security](#node-security)
|
||||
<!--- - [Docker for Raspberry Pi](#docker-for-raspberry-pi) -->
|
||||
---
|
||||
### Easy Installation
|
||||
|
||||
For easy setup on your Laptop/Desktop, [a node wizard is available here.](https://github.com/shocknet/wizard)
|
||||
|
||||
|
||||
### Manual Installation
|
||||
#### Notes:
|
||||
* The service defaults to port `9835`
|
||||
* Looks for local LND in its default path
|
||||
* Change defaults in `defaults.js`
|
||||
* Requires [Node.js](https://nodejs.org) 16
|
||||
|
||||
#### Steps:
|
||||
1) Run [LND](https://github.com/shocknet/lnd/releases) - *Example mainnet startup*:
|
||||
|
||||
```
|
||||
./lnd --bitcoin.active --bitcoin.mainnet --bitcoin.node=neutrino --neutrino.connect=neutrino.shock.network --routing.assumechanvalid --accept-keysend --allow-circular-route --feeurl=https://nodes.lightning.computer/fees/v1/btc-fee-estimates.json
|
||||
```
|
||||
|
||||
|
||||
2) Download and Install API
|
||||
|
||||
```
|
||||
git clone https://github.com/shocknet/Lightning.Pub
|
||||
cd Lightning.Pub
|
||||
yarn install
|
||||
```
|
||||
|
||||
3) Run with `yarn start -t` *(`-t` is recommended but [not required](#node-security))*
|
||||
4) Connect with Dashboard
|
||||
|
||||
|
||||
### Docker Usage
|
||||
To run Lightning.Pub in a fully isolated environment you can use the Docker image
|
||||
provided on the Docker Hub and easily interact with API's CLI interface and flags.
|
||||
|
||||
#### Prerequisites
|
||||
To interact with Lightning.Pub's Docker image you need an instance of LND running and
|
||||
also if your configs, LND related files and certificates are located on a local file system you'll need to mount **Docker Volumes** pointed to them while starting the container.
|
||||
|
||||
Example of listing available configuration flags:
|
||||
```
|
||||
docker run --rm shockwallet/Lightning.Pub:latest --help
|
||||
```
|
||||
Example of running an local instance with mounted volumes:
|
||||
```
|
||||
docker run -v /home/$USER/.lnd:/root/.lnd --network host shockwallet/Lightning.Pub:latest
|
||||
```
|
||||
|
||||
### Node Security
|
||||
|
||||
Lightning.Pub uses E2E encryption bootstrapped with PAKE.
|
||||
|
||||
There are advanced or testing scenarios where you may wish to bypass this security, to do so pass the env `TRUSTED_KEYS=false`
|
||||
|
||||
Communication between the wallet and Lightning.Pub is encrypted regardless of whether or not SSL is used, though an SSL equipped reverse proxy is recommended for better usability with front-ends. Running with `-t` enables the built-in SSL tunnel provider for ease of use and zero-configuration networking.
|
||||
|
||||
117
build/express_server.js
Normal file
117
build/express_server.js
Normal file
|
|
@ -0,0 +1,117 @@
|
|||
"use strict";
|
||||
// This file was autogenerated from a .proto file, DO NOT EDIT!
|
||||
var __assign = (this && this.__assign) || function () {
|
||||
__assign = Object.assign || function(t) {
|
||||
for (var s, i = 1, n = arguments.length; i < n; i++) {
|
||||
s = arguments[i];
|
||||
for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p))
|
||||
t[p] = s[p];
|
||||
}
|
||||
return t;
|
||||
};
|
||||
return __assign.apply(this, arguments);
|
||||
};
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||
return new (P || (P = Promise))(function (resolve, reject) {
|
||||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||
});
|
||||
};
|
||||
var __generator = (this && this.__generator) || function (thisArg, body) {
|
||||
var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;
|
||||
return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g;
|
||||
function verb(n) { return function (v) { return step([n, v]); }; }
|
||||
function step(op) {
|
||||
if (f) throw new TypeError("Generator is already executing.");
|
||||
while (_) try {
|
||||
if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;
|
||||
if (y = 0, t) op = [op[0] & 2, t.value];
|
||||
switch (op[0]) {
|
||||
case 0: case 1: t = op; break;
|
||||
case 4: _.label++; return { value: op[1], done: false };
|
||||
case 5: _.label++; y = op[1]; op = [0]; continue;
|
||||
case 7: op = _.ops.pop(); _.trys.pop(); continue;
|
||||
default:
|
||||
if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }
|
||||
if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }
|
||||
if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }
|
||||
if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }
|
||||
if (t[2]) _.ops.pop();
|
||||
_.trys.pop(); continue;
|
||||
}
|
||||
op = body.call(thisArg, _);
|
||||
} catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }
|
||||
if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };
|
||||
}
|
||||
};
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
var express_1 = __importDefault(require("express"));
|
||||
var Types = __importStar(require("./types"));
|
||||
var logErrorAndReturnResponse = function (error, response, res, logger) { logger.error(error.message || error); res.json({ status: 'ERROR', reason: response }); };
|
||||
exports.default = (function (methods, opts) {
|
||||
var logger = opts.logger || { log: console.log, error: console.error };
|
||||
var app = (0, express_1.default)();
|
||||
if (!opts.allowNotImplementedMethods && !methods.EncryptionExchange)
|
||||
throw new Error('method: EncryptionExchange is not implemented');
|
||||
app.post('/api/encryption/exchange', function (req, res) { return __awaiter(void 0, void 0, void 0, function () {
|
||||
var authContext, request, error, query, params, response, e_1;
|
||||
return __generator(this, function (_a) {
|
||||
switch (_a.label) {
|
||||
case 0:
|
||||
_a.trys.push([0, 3, , 4]);
|
||||
if (!methods.EncryptionExchange)
|
||||
throw new Error('method: EncryptionExchange is not implemented');
|
||||
return [4 /*yield*/, opts.NoAuthAuthGuard(req.headers['authorization'])];
|
||||
case 1:
|
||||
authContext = _a.sent();
|
||||
request = req.body;
|
||||
error = Types.EncryptionExchangeRequestValidate(request);
|
||||
if (error !== null)
|
||||
return [2 /*return*/, logErrorAndReturnResponse(error, 'invalid request body', res, logger)];
|
||||
query = req.query;
|
||||
params = req.params;
|
||||
return [4 /*yield*/, methods.EncryptionExchange(__assign(__assign(__assign({}, authContext), query), params), request)];
|
||||
case 2:
|
||||
response = _a.sent();
|
||||
res.json({ status: 'OK', result: response });
|
||||
return [3 /*break*/, 4];
|
||||
case 3:
|
||||
e_1 = _a.sent();
|
||||
logErrorAndReturnResponse(e_1, e_1.message || e_1, res, logger);
|
||||
return [3 /*break*/, 4];
|
||||
case 4: return [2 /*return*/];
|
||||
}
|
||||
});
|
||||
}); });
|
||||
return { Listen: function (port) { return app.listen(port, function () { return logger.log('Example app listening on port ' + port); }); } };
|
||||
});
|
||||
//# sourceMappingURL=express_server.js.map
|
||||
1
build/express_server.js.map
Normal file
1
build/express_server.js.map
Normal file
|
|
@ -0,0 +1 @@
|
|||
{"version":3,"file":"express_server.js","sourceRoot":"","sources":["../proto/autogenerated/ts/express_server.ts"],"names":[],"mappings":";AAAA,+DAA+D;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAE/D,oDAA2C;AAC3C,6CAAgC;AAUhC,IAAM,yBAAyB,GAAG,UAAC,KAAY,EAAE,QAAgB,EAAE,GAAa,EAAE,MAAc,IAAO,MAAM,CAAC,KAAK,CAAC,KAAK,CAAC,OAAO,IAAI,KAAK,CAAC,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,EAAE,MAAM,EAAE,OAAO,EAAE,MAAM,EAAE,QAAQ,EAAE,CAAC,CAAA,CAAC,CAAC,CAAA;AAC9L,mBAAe,UAAC,OAA4B,EAAE,IAAmB;IAC7D,IAAM,MAAM,GAAG,IAAI,CAAC,MAAM,IAAI,EAAE,GAAG,EAAE,OAAO,CAAC,GAAG,EAAE,KAAK,EAAE,OAAO,CAAC,KAAK,EAAE,CAAA;IACxE,IAAM,GAAG,GAAG,IAAA,iBAAO,GAAE,CAAA;IACrB,IAAI,CAAC,IAAI,CAAC,0BAA0B,IAAI,CAAC,OAAO,CAAC,kBAAkB;QAAE,MAAM,IAAI,KAAK,CAAC,+CAA+C,CAAC,CAAA;IACrI,GAAG,CAAC,IAAI,CAAC,0BAA0B,EAAE,UAAO,GAAG,EAAE,GAAG;;;;;;oBAE5C,IAAI,CAAC,OAAO,CAAC,kBAAkB;wBAAE,MAAM,IAAI,KAAK,CAAC,+CAA+C,CAAC,CAAA;oBAC7E,qBAAM,IAAI,CAAC,eAAe,CAAC,GAAG,CAAC,OAAO,CAAC,eAAe,CAAC,CAAC,EAAA;;oBAAtE,WAAW,GAAG,SAAwD;oBACtE,OAAO,GAAG,GAAG,CAAC,IAAI,CAAA;oBAClB,KAAK,GAAG,KAAK,CAAC,iCAAiC,CAAC,OAAO,CAAC,CAAA;oBAC9D,IAAI,KAAK,KAAK,IAAI;wBAAE,sBAAO,yBAAyB,CAAC,KAAK,EAAE,sBAAsB,EAAE,GAAG,EAAE,MAAM,CAAC,EAAA;oBAC1F,KAAK,GAAG,GAAG,CAAC,KAAK,CAAA;oBACjB,MAAM,GAAG,GAAG,CAAC,MAAM,CAAA;oBACR,qBAAM,OAAO,CAAC,kBAAkB,gCAAM,WAAW,GAAK,KAAK,GAAK,MAAM,GAAI,OAAO,CAAC,EAAA;;oBAA7F,QAAQ,GAAG,SAAkF;oBACnG,GAAG,CAAC,IAAI,CAAC,EAAC,MAAM,EAAE,IAAI,EAAE,MAAM,EAAE,QAAQ,EAAC,CAAC,CAAA;;;;oBAC3B,yBAAyB,CAAC,GAAC,EAAE,GAAC,CAAC,OAAO,IAAI,GAAC,EAAE,GAAG,EAAE,MAAM,CAAC,CAAA;;;;;SAC/E,CAAC,CAAA;IACF,OAAO,EAAE,MAAM,EAAE,UAAC,IAAY,IAAK,OAAA,GAAG,CAAC,MAAM,CAAC,IAAI,EAAE,cAAM,OAAA,MAAM,CAAC,GAAG,CAAC,gCAAgC,GAAG,IAAI,CAAC,EAAnD,CAAmD,CAAC,EAA3E,CAA2E,EAAE,CAAA;AACpH,CAAC,EAAA"}
|
||||
96
build/http_client.js
Normal file
96
build/http_client.js
Normal file
|
|
@ -0,0 +1,96 @@
|
|||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||
return new (P || (P = Promise))(function (resolve, reject) {
|
||||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||
});
|
||||
};
|
||||
var __generator = (this && this.__generator) || function (thisArg, body) {
|
||||
var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;
|
||||
return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g;
|
||||
function verb(n) { return function (v) { return step([n, v]); }; }
|
||||
function step(op) {
|
||||
if (f) throw new TypeError("Generator is already executing.");
|
||||
while (_) try {
|
||||
if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;
|
||||
if (y = 0, t) op = [op[0] & 2, t.value];
|
||||
switch (op[0]) {
|
||||
case 0: case 1: t = op; break;
|
||||
case 4: _.label++; return { value: op[1], done: false };
|
||||
case 5: _.label++; y = op[1]; op = [0]; continue;
|
||||
case 7: op = _.ops.pop(); _.trys.pop(); continue;
|
||||
default:
|
||||
if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }
|
||||
if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }
|
||||
if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }
|
||||
if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }
|
||||
if (t[2]) _.ops.pop();
|
||||
_.trys.pop(); continue;
|
||||
}
|
||||
op = body.call(thisArg, _);
|
||||
} catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }
|
||||
if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };
|
||||
}
|
||||
};
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
// This file was autogenerated from a .proto file, DO NOT EDIT!
|
||||
var axios_1 = __importDefault(require("axios"));
|
||||
var Types = __importStar(require("./types"));
|
||||
exports.default = (function (retrievers) { return ({
|
||||
EncryptionExchange: function (request) { return __awaiter(void 0, void 0, void 0, function () {
|
||||
var auth, data, error;
|
||||
return __generator(this, function (_a) {
|
||||
switch (_a.label) {
|
||||
case 0: return [4 /*yield*/, retrievers.retrieveNoAuthAuth()];
|
||||
case 1:
|
||||
auth = _a.sent();
|
||||
if (auth === null)
|
||||
throw new Error('retrieveNoAuthAuth() returned null');
|
||||
return [4 /*yield*/, axios_1.default.post('/api/encryption/exchange', request, { headers: { 'authorization': auth } })];
|
||||
case 2:
|
||||
data = (_a.sent()).data;
|
||||
if (data.status === 'ERROR' && typeof data.reason === 'string')
|
||||
return [2 /*return*/, data];
|
||||
if (data.status === 'OK') {
|
||||
error = Types.EmptyValidate(data.result);
|
||||
if (error === null) {
|
||||
return [2 /*return*/, data];
|
||||
}
|
||||
else
|
||||
return [2 /*return*/, { status: 'ERROR', reason: error.message }];
|
||||
}
|
||||
return [2 /*return*/, { status: 'ERROR', reason: 'invalid response' }];
|
||||
}
|
||||
});
|
||||
}); },
|
||||
}); });
|
||||
//# sourceMappingURL=http_client.js.map
|
||||
1
build/http_client.js.map
Normal file
1
build/http_client.js.map
Normal file
|
|
@ -0,0 +1 @@
|
|||
{"version":3,"file":"http_client.js","sourceRoot":"","sources":["../proto/autogenerated/ts/http_client.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,+DAA+D;AAC/D,gDAAyB;AACzB,6CAAgC;AAQhC,mBAAe,UAAC,UAA6B,IAAK,OAAA,CAAC;IAC/C,kBAAkB,EAAE,UAAO,OAAwC;;;;wBAClD,qBAAM,UAAU,CAAC,kBAAkB,EAAE,EAAA;;oBAA5C,IAAI,GAAG,SAAqC;oBAClD,IAAI,IAAI,KAAK,IAAI;wBAAE,MAAM,IAAI,KAAK,CAAC,oCAAoC,CAAC,CAAA;oBACvD,qBAAM,eAAK,CAAC,IAAI,CAAC,0BAA0B,EAAE,OAAO,EAAE,EAAE,OAAO,EAAE,EAAE,eAAe,EAAE,IAAI,EAAE,EAAE,CAAC,EAAA;;oBAAtG,IAAI,GAAK,CAAA,SAA6F,CAAA,KAAlG;oBACZ,IAAI,IAAI,CAAC,MAAM,KAAK,OAAO,IAAI,OAAO,IAAI,CAAC,MAAM,KAAK,QAAQ;wBAAE,sBAAO,IAAI,EAAA;oBAC3E,IAAI,IAAI,CAAC,MAAM,KAAK,IAAI,EAAE;wBAChB,KAAK,GAAG,KAAK,CAAC,aAAa,CAAC,IAAI,CAAC,MAAM,CAAC,CAAA;wBAC9C,IAAI,KAAK,KAAK,IAAI,EAAE;4BAAE,sBAAO,IAAI,EAAA;yBAAE;;4BAAM,sBAAO,EAAE,MAAM,EAAE,OAAO,EAAE,MAAM,EAAE,KAAK,CAAC,OAAO,EAAE,EAAA;qBAC7F;oBACD,sBAAO,EAAE,MAAM,EAAE,OAAO,EAAE,MAAM,EAAE,kBAAkB,EAAE,EAAA;;;SACzD;CACJ,CAAC,EAZgD,CAYhD,EAAA"}
|
||||
116
build/proto/autogenerated/ts/express_server.js
Normal file
116
build/proto/autogenerated/ts/express_server.js
Normal file
|
|
@ -0,0 +1,116 @@
|
|||
"use strict";
|
||||
// This file was autogenerated from a .proto file, DO NOT EDIT!
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||
return new (P || (P = Promise))(function (resolve, reject) {
|
||||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||
});
|
||||
};
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const express_1 = __importDefault(require("express"));
|
||||
const Types = __importStar(require("./types"));
|
||||
const logErrorAndReturnResponse = (error, response, res, logger) => { logger.error(error.message || error); res.json({ status: 'ERROR', reason: response }); };
|
||||
exports.default = (methods, opts) => {
|
||||
const logger = opts.logger || { log: console.log, error: console.error };
|
||||
const app = (0, express_1.default)();
|
||||
if (!opts.allowNotImplementedMethods && !methods.Health)
|
||||
throw new Error('method: Health is not implemented');
|
||||
app.get('/health', (req, res) => __awaiter(void 0, void 0, void 0, function* () {
|
||||
try {
|
||||
if (!methods.Health)
|
||||
throw new Error('method: Health is not implemented');
|
||||
const authContext = yield opts.NoAuthAuthGuard(req.headers['authorization']);
|
||||
const query = req.query;
|
||||
const params = req.params;
|
||||
yield methods.Health(Object.assign(Object.assign(Object.assign({}, authContext), query), params));
|
||||
res.json({ status: 'OK' });
|
||||
}
|
||||
catch (ex) {
|
||||
const e = ex;
|
||||
logErrorAndReturnResponse(e, e.message || e, res, logger);
|
||||
if (opts.throwErrors)
|
||||
throw e;
|
||||
}
|
||||
}));
|
||||
if (!opts.allowNotImplementedMethods && !methods.EncryptionExchange)
|
||||
throw new Error('method: EncryptionExchange is not implemented');
|
||||
app.post('/api/encryption/exchange', (req, res) => __awaiter(void 0, void 0, void 0, function* () {
|
||||
try {
|
||||
if (!methods.EncryptionExchange)
|
||||
throw new Error('method: EncryptionExchange is not implemented');
|
||||
const authContext = yield opts.NoAuthAuthGuard(req.headers['authorization']);
|
||||
const request = req.body;
|
||||
const error = Types.EncryptionExchangeRequestValidate(request);
|
||||
if (error !== null)
|
||||
return logErrorAndReturnResponse(error, 'invalid request body', res, logger);
|
||||
const query = req.query;
|
||||
const params = req.params;
|
||||
yield methods.EncryptionExchange(Object.assign(Object.assign(Object.assign({}, authContext), query), params), request);
|
||||
res.json({ status: 'OK' });
|
||||
}
|
||||
catch (ex) {
|
||||
const e = ex;
|
||||
logErrorAndReturnResponse(e, e.message || e, res, logger);
|
||||
if (opts.throwErrors)
|
||||
throw e;
|
||||
}
|
||||
}));
|
||||
if (!opts.allowNotImplementedMethods && !methods.LndGetInfo)
|
||||
throw new Error('method: LndGetInfo is not implemented');
|
||||
app.get('/api/lnd/getinfo', (req, res) => __awaiter(void 0, void 0, void 0, function* () {
|
||||
try {
|
||||
if (!methods.LndGetInfo)
|
||||
throw new Error('method: LndGetInfo is not implemented');
|
||||
const authContext = yield opts.NoAuthAuthGuard(req.headers['authorization']);
|
||||
const query = req.query;
|
||||
const params = req.params;
|
||||
const response = yield methods.LndGetInfo(Object.assign(Object.assign(Object.assign({}, authContext), query), params));
|
||||
res.json({ status: 'OK', result: response });
|
||||
}
|
||||
catch (ex) {
|
||||
const e = ex;
|
||||
logErrorAndReturnResponse(e, e.message || e, res, logger);
|
||||
if (opts.throwErrors)
|
||||
throw e;
|
||||
}
|
||||
}));
|
||||
var server;
|
||||
return {
|
||||
Close: () => { if (!server) {
|
||||
throw new Error('tried closing server before starting');
|
||||
}
|
||||
else
|
||||
server.close(); },
|
||||
Listen: (port) => { server = app.listen(port, () => logger.log('Example app listening on port ' + port)); }
|
||||
};
|
||||
};
|
||||
//# sourceMappingURL=express_server.js.map
|
||||
1
build/proto/autogenerated/ts/express_server.js.map
Normal file
1
build/proto/autogenerated/ts/express_server.js.map
Normal file
|
|
@ -0,0 +1 @@
|
|||
{"version":3,"file":"express_server.js","sourceRoot":"","sources":["../../../../proto/autogenerated/ts/express_server.ts"],"names":[],"mappings":";AAAA,+DAA+D;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAE/D,sDAA2C;AAC3C,+CAAgC;AAWhC,MAAM,yBAAyB,GAAG,CAAC,KAAY,EAAE,QAAgB,EAAE,GAAa,EAAE,MAAc,EAAE,EAAE,GAAG,MAAM,CAAC,KAAK,CAAC,KAAK,CAAC,OAAO,IAAI,KAAK,CAAC,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,EAAE,MAAM,EAAE,OAAO,EAAE,MAAM,EAAE,QAAQ,EAAE,CAAC,CAAA,CAAC,CAAC,CAAA;AAC9L,kBAAe,CAAC,OAA4B,EAAE,IAAmB,EAAE,EAAE;IACjE,MAAM,MAAM,GAAG,IAAI,CAAC,MAAM,IAAI,EAAE,GAAG,EAAE,OAAO,CAAC,GAAG,EAAE,KAAK,EAAE,OAAO,CAAC,KAAK,EAAE,CAAA;IACxE,MAAM,GAAG,GAAG,IAAA,iBAAO,GAAE,CAAA;IACrB,IAAI,CAAC,IAAI,CAAC,0BAA0B,IAAI,CAAC,OAAO,CAAC,MAAM;QAAE,MAAM,IAAI,KAAK,CAAC,mCAAmC,CAAC,CAAA;IAC7G,GAAG,CAAC,GAAG,CAAC,SAAS,EAAE,CAAO,GAAG,EAAE,GAAG,EAAE,EAAE;QAClC,IAAI;YACA,IAAI,CAAC,OAAO,CAAC,MAAM;gBAAE,MAAM,IAAI,KAAK,CAAC,mCAAmC,CAAC,CAAA;YACzE,MAAM,WAAW,GAAG,MAAM,IAAI,CAAC,eAAe,CAAC,GAAG,CAAC,OAAO,CAAC,eAAe,CAAC,CAAC,CAAA;YAC5E,MAAM,KAAK,GAAG,GAAG,CAAC,KAAK,CAAA;YACvB,MAAM,MAAM,GAAG,GAAG,CAAC,MAAM,CAAA;YACzB,MAAM,OAAO,CAAC,MAAM,+CAAM,WAAW,GAAK,KAAK,GAAK,MAAM,EAAG,CAAA;YAC7D,GAAG,CAAC,IAAI,CAAC,EAAE,MAAM,EAAE,IAAI,EAAE,CAAC,CAAA;SAC7B;QAAC,OAAO,EAAE,EAAE;YAAE,MAAM,CAAC,GAAG,EAAS,CAAC;YAAC,yBAAyB,CAAC,CAAC,EAAE,CAAC,CAAC,OAAO,IAAI,CAAC,EAAE,GAAG,EAAE,MAAM,CAAC,CAAC;YAAC,IAAI,IAAI,CAAC,WAAW;gBAAE,MAAM,CAAC,CAAA;SAAE;IAClI,CAAC,CAAA,CAAC,CAAA;IACF,IAAI,CAAC,IAAI,CAAC,0BAA0B,IAAI,CAAC,OAAO,CAAC,kBAAkB;QAAE,MAAM,IAAI,KAAK,CAAC,+CAA+C,CAAC,CAAA;IACrI,GAAG,CAAC,IAAI,CAAC,0BAA0B,EAAE,CAAO,GAAG,EAAE,GAAG,EAAE,EAAE;QACpD,IAAI;YACA,IAAI,CAAC,OAAO,CAAC,kBAAkB;gBAAE,MAAM,IAAI,KAAK,CAAC,+CAA+C,CAAC,CAAA;YACjG,MAAM,WAAW,GAAG,MAAM,IAAI,CAAC,eAAe,CAAC,GAAG,CAAC,OAAO,CAAC,eAAe,CAAC,CAAC,CAAA;YAC5E,MAAM,OAAO,GAAG,GAAG,CAAC,IAAI,CAAA;YACxB,MAAM,KAAK,GAAG,KAAK,CAAC,iCAAiC,CAAC,OAAO,CAAC,CAAA;YAC9D,IAAI,KAAK,KAAK,IAAI;gBAAE,OAAO,yBAAyB,CAAC,KAAK,EAAE,sBAAsB,EAAE,GAAG,EAAE,MAAM,CAAC,CAAA;YAChG,MAAM,KAAK,GAAG,GAAG,CAAC,KAAK,CAAA;YACvB,MAAM,MAAM
,GAAG,GAAG,CAAC,MAAM,CAAA;YACzB,MAAM,OAAO,CAAC,kBAAkB,+CAAM,WAAW,GAAK,KAAK,GAAK,MAAM,GAAI,OAAO,CAAC,CAAA;YAClF,GAAG,CAAC,IAAI,CAAC,EAAE,MAAM,EAAE,IAAI,EAAE,CAAC,CAAA;SAC7B;QAAC,OAAO,EAAE,EAAE;YAAE,MAAM,CAAC,GAAG,EAAS,CAAC;YAAC,yBAAyB,CAAC,CAAC,EAAE,CAAC,CAAC,OAAO,IAAI,CAAC,EAAE,GAAG,EAAE,MAAM,CAAC,CAAC;YAAC,IAAI,IAAI,CAAC,WAAW;gBAAE,MAAM,CAAC,CAAA;SAAE;IAClI,CAAC,CAAA,CAAC,CAAA;IACF,IAAI,CAAC,IAAI,CAAC,0BAA0B,IAAI,CAAC,OAAO,CAAC,UAAU;QAAE,MAAM,IAAI,KAAK,CAAC,uCAAuC,CAAC,CAAA;IACrH,GAAG,CAAC,GAAG,CAAC,kBAAkB,EAAE,CAAO,GAAG,EAAE,GAAG,EAAE,EAAE;QAC3C,IAAI;YACA,IAAI,CAAC,OAAO,CAAC,UAAU;gBAAE,MAAM,IAAI,KAAK,CAAC,uCAAuC,CAAC,CAAA;YACjF,MAAM,WAAW,GAAG,MAAM,IAAI,CAAC,eAAe,CAAC,GAAG,CAAC,OAAO,CAAC,eAAe,CAAC,CAAC,CAAA;YAC5E,MAAM,KAAK,GAAG,GAAG,CAAC,KAAK,CAAA;YACvB,MAAM,MAAM,GAAG,GAAG,CAAC,MAAM,CAAA;YACzB,MAAM,QAAQ,GAAG,MAAM,OAAO,CAAC,UAAU,+CAAM,WAAW,GAAK,KAAK,GAAK,MAAM,EAAG,CAAA;YAClF,GAAG,CAAC,IAAI,CAAC,EAAE,MAAM,EAAE,IAAI,EAAE,MAAM,EAAE,QAAQ,EAAE,CAAC,CAAA;SAC/C;QAAC,OAAO,EAAE,EAAE;YAAE,MAAM,CAAC,GAAG,EAAS,CAAC;YAAC,yBAAyB,CAAC,CAAC,EAAE,CAAC,CAAC,OAAO,IAAI,CAAC,EAAE,GAAG,EAAE,MAAM,CAAC,CAAC;YAAC,IAAI,IAAI,CAAC,WAAW;gBAAE,MAAM,CAAC,CAAA;SAAE;IAClI,CAAC,CAAA,CAAC,CAAA;IACF,IAAI,MAAyC,CAAA;IAC7C,OAAO;QACH,KAAK,EAAE,GAAG,EAAE,GAAG,IAAI,CAAC,MAAM,EAAE;YAAE,MAAM,IAAI,KAAK,CAAC,sCAAsC,CAAC,CAAA;SAAE;;YAAM,MAAM,CAAC,KAAK,EAAE,CAAA,CAAC,CAAC;QAC7G,MAAM,EAAE,CAAC,IAAY,EAAE,EAAE,GAAG,MAAM,GAAG,GAAG,CAAC,MAAM,CAAC,IAAI,EAAE,GAAG,EAAE,CAAC,MAAM,CAAC,GAAG,CAAC,gCAAgC,GAAG,IAAI,CAAC,CAAC,CAAA,CAAC,CAAC;KACrH,CAAA;AACL,CAAC,CAAA"}
|
||||
84
build/proto/autogenerated/ts/http_client.js
Normal file
84
build/proto/autogenerated/ts/http_client.js
Normal file
|
|
@ -0,0 +1,84 @@
|
|||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||
return new (P || (P = Promise))(function (resolve, reject) {
|
||||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||
});
|
||||
};
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
// This file was autogenerated from a .proto file, DO NOT EDIT!
|
||||
const axios_1 = __importDefault(require("axios"));
|
||||
const Types = __importStar(require("./types"));
|
||||
exports.default = (params) => ({
|
||||
Health: () => __awaiter(void 0, void 0, void 0, function* () {
|
||||
const auth = yield params.retrieveNoAuthAuth();
|
||||
if (auth === null)
|
||||
throw new Error('retrieveNoAuthAuth() returned null');
|
||||
const { data } = yield axios_1.default.get(params.baseUrl + '/health', { headers: { 'authorization': auth } });
|
||||
if (data.status === 'ERROR' && typeof data.reason === 'string')
|
||||
return data;
|
||||
if (data.status === 'OK') {
|
||||
return data;
|
||||
}
|
||||
return { status: 'ERROR', reason: 'invalid response' };
|
||||
}),
|
||||
EncryptionExchange: (request) => __awaiter(void 0, void 0, void 0, function* () {
|
||||
const auth = yield params.retrieveNoAuthAuth();
|
||||
if (auth === null)
|
||||
throw new Error('retrieveNoAuthAuth() returned null');
|
||||
const { data } = yield axios_1.default.post(params.baseUrl + '/api/encryption/exchange', request, { headers: { 'authorization': auth } });
|
||||
if (data.status === 'ERROR' && typeof data.reason === 'string')
|
||||
return data;
|
||||
if (data.status === 'OK') {
|
||||
return data;
|
||||
}
|
||||
return { status: 'ERROR', reason: 'invalid response' };
|
||||
}),
|
||||
LndGetInfo: () => __awaiter(void 0, void 0, void 0, function* () {
|
||||
const auth = yield params.retrieveNoAuthAuth();
|
||||
if (auth === null)
|
||||
throw new Error('retrieveNoAuthAuth() returned null');
|
||||
const { data } = yield axios_1.default.get(params.baseUrl + '/api/lnd/getinfo', { headers: { 'authorization': auth } });
|
||||
if (data.status === 'ERROR' && typeof data.reason === 'string')
|
||||
return data;
|
||||
if (data.status === 'OK') {
|
||||
const error = Types.LndGetInfoResponseValidate(data.result);
|
||||
if (error === null) {
|
||||
return data;
|
||||
}
|
||||
else
|
||||
return { status: 'ERROR', reason: error.message };
|
||||
}
|
||||
return { status: 'ERROR', reason: 'invalid response' };
|
||||
}),
|
||||
});
|
||||
//# sourceMappingURL=http_client.js.map
|
||||
1
build/proto/autogenerated/ts/http_client.js.map
Normal file
1
build/proto/autogenerated/ts/http_client.js.map
Normal file
|
|
@ -0,0 +1 @@
|
|||
{"version":3,"file":"http_client.js","sourceRoot":"","sources":["../../../../proto/autogenerated/ts/http_client.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,+DAA+D;AAC/D,kDAAyB;AACzB,+CAAgC;AAShC,kBAAe,CAAC,MAAoB,EAAE,EAAE,CAAC,CAAC;IACtC,MAAM,EAAE,GAAkD,EAAE;QACxD,MAAM,IAAI,GAAG,MAAM,MAAM,CAAC,kBAAkB,EAAE,CAAA;QAC9C,IAAI,IAAI,KAAK,IAAI;YAAE,MAAM,IAAI,KAAK,CAAC,oCAAoC,CAAC,CAAA;QACxE,MAAM,EAAE,IAAI,EAAE,GAAG,MAAM,eAAK,CAAC,GAAG,CAAC,MAAM,CAAC,OAAO,GAAG,SAAS,EAAE,EAAE,OAAO,EAAE,EAAE,eAAe,EAAE,IAAI,EAAE,EAAE,CAAC,CAAA;QACpG,IAAI,IAAI,CAAC,MAAM,KAAK,OAAO,IAAI,OAAO,IAAI,CAAC,MAAM,KAAK,QAAQ;YAAE,OAAO,IAAI,CAAA;QAC3E,IAAI,IAAI,CAAC,MAAM,KAAK,IAAI,EAAE;YACtB,OAAO,IAAI,CAAA;SACd;QACD,OAAO,EAAE,MAAM,EAAE,OAAO,EAAE,MAAM,EAAE,kBAAkB,EAAE,CAAA;IAC1D,CAAC,CAAA;IACD,kBAAkB,EAAE,CAAO,OAAwC,EAA2C,EAAE;QAC5G,MAAM,IAAI,GAAG,MAAM,MAAM,CAAC,kBAAkB,EAAE,CAAA;QAC9C,IAAI,IAAI,KAAK,IAAI;YAAE,MAAM,IAAI,KAAK,CAAC,oCAAoC,CAAC,CAAA;QACxE,MAAM,EAAE,IAAI,EAAE,GAAG,MAAM,eAAK,CAAC,IAAI,CAAC,MAAM,CAAC,OAAO,GAAG,0BAA0B,EAAE,OAAO,EAAE,EAAE,OAAO,EAAE,EAAE,eAAe,EAAE,IAAI,EAAE,EAAE,CAAC,CAAA;QAC/H,IAAI,IAAI,CAAC,MAAM,KAAK,OAAO,IAAI,OAAO,IAAI,CAAC,MAAM,KAAK,QAAQ;YAAE,OAAO,IAAI,CAAA;QAC3E,IAAI,IAAI,CAAC,MAAM,KAAK,IAAI,EAAE;YACtB,OAAO,IAAI,CAAA;SACd;QACD,OAAO,EAAE,MAAM,EAAE,OAAO,EAAE,MAAM,EAAE,kBAAkB,EAAE,CAAA;IAC1D,CAAC,CAAA;IACD,UAAU,EAAE,GAAuE,EAAE;QACjF,MAAM,IAAI,GAAG,MAAM,MAAM,CAAC,kBAAkB,EAAE,CAAA;QAC9C,IAAI,IAAI,KAAK,IAAI;YAAE,MAAM,IAAI,KAAK,CAAC,oCAAoC,CAAC,CAAA;QACxE,MAAM,EAAE,IAAI,EAAE,GAAG,MAAM,eAAK,CAAC,GAAG,CAAC,MAAM,CAAC,OAAO,GAAG,kBAAkB,EAAE,EAAE,OAAO,EAAE,EAAE,eAAe,EAAE,IAAI,EAAE,EAAE,CAAC,CAAA;QAC7G,IAAI,IAAI,CAAC,MAAM,KAAK,OAAO,IAAI,OAAO,IAAI,CAAC,MAAM,KAAK,QAAQ;YAAE,OAAO,IAAI,CAAA;QAC3E,IAAI,IAAI,CAAC,MAAM,KAAK,IAAI,EAAE;YACtB,MAAM,KAAK,GAAG,KAAK,CAAC,0BAA0B,CAAC,IAAI,CAAC,MAAM,CAAC,CAAA;YAC3D,IAAI,KAAK,KAAK,IAAI,EAAE;gBAAE,OAAO,IAAI,CAAA;aAAE;;gBAAM,OAAO,EAAE,MAAM,EAAE,OAAO,EAAE,MAAM,EAAE,KAAK,CAAC,OAAO,EAAE,CAAA;SAC7F;QACD,OAAO,EAAE,MAAM,EAAE,OAAO,EAAE,MAAM,EAA
E,kBAAkB,EAAE,CAAA;IAC1D,CAAC,CAAA;CACJ,CAAC,CAAA"}
|
||||
44
build/proto/autogenerated/ts/types.js
Normal file
44
build/proto/autogenerated/ts/types.js
Normal file
|
|
@ -0,0 +1,44 @@
|
|||
"use strict";
|
||||
// This file was autogenerated from a .proto file, DO NOT EDIT!
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.EncryptionExchangeRequestValidate = exports.EncryptionExchangeRequestOptionalFields = exports.EmptyValidate = exports.EmptyOptionalFields = exports.LndGetInfoResponseValidate = exports.LndGetInfoResponseOptionalFields = void 0;
|
||||
exports.LndGetInfoResponseOptionalFields = [];
|
||||
const LndGetInfoResponseValidate = (o, opts = {}, path = 'LndGetInfoResponse::root.') => {
|
||||
if (opts.checkOptionalsAreSet && opts.allOptionalsAreSet)
|
||||
return new Error(path + ': only one of checkOptionalsAreSet or allOptionalNonDefault can be set for each message');
|
||||
if (typeof o !== 'object' || o === null)
|
||||
return new Error(path + ': object is not an instance of an object or is null');
|
||||
if (typeof o.alias !== 'string')
|
||||
return new Error(`${path}.alias: is not a string`);
|
||||
if (opts.alias_CustomCheck && !opts.alias_CustomCheck(o.alias))
|
||||
return new Error(`${path}.alias: custom check failed`);
|
||||
return null;
|
||||
};
|
||||
exports.LndGetInfoResponseValidate = LndGetInfoResponseValidate;
|
||||
exports.EmptyOptionalFields = [];
|
||||
const EmptyValidate = (o, opts = {}, path = 'Empty::root.') => {
|
||||
if (opts.checkOptionalsAreSet && opts.allOptionalsAreSet)
|
||||
return new Error(path + ': only one of checkOptionalsAreSet or allOptionalNonDefault can be set for each message');
|
||||
if (typeof o !== 'object' || o === null)
|
||||
return new Error(path + ': object is not an instance of an object or is null');
|
||||
return null;
|
||||
};
|
||||
exports.EmptyValidate = EmptyValidate;
|
||||
exports.EncryptionExchangeRequestOptionalFields = [];
|
||||
const EncryptionExchangeRequestValidate = (o, opts = {}, path = 'EncryptionExchangeRequest::root.') => {
|
||||
if (opts.checkOptionalsAreSet && opts.allOptionalsAreSet)
|
||||
return new Error(path + ': only one of checkOptionalsAreSet or allOptionalNonDefault can be set for each message');
|
||||
if (typeof o !== 'object' || o === null)
|
||||
return new Error(path + ': object is not an instance of an object or is null');
|
||||
if (typeof o.public_key !== 'string')
|
||||
return new Error(`${path}.public_key: is not a string`);
|
||||
if (opts.public_key_CustomCheck && !opts.public_key_CustomCheck(o.public_key))
|
||||
return new Error(`${path}.public_key: custom check failed`);
|
||||
if (typeof o.device_id !== 'string')
|
||||
return new Error(`${path}.device_id: is not a string`);
|
||||
if (opts.device_id_CustomCheck && !opts.device_id_CustomCheck(o.device_id))
|
||||
return new Error(`${path}.device_id: custom check failed`);
|
||||
return null;
|
||||
};
|
||||
exports.EncryptionExchangeRequestValidate = EncryptionExchangeRequestValidate;
|
||||
//# sourceMappingURL=types.js.map
|
||||
1
build/proto/autogenerated/ts/types.js.map
Normal file
1
build/proto/autogenerated/ts/types.js.map
Normal file
|
|
@ -0,0 +1 @@
|
|||
{"version":3,"file":"types.js","sourceRoot":"","sources":["../../../../proto/autogenerated/ts/types.ts"],"names":[],"mappings":";AAAA,+DAA+D;;;AAyClD,QAAA,gCAAgC,GAAO,EAAE,CAAA;AAK/C,MAAM,0BAA0B,GAAG,CAAC,CAAsB,EAAE,OAAkC,EAAE,EAAE,OAAe,2BAA2B,EAAgB,EAAE;IACjK,IAAI,IAAI,CAAC,oBAAoB,IAAI,IAAI,CAAC,kBAAkB;QAAE,OAAO,IAAI,KAAK,CAAC,IAAI,GAAG,yFAAyF,CAAC,CAAA;IAC5K,IAAI,OAAO,CAAC,KAAK,QAAQ,IAAI,CAAC,KAAK,IAAI;QAAE,OAAO,IAAI,KAAK,CAAC,IAAI,GAAG,qDAAqD,CAAC,CAAA;IAEvH,IAAI,OAAO,CAAC,CAAC,KAAK,KAAK,QAAQ;QAAE,OAAO,IAAI,KAAK,CAAC,GAAG,IAAI,yBAAyB,CAAC,CAAA;IACnF,IAAI,IAAI,CAAC,iBAAiB,IAAI,CAAC,IAAI,CAAC,iBAAiB,CAAC,CAAC,CAAC,KAAK,CAAC;QAAE,OAAO,IAAI,KAAK,CAAC,GAAG,IAAI,6BAA6B,CAAC,CAAA;IAEtH,OAAO,IAAI,CAAA;AACf,CAAC,CAAA;AARY,QAAA,0BAA0B,8BAQtC;AAIY,QAAA,mBAAmB,GAAO,EAAE,CAAA;AAIlC,MAAM,aAAa,GAAG,CAAC,CAAS,EAAE,OAAqB,EAAE,EAAE,OAAe,cAAc,EAAgB,EAAE;IAC7G,IAAI,IAAI,CAAC,oBAAoB,IAAI,IAAI,CAAC,kBAAkB;QAAE,OAAO,IAAI,KAAK,CAAC,IAAI,GAAG,yFAAyF,CAAC,CAAA;IAC5K,IAAI,OAAO,CAAC,KAAK,QAAQ,IAAI,CAAC,KAAK,IAAI;QAAE,OAAO,IAAI,KAAK,CAAC,IAAI,GAAG,qDAAqD,CAAC,CAAA;IAEvH,OAAO,IAAI,CAAA;AACf,CAAC,CAAA;AALY,QAAA,aAAa,iBAKzB;AAMY,QAAA,uCAAuC,GAAO,EAAE,CAAA;AAMtD,MAAM,iCAAiC,GAAG,CAAC,CAA6B,EAAE,OAAyC,EAAE,EAAE,OAAe,kCAAkC,EAAgB,EAAE;IAC7L,IAAI,IAAI,CAAC,oBAAoB,IAAI,IAAI,CAAC,kBAAkB;QAAE,OAAO,IAAI,KAAK,CAAC,IAAI,GAAG,yFAAyF,CAAC,CAAA;IAC5K,IAAI,OAAO,CAAC,KAAK,QAAQ,IAAI,CAAC,KAAK,IAAI;QAAE,OAAO,IAAI,KAAK,CAAC,IAAI,GAAG,qDAAqD,CAAC,CAAA;IAEvH,IAAI,OAAO,CAAC,CAAC,UAAU,KAAK,QAAQ;QAAE,OAAO,IAAI,KAAK,CAAC,GAAG,IAAI,8BAA8B,CAAC,CAAA;IAC7F,IAAI,IAAI,CAAC,sBAAsB,IAAI,CAAC,IAAI,CAAC,sBAAsB,CAAC,CAAC,CAAC,UAAU,CAAC;QAAE,OAAO,IAAI,KAAK,CAAC,GAAG,IAAI,kCAAkC,CAAC,CAAA;IAE1I,IAAI,OAAO,CAAC,CAAC,SAAS,KAAK,QAAQ;QAAE,OAAO,IAAI,KAAK,CAAC,GAAG,IAAI,6BAA6B,CAAC,CAAA;IAC3F,IAAI,IAAI,CAAC,qBAAqB,IAAI,CAAC,IAAI,CAAC,qBAAqB,CAAC,CAAC,CAAC,SAAS,CAAC;QAAE,OAAO,IAAI,KAAK,CAAC,GAAG,IAAI,iCAAiC,CAAC,CAAA;IAEtI,OAAO,IAAI,CAAA;AACf,CAAC,CAAA;AAXY,QAAA,iCAAiC,qCAW7C"}
|
||||
67
build/proto/lnd/invoices.client.js
Normal file
67
build/proto/lnd/invoices.client.js
Normal file
|
|
@ -0,0 +1,67 @@
|
|||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.InvoicesClient = void 0;
|
||||
const invoices_1 = require("./invoices");
|
||||
const runtime_rpc_1 = require("@protobuf-ts/runtime-rpc");
|
||||
/**
|
||||
* Invoices is a service that can be used to create, accept, settle and cancel
|
||||
* invoices.
|
||||
*
|
||||
* @generated from protobuf service invoicesrpc.Invoices
|
||||
*/
|
||||
class InvoicesClient {
|
||||
constructor(_transport) {
|
||||
this._transport = _transport;
|
||||
this.typeName = invoices_1.Invoices.typeName;
|
||||
this.methods = invoices_1.Invoices.methods;
|
||||
this.options = invoices_1.Invoices.options;
|
||||
}
|
||||
/**
|
||||
*
|
||||
* SubscribeSingleInvoice returns a uni-directional stream (server -> client)
|
||||
* to notify the client of state transitions of the specified invoice.
|
||||
* Initially the current invoice state is always sent out.
|
||||
*
|
||||
* @generated from protobuf rpc: SubscribeSingleInvoice(invoicesrpc.SubscribeSingleInvoiceRequest) returns (stream lnrpc.Invoice);
|
||||
*/
|
||||
subscribeSingleInvoice(input, options) {
|
||||
const method = this.methods[0], opt = this._transport.mergeOptions(options);
|
||||
return (0, runtime_rpc_1.stackIntercept)("serverStreaming", this._transport, method, opt, input);
|
||||
}
|
||||
/**
|
||||
*
|
||||
* CancelInvoice cancels a currently open invoice. If the invoice is already
|
||||
* canceled, this call will succeed. If the invoice is already settled, it will
|
||||
* fail.
|
||||
*
|
||||
* @generated from protobuf rpc: CancelInvoice(invoicesrpc.CancelInvoiceMsg) returns (invoicesrpc.CancelInvoiceResp);
|
||||
*/
|
||||
cancelInvoice(input, options) {
|
||||
const method = this.methods[1], opt = this._transport.mergeOptions(options);
|
||||
return (0, runtime_rpc_1.stackIntercept)("unary", this._transport, method, opt, input);
|
||||
}
|
||||
/**
|
||||
*
|
||||
* AddHoldInvoice creates a hold invoice. It ties the invoice to the hash
|
||||
* supplied in the request.
|
||||
*
|
||||
* @generated from protobuf rpc: AddHoldInvoice(invoicesrpc.AddHoldInvoiceRequest) returns (invoicesrpc.AddHoldInvoiceResp);
|
||||
*/
|
||||
addHoldInvoice(input, options) {
|
||||
const method = this.methods[2], opt = this._transport.mergeOptions(options);
|
||||
return (0, runtime_rpc_1.stackIntercept)("unary", this._transport, method, opt, input);
|
||||
}
|
||||
/**
|
||||
*
|
||||
* SettleInvoice settles an accepted invoice. If the invoice is already
|
||||
* settled, this call will succeed.
|
||||
*
|
||||
* @generated from protobuf rpc: SettleInvoice(invoicesrpc.SettleInvoiceMsg) returns (invoicesrpc.SettleInvoiceResp);
|
||||
*/
|
||||
settleInvoice(input, options) {
|
||||
const method = this.methods[3], opt = this._transport.mergeOptions(options);
|
||||
return (0, runtime_rpc_1.stackIntercept)("unary", this._transport, method, opt, input);
|
||||
}
|
||||
}
|
||||
exports.InvoicesClient = InvoicesClient;
|
||||
//# sourceMappingURL=invoices.client.js.map
|
||||
1
build/proto/lnd/invoices.client.js.map
Normal file
1
build/proto/lnd/invoices.client.js.map
Normal file
|
|
@ -0,0 +1 @@
|
|||
{"version":3,"file":"invoices.client.js","sourceRoot":"","sources":["../../../proto/lnd/invoices.client.ts"],"names":[],"mappings":";;;AAKA,yCAAsC;AAQtC,0DAA0D;AA+C1D;;;;;GAKG;AACH,MAAa,cAAc;IAIvB,YAA6B,UAAwB;QAAxB,eAAU,GAAV,UAAU,CAAc;QAHrD,aAAQ,GAAG,mBAAQ,CAAC,QAAQ,CAAC;QAC7B,YAAO,GAAG,mBAAQ,CAAC,OAAO,CAAC;QAC3B,YAAO,GAAG,mBAAQ,CAAC,OAAO,CAAC;IAE3B,CAAC;IACD;;;;;;;OAOG;IACH,sBAAsB,CAAC,KAAoC,EAAE,OAAoB;QAC7E,MAAM,MAAM,GAAG,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,GAAG,GAAG,IAAI,CAAC,UAAU,CAAC,YAAY,CAAC,OAAO,CAAC,CAAC;QAC5E,OAAO,IAAA,4BAAc,EAAyC,iBAAiB,EAAE,IAAI,CAAC,UAAU,EAAE,MAAM,EAAE,GAAG,EAAE,KAAK,CAAC,CAAC;IAC1H,CAAC;IACD;;;;;;;OAOG;IACH,aAAa,CAAC,KAAuB,EAAE,OAAoB;QACvD,MAAM,MAAM,GAAG,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,GAAG,GAAG,IAAI,CAAC,UAAU,CAAC,YAAY,CAAC,OAAO,CAAC,CAAC;QAC5E,OAAO,IAAA,4BAAc,EAAsC,OAAO,EAAE,IAAI,CAAC,UAAU,EAAE,MAAM,EAAE,GAAG,EAAE,KAAK,CAAC,CAAC;IAC7G,CAAC;IACD;;;;;;OAMG;IACH,cAAc,CAAC,KAA4B,EAAE,OAAoB;QAC7D,MAAM,MAAM,GAAG,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,GAAG,GAAG,IAAI,CAAC,UAAU,CAAC,YAAY,CAAC,OAAO,CAAC,CAAC;QAC5E,OAAO,IAAA,4BAAc,EAA4C,OAAO,EAAE,IAAI,CAAC,UAAU,EAAE,MAAM,EAAE,GAAG,EAAE,KAAK,CAAC,CAAC;IACnH,CAAC;IACD;;;;;;OAMG;IACH,aAAa,CAAC,KAAuB,EAAE,OAAoB;QACvD,MAAM,MAAM,GAAG,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,GAAG,GAAG,IAAI,CAAC,UAAU,CAAC,YAAY,CAAC,OAAO,CAAC,CAAC;QAC5E,OAAO,IAAA,4BAAc,EAAsC,OAAO,EAAE,IAAI,CAAC,UAAU,EAAE,MAAM,EAAE,GAAG,EAAE,KAAK,CAAC,CAAC;IAC7G,CAAC;CACJ;AApDD,wCAoDC"}
|
||||
91
build/proto/lnd/invoices.grpc-client.js
Normal file
91
build/proto/lnd/invoices.grpc-client.js
Normal file
|
|
@ -0,0 +1,91 @@
|
|||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.InvoicesClient = void 0;
|
||||
// @generated by protobuf-ts 2.5.0 with parameter long_type_string,client_grpc1
|
||||
// @generated from protobuf file "invoices.proto" (package "invoicesrpc", syntax proto3)
|
||||
// tslint:disable
|
||||
const invoices_1 = require("./invoices");
|
||||
const grpc = __importStar(require("@grpc/grpc-js"));
|
||||
/**
|
||||
* Invoices is a service that can be used to create, accept, settle and cancel
|
||||
* invoices.
|
||||
*
|
||||
* @generated from protobuf service invoicesrpc.Invoices
|
||||
*/
|
||||
class InvoicesClient extends grpc.Client {
|
||||
constructor(address, credentials, options = {}, binaryOptions = {}) {
|
||||
super(address, credentials, options);
|
||||
this._binaryOptions = binaryOptions;
|
||||
}
|
||||
/**
|
||||
*
|
||||
* SubscribeSingleInvoice returns a uni-directional stream (server -> client)
|
||||
* to notify the client of state transitions of the specified invoice.
|
||||
* Initially the current invoice state is always sent out.
|
||||
*
|
||||
* @generated from protobuf rpc: SubscribeSingleInvoice(invoicesrpc.SubscribeSingleInvoiceRequest) returns (stream lnrpc.Invoice);
|
||||
*/
|
||||
subscribeSingleInvoice(input, metadata, options) {
|
||||
const method = invoices_1.Invoices.methods[0];
|
||||
return this.makeServerStreamRequest(`/${invoices_1.Invoices.typeName}/${method.name}`, (value) => Buffer.from(method.I.toBinary(value, this._binaryOptions)), (value) => method.O.fromBinary(value, this._binaryOptions), input, metadata, options);
|
||||
}
|
||||
/**
|
||||
*
|
||||
* CancelInvoice cancels a currently open invoice. If the invoice is already
|
||||
* canceled, this call will succeed. If the invoice is already settled, it will
|
||||
* fail.
|
||||
*
|
||||
* @generated from protobuf rpc: CancelInvoice(invoicesrpc.CancelInvoiceMsg) returns (invoicesrpc.CancelInvoiceResp);
|
||||
*/
|
||||
cancelInvoice(input, metadata, options, callback) {
|
||||
const method = invoices_1.Invoices.methods[1];
|
||||
return this.makeUnaryRequest(`/${invoices_1.Invoices.typeName}/${method.name}`, (value) => Buffer.from(method.I.toBinary(value, this._binaryOptions)), (value) => method.O.fromBinary(value, this._binaryOptions), input, metadata, options, callback);
|
||||
}
|
||||
/**
|
||||
*
|
||||
* AddHoldInvoice creates a hold invoice. It ties the invoice to the hash
|
||||
* supplied in the request.
|
||||
*
|
||||
* @generated from protobuf rpc: AddHoldInvoice(invoicesrpc.AddHoldInvoiceRequest) returns (invoicesrpc.AddHoldInvoiceResp);
|
||||
*/
|
||||
addHoldInvoice(input, metadata, options, callback) {
|
||||
const method = invoices_1.Invoices.methods[2];
|
||||
return this.makeUnaryRequest(`/${invoices_1.Invoices.typeName}/${method.name}`, (value) => Buffer.from(method.I.toBinary(value, this._binaryOptions)), (value) => method.O.fromBinary(value, this._binaryOptions), input, metadata, options, callback);
|
||||
}
|
||||
/**
|
||||
*
|
||||
* SettleInvoice settles an accepted invoice. If the invoice is already
|
||||
* settled, this call will succeed.
|
||||
*
|
||||
* @generated from protobuf rpc: SettleInvoice(invoicesrpc.SettleInvoiceMsg) returns (invoicesrpc.SettleInvoiceResp);
|
||||
*/
|
||||
settleInvoice(input, metadata, options, callback) {
|
||||
const method = invoices_1.Invoices.methods[3];
|
||||
return this.makeUnaryRequest(`/${invoices_1.Invoices.typeName}/${method.name}`, (value) => Buffer.from(method.I.toBinary(value, this._binaryOptions)), (value) => method.O.fromBinary(value, this._binaryOptions), input, metadata, options, callback);
|
||||
}
|
||||
}
|
||||
exports.InvoicesClient = InvoicesClient;
|
||||
//# sourceMappingURL=invoices.grpc-client.js.map
|
||||
1
build/proto/lnd/invoices.grpc-client.js.map
Normal file
1
build/proto/lnd/invoices.grpc-client.js.map
Normal file
|
|
@ -0,0 +1 @@
|
|||
{"version":3,"file":"invoices.grpc-client.js","sourceRoot":"","sources":["../../../proto/lnd/invoices.grpc-client.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,+EAA+E;AAC/E,wFAAwF;AACxF,iBAAiB;AACjB,yCAAsC;AAWtC,oDAAsC;AAqDtC;;;;;GAKG;AACH,MAAa,cAAe,SAAQ,IAAI,CAAC,MAAM;IAE3C,YAAY,OAAe,EAAE,WAAoC,EAAE,UAA8B,EAAE,EAAE,gBAAiE,EAAE;QACpK,KAAK,CAAC,OAAO,EAAE,WAAW,EAAE,OAAO,CAAC,CAAC;QACrC,IAAI,CAAC,cAAc,GAAG,aAAa,CAAC;IACxC,CAAC;IACD;;;;;;;OAOG;IACH,sBAAsB,CAAC,KAAoC,EAAE,QAA2C,EAAE,OAA0B;QAChI,MAAM,MAAM,GAAG,mBAAQ,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC;QACnC,OAAO,IAAI,CAAC,uBAAuB,CAAyC,IAAI,mBAAQ,CAAC,QAAQ,IAAI,MAAM,CAAC,IAAI,EAAE,EAAE,CAAC,KAAoC,EAAU,EAAE,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,QAAQ,CAAC,KAAK,EAAE,IAAI,CAAC,cAAc,CAAC,CAAC,EAAE,CAAC,KAAa,EAAW,EAAE,CAAC,MAAM,CAAC,CAAC,CAAC,UAAU,CAAC,KAAK,EAAE,IAAI,CAAC,cAAc,CAAC,EAAE,KAAK,EAAG,QAAgB,EAAE,OAAO,CAAC,CAAC;IACtV,CAAC;IACD;;;;;;;OAOG;IACH,aAAa,CAAC,KAAuB,EAAE,QAAiH,EAAE,OAAiG,EAAE,QAA+E;QACxU,MAAM,MAAM,GAAG,mBAAQ,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC;QACnC,OAAO,IAAI,CAAC,gBAAgB,CAAsC,IAAI,mBAAQ,CAAC,QAAQ,IAAI,MAAM,CAAC,IAAI,EAAE,EAAE,CAAC,KAAuB,EAAU,EAAE,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,QAAQ,CAAC,KAAK,EAAE,IAAI,CAAC,cAAc,CAAC,CAAC,EAAE,CAAC,KAAa,EAAqB,EAAE,CAAC,MAAM,CAAC,CAAC,CAAC,UAAU,CAAC,KAAK,EAAE,IAAI,CAAC,cAAc,CAAC,EAAE,KAAK,EAAG,QAAgB,EAAG,OAAe,EAAG,QAAgB,CAAC,CAAC;IACrW,CAAC;IACD;;;;;;OAMG;IACH,cAAc,CAAC,KAA4B,EAAE,QAAkH,EAAE,OAAkG,EAAE,QAAgF;QACjV,MAAM,MAAM,GAAG,mBAAQ,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC;QACnC,OAAO,IAAI,CAAC,gBAAgB,CAA4C,IAAI,mBAAQ,CAAC,QAAQ,IAAI,MAAM,CAAC,IAAI,EAAE,EAAE,CAAC,KAA4B,EAAU,EAAE,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,QAAQ,CAAC,KAAK,EAAE,IAAI,CAAC,cAAc,CAAC,CAAC,EAAE,CAAC,KAAa,EAAsB,EAAE,CAAC,MAAM,CAAC,CAAC,CAAC,UAAU,CAAC,KAAK,EAAE,IAAI,CAAC,cAAc,CAAC,EAAE,KAAK,EAAG,QAAgB,EAAG,OAAe,EAAG,QAAgB,CAAC,CAAC;IACjX,CAAC;IACD;;;;;;OAMG;IACH,aAAa,CAAC,KAAuB,EAAE,QAAiH,EAAE,OAAiG,EAAE,QAA+E;QACxU,MAAM,MAAM,GAAG,mBAAQ,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC;QACnC,OAAO,IAAI,CAAC,gBA
AgB,CAAsC,IAAI,mBAAQ,CAAC,QAAQ,IAAI,MAAM,CAAC,IAAI,EAAE,EAAE,CAAC,KAAuB,EAAU,EAAE,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,QAAQ,CAAC,KAAK,EAAE,IAAI,CAAC,cAAc,CAAC,CAAC,EAAE,CAAC,KAAa,EAAqB,EAAE,CAAC,MAAM,CAAC,CAAC,CAAC,UAAU,CAAC,KAAK,EAAE,IAAI,CAAC,cAAc,CAAC,EAAE,KAAK,EAAG,QAAgB,EAAG,OAAe,EAAG,QAAgB,CAAC,CAAC;IACrW,CAAC;CACJ;AApDD,wCAoDC"}
|
||||
374
build/proto/lnd/invoices.js
Normal file
374
build/proto/lnd/invoices.js
Normal file
|
|
@ -0,0 +1,374 @@
|
|||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.Invoices = exports.SubscribeSingleInvoiceRequest = exports.SettleInvoiceResp = exports.SettleInvoiceMsg = exports.AddHoldInvoiceResp = exports.AddHoldInvoiceRequest = exports.CancelInvoiceResp = exports.CancelInvoiceMsg = void 0;
|
||||
// @generated by protobuf-ts 2.5.0 with parameter long_type_string
|
||||
// @generated from protobuf file "invoices.proto" (package "invoicesrpc", syntax proto3)
|
||||
// tslint:disable
|
||||
const rpc_1 = require("./rpc");
|
||||
const runtime_rpc_1 = require("@protobuf-ts/runtime-rpc");
|
||||
const runtime_1 = require("@protobuf-ts/runtime");
|
||||
const runtime_2 = require("@protobuf-ts/runtime");
|
||||
const runtime_3 = require("@protobuf-ts/runtime");
|
||||
const runtime_4 = require("@protobuf-ts/runtime");
|
||||
const runtime_5 = require("@protobuf-ts/runtime");
|
||||
const rpc_2 = require("./rpc");
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class CancelInvoiceMsg$Type extends runtime_5.MessageType {
|
||||
constructor() {
|
||||
super("invoicesrpc.CancelInvoiceMsg", [
|
||||
{ no: 1, name: "payment_hash", kind: "scalar", T: 12 /*ScalarType.BYTES*/ }
|
||||
]);
|
||||
}
|
||||
create(value) {
|
||||
const message = { paymentHash: new Uint8Array(0) };
|
||||
globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
(0, runtime_3.reflectionMergePartial)(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader, length, options, target) {
|
||||
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* bytes payment_hash */ 1:
|
||||
message.paymentHash = reader.bytes();
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message, writer, options) {
|
||||
/* bytes payment_hash = 1; */
|
||||
if (message.paymentHash.length)
|
||||
writer.tag(1, runtime_1.WireType.LengthDelimited).bytes(message.paymentHash);
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message invoicesrpc.CancelInvoiceMsg
|
||||
*/
|
||||
exports.CancelInvoiceMsg = new CancelInvoiceMsg$Type();
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class CancelInvoiceResp$Type extends runtime_5.MessageType {
|
||||
constructor() {
|
||||
super("invoicesrpc.CancelInvoiceResp", []);
|
||||
}
|
||||
create(value) {
|
||||
const message = {};
|
||||
globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
(0, runtime_3.reflectionMergePartial)(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader, length, options, target) {
|
||||
return target !== null && target !== void 0 ? target : this.create();
|
||||
}
|
||||
internalBinaryWrite(message, writer, options) {
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message invoicesrpc.CancelInvoiceResp
|
||||
*/
|
||||
exports.CancelInvoiceResp = new CancelInvoiceResp$Type();
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class AddHoldInvoiceRequest$Type extends runtime_5.MessageType {
|
||||
constructor() {
|
||||
super("invoicesrpc.AddHoldInvoiceRequest", [
|
||||
{ no: 1, name: "memo", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
|
||||
{ no: 2, name: "hash", kind: "scalar", T: 12 /*ScalarType.BYTES*/ },
|
||||
{ no: 3, name: "value", kind: "scalar", T: 3 /*ScalarType.INT64*/ },
|
||||
{ no: 10, name: "value_msat", kind: "scalar", T: 3 /*ScalarType.INT64*/ },
|
||||
{ no: 4, name: "description_hash", kind: "scalar", T: 12 /*ScalarType.BYTES*/ },
|
||||
{ no: 5, name: "expiry", kind: "scalar", T: 3 /*ScalarType.INT64*/ },
|
||||
{ no: 6, name: "fallback_addr", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
|
||||
{ no: 7, name: "cltv_expiry", kind: "scalar", T: 4 /*ScalarType.UINT64*/ },
|
||||
{ no: 8, name: "route_hints", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => rpc_2.RouteHint },
|
||||
{ no: 9, name: "private", kind: "scalar", T: 8 /*ScalarType.BOOL*/ }
|
||||
]);
|
||||
}
|
||||
create(value) {
|
||||
const message = { memo: "", hash: new Uint8Array(0), value: "0", valueMsat: "0", descriptionHash: new Uint8Array(0), expiry: "0", fallbackAddr: "", cltvExpiry: "0", routeHints: [], private: false };
|
||||
globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
(0, runtime_3.reflectionMergePartial)(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader, length, options, target) {
|
||||
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* string memo */ 1:
|
||||
message.memo = reader.string();
|
||||
break;
|
||||
case /* bytes hash */ 2:
|
||||
message.hash = reader.bytes();
|
||||
break;
|
||||
case /* int64 value */ 3:
|
||||
message.value = reader.int64().toString();
|
||||
break;
|
||||
case /* int64 value_msat */ 10:
|
||||
message.valueMsat = reader.int64().toString();
|
||||
break;
|
||||
case /* bytes description_hash */ 4:
|
||||
message.descriptionHash = reader.bytes();
|
||||
break;
|
||||
case /* int64 expiry */ 5:
|
||||
message.expiry = reader.int64().toString();
|
||||
break;
|
||||
case /* string fallback_addr */ 6:
|
||||
message.fallbackAddr = reader.string();
|
||||
break;
|
||||
case /* uint64 cltv_expiry */ 7:
|
||||
message.cltvExpiry = reader.uint64().toString();
|
||||
break;
|
||||
case /* repeated lnrpc.RouteHint route_hints */ 8:
|
||||
message.routeHints.push(rpc_2.RouteHint.internalBinaryRead(reader, reader.uint32(), options));
|
||||
break;
|
||||
case /* bool private */ 9:
|
||||
message.private = reader.bool();
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message, writer, options) {
|
||||
/* string memo = 1; */
|
||||
if (message.memo !== "")
|
||||
writer.tag(1, runtime_1.WireType.LengthDelimited).string(message.memo);
|
||||
/* bytes hash = 2; */
|
||||
if (message.hash.length)
|
||||
writer.tag(2, runtime_1.WireType.LengthDelimited).bytes(message.hash);
|
||||
/* int64 value = 3; */
|
||||
if (message.value !== "0")
|
||||
writer.tag(3, runtime_1.WireType.Varint).int64(message.value);
|
||||
/* int64 value_msat = 10; */
|
||||
if (message.valueMsat !== "0")
|
||||
writer.tag(10, runtime_1.WireType.Varint).int64(message.valueMsat);
|
||||
/* bytes description_hash = 4; */
|
||||
if (message.descriptionHash.length)
|
||||
writer.tag(4, runtime_1.WireType.LengthDelimited).bytes(message.descriptionHash);
|
||||
/* int64 expiry = 5; */
|
||||
if (message.expiry !== "0")
|
||||
writer.tag(5, runtime_1.WireType.Varint).int64(message.expiry);
|
||||
/* string fallback_addr = 6; */
|
||||
if (message.fallbackAddr !== "")
|
||||
writer.tag(6, runtime_1.WireType.LengthDelimited).string(message.fallbackAddr);
|
||||
/* uint64 cltv_expiry = 7; */
|
||||
if (message.cltvExpiry !== "0")
|
||||
writer.tag(7, runtime_1.WireType.Varint).uint64(message.cltvExpiry);
|
||||
/* repeated lnrpc.RouteHint route_hints = 8; */
|
||||
for (let i = 0; i < message.routeHints.length; i++)
|
||||
rpc_2.RouteHint.internalBinaryWrite(message.routeHints[i], writer.tag(8, runtime_1.WireType.LengthDelimited).fork(), options).join();
|
||||
/* bool private = 9; */
|
||||
if (message.private !== false)
|
||||
writer.tag(9, runtime_1.WireType.Varint).bool(message.private);
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message invoicesrpc.AddHoldInvoiceRequest
|
||||
*/
|
||||
exports.AddHoldInvoiceRequest = new AddHoldInvoiceRequest$Type();
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class AddHoldInvoiceResp$Type extends runtime_5.MessageType {
|
||||
constructor() {
|
||||
super("invoicesrpc.AddHoldInvoiceResp", [
|
||||
{ no: 1, name: "payment_request", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
|
||||
]);
|
||||
}
|
||||
create(value) {
|
||||
const message = { paymentRequest: "" };
|
||||
globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
(0, runtime_3.reflectionMergePartial)(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader, length, options, target) {
|
||||
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* string payment_request */ 1:
|
||||
message.paymentRequest = reader.string();
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message, writer, options) {
|
||||
/* string payment_request = 1; */
|
||||
if (message.paymentRequest !== "")
|
||||
writer.tag(1, runtime_1.WireType.LengthDelimited).string(message.paymentRequest);
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message invoicesrpc.AddHoldInvoiceResp
|
||||
*/
|
||||
exports.AddHoldInvoiceResp = new AddHoldInvoiceResp$Type();
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class SettleInvoiceMsg$Type extends runtime_5.MessageType {
|
||||
constructor() {
|
||||
super("invoicesrpc.SettleInvoiceMsg", [
|
||||
{ no: 1, name: "preimage", kind: "scalar", T: 12 /*ScalarType.BYTES*/ }
|
||||
]);
|
||||
}
|
||||
create(value) {
|
||||
const message = { preimage: new Uint8Array(0) };
|
||||
globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
(0, runtime_3.reflectionMergePartial)(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader, length, options, target) {
|
||||
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* bytes preimage */ 1:
|
||||
message.preimage = reader.bytes();
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message, writer, options) {
|
||||
/* bytes preimage = 1; */
|
||||
if (message.preimage.length)
|
||||
writer.tag(1, runtime_1.WireType.LengthDelimited).bytes(message.preimage);
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message invoicesrpc.SettleInvoiceMsg
|
||||
*/
|
||||
exports.SettleInvoiceMsg = new SettleInvoiceMsg$Type();
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class SettleInvoiceResp$Type extends runtime_5.MessageType {
|
||||
constructor() {
|
||||
super("invoicesrpc.SettleInvoiceResp", []);
|
||||
}
|
||||
create(value) {
|
||||
const message = {};
|
||||
globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
(0, runtime_3.reflectionMergePartial)(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader, length, options, target) {
|
||||
return target !== null && target !== void 0 ? target : this.create();
|
||||
}
|
||||
internalBinaryWrite(message, writer, options) {
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message invoicesrpc.SettleInvoiceResp
|
||||
*/
|
||||
exports.SettleInvoiceResp = new SettleInvoiceResp$Type();
|
||||
// @generated message type with reflection information, may provide speed optimized methods
|
||||
class SubscribeSingleInvoiceRequest$Type extends runtime_5.MessageType {
|
||||
constructor() {
|
||||
super("invoicesrpc.SubscribeSingleInvoiceRequest", [
|
||||
{ no: 2, name: "r_hash", kind: "scalar", T: 12 /*ScalarType.BYTES*/ }
|
||||
]);
|
||||
}
|
||||
create(value) {
|
||||
const message = { rHash: new Uint8Array(0) };
|
||||
globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
|
||||
if (value !== undefined)
|
||||
(0, runtime_3.reflectionMergePartial)(this, message, value);
|
||||
return message;
|
||||
}
|
||||
internalBinaryRead(reader, length, options, target) {
|
||||
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
|
||||
while (reader.pos < end) {
|
||||
let [fieldNo, wireType] = reader.tag();
|
||||
switch (fieldNo) {
|
||||
case /* bytes r_hash */ 2:
|
||||
message.rHash = reader.bytes();
|
||||
break;
|
||||
default:
|
||||
let u = options.readUnknownField;
|
||||
if (u === "throw")
|
||||
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
|
||||
let d = reader.skip(wireType);
|
||||
if (u !== false)
|
||||
(u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
internalBinaryWrite(message, writer, options) {
|
||||
/* bytes r_hash = 2; */
|
||||
if (message.rHash.length)
|
||||
writer.tag(2, runtime_1.WireType.LengthDelimited).bytes(message.rHash);
|
||||
let u = options.writeUnknownFields;
|
||||
if (u !== false)
|
||||
(u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
|
||||
return writer;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* @generated MessageType for protobuf message invoicesrpc.SubscribeSingleInvoiceRequest
|
||||
*/
|
||||
exports.SubscribeSingleInvoiceRequest = new SubscribeSingleInvoiceRequest$Type();
|
||||
/**
|
||||
* @generated ServiceType for protobuf service invoicesrpc.Invoices
|
||||
*/
|
||||
exports.Invoices = new runtime_rpc_1.ServiceType("invoicesrpc.Invoices", [
|
||||
{ name: "SubscribeSingleInvoice", serverStreaming: true, options: {}, I: exports.SubscribeSingleInvoiceRequest, O: rpc_1.Invoice },
|
||||
{ name: "CancelInvoice", options: {}, I: exports.CancelInvoiceMsg, O: exports.CancelInvoiceResp },
|
||||
{ name: "AddHoldInvoice", options: {}, I: exports.AddHoldInvoiceRequest, O: exports.AddHoldInvoiceResp },
|
||||
{ name: "SettleInvoice", options: {}, I: exports.SettleInvoiceMsg, O: exports.SettleInvoiceResp }
|
||||
]);
|
||||
//# sourceMappingURL=invoices.js.map
|
||||
1
build/proto/lnd/invoices.js.map
Normal file
1
build/proto/lnd/invoices.js.map
Normal file
File diff suppressed because one or more lines are too long
225
build/proto/lnd/router.client.js
Normal file
225
build/proto/lnd/router.client.js
Normal file
|
|
@ -0,0 +1,225 @@
|
|||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.RouterClient = void 0;
|
||||
const router_1 = require("./router");
|
||||
const runtime_rpc_1 = require("@protobuf-ts/runtime-rpc");
|
||||
/**
|
||||
* Router is a service that offers advanced interaction with the router
|
||||
* subsystem of the daemon.
|
||||
*
|
||||
* @generated from protobuf service routerrpc.Router
|
||||
*/
|
||||
class RouterClient {
|
||||
constructor(_transport) {
|
||||
this._transport = _transport;
|
||||
this.typeName = router_1.Router.typeName;
|
||||
this.methods = router_1.Router.methods;
|
||||
this.options = router_1.Router.options;
|
||||
}
|
||||
/**
|
||||
*
|
||||
* SendPaymentV2 attempts to route a payment described by the passed
|
||||
* PaymentRequest to the final destination. The call returns a stream of
|
||||
* payment updates.
|
||||
*
|
||||
* @generated from protobuf rpc: SendPaymentV2(routerrpc.SendPaymentRequest) returns (stream lnrpc.Payment);
|
||||
*/
|
||||
sendPaymentV2(input, options) {
|
||||
const method = this.methods[0], opt = this._transport.mergeOptions(options);
|
||||
return (0, runtime_rpc_1.stackIntercept)("serverStreaming", this._transport, method, opt, input);
|
||||
}
|
||||
/**
|
||||
*
|
||||
* TrackPaymentV2 returns an update stream for the payment identified by the
|
||||
* payment hash.
|
||||
*
|
||||
* @generated from protobuf rpc: TrackPaymentV2(routerrpc.TrackPaymentRequest) returns (stream lnrpc.Payment);
|
||||
*/
|
||||
trackPaymentV2(input, options) {
|
||||
const method = this.methods[1], opt = this._transport.mergeOptions(options);
|
||||
return (0, runtime_rpc_1.stackIntercept)("serverStreaming", this._transport, method, opt, input);
|
||||
}
|
||||
/**
|
||||
*
|
||||
* EstimateRouteFee allows callers to obtain a lower bound w.r.t how much it
|
||||
* may cost to send an HTLC to the target end destination.
|
||||
*
|
||||
* @generated from protobuf rpc: EstimateRouteFee(routerrpc.RouteFeeRequest) returns (routerrpc.RouteFeeResponse);
|
||||
*/
|
||||
estimateRouteFee(input, options) {
|
||||
const method = this.methods[2], opt = this._transport.mergeOptions(options);
|
||||
return (0, runtime_rpc_1.stackIntercept)("unary", this._transport, method, opt, input);
|
||||
}
|
||||
/**
|
||||
*
|
||||
* Deprecated, use SendToRouteV2. SendToRoute attempts to make a payment via
|
||||
* the specified route. This method differs from SendPayment in that it
|
||||
* allows users to specify a full route manually. This can be used for
|
||||
* things like rebalancing, and atomic swaps. It differs from the newer
|
||||
* SendToRouteV2 in that it doesn't return the full HTLC information.
|
||||
*
|
||||
* @deprecated
|
||||
* @generated from protobuf rpc: SendToRoute(routerrpc.SendToRouteRequest) returns (routerrpc.SendToRouteResponse);
|
||||
*/
|
||||
sendToRoute(input, options) {
|
||||
const method = this.methods[3], opt = this._transport.mergeOptions(options);
|
||||
return (0, runtime_rpc_1.stackIntercept)("unary", this._transport, method, opt, input);
|
||||
}
|
||||
/**
|
||||
*
|
||||
* SendToRouteV2 attempts to make a payment via the specified route. This
|
||||
* method differs from SendPayment in that it allows users to specify a full
|
||||
* route manually. This can be used for things like rebalancing, and atomic
|
||||
* swaps.
|
||||
*
|
||||
* @generated from protobuf rpc: SendToRouteV2(routerrpc.SendToRouteRequest) returns (lnrpc.HTLCAttempt);
|
||||
*/
|
||||
sendToRouteV2(input, options) {
|
||||
const method = this.methods[4], opt = this._transport.mergeOptions(options);
|
||||
return (0, runtime_rpc_1.stackIntercept)("unary", this._transport, method, opt, input);
|
||||
}
|
||||
/**
|
||||
*
|
||||
* ResetMissionControl clears all mission control state and starts with a clean
|
||||
* slate.
|
||||
*
|
||||
* @generated from protobuf rpc: ResetMissionControl(routerrpc.ResetMissionControlRequest) returns (routerrpc.ResetMissionControlResponse);
|
||||
*/
|
||||
resetMissionControl(input, options) {
|
||||
const method = this.methods[5], opt = this._transport.mergeOptions(options);
|
||||
return (0, runtime_rpc_1.stackIntercept)("unary", this._transport, method, opt, input);
|
||||
}
|
||||
/**
|
||||
*
|
||||
* QueryMissionControl exposes the internal mission control state to callers.
|
||||
* It is a development feature.
|
||||
*
|
||||
* @generated from protobuf rpc: QueryMissionControl(routerrpc.QueryMissionControlRequest) returns (routerrpc.QueryMissionControlResponse);
|
||||
*/
|
||||
queryMissionControl(input, options) {
|
||||
const method = this.methods[6], opt = this._transport.mergeOptions(options);
|
||||
return (0, runtime_rpc_1.stackIntercept)("unary", this._transport, method, opt, input);
|
||||
}
|
||||
/**
|
||||
*
|
||||
* XImportMissionControl is an experimental API that imports the state provided
|
||||
* to the internal mission control's state, using all results which are more
|
||||
* recent than our existing values. These values will only be imported
|
||||
* in-memory, and will not be persisted across restarts.
|
||||
*
|
||||
* @generated from protobuf rpc: XImportMissionControl(routerrpc.XImportMissionControlRequest) returns (routerrpc.XImportMissionControlResponse);
|
||||
*/
|
||||
xImportMissionControl(input, options) {
|
||||
const method = this.methods[7], opt = this._transport.mergeOptions(options);
|
||||
return (0, runtime_rpc_1.stackIntercept)("unary", this._transport, method, opt, input);
|
||||
}
|
||||
/**
|
||||
*
|
||||
* GetMissionControlConfig returns mission control's current config.
|
||||
*
|
||||
* @generated from protobuf rpc: GetMissionControlConfig(routerrpc.GetMissionControlConfigRequest) returns (routerrpc.GetMissionControlConfigResponse);
|
||||
*/
|
||||
getMissionControlConfig(input, options) {
|
||||
const method = this.methods[8], opt = this._transport.mergeOptions(options);
|
||||
return (0, runtime_rpc_1.stackIntercept)("unary", this._transport, method, opt, input);
|
||||
}
|
||||
/**
|
||||
*
|
||||
* SetMissionControlConfig will set mission control's config, if the config
|
||||
* provided is valid.
|
||||
*
|
||||
* @generated from protobuf rpc: SetMissionControlConfig(routerrpc.SetMissionControlConfigRequest) returns (routerrpc.SetMissionControlConfigResponse);
|
||||
*/
|
||||
setMissionControlConfig(input, options) {
|
||||
const method = this.methods[9], opt = this._transport.mergeOptions(options);
|
||||
return (0, runtime_rpc_1.stackIntercept)("unary", this._transport, method, opt, input);
|
||||
}
|
||||
/**
|
||||
*
|
||||
* QueryProbability returns the current success probability estimate for a
|
||||
* given node pair and amount.
|
||||
*
|
||||
* @generated from protobuf rpc: QueryProbability(routerrpc.QueryProbabilityRequest) returns (routerrpc.QueryProbabilityResponse);
|
||||
*/
|
||||
queryProbability(input, options) {
|
||||
const method = this.methods[10], opt = this._transport.mergeOptions(options);
|
||||
return (0, runtime_rpc_1.stackIntercept)("unary", this._transport, method, opt, input);
|
||||
}
|
||||
/**
|
||||
*
|
||||
* BuildRoute builds a fully specified route based on a list of hop public
|
||||
* keys. It retrieves the relevant channel policies from the graph in order to
|
||||
* calculate the correct fees and time locks.
|
||||
*
|
||||
* @generated from protobuf rpc: BuildRoute(routerrpc.BuildRouteRequest) returns (routerrpc.BuildRouteResponse);
|
||||
*/
|
||||
buildRoute(input, options) {
|
||||
const method = this.methods[11], opt = this._transport.mergeOptions(options);
|
||||
return (0, runtime_rpc_1.stackIntercept)("unary", this._transport, method, opt, input);
|
||||
}
|
||||
/**
|
||||
*
|
||||
* SubscribeHtlcEvents creates a uni-directional stream from the server to
|
||||
* the client which delivers a stream of htlc events.
|
||||
*
|
||||
* @generated from protobuf rpc: SubscribeHtlcEvents(routerrpc.SubscribeHtlcEventsRequest) returns (stream routerrpc.HtlcEvent);
|
||||
*/
|
||||
subscribeHtlcEvents(input, options) {
|
||||
const method = this.methods[12], opt = this._transport.mergeOptions(options);
|
||||
return (0, runtime_rpc_1.stackIntercept)("serverStreaming", this._transport, method, opt, input);
|
||||
}
|
||||
/**
|
||||
*
|
||||
* Deprecated, use SendPaymentV2. SendPayment attempts to route a payment
|
||||
* described by the passed PaymentRequest to the final destination. The call
|
||||
* returns a stream of payment status updates.
|
||||
*
|
||||
* @deprecated
|
||||
* @generated from protobuf rpc: SendPayment(routerrpc.SendPaymentRequest) returns (stream routerrpc.PaymentStatus);
|
||||
*/
|
||||
sendPayment(input, options) {
|
||||
const method = this.methods[13], opt = this._transport.mergeOptions(options);
|
||||
return (0, runtime_rpc_1.stackIntercept)("serverStreaming", this._transport, method, opt, input);
|
||||
}
|
||||
/**
|
||||
*
|
||||
* Deprecated, use TrackPaymentV2. TrackPayment returns an update stream for
|
||||
* the payment identified by the payment hash.
|
||||
*
|
||||
* @deprecated
|
||||
* @generated from protobuf rpc: TrackPayment(routerrpc.TrackPaymentRequest) returns (stream routerrpc.PaymentStatus);
|
||||
*/
|
||||
trackPayment(input, options) {
|
||||
const method = this.methods[14], opt = this._transport.mergeOptions(options);
|
||||
return (0, runtime_rpc_1.stackIntercept)("serverStreaming", this._transport, method, opt, input);
|
||||
}
|
||||
/**
|
||||
* *
|
||||
* HtlcInterceptor dispatches a bi-directional streaming RPC in which
|
||||
* Forwarded HTLC requests are sent to the client and the client responds with
|
||||
* a boolean that tells LND if this htlc should be intercepted.
|
||||
* In case of interception, the htlc can be either settled, cancelled or
|
||||
* resumed later by using the ResolveHoldForward endpoint.
|
||||
*
|
||||
* @generated from protobuf rpc: HtlcInterceptor(stream routerrpc.ForwardHtlcInterceptResponse) returns (stream routerrpc.ForwardHtlcInterceptRequest);
|
||||
*/
|
||||
htlcInterceptor(options) {
|
||||
const method = this.methods[15], opt = this._transport.mergeOptions(options);
|
||||
return (0, runtime_rpc_1.stackIntercept)("duplex", this._transport, method, opt);
|
||||
}
|
||||
/**
|
||||
*
|
||||
* UpdateChanStatus attempts to manually set the state of a channel
|
||||
* (enabled, disabled, or auto). A manual "disable" request will cause the
|
||||
* channel to stay disabled until a subsequent manual request of either
|
||||
* "enable" or "auto".
|
||||
*
|
||||
* @generated from protobuf rpc: UpdateChanStatus(routerrpc.UpdateChanStatusRequest) returns (routerrpc.UpdateChanStatusResponse);
|
||||
*/
|
||||
updateChanStatus(input, options) {
|
||||
const method = this.methods[16], opt = this._transport.mergeOptions(options);
|
||||
return (0, runtime_rpc_1.stackIntercept)("unary", this._transport, method, opt, input);
|
||||
}
|
||||
}
|
||||
exports.RouterClient = RouterClient;
|
||||
//# sourceMappingURL=router.client.js.map
|
||||
1
build/proto/lnd/router.client.js.map
Normal file
1
build/proto/lnd/router.client.js.map
Normal file
|
|
@ -0,0 +1 @@
|
|||
{"version":3,"file":"router.client.js","sourceRoot":"","sources":["../../../proto/lnd/router.client.ts"],"names":[],"mappings":";;;AAKA,qCAAkC;AA8BlC,0DAA0D;AAsK1D;;;;;GAKG;AACH,MAAa,YAAY;IAIrB,YAA6B,UAAwB;QAAxB,eAAU,GAAV,UAAU,CAAc;QAHrD,aAAQ,GAAG,eAAM,CAAC,QAAQ,CAAC;QAC3B,YAAO,GAAG,eAAM,CAAC,OAAO,CAAC;QACzB,YAAO,GAAG,eAAM,CAAC,OAAO,CAAC;IAEzB,CAAC;IACD;;;;;;;OAOG;IACH,aAAa,CAAC,KAAyB,EAAE,OAAoB;QACzD,MAAM,MAAM,GAAG,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,GAAG,GAAG,IAAI,CAAC,UAAU,CAAC,YAAY,CAAC,OAAO,CAAC,CAAC;QAC5E,OAAO,IAAA,4BAAc,EAA8B,iBAAiB,EAAE,IAAI,CAAC,UAAU,EAAE,MAAM,EAAE,GAAG,EAAE,KAAK,CAAC,CAAC;IAC/G,CAAC;IACD;;;;;;OAMG;IACH,cAAc,CAAC,KAA0B,EAAE,OAAoB;QAC3D,MAAM,MAAM,GAAG,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,GAAG,GAAG,IAAI,CAAC,UAAU,CAAC,YAAY,CAAC,OAAO,CAAC,CAAC;QAC5E,OAAO,IAAA,4BAAc,EAA+B,iBAAiB,EAAE,IAAI,CAAC,UAAU,EAAE,MAAM,EAAE,GAAG,EAAE,KAAK,CAAC,CAAC;IAChH,CAAC;IACD;;;;;;OAMG;IACH,gBAAgB,CAAC,KAAsB,EAAE,OAAoB;QACzD,MAAM,MAAM,GAAG,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,GAAG,GAAG,IAAI,CAAC,UAAU,CAAC,YAAY,CAAC,OAAO,CAAC,CAAC;QAC5E,OAAO,IAAA,4BAAc,EAAoC,OAAO,EAAE,IAAI,CAAC,UAAU,EAAE,MAAM,EAAE,GAAG,EAAE,KAAK,CAAC,CAAC;IAC3G,CAAC;IACD;;;;;;;;;;OAUG;IACH,WAAW,CAAC,KAAyB,EAAE,OAAoB;QACvD,MAAM,MAAM,GAAG,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,GAAG,GAAG,IAAI,CAAC,UAAU,CAAC,YAAY,CAAC,OAAO,CAAC,CAAC;QAC5E,OAAO,IAAA,4BAAc,EAA0C,OAAO,EAAE,IAAI,CAAC,UAAU,EAAE,MAAM,EAAE,GAAG,EAAE,KAAK,CAAC,CAAC;IACjH,CAAC;IACD;;;;;;;;OAQG;IACH,aAAa,CAAC,KAAyB,EAAE,OAAoB;QACzD,MAAM,MAAM,GAAG,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,GAAG,GAAG,IAAI,CAAC,UAAU,CAAC,YAAY,CAAC,OAAO,CAAC,CAAC;QAC5E,OAAO,IAAA,4BAAc,EAAkC,OAAO,EAAE,IAAI,CAAC,UAAU,EAAE,MAAM,EAAE,GAAG,EAAE,KAAK,CAAC,CAAC;IACzG,CAAC;IACD;;;;;;OAMG;IACH,mBAAmB,CAAC,KAAiC,EAAE,OAAoB;QACvE,MAAM,MAAM,GAAG,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,GAAG,GAAG,IAAI,CAAC,UAAU,CAAC,YAAY,CAAC,OAAO,CAAC,CAAC;QAC5E,OAAO,IAAA,4BAAc,EAA0D,OAAO,EAAE,IAAI,CAAC,UAAU,EAAE,MAAM,EAAE,GAAG,EAAE,KAAK,CAAC,CAAC;IACjI,CAAC;IACD;;;;;;OAMG;IACH,mBAAmB,CAAC,KAAiC,EAAE,OAAoB;QACvE,MAAM,MA
AM,GAAG,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,GAAG,GAAG,IAAI,CAAC,UAAU,CAAC,YAAY,CAAC,OAAO,CAAC,CAAC;QAC5E,OAAO,IAAA,4BAAc,EAA0D,OAAO,EAAE,IAAI,CAAC,UAAU,EAAE,MAAM,EAAE,GAAG,EAAE,KAAK,CAAC,CAAC;IACjI,CAAC;IACD;;;;;;;;OAQG;IACH,qBAAqB,CAAC,KAAmC,EAAE,OAAoB;QAC3E,MAAM,MAAM,GAAG,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,GAAG,GAAG,IAAI,CAAC,UAAU,CAAC,YAAY,CAAC,OAAO,CAAC,CAAC;QAC5E,OAAO,IAAA,4BAAc,EAA8D,OAAO,EAAE,IAAI,CAAC,UAAU,EAAE,MAAM,EAAE,GAAG,EAAE,KAAK,CAAC,CAAC;IACrI,CAAC;IACD;;;;;OAKG;IACH,uBAAuB,CAAC,KAAqC,EAAE,OAAoB;QAC/E,MAAM,MAAM,GAAG,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,GAAG,GAAG,IAAI,CAAC,UAAU,CAAC,YAAY,CAAC,OAAO,CAAC,CAAC;QAC5E,OAAO,IAAA,4BAAc,EAAkE,OAAO,EAAE,IAAI,CAAC,UAAU,EAAE,MAAM,EAAE,GAAG,EAAE,KAAK,CAAC,CAAC;IACzI,CAAC;IACD;;;;;;OAMG;IACH,uBAAuB,CAAC,KAAqC,EAAE,OAAoB;QAC/E,MAAM,MAAM,GAAG,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,GAAG,GAAG,IAAI,CAAC,UAAU,CAAC,YAAY,CAAC,OAAO,CAAC,CAAC;QAC5E,OAAO,IAAA,4BAAc,EAAkE,OAAO,EAAE,IAAI,CAAC,UAAU,EAAE,MAAM,EAAE,GAAG,EAAE,KAAK,CAAC,CAAC;IACzI,CAAC;IACD;;;;;;OAMG;IACH,gBAAgB,CAAC,KAA8B,EAAE,OAAoB;QACjE,MAAM,MAAM,GAAG,IAAI,CAAC,OAAO,CAAC,EAAE,CAAC,EAAE,GAAG,GAAG,IAAI,CAAC,UAAU,CAAC,YAAY,CAAC,OAAO,CAAC,CAAC;QAC7E,OAAO,IAAA,4BAAc,EAAoD,OAAO,EAAE,IAAI,CAAC,UAAU,EAAE,MAAM,EAAE,GAAG,EAAE,KAAK,CAAC,CAAC;IAC3H,CAAC;IACD;;;;;;;OAOG;IACH,UAAU,CAAC,KAAwB,EAAE,OAAoB;QACrD,MAAM,MAAM,GAAG,IAAI,CAAC,OAAO,CAAC,EAAE,CAAC,EAAE,GAAG,GAAG,IAAI,CAAC,UAAU,CAAC,YAAY,CAAC,OAAO,CAAC,CAAC;QAC7E,OAAO,IAAA,4BAAc,EAAwC,OAAO,EAAE,IAAI,CAAC,UAAU,EAAE,MAAM,EAAE,GAAG,EAAE,KAAK,CAAC,CAAC;IAC/G,CAAC;IACD;;;;;;OAMG;IACH,mBAAmB,CAAC,KAAiC,EAAE,OAAoB;QACvE,MAAM,MAAM,GAAG,IAAI,CAAC,OAAO,CAAC,EAAE,CAAC,EAAE,GAAG,GAAG,IAAI,CAAC,UAAU,CAAC,YAAY,CAAC,OAAO,CAAC,CAAC;QAC7E,OAAO,IAAA,4BAAc,EAAwC,iBAAiB,EAAE,IAAI,CAAC,UAAU,EAAE,MAAM,EAAE,GAAG,EAAE,KAAK,CAAC,CAAC;IACzH,CAAC;IACD;;;;;;;;OAQG;IACH,WAAW,CAAC,KAAyB,EAAE,OAAoB;QACvD,MAAM,MAAM,GAAG,IAAI,CAAC,OAAO,CAAC,EAAE,CAAC,EAAE,GAAG,GAAG,IAAI,CAAC,UAAU,CAAC,YAAY,CAAC,OAAO,CAAC,CAAC;QAC7E,OAAO,IAAA,4BAAc,EAAoC,iBAAiB,EAAE,IAAI,CAAC,UA
AU,EAAE,MAAM,EAAE,GAAG,EAAE,KAAK,CAAC,CAAC;IACrH,CAAC;IACD;;;;;;;OAOG;IACH,YAAY,CAAC,KAA0B,EAAE,OAAoB;QACzD,MAAM,MAAM,GAAG,IAAI,CAAC,OAAO,CAAC,EAAE,CAAC,EAAE,GAAG,GAAG,IAAI,CAAC,UAAU,CAAC,YAAY,CAAC,OAAO,CAAC,CAAC;QAC7E,OAAO,IAAA,4BAAc,EAAqC,iBAAiB,EAAE,IAAI,CAAC,UAAU,EAAE,MAAM,EAAE,GAAG,EAAE,KAAK,CAAC,CAAC;IACtH,CAAC;IACD;;;;;;;;;OASG;IACH,eAAe,CAAC,OAAoB;QAChC,MAAM,MAAM,GAAG,IAAI,CAAC,OAAO,CAAC,EAAE,CAAC,EAAE,GAAG,GAAG,IAAI,CAAC,UAAU,CAAC,YAAY,CAAC,OAAO,CAAC,CAAC;QAC7E,OAAO,IAAA,4BAAc,EAA4D,QAAQ,EAAE,IAAI,CAAC,UAAU,EAAE,MAAM,EAAE,GAAG,CAAC,CAAC;IAC7H,CAAC;IACD;;;;;;;;OAQG;IACH,gBAAgB,CAAC,KAA8B,EAAE,OAAoB;QACjE,MAAM,MAAM,GAAG,IAAI,CAAC,OAAO,CAAC,EAAE,CAAC,EAAE,GAAG,GAAG,IAAI,CAAC,UAAU,CAAC,YAAY,CAAC,OAAO,CAAC,CAAC;QAC7E,OAAO,IAAA,4BAAc,EAAoD,OAAO,EAAE,IAAI,CAAC,UAAU,EAAE,MAAM,EAAE,GAAG,EAAE,KAAK,CAAC,CAAC;IAC3H,CAAC;CACJ;AAlND,oCAkNC"}
|
||||
249
build/proto/lnd/router.grpc-client.js
Normal file
249
build/proto/lnd/router.grpc-client.js
Normal file
|
|
@ -0,0 +1,249 @@
|
|||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.RouterClient = void 0;
|
||||
// @generated by protobuf-ts 2.5.0 with parameter long_type_string,client_grpc1
|
||||
// @generated from protobuf file "router.proto" (package "routerrpc", syntax proto3)
|
||||
// tslint:disable
|
||||
const router_1 = require("./router");
|
||||
const grpc = __importStar(require("@grpc/grpc-js"));
|
||||
/**
|
||||
* Router is a service that offers advanced interaction with the router
|
||||
* subsystem of the daemon.
|
||||
*
|
||||
* @generated from protobuf service routerrpc.Router
|
||||
*/
|
||||
class RouterClient extends grpc.Client {
|
||||
constructor(address, credentials, options = {}, binaryOptions = {}) {
|
||||
super(address, credentials, options);
|
||||
this._binaryOptions = binaryOptions;
|
||||
}
|
||||
/**
|
||||
*
|
||||
* SendPaymentV2 attempts to route a payment described by the passed
|
||||
* PaymentRequest to the final destination. The call returns a stream of
|
||||
* payment updates.
|
||||
*
|
||||
* @generated from protobuf rpc: SendPaymentV2(routerrpc.SendPaymentRequest) returns (stream lnrpc.Payment);
|
||||
*/
|
||||
sendPaymentV2(input, metadata, options) {
|
||||
const method = router_1.Router.methods[0];
|
||||
return this.makeServerStreamRequest(`/${router_1.Router.typeName}/${method.name}`, (value) => Buffer.from(method.I.toBinary(value, this._binaryOptions)), (value) => method.O.fromBinary(value, this._binaryOptions), input, metadata, options);
|
||||
}
|
||||
/**
|
||||
*
|
||||
* TrackPaymentV2 returns an update stream for the payment identified by the
|
||||
* payment hash.
|
||||
*
|
||||
* @generated from protobuf rpc: TrackPaymentV2(routerrpc.TrackPaymentRequest) returns (stream lnrpc.Payment);
|
||||
*/
|
||||
trackPaymentV2(input, metadata, options) {
|
||||
const method = router_1.Router.methods[1];
|
||||
return this.makeServerStreamRequest(`/${router_1.Router.typeName}/${method.name}`, (value) => Buffer.from(method.I.toBinary(value, this._binaryOptions)), (value) => method.O.fromBinary(value, this._binaryOptions), input, metadata, options);
|
||||
}
|
||||
/**
|
||||
*
|
||||
* EstimateRouteFee allows callers to obtain a lower bound w.r.t how much it
|
||||
* may cost to send an HTLC to the target end destination.
|
||||
*
|
||||
* @generated from protobuf rpc: EstimateRouteFee(routerrpc.RouteFeeRequest) returns (routerrpc.RouteFeeResponse);
|
||||
*/
|
||||
estimateRouteFee(input, metadata, options, callback) {
|
||||
const method = router_1.Router.methods[2];
|
||||
return this.makeUnaryRequest(`/${router_1.Router.typeName}/${method.name}`, (value) => Buffer.from(method.I.toBinary(value, this._binaryOptions)), (value) => method.O.fromBinary(value, this._binaryOptions), input, metadata, options, callback);
|
||||
}
|
||||
/**
|
||||
*
|
||||
* Deprecated, use SendToRouteV2. SendToRoute attempts to make a payment via
|
||||
* the specified route. This method differs from SendPayment in that it
|
||||
* allows users to specify a full route manually. This can be used for
|
||||
* things like rebalancing, and atomic swaps. It differs from the newer
|
||||
* SendToRouteV2 in that it doesn't return the full HTLC information.
|
||||
*
|
||||
* @deprecated
|
||||
* @generated from protobuf rpc: SendToRoute(routerrpc.SendToRouteRequest) returns (routerrpc.SendToRouteResponse);
|
||||
*/
|
||||
sendToRoute(input, metadata, options, callback) {
|
||||
const method = router_1.Router.methods[3];
|
||||
return this.makeUnaryRequest(`/${router_1.Router.typeName}/${method.name}`, (value) => Buffer.from(method.I.toBinary(value, this._binaryOptions)), (value) => method.O.fromBinary(value, this._binaryOptions), input, metadata, options, callback);
|
||||
}
|
||||
/**
|
||||
*
|
||||
* SendToRouteV2 attempts to make a payment via the specified route. This
|
||||
* method differs from SendPayment in that it allows users to specify a full
|
||||
* route manually. This can be used for things like rebalancing, and atomic
|
||||
* swaps.
|
||||
*
|
||||
* @generated from protobuf rpc: SendToRouteV2(routerrpc.SendToRouteRequest) returns (lnrpc.HTLCAttempt);
|
||||
*/
|
||||
sendToRouteV2(input, metadata, options, callback) {
|
||||
const method = router_1.Router.methods[4];
|
||||
return this.makeUnaryRequest(`/${router_1.Router.typeName}/${method.name}`, (value) => Buffer.from(method.I.toBinary(value, this._binaryOptions)), (value) => method.O.fromBinary(value, this._binaryOptions), input, metadata, options, callback);
|
||||
}
|
||||
/**
|
||||
*
|
||||
* ResetMissionControl clears all mission control state and starts with a clean
|
||||
* slate.
|
||||
*
|
||||
* @generated from protobuf rpc: ResetMissionControl(routerrpc.ResetMissionControlRequest) returns (routerrpc.ResetMissionControlResponse);
|
||||
*/
|
||||
resetMissionControl(input, metadata, options, callback) {
|
||||
const method = router_1.Router.methods[5];
|
||||
return this.makeUnaryRequest(`/${router_1.Router.typeName}/${method.name}`, (value) => Buffer.from(method.I.toBinary(value, this._binaryOptions)), (value) => method.O.fromBinary(value, this._binaryOptions), input, metadata, options, callback);
|
||||
}
|
||||
/**
|
||||
*
|
||||
* QueryMissionControl exposes the internal mission control state to callers.
|
||||
* It is a development feature.
|
||||
*
|
||||
* @generated from protobuf rpc: QueryMissionControl(routerrpc.QueryMissionControlRequest) returns (routerrpc.QueryMissionControlResponse);
|
||||
*/
|
||||
queryMissionControl(input, metadata, options, callback) {
|
||||
const method = router_1.Router.methods[6];
|
||||
return this.makeUnaryRequest(`/${router_1.Router.typeName}/${method.name}`, (value) => Buffer.from(method.I.toBinary(value, this._binaryOptions)), (value) => method.O.fromBinary(value, this._binaryOptions), input, metadata, options, callback);
|
||||
}
|
||||
/**
|
||||
*
|
||||
* XImportMissionControl is an experimental API that imports the state provided
|
||||
* to the internal mission control's state, using all results which are more
|
||||
* recent than our existing values. These values will only be imported
|
||||
* in-memory, and will not be persisted across restarts.
|
||||
*
|
||||
* @generated from protobuf rpc: XImportMissionControl(routerrpc.XImportMissionControlRequest) returns (routerrpc.XImportMissionControlResponse);
|
||||
*/
|
||||
xImportMissionControl(input, metadata, options, callback) {
|
||||
const method = router_1.Router.methods[7];
|
||||
return this.makeUnaryRequest(`/${router_1.Router.typeName}/${method.name}`, (value) => Buffer.from(method.I.toBinary(value, this._binaryOptions)), (value) => method.O.fromBinary(value, this._binaryOptions), input, metadata, options, callback);
|
||||
}
|
||||
/**
|
||||
*
|
||||
* GetMissionControlConfig returns mission control's current config.
|
||||
*
|
||||
* @generated from protobuf rpc: GetMissionControlConfig(routerrpc.GetMissionControlConfigRequest) returns (routerrpc.GetMissionControlConfigResponse);
|
||||
*/
|
||||
getMissionControlConfig(input, metadata, options, callback) {
|
||||
const method = router_1.Router.methods[8];
|
||||
return this.makeUnaryRequest(`/${router_1.Router.typeName}/${method.name}`, (value) => Buffer.from(method.I.toBinary(value, this._binaryOptions)), (value) => method.O.fromBinary(value, this._binaryOptions), input, metadata, options, callback);
|
||||
}
|
||||
/**
|
||||
*
|
||||
* SetMissionControlConfig will set mission control's config, if the config
|
||||
* provided is valid.
|
||||
*
|
||||
* @generated from protobuf rpc: SetMissionControlConfig(routerrpc.SetMissionControlConfigRequest) returns (routerrpc.SetMissionControlConfigResponse);
|
||||
*/
|
||||
setMissionControlConfig(input, metadata, options, callback) {
|
||||
const method = router_1.Router.methods[9];
|
||||
return this.makeUnaryRequest(`/${router_1.Router.typeName}/${method.name}`, (value) => Buffer.from(method.I.toBinary(value, this._binaryOptions)), (value) => method.O.fromBinary(value, this._binaryOptions), input, metadata, options, callback);
|
||||
}
|
||||
/**
|
||||
*
|
||||
* QueryProbability returns the current success probability estimate for a
|
||||
* given node pair and amount.
|
||||
*
|
||||
* @generated from protobuf rpc: QueryProbability(routerrpc.QueryProbabilityRequest) returns (routerrpc.QueryProbabilityResponse);
|
||||
*/
|
||||
queryProbability(input, metadata, options, callback) {
|
||||
const method = router_1.Router.methods[10];
|
||||
return this.makeUnaryRequest(`/${router_1.Router.typeName}/${method.name}`, (value) => Buffer.from(method.I.toBinary(value, this._binaryOptions)), (value) => method.O.fromBinary(value, this._binaryOptions), input, metadata, options, callback);
|
||||
}
|
||||
/**
|
||||
*
|
||||
* BuildRoute builds a fully specified route based on a list of hop public
|
||||
* keys. It retrieves the relevant channel policies from the graph in order to
|
||||
* calculate the correct fees and time locks.
|
||||
*
|
||||
* @generated from protobuf rpc: BuildRoute(routerrpc.BuildRouteRequest) returns (routerrpc.BuildRouteResponse);
|
||||
*/
|
||||
buildRoute(input, metadata, options, callback) {
|
||||
const method = router_1.Router.methods[11];
|
||||
return this.makeUnaryRequest(`/${router_1.Router.typeName}/${method.name}`, (value) => Buffer.from(method.I.toBinary(value, this._binaryOptions)), (value) => method.O.fromBinary(value, this._binaryOptions), input, metadata, options, callback);
|
||||
}
|
||||
/**
|
||||
*
|
||||
* SubscribeHtlcEvents creates a uni-directional stream from the server to
|
||||
* the client which delivers a stream of htlc events.
|
||||
*
|
||||
* @generated from protobuf rpc: SubscribeHtlcEvents(routerrpc.SubscribeHtlcEventsRequest) returns (stream routerrpc.HtlcEvent);
|
||||
*/
|
||||
subscribeHtlcEvents(input, metadata, options) {
|
||||
const method = router_1.Router.methods[12];
|
||||
return this.makeServerStreamRequest(`/${router_1.Router.typeName}/${method.name}`, (value) => Buffer.from(method.I.toBinary(value, this._binaryOptions)), (value) => method.O.fromBinary(value, this._binaryOptions), input, metadata, options);
|
||||
}
|
||||
/**
|
||||
*
|
||||
* Deprecated, use SendPaymentV2. SendPayment attempts to route a payment
|
||||
* described by the passed PaymentRequest to the final destination. The call
|
||||
* returns a stream of payment status updates.
|
||||
*
|
||||
* @deprecated
|
||||
* @generated from protobuf rpc: SendPayment(routerrpc.SendPaymentRequest) returns (stream routerrpc.PaymentStatus);
|
||||
*/
|
||||
sendPayment(input, metadata, options) {
|
||||
const method = router_1.Router.methods[13];
|
||||
return this.makeServerStreamRequest(`/${router_1.Router.typeName}/${method.name}`, (value) => Buffer.from(method.I.toBinary(value, this._binaryOptions)), (value) => method.O.fromBinary(value, this._binaryOptions), input, metadata, options);
|
||||
}
|
||||
/**
|
||||
*
|
||||
* Deprecated, use TrackPaymentV2. TrackPayment returns an update stream for
|
||||
* the payment identified by the payment hash.
|
||||
*
|
||||
* @deprecated
|
||||
* @generated from protobuf rpc: TrackPayment(routerrpc.TrackPaymentRequest) returns (stream routerrpc.PaymentStatus);
|
||||
*/
|
||||
trackPayment(input, metadata, options) {
|
||||
const method = router_1.Router.methods[14];
|
||||
return this.makeServerStreamRequest(`/${router_1.Router.typeName}/${method.name}`, (value) => Buffer.from(method.I.toBinary(value, this._binaryOptions)), (value) => method.O.fromBinary(value, this._binaryOptions), input, metadata, options);
|
||||
}
|
||||
/**
|
||||
* *
|
||||
* HtlcInterceptor dispatches a bi-directional streaming RPC in which
|
||||
* Forwarded HTLC requests are sent to the client and the client responds with
|
||||
* a boolean that tells LND if this htlc should be intercepted.
|
||||
* In case of interception, the htlc can be either settled, cancelled or
|
||||
* resumed later by using the ResolveHoldForward endpoint.
|
||||
*
|
||||
* @generated from protobuf rpc: HtlcInterceptor(stream routerrpc.ForwardHtlcInterceptResponse) returns (stream routerrpc.ForwardHtlcInterceptRequest);
|
||||
*/
|
||||
htlcInterceptor(metadata, options) {
|
||||
const method = router_1.Router.methods[15];
|
||||
return this.makeBidiStreamRequest(`/${router_1.Router.typeName}/${method.name}`, (value) => Buffer.from(method.I.toBinary(value, this._binaryOptions)), (value) => method.O.fromBinary(value, this._binaryOptions), metadata, options);
|
||||
}
|
||||
/**
|
||||
*
|
||||
* UpdateChanStatus attempts to manually set the state of a channel
|
||||
* (enabled, disabled, or auto). A manual "disable" request will cause the
|
||||
* channel to stay disabled until a subsequent manual request of either
|
||||
* "enable" or "auto".
|
||||
*
|
||||
* @generated from protobuf rpc: UpdateChanStatus(routerrpc.UpdateChanStatusRequest) returns (routerrpc.UpdateChanStatusResponse);
|
||||
*/
|
||||
updateChanStatus(input, metadata, options, callback) {
|
||||
const method = router_1.Router.methods[16];
|
||||
return this.makeUnaryRequest(`/${router_1.Router.typeName}/${method.name}`, (value) => Buffer.from(method.I.toBinary(value, this._binaryOptions)), (value) => method.O.fromBinary(value, this._binaryOptions), input, metadata, options, callback);
|
||||
}
|
||||
}
|
||||
exports.RouterClient = RouterClient;
|
||||
//# sourceMappingURL=router.grpc-client.js.map
|
||||
1
build/proto/lnd/router.grpc-client.js.map
Normal file
1
build/proto/lnd/router.grpc-client.js.map
Normal file
File diff suppressed because one or more lines are too long
2309
build/proto/lnd/router.js
Normal file
2309
build/proto/lnd/router.js
Normal file
File diff suppressed because it is too large
Load diff
1
build/proto/lnd/router.js.map
Normal file
1
build/proto/lnd/router.js.map
Normal file
File diff suppressed because one or more lines are too long
803
build/proto/lnd/rpc.client.js
Normal file
803
build/proto/lnd/rpc.client.js
Normal file
|
|
@ -0,0 +1,803 @@
|
|||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.LightningClient = void 0;
|
||||
const rpc_1 = require("./rpc");
|
||||
const runtime_rpc_1 = require("@protobuf-ts/runtime-rpc");
|
||||
//
|
||||
// Comments in this file will be directly parsed into the API
|
||||
// Documentation as descriptions of the associated method, message, or field.
|
||||
// These descriptions should go right above the definition of the object, and
|
||||
// can be in either block or // comment format.
|
||||
//
|
||||
// An RPC method can be matched to an lncli command by placing a line in the
|
||||
// beginning of the description in exactly the following format:
|
||||
// lncli: `methodname`
|
||||
//
|
||||
// Failure to specify the exact name of the command will cause documentation
|
||||
// generation to fail.
|
||||
//
|
||||
// More information on how exactly the gRPC documentation is generated from
|
||||
// this proto file can be found here:
|
||||
// https://github.com/lightninglabs/lightning-api
|
||||
/**
|
||||
* Lightning is the main RPC server of the daemon.
|
||||
*
|
||||
* @generated from protobuf service lnrpc.Lightning
|
||||
*/
|
||||
class LightningClient {
|
||||
constructor(_transport) {
|
||||
this._transport = _transport;
|
||||
this.typeName = rpc_1.Lightning.typeName;
|
||||
this.methods = rpc_1.Lightning.methods;
|
||||
this.options = rpc_1.Lightning.options;
|
||||
}
|
||||
/**
|
||||
* lncli: `walletbalance`
|
||||
* WalletBalance returns total unspent outputs(confirmed and unconfirmed), all
|
||||
* confirmed unspent outputs and all unconfirmed unspent outputs under control
|
||||
* of the wallet.
|
||||
*
|
||||
* @generated from protobuf rpc: WalletBalance(lnrpc.WalletBalanceRequest) returns (lnrpc.WalletBalanceResponse);
|
||||
*/
|
||||
walletBalance(input, options) {
|
||||
const method = this.methods[0], opt = this._transport.mergeOptions(options);
|
||||
return (0, runtime_rpc_1.stackIntercept)("unary", this._transport, method, opt, input);
|
||||
}
|
||||
/**
|
||||
* lncli: `channelbalance`
|
||||
* ChannelBalance returns a report on the total funds across all open channels,
|
||||
* categorized in local/remote, pending local/remote and unsettled local/remote
|
||||
* balances.
|
||||
*
|
||||
* @generated from protobuf rpc: ChannelBalance(lnrpc.ChannelBalanceRequest) returns (lnrpc.ChannelBalanceResponse);
|
||||
*/
|
||||
channelBalance(input, options) {
|
||||
const method = this.methods[1], opt = this._transport.mergeOptions(options);
|
||||
return (0, runtime_rpc_1.stackIntercept)("unary", this._transport, method, opt, input);
|
||||
}
|
||||
/**
|
||||
* lncli: `listchaintxns`
|
||||
* GetTransactions returns a list describing all the known transactions
|
||||
* relevant to the wallet.
|
||||
*
|
||||
* @generated from protobuf rpc: GetTransactions(lnrpc.GetTransactionsRequest) returns (lnrpc.TransactionDetails);
|
||||
*/
|
||||
getTransactions(input, options) {
|
||||
const method = this.methods[2], opt = this._transport.mergeOptions(options);
|
||||
return (0, runtime_rpc_1.stackIntercept)("unary", this._transport, method, opt, input);
|
||||
}
|
||||
/**
|
||||
* lncli: `estimatefee`
|
||||
* EstimateFee asks the chain backend to estimate the fee rate and total fees
|
||||
* for a transaction that pays to multiple specified outputs.
|
||||
*
|
||||
* When using REST, the `AddrToAmount` map type can be set by appending
|
||||
* `&AddrToAmount[<address>]=<amount_to_send>` to the URL. Unfortunately this
|
||||
* map type doesn't appear in the REST API documentation because of a bug in
|
||||
* the grpc-gateway library.
|
||||
*
|
||||
* @generated from protobuf rpc: EstimateFee(lnrpc.EstimateFeeRequest) returns (lnrpc.EstimateFeeResponse);
|
||||
*/
|
||||
estimateFee(input, options) {
|
||||
const method = this.methods[3], opt = this._transport.mergeOptions(options);
|
||||
return (0, runtime_rpc_1.stackIntercept)("unary", this._transport, method, opt, input);
|
||||
}
|
||||
/**
|
||||
* lncli: `sendcoins`
|
||||
* SendCoins executes a request to send coins to a particular address. Unlike
|
||||
* SendMany, this RPC call only allows creating a single output at a time. If
|
||||
* neither target_conf, or sat_per_vbyte are set, then the internal wallet will
|
||||
* consult its fee model to determine a fee for the default confirmation
|
||||
* target.
|
||||
*
|
||||
* @generated from protobuf rpc: SendCoins(lnrpc.SendCoinsRequest) returns (lnrpc.SendCoinsResponse);
|
||||
*/
|
||||
sendCoins(input, options) {
|
||||
const method = this.methods[4], opt = this._transport.mergeOptions(options);
|
||||
return (0, runtime_rpc_1.stackIntercept)("unary", this._transport, method, opt, input);
|
||||
}
|
||||
/**
|
||||
* lncli: `listunspent`
|
||||
* Deprecated, use walletrpc.ListUnspent instead.
|
||||
*
|
||||
* ListUnspent returns a list of all utxos spendable by the wallet with a
|
||||
* number of confirmations between the specified minimum and maximum.
|
||||
*
|
||||
* @generated from protobuf rpc: ListUnspent(lnrpc.ListUnspentRequest) returns (lnrpc.ListUnspentResponse);
|
||||
*/
|
||||
listUnspent(input, options) {
|
||||
const method = this.methods[5], opt = this._transport.mergeOptions(options);
|
||||
return (0, runtime_rpc_1.stackIntercept)("unary", this._transport, method, opt, input);
|
||||
}
|
||||
/**
|
||||
*
|
||||
* SubscribeTransactions creates a uni-directional stream from the server to
|
||||
* the client in which any newly discovered transactions relevant to the
|
||||
* wallet are sent over.
|
||||
*
|
||||
* @generated from protobuf rpc: SubscribeTransactions(lnrpc.GetTransactionsRequest) returns (stream lnrpc.Transaction);
|
||||
*/
|
||||
subscribeTransactions(input, options) {
|
||||
const method = this.methods[6], opt = this._transport.mergeOptions(options);
|
||||
return (0, runtime_rpc_1.stackIntercept)("serverStreaming", this._transport, method, opt, input);
|
||||
}
|
||||
/**
|
||||
* lncli: `sendmany`
|
||||
* SendMany handles a request for a transaction that creates multiple specified
|
||||
* outputs in parallel. If neither target_conf, or sat_per_vbyte are set, then
|
||||
* the internal wallet will consult its fee model to determine a fee for the
|
||||
* default confirmation target.
|
||||
*
|
||||
* @generated from protobuf rpc: SendMany(lnrpc.SendManyRequest) returns (lnrpc.SendManyResponse);
|
||||
*/
|
||||
sendMany(input, options) {
|
||||
const method = this.methods[7], opt = this._transport.mergeOptions(options);
|
||||
return (0, runtime_rpc_1.stackIntercept)("unary", this._transport, method, opt, input);
|
||||
}
|
||||
/**
|
||||
* lncli: `newaddress`
|
||||
* NewAddress creates a new address under control of the local wallet.
|
||||
*
|
||||
* @generated from protobuf rpc: NewAddress(lnrpc.NewAddressRequest) returns (lnrpc.NewAddressResponse);
|
||||
*/
|
||||
newAddress(input, options) {
|
||||
const method = this.methods[8], opt = this._transport.mergeOptions(options);
|
||||
return (0, runtime_rpc_1.stackIntercept)("unary", this._transport, method, opt, input);
|
||||
}
|
||||
/**
|
||||
* lncli: `signmessage`
|
||||
* SignMessage signs a message with this node's private key. The returned
|
||||
* signature string is `zbase32` encoded and pubkey recoverable, meaning that
|
||||
* only the message digest and signature are needed for verification.
|
||||
*
|
||||
* @generated from protobuf rpc: SignMessage(lnrpc.SignMessageRequest) returns (lnrpc.SignMessageResponse);
|
||||
*/
|
||||
signMessage(input, options) {
|
||||
const method = this.methods[9], opt = this._transport.mergeOptions(options);
|
||||
return (0, runtime_rpc_1.stackIntercept)("unary", this._transport, method, opt, input);
|
||||
}
|
||||
/**
|
||||
* lncli: `verifymessage`
|
||||
* VerifyMessage verifies a signature over a msg. The signature must be
|
||||
* zbase32 encoded and signed by an active node in the resident node's
|
||||
* channel database. In addition to returning the validity of the signature,
|
||||
* VerifyMessage also returns the recovered pubkey from the signature.
|
||||
*
|
||||
* @generated from protobuf rpc: VerifyMessage(lnrpc.VerifyMessageRequest) returns (lnrpc.VerifyMessageResponse);
|
||||
*/
|
||||
verifyMessage(input, options) {
|
||||
const method = this.methods[10], opt = this._transport.mergeOptions(options);
|
||||
return (0, runtime_rpc_1.stackIntercept)("unary", this._transport, method, opt, input);
|
||||
}
|
||||
/**
|
||||
* lncli: `connect`
|
||||
* ConnectPeer attempts to establish a connection to a remote peer. This is at
|
||||
* the networking level, and is used for communication between nodes. This is
|
||||
* distinct from establishing a channel with a peer.
|
||||
*
|
||||
* @generated from protobuf rpc: ConnectPeer(lnrpc.ConnectPeerRequest) returns (lnrpc.ConnectPeerResponse);
|
||||
*/
|
||||
connectPeer(input, options) {
|
||||
const method = this.methods[11], opt = this._transport.mergeOptions(options);
|
||||
return (0, runtime_rpc_1.stackIntercept)("unary", this._transport, method, opt, input);
|
||||
}
|
||||
/**
|
||||
* lncli: `disconnect`
|
||||
* DisconnectPeer attempts to disconnect one peer from another identified by a
|
||||
* given pubKey. In the case that we currently have a pending or active channel
|
||||
* with the target peer, then this action will be not be allowed.
|
||||
*
|
||||
* @generated from protobuf rpc: DisconnectPeer(lnrpc.DisconnectPeerRequest) returns (lnrpc.DisconnectPeerResponse);
|
||||
*/
|
||||
disconnectPeer(input, options) {
|
||||
const method = this.methods[12], opt = this._transport.mergeOptions(options);
|
||||
return (0, runtime_rpc_1.stackIntercept)("unary", this._transport, method, opt, input);
|
||||
}
|
||||
/**
|
||||
* lncli: `listpeers`
|
||||
* ListPeers returns a verbose listing of all currently active peers.
|
||||
*
|
||||
* @generated from protobuf rpc: ListPeers(lnrpc.ListPeersRequest) returns (lnrpc.ListPeersResponse);
|
||||
*/
|
||||
listPeers(input, options) {
|
||||
const method = this.methods[13], opt = this._transport.mergeOptions(options);
|
||||
return (0, runtime_rpc_1.stackIntercept)("unary", this._transport, method, opt, input);
|
||||
}
|
||||
/**
|
||||
*
|
||||
* SubscribePeerEvents creates a uni-directional stream from the server to
|
||||
* the client in which any events relevant to the state of peers are sent
|
||||
* over. Events include peers going online and offline.
|
||||
*
|
||||
* @generated from protobuf rpc: SubscribePeerEvents(lnrpc.PeerEventSubscription) returns (stream lnrpc.PeerEvent);
|
||||
*/
|
||||
subscribePeerEvents(input, options) {
|
||||
const method = this.methods[14], opt = this._transport.mergeOptions(options);
|
||||
return (0, runtime_rpc_1.stackIntercept)("serverStreaming", this._transport, method, opt, input);
|
||||
}
|
||||
/**
|
||||
* lncli: `getinfo`
|
||||
* GetInfo returns general information concerning the lightning node including
|
||||
* it's identity pubkey, alias, the chains it is connected to, and information
|
||||
* concerning the number of open+pending channels.
|
||||
*
|
||||
* @generated from protobuf rpc: GetInfo(lnrpc.GetInfoRequest) returns (lnrpc.GetInfoResponse);
|
||||
*/
|
||||
getInfo(input, options) {
|
||||
const method = this.methods[15], opt = this._transport.mergeOptions(options);
|
||||
return (0, runtime_rpc_1.stackIntercept)("unary", this._transport, method, opt, input);
|
||||
}
|
||||
/**
|
||||
* * lncli: `getrecoveryinfo`
|
||||
* GetRecoveryInfo returns information concerning the recovery mode including
|
||||
* whether it's in a recovery mode, whether the recovery is finished, and the
|
||||
* progress made so far.
|
||||
*
|
||||
* @generated from protobuf rpc: GetRecoveryInfo(lnrpc.GetRecoveryInfoRequest) returns (lnrpc.GetRecoveryInfoResponse);
|
||||
*/
|
||||
getRecoveryInfo(input, options) {
|
||||
const method = this.methods[16], opt = this._transport.mergeOptions(options);
|
||||
return (0, runtime_rpc_1.stackIntercept)("unary", this._transport, method, opt, input);
|
||||
}
|
||||
// TODO(roasbeef): merge with below with bool?
|
||||
/**
|
||||
* lncli: `pendingchannels`
|
||||
* PendingChannels returns a list of all the channels that are currently
|
||||
* considered "pending". A channel is pending if it has finished the funding
|
||||
* workflow and is waiting for confirmations for the funding txn, or is in the
|
||||
* process of closure, either initiated cooperatively or non-cooperatively.
|
||||
*
|
||||
* @generated from protobuf rpc: PendingChannels(lnrpc.PendingChannelsRequest) returns (lnrpc.PendingChannelsResponse);
|
||||
*/
|
||||
pendingChannels(input, options) {
|
||||
const method = this.methods[17], opt = this._transport.mergeOptions(options);
|
||||
return (0, runtime_rpc_1.stackIntercept)("unary", this._transport, method, opt, input);
|
||||
}
|
||||
/**
|
||||
* lncli: `listchannels`
|
||||
* ListChannels returns a description of all the open channels that this node
|
||||
* is a participant in.
|
||||
*
|
||||
* @generated from protobuf rpc: ListChannels(lnrpc.ListChannelsRequest) returns (lnrpc.ListChannelsResponse);
|
||||
*/
|
||||
listChannels(input, options) {
|
||||
const method = this.methods[18], opt = this._transport.mergeOptions(options);
|
||||
return (0, runtime_rpc_1.stackIntercept)("unary", this._transport, method, opt, input);
|
||||
}
|
||||
/**
|
||||
*
|
||||
* SubscribeChannelEvents creates a uni-directional stream from the server to
|
||||
* the client in which any updates relevant to the state of the channels are
|
||||
* sent over. Events include new active channels, inactive channels, and closed
|
||||
* channels.
|
||||
*
|
||||
* @generated from protobuf rpc: SubscribeChannelEvents(lnrpc.ChannelEventSubscription) returns (stream lnrpc.ChannelEventUpdate);
|
||||
*/
|
||||
subscribeChannelEvents(input, options) {
|
||||
const method = this.methods[19], opt = this._transport.mergeOptions(options);
|
||||
return (0, runtime_rpc_1.stackIntercept)("serverStreaming", this._transport, method, opt, input);
|
||||
}
|
||||
/**
|
||||
* lncli: `closedchannels`
|
||||
* ClosedChannels returns a description of all the closed channels that
|
||||
* this node was a participant in.
|
||||
*
|
||||
* @generated from protobuf rpc: ClosedChannels(lnrpc.ClosedChannelsRequest) returns (lnrpc.ClosedChannelsResponse);
|
||||
*/
|
||||
closedChannels(input, options) {
|
||||
const method = this.methods[20], opt = this._transport.mergeOptions(options);
|
||||
return (0, runtime_rpc_1.stackIntercept)("unary", this._transport, method, opt, input);
|
||||
}
|
||||
/**
|
||||
*
|
||||
* OpenChannelSync is a synchronous version of the OpenChannel RPC call. This
|
||||
* call is meant to be consumed by clients to the REST proxy. As with all
|
||||
* other sync calls, all byte slices are intended to be populated as hex
|
||||
* encoded strings.
|
||||
*
|
||||
* @generated from protobuf rpc: OpenChannelSync(lnrpc.OpenChannelRequest) returns (lnrpc.ChannelPoint);
|
||||
*/
|
||||
openChannelSync(input, options) {
|
||||
const method = this.methods[21], opt = this._transport.mergeOptions(options);
|
||||
return (0, runtime_rpc_1.stackIntercept)("unary", this._transport, method, opt, input);
|
||||
}
|
||||
/**
|
||||
* lncli: `openchannel`
|
||||
* OpenChannel attempts to open a singly funded channel specified in the
|
||||
* request to a remote peer. Users are able to specify a target number of
|
||||
* blocks that the funding transaction should be confirmed in, or a manual fee
|
||||
* rate to us for the funding transaction. If neither are specified, then a
|
||||
* lax block confirmation target is used. Each OpenStatusUpdate will return
|
||||
* the pending channel ID of the in-progress channel. Depending on the
|
||||
* arguments specified in the OpenChannelRequest, this pending channel ID can
|
||||
* then be used to manually progress the channel funding flow.
|
||||
*
|
||||
* @generated from protobuf rpc: OpenChannel(lnrpc.OpenChannelRequest) returns (stream lnrpc.OpenStatusUpdate);
|
||||
*/
|
||||
openChannel(input, options) {
|
||||
const method = this.methods[22], opt = this._transport.mergeOptions(options);
|
||||
return (0, runtime_rpc_1.stackIntercept)("serverStreaming", this._transport, method, opt, input);
|
||||
}
|
||||
/**
|
||||
*
|
||||
* FundingStateStep is an advanced funding related call that allows the caller
|
||||
* to either execute some preparatory steps for a funding workflow, or
|
||||
* manually progress a funding workflow. The primary way a funding flow is
|
||||
* identified is via its pending channel ID. As an example, this method can be
|
||||
* used to specify that we're expecting a funding flow for a particular
|
||||
* pending channel ID, for which we need to use specific parameters.
|
||||
* Alternatively, this can be used to interactively drive PSBT signing for
|
||||
* funding for partially complete funding transactions.
|
||||
*
|
||||
* @generated from protobuf rpc: FundingStateStep(lnrpc.FundingTransitionMsg) returns (lnrpc.FundingStateStepResp);
|
||||
*/
|
||||
fundingStateStep(input, options) {
|
||||
const method = this.methods[23], opt = this._transport.mergeOptions(options);
|
||||
return (0, runtime_rpc_1.stackIntercept)("unary", this._transport, method, opt, input);
|
||||
}
|
||||
/**
|
||||
*
|
||||
* ChannelAcceptor dispatches a bi-directional streaming RPC in which
|
||||
* OpenChannel requests are sent to the client and the client responds with
|
||||
* a boolean that tells LND whether or not to accept the channel. This allows
|
||||
* node operators to specify their own criteria for accepting inbound channels
|
||||
* through a single persistent connection.
|
||||
*
|
||||
* @generated from protobuf rpc: ChannelAcceptor(stream lnrpc.ChannelAcceptResponse) returns (stream lnrpc.ChannelAcceptRequest);
|
||||
*/
|
||||
channelAcceptor(options) {
|
||||
const method = this.methods[24], opt = this._transport.mergeOptions(options);
|
||||
return (0, runtime_rpc_1.stackIntercept)("duplex", this._transport, method, opt);
|
||||
}
|
||||
/**
|
||||
* lncli: `closechannel`
|
||||
* CloseChannel attempts to close an active channel identified by its channel
|
||||
* outpoint (ChannelPoint). The actions of this method can additionally be
|
||||
* augmented to attempt a force close after a timeout period in the case of an
|
||||
* inactive peer. If a non-force close (cooperative closure) is requested,
|
||||
* then the user can specify either a target number of blocks until the
|
||||
* closure transaction is confirmed, or a manual fee rate. If neither are
|
||||
* specified, then a default lax, block confirmation target is used.
|
||||
*
|
||||
* @generated from protobuf rpc: CloseChannel(lnrpc.CloseChannelRequest) returns (stream lnrpc.CloseStatusUpdate);
|
||||
*/
|
||||
closeChannel(input, options) {
|
||||
const method = this.methods[25], opt = this._transport.mergeOptions(options);
|
||||
return (0, runtime_rpc_1.stackIntercept)("serverStreaming", this._transport, method, opt, input);
|
||||
}
|
||||
/**
|
||||
* lncli: `abandonchannel`
|
||||
* AbandonChannel removes all channel state from the database except for a
|
||||
* close summary. This method can be used to get rid of permanently unusable
|
||||
* channels due to bugs fixed in newer versions of lnd. This method can also be
|
||||
* used to remove externally funded channels where the funding transaction was
|
||||
* never broadcast. Only available for non-externally funded channels in dev
|
||||
* build.
|
||||
*
|
||||
* @generated from protobuf rpc: AbandonChannel(lnrpc.AbandonChannelRequest) returns (lnrpc.AbandonChannelResponse);
|
||||
*/
|
||||
abandonChannel(input, options) {
|
||||
const method = this.methods[26], opt = this._transport.mergeOptions(options);
|
||||
return (0, runtime_rpc_1.stackIntercept)("unary", this._transport, method, opt, input);
|
||||
}
|
||||
/**
|
||||
* lncli: `sendpayment`
|
||||
* Deprecated, use routerrpc.SendPaymentV2. SendPayment dispatches a
|
||||
* bi-directional streaming RPC for sending payments through the Lightning
|
||||
* Network. A single RPC invocation creates a persistent bi-directional
|
||||
* stream allowing clients to rapidly send payments through the Lightning
|
||||
* Network with a single persistent connection.
|
||||
*
|
||||
* @deprecated
|
||||
* @generated from protobuf rpc: SendPayment(stream lnrpc.SendRequest) returns (stream lnrpc.SendResponse);
|
||||
*/
|
||||
sendPayment(options) {
|
||||
const method = this.methods[27], opt = this._transport.mergeOptions(options);
|
||||
return (0, runtime_rpc_1.stackIntercept)("duplex", this._transport, method, opt);
|
||||
}
|
||||
/**
|
||||
*
|
||||
* SendPaymentSync is the synchronous non-streaming version of SendPayment.
|
||||
* This RPC is intended to be consumed by clients of the REST proxy.
|
||||
* Additionally, this RPC expects the destination's public key and the payment
|
||||
* hash (if any) to be encoded as hex strings.
|
||||
*
|
||||
* @generated from protobuf rpc: SendPaymentSync(lnrpc.SendRequest) returns (lnrpc.SendResponse);
|
||||
*/
|
||||
sendPaymentSync(input, options) {
|
||||
const method = this.methods[28], opt = this._transport.mergeOptions(options);
|
||||
return (0, runtime_rpc_1.stackIntercept)("unary", this._transport, method, opt, input);
|
||||
}
|
||||
/**
|
||||
* lncli: `sendtoroute`
|
||||
* Deprecated, use routerrpc.SendToRouteV2. SendToRoute is a bi-directional
|
||||
* streaming RPC for sending payment through the Lightning Network. This
|
||||
* method differs from SendPayment in that it allows users to specify a full
|
||||
* route manually. This can be used for things like rebalancing, and atomic
|
||||
* swaps.
|
||||
*
|
||||
* @deprecated
|
||||
* @generated from protobuf rpc: SendToRoute(stream lnrpc.SendToRouteRequest) returns (stream lnrpc.SendResponse);
|
||||
*/
|
||||
sendToRoute(options) {
|
||||
const method = this.methods[29], opt = this._transport.mergeOptions(options);
|
||||
return (0, runtime_rpc_1.stackIntercept)("duplex", this._transport, method, opt);
|
||||
}
|
||||
/**
|
||||
*
|
||||
* SendToRouteSync is a synchronous version of SendToRoute. It Will block
|
||||
* until the payment either fails or succeeds.
|
||||
*
|
||||
* @generated from protobuf rpc: SendToRouteSync(lnrpc.SendToRouteRequest) returns (lnrpc.SendResponse);
|
||||
*/
|
||||
sendToRouteSync(input, options) {
|
||||
const method = this.methods[30], opt = this._transport.mergeOptions(options);
|
||||
return (0, runtime_rpc_1.stackIntercept)("unary", this._transport, method, opt, input);
|
||||
}
|
||||
/**
|
||||
* lncli: `addinvoice`
|
||||
* AddInvoice attempts to add a new invoice to the invoice database. Any
|
||||
* duplicated invoices are rejected, therefore all invoices *must* have a
|
||||
* unique payment preimage.
|
||||
*
|
||||
* @generated from protobuf rpc: AddInvoice(lnrpc.Invoice) returns (lnrpc.AddInvoiceResponse);
|
||||
*/
|
||||
addInvoice(input, options) {
|
||||
const method = this.methods[31], opt = this._transport.mergeOptions(options);
|
||||
return (0, runtime_rpc_1.stackIntercept)("unary", this._transport, method, opt, input);
|
||||
}
|
||||
/**
|
||||
* lncli: `listinvoices`
|
||||
* ListInvoices returns a list of all the invoices currently stored within the
|
||||
* database. Any active debug invoices are ignored. It has full support for
|
||||
* paginated responses, allowing users to query for specific invoices through
|
||||
* their add_index. This can be done by using either the first_index_offset or
|
||||
* last_index_offset fields included in the response as the index_offset of the
|
||||
* next request. By default, the first 100 invoices created will be returned.
|
||||
* Backwards pagination is also supported through the Reversed flag.
|
||||
*
|
||||
* @generated from protobuf rpc: ListInvoices(lnrpc.ListInvoiceRequest) returns (lnrpc.ListInvoiceResponse);
|
||||
*/
|
||||
listInvoices(input, options) {
|
||||
const method = this.methods[32], opt = this._transport.mergeOptions(options);
|
||||
return (0, runtime_rpc_1.stackIntercept)("unary", this._transport, method, opt, input);
|
||||
}
|
||||
/**
|
||||
* lncli: `lookupinvoice`
|
||||
* LookupInvoice attempts to look up an invoice according to its payment hash.
|
||||
* The passed payment hash *must* be exactly 32 bytes, if not, an error is
|
||||
* returned.
|
||||
*
|
||||
* @generated from protobuf rpc: LookupInvoice(lnrpc.PaymentHash) returns (lnrpc.Invoice);
|
||||
*/
|
||||
lookupInvoice(input, options) {
|
||||
const method = this.methods[33], opt = this._transport.mergeOptions(options);
|
||||
return (0, runtime_rpc_1.stackIntercept)("unary", this._transport, method, opt, input);
|
||||
}
|
||||
/**
|
||||
*
|
||||
* SubscribeInvoices returns a uni-directional stream (server -> client) for
|
||||
* notifying the client of newly added/settled invoices. The caller can
|
||||
* optionally specify the add_index and/or the settle_index. If the add_index
|
||||
* is specified, then we'll first start by sending add invoice events for all
|
||||
* invoices with an add_index greater than the specified value. If the
|
||||
* settle_index is specified, the next, we'll send out all settle events for
|
||||
* invoices with a settle_index greater than the specified value. One or both
|
||||
* of these fields can be set. If no fields are set, then we'll only send out
|
||||
* the latest add/settle events.
|
||||
*
|
||||
* @generated from protobuf rpc: SubscribeInvoices(lnrpc.InvoiceSubscription) returns (stream lnrpc.Invoice);
|
||||
*/
|
||||
subscribeInvoices(input, options) {
|
||||
const method = this.methods[34], opt = this._transport.mergeOptions(options);
|
||||
return (0, runtime_rpc_1.stackIntercept)("serverStreaming", this._transport, method, opt, input);
|
||||
}
|
||||
/**
|
||||
* lncli: `decodepayreq`
|
||||
* DecodePayReq takes an encoded payment request string and attempts to decode
|
||||
* it, returning a full description of the conditions encoded within the
|
||||
* payment request.
|
||||
*
|
||||
* @generated from protobuf rpc: DecodePayReq(lnrpc.PayReqString) returns (lnrpc.PayReq);
|
||||
*/
|
||||
decodePayReq(input, options) {
|
||||
const method = this.methods[35], opt = this._transport.mergeOptions(options);
|
||||
return (0, runtime_rpc_1.stackIntercept)("unary", this._transport, method, opt, input);
|
||||
}
|
||||
/**
|
||||
* lncli: `listpayments`
|
||||
* ListPayments returns a list of all outgoing payments.
|
||||
*
|
||||
* @generated from protobuf rpc: ListPayments(lnrpc.ListPaymentsRequest) returns (lnrpc.ListPaymentsResponse);
|
||||
*/
|
||||
listPayments(input, options) {
|
||||
const method = this.methods[36], opt = this._transport.mergeOptions(options);
|
||||
return (0, runtime_rpc_1.stackIntercept)("unary", this._transport, method, opt, input);
|
||||
}
|
||||
/**
|
||||
*
|
||||
* DeleteAllPayments deletes all outgoing payments from DB.
|
||||
*
|
||||
* @generated from protobuf rpc: DeleteAllPayments(lnrpc.DeleteAllPaymentsRequest) returns (lnrpc.DeleteAllPaymentsResponse);
|
||||
*/
|
||||
deleteAllPayments(input, options) {
|
||||
const method = this.methods[37], opt = this._transport.mergeOptions(options);
|
||||
return (0, runtime_rpc_1.stackIntercept)("unary", this._transport, method, opt, input);
|
||||
}
|
||||
/**
|
||||
* lncli: `describegraph`
|
||||
* DescribeGraph returns a description of the latest graph state from the
|
||||
* point of view of the node. The graph information is partitioned into two
|
||||
* components: all the nodes/vertexes, and all the edges that connect the
|
||||
* vertexes themselves. As this is a directed graph, the edges also contain
|
||||
* the node directional specific routing policy which includes: the time lock
|
||||
* delta, fee information, etc.
|
||||
*
|
||||
* @generated from protobuf rpc: DescribeGraph(lnrpc.ChannelGraphRequest) returns (lnrpc.ChannelGraph);
|
||||
*/
|
||||
describeGraph(input, options) {
|
||||
const method = this.methods[38], opt = this._transport.mergeOptions(options);
|
||||
return (0, runtime_rpc_1.stackIntercept)("unary", this._transport, method, opt, input);
|
||||
}
|
||||
/**
|
||||
* lncli: `getnodemetrics`
|
||||
* GetNodeMetrics returns node metrics calculated from the graph. Currently
|
||||
* the only supported metric is betweenness centrality of individual nodes.
|
||||
*
|
||||
* @generated from protobuf rpc: GetNodeMetrics(lnrpc.NodeMetricsRequest) returns (lnrpc.NodeMetricsResponse);
|
||||
*/
|
||||
getNodeMetrics(input, options) {
|
||||
const method = this.methods[39], opt = this._transport.mergeOptions(options);
|
||||
return (0, runtime_rpc_1.stackIntercept)("unary", this._transport, method, opt, input);
|
||||
}
|
||||
/**
|
||||
* lncli: `getchaninfo`
|
||||
* GetChanInfo returns the latest authenticated network announcement for the
|
||||
* given channel identified by its channel ID: an 8-byte integer which
|
||||
* uniquely identifies the location of transaction's funding output within the
|
||||
* blockchain.
|
||||
*
|
||||
* @generated from protobuf rpc: GetChanInfo(lnrpc.ChanInfoRequest) returns (lnrpc.ChannelEdge);
|
||||
*/
|
||||
getChanInfo(input, options) {
|
||||
const method = this.methods[40], opt = this._transport.mergeOptions(options);
|
||||
return (0, runtime_rpc_1.stackIntercept)("unary", this._transport, method, opt, input);
|
||||
}
|
||||
/**
|
||||
* lncli: `getnodeinfo`
|
||||
* GetNodeInfo returns the latest advertised, aggregated, and authenticated
|
||||
* channel information for the specified node identified by its public key.
|
||||
*
|
||||
* @generated from protobuf rpc: GetNodeInfo(lnrpc.NodeInfoRequest) returns (lnrpc.NodeInfo);
|
||||
*/
|
||||
getNodeInfo(input, options) {
|
||||
const method = this.methods[41], opt = this._transport.mergeOptions(options);
|
||||
return (0, runtime_rpc_1.stackIntercept)("unary", this._transport, method, opt, input);
|
||||
}
|
||||
/**
|
||||
* lncli: `queryroutes`
|
||||
* QueryRoutes attempts to query the daemon's Channel Router for a possible
|
||||
* route to a target destination capable of carrying a specific amount of
|
||||
* satoshis. The returned route contains the full details required to craft and
|
||||
* send an HTLC, also including the necessary information that should be
|
||||
* present within the Sphinx packet encapsulated within the HTLC.
|
||||
*
|
||||
* When using REST, the `dest_custom_records` map type can be set by appending
|
||||
* `&dest_custom_records[<record_number>]=<record_data_base64_url_encoded>`
|
||||
* to the URL. Unfortunately this map type doesn't appear in the REST API
|
||||
* documentation because of a bug in the grpc-gateway library.
|
||||
*
|
||||
* @generated from protobuf rpc: QueryRoutes(lnrpc.QueryRoutesRequest) returns (lnrpc.QueryRoutesResponse);
|
||||
*/
|
||||
queryRoutes(input, options) {
|
||||
const method = this.methods[42], opt = this._transport.mergeOptions(options);
|
||||
return (0, runtime_rpc_1.stackIntercept)("unary", this._transport, method, opt, input);
|
||||
}
|
||||
/**
|
||||
* lncli: `getnetworkinfo`
|
||||
* GetNetworkInfo returns some basic stats about the known channel graph from
|
||||
* the point of view of the node.
|
||||
*
|
||||
* @generated from protobuf rpc: GetNetworkInfo(lnrpc.NetworkInfoRequest) returns (lnrpc.NetworkInfo);
|
||||
*/
|
||||
getNetworkInfo(input, options) {
|
||||
const method = this.methods[43], opt = this._transport.mergeOptions(options);
|
||||
return (0, runtime_rpc_1.stackIntercept)("unary", this._transport, method, opt, input);
|
||||
}
|
||||
/**
|
||||
* lncli: `stop`
|
||||
* StopDaemon will send a shutdown request to the interrupt handler, triggering
|
||||
* a graceful shutdown of the daemon.
|
||||
*
|
||||
* @generated from protobuf rpc: StopDaemon(lnrpc.StopRequest) returns (lnrpc.StopResponse);
|
||||
*/
|
||||
stopDaemon(input, options) {
|
||||
const method = this.methods[44], opt = this._transport.mergeOptions(options);
|
||||
return (0, runtime_rpc_1.stackIntercept)("unary", this._transport, method, opt, input);
|
||||
}
|
||||
/**
|
||||
*
|
||||
* SubscribeChannelGraph launches a streaming RPC that allows the caller to
|
||||
* receive notifications upon any changes to the channel graph topology from
|
||||
* the point of view of the responding node. Events notified include: new
|
||||
* nodes coming online, nodes updating their authenticated attributes, new
|
||||
* channels being advertised, updates in the routing policy for a directional
|
||||
* channel edge, and when channels are closed on-chain.
|
||||
*
|
||||
* @generated from protobuf rpc: SubscribeChannelGraph(lnrpc.GraphTopologySubscription) returns (stream lnrpc.GraphTopologyUpdate);
|
||||
*/
|
||||
subscribeChannelGraph(input, options) {
|
||||
const method = this.methods[45], opt = this._transport.mergeOptions(options);
|
||||
return (0, runtime_rpc_1.stackIntercept)("serverStreaming", this._transport, method, opt, input);
|
||||
}
|
||||
/**
|
||||
* lncli: `debuglevel`
|
||||
* DebugLevel allows a caller to programmatically set the logging verbosity of
|
||||
* lnd. The logging can be targeted according to a coarse daemon-wide logging
|
||||
* level, or in a granular fashion to specify the logging for a target
|
||||
* sub-system.
|
||||
*
|
||||
* @generated from protobuf rpc: DebugLevel(lnrpc.DebugLevelRequest) returns (lnrpc.DebugLevelResponse);
|
||||
*/
|
||||
debugLevel(input, options) {
|
||||
const method = this.methods[46], opt = this._transport.mergeOptions(options);
|
||||
return (0, runtime_rpc_1.stackIntercept)("unary", this._transport, method, opt, input);
|
||||
}
|
||||
/**
|
||||
* lncli: `feereport`
|
||||
* FeeReport allows the caller to obtain a report detailing the current fee
|
||||
* schedule enforced by the node globally for each channel.
|
||||
*
|
||||
* @generated from protobuf rpc: FeeReport(lnrpc.FeeReportRequest) returns (lnrpc.FeeReportResponse);
|
||||
*/
|
||||
feeReport(input, options) {
|
||||
const method = this.methods[47], opt = this._transport.mergeOptions(options);
|
||||
return (0, runtime_rpc_1.stackIntercept)("unary", this._transport, method, opt, input);
|
||||
}
|
||||
/**
|
||||
* lncli: `updatechanpolicy`
|
||||
* UpdateChannelPolicy allows the caller to update the fee schedule and
|
||||
* channel policies for all channels globally, or a particular channel.
|
||||
*
|
||||
* @generated from protobuf rpc: UpdateChannelPolicy(lnrpc.PolicyUpdateRequest) returns (lnrpc.PolicyUpdateResponse);
|
||||
*/
|
||||
updateChannelPolicy(input, options) {
|
||||
const method = this.methods[48], opt = this._transport.mergeOptions(options);
|
||||
return (0, runtime_rpc_1.stackIntercept)("unary", this._transport, method, opt, input);
|
||||
}
|
||||
/**
|
||||
* lncli: `fwdinghistory`
|
||||
* ForwardingHistory allows the caller to query the htlcswitch for a record of
|
||||
* all HTLCs forwarded within the target time range, and integer offset
|
||||
* within that time range. If no time-range is specified, then the first chunk
|
||||
* of the past 24 hrs of forwarding history are returned.
|
||||
*
|
||||
* A list of forwarding events are returned. The size of each forwarding event
|
||||
* is 40 bytes, and the max message size able to be returned in gRPC is 4 MiB.
|
||||
* As a result each message can only contain 50k entries. Each response has
|
||||
* the index offset of the last entry. The index offset can be provided to the
|
||||
* request to allow the caller to skip a series of records.
|
||||
*
|
||||
* @generated from protobuf rpc: ForwardingHistory(lnrpc.ForwardingHistoryRequest) returns (lnrpc.ForwardingHistoryResponse);
|
||||
*/
|
||||
forwardingHistory(input, options) {
|
||||
const method = this.methods[49], opt = this._transport.mergeOptions(options);
|
||||
return (0, runtime_rpc_1.stackIntercept)("unary", this._transport, method, opt, input);
|
||||
}
|
||||
/**
|
||||
* lncli: `exportchanbackup`
|
||||
* ExportChannelBackup attempts to return an encrypted static channel backup
|
||||
* for the target channel identified by it channel point. The backup is
|
||||
* encrypted with a key generated from the aezeed seed of the user. The
|
||||
* returned backup can either be restored using the RestoreChannelBackup
|
||||
* method once lnd is running, or via the InitWallet and UnlockWallet methods
|
||||
* from the WalletUnlocker service.
|
||||
*
|
||||
* @generated from protobuf rpc: ExportChannelBackup(lnrpc.ExportChannelBackupRequest) returns (lnrpc.ChannelBackup);
|
||||
*/
|
||||
exportChannelBackup(input, options) {
|
||||
const method = this.methods[50], opt = this._transport.mergeOptions(options);
|
||||
return (0, runtime_rpc_1.stackIntercept)("unary", this._transport, method, opt, input);
|
||||
}
|
||||
/**
|
||||
*
|
||||
* ExportAllChannelBackups returns static channel backups for all existing
|
||||
* channels known to lnd. A set of regular singular static channel backups for
|
||||
* each channel are returned. Additionally, a multi-channel backup is returned
|
||||
* as well, which contains a single encrypted blob containing the backups of
|
||||
* each channel.
|
||||
*
|
||||
* @generated from protobuf rpc: ExportAllChannelBackups(lnrpc.ChanBackupExportRequest) returns (lnrpc.ChanBackupSnapshot);
|
||||
*/
|
||||
exportAllChannelBackups(input, options) {
|
||||
const method = this.methods[51], opt = this._transport.mergeOptions(options);
|
||||
return (0, runtime_rpc_1.stackIntercept)("unary", this._transport, method, opt, input);
|
||||
}
|
||||
/**
|
||||
*
|
||||
* VerifyChanBackup allows a caller to verify the integrity of a channel backup
|
||||
* snapshot. This method will accept either a packed Single or a packed Multi.
|
||||
* Specifying both will result in an error.
|
||||
*
|
||||
* @generated from protobuf rpc: VerifyChanBackup(lnrpc.ChanBackupSnapshot) returns (lnrpc.VerifyChanBackupResponse);
|
||||
*/
|
||||
verifyChanBackup(input, options) {
|
||||
const method = this.methods[52], opt = this._transport.mergeOptions(options);
|
||||
return (0, runtime_rpc_1.stackIntercept)("unary", this._transport, method, opt, input);
|
||||
}
|
||||
/**
|
||||
* lncli: `restorechanbackup`
|
||||
* RestoreChannelBackups accepts a set of singular channel backups, or a
|
||||
* single encrypted multi-chan backup and attempts to recover any funds
|
||||
* remaining within the channel. If we are able to unpack the backup, then the
|
||||
* new channel will be shown under listchannels, as well as pending channels.
|
||||
*
|
||||
* @generated from protobuf rpc: RestoreChannelBackups(lnrpc.RestoreChanBackupRequest) returns (lnrpc.RestoreBackupResponse);
|
||||
*/
|
||||
restoreChannelBackups(input, options) {
|
||||
const method = this.methods[53], opt = this._transport.mergeOptions(options);
|
||||
return (0, runtime_rpc_1.stackIntercept)("unary", this._transport, method, opt, input);
|
||||
}
|
||||
/**
|
||||
*
|
||||
* SubscribeChannelBackups allows a client to sub-subscribe to the most up to
|
||||
* date information concerning the state of all channel backups. Each time a
|
||||
* new channel is added, we return the new set of channels, along with a
|
||||
* multi-chan backup containing the backup info for all channels. Each time a
|
||||
* channel is closed, we send a new update, which contains new new chan back
|
||||
* ups, but the updated set of encrypted multi-chan backups with the closed
|
||||
* channel(s) removed.
|
||||
*
|
||||
* @generated from protobuf rpc: SubscribeChannelBackups(lnrpc.ChannelBackupSubscription) returns (stream lnrpc.ChanBackupSnapshot);
|
||||
*/
|
||||
subscribeChannelBackups(input, options) {
|
||||
const method = this.methods[54], opt = this._transport.mergeOptions(options);
|
||||
return (0, runtime_rpc_1.stackIntercept)("serverStreaming", this._transport, method, opt, input);
|
||||
}
|
||||
/**
|
||||
* lncli: `bakemacaroon`
|
||||
* BakeMacaroon allows the creation of a new macaroon with custom read and
|
||||
* write permissions. No first-party caveats are added since this can be done
|
||||
* offline.
|
||||
*
|
||||
* @generated from protobuf rpc: BakeMacaroon(lnrpc.BakeMacaroonRequest) returns (lnrpc.BakeMacaroonResponse);
|
||||
*/
|
||||
bakeMacaroon(input, options) {
|
||||
const method = this.methods[55], opt = this._transport.mergeOptions(options);
|
||||
return (0, runtime_rpc_1.stackIntercept)("unary", this._transport, method, opt, input);
|
||||
}
|
||||
/**
|
||||
* lncli: `listmacaroonids`
|
||||
* ListMacaroonIDs returns all root key IDs that are in use.
|
||||
*
|
||||
* @generated from protobuf rpc: ListMacaroonIDs(lnrpc.ListMacaroonIDsRequest) returns (lnrpc.ListMacaroonIDsResponse);
|
||||
*/
|
||||
listMacaroonIDs(input, options) {
|
||||
const method = this.methods[56], opt = this._transport.mergeOptions(options);
|
||||
return (0, runtime_rpc_1.stackIntercept)("unary", this._transport, method, opt, input);
|
||||
}
|
||||
/**
|
||||
* lncli: `deletemacaroonid`
|
||||
* DeleteMacaroonID deletes the specified macaroon ID and invalidates all
|
||||
* macaroons derived from that ID.
|
||||
*
|
||||
* @generated from protobuf rpc: DeleteMacaroonID(lnrpc.DeleteMacaroonIDRequest) returns (lnrpc.DeleteMacaroonIDResponse);
|
||||
*/
|
||||
deleteMacaroonID(input, options) {
|
||||
const method = this.methods[57], opt = this._transport.mergeOptions(options);
|
||||
return (0, runtime_rpc_1.stackIntercept)("unary", this._transport, method, opt, input);
|
||||
}
|
||||
/**
|
||||
* lncli: `listpermissions`
|
||||
* ListPermissions lists all RPC method URIs and their required macaroon
|
||||
* permissions to access them.
|
||||
*
|
||||
* @generated from protobuf rpc: ListPermissions(lnrpc.ListPermissionsRequest) returns (lnrpc.ListPermissionsResponse);
|
||||
*/
|
||||
listPermissions(input, options) {
|
||||
const method = this.methods[58], opt = this._transport.mergeOptions(options);
|
||||
return (0, runtime_rpc_1.stackIntercept)("unary", this._transport, method, opt, input);
|
||||
}
|
||||
}
|
||||
exports.LightningClient = LightningClient;
|
||||
//# sourceMappingURL=rpc.client.js.map
|
||||
1
build/proto/lnd/rpc.client.js.map
Normal file
1
build/proto/lnd/rpc.client.js.map
Normal file
File diff suppressed because one or more lines are too long
827
build/proto/lnd/rpc.grpc-client.js
Normal file
827
build/proto/lnd/rpc.grpc-client.js
Normal file
|
|
@ -0,0 +1,827 @@
|
|||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.LightningClient = void 0;
|
||||
// @generated by protobuf-ts 2.5.0 with parameter long_type_string,client_grpc1
|
||||
// @generated from protobuf file "rpc.proto" (package "lnrpc", syntax proto3)
|
||||
// tslint:disable
|
||||
const rpc_1 = require("./rpc");
|
||||
const grpc = __importStar(require("@grpc/grpc-js"));
|
||||
//
|
||||
// Comments in this file will be directly parsed into the API
|
||||
// Documentation as descriptions of the associated method, message, or field.
|
||||
// These descriptions should go right above the definition of the object, and
|
||||
// can be in either block or // comment format.
|
||||
//
|
||||
// An RPC method can be matched to an lncli command by placing a line in the
|
||||
// beginning of the description in exactly the following format:
|
||||
// lncli: `methodname`
|
||||
//
|
||||
// Failure to specify the exact name of the command will cause documentation
|
||||
// generation to fail.
|
||||
//
|
||||
// More information on how exactly the gRPC documentation is generated from
|
||||
// this proto file can be found here:
|
||||
// https://github.com/lightninglabs/lightning-api
|
||||
/**
|
||||
* Lightning is the main RPC server of the daemon.
|
||||
*
|
||||
* @generated from protobuf service lnrpc.Lightning
|
||||
*/
|
||||
class LightningClient extends grpc.Client {
|
||||
constructor(address, credentials, options = {}, binaryOptions = {}) {
|
||||
super(address, credentials, options);
|
||||
this._binaryOptions = binaryOptions;
|
||||
}
|
||||
/**
|
||||
* lncli: `walletbalance`
|
||||
* WalletBalance returns total unspent outputs(confirmed and unconfirmed), all
|
||||
* confirmed unspent outputs and all unconfirmed unspent outputs under control
|
||||
* of the wallet.
|
||||
*
|
||||
* @generated from protobuf rpc: WalletBalance(lnrpc.WalletBalanceRequest) returns (lnrpc.WalletBalanceResponse);
|
||||
*/
|
||||
walletBalance(input, metadata, options, callback) {
|
||||
const method = rpc_1.Lightning.methods[0];
|
||||
return this.makeUnaryRequest(`/${rpc_1.Lightning.typeName}/${method.name}`, (value) => Buffer.from(method.I.toBinary(value, this._binaryOptions)), (value) => method.O.fromBinary(value, this._binaryOptions), input, metadata, options, callback);
|
||||
}
|
||||
/**
|
||||
* lncli: `channelbalance`
|
||||
* ChannelBalance returns a report on the total funds across all open channels,
|
||||
* categorized in local/remote, pending local/remote and unsettled local/remote
|
||||
* balances.
|
||||
*
|
||||
* @generated from protobuf rpc: ChannelBalance(lnrpc.ChannelBalanceRequest) returns (lnrpc.ChannelBalanceResponse);
|
||||
*/
|
||||
channelBalance(input, metadata, options, callback) {
|
||||
const method = rpc_1.Lightning.methods[1];
|
||||
return this.makeUnaryRequest(`/${rpc_1.Lightning.typeName}/${method.name}`, (value) => Buffer.from(method.I.toBinary(value, this._binaryOptions)), (value) => method.O.fromBinary(value, this._binaryOptions), input, metadata, options, callback);
|
||||
}
|
||||
/**
|
||||
* lncli: `listchaintxns`
|
||||
* GetTransactions returns a list describing all the known transactions
|
||||
* relevant to the wallet.
|
||||
*
|
||||
* @generated from protobuf rpc: GetTransactions(lnrpc.GetTransactionsRequest) returns (lnrpc.TransactionDetails);
|
||||
*/
|
||||
getTransactions(input, metadata, options, callback) {
|
||||
const method = rpc_1.Lightning.methods[2];
|
||||
return this.makeUnaryRequest(`/${rpc_1.Lightning.typeName}/${method.name}`, (value) => Buffer.from(method.I.toBinary(value, this._binaryOptions)), (value) => method.O.fromBinary(value, this._binaryOptions), input, metadata, options, callback);
|
||||
}
|
||||
/**
|
||||
* lncli: `estimatefee`
|
||||
* EstimateFee asks the chain backend to estimate the fee rate and total fees
|
||||
* for a transaction that pays to multiple specified outputs.
|
||||
*
|
||||
* When using REST, the `AddrToAmount` map type can be set by appending
|
||||
* `&AddrToAmount[<address>]=<amount_to_send>` to the URL. Unfortunately this
|
||||
* map type doesn't appear in the REST API documentation because of a bug in
|
||||
* the grpc-gateway library.
|
||||
*
|
||||
* @generated from protobuf rpc: EstimateFee(lnrpc.EstimateFeeRequest) returns (lnrpc.EstimateFeeResponse);
|
||||
*/
|
||||
estimateFee(input, metadata, options, callback) {
|
||||
const method = rpc_1.Lightning.methods[3];
|
||||
return this.makeUnaryRequest(`/${rpc_1.Lightning.typeName}/${method.name}`, (value) => Buffer.from(method.I.toBinary(value, this._binaryOptions)), (value) => method.O.fromBinary(value, this._binaryOptions), input, metadata, options, callback);
|
||||
}
|
||||
/**
|
||||
* lncli: `sendcoins`
|
||||
* SendCoins executes a request to send coins to a particular address. Unlike
|
||||
* SendMany, this RPC call only allows creating a single output at a time. If
|
||||
* neither target_conf, or sat_per_vbyte are set, then the internal wallet will
|
||||
* consult its fee model to determine a fee for the default confirmation
|
||||
* target.
|
||||
*
|
||||
* @generated from protobuf rpc: SendCoins(lnrpc.SendCoinsRequest) returns (lnrpc.SendCoinsResponse);
|
||||
*/
|
||||
sendCoins(input, metadata, options, callback) {
|
||||
const method = rpc_1.Lightning.methods[4];
|
||||
return this.makeUnaryRequest(`/${rpc_1.Lightning.typeName}/${method.name}`, (value) => Buffer.from(method.I.toBinary(value, this._binaryOptions)), (value) => method.O.fromBinary(value, this._binaryOptions), input, metadata, options, callback);
|
||||
}
|
||||
/**
|
||||
* lncli: `listunspent`
|
||||
* Deprecated, use walletrpc.ListUnspent instead.
|
||||
*
|
||||
* ListUnspent returns a list of all utxos spendable by the wallet with a
|
||||
* number of confirmations between the specified minimum and maximum.
|
||||
*
|
||||
* @generated from protobuf rpc: ListUnspent(lnrpc.ListUnspentRequest) returns (lnrpc.ListUnspentResponse);
|
||||
*/
|
||||
listUnspent(input, metadata, options, callback) {
|
||||
const method = rpc_1.Lightning.methods[5];
|
||||
return this.makeUnaryRequest(`/${rpc_1.Lightning.typeName}/${method.name}`, (value) => Buffer.from(method.I.toBinary(value, this._binaryOptions)), (value) => method.O.fromBinary(value, this._binaryOptions), input, metadata, options, callback);
|
||||
}
|
||||
/**
|
||||
*
|
||||
* SubscribeTransactions creates a uni-directional stream from the server to
|
||||
* the client in which any newly discovered transactions relevant to the
|
||||
* wallet are sent over.
|
||||
*
|
||||
* @generated from protobuf rpc: SubscribeTransactions(lnrpc.GetTransactionsRequest) returns (stream lnrpc.Transaction);
|
||||
*/
|
||||
subscribeTransactions(input, metadata, options) {
|
||||
const method = rpc_1.Lightning.methods[6];
|
||||
return this.makeServerStreamRequest(`/${rpc_1.Lightning.typeName}/${method.name}`, (value) => Buffer.from(method.I.toBinary(value, this._binaryOptions)), (value) => method.O.fromBinary(value, this._binaryOptions), input, metadata, options);
|
||||
}
|
||||
/**
|
||||
* lncli: `sendmany`
|
||||
* SendMany handles a request for a transaction that creates multiple specified
|
||||
* outputs in parallel. If neither target_conf, or sat_per_vbyte are set, then
|
||||
* the internal wallet will consult its fee model to determine a fee for the
|
||||
* default confirmation target.
|
||||
*
|
||||
* @generated from protobuf rpc: SendMany(lnrpc.SendManyRequest) returns (lnrpc.SendManyResponse);
|
||||
*/
|
||||
sendMany(input, metadata, options, callback) {
|
||||
const method = rpc_1.Lightning.methods[7];
|
||||
return this.makeUnaryRequest(`/${rpc_1.Lightning.typeName}/${method.name}`, (value) => Buffer.from(method.I.toBinary(value, this._binaryOptions)), (value) => method.O.fromBinary(value, this._binaryOptions), input, metadata, options, callback);
|
||||
}
|
||||
/**
|
||||
* lncli: `newaddress`
|
||||
* NewAddress creates a new address under control of the local wallet.
|
||||
*
|
||||
* @generated from protobuf rpc: NewAddress(lnrpc.NewAddressRequest) returns (lnrpc.NewAddressResponse);
|
||||
*/
|
||||
newAddress(input, metadata, options, callback) {
|
||||
const method = rpc_1.Lightning.methods[8];
|
||||
return this.makeUnaryRequest(`/${rpc_1.Lightning.typeName}/${method.name}`, (value) => Buffer.from(method.I.toBinary(value, this._binaryOptions)), (value) => method.O.fromBinary(value, this._binaryOptions), input, metadata, options, callback);
|
||||
}
|
||||
/**
|
||||
* lncli: `signmessage`
|
||||
* SignMessage signs a message with this node's private key. The returned
|
||||
* signature string is `zbase32` encoded and pubkey recoverable, meaning that
|
||||
* only the message digest and signature are needed for verification.
|
||||
*
|
||||
* @generated from protobuf rpc: SignMessage(lnrpc.SignMessageRequest) returns (lnrpc.SignMessageResponse);
|
||||
*/
|
||||
signMessage(input, metadata, options, callback) {
|
||||
const method = rpc_1.Lightning.methods[9];
|
||||
return this.makeUnaryRequest(`/${rpc_1.Lightning.typeName}/${method.name}`, (value) => Buffer.from(method.I.toBinary(value, this._binaryOptions)), (value) => method.O.fromBinary(value, this._binaryOptions), input, metadata, options, callback);
|
||||
}
|
||||
/**
|
||||
* lncli: `verifymessage`
|
||||
* VerifyMessage verifies a signature over a msg. The signature must be
|
||||
* zbase32 encoded and signed by an active node in the resident node's
|
||||
* channel database. In addition to returning the validity of the signature,
|
||||
* VerifyMessage also returns the recovered pubkey from the signature.
|
||||
*
|
||||
* @generated from protobuf rpc: VerifyMessage(lnrpc.VerifyMessageRequest) returns (lnrpc.VerifyMessageResponse);
|
||||
*/
|
||||
verifyMessage(input, metadata, options, callback) {
|
||||
const method = rpc_1.Lightning.methods[10];
|
||||
return this.makeUnaryRequest(`/${rpc_1.Lightning.typeName}/${method.name}`, (value) => Buffer.from(method.I.toBinary(value, this._binaryOptions)), (value) => method.O.fromBinary(value, this._binaryOptions), input, metadata, options, callback);
|
||||
}
|
||||
/**
|
||||
* lncli: `connect`
|
||||
* ConnectPeer attempts to establish a connection to a remote peer. This is at
|
||||
* the networking level, and is used for communication between nodes. This is
|
||||
* distinct from establishing a channel with a peer.
|
||||
*
|
||||
* @generated from protobuf rpc: ConnectPeer(lnrpc.ConnectPeerRequest) returns (lnrpc.ConnectPeerResponse);
|
||||
*/
|
||||
connectPeer(input, metadata, options, callback) {
|
||||
const method = rpc_1.Lightning.methods[11];
|
||||
return this.makeUnaryRequest(`/${rpc_1.Lightning.typeName}/${method.name}`, (value) => Buffer.from(method.I.toBinary(value, this._binaryOptions)), (value) => method.O.fromBinary(value, this._binaryOptions), input, metadata, options, callback);
|
||||
}
|
||||
/**
|
||||
* lncli: `disconnect`
|
||||
* DisconnectPeer attempts to disconnect one peer from another identified by a
|
||||
* given pubKey. In the case that we currently have a pending or active channel
|
||||
* with the target peer, then this action will be not be allowed.
|
||||
*
|
||||
* @generated from protobuf rpc: DisconnectPeer(lnrpc.DisconnectPeerRequest) returns (lnrpc.DisconnectPeerResponse);
|
||||
*/
|
||||
disconnectPeer(input, metadata, options, callback) {
|
||||
const method = rpc_1.Lightning.methods[12];
|
||||
return this.makeUnaryRequest(`/${rpc_1.Lightning.typeName}/${method.name}`, (value) => Buffer.from(method.I.toBinary(value, this._binaryOptions)), (value) => method.O.fromBinary(value, this._binaryOptions), input, metadata, options, callback);
|
||||
}
|
||||
/**
|
||||
* lncli: `listpeers`
|
||||
* ListPeers returns a verbose listing of all currently active peers.
|
||||
*
|
||||
* @generated from protobuf rpc: ListPeers(lnrpc.ListPeersRequest) returns (lnrpc.ListPeersResponse);
|
||||
*/
|
||||
listPeers(input, metadata, options, callback) {
|
||||
const method = rpc_1.Lightning.methods[13];
|
||||
return this.makeUnaryRequest(`/${rpc_1.Lightning.typeName}/${method.name}`, (value) => Buffer.from(method.I.toBinary(value, this._binaryOptions)), (value) => method.O.fromBinary(value, this._binaryOptions), input, metadata, options, callback);
|
||||
}
|
||||
/**
|
||||
*
|
||||
* SubscribePeerEvents creates a uni-directional stream from the server to
|
||||
* the client in which any events relevant to the state of peers are sent
|
||||
* over. Events include peers going online and offline.
|
||||
*
|
||||
* @generated from protobuf rpc: SubscribePeerEvents(lnrpc.PeerEventSubscription) returns (stream lnrpc.PeerEvent);
|
||||
*/
|
||||
subscribePeerEvents(input, metadata, options) {
|
||||
const method = rpc_1.Lightning.methods[14];
|
||||
return this.makeServerStreamRequest(`/${rpc_1.Lightning.typeName}/${method.name}`, (value) => Buffer.from(method.I.toBinary(value, this._binaryOptions)), (value) => method.O.fromBinary(value, this._binaryOptions), input, metadata, options);
|
||||
}
|
||||
/**
|
||||
* lncli: `getinfo`
|
||||
* GetInfo returns general information concerning the lightning node including
|
||||
* it's identity pubkey, alias, the chains it is connected to, and information
|
||||
* concerning the number of open+pending channels.
|
||||
*
|
||||
* @generated from protobuf rpc: GetInfo(lnrpc.GetInfoRequest) returns (lnrpc.GetInfoResponse);
|
||||
*/
|
||||
getInfo(input, metadata, options, callback) {
|
||||
const method = rpc_1.Lightning.methods[15];
|
||||
return this.makeUnaryRequest(`/${rpc_1.Lightning.typeName}/${method.name}`, (value) => Buffer.from(method.I.toBinary(value, this._binaryOptions)), (value) => method.O.fromBinary(value, this._binaryOptions), input, metadata, options, callback);
|
||||
}
|
||||
/**
|
||||
* * lncli: `getrecoveryinfo`
|
||||
* GetRecoveryInfo returns information concerning the recovery mode including
|
||||
* whether it's in a recovery mode, whether the recovery is finished, and the
|
||||
* progress made so far.
|
||||
*
|
||||
* @generated from protobuf rpc: GetRecoveryInfo(lnrpc.GetRecoveryInfoRequest) returns (lnrpc.GetRecoveryInfoResponse);
|
||||
*/
|
||||
getRecoveryInfo(input, metadata, options, callback) {
|
||||
const method = rpc_1.Lightning.methods[16];
|
||||
return this.makeUnaryRequest(`/${rpc_1.Lightning.typeName}/${method.name}`, (value) => Buffer.from(method.I.toBinary(value, this._binaryOptions)), (value) => method.O.fromBinary(value, this._binaryOptions), input, metadata, options, callback);
|
||||
}
|
||||
// TODO(roasbeef): merge with below with bool?
|
||||
/**
|
||||
* lncli: `pendingchannels`
|
||||
* PendingChannels returns a list of all the channels that are currently
|
||||
* considered "pending". A channel is pending if it has finished the funding
|
||||
* workflow and is waiting for confirmations for the funding txn, or is in the
|
||||
* process of closure, either initiated cooperatively or non-cooperatively.
|
||||
*
|
||||
* @generated from protobuf rpc: PendingChannels(lnrpc.PendingChannelsRequest) returns (lnrpc.PendingChannelsResponse);
|
||||
*/
|
||||
pendingChannels(input, metadata, options, callback) {
|
||||
const method = rpc_1.Lightning.methods[17];
|
||||
return this.makeUnaryRequest(`/${rpc_1.Lightning.typeName}/${method.name}`, (value) => Buffer.from(method.I.toBinary(value, this._binaryOptions)), (value) => method.O.fromBinary(value, this._binaryOptions), input, metadata, options, callback);
|
||||
}
|
||||
/**
|
||||
* lncli: `listchannels`
|
||||
* ListChannels returns a description of all the open channels that this node
|
||||
* is a participant in.
|
||||
*
|
||||
* @generated from protobuf rpc: ListChannels(lnrpc.ListChannelsRequest) returns (lnrpc.ListChannelsResponse);
|
||||
*/
|
||||
listChannels(input, metadata, options, callback) {
|
||||
const method = rpc_1.Lightning.methods[18];
|
||||
return this.makeUnaryRequest(`/${rpc_1.Lightning.typeName}/${method.name}`, (value) => Buffer.from(method.I.toBinary(value, this._binaryOptions)), (value) => method.O.fromBinary(value, this._binaryOptions), input, metadata, options, callback);
|
||||
}
|
||||
/**
|
||||
*
|
||||
* SubscribeChannelEvents creates a uni-directional stream from the server to
|
||||
* the client in which any updates relevant to the state of the channels are
|
||||
* sent over. Events include new active channels, inactive channels, and closed
|
||||
* channels.
|
||||
*
|
||||
* @generated from protobuf rpc: SubscribeChannelEvents(lnrpc.ChannelEventSubscription) returns (stream lnrpc.ChannelEventUpdate);
|
||||
*/
|
||||
subscribeChannelEvents(input, metadata, options) {
|
||||
const method = rpc_1.Lightning.methods[19];
|
||||
return this.makeServerStreamRequest(`/${rpc_1.Lightning.typeName}/${method.name}`, (value) => Buffer.from(method.I.toBinary(value, this._binaryOptions)), (value) => method.O.fromBinary(value, this._binaryOptions), input, metadata, options);
|
||||
}
|
||||
/**
|
||||
* lncli: `closedchannels`
|
||||
* ClosedChannels returns a description of all the closed channels that
|
||||
* this node was a participant in.
|
||||
*
|
||||
* @generated from protobuf rpc: ClosedChannels(lnrpc.ClosedChannelsRequest) returns (lnrpc.ClosedChannelsResponse);
|
||||
*/
|
||||
closedChannels(input, metadata, options, callback) {
|
||||
const method = rpc_1.Lightning.methods[20];
|
||||
return this.makeUnaryRequest(`/${rpc_1.Lightning.typeName}/${method.name}`, (value) => Buffer.from(method.I.toBinary(value, this._binaryOptions)), (value) => method.O.fromBinary(value, this._binaryOptions), input, metadata, options, callback);
|
||||
}
|
||||
/**
|
||||
*
|
||||
* OpenChannelSync is a synchronous version of the OpenChannel RPC call. This
|
||||
* call is meant to be consumed by clients to the REST proxy. As with all
|
||||
* other sync calls, all byte slices are intended to be populated as hex
|
||||
* encoded strings.
|
||||
*
|
||||
* @generated from protobuf rpc: OpenChannelSync(lnrpc.OpenChannelRequest) returns (lnrpc.ChannelPoint);
|
||||
*/
|
||||
openChannelSync(input, metadata, options, callback) {
|
||||
const method = rpc_1.Lightning.methods[21];
|
||||
return this.makeUnaryRequest(`/${rpc_1.Lightning.typeName}/${method.name}`, (value) => Buffer.from(method.I.toBinary(value, this._binaryOptions)), (value) => method.O.fromBinary(value, this._binaryOptions), input, metadata, options, callback);
|
||||
}
|
||||
/**
|
||||
* lncli: `openchannel`
|
||||
* OpenChannel attempts to open a singly funded channel specified in the
|
||||
* request to a remote peer. Users are able to specify a target number of
|
||||
* blocks that the funding transaction should be confirmed in, or a manual fee
|
||||
* rate to us for the funding transaction. If neither are specified, then a
|
||||
* lax block confirmation target is used. Each OpenStatusUpdate will return
|
||||
* the pending channel ID of the in-progress channel. Depending on the
|
||||
* arguments specified in the OpenChannelRequest, this pending channel ID can
|
||||
* then be used to manually progress the channel funding flow.
|
||||
*
|
||||
* @generated from protobuf rpc: OpenChannel(lnrpc.OpenChannelRequest) returns (stream lnrpc.OpenStatusUpdate);
|
||||
*/
|
||||
openChannel(input, metadata, options) {
|
||||
const method = rpc_1.Lightning.methods[22];
|
||||
return this.makeServerStreamRequest(`/${rpc_1.Lightning.typeName}/${method.name}`, (value) => Buffer.from(method.I.toBinary(value, this._binaryOptions)), (value) => method.O.fromBinary(value, this._binaryOptions), input, metadata, options);
|
||||
}
|
||||
/**
|
||||
*
|
||||
* FundingStateStep is an advanced funding related call that allows the caller
|
||||
* to either execute some preparatory steps for a funding workflow, or
|
||||
* manually progress a funding workflow. The primary way a funding flow is
|
||||
* identified is via its pending channel ID. As an example, this method can be
|
||||
* used to specify that we're expecting a funding flow for a particular
|
||||
* pending channel ID, for which we need to use specific parameters.
|
||||
* Alternatively, this can be used to interactively drive PSBT signing for
|
||||
* funding for partially complete funding transactions.
|
||||
*
|
||||
* @generated from protobuf rpc: FundingStateStep(lnrpc.FundingTransitionMsg) returns (lnrpc.FundingStateStepResp);
|
||||
*/
|
||||
fundingStateStep(input, metadata, options, callback) {
|
||||
const method = rpc_1.Lightning.methods[23];
|
||||
return this.makeUnaryRequest(`/${rpc_1.Lightning.typeName}/${method.name}`, (value) => Buffer.from(method.I.toBinary(value, this._binaryOptions)), (value) => method.O.fromBinary(value, this._binaryOptions), input, metadata, options, callback);
|
||||
}
|
||||
/**
|
||||
*
|
||||
* ChannelAcceptor dispatches a bi-directional streaming RPC in which
|
||||
* OpenChannel requests are sent to the client and the client responds with
|
||||
* a boolean that tells LND whether or not to accept the channel. This allows
|
||||
* node operators to specify their own criteria for accepting inbound channels
|
||||
* through a single persistent connection.
|
||||
*
|
||||
* @generated from protobuf rpc: ChannelAcceptor(stream lnrpc.ChannelAcceptResponse) returns (stream lnrpc.ChannelAcceptRequest);
|
||||
*/
|
||||
channelAcceptor(metadata, options) {
|
||||
const method = rpc_1.Lightning.methods[24];
|
||||
return this.makeBidiStreamRequest(`/${rpc_1.Lightning.typeName}/${method.name}`, (value) => Buffer.from(method.I.toBinary(value, this._binaryOptions)), (value) => method.O.fromBinary(value, this._binaryOptions), metadata, options);
|
||||
}
|
||||
/**
|
||||
* lncli: `closechannel`
|
||||
* CloseChannel attempts to close an active channel identified by its channel
|
||||
* outpoint (ChannelPoint). The actions of this method can additionally be
|
||||
* augmented to attempt a force close after a timeout period in the case of an
|
||||
* inactive peer. If a non-force close (cooperative closure) is requested,
|
||||
* then the user can specify either a target number of blocks until the
|
||||
* closure transaction is confirmed, or a manual fee rate. If neither are
|
||||
* specified, then a default lax, block confirmation target is used.
|
||||
*
|
||||
* @generated from protobuf rpc: CloseChannel(lnrpc.CloseChannelRequest) returns (stream lnrpc.CloseStatusUpdate);
|
||||
*/
|
||||
closeChannel(input, metadata, options) {
|
||||
const method = rpc_1.Lightning.methods[25];
|
||||
return this.makeServerStreamRequest(`/${rpc_1.Lightning.typeName}/${method.name}`, (value) => Buffer.from(method.I.toBinary(value, this._binaryOptions)), (value) => method.O.fromBinary(value, this._binaryOptions), input, metadata, options);
|
||||
}
|
||||
/**
|
||||
* lncli: `abandonchannel`
|
||||
* AbandonChannel removes all channel state from the database except for a
|
||||
* close summary. This method can be used to get rid of permanently unusable
|
||||
* channels due to bugs fixed in newer versions of lnd. This method can also be
|
||||
* used to remove externally funded channels where the funding transaction was
|
||||
* never broadcast. Only available for non-externally funded channels in dev
|
||||
* build.
|
||||
*
|
||||
* @generated from protobuf rpc: AbandonChannel(lnrpc.AbandonChannelRequest) returns (lnrpc.AbandonChannelResponse);
|
||||
*/
|
||||
abandonChannel(input, metadata, options, callback) {
|
||||
const method = rpc_1.Lightning.methods[26];
|
||||
return this.makeUnaryRequest(`/${rpc_1.Lightning.typeName}/${method.name}`, (value) => Buffer.from(method.I.toBinary(value, this._binaryOptions)), (value) => method.O.fromBinary(value, this._binaryOptions), input, metadata, options, callback);
|
||||
}
|
||||
/**
|
||||
* lncli: `sendpayment`
|
||||
* Deprecated, use routerrpc.SendPaymentV2. SendPayment dispatches a
|
||||
* bi-directional streaming RPC for sending payments through the Lightning
|
||||
* Network. A single RPC invocation creates a persistent bi-directional
|
||||
* stream allowing clients to rapidly send payments through the Lightning
|
||||
* Network with a single persistent connection.
|
||||
*
|
||||
* @deprecated
|
||||
* @generated from protobuf rpc: SendPayment(stream lnrpc.SendRequest) returns (stream lnrpc.SendResponse);
|
||||
*/
|
||||
sendPayment(metadata, options) {
|
||||
const method = rpc_1.Lightning.methods[27];
|
||||
return this.makeBidiStreamRequest(`/${rpc_1.Lightning.typeName}/${method.name}`, (value) => Buffer.from(method.I.toBinary(value, this._binaryOptions)), (value) => method.O.fromBinary(value, this._binaryOptions), metadata, options);
|
||||
}
|
||||
/**
|
||||
*
|
||||
* SendPaymentSync is the synchronous non-streaming version of SendPayment.
|
||||
* This RPC is intended to be consumed by clients of the REST proxy.
|
||||
* Additionally, this RPC expects the destination's public key and the payment
|
||||
* hash (if any) to be encoded as hex strings.
|
||||
*
|
||||
* @generated from protobuf rpc: SendPaymentSync(lnrpc.SendRequest) returns (lnrpc.SendResponse);
|
||||
*/
|
||||
sendPaymentSync(input, metadata, options, callback) {
|
||||
const method = rpc_1.Lightning.methods[28];
|
||||
return this.makeUnaryRequest(`/${rpc_1.Lightning.typeName}/${method.name}`, (value) => Buffer.from(method.I.toBinary(value, this._binaryOptions)), (value) => method.O.fromBinary(value, this._binaryOptions), input, metadata, options, callback);
|
||||
}
|
||||
/**
|
||||
* lncli: `sendtoroute`
|
||||
* Deprecated, use routerrpc.SendToRouteV2. SendToRoute is a bi-directional
|
||||
* streaming RPC for sending payment through the Lightning Network. This
|
||||
* method differs from SendPayment in that it allows users to specify a full
|
||||
* route manually. This can be used for things like rebalancing, and atomic
|
||||
* swaps.
|
||||
*
|
||||
* @deprecated
|
||||
* @generated from protobuf rpc: SendToRoute(stream lnrpc.SendToRouteRequest) returns (stream lnrpc.SendResponse);
|
||||
*/
|
||||
sendToRoute(metadata, options) {
|
||||
const method = rpc_1.Lightning.methods[29];
|
||||
return this.makeBidiStreamRequest(`/${rpc_1.Lightning.typeName}/${method.name}`, (value) => Buffer.from(method.I.toBinary(value, this._binaryOptions)), (value) => method.O.fromBinary(value, this._binaryOptions), metadata, options);
|
||||
}
|
||||
/**
|
||||
*
|
||||
* SendToRouteSync is a synchronous version of SendToRoute. It Will block
|
||||
* until the payment either fails or succeeds.
|
||||
*
|
||||
* @generated from protobuf rpc: SendToRouteSync(lnrpc.SendToRouteRequest) returns (lnrpc.SendResponse);
|
||||
*/
|
||||
sendToRouteSync(input, metadata, options, callback) {
|
||||
const method = rpc_1.Lightning.methods[30];
|
||||
return this.makeUnaryRequest(`/${rpc_1.Lightning.typeName}/${method.name}`, (value) => Buffer.from(method.I.toBinary(value, this._binaryOptions)), (value) => method.O.fromBinary(value, this._binaryOptions), input, metadata, options, callback);
|
||||
}
|
||||
/**
|
||||
* lncli: `addinvoice`
|
||||
* AddInvoice attempts to add a new invoice to the invoice database. Any
|
||||
* duplicated invoices are rejected, therefore all invoices *must* have a
|
||||
* unique payment preimage.
|
||||
*
|
||||
* @generated from protobuf rpc: AddInvoice(lnrpc.Invoice) returns (lnrpc.AddInvoiceResponse);
|
||||
*/
|
||||
addInvoice(input, metadata, options, callback) {
|
||||
const method = rpc_1.Lightning.methods[31];
|
||||
return this.makeUnaryRequest(`/${rpc_1.Lightning.typeName}/${method.name}`, (value) => Buffer.from(method.I.toBinary(value, this._binaryOptions)), (value) => method.O.fromBinary(value, this._binaryOptions), input, metadata, options, callback);
|
||||
}
|
||||
/**
|
||||
* lncli: `listinvoices`
|
||||
* ListInvoices returns a list of all the invoices currently stored within the
|
||||
* database. Any active debug invoices are ignored. It has full support for
|
||||
* paginated responses, allowing users to query for specific invoices through
|
||||
* their add_index. This can be done by using either the first_index_offset or
|
||||
* last_index_offset fields included in the response as the index_offset of the
|
||||
* next request. By default, the first 100 invoices created will be returned.
|
||||
* Backwards pagination is also supported through the Reversed flag.
|
||||
*
|
||||
* @generated from protobuf rpc: ListInvoices(lnrpc.ListInvoiceRequest) returns (lnrpc.ListInvoiceResponse);
|
||||
*/
|
||||
listInvoices(input, metadata, options, callback) {
|
||||
const method = rpc_1.Lightning.methods[32];
|
||||
return this.makeUnaryRequest(`/${rpc_1.Lightning.typeName}/${method.name}`, (value) => Buffer.from(method.I.toBinary(value, this._binaryOptions)), (value) => method.O.fromBinary(value, this._binaryOptions), input, metadata, options, callback);
|
||||
}
|
||||
/**
|
||||
* lncli: `lookupinvoice`
|
||||
* LookupInvoice attempts to look up an invoice according to its payment hash.
|
||||
* The passed payment hash *must* be exactly 32 bytes, if not, an error is
|
||||
* returned.
|
||||
*
|
||||
* @generated from protobuf rpc: LookupInvoice(lnrpc.PaymentHash) returns (lnrpc.Invoice);
|
||||
*/
|
||||
lookupInvoice(input, metadata, options, callback) {
|
||||
const method = rpc_1.Lightning.methods[33];
|
||||
return this.makeUnaryRequest(`/${rpc_1.Lightning.typeName}/${method.name}`, (value) => Buffer.from(method.I.toBinary(value, this._binaryOptions)), (value) => method.O.fromBinary(value, this._binaryOptions), input, metadata, options, callback);
|
||||
}
|
||||
/**
|
||||
*
|
||||
* SubscribeInvoices returns a uni-directional stream (server -> client) for
|
||||
* notifying the client of newly added/settled invoices. The caller can
|
||||
* optionally specify the add_index and/or the settle_index. If the add_index
|
||||
* is specified, then we'll first start by sending add invoice events for all
|
||||
* invoices with an add_index greater than the specified value. If the
|
||||
* settle_index is specified, the next, we'll send out all settle events for
|
||||
* invoices with a settle_index greater than the specified value. One or both
|
||||
* of these fields can be set. If no fields are set, then we'll only send out
|
||||
* the latest add/settle events.
|
||||
*
|
||||
* @generated from protobuf rpc: SubscribeInvoices(lnrpc.InvoiceSubscription) returns (stream lnrpc.Invoice);
|
||||
*/
|
||||
subscribeInvoices(input, metadata, options) {
|
||||
const method = rpc_1.Lightning.methods[34];
|
||||
return this.makeServerStreamRequest(`/${rpc_1.Lightning.typeName}/${method.name}`, (value) => Buffer.from(method.I.toBinary(value, this._binaryOptions)), (value) => method.O.fromBinary(value, this._binaryOptions), input, metadata, options);
|
||||
}
|
||||
/**
|
||||
* lncli: `decodepayreq`
|
||||
* DecodePayReq takes an encoded payment request string and attempts to decode
|
||||
* it, returning a full description of the conditions encoded within the
|
||||
* payment request.
|
||||
*
|
||||
* @generated from protobuf rpc: DecodePayReq(lnrpc.PayReqString) returns (lnrpc.PayReq);
|
||||
*/
|
||||
decodePayReq(input, metadata, options, callback) {
|
||||
const method = rpc_1.Lightning.methods[35];
|
||||
return this.makeUnaryRequest(`/${rpc_1.Lightning.typeName}/${method.name}`, (value) => Buffer.from(method.I.toBinary(value, this._binaryOptions)), (value) => method.O.fromBinary(value, this._binaryOptions), input, metadata, options, callback);
|
||||
}
|
||||
/**
|
||||
* lncli: `listpayments`
|
||||
* ListPayments returns a list of all outgoing payments.
|
||||
*
|
||||
* @generated from protobuf rpc: ListPayments(lnrpc.ListPaymentsRequest) returns (lnrpc.ListPaymentsResponse);
|
||||
*/
|
||||
listPayments(input, metadata, options, callback) {
|
||||
const method = rpc_1.Lightning.methods[36];
|
||||
return this.makeUnaryRequest(`/${rpc_1.Lightning.typeName}/${method.name}`, (value) => Buffer.from(method.I.toBinary(value, this._binaryOptions)), (value) => method.O.fromBinary(value, this._binaryOptions), input, metadata, options, callback);
|
||||
}
|
||||
/**
|
||||
*
|
||||
* DeleteAllPayments deletes all outgoing payments from DB.
|
||||
*
|
||||
* @generated from protobuf rpc: DeleteAllPayments(lnrpc.DeleteAllPaymentsRequest) returns (lnrpc.DeleteAllPaymentsResponse);
|
||||
*/
|
||||
deleteAllPayments(input, metadata, options, callback) {
|
||||
const method = rpc_1.Lightning.methods[37];
|
||||
return this.makeUnaryRequest(`/${rpc_1.Lightning.typeName}/${method.name}`, (value) => Buffer.from(method.I.toBinary(value, this._binaryOptions)), (value) => method.O.fromBinary(value, this._binaryOptions), input, metadata, options, callback);
|
||||
}
|
||||
/**
|
||||
* lncli: `describegraph`
|
||||
* DescribeGraph returns a description of the latest graph state from the
|
||||
* point of view of the node. The graph information is partitioned into two
|
||||
* components: all the nodes/vertexes, and all the edges that connect the
|
||||
* vertexes themselves. As this is a directed graph, the edges also contain
|
||||
* the node directional specific routing policy which includes: the time lock
|
||||
* delta, fee information, etc.
|
||||
*
|
||||
* @generated from protobuf rpc: DescribeGraph(lnrpc.ChannelGraphRequest) returns (lnrpc.ChannelGraph);
|
||||
*/
|
||||
describeGraph(input, metadata, options, callback) {
|
||||
const method = rpc_1.Lightning.methods[38];
|
||||
return this.makeUnaryRequest(`/${rpc_1.Lightning.typeName}/${method.name}`, (value) => Buffer.from(method.I.toBinary(value, this._binaryOptions)), (value) => method.O.fromBinary(value, this._binaryOptions), input, metadata, options, callback);
|
||||
}
|
||||
/**
|
||||
* lncli: `getnodemetrics`
|
||||
* GetNodeMetrics returns node metrics calculated from the graph. Currently
|
||||
* the only supported metric is betweenness centrality of individual nodes.
|
||||
*
|
||||
* @generated from protobuf rpc: GetNodeMetrics(lnrpc.NodeMetricsRequest) returns (lnrpc.NodeMetricsResponse);
|
||||
*/
|
||||
getNodeMetrics(input, metadata, options, callback) {
|
||||
const method = rpc_1.Lightning.methods[39];
|
||||
return this.makeUnaryRequest(`/${rpc_1.Lightning.typeName}/${method.name}`, (value) => Buffer.from(method.I.toBinary(value, this._binaryOptions)), (value) => method.O.fromBinary(value, this._binaryOptions), input, metadata, options, callback);
|
||||
}
|
||||
/**
|
||||
* lncli: `getchaninfo`
|
||||
* GetChanInfo returns the latest authenticated network announcement for the
|
||||
* given channel identified by its channel ID: an 8-byte integer which
|
||||
* uniquely identifies the location of transaction's funding output within the
|
||||
* blockchain.
|
||||
*
|
||||
* @generated from protobuf rpc: GetChanInfo(lnrpc.ChanInfoRequest) returns (lnrpc.ChannelEdge);
|
||||
*/
|
||||
getChanInfo(input, metadata, options, callback) {
|
||||
const method = rpc_1.Lightning.methods[40];
|
||||
return this.makeUnaryRequest(`/${rpc_1.Lightning.typeName}/${method.name}`, (value) => Buffer.from(method.I.toBinary(value, this._binaryOptions)), (value) => method.O.fromBinary(value, this._binaryOptions), input, metadata, options, callback);
|
||||
}
|
||||
/**
|
||||
* lncli: `getnodeinfo`
|
||||
* GetNodeInfo returns the latest advertised, aggregated, and authenticated
|
||||
* channel information for the specified node identified by its public key.
|
||||
*
|
||||
* @generated from protobuf rpc: GetNodeInfo(lnrpc.NodeInfoRequest) returns (lnrpc.NodeInfo);
|
||||
*/
|
||||
getNodeInfo(input, metadata, options, callback) {
|
||||
const method = rpc_1.Lightning.methods[41];
|
||||
return this.makeUnaryRequest(`/${rpc_1.Lightning.typeName}/${method.name}`, (value) => Buffer.from(method.I.toBinary(value, this._binaryOptions)), (value) => method.O.fromBinary(value, this._binaryOptions), input, metadata, options, callback);
|
||||
}
|
||||
/**
|
||||
* lncli: `queryroutes`
|
||||
* QueryRoutes attempts to query the daemon's Channel Router for a possible
|
||||
* route to a target destination capable of carrying a specific amount of
|
||||
* satoshis. The returned route contains the full details required to craft and
|
||||
* send an HTLC, also including the necessary information that should be
|
||||
* present within the Sphinx packet encapsulated within the HTLC.
|
||||
*
|
||||
* When using REST, the `dest_custom_records` map type can be set by appending
|
||||
* `&dest_custom_records[<record_number>]=<record_data_base64_url_encoded>`
|
||||
* to the URL. Unfortunately this map type doesn't appear in the REST API
|
||||
* documentation because of a bug in the grpc-gateway library.
|
||||
*
|
||||
* @generated from protobuf rpc: QueryRoutes(lnrpc.QueryRoutesRequest) returns (lnrpc.QueryRoutesResponse);
|
||||
*/
|
||||
queryRoutes(input, metadata, options, callback) {
|
||||
const method = rpc_1.Lightning.methods[42];
|
||||
return this.makeUnaryRequest(`/${rpc_1.Lightning.typeName}/${method.name}`, (value) => Buffer.from(method.I.toBinary(value, this._binaryOptions)), (value) => method.O.fromBinary(value, this._binaryOptions), input, metadata, options, callback);
|
||||
}
|
||||
/**
|
||||
* lncli: `getnetworkinfo`
|
||||
* GetNetworkInfo returns some basic stats about the known channel graph from
|
||||
* the point of view of the node.
|
||||
*
|
||||
* @generated from protobuf rpc: GetNetworkInfo(lnrpc.NetworkInfoRequest) returns (lnrpc.NetworkInfo);
|
||||
*/
|
||||
getNetworkInfo(input, metadata, options, callback) {
|
||||
const method = rpc_1.Lightning.methods[43];
|
||||
return this.makeUnaryRequest(`/${rpc_1.Lightning.typeName}/${method.name}`, (value) => Buffer.from(method.I.toBinary(value, this._binaryOptions)), (value) => method.O.fromBinary(value, this._binaryOptions), input, metadata, options, callback);
|
||||
}
|
||||
/**
|
||||
* lncli: `stop`
|
||||
* StopDaemon will send a shutdown request to the interrupt handler, triggering
|
||||
* a graceful shutdown of the daemon.
|
||||
*
|
||||
* @generated from protobuf rpc: StopDaemon(lnrpc.StopRequest) returns (lnrpc.StopResponse);
|
||||
*/
|
||||
stopDaemon(input, metadata, options, callback) {
|
||||
const method = rpc_1.Lightning.methods[44];
|
||||
return this.makeUnaryRequest(`/${rpc_1.Lightning.typeName}/${method.name}`, (value) => Buffer.from(method.I.toBinary(value, this._binaryOptions)), (value) => method.O.fromBinary(value, this._binaryOptions), input, metadata, options, callback);
|
||||
}
|
||||
/**
|
||||
*
|
||||
* SubscribeChannelGraph launches a streaming RPC that allows the caller to
|
||||
* receive notifications upon any changes to the channel graph topology from
|
||||
* the point of view of the responding node. Events notified include: new
|
||||
* nodes coming online, nodes updating their authenticated attributes, new
|
||||
* channels being advertised, updates in the routing policy for a directional
|
||||
* channel edge, and when channels are closed on-chain.
|
||||
*
|
||||
* @generated from protobuf rpc: SubscribeChannelGraph(lnrpc.GraphTopologySubscription) returns (stream lnrpc.GraphTopologyUpdate);
|
||||
*/
|
||||
subscribeChannelGraph(input, metadata, options) {
|
||||
const method = rpc_1.Lightning.methods[45];
|
||||
return this.makeServerStreamRequest(`/${rpc_1.Lightning.typeName}/${method.name}`, (value) => Buffer.from(method.I.toBinary(value, this._binaryOptions)), (value) => method.O.fromBinary(value, this._binaryOptions), input, metadata, options);
|
||||
}
|
||||
/**
|
||||
* lncli: `debuglevel`
|
||||
* DebugLevel allows a caller to programmatically set the logging verbosity of
|
||||
* lnd. The logging can be targeted according to a coarse daemon-wide logging
|
||||
* level, or in a granular fashion to specify the logging for a target
|
||||
* sub-system.
|
||||
*
|
||||
* @generated from protobuf rpc: DebugLevel(lnrpc.DebugLevelRequest) returns (lnrpc.DebugLevelResponse);
|
||||
*/
|
||||
debugLevel(input, metadata, options, callback) {
|
||||
const method = rpc_1.Lightning.methods[46];
|
||||
return this.makeUnaryRequest(`/${rpc_1.Lightning.typeName}/${method.name}`, (value) => Buffer.from(method.I.toBinary(value, this._binaryOptions)), (value) => method.O.fromBinary(value, this._binaryOptions), input, metadata, options, callback);
|
||||
}
|
||||
/**
|
||||
* lncli: `feereport`
|
||||
* FeeReport allows the caller to obtain a report detailing the current fee
|
||||
* schedule enforced by the node globally for each channel.
|
||||
*
|
||||
* @generated from protobuf rpc: FeeReport(lnrpc.FeeReportRequest) returns (lnrpc.FeeReportResponse);
|
||||
*/
|
||||
feeReport(input, metadata, options, callback) {
|
||||
const method = rpc_1.Lightning.methods[47];
|
||||
return this.makeUnaryRequest(`/${rpc_1.Lightning.typeName}/${method.name}`, (value) => Buffer.from(method.I.toBinary(value, this._binaryOptions)), (value) => method.O.fromBinary(value, this._binaryOptions), input, metadata, options, callback);
|
||||
}
|
||||
/**
|
||||
* lncli: `updatechanpolicy`
|
||||
* UpdateChannelPolicy allows the caller to update the fee schedule and
|
||||
* channel policies for all channels globally, or a particular channel.
|
||||
*
|
||||
* @generated from protobuf rpc: UpdateChannelPolicy(lnrpc.PolicyUpdateRequest) returns (lnrpc.PolicyUpdateResponse);
|
||||
*/
|
||||
updateChannelPolicy(input, metadata, options, callback) {
|
||||
const method = rpc_1.Lightning.methods[48];
|
||||
return this.makeUnaryRequest(`/${rpc_1.Lightning.typeName}/${method.name}`, (value) => Buffer.from(method.I.toBinary(value, this._binaryOptions)), (value) => method.O.fromBinary(value, this._binaryOptions), input, metadata, options, callback);
|
||||
}
|
||||
/**
|
||||
* lncli: `fwdinghistory`
|
||||
* ForwardingHistory allows the caller to query the htlcswitch for a record of
|
||||
* all HTLCs forwarded within the target time range, and integer offset
|
||||
* within that time range. If no time-range is specified, then the first chunk
|
||||
* of the past 24 hrs of forwarding history are returned.
|
||||
*
|
||||
* A list of forwarding events are returned. The size of each forwarding event
|
||||
* is 40 bytes, and the max message size able to be returned in gRPC is 4 MiB.
|
||||
* As a result each message can only contain 50k entries. Each response has
|
||||
* the index offset of the last entry. The index offset can be provided to the
|
||||
* request to allow the caller to skip a series of records.
|
||||
*
|
||||
* @generated from protobuf rpc: ForwardingHistory(lnrpc.ForwardingHistoryRequest) returns (lnrpc.ForwardingHistoryResponse);
|
||||
*/
|
||||
forwardingHistory(input, metadata, options, callback) {
|
||||
const method = rpc_1.Lightning.methods[49];
|
||||
return this.makeUnaryRequest(`/${rpc_1.Lightning.typeName}/${method.name}`, (value) => Buffer.from(method.I.toBinary(value, this._binaryOptions)), (value) => method.O.fromBinary(value, this._binaryOptions), input, metadata, options, callback);
|
||||
}
|
||||
/**
|
||||
* lncli: `exportchanbackup`
|
||||
* ExportChannelBackup attempts to return an encrypted static channel backup
|
||||
* for the target channel identified by it channel point. The backup is
|
||||
* encrypted with a key generated from the aezeed seed of the user. The
|
||||
* returned backup can either be restored using the RestoreChannelBackup
|
||||
* method once lnd is running, or via the InitWallet and UnlockWallet methods
|
||||
* from the WalletUnlocker service.
|
||||
*
|
||||
* @generated from protobuf rpc: ExportChannelBackup(lnrpc.ExportChannelBackupRequest) returns (lnrpc.ChannelBackup);
|
||||
*/
|
||||
exportChannelBackup(input, metadata, options, callback) {
|
||||
const method = rpc_1.Lightning.methods[50];
|
||||
return this.makeUnaryRequest(`/${rpc_1.Lightning.typeName}/${method.name}`, (value) => Buffer.from(method.I.toBinary(value, this._binaryOptions)), (value) => method.O.fromBinary(value, this._binaryOptions), input, metadata, options, callback);
|
||||
}
|
||||
/**
|
||||
*
|
||||
* ExportAllChannelBackups returns static channel backups for all existing
|
||||
* channels known to lnd. A set of regular singular static channel backups for
|
||||
* each channel are returned. Additionally, a multi-channel backup is returned
|
||||
* as well, which contains a single encrypted blob containing the backups of
|
||||
* each channel.
|
||||
*
|
||||
* @generated from protobuf rpc: ExportAllChannelBackups(lnrpc.ChanBackupExportRequest) returns (lnrpc.ChanBackupSnapshot);
|
||||
*/
|
||||
exportAllChannelBackups(input, metadata, options, callback) {
|
||||
const method = rpc_1.Lightning.methods[51];
|
||||
return this.makeUnaryRequest(`/${rpc_1.Lightning.typeName}/${method.name}`, (value) => Buffer.from(method.I.toBinary(value, this._binaryOptions)), (value) => method.O.fromBinary(value, this._binaryOptions), input, metadata, options, callback);
|
||||
}
|
||||
/**
|
||||
*
|
||||
* VerifyChanBackup allows a caller to verify the integrity of a channel backup
|
||||
* snapshot. This method will accept either a packed Single or a packed Multi.
|
||||
* Specifying both will result in an error.
|
||||
*
|
||||
* @generated from protobuf rpc: VerifyChanBackup(lnrpc.ChanBackupSnapshot) returns (lnrpc.VerifyChanBackupResponse);
|
||||
*/
|
||||
verifyChanBackup(input, metadata, options, callback) {
|
||||
const method = rpc_1.Lightning.methods[52];
|
||||
return this.makeUnaryRequest(`/${rpc_1.Lightning.typeName}/${method.name}`, (value) => Buffer.from(method.I.toBinary(value, this._binaryOptions)), (value) => method.O.fromBinary(value, this._binaryOptions), input, metadata, options, callback);
|
||||
}
|
||||
/**
|
||||
* lncli: `restorechanbackup`
|
||||
* RestoreChannelBackups accepts a set of singular channel backups, or a
|
||||
* single encrypted multi-chan backup and attempts to recover any funds
|
||||
* remaining within the channel. If we are able to unpack the backup, then the
|
||||
* new channel will be shown under listchannels, as well as pending channels.
|
||||
*
|
||||
* @generated from protobuf rpc: RestoreChannelBackups(lnrpc.RestoreChanBackupRequest) returns (lnrpc.RestoreBackupResponse);
|
||||
*/
|
||||
restoreChannelBackups(input, metadata, options, callback) {
|
||||
const method = rpc_1.Lightning.methods[53];
|
||||
return this.makeUnaryRequest(`/${rpc_1.Lightning.typeName}/${method.name}`, (value) => Buffer.from(method.I.toBinary(value, this._binaryOptions)), (value) => method.O.fromBinary(value, this._binaryOptions), input, metadata, options, callback);
|
||||
}
|
||||
/**
|
||||
*
|
||||
* SubscribeChannelBackups allows a client to sub-subscribe to the most up to
|
||||
* date information concerning the state of all channel backups. Each time a
|
||||
* new channel is added, we return the new set of channels, along with a
|
||||
* multi-chan backup containing the backup info for all channels. Each time a
|
||||
* channel is closed, we send a new update, which contains new new chan back
|
||||
* ups, but the updated set of encrypted multi-chan backups with the closed
|
||||
* channel(s) removed.
|
||||
*
|
||||
* @generated from protobuf rpc: SubscribeChannelBackups(lnrpc.ChannelBackupSubscription) returns (stream lnrpc.ChanBackupSnapshot);
|
||||
*/
|
||||
subscribeChannelBackups(input, metadata, options) {
|
||||
const method = rpc_1.Lightning.methods[54];
|
||||
return this.makeServerStreamRequest(`/${rpc_1.Lightning.typeName}/${method.name}`, (value) => Buffer.from(method.I.toBinary(value, this._binaryOptions)), (value) => method.O.fromBinary(value, this._binaryOptions), input, metadata, options);
|
||||
}
|
||||
/**
|
||||
* lncli: `bakemacaroon`
|
||||
* BakeMacaroon allows the creation of a new macaroon with custom read and
|
||||
* write permissions. No first-party caveats are added since this can be done
|
||||
* offline.
|
||||
*
|
||||
* @generated from protobuf rpc: BakeMacaroon(lnrpc.BakeMacaroonRequest) returns (lnrpc.BakeMacaroonResponse);
|
||||
*/
|
||||
bakeMacaroon(input, metadata, options, callback) {
|
||||
const method = rpc_1.Lightning.methods[55];
|
||||
return this.makeUnaryRequest(`/${rpc_1.Lightning.typeName}/${method.name}`, (value) => Buffer.from(method.I.toBinary(value, this._binaryOptions)), (value) => method.O.fromBinary(value, this._binaryOptions), input, metadata, options, callback);
|
||||
}
|
||||
/**
|
||||
* lncli: `listmacaroonids`
|
||||
* ListMacaroonIDs returns all root key IDs that are in use.
|
||||
*
|
||||
* @generated from protobuf rpc: ListMacaroonIDs(lnrpc.ListMacaroonIDsRequest) returns (lnrpc.ListMacaroonIDsResponse);
|
||||
*/
|
||||
listMacaroonIDs(input, metadata, options, callback) {
|
||||
const method = rpc_1.Lightning.methods[56];
|
||||
return this.makeUnaryRequest(`/${rpc_1.Lightning.typeName}/${method.name}`, (value) => Buffer.from(method.I.toBinary(value, this._binaryOptions)), (value) => method.O.fromBinary(value, this._binaryOptions), input, metadata, options, callback);
|
||||
}
|
||||
/**
|
||||
* lncli: `deletemacaroonid`
|
||||
* DeleteMacaroonID deletes the specified macaroon ID and invalidates all
|
||||
* macaroons derived from that ID.
|
||||
*
|
||||
* @generated from protobuf rpc: DeleteMacaroonID(lnrpc.DeleteMacaroonIDRequest) returns (lnrpc.DeleteMacaroonIDResponse);
|
||||
*/
|
||||
deleteMacaroonID(input, metadata, options, callback) {
|
||||
const method = rpc_1.Lightning.methods[57];
|
||||
return this.makeUnaryRequest(`/${rpc_1.Lightning.typeName}/${method.name}`, (value) => Buffer.from(method.I.toBinary(value, this._binaryOptions)), (value) => method.O.fromBinary(value, this._binaryOptions), input, metadata, options, callback);
|
||||
}
|
||||
/**
|
||||
* lncli: `listpermissions`
|
||||
* ListPermissions lists all RPC method URIs and their required macaroon
|
||||
* permissions to access them.
|
||||
*
|
||||
* @generated from protobuf rpc: ListPermissions(lnrpc.ListPermissionsRequest) returns (lnrpc.ListPermissionsResponse);
|
||||
*/
|
||||
listPermissions(input, metadata, options, callback) {
|
||||
const method = rpc_1.Lightning.methods[58];
|
||||
return this.makeUnaryRequest(`/${rpc_1.Lightning.typeName}/${method.name}`, (value) => Buffer.from(method.I.toBinary(value, this._binaryOptions)), (value) => method.O.fromBinary(value, this._binaryOptions), input, metadata, options, callback);
|
||||
}
|
||||
}
|
||||
exports.LightningClient = LightningClient;
|
||||
//# sourceMappingURL=rpc.grpc-client.js.map
|
||||
1
build/proto/lnd/rpc.grpc-client.js.map
Normal file
1
build/proto/lnd/rpc.grpc-client.js.map
Normal file
File diff suppressed because one or more lines are too long
12362
build/proto/lnd/rpc.js
Normal file
12362
build/proto/lnd/rpc.js
Normal file
File diff suppressed because it is too large
Load diff
1
build/proto/lnd/rpc.js.map
Normal file
1
build/proto/lnd/rpc.js.map
Normal file
File diff suppressed because one or more lines are too long
19
build/src/auth.js
Normal file
19
build/src/auth.js
Normal file
|
|
@ -0,0 +1,19 @@
|
|||
"use strict";
|
||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||
return new (P || (P = Promise))(function (resolve, reject) {
|
||||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||
});
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const serverOptions = {
|
||||
AdminAuthGuard: (authHeader) => __awaiter(void 0, void 0, void 0, function* () { console.log("admin auth login with header: " + authHeader); return { pub: "__pubkey__" }; }),
|
||||
GuestAuthGuard: (authHeader) => __awaiter(void 0, void 0, void 0, function* () { console.log("guest auth login with header: " + authHeader); return { token: "__token__" }; }),
|
||||
NoAuthAuthGuard: (_) => __awaiter(void 0, void 0, void 0, function* () { return ({}); }),
|
||||
encryptionCallback: (_, b) => b
|
||||
};
|
||||
exports.default = serverOptions;
|
||||
//# sourceMappingURL=auth.js.map
|
||||
1
build/src/auth.js.map
Normal file
1
build/src/auth.js.map
Normal file
|
|
@ -0,0 +1 @@
|
|||
{"version":3,"file":"auth.js","sourceRoot":"","sources":["../../src/auth.ts"],"names":[],"mappings":";;;;;;;;;;;AAEA,MAAM,aAAa,GAAkB;IACjC,cAAc,EAAE,CAAO,UAAU,EAAE,EAAE,kDAAG,OAAO,CAAC,GAAG,CAAC,gCAAgC,GAAG,UAAU,CAAC,CAAC,CAAC,OAAO,EAAE,GAAG,EAAE,YAAY,EAAE,CAAA,CAAC,CAAC,CAAA;IAClI,cAAc,EAAE,CAAO,UAAU,EAAE,EAAE,kDAAG,OAAO,CAAC,GAAG,CAAC,gCAAgC,GAAG,UAAU,CAAC,CAAC,CAAC,OAAO,EAAE,KAAK,EAAE,WAAW,EAAE,CAAA,CAAC,CAAC,CAAA;IACnI,eAAe,EAAE,CAAO,CAAC,EAAE,EAAE,kDAAC,OAAA,CAAC,EAAE,CAAC,CAAA,GAAA;IAClC,kBAAkB,EAAE,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC;CAClC,CAAA;AACD,kBAAe,aAAa,CAAA"}
|
||||
9
build/src/index.js
Normal file
9
build/src/index.js
Normal file
|
|
@ -0,0 +1,9 @@
|
|||
"use strict";
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
require("dotenv/config");
|
||||
const server_1 = __importDefault(require("./server"));
|
||||
server_1.default.Listen(3000);
|
||||
//# sourceMappingURL=index.js.map
|
||||
1
build/src/index.js.map
Normal file
1
build/src/index.js.map
Normal file
|
|
@ -0,0 +1 @@
|
|||
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":";;;;;AAAA,yBAAsB;AACtB,sDAA6B;AAG7B,gBAAM,CAAC,MAAM,CAAC,IAAI,CAAC,CAAA"}
|
||||
85
build/src/index.spec.js
Normal file
85
build/src/index.spec.js
Normal file
|
|
@ -0,0 +1,85 @@
|
|||
"use strict";
|
||||
var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
|
||||
var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
|
||||
if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
|
||||
else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
|
||||
return c > 3 && r && Object.defineProperty(target, key, r), r;
|
||||
};
|
||||
var __metadata = (this && this.__metadata) || function (k, v) {
|
||||
if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(k, v);
|
||||
};
|
||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||
return new (P || (P = Promise))(function (resolve, reject) {
|
||||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||
});
|
||||
};
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.ServerTestSuite = void 0;
|
||||
require("dotenv/config"); // TODO - test env
|
||||
const testyts_1 = require("testyts");
|
||||
const express_server_1 = __importDefault(require("../proto/autogenerated/ts/express_server"));
|
||||
const http_client_1 = __importDefault(require("../proto/autogenerated/ts/http_client"));
|
||||
const main_1 = __importDefault(require("./services/main"));
|
||||
const auth_1 = __importDefault(require("./auth"));
|
||||
const testPort = 4000;
|
||||
const server = (0, express_server_1.default)(main_1.default, Object.assign(Object.assign({}, auth_1.default), { throwErrors: true }));
|
||||
const client = (0, http_client_1.default)({
|
||||
baseUrl: `http://localhost:${testPort}`,
|
||||
retrieveAdminAuth: () => __awaiter(void 0, void 0, void 0, function* () { return (""); }),
|
||||
retrieveGuestAuth: () => __awaiter(void 0, void 0, void 0, function* () { return (""); }),
|
||||
retrieveNoAuthAuth: () => __awaiter(void 0, void 0, void 0, function* () { return (""); }),
|
||||
});
|
||||
let ServerTestSuite = class ServerTestSuite {
|
||||
startServer() {
|
||||
server.Listen(testPort);
|
||||
}
|
||||
stopServer() {
|
||||
server.Close();
|
||||
}
|
||||
health() {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
yield client.Health();
|
||||
});
|
||||
}
|
||||
getInfo() {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
console.log(yield client.LndGetInfo());
|
||||
});
|
||||
}
|
||||
};
|
||||
__decorate([
|
||||
(0, testyts_1.BeforeAll)(),
|
||||
__metadata("design:type", Function),
|
||||
__metadata("design:paramtypes", []),
|
||||
__metadata("design:returntype", void 0)
|
||||
], ServerTestSuite.prototype, "startServer", null);
|
||||
__decorate([
|
||||
(0, testyts_1.AfterAll)(),
|
||||
__metadata("design:type", Function),
|
||||
__metadata("design:paramtypes", []),
|
||||
__metadata("design:returntype", void 0)
|
||||
], ServerTestSuite.prototype, "stopServer", null);
|
||||
__decorate([
|
||||
(0, testyts_1.Test)(),
|
||||
__metadata("design:type", Function),
|
||||
__metadata("design:paramtypes", []),
|
||||
__metadata("design:returntype", Promise)
|
||||
], ServerTestSuite.prototype, "health", null);
|
||||
__decorate([
|
||||
(0, testyts_1.Test)(),
|
||||
__metadata("design:type", Function),
|
||||
__metadata("design:paramtypes", []),
|
||||
__metadata("design:returntype", Promise)
|
||||
], ServerTestSuite.prototype, "getInfo", null);
|
||||
ServerTestSuite = __decorate([
|
||||
(0, testyts_1.TestSuite)()
|
||||
], ServerTestSuite);
|
||||
exports.ServerTestSuite = ServerTestSuite;
|
||||
//# sourceMappingURL=index.spec.js.map
|
||||
1
build/src/index.spec.js.map
Normal file
1
build/src/index.spec.js.map
Normal file
|
|
@ -0,0 +1 @@
|
|||
{"version":3,"file":"index.spec.js","sourceRoot":"","sources":["../../src/index.spec.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AAAA,yBAAsB,CAAC,kBAAkB;AACzC,qCAAuE;AACvE,8FAAgE;AAChE,wFAA6D;AAC7D,2DAAsC;AACtC,kDAAmC;AACnC,MAAM,QAAQ,GAAG,IAAI,CAAA;AACrB,MAAM,MAAM,GAAG,IAAA,wBAAS,EAAC,cAAO,kCAAO,cAAa,KAAE,WAAW,EAAE,IAAI,IAAG,CAAA;AAC1E,MAAM,MAAM,GAAG,IAAA,qBAAS,EAAC;IACrB,OAAO,EAAE,oBAAoB,QAAQ,EAAE;IACvC,iBAAiB,EAAE,GAAS,EAAE,kDAAC,OAAA,CAAC,EAAE,CAAC,CAAA,GAAA;IACnC,iBAAiB,EAAE,GAAS,EAAE,kDAAC,OAAA,CAAC,EAAE,CAAC,CAAA,GAAA;IACnC,kBAAkB,EAAE,GAAS,EAAE,kDAAC,OAAA,CAAC,EAAE,CAAC,CAAA,GAAA;CACvC,CAAC,CAAA;AAEF,IAAa,eAAe,GAA5B,MAAa,eAAe;IAExB,WAAW;QACP,MAAM,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAA;IAC3B,CAAC;IAED,UAAU;QACN,MAAM,CAAC,KAAK,EAAE,CAAA;IAClB,CAAC;IAEK,MAAM;;YACR,MAAM,MAAM,CAAC,MAAM,EAAE,CAAA;QACzB,CAAC;KAAA;IAGK,OAAO;;YACT,OAAO,CAAC,GAAG,CAAC,MAAM,MAAM,CAAC,UAAU,EAAE,CAAC,CAAA;QAC1C,CAAC;KAAA;CACJ,CAAA;AAhBG;IADC,IAAA,mBAAS,GAAE;;;;kDAGX;AAED;IADC,IAAA,kBAAQ,GAAE;;;;iDAGV;AAED;IADC,IAAA,cAAI,GAAE;;;;6CAGN;AAGD;IADC,IAAA,cAAI,GAAE;;;;8CAGN;AAjBQ,eAAe;IAD3B,IAAA,mBAAS,GAAE;GACC,eAAe,CAkB3B;AAlBY,0CAAe"}
|
||||
10
build/src/server.js
Normal file
10
build/src/server.js
Normal file
|
|
@ -0,0 +1,10 @@
|
|||
"use strict";
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const express_server_1 = __importDefault(require("../proto/autogenerated/ts/express_server"));
|
||||
const main_1 = __importDefault(require("./services/main"));
|
||||
const auth_1 = __importDefault(require("./auth"));
|
||||
exports.default = (0, express_server_1.default)(main_1.default, auth_1.default);
|
||||
//# sourceMappingURL=server.js.map
|
||||
1
build/src/server.js.map
Normal file
1
build/src/server.js.map
Normal file
|
|
@ -0,0 +1 @@
|
|||
{"version":3,"file":"server.js","sourceRoot":"","sources":["../../src/server.ts"],"names":[],"mappings":";;;;;AAAA,8FAAgE;AAChE,2DAAqC;AACrC,kDAAmC;AACnC,kBAAe,IAAA,wBAAS,EAAC,cAAO,EAAE,cAAa,CAAC,CAAA"}
|
||||
82
build/src/server.spec.js
Normal file
82
build/src/server.spec.js
Normal file
|
|
@ -0,0 +1,82 @@
|
|||
"use strict";
|
||||
var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
|
||||
var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
|
||||
if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
|
||||
else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
|
||||
return c > 3 && r && Object.defineProperty(target, key, r), r;
|
||||
};
|
||||
var __metadata = (this && this.__metadata) || function (k, v) {
|
||||
if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(k, v);
|
||||
};
|
||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||
return new (P || (P = Promise))(function (resolve, reject) {
|
||||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||
});
|
||||
};
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.ServerTestSuite = void 0;
|
||||
require("dotenv/config"); // TODO - test env
|
||||
const testyts_1 = require("testyts");
|
||||
const http_client_1 = __importDefault(require("../proto/autogenerated/ts/http_client"));
|
||||
const server_1 = __importDefault(require("./server"));
|
||||
const testPort = 4000;
|
||||
const client = (0, http_client_1.default)({
|
||||
baseUrl: `http://localhost:${testPort}`,
|
||||
retrieveAdminAuth: () => __awaiter(void 0, void 0, void 0, function* () { return (""); }),
|
||||
retrieveGuestAuth: () => __awaiter(void 0, void 0, void 0, function* () { return (""); }),
|
||||
retrieveNoAuthAuth: () => __awaiter(void 0, void 0, void 0, function* () { return (""); }),
|
||||
});
|
||||
let ServerTestSuite = class ServerTestSuite {
|
||||
startServer() {
|
||||
server_1.default.Listen(testPort);
|
||||
}
|
||||
stopServer() {
|
||||
server_1.default.Close();
|
||||
}
|
||||
health() {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
yield client.Health();
|
||||
});
|
||||
}
|
||||
getInfo() {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
console.log(yield client.LndGetInfo());
|
||||
});
|
||||
}
|
||||
};
|
||||
__decorate([
|
||||
(0, testyts_1.BeforeAll)(),
|
||||
__metadata("design:type", Function),
|
||||
__metadata("design:paramtypes", []),
|
||||
__metadata("design:returntype", void 0)
|
||||
], ServerTestSuite.prototype, "startServer", null);
|
||||
__decorate([
|
||||
(0, testyts_1.AfterAll)(),
|
||||
__metadata("design:type", Function),
|
||||
__metadata("design:paramtypes", []),
|
||||
__metadata("design:returntype", void 0)
|
||||
], ServerTestSuite.prototype, "stopServer", null);
|
||||
__decorate([
|
||||
(0, testyts_1.Test)(),
|
||||
__metadata("design:type", Function),
|
||||
__metadata("design:paramtypes", []),
|
||||
__metadata("design:returntype", Promise)
|
||||
], ServerTestSuite.prototype, "health", null);
|
||||
__decorate([
|
||||
(0, testyts_1.Test)(),
|
||||
__metadata("design:type", Function),
|
||||
__metadata("design:paramtypes", []),
|
||||
__metadata("design:returntype", Promise)
|
||||
], ServerTestSuite.prototype, "getInfo", null);
|
||||
ServerTestSuite = __decorate([
|
||||
(0, testyts_1.TestSuite)()
|
||||
], ServerTestSuite);
|
||||
exports.ServerTestSuite = ServerTestSuite;
|
||||
//# sourceMappingURL=server.spec.js.map
|
||||
1
build/src/server.spec.js.map
Normal file
1
build/src/server.spec.js.map
Normal file
|
|
@ -0,0 +1 @@
|
|||
{"version":3,"file":"server.spec.js","sourceRoot":"","sources":["../../src/server.spec.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AAAA,yBAAsB,CAAC,kBAAkB;AACzC,qCAAuE;AACvE,wFAA6D;AAC7D,sDAA8C;AAC9C,MAAM,QAAQ,GAAG,IAAI,CAAA;AACrB,MAAM,MAAM,GAAG,IAAA,qBAAS,EAAC;IACrB,OAAO,EAAE,oBAAoB,QAAQ,EAAE;IACvC,iBAAiB,EAAE,GAAS,EAAE,kDAAC,OAAA,CAAC,EAAE,CAAC,CAAA,GAAA;IACnC,iBAAiB,EAAE,GAAS,EAAE,kDAAC,OAAA,CAAC,EAAE,CAAC,CAAA,GAAA;IACnC,kBAAkB,EAAE,GAAS,EAAE,kDAAC,OAAA,CAAC,EAAE,CAAC,CAAA,GAAA;CACvC,CAAC,CAAA;AAEF,IAAa,eAAe,GAA5B,MAAa,eAAe;IAExB,WAAW;QACP,gBAAM,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAA;IAC3B,CAAC;IAED,UAAU;QACN,gBAAM,CAAC,KAAK,EAAE,CAAA;IAClB,CAAC;IAEK,MAAM;;YACR,MAAM,MAAM,CAAC,MAAM,EAAE,CAAA;QACzB,CAAC;KAAA;IAGK,OAAO;;YACT,OAAO,CAAC,GAAG,CAAC,MAAM,MAAM,CAAC,UAAU,EAAE,CAAC,CAAA;QAC1C,CAAC;KAAA;CACJ,CAAA;AAhBG;IADC,IAAA,mBAAS,GAAE;;;;kDAGX;AAED;IADC,IAAA,kBAAQ,GAAE;;;;iDAGV;AAED;IADC,IAAA,cAAI,GAAE;;;;6CAGN;AAGD;IADC,IAAA,cAAI,GAAE;;;;8CAGN;AAjBQ,eAAe;IAD3B,IAAA,mBAAS,GAAE;GACC,eAAe,CAkB3B;AAlBY,0CAAe"}
|
||||
45
build/src/services/lnd/index.js
Normal file
45
build/src/services/lnd/index.js
Normal file
|
|
@ -0,0 +1,45 @@
|
|||
"use strict";
|
||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||
return new (P || (P = Promise))(function (resolve, reject) {
|
||||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||
});
|
||||
};
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
//const grpc = require('@grpc/grpc-js');
|
||||
const grpc_js_1 = require("@grpc/grpc-js");
|
||||
const grpc_transport_1 = require("@protobuf-ts/grpc-transport");
|
||||
const fs_1 = __importDefault(require("fs"));
|
||||
const rpc_client_1 = require("../../../proto/lnd/rpc.client");
|
||||
const invoices_client_1 = require("../../../proto/lnd/invoices.client");
|
||||
const router_client_1 = require("../../../proto/lnd/router.client");
|
||||
const lndAddr = process.env.LND_ADDRESS;
|
||||
const lndCertPath = process.env.LND_CERT_PATH;
|
||||
const lndMacaroonPath = process.env.LND_MACAROON_PATH;
|
||||
if (!lndAddr || !lndCertPath || !lndMacaroonPath) {
|
||||
throw new Error(`Something missing from ADDR/TLS/MACAROON`);
|
||||
}
|
||||
const lndCert = fs_1.default.readFileSync(lndCertPath);
|
||||
const macaroon = fs_1.default.readFileSync(lndMacaroonPath).toString('hex');
|
||||
const sslCreds = grpc_js_1.credentials.createSsl(lndCert);
|
||||
const macaroonCreds = grpc_js_1.credentials.createFromMetadataGenerator(function (args, callback) {
|
||||
let metadata = new grpc_js_1.Metadata();
|
||||
metadata.add('macaroon', macaroon);
|
||||
callback(null, metadata);
|
||||
});
|
||||
const creds = grpc_js_1.credentials.combineChannelCredentials(sslCreds, macaroonCreds);
|
||||
const transport = new grpc_transport_1.GrpcTransport({ host: lndAddr, channelCredentials: creds });
|
||||
const lightning = new rpc_client_1.LightningClient(transport);
|
||||
const invoices = new invoices_client_1.InvoicesClient(transport);
|
||||
const router = new router_client_1.RouterClient(transport);
|
||||
const DefaultMetadata = (deadline = 10 * 1000) => ({ deadline: Date.now() + deadline });
|
||||
exports.default = {
|
||||
getInfo: () => __awaiter(void 0, void 0, void 0, function* () { return (yield lightning.getInfo({}, DefaultMetadata())).response; })
|
||||
};
|
||||
//# sourceMappingURL=index.js.map
|
||||
1
build/src/services/lnd/index.js.map
Normal file
1
build/src/services/lnd/index.js.map
Normal file
|
|
@ -0,0 +1 @@
|
|||
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../../../src/services/lnd/index.ts"],"names":[],"mappings":";;;;;;;;;;;;;;AAAA,wCAAwC;AACxC,2CAAqD;AACrD,gEAA4D;AAC5D,4CAAmB;AACnB,8DAA+D;AAC/D,wEAAmE;AACnE,oEAA+D;AAG/D,MAAM,OAAO,GAAG,OAAO,CAAC,GAAG,CAAC,WAAW,CAAC;AACxC,MAAM,WAAW,GAAG,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC;AAC9C,MAAM,eAAe,GAAG,OAAO,CAAC,GAAG,CAAC,iBAAiB,CAAC;AACtD,IAAI,CAAC,OAAO,IAAI,CAAC,WAAW,IAAI,CAAC,eAAe,EAAE;IAC9C,MAAM,IAAI,KAAK,CAAC,0CAA0C,CAAC,CAAC;CAC/D;AACD,MAAM,OAAO,GAAG,YAAE,CAAC,YAAY,CAAC,WAAW,CAAC,CAAC;AAC7C,MAAM,QAAQ,GAAG,YAAE,CAAC,YAAY,CAAC,eAAe,CAAC,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC;AAClE,MAAM,QAAQ,GAAG,qBAAW,CAAC,SAAS,CAAC,OAAO,CAAC,CAAC;AAChD,MAAM,aAAa,GAAG,qBAAW,CAAC,2BAA2B,CACzD,UAAU,IAAS,EAAE,QAAa;IAC9B,IAAI,QAAQ,GAAG,IAAI,kBAAQ,EAAE,CAAC;IAC9B,QAAQ,CAAC,GAAG,CAAC,UAAU,EAAE,QAAQ,CAAC,CAAC;IACnC,QAAQ,CAAC,IAAI,EAAE,QAAQ,CAAC,CAAC;AAC7B,CAAC,CACJ,CAAC;AACF,MAAM,KAAK,GAAG,qBAAW,CAAC,yBAAyB,CAC/C,QAAQ,EACR,aAAa,CAChB,CAAC;AACF,MAAM,SAAS,GAAG,IAAI,8BAAa,CAAC,EAAE,IAAI,EAAE,OAAO,EAAE,kBAAkB,EAAE,KAAK,EAAE,CAAC,CAAA;AACjF,MAAM,SAAS,GAAG,IAAI,4BAAe,CAAC,SAAS,CAAC,CAAA;AAChD,MAAM,QAAQ,GAAG,IAAI,gCAAc,CAAC,SAAS,CAAC,CAAA;AAC9C,MAAM,MAAM,GAAG,IAAI,4BAAY,CAAC,SAAS,CAAC,CAAA;AAC1C,MAAM,eAAe,GAAG,CAAC,QAAQ,GAAG,EAAE,GAAG,IAAI,EAAE,EAAE,CAAC,CAAC,EAAE,QAAQ,EAAE,IAAI,CAAC,GAAG,EAAE,GAAG,QAAQ,EAAE,CAAC,CAAA;AAEvF,kBAAe;IACX,OAAO,EAAE,GAAmC,EAAE,kDAAC,OAAA,CAAC,MAAM,SAAS,CAAC,OAAO,CAAC,EAAE,EAAE,eAAe,EAAE,CAAC,CAAC,CAAC,QAAQ,CAAA,GAAA;CAC3G,CAAA"}
|
||||
25
build/src/services/main/index.js
Normal file
25
build/src/services/main/index.js
Normal file
|
|
@ -0,0 +1,25 @@
|
|||
"use strict";
|
||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||
return new (P || (P = Promise))(function (resolve, reject) {
|
||||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||
});
|
||||
};
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const lnd_1 = __importDefault(require("../lnd"));
|
||||
const methods = {
|
||||
EncryptionExchange: (ctx, req) => __awaiter(void 0, void 0, void 0, function* () { }),
|
||||
Health: (ctx) => __awaiter(void 0, void 0, void 0, function* () { }),
|
||||
LndGetInfo: (ctx) => __awaiter(void 0, void 0, void 0, function* () {
|
||||
const info = yield lnd_1.default.getInfo();
|
||||
return { alias: info.alias };
|
||||
})
|
||||
};
|
||||
exports.default = methods;
|
||||
//# sourceMappingURL=index.js.map
|
||||
1
build/src/services/main/index.js.map
Normal file
1
build/src/services/main/index.js.map
Normal file
|
|
@ -0,0 +1 @@
|
|||
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../../../src/services/main/index.ts"],"names":[],"mappings":";;;;;;;;;;;;;;AACA,iDAAwB;AACxB,MAAM,OAAO,GAAwB;IACjC,kBAAkB,EAAE,CAAO,GAAqC,EAAE,GAAoC,EAAiB,EAAE,kDAAG,CAAC,CAAA;IAC7H,MAAM,EAAE,CAAO,GAAyB,EAAiB,EAAE,kDAAG,CAAC,CAAA;IAC/D,UAAU,EAAE,CAAO,GAA6B,EAAqC,EAAE;QACnF,MAAM,IAAI,GAAG,MAAM,aAAG,CAAC,OAAO,EAAE,CAAA;QAChC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,KAAK,EAAE,CAAA;IAEhC,CAAC,CAAA;CACJ,CAAA;AACD,kBAAe,OAAO,CAAA"}
|
||||
35
build/types.js
Normal file
35
build/types.js
Normal file
|
|
@ -0,0 +1,35 @@
|
|||
"use strict";
|
||||
// This file was autogenerated from a .proto file, DO NOT EDIT!
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.EmptyValidate = exports.EmptyOptionalFields = exports.EncryptionExchangeRequestValidate = exports.EncryptionExchangeRequestOptionalFields = void 0;
|
||||
exports.EncryptionExchangeRequestOptionalFields = [];
|
||||
var EncryptionExchangeRequestValidate = function (o, opts, path) {
|
||||
if (opts === void 0) { opts = {}; }
|
||||
if (path === void 0) { path = 'EncryptionExchangeRequest::root.'; }
|
||||
if (opts.checkOptionalsAreSet && opts.allOptionalsAreSet)
|
||||
return new Error(path + ': only one of checkOptionalsAreSet or allOptionalNonDefault can be set for each message');
|
||||
if (typeof o !== 'object' || o === null)
|
||||
return new Error(path + ': object is not an instance of an object or is null');
|
||||
if (typeof o.public_key !== 'string')
|
||||
return new Error("".concat(path, ".public_key: is not a string"));
|
||||
if (opts.public_key_CustomCheck && !opts.public_key_CustomCheck(o.public_key))
|
||||
return new Error("".concat(path, ".public_key: custom check failed"));
|
||||
if (typeof o.device_id !== 'string')
|
||||
return new Error("".concat(path, ".device_id: is not a string"));
|
||||
if (opts.device_id_CustomCheck && !opts.device_id_CustomCheck(o.device_id))
|
||||
return new Error("".concat(path, ".device_id: custom check failed"));
|
||||
return null;
|
||||
};
|
||||
exports.EncryptionExchangeRequestValidate = EncryptionExchangeRequestValidate;
|
||||
exports.EmptyOptionalFields = [];
|
||||
var EmptyValidate = function (o, opts, path) {
|
||||
if (opts === void 0) { opts = {}; }
|
||||
if (path === void 0) { path = 'Empty::root.'; }
|
||||
if (opts.checkOptionalsAreSet && opts.allOptionalsAreSet)
|
||||
return new Error(path + ': only one of checkOptionalsAreSet or allOptionalNonDefault can be set for each message');
|
||||
if (typeof o !== 'object' || o === null)
|
||||
return new Error(path + ': object is not an instance of an object or is null');
|
||||
return null;
|
||||
};
|
||||
exports.EmptyValidate = EmptyValidate;
|
||||
//# sourceMappingURL=types.js.map
|
||||
1
build/types.js.map
Normal file
1
build/types.js.map
Normal file
|
|
@ -0,0 +1 @@
|
|||
{"version":3,"file":"types.js","sourceRoot":"","sources":["../proto/autogenerated/ts/types.ts"],"names":[],"mappings":";AAAA,+DAA+D;;;AA8BlD,QAAA,uCAAuC,GAAO,EAAE,CAAA;AAMtD,IAAM,iCAAiC,GAAG,UAAC,CAA6B,EAAE,IAA2C,EAAE,IAAiD;IAA9F,qBAAA,EAAA,SAA2C;IAAE,qBAAA,EAAA,yCAAiD;IAC3K,IAAI,IAAI,CAAC,oBAAoB,IAAI,IAAI,CAAC,kBAAkB;QAAE,OAAO,IAAI,KAAK,CAAC,IAAI,GAAG,yFAAyF,CAAC,CAAA;IAC5K,IAAI,OAAO,CAAC,KAAK,QAAQ,IAAI,CAAC,KAAK,IAAI;QAAE,OAAO,IAAI,KAAK,CAAC,IAAI,GAAG,qDAAqD,CAAC,CAAA;IAEvH,IAAI,OAAO,CAAC,CAAC,UAAU,KAAK,QAAQ;QAAE,OAAO,IAAI,KAAK,CAAC,UAAG,IAAI,iCAA8B,CAAC,CAAA;IAC7F,IAAI,IAAI,CAAC,sBAAsB,IAAI,CAAC,IAAI,CAAC,sBAAsB,CAAC,CAAC,CAAC,UAAU,CAAC;QAAE,OAAO,IAAI,KAAK,CAAC,UAAG,IAAI,qCAAkC,CAAC,CAAA;IAE1I,IAAI,OAAO,CAAC,CAAC,SAAS,KAAK,QAAQ;QAAE,OAAO,IAAI,KAAK,CAAC,UAAG,IAAI,gCAA6B,CAAC,CAAA;IAC3F,IAAI,IAAI,CAAC,qBAAqB,IAAI,CAAC,IAAI,CAAC,qBAAqB,CAAC,CAAC,CAAC,SAAS,CAAC;QAAE,OAAO,IAAI,KAAK,CAAC,UAAG,IAAI,oCAAiC,CAAC,CAAA;IAEtI,OAAO,IAAI,CAAA;AACf,CAAC,CAAA;AAXY,QAAA,iCAAiC,qCAW7C;AAIY,QAAA,mBAAmB,GAAO,EAAE,CAAA;AAIlC,IAAM,aAAa,GAAG,UAAC,CAAS,EAAE,IAAuB,EAAE,IAA6B;IAAtD,qBAAA,EAAA,SAAuB;IAAE,qBAAA,EAAA,qBAA6B;IAC3F,IAAI,IAAI,CAAC,oBAAoB,IAAI,IAAI,CAAC,kBAAkB;QAAE,OAAO,IAAI,KAAK,CAAC,IAAI,GAAG,yFAAyF,CAAC,CAAA;IAC5K,IAAI,OAAO,CAAC,KAAK,QAAQ,IAAI,CAAC,KAAK,IAAI;QAAE,OAAO,IAAI,KAAK,CAAC,IAAI,GAAG,qDAAqD,CAAC,CAAA;IAEvH,OAAO,IAAI,CAAA;AACf,CAAC,CAAA;AALY,QAAA,aAAa,iBAKzB"}
|
||||
|
|
@ -1,16 +0,0 @@
|
|||
version: "3.8"
|
||||
networks:
|
||||
default:
|
||||
external: true
|
||||
name: 2_default
|
||||
services:
|
||||
web:
|
||||
image: shockwallet/api:latest
|
||||
command: -c -h 0.0.0.0 -l polar-n2-alice:10009 -m /root/.lnd/data/chain/bitcoin/regtest/admin.macaroon -d /root/.lnd/tls.cert
|
||||
restart: on-failure
|
||||
stop_grace_period: 1m
|
||||
ports:
|
||||
- 9835:9835
|
||||
volumes:
|
||||
- C:\Users\boufn\.polar\networks\2\volumes\lnd\alice:/root/.lnd
|
||||
|
||||
|
|
@ -1,55 +0,0 @@
|
|||
const os = require("os");
|
||||
const path = require("path");
|
||||
const platform = os.platform();
|
||||
const homeDir = os.homedir();
|
||||
|
||||
const getLndDirectory = () => {
|
||||
if (platform === "darwin") {
|
||||
return homeDir + "/Library/Application Support/Lnd";
|
||||
} else if (platform === "win32") {
|
||||
// eslint-disable-next-line no-process-env
|
||||
const { APPDATA = "" } = process.env;
|
||||
return path.resolve(APPDATA, "../Local/Lnd");
|
||||
}
|
||||
|
||||
return homeDir + "/.lnd";
|
||||
};
|
||||
|
||||
const parsePath = (filePath = "") => {
|
||||
if (platform === "win32") {
|
||||
return filePath.replace("/", "\\");
|
||||
}
|
||||
|
||||
return filePath;
|
||||
};
|
||||
|
||||
const lndDirectory = getLndDirectory();
|
||||
|
||||
module.exports = (mainnet = false) => {
|
||||
const network = mainnet ? "mainnet" : "testnet";
|
||||
|
||||
return {
|
||||
serverPort: 9835,
|
||||
serverHost: "localhost",
|
||||
lndAddress: "127.0.0.1:9735",
|
||||
maxNumRoutesToQuery: 20,
|
||||
lndProto: parsePath(`${__dirname}/rpc.proto`),
|
||||
routerProto: parsePath(`${__dirname}/router.proto`),
|
||||
invoicesProto: parsePath(`${__dirname}/invoices.proto`),
|
||||
walletUnlockerProto: parsePath(`${__dirname}/walletunlocker.proto`),
|
||||
lndHost: "localhost:10009",
|
||||
lndCertPath: parsePath(`${lndDirectory}/tls.cert`),
|
||||
macaroonPath: parsePath(
|
||||
`${lndDirectory}/data/chain/bitcoin/${network}/admin.macaroon`
|
||||
),
|
||||
dataPath: parsePath(`${lndDirectory}/data`),
|
||||
loglevel: "info",
|
||||
logfile: "shockapi.log",
|
||||
lndLogFile: parsePath(`${lndDirectory}/logs/bitcoin/${network}/lnd.log`),
|
||||
lndDirPath: lndDirectory,
|
||||
peers: ['https://gun.shock.network/gun','https://gun-eu.shock.network/gun'],
|
||||
useTLS: false,
|
||||
tokenExpirationMS: 259200000,
|
||||
localtunnelHost:'https://tunnel.rip'
|
||||
};
|
||||
};
|
||||
|
|
@ -1,58 +0,0 @@
|
|||
/** @prettier */
|
||||
|
||||
const { createLogger, transports, format } = require('winston')
|
||||
const util = require('util')
|
||||
require('winston-daily-rotate-file')
|
||||
|
||||
// @ts-ignore
|
||||
const transform = info => {
|
||||
const args = info[Symbol.for('splat')]
|
||||
if (args) {
|
||||
return { ...info, message: util.format(info.message, ...args) }
|
||||
}
|
||||
return info
|
||||
}
|
||||
|
||||
const logFormatter = () => ({ transform })
|
||||
|
||||
const formatter = format.combine(
|
||||
format.colorize(),
|
||||
format.errors({ stack: true }),
|
||||
logFormatter(),
|
||||
format.prettyPrint(),
|
||||
format.timestamp(),
|
||||
format.simple(),
|
||||
format.align(),
|
||||
format.printf(info => {
|
||||
const { timestamp, level, message, stack, exception } = info
|
||||
|
||||
const ts = timestamp.slice(0, 19).replace('T', ' ')
|
||||
const isObject = typeof message === 'object'
|
||||
const formattedJson = isObject ? JSON.stringify(message, null, 2) : message
|
||||
const formattedException = exception ? exception.stack : ''
|
||||
const errorMessage = stack || formattedException
|
||||
const formattedMessage = errorMessage ? errorMessage : formattedJson
|
||||
|
||||
return `${ts} [${level}]: ${formattedMessage}`
|
||||
})
|
||||
)
|
||||
|
||||
const Logger = createLogger({
|
||||
format: formatter,
|
||||
transports: [
|
||||
new transports.DailyRotateFile({
|
||||
filename: 'shockapi.log',
|
||||
datePattern: 'yyyy-MM-DD',
|
||||
// https://github.com/winstonjs/winston-daily-rotate-file/issues/188
|
||||
json: false,
|
||||
maxSize: 1000000,
|
||||
maxFiles: 7,
|
||||
handleExceptions: true
|
||||
}),
|
||||
new transports.Console({
|
||||
handleExceptions: true
|
||||
})
|
||||
]
|
||||
})
|
||||
|
||||
module.exports = Logger
|
||||
|
|
@ -1,238 +0,0 @@
|
|||
syntax = "proto3";
|
||||
|
||||
import "rpc.proto";
|
||||
|
||||
package lnrpc;
|
||||
|
||||
option go_package = "github.com/lightningnetwork/lnd/lnrpc";
|
||||
|
||||
/*
|
||||
* Comments in this file will be directly parsed into the API
|
||||
* Documentation as descriptions of the associated method, message, or field.
|
||||
* These descriptions should go right above the definition of the object, and
|
||||
* can be in either block or // comment format.
|
||||
*
|
||||
* An RPC method can be matched to an lncli command by placing a line in the
|
||||
* beginning of the description in exactly the following format:
|
||||
* lncli: `methodname`
|
||||
*
|
||||
* Failure to specify the exact name of the command will cause documentation
|
||||
* generation to fail.
|
||||
*
|
||||
* More information on how exactly the gRPC documentation is generated from
|
||||
* this proto file can be found here:
|
||||
* https://github.com/lightninglabs/lightning-api
|
||||
*/
|
||||
|
||||
// WalletUnlocker is a service that is used to set up a wallet password for
|
||||
// lnd at first startup, and unlock a previously set up wallet.
|
||||
service WalletUnlocker {
|
||||
/*
|
||||
GenSeed is the first method that should be used to instantiate a new lnd
|
||||
instance. This method allows a caller to generate a new aezeed cipher seed
|
||||
given an optional passphrase. If provided, the passphrase will be necessary
|
||||
to decrypt the cipherseed to expose the internal wallet seed.
|
||||
|
||||
Once the cipherseed is obtained and verified by the user, the InitWallet
|
||||
method should be used to commit the newly generated seed, and create the
|
||||
wallet.
|
||||
*/
|
||||
rpc GenSeed (GenSeedRequest) returns (GenSeedResponse);
|
||||
|
||||
/*
|
||||
InitWallet is used when lnd is starting up for the first time to fully
|
||||
initialize the daemon and its internal wallet. At the very least a wallet
|
||||
password must be provided. This will be used to encrypt sensitive material
|
||||
on disk.
|
||||
|
||||
In the case of a recovery scenario, the user can also specify their aezeed
|
||||
mnemonic and passphrase. If set, then the daemon will use this prior state
|
||||
to initialize its internal wallet.
|
||||
|
||||
Alternatively, this can be used along with the GenSeed RPC to obtain a
|
||||
seed, then present it to the user. Once it has been verified by the user,
|
||||
the seed can be fed into this RPC in order to commit the new wallet.
|
||||
*/
|
||||
rpc InitWallet (InitWalletRequest) returns (InitWalletResponse);
|
||||
|
||||
/* lncli: `unlock`
|
||||
UnlockWallet is used at startup of lnd to provide a password to unlock
|
||||
the wallet database.
|
||||
*/
|
||||
rpc UnlockWallet (UnlockWalletRequest) returns (UnlockWalletResponse);
|
||||
|
||||
/* lncli: `changepassword`
|
||||
ChangePassword changes the password of the encrypted wallet. This will
|
||||
automatically unlock the wallet database if successful.
|
||||
*/
|
||||
rpc ChangePassword (ChangePasswordRequest) returns (ChangePasswordResponse);
|
||||
}
|
||||
|
||||
message GenSeedRequest {
|
||||
/*
|
||||
aezeed_passphrase is an optional user provided passphrase that will be used
|
||||
to encrypt the generated aezeed cipher seed. When using REST, this field
|
||||
must be encoded as base64.
|
||||
*/
|
||||
bytes aezeed_passphrase = 1;
|
||||
|
||||
/*
|
||||
seed_entropy is an optional 16-bytes generated via CSPRNG. If not
|
||||
specified, then a fresh set of randomness will be used to create the seed.
|
||||
When using REST, this field must be encoded as base64.
|
||||
*/
|
||||
bytes seed_entropy = 2;
|
||||
}
|
||||
message GenSeedResponse {
|
||||
/*
|
||||
cipher_seed_mnemonic is a 24-word mnemonic that encodes a prior aezeed
|
||||
cipher seed obtained by the user. This field is optional, as if not
|
||||
provided, then the daemon will generate a new cipher seed for the user.
|
||||
Otherwise, then the daemon will attempt to recover the wallet state linked
|
||||
to this cipher seed.
|
||||
*/
|
||||
repeated string cipher_seed_mnemonic = 1;
|
||||
|
||||
/*
|
||||
enciphered_seed are the raw aezeed cipher seed bytes. This is the raw
|
||||
cipher text before run through our mnemonic encoding scheme.
|
||||
*/
|
||||
bytes enciphered_seed = 2;
|
||||
}
|
||||
|
||||
message InitWalletRequest {
|
||||
/*
|
||||
wallet_password is the passphrase that should be used to encrypt the
|
||||
wallet. This MUST be at least 8 chars in length. After creation, this
|
||||
password is required to unlock the daemon. When using REST, this field
|
||||
must be encoded as base64.
|
||||
*/
|
||||
bytes wallet_password = 1;
|
||||
|
||||
/*
|
||||
cipher_seed_mnemonic is a 24-word mnemonic that encodes a prior aezeed
|
||||
cipher seed obtained by the user. This may have been generated by the
|
||||
GenSeed method, or be an existing seed.
|
||||
*/
|
||||
repeated string cipher_seed_mnemonic = 2;
|
||||
|
||||
/*
|
||||
aezeed_passphrase is an optional user provided passphrase that will be used
|
||||
to encrypt the generated aezeed cipher seed. When using REST, this field
|
||||
must be encoded as base64.
|
||||
*/
|
||||
bytes aezeed_passphrase = 3;
|
||||
|
||||
/*
|
||||
recovery_window is an optional argument specifying the address lookahead
|
||||
when restoring a wallet seed. The recovery window applies to each
|
||||
individual branch of the BIP44 derivation paths. Supplying a recovery
|
||||
window of zero indicates that no addresses should be recovered, such after
|
||||
the first initialization of the wallet.
|
||||
*/
|
||||
int32 recovery_window = 4;
|
||||
|
||||
/*
|
||||
channel_backups is an optional argument that allows clients to recover the
|
||||
settled funds within a set of channels. This should be populated if the
|
||||
user was unable to close out all channels and sweep funds before partial or
|
||||
total data loss occurred. If specified, then after on-chain recovery of
|
||||
funds, lnd begin to carry out the data loss recovery protocol in order to
|
||||
recover the funds in each channel from a remote force closed transaction.
|
||||
*/
|
||||
ChanBackupSnapshot channel_backups = 5;
|
||||
|
||||
/*
|
||||
stateless_init is an optional argument instructing the daemon NOT to create
|
||||
any *.macaroon files in its filesystem. If this parameter is set, then the
|
||||
admin macaroon returned in the response MUST be stored by the caller of the
|
||||
RPC as otherwise all access to the daemon will be lost!
|
||||
*/
|
||||
bool stateless_init = 6;
|
||||
}
|
||||
message InitWalletResponse {
|
||||
/*
|
||||
The binary serialized admin macaroon that can be used to access the daemon
|
||||
after creating the wallet. If the stateless_init parameter was set to true,
|
||||
this is the ONLY copy of the macaroon and MUST be stored safely by the
|
||||
caller. Otherwise a copy of this macaroon is also persisted on disk by the
|
||||
daemon, together with other macaroon files.
|
||||
*/
|
||||
bytes admin_macaroon = 1;
|
||||
}
|
||||
|
||||
message UnlockWalletRequest {
|
||||
/*
|
||||
wallet_password should be the current valid passphrase for the daemon. This
|
||||
will be required to decrypt on-disk material that the daemon requires to
|
||||
function properly. When using REST, this field must be encoded as base64.
|
||||
*/
|
||||
bytes wallet_password = 1;
|
||||
|
||||
/*
|
||||
recovery_window is an optional argument specifying the address lookahead
|
||||
when restoring a wallet seed. The recovery window applies to each
|
||||
individual branch of the BIP44 derivation paths. Supplying a recovery
|
||||
window of zero indicates that no addresses should be recovered, such after
|
||||
the first initialization of the wallet.
|
||||
*/
|
||||
int32 recovery_window = 2;
|
||||
|
||||
/*
|
||||
channel_backups is an optional argument that allows clients to recover the
|
||||
settled funds within a set of channels. This should be populated if the
|
||||
user was unable to close out all channels and sweep funds before partial or
|
||||
total data loss occurred. If specified, then after on-chain recovery of
|
||||
funds, lnd begin to carry out the data loss recovery protocol in order to
|
||||
recover the funds in each channel from a remote force closed transaction.
|
||||
*/
|
||||
ChanBackupSnapshot channel_backups = 3;
|
||||
|
||||
/*
|
||||
stateless_init is an optional argument instructing the daemon NOT to create
|
||||
any *.macaroon files in its file system.
|
||||
*/
|
||||
bool stateless_init = 4;
|
||||
}
|
||||
message UnlockWalletResponse {
|
||||
}
|
||||
|
||||
message ChangePasswordRequest {
|
||||
/*
|
||||
current_password should be the current valid passphrase used to unlock the
|
||||
daemon. When using REST, this field must be encoded as base64.
|
||||
*/
|
||||
bytes current_password = 1;
|
||||
|
||||
/*
|
||||
new_password should be the new passphrase that will be needed to unlock the
|
||||
daemon. When using REST, this field must be encoded as base64.
|
||||
*/
|
||||
bytes new_password = 2;
|
||||
|
||||
/*
|
||||
stateless_init is an optional argument instructing the daemon NOT to create
|
||||
any *.macaroon files in its filesystem. If this parameter is set, then the
|
||||
admin macaroon returned in the response MUST be stored by the caller of the
|
||||
RPC as otherwise all access to the daemon will be lost!
|
||||
*/
|
||||
bool stateless_init = 3;
|
||||
|
||||
/*
|
||||
new_macaroon_root_key is an optional argument instructing the daemon to
|
||||
rotate the macaroon root key when set to true. This will invalidate all
|
||||
previously generated macaroons.
|
||||
*/
|
||||
bool new_macaroon_root_key = 4;
|
||||
}
|
||||
message ChangePasswordResponse {
|
||||
/*
|
||||
The binary serialized admin macaroon that can be used to access the daemon
|
||||
after rotating the macaroon root key. If both the stateless_init and
|
||||
new_macaroon_root_key parameter were set to true, this is the ONLY copy of
|
||||
the macaroon that was created from the new root key and MUST be stored
|
||||
safely by the caller. Otherwise a copy of this macaroon is also persisted on
|
||||
disk by the daemon, together with other macaroon files.
|
||||
*/
|
||||
bytes admin_macaroon = 1;
|
||||
}
|
||||
|
|
@ -1,35 +0,0 @@
|
|||
module.exports = {
|
||||
/**
|
||||
* @param {string} path
|
||||
*/
|
||||
MACAROON_PATH: path => `
|
||||
The specified macaroon path "${path}" was not found.
|
||||
This issue can be caused by:
|
||||
|
||||
1. Setting an invalid path for your Macaroon file.
|
||||
2. Not initializing your wallet before using the ShockAPI
|
||||
`,
|
||||
/**
|
||||
* @param {string} path
|
||||
*/
|
||||
CERT_PATH: path => `
|
||||
The specified LND certificate file "${path}" was not found.
|
||||
This issue can be caused by:
|
||||
|
||||
1. Setting an invalid path for your Certificates.
|
||||
2. Not initializing your wallet before using the ShockAPI
|
||||
`,
|
||||
CERT_MISSING: () =>
|
||||
"Required LND certificate path missing from application configuration.",
|
||||
/**
|
||||
* @param {string|null} macaroonPath
|
||||
* @param {string} lndCertPath
|
||||
*/
|
||||
CERT_AND_MACAROON_MISSING: (macaroonPath, lndCertPath) =>
|
||||
`
|
||||
You neither specified an LND cert path nor a Macaroon path. Please make sure both files exist in the paths you've specified:
|
||||
|
||||
Macaroon Path: ${macaroonPath ? macaroonPath : "N/A"}
|
||||
LND Certificates path: ${lndCertPath ? lndCertPath : "N/A"}
|
||||
`
|
||||
};
|
||||
|
|
@ -1,5 +0,0 @@
|
|||
#!/bin/ash
|
||||
node main -h 0.0.0.0 \
|
||||
-m admin.macaroon \
|
||||
-d tls.cert \
|
||||
-l $LND_ADDR
|
||||
78
guntest.html
78
guntest.html
|
|
@ -1,78 +0,0 @@
|
|||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||
<meta http-equiv="X-UA-Compatible" content="ie=edge">
|
||||
<title>Document</title>
|
||||
</head>
|
||||
<body>
|
||||
|
||||
</body>
|
||||
<script src="node_modules/gun/gun.js"></script>
|
||||
<script src="node_modules/gun/sea.js"></script>
|
||||
<script src="node_modules/gun/lib/open.js"></script>
|
||||
<script src="node_modules/gun/lib/open.js"></script>
|
||||
<script src="node_modules/gun/lib/load.js"></script>
|
||||
<script src="node_modules/gun/lib/unset.js"></script>
|
||||
<script src="node_modules/gun/lib/promise.js"></script>
|
||||
<script src="node_modules/gun/lib/then.js"></script>
|
||||
<script src="node_modules/gun/nts.js"></script>
|
||||
<script>
|
||||
|
||||
gun = Gun({
|
||||
peers: ['https://gun.shock.network/gun','https://gun-eu.shock.network/gun'],
|
||||
axe: false
|
||||
})
|
||||
|
||||
setInterval(() => {
|
||||
console.log('peers', Object.keys(gun.back('opt').peers))
|
||||
},5000)
|
||||
|
||||
user = gun.user()
|
||||
|
||||
node = gun.get('foo').get('bar')
|
||||
|
||||
capdog = gun.user('qsgziGQS99sPUxV1CRwwRckn9cG6cJ3prbDsrbL7qko.oRbCaVKwJFQURWrS1pFhkfAzrkEvkQgBRIUz9uoWtrg')
|
||||
explorador = gun.user(`zBQkPb1ohbdjVp_29TKFXyv_0g3amKgRJRqKr0E-Oyk.yB1P4UmOrzkGuPEL5zUgLETJWyYpM9K3l2ycNlt8jiY`)
|
||||
pleb = gun.user(`e1C60yZ1Cm3Mkceq7L9SmH6QQ7zsDdbibPFeQz7tNsk._1VlqJNo8BIJmzz2D5WELiMiRjBh3DBlDvzC6fNltZw`)
|
||||
boblazar = gun.user(`g6fcZ_1zyFwV1jR1eNK1GTUr2sSlEDL1D5vBsSvKoKg.2OA9MQHO2c1wjv6L-VPBFf36EZXjgQ1nnZFbOE9_5-o`)
|
||||
|
||||
const UPPER = 100
|
||||
|
||||
clearSet = (node) => {
|
||||
node.once((map) => {
|
||||
Object.keys(map).forEach(key => node.get(key).put(null))
|
||||
}, { wait: 1500 })
|
||||
}
|
||||
|
||||
put = async () => {
|
||||
const res = await fetch(`https://jsonplaceholder.typicode.com/posts`)
|
||||
/** @type {Array<any>} */
|
||||
const data = await res.json()
|
||||
|
||||
const obj = {}
|
||||
|
||||
data.slice(0, UPPER).forEach((v, i) => obj[i] = v)
|
||||
|
||||
node.put(obj, ack => {
|
||||
console.log(ack.err ? `err: ${ack.err}` : 'ok')
|
||||
})
|
||||
}
|
||||
|
||||
erase = () => {
|
||||
(new Array(UPPER)).fill(null).map((_, i) => i).forEach(n => {
|
||||
node.get(n).put(null, ack => {
|
||||
console.log(ack.err ? `err: ${ack.err}` : 'ok')
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
</script>
|
||||
<style>
|
||||
|
||||
body {
|
||||
background-color: black;
|
||||
}
|
||||
</style>
|
||||
</html>
|
||||
27
main.js
27
main.js
|
|
@ -1,27 +0,0 @@
|
|||
const program = require("commander");
|
||||
|
||||
const {version} = (JSON.parse(require('fs').readFileSync("./package.json", "utf-8")))
|
||||
|
||||
// parse command line parameters
|
||||
program
|
||||
.version(version)
|
||||
.option("-s, --serverport [port]", "web server http listening port (defaults to 9835)")
|
||||
.option("-h, --serverhost [host]", "web server listening host (defaults to localhost)")
|
||||
.option("-l, --lndhost [host:port]", "RPC lnd host (defaults to localhost:10009)")
|
||||
.option("-u, --user [login]", "basic authentication login")
|
||||
.option("-p, --pwd [password]", "basic authentication password")
|
||||
.option("-m, --macaroon-path [file path]", "path to admin.macaroon file")
|
||||
.option("-d, --lnd-cert-path [file path]", "path to LND cert file")
|
||||
.option("-f, --logfile [file path]", "path to file where to store the application logs")
|
||||
.option("-e, --loglevel [level]", "level of logs to display (debug, info, warn, error)")
|
||||
.option("-k, --le-email [email]", "lets encrypt required contact email")
|
||||
.option("-c, --mainnet", "run server on mainnet mode")
|
||||
.option("-t, --tunnel","create a localtunnel to listen behind a firewall")
|
||||
.option('-r, --lndaddress', 'Lnd address, defaults to 127.0.0.1:9735')
|
||||
.option('-a, --use-TLS', 'use TLS')
|
||||
.option('-i, --https-cert [path]', 'HTTPS certificate path')
|
||||
.option('-y, --https-cert-key [path]', 'HTTPS certificate key path')
|
||||
.parse(process.argv);
|
||||
|
||||
// load server
|
||||
require("./src/server")(program); // Standard server version
|
||||
|
|
@ -1,6 +0,0 @@
|
|||
{
|
||||
"watch": ["src/", "services/", "utils/", "constants/", "config/"],
|
||||
"ignore": ["node_modules/", ".git", "radata/", ".storage/", "*.log.*"],
|
||||
"verbose": true,
|
||||
"ext": "js"
|
||||
}
|
||||
4457
package-lock.json
generated
Normal file
4457
package-lock.json
generated
Normal file
File diff suppressed because it is too large
Load diff
146
package.json
146
package.json
|
|
@ -1,118 +1,52 @@
|
|||
{
|
||||
"name": "shockapi",
|
||||
"version": "2021.9.19",
|
||||
"name": "lightning.pub",
|
||||
"version": "1.0.0",
|
||||
"description": "",
|
||||
"main": "src/server.js",
|
||||
"main": "index.js",
|
||||
"scripts": {
|
||||
"start": "node main.js -h 0.0.0.0 -c",
|
||||
"dev": "node --trace-warnings --max-old-space-size=4096 main.js -h 0.0.0.0",
|
||||
"dev:watch": "nodemon main.js -- -h 0.0.0.0",
|
||||
"dev:attach": "node --inspect --trace-warnings --max-old-space-size=4096 main.js -h 0.0.0.0",
|
||||
"test": "mocha ./utils -b -t 50000 --recursive",
|
||||
"typecheck": "tsc",
|
||||
"lint": "eslint \"services/gunDB/**/*.js\"",
|
||||
"format": "prettier --write \"./**/*.js\"",
|
||||
"test:gun": "ts-node src/__gun__tests__/*.ts && rimraf -rf GUN-TEST-*",
|
||||
"test:gun:epub": "node testscript.js on capdog.epub"
|
||||
"test": "tsc && testyts",
|
||||
"start": "tsc && ts-node src/index",
|
||||
"build_autogenerated": "cd proto && rimraf autogenerated && protoc -I ./service --pub_out=. service/*",
|
||||
"build_lnd_client_1": "cd proto && protoc -I ./others --plugin=.\\node_modules\\.bin\\protoc-gen-ts_proto.cmd --ts_proto_out=./lnd --ts_proto_opt=esModuleInterop=true others/* ",
|
||||
"build_lnd_client": "cd proto && rimraf lnd/* && npx protoc --ts_out ./lnd --ts_opt long_type_string --proto_path others others/* "
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git+https://github.com/shocknet/Lightning.Pub.git"
|
||||
},
|
||||
"author": "",
|
||||
"license": "ISC",
|
||||
"bugs": {
|
||||
"url": "https://github.com/shocknet/Lightning.Pub/issues"
|
||||
},
|
||||
"homepage": "https://github.com/shocknet/Lightning.Pub#readme",
|
||||
"dependencies": {
|
||||
"@grpc/grpc-js": "^1.2.2",
|
||||
"@grpc/proto-loader": "^0.5.5",
|
||||
"assert-never": "^1.2.1",
|
||||
"axios": "^0.21.1",
|
||||
"basic-auth": "^2.0.0",
|
||||
"big.js": "^5.2.2",
|
||||
"bitcore-lib": "^0.15.0",
|
||||
"bluebird": "^3.7.2",
|
||||
"body-parser": "^1.16.0",
|
||||
"colors": "^1.4.0",
|
||||
"command-exists": "^1.2.6",
|
||||
"commander": "^2.9.0",
|
||||
"compression": "^1.7.4",
|
||||
"cors": "^2.8.5",
|
||||
"debug": "^3.1.0",
|
||||
"dotenv": "^8.1.0",
|
||||
"@grpc/grpc-js": "^1.6.7",
|
||||
"@protobuf-ts/grpc-transport": "^2.5.0",
|
||||
"@protobuf-ts/plugin": "^2.5.0",
|
||||
"@protobuf-ts/runtime": "^2.5.0",
|
||||
"@types/express": "^4.17.13",
|
||||
"@types/node": "^17.0.31",
|
||||
"@types/secp256k1": "^4.0.3",
|
||||
"axios": "^0.27.2",
|
||||
"copyfiles": "^2.4.1",
|
||||
"dotenv": "^16.0.0",
|
||||
"eccrypto": "^1.1.6",
|
||||
"express": "^4.14.1",
|
||||
"express-session": "^1.17.1",
|
||||
"google-proto-files": "^1.0.3",
|
||||
"graphviz": "0.0.8",
|
||||
"gun": "amark/gun#77162fcb68eb61f24d980fa3f3653598f56ee593",
|
||||
"husky": "^4.2.5",
|
||||
"hybrid-relay-client": "git://github.com/shocknet/hybridRelayClient#a99e57794cf7a62f0f5b6aef53a35d6b77d0a889",
|
||||
"jsonfile": "^4.0.0",
|
||||
"jsonwebtoken": "^8.3.0",
|
||||
"localtunnel": "git://github.com/shocknet/localtunnel#40cc2c2a46b05da2217bf2e20da11a5343a5cce7",
|
||||
"express": "^4.18.1",
|
||||
"grpc-tools": "^1.11.2",
|
||||
"lodash": "^4.17.21",
|
||||
"method-override": "^2.3.7",
|
||||
"node-fetch": "^2.6.1",
|
||||
"node-persist": "^3.1.0",
|
||||
"promise": "^8.1.0",
|
||||
"qrcode-terminal": "^0.12.0",
|
||||
"ramda": "^0.27.1",
|
||||
"request": "^2.88.2",
|
||||
"request-promise": "^4.2.6",
|
||||
"response-time": "^2.3.2",
|
||||
"shelljs": "^0.8.2",
|
||||
"shock-common": "^37.0.0",
|
||||
"socket.io": "4.0.1",
|
||||
"socket.io-msgpack-parser": "^3.0.1",
|
||||
"text-encoding": "^0.7.0",
|
||||
"tingodb": "^0.6.1",
|
||||
"uuid": "3.x.x",
|
||||
"winston": "^3.3.3",
|
||||
"winston-daily-rotate-file": "^4.5.0"
|
||||
"rimraf": "^3.0.2",
|
||||
"rxjs": "^7.5.5",
|
||||
"secp256k1": "^4.0.3",
|
||||
"ts-node": "^10.7.0",
|
||||
"ts-proto": "^1.112.1",
|
||||
"typescript": "^4.6.4",
|
||||
"uuid": "^8.3.2"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@babel/plugin-proposal-class-properties": "^7.12.1",
|
||||
"@types/bluebird": "^3.5.32",
|
||||
"@types/dotenv": "^6.1.1",
|
||||
"@types/eccrypto": "^1.1.2",
|
||||
"@types/express": "^4.17.1",
|
||||
"@types/gun": "^0.9.2",
|
||||
"@types/jsonwebtoken": "^8.3.7",
|
||||
"@types/lodash": "^4.14.168",
|
||||
"@types/mocha": "^9.0.0",
|
||||
"@types/node-fetch": "^2.5.8",
|
||||
"@types/node-persist": "^3.1.1",
|
||||
"@types/ramda": "types/npm-ramda#dist",
|
||||
"@types/random-words": "^1.1.2",
|
||||
"@types/react": "16.x.x",
|
||||
"@types/uuid": "3.x.x",
|
||||
"babel-eslint": "^10.1.0",
|
||||
"babel-plugin-transform-es2015-modules-commonjs": "^6.26.2",
|
||||
"eslint": "^6.6.0",
|
||||
"eslint-config-prettier": "^6.5.0",
|
||||
"eslint-plugin-babel": "^5.3.1",
|
||||
"eslint-plugin-mocha": "^9.0.0",
|
||||
"eslint-plugin-prettier": "^3.1.4",
|
||||
"expect": "^27.2.1",
|
||||
"lint-staged": "^10.2.2",
|
||||
"mocha": "^9.1.1",
|
||||
"nodemon": "^2.0.7",
|
||||
"prettier": "^1.18.2",
|
||||
"random-words": "^1.1.1",
|
||||
"rimraf": "^3.0.2",
|
||||
"ts-node": "^9.1.1",
|
||||
"ts-type": "^1.2.16",
|
||||
"typescript": "^4.5.4"
|
||||
},
|
||||
"lint-staged": {
|
||||
"*.js": [
|
||||
"prettier --check",
|
||||
"eslint"
|
||||
],
|
||||
"*.ts": [
|
||||
"prettier --check"
|
||||
]
|
||||
},
|
||||
"husky": {
|
||||
"hooks": {}
|
||||
},
|
||||
"engines": {
|
||||
"npm": "Use yarn!"
|
||||
},
|
||||
"packageManager": "yarn@3.1.1"
|
||||
"@types/eccrypto": "^1.1.3",
|
||||
"@types/lodash": "^4.14.182",
|
||||
"@types/uuid": "^8.3.4",
|
||||
"testyts": "^1.5.0"
|
||||
}
|
||||
}
|
||||
|
|
|
|||
134
proto/autogenerated/debug.txt
Normal file
134
proto/autogenerated/debug.txt
Normal file
|
|
@ -0,0 +1,134 @@
|
|||
([]*main.Method) (len=3 cap=4) {
|
||||
(*main.Method)(0xc00029e730)({
|
||||
in: (main.MethodMessage) {
|
||||
name: (string) (len=5) "Empty",
|
||||
hasZeroFields: (bool) true
|
||||
},
|
||||
name: (string) (len=6) "Health",
|
||||
out: (main.MethodMessage) {
|
||||
name: (string) (len=5) "Empty",
|
||||
hasZeroFields: (bool) true
|
||||
},
|
||||
opts: (*main.methodOptions)(0xc000039b60)({
|
||||
authType: (*main.supportedAuth)(0xc0003a2b40)({
|
||||
id: (string) (len=7) "no_auth",
|
||||
name: (string) (len=6) "NoAuth",
|
||||
encrypted: (bool) false,
|
||||
context: (map[string]string) {
|
||||
}
|
||||
}),
|
||||
method: (string) (len=3) "get",
|
||||
route: (main.decodedRoute) {
|
||||
route: (string) (len=7) "/health",
|
||||
params: ([]string) <nil>
|
||||
},
|
||||
query: ([]string) <nil>
|
||||
})
|
||||
}),
|
||||
(*main.Method)(0xc00029e780)({
|
||||
in: (main.MethodMessage) {
|
||||
name: (string) (len=25) "EncryptionExchangeRequest",
|
||||
hasZeroFields: (bool) false
|
||||
},
|
||||
name: (string) (len=18) "EncryptionExchange",
|
||||
out: (main.MethodMessage) {
|
||||
name: (string) (len=5) "Empty",
|
||||
hasZeroFields: (bool) true
|
||||
},
|
||||
opts: (*main.methodOptions)(0xc000039ce0)({
|
||||
authType: (*main.supportedAuth)(0xc0003a2c00)({
|
||||
id: (string) (len=7) "no_auth",
|
||||
name: (string) (len=6) "NoAuth",
|
||||
encrypted: (bool) false,
|
||||
context: (map[string]string) {
|
||||
}
|
||||
}),
|
||||
method: (string) (len=4) "post",
|
||||
route: (main.decodedRoute) {
|
||||
route: (string) (len=24) "/api/encryption/exchange",
|
||||
params: ([]string) <nil>
|
||||
},
|
||||
query: ([]string) <nil>
|
||||
})
|
||||
}),
|
||||
(*main.Method)(0xc00029e7d0)({
|
||||
in: (main.MethodMessage) {
|
||||
name: (string) (len=5) "Empty",
|
||||
hasZeroFields: (bool) true
|
||||
},
|
||||
name: (string) (len=10) "LndGetInfo",
|
||||
out: (main.MethodMessage) {
|
||||
name: (string) (len=18) "LndGetInfoResponse",
|
||||
hasZeroFields: (bool) false
|
||||
},
|
||||
opts: (*main.methodOptions)(0xc000039e60)({
|
||||
authType: (*main.supportedAuth)(0xc0003a2cc0)({
|
||||
id: (string) (len=7) "no_auth",
|
||||
name: (string) (len=6) "NoAuth",
|
||||
encrypted: (bool) false,
|
||||
context: (map[string]string) {
|
||||
}
|
||||
}),
|
||||
method: (string) (len=3) "get",
|
||||
route: (main.decodedRoute) {
|
||||
route: (string) (len=16) "/api/lnd/getinfo",
|
||||
params: ([]string) <nil>
|
||||
},
|
||||
query: ([]string) <nil>
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
([]*main.Enum) <nil>
|
||||
|
||||
(map[string]*main.Message) (len=3) {
|
||||
(string) (len=25) "EncryptionExchangeRequest": (*main.Message)(0xc000238f40)({
|
||||
fullName: (string) (len=25) "EncryptionExchangeRequest",
|
||||
name: (string) (len=25) "EncryptionExchangeRequest",
|
||||
fields: ([]*main.Field) (len=2 cap=2) {
|
||||
(*main.Field)(0xc0003a2390)({
|
||||
name: (string) (len=10) "public_key",
|
||||
kind: (string) (len=6) "string",
|
||||
isMap: (bool) false,
|
||||
isArray: (bool) false,
|
||||
isEnum: (bool) false,
|
||||
isMessage: (bool) false,
|
||||
isOptional: (bool) false
|
||||
}),
|
||||
(*main.Field)(0xc0003a23c0)({
|
||||
name: (string) (len=9) "device_id",
|
||||
kind: (string) (len=6) "string",
|
||||
isMap: (bool) false,
|
||||
isArray: (bool) false,
|
||||
isEnum: (bool) false,
|
||||
isMessage: (bool) false,
|
||||
isOptional: (bool) false
|
||||
})
|
||||
}
|
||||
}),
|
||||
(string) (len=18) "LndGetInfoResponse": (*main.Message)(0xc000238f80)({
|
||||
fullName: (string) (len=18) "LndGetInfoResponse",
|
||||
name: (string) (len=18) "LndGetInfoResponse",
|
||||
fields: ([]*main.Field) (len=1 cap=1) {
|
||||
(*main.Field)(0xc0003a23f0)({
|
||||
name: (string) (len=5) "alias",
|
||||
kind: (string) (len=6) "string",
|
||||
isMap: (bool) false,
|
||||
isArray: (bool) false,
|
||||
isEnum: (bool) false,
|
||||
isMessage: (bool) false,
|
||||
isOptional: (bool) false
|
||||
})
|
||||
}
|
||||
}),
|
||||
(string) (len=5) "Empty": (*main.Message)(0xc000238f00)({
|
||||
fullName: (string) (len=5) "Empty",
|
||||
name: (string) (len=5) "Empty",
|
||||
fields: ([]*main.Field) <nil>
|
||||
})
|
||||
}
|
||||
|
||||
parsing file: structs 3
|
||||
parsing file: methods 2
|
||||
-> [{no_auth NoAuth false map[]} {guest Guest false map[token:string]} {admin Admin true map[pub:string]}]
|
||||
|
||||
60
proto/autogenerated/ts/express_server.ts
Normal file
60
proto/autogenerated/ts/express_server.ts
Normal file
|
|
@ -0,0 +1,60 @@
|
|||
// This file was autogenerated from a .proto file, DO NOT EDIT!
|
||||
|
||||
import express, { Response } from 'express'
|
||||
import * as Types from './types'
|
||||
export type Logger = { log: (v: any) => void, error: (v: any) => void }
|
||||
export type ServerOptions = {
|
||||
allowNotImplementedMethods?: number
|
||||
logger?: Logger
|
||||
throwErrors?: true
|
||||
NoAuthAuthGuard: (authorizationHeader?: string) => Promise<Types.NoAuthContext>
|
||||
GuestAuthGuard: (authorizationHeader?: string) => Promise<Types.GuestContext>
|
||||
AdminAuthGuard: (authorizationHeader?: string) => Promise<Types.AdminContext>
|
||||
encryptionCallback: (ctx: Types.AuthContext, body: any) => Promise<string>
|
||||
}
|
||||
const logErrorAndReturnResponse = (error: Error, response: string, res: Response, logger: Logger) => { logger.error(error.message || error); res.json({ status: 'ERROR', reason: response }) }
|
||||
export default (methods: Types.ServerMethods, opts: ServerOptions) => {
|
||||
const logger = opts.logger || { log: console.log, error: console.error }
|
||||
const app = express()
|
||||
if (!opts.allowNotImplementedMethods && !methods.Health) throw new Error('method: Health is not implemented')
|
||||
app.get('/health', async (req, res) => {
|
||||
try {
|
||||
if (!methods.Health) throw new Error('method: Health is not implemented')
|
||||
const authContext = await opts.NoAuthAuthGuard(req.headers['authorization'])
|
||||
const query = req.query
|
||||
const params = req.params
|
||||
await methods.Health({ ...authContext, ...query, ...params })
|
||||
res.json({ status: 'OK' })
|
||||
} catch (ex) { const e = ex as any; logErrorAndReturnResponse(e, e.message || e, res, logger); if (opts.throwErrors) throw e }
|
||||
})
|
||||
if (!opts.allowNotImplementedMethods && !methods.EncryptionExchange) throw new Error('method: EncryptionExchange is not implemented')
|
||||
app.post('/api/encryption/exchange', async (req, res) => {
|
||||
try {
|
||||
if (!methods.EncryptionExchange) throw new Error('method: EncryptionExchange is not implemented')
|
||||
const authContext = await opts.NoAuthAuthGuard(req.headers['authorization'])
|
||||
const request = req.body
|
||||
const error = Types.EncryptionExchangeRequestValidate(request)
|
||||
if (error !== null) return logErrorAndReturnResponse(error, 'invalid request body', res, logger)
|
||||
const query = req.query
|
||||
const params = req.params
|
||||
await methods.EncryptionExchange({ ...authContext, ...query, ...params }, request)
|
||||
res.json({ status: 'OK' })
|
||||
} catch (ex) { const e = ex as any; logErrorAndReturnResponse(e, e.message || e, res, logger); if (opts.throwErrors) throw e }
|
||||
})
|
||||
if (!opts.allowNotImplementedMethods && !methods.LndGetInfo) throw new Error('method: LndGetInfo is not implemented')
|
||||
app.get('/api/lnd/getinfo', async (req, res) => {
|
||||
try {
|
||||
if (!methods.LndGetInfo) throw new Error('method: LndGetInfo is not implemented')
|
||||
const authContext = await opts.NoAuthAuthGuard(req.headers['authorization'])
|
||||
const query = req.query
|
||||
const params = req.params
|
||||
const response = await methods.LndGetInfo({ ...authContext, ...query, ...params })
|
||||
res.json({ status: 'OK', result: response })
|
||||
} catch (ex) { const e = ex as any; logErrorAndReturnResponse(e, e.message || e, res, logger); if (opts.throwErrors) throw e }
|
||||
})
|
||||
var server: { close: () => void } | undefined
|
||||
return {
|
||||
Close: () => { if (!server) { throw new Error('tried closing server before starting') } else server.close() },
|
||||
Listen: (port: number) => { server = app.listen(port, () => logger.log('Example app listening on port ' + port)) }
|
||||
}
|
||||
}
|
||||
44
proto/autogenerated/ts/http_client.ts
Normal file
44
proto/autogenerated/ts/http_client.ts
Normal file
|
|
@ -0,0 +1,44 @@
|
|||
// This file was autogenerated from a .proto file, DO NOT EDIT!
|
||||
import axios from 'axios'
|
||||
import * as Types from './types'
|
||||
export type ResultError = { status: 'ERROR', reason: string }
|
||||
|
||||
export type ClientParams = {
|
||||
baseUrl: string
|
||||
retrieveNoAuthAuth: () => Promise<string | null>
|
||||
retrieveGuestAuth: () => Promise<string | null>
|
||||
retrieveAdminAuth: () => Promise<string | null>
|
||||
}
|
||||
export default (params: ClientParams) => ({
|
||||
Health: async (): Promise<ResultError | { status: 'OK' }> => {
|
||||
const auth = await params.retrieveNoAuthAuth()
|
||||
if (auth === null) throw new Error('retrieveNoAuthAuth() returned null')
|
||||
const { data } = await axios.get(params.baseUrl + '/health', { headers: { 'authorization': auth } })
|
||||
if (data.status === 'ERROR' && typeof data.reason === 'string') return data
|
||||
if (data.status === 'OK') {
|
||||
return data
|
||||
}
|
||||
return { status: 'ERROR', reason: 'invalid response' }
|
||||
},
|
||||
EncryptionExchange: async (request: Types.EncryptionExchangeRequest): Promise<ResultError | { status: 'OK' }> => {
|
||||
const auth = await params.retrieveNoAuthAuth()
|
||||
if (auth === null) throw new Error('retrieveNoAuthAuth() returned null')
|
||||
const { data } = await axios.post(params.baseUrl + '/api/encryption/exchange', request, { headers: { 'authorization': auth } })
|
||||
if (data.status === 'ERROR' && typeof data.reason === 'string') return data
|
||||
if (data.status === 'OK') {
|
||||
return data
|
||||
}
|
||||
return { status: 'ERROR', reason: 'invalid response' }
|
||||
},
|
||||
LndGetInfo: async (): Promise<ResultError | { status: 'OK', result: Types.Empty }> => {
|
||||
const auth = await params.retrieveNoAuthAuth()
|
||||
if (auth === null) throw new Error('retrieveNoAuthAuth() returned null')
|
||||
const { data } = await axios.get(params.baseUrl + '/api/lnd/getinfo', { headers: { 'authorization': auth } })
|
||||
if (data.status === 'ERROR' && typeof data.reason === 'string') return data
|
||||
if (data.status === 'OK') {
|
||||
const error = Types.LndGetInfoResponseValidate(data.result)
|
||||
if (error === null) { return data } else return { status: 'ERROR', reason: error.message }
|
||||
}
|
||||
return { status: 'ERROR', reason: 'invalid response' }
|
||||
},
|
||||
})
|
||||
92
proto/autogenerated/ts/types.ts
Normal file
92
proto/autogenerated/ts/types.ts
Normal file
|
|
@ -0,0 +1,92 @@
|
|||
// This file was autogenerated from a .proto file, DO NOT EDIT!
|
||||
|
||||
export type NoAuthContext = {
|
||||
}
|
||||
export type GuestContext = {
|
||||
token: string
|
||||
}
|
||||
export type AdminContext = {
|
||||
pub: string
|
||||
}
|
||||
export type AuthContext = NoAuthContext | GuestContext | AdminContext
|
||||
|
||||
export type Health_Query = {
|
||||
}
|
||||
export type Health_RouteParams = {
|
||||
}
|
||||
export type Health_Context = Health_Query & Health_RouteParams & NoAuthContext
|
||||
export type EncryptionExchange_Query = {
|
||||
}
|
||||
export type EncryptionExchange_RouteParams = {
|
||||
}
|
||||
export type EncryptionExchange_Context = EncryptionExchange_Query & EncryptionExchange_RouteParams & NoAuthContext
|
||||
export type LndGetInfo_Query = {
|
||||
}
|
||||
export type LndGetInfo_RouteParams = {
|
||||
}
|
||||
export type LndGetInfo_Context = LndGetInfo_Query & LndGetInfo_RouteParams & NoAuthContext
|
||||
export type ServerMethods = {
|
||||
Health?: (ctx: Health_Context) => Promise<void>
|
||||
EncryptionExchange?: (ctx: EncryptionExchange_Context, req: EncryptionExchangeRequest) => Promise<void>
|
||||
LndGetInfo?: (ctx: LndGetInfo_Context) => Promise<LndGetInfoResponse>
|
||||
}
|
||||
|
||||
|
||||
export type OptionsBaseMessage = {
|
||||
allOptionalsAreSet?: true
|
||||
}
|
||||
|
||||
export type LndGetInfoResponse = {
|
||||
alias: string
|
||||
}
|
||||
export const LndGetInfoResponseOptionalFields: [] = []
|
||||
export type LndGetInfoResponseOptions = OptionsBaseMessage & {
|
||||
checkOptionalsAreSet?: []
|
||||
alias_CustomCheck?: (v: string) => boolean
|
||||
}
|
||||
export const LndGetInfoResponseValidate = (o?: LndGetInfoResponse, opts: LndGetInfoResponseOptions = {}, path: string = 'LndGetInfoResponse::root.'): Error | null => {
|
||||
if (opts.checkOptionalsAreSet && opts.allOptionalsAreSet) return new Error(path + ': only one of checkOptionalsAreSet or allOptionalNonDefault can be set for each message')
|
||||
if (typeof o !== 'object' || o === null) return new Error(path + ': object is not an instance of an object or is null')
|
||||
|
||||
if (typeof o.alias !== 'string') return new Error(`${path}.alias: is not a string`)
|
||||
if (opts.alias_CustomCheck && !opts.alias_CustomCheck(o.alias)) return new Error(`${path}.alias: custom check failed`)
|
||||
|
||||
return null
|
||||
}
|
||||
|
||||
export type Empty = {
|
||||
}
|
||||
export const EmptyOptionalFields: [] = []
|
||||
export type EmptyOptions = OptionsBaseMessage & {
|
||||
checkOptionalsAreSet?: []
|
||||
}
|
||||
export const EmptyValidate = (o?: Empty, opts: EmptyOptions = {}, path: string = 'Empty::root.'): Error | null => {
|
||||
if (opts.checkOptionalsAreSet && opts.allOptionalsAreSet) return new Error(path + ': only one of checkOptionalsAreSet or allOptionalNonDefault can be set for each message')
|
||||
if (typeof o !== 'object' || o === null) return new Error(path + ': object is not an instance of an object or is null')
|
||||
|
||||
return null
|
||||
}
|
||||
|
||||
export type EncryptionExchangeRequest = {
|
||||
public_key: string
|
||||
device_id: string
|
||||
}
|
||||
export const EncryptionExchangeRequestOptionalFields: [] = []
|
||||
export type EncryptionExchangeRequestOptions = OptionsBaseMessage & {
|
||||
checkOptionalsAreSet?: []
|
||||
public_key_CustomCheck?: (v: string) => boolean
|
||||
device_id_CustomCheck?: (v: string) => boolean
|
||||
}
|
||||
export const EncryptionExchangeRequestValidate = (o?: EncryptionExchangeRequest, opts: EncryptionExchangeRequestOptions = {}, path: string = 'EncryptionExchangeRequest::root.'): Error | null => {
|
||||
if (opts.checkOptionalsAreSet && opts.allOptionalsAreSet) return new Error(path + ': only one of checkOptionalsAreSet or allOptionalNonDefault can be set for each message')
|
||||
if (typeof o !== 'object' || o === null) return new Error(path + ': object is not an instance of an object or is null')
|
||||
|
||||
if (typeof o.public_key !== 'string') return new Error(`${path}.public_key: is not a string`)
|
||||
if (opts.public_key_CustomCheck && !opts.public_key_CustomCheck(o.public_key)) return new Error(`${path}.public_key: custom check failed`)
|
||||
|
||||
if (typeof o.device_id !== 'string') return new Error(`${path}.device_id: is not a string`)
|
||||
if (opts.device_id_CustomCheck && !opts.device_id_CustomCheck(o.device_id)) return new Error(`${path}.device_id: custom check failed`)
|
||||
|
||||
return null
|
||||
}
|
||||
|
||||
119
proto/lnd/invoices.client.ts
Normal file
119
proto/lnd/invoices.client.ts
Normal file
|
|
@ -0,0 +1,119 @@
|
|||
// @generated by protobuf-ts 2.5.0 with parameter long_type_string
|
||||
// @generated from protobuf file "invoices.proto" (package "invoicesrpc", syntax proto3)
|
||||
// tslint:disable
|
||||
import type { RpcTransport } from "@protobuf-ts/runtime-rpc";
|
||||
import type { ServiceInfo } from "@protobuf-ts/runtime-rpc";
|
||||
import { Invoices } from "./invoices";
|
||||
import type { SettleInvoiceResp } from "./invoices";
|
||||
import type { SettleInvoiceMsg } from "./invoices";
|
||||
import type { AddHoldInvoiceResp } from "./invoices";
|
||||
import type { AddHoldInvoiceRequest } from "./invoices";
|
||||
import type { CancelInvoiceResp } from "./invoices";
|
||||
import type { CancelInvoiceMsg } from "./invoices";
|
||||
import type { UnaryCall } from "@protobuf-ts/runtime-rpc";
|
||||
import { stackIntercept } from "@protobuf-ts/runtime-rpc";
|
||||
import type { Invoice } from "./rpc";
|
||||
import type { SubscribeSingleInvoiceRequest } from "./invoices";
|
||||
import type { ServerStreamingCall } from "@protobuf-ts/runtime-rpc";
|
||||
import type { RpcOptions } from "@protobuf-ts/runtime-rpc";
|
||||
/**
|
||||
* Invoices is a service that can be used to create, accept, settle and cancel
|
||||
* invoices.
|
||||
*
|
||||
* @generated from protobuf service invoicesrpc.Invoices
|
||||
*/
|
||||
export interface IInvoicesClient {
|
||||
/**
|
||||
*
|
||||
* SubscribeSingleInvoice returns a uni-directional stream (server -> client)
|
||||
* to notify the client of state transitions of the specified invoice.
|
||||
* Initially the current invoice state is always sent out.
|
||||
*
|
||||
* @generated from protobuf rpc: SubscribeSingleInvoice(invoicesrpc.SubscribeSingleInvoiceRequest) returns (stream lnrpc.Invoice);
|
||||
*/
|
||||
subscribeSingleInvoice(input: SubscribeSingleInvoiceRequest, options?: RpcOptions): ServerStreamingCall<SubscribeSingleInvoiceRequest, Invoice>;
|
||||
/**
|
||||
*
|
||||
* CancelInvoice cancels a currently open invoice. If the invoice is already
|
||||
* canceled, this call will succeed. If the invoice is already settled, it will
|
||||
* fail.
|
||||
*
|
||||
* @generated from protobuf rpc: CancelInvoice(invoicesrpc.CancelInvoiceMsg) returns (invoicesrpc.CancelInvoiceResp);
|
||||
*/
|
||||
cancelInvoice(input: CancelInvoiceMsg, options?: RpcOptions): UnaryCall<CancelInvoiceMsg, CancelInvoiceResp>;
|
||||
/**
|
||||
*
|
||||
* AddHoldInvoice creates a hold invoice. It ties the invoice to the hash
|
||||
* supplied in the request.
|
||||
*
|
||||
* @generated from protobuf rpc: AddHoldInvoice(invoicesrpc.AddHoldInvoiceRequest) returns (invoicesrpc.AddHoldInvoiceResp);
|
||||
*/
|
||||
addHoldInvoice(input: AddHoldInvoiceRequest, options?: RpcOptions): UnaryCall<AddHoldInvoiceRequest, AddHoldInvoiceResp>;
|
||||
/**
|
||||
*
|
||||
* SettleInvoice settles an accepted invoice. If the invoice is already
|
||||
* settled, this call will succeed.
|
||||
*
|
||||
* @generated from protobuf rpc: SettleInvoice(invoicesrpc.SettleInvoiceMsg) returns (invoicesrpc.SettleInvoiceResp);
|
||||
*/
|
||||
settleInvoice(input: SettleInvoiceMsg, options?: RpcOptions): UnaryCall<SettleInvoiceMsg, SettleInvoiceResp>;
|
||||
}
|
||||
/**
|
||||
* Invoices is a service that can be used to create, accept, settle and cancel
|
||||
* invoices.
|
||||
*
|
||||
* @generated from protobuf service invoicesrpc.Invoices
|
||||
*/
|
||||
export class InvoicesClient implements IInvoicesClient, ServiceInfo {
|
||||
typeName = Invoices.typeName;
|
||||
methods = Invoices.methods;
|
||||
options = Invoices.options;
|
||||
constructor(private readonly _transport: RpcTransport) {
|
||||
}
|
||||
/**
|
||||
*
|
||||
* SubscribeSingleInvoice returns a uni-directional stream (server -> client)
|
||||
* to notify the client of state transitions of the specified invoice.
|
||||
* Initially the current invoice state is always sent out.
|
||||
*
|
||||
* @generated from protobuf rpc: SubscribeSingleInvoice(invoicesrpc.SubscribeSingleInvoiceRequest) returns (stream lnrpc.Invoice);
|
||||
*/
|
||||
subscribeSingleInvoice(input: SubscribeSingleInvoiceRequest, options?: RpcOptions): ServerStreamingCall<SubscribeSingleInvoiceRequest, Invoice> {
|
||||
const method = this.methods[0], opt = this._transport.mergeOptions(options);
|
||||
return stackIntercept<SubscribeSingleInvoiceRequest, Invoice>("serverStreaming", this._transport, method, opt, input);
|
||||
}
|
||||
/**
|
||||
*
|
||||
* CancelInvoice cancels a currently open invoice. If the invoice is already
|
||||
* canceled, this call will succeed. If the invoice is already settled, it will
|
||||
* fail.
|
||||
*
|
||||
* @generated from protobuf rpc: CancelInvoice(invoicesrpc.CancelInvoiceMsg) returns (invoicesrpc.CancelInvoiceResp);
|
||||
*/
|
||||
cancelInvoice(input: CancelInvoiceMsg, options?: RpcOptions): UnaryCall<CancelInvoiceMsg, CancelInvoiceResp> {
|
||||
const method = this.methods[1], opt = this._transport.mergeOptions(options);
|
||||
return stackIntercept<CancelInvoiceMsg, CancelInvoiceResp>("unary", this._transport, method, opt, input);
|
||||
}
|
||||
/**
|
||||
*
|
||||
* AddHoldInvoice creates a hold invoice. It ties the invoice to the hash
|
||||
* supplied in the request.
|
||||
*
|
||||
* @generated from protobuf rpc: AddHoldInvoice(invoicesrpc.AddHoldInvoiceRequest) returns (invoicesrpc.AddHoldInvoiceResp);
|
||||
*/
|
||||
addHoldInvoice(input: AddHoldInvoiceRequest, options?: RpcOptions): UnaryCall<AddHoldInvoiceRequest, AddHoldInvoiceResp> {
|
||||
const method = this.methods[2], opt = this._transport.mergeOptions(options);
|
||||
return stackIntercept<AddHoldInvoiceRequest, AddHoldInvoiceResp>("unary", this._transport, method, opt, input);
|
||||
}
|
||||
/**
|
||||
*
|
||||
* SettleInvoice settles an accepted invoice. If the invoice is already
|
||||
* settled, this call will succeed.
|
||||
*
|
||||
* @generated from protobuf rpc: SettleInvoice(invoicesrpc.SettleInvoiceMsg) returns (invoicesrpc.SettleInvoiceResp);
|
||||
*/
|
||||
settleInvoice(input: SettleInvoiceMsg, options?: RpcOptions): UnaryCall<SettleInvoiceMsg, SettleInvoiceResp> {
|
||||
const method = this.methods[3], opt = this._transport.mergeOptions(options);
|
||||
return stackIntercept<SettleInvoiceMsg, SettleInvoiceResp>("unary", this._transport, method, opt, input);
|
||||
}
|
||||
}
|
||||
513
proto/lnd/invoices.ts
Normal file
513
proto/lnd/invoices.ts
Normal file
|
|
@ -0,0 +1,513 @@
|
|||
// @generated by protobuf-ts 2.5.0 with parameter long_type_string
|
||||
// @generated from protobuf file "invoices.proto" (package "invoicesrpc", syntax proto3)
|
||||
// tslint:disable
|
||||
import { Invoice } from "./rpc";
|
||||
import { ServiceType } from "@protobuf-ts/runtime-rpc";
|
||||
import type { BinaryWriteOptions } from "@protobuf-ts/runtime";
|
||||
import type { IBinaryWriter } from "@protobuf-ts/runtime";
|
||||
import { WireType } from "@protobuf-ts/runtime";
|
||||
import type { BinaryReadOptions } from "@protobuf-ts/runtime";
|
||||
import type { IBinaryReader } from "@protobuf-ts/runtime";
|
||||
import { UnknownFieldHandler } from "@protobuf-ts/runtime";
|
||||
import type { PartialMessage } from "@protobuf-ts/runtime";
|
||||
import { reflectionMergePartial } from "@protobuf-ts/runtime";
|
||||
import { MESSAGE_TYPE } from "@protobuf-ts/runtime";
|
||||
import { MessageType } from "@protobuf-ts/runtime";
|
||||
import { RouteHint } from "./rpc";
|
||||
/**
 * Request message for the Invoices.CancelInvoice RPC.
 *
 * @generated from protobuf message invoicesrpc.CancelInvoiceMsg
 */
export interface CancelInvoiceMsg {
    /**
     * Hash corresponding to the (hold) invoice to cancel.
     *
     * @generated from protobuf field: bytes payment_hash = 1;
     */
    paymentHash: Uint8Array;
}
|
||||
/**
 * Empty response message for the Invoices.CancelInvoice RPC.
 *
 * @generated from protobuf message invoicesrpc.CancelInvoiceResp
 */
export interface CancelInvoiceResp {
}
|
||||
/**
 * Request message for the Invoices.AddHoldInvoice RPC.
 *
 * Note: int64/uint64 fields (value, value_msat, expiry, cltv_expiry) are
 * represented as decimal strings because this code was generated with the
 * long_type_string option (see the file header).
 *
 * @generated from protobuf message invoicesrpc.AddHoldInvoiceRequest
 */
export interface AddHoldInvoiceRequest {
    /**
     *
     * An optional memo to attach along with the invoice. Used for record keeping
     * purposes for the invoice's creator, and will also be set in the description
     * field of the encoded payment request if the description_hash field is not
     * being used.
     *
     * @generated from protobuf field: string memo = 1;
     */
    memo: string;
    /**
     * The hash of the preimage
     *
     * @generated from protobuf field: bytes hash = 2;
     */
    hash: Uint8Array;
    /**
     *
     * The value of this invoice in satoshis
     *
     * The fields value and value_msat are mutually exclusive.
     *
     * @generated from protobuf field: int64 value = 3;
     */
    value: string;
    /**
     *
     * The value of this invoice in millisatoshis
     *
     * The fields value and value_msat are mutually exclusive.
     *
     * @generated from protobuf field: int64 value_msat = 10;
     */
    valueMsat: string;
    /**
     *
     * Hash (SHA-256) of a description of the payment. Used if the description of
     * payment (memo) is too long to naturally fit within the description field
     * of an encoded payment request.
     *
     * @generated from protobuf field: bytes description_hash = 4;
     */
    descriptionHash: Uint8Array;
    /**
     * Payment request expiry time in seconds. Default is 3600 (1 hour).
     *
     * @generated from protobuf field: int64 expiry = 5;
     */
    expiry: string;
    /**
     * Fallback on-chain address.
     *
     * @generated from protobuf field: string fallback_addr = 6;
     */
    fallbackAddr: string;
    /**
     * Delta to use for the time-lock of the CLTV extended to the final hop.
     *
     * @generated from protobuf field: uint64 cltv_expiry = 7;
     */
    cltvExpiry: string;
    /**
     *
     * Route hints that can each be individually used to assist in reaching the
     * invoice's destination.
     *
     * @generated from protobuf field: repeated lnrpc.RouteHint route_hints = 8;
     */
    routeHints: RouteHint[];
    /**
     * Whether this invoice should include routing hints for private channels.
     *
     * @generated from protobuf field: bool private = 9;
     */
    private: boolean;
}
|
||||
/**
 * Response message for the Invoices.AddHoldInvoice RPC.
 *
 * @generated from protobuf message invoicesrpc.AddHoldInvoiceResp
 */
export interface AddHoldInvoiceResp {
    /**
     *
     * A bare-bones invoice for a payment within the Lightning Network. With the
     * details of the invoice, the sender has all the data necessary to send a
     * payment to the recipient.
     *
     * @generated from protobuf field: string payment_request = 1;
     */
    paymentRequest: string;
}
|
||||
/**
 * Request message for the Invoices.SettleInvoice RPC.
 *
 * @generated from protobuf message invoicesrpc.SettleInvoiceMsg
 */
export interface SettleInvoiceMsg {
    /**
     * Externally discovered pre-image that should be used to settle the hold
     * invoice.
     *
     * @generated from protobuf field: bytes preimage = 1;
     */
    preimage: Uint8Array;
}
|
||||
/**
 * Empty response message for the Invoices.SettleInvoice RPC.
 *
 * @generated from protobuf message invoicesrpc.SettleInvoiceResp
 */
export interface SettleInvoiceResp {
}
|
||||
/**
 * Request message for the Invoices.SubscribeSingleInvoice RPC.
 *
 * @generated from protobuf message invoicesrpc.SubscribeSingleInvoiceRequest
 */
export interface SubscribeSingleInvoiceRequest {
    /**
     * Hash corresponding to the (hold) invoice to subscribe to.
     *
     * @generated from protobuf field: bytes r_hash = 2;
     */
    rHash: Uint8Array;
}
|
||||
// @generated message type with reflection information, may provide speed optimized methods
// NOTE(review): generated wire-format code — regenerate from invoices.proto rather than hand-editing.
class CancelInvoiceMsg$Type extends MessageType<CancelInvoiceMsg> {
    constructor() {
        super("invoicesrpc.CancelInvoiceMsg", [
            { no: 1, name: "payment_hash", kind: "scalar", T: 12 /*ScalarType.BYTES*/ }
        ]);
    }
    // Builds a message with defaults, then merges any caller-supplied partial.
    create(value?: PartialMessage<CancelInvoiceMsg>): CancelInvoiceMsg {
        const message = { paymentHash: new Uint8Array(0) };
        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            reflectionMergePartial<CancelInvoiceMsg>(this, message, value);
        return message;
    }
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: CancelInvoiceMsg): CancelInvoiceMsg {
        let message = target ?? this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* bytes payment_hash */ 1:
                    message.paymentHash = reader.bytes();
                    break;
                default:
                    // Unknown fields: throw, drop, or hand to a handler per options.readUnknownField.
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    internalBinaryWrite(message: CancelInvoiceMsg, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
        /* bytes payment_hash = 1; */
        if (message.paymentHash.length)
            writer.tag(1, WireType.LengthDelimited).bytes(message.paymentHash);
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message invoicesrpc.CancelInvoiceMsg
 */
export const CancelInvoiceMsg = new CancelInvoiceMsg$Type();
|
||||
// @generated message type with reflection information, may provide speed optimized methods
// Empty message: read/write only have to deal with unknown fields.
class CancelInvoiceResp$Type extends MessageType<CancelInvoiceResp> {
    constructor() {
        super("invoicesrpc.CancelInvoiceResp", []);
    }
    create(value?: PartialMessage<CancelInvoiceResp>): CancelInvoiceResp {
        const message = {};
        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            reflectionMergePartial<CancelInvoiceResp>(this, message, value);
        return message;
    }
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: CancelInvoiceResp): CancelInvoiceResp {
        return target ?? this.create();
    }
    internalBinaryWrite(message: CancelInvoiceResp, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message invoicesrpc.CancelInvoiceResp
 */
export const CancelInvoiceResp = new CancelInvoiceResp$Type();
|
||||
// @generated message type with reflection information, may provide speed optimized methods
// NOTE(review): generated wire-format code — field numbers/tags must match invoices.proto; regenerate instead of hand-editing.
class AddHoldInvoiceRequest$Type extends MessageType<AddHoldInvoiceRequest> {
    constructor() {
        super("invoicesrpc.AddHoldInvoiceRequest", [
            { no: 1, name: "memo", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
            { no: 2, name: "hash", kind: "scalar", T: 12 /*ScalarType.BYTES*/ },
            { no: 3, name: "value", kind: "scalar", T: 3 /*ScalarType.INT64*/ },
            { no: 10, name: "value_msat", kind: "scalar", T: 3 /*ScalarType.INT64*/ },
            { no: 4, name: "description_hash", kind: "scalar", T: 12 /*ScalarType.BYTES*/ },
            { no: 5, name: "expiry", kind: "scalar", T: 3 /*ScalarType.INT64*/ },
            { no: 6, name: "fallback_addr", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
            { no: 7, name: "cltv_expiry", kind: "scalar", T: 4 /*ScalarType.UINT64*/ },
            { no: 8, name: "route_hints", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => RouteHint },
            { no: 9, name: "private", kind: "scalar", T: 8 /*ScalarType.BOOL*/ }
        ]);
    }
    // Defaults: 64-bit integers are "0" strings (long_type_string); bytes are empty arrays.
    create(value?: PartialMessage<AddHoldInvoiceRequest>): AddHoldInvoiceRequest {
        const message = { memo: "", hash: new Uint8Array(0), value: "0", valueMsat: "0", descriptionHash: new Uint8Array(0), expiry: "0", fallbackAddr: "", cltvExpiry: "0", routeHints: [], private: false };
        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            reflectionMergePartial<AddHoldInvoiceRequest>(this, message, value);
        return message;
    }
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: AddHoldInvoiceRequest): AddHoldInvoiceRequest {
        let message = target ?? this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* string memo */ 1:
                    message.memo = reader.string();
                    break;
                case /* bytes hash */ 2:
                    message.hash = reader.bytes();
                    break;
                case /* int64 value */ 3:
                    message.value = reader.int64().toString();
                    break;
                case /* int64 value_msat */ 10:
                    message.valueMsat = reader.int64().toString();
                    break;
                case /* bytes description_hash */ 4:
                    message.descriptionHash = reader.bytes();
                    break;
                case /* int64 expiry */ 5:
                    message.expiry = reader.int64().toString();
                    break;
                case /* string fallback_addr */ 6:
                    message.fallbackAddr = reader.string();
                    break;
                case /* uint64 cltv_expiry */ 7:
                    message.cltvExpiry = reader.uint64().toString();
                    break;
                case /* repeated lnrpc.RouteHint route_hints */ 8:
                    message.routeHints.push(RouteHint.internalBinaryRead(reader, reader.uint32(), options));
                    break;
                case /* bool private */ 9:
                    message.private = reader.bool();
                    break;
                default:
                    // Unknown fields: throw, drop, or hand to a handler per options.readUnknownField.
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    // proto3 semantics: fields equal to their default value are omitted from the wire.
    internalBinaryWrite(message: AddHoldInvoiceRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
        /* string memo = 1; */
        if (message.memo !== "")
            writer.tag(1, WireType.LengthDelimited).string(message.memo);
        /* bytes hash = 2; */
        if (message.hash.length)
            writer.tag(2, WireType.LengthDelimited).bytes(message.hash);
        /* int64 value = 3; */
        if (message.value !== "0")
            writer.tag(3, WireType.Varint).int64(message.value);
        /* int64 value_msat = 10; */
        if (message.valueMsat !== "0")
            writer.tag(10, WireType.Varint).int64(message.valueMsat);
        /* bytes description_hash = 4; */
        if (message.descriptionHash.length)
            writer.tag(4, WireType.LengthDelimited).bytes(message.descriptionHash);
        /* int64 expiry = 5; */
        if (message.expiry !== "0")
            writer.tag(5, WireType.Varint).int64(message.expiry);
        /* string fallback_addr = 6; */
        if (message.fallbackAddr !== "")
            writer.tag(6, WireType.LengthDelimited).string(message.fallbackAddr);
        /* uint64 cltv_expiry = 7; */
        if (message.cltvExpiry !== "0")
            writer.tag(7, WireType.Varint).uint64(message.cltvExpiry);
        /* repeated lnrpc.RouteHint route_hints = 8; */
        for (let i = 0; i < message.routeHints.length; i++)
            RouteHint.internalBinaryWrite(message.routeHints[i], writer.tag(8, WireType.LengthDelimited).fork(), options).join();
        /* bool private = 9; */
        if (message.private !== false)
            writer.tag(9, WireType.Varint).bool(message.private);
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message invoicesrpc.AddHoldInvoiceRequest
 */
export const AddHoldInvoiceRequest = new AddHoldInvoiceRequest$Type();
|
||||
// @generated message type with reflection information, may provide speed optimized methods
// NOTE(review): generated wire-format code — regenerate from invoices.proto rather than hand-editing.
class AddHoldInvoiceResp$Type extends MessageType<AddHoldInvoiceResp> {
    constructor() {
        super("invoicesrpc.AddHoldInvoiceResp", [
            { no: 1, name: "payment_request", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
        ]);
    }
    create(value?: PartialMessage<AddHoldInvoiceResp>): AddHoldInvoiceResp {
        const message = { paymentRequest: "" };
        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            reflectionMergePartial<AddHoldInvoiceResp>(this, message, value);
        return message;
    }
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: AddHoldInvoiceResp): AddHoldInvoiceResp {
        let message = target ?? this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* string payment_request */ 1:
                    message.paymentRequest = reader.string();
                    break;
                default:
                    // Unknown fields: throw, drop, or hand to a handler per options.readUnknownField.
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    internalBinaryWrite(message: AddHoldInvoiceResp, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
        /* string payment_request = 1; */
        if (message.paymentRequest !== "")
            writer.tag(1, WireType.LengthDelimited).string(message.paymentRequest);
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message invoicesrpc.AddHoldInvoiceResp
 */
export const AddHoldInvoiceResp = new AddHoldInvoiceResp$Type();
|
||||
// @generated message type with reflection information, may provide speed optimized methods
// NOTE(review): generated wire-format code — regenerate from invoices.proto rather than hand-editing.
class SettleInvoiceMsg$Type extends MessageType<SettleInvoiceMsg> {
    constructor() {
        super("invoicesrpc.SettleInvoiceMsg", [
            { no: 1, name: "preimage", kind: "scalar", T: 12 /*ScalarType.BYTES*/ }
        ]);
    }
    create(value?: PartialMessage<SettleInvoiceMsg>): SettleInvoiceMsg {
        const message = { preimage: new Uint8Array(0) };
        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            reflectionMergePartial<SettleInvoiceMsg>(this, message, value);
        return message;
    }
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: SettleInvoiceMsg): SettleInvoiceMsg {
        let message = target ?? this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* bytes preimage */ 1:
                    message.preimage = reader.bytes();
                    break;
                default:
                    // Unknown fields: throw, drop, or hand to a handler per options.readUnknownField.
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    internalBinaryWrite(message: SettleInvoiceMsg, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
        /* bytes preimage = 1; */
        if (message.preimage.length)
            writer.tag(1, WireType.LengthDelimited).bytes(message.preimage);
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message invoicesrpc.SettleInvoiceMsg
 */
export const SettleInvoiceMsg = new SettleInvoiceMsg$Type();
|
||||
// @generated message type with reflection information, may provide speed optimized methods
// Empty message: read/write only have to deal with unknown fields.
class SettleInvoiceResp$Type extends MessageType<SettleInvoiceResp> {
    constructor() {
        super("invoicesrpc.SettleInvoiceResp", []);
    }
    create(value?: PartialMessage<SettleInvoiceResp>): SettleInvoiceResp {
        const message = {};
        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            reflectionMergePartial<SettleInvoiceResp>(this, message, value);
        return message;
    }
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: SettleInvoiceResp): SettleInvoiceResp {
        return target ?? this.create();
    }
    internalBinaryWrite(message: SettleInvoiceResp, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message invoicesrpc.SettleInvoiceResp
 */
export const SettleInvoiceResp = new SettleInvoiceResp$Type();
|
||||
// @generated message type with reflection information, may provide speed optimized methods
// NOTE(review): the only field is deliberately numbered 2 (per invoices.proto) — do not "fix" it to 1.
class SubscribeSingleInvoiceRequest$Type extends MessageType<SubscribeSingleInvoiceRequest> {
    constructor() {
        super("invoicesrpc.SubscribeSingleInvoiceRequest", [
            { no: 2, name: "r_hash", kind: "scalar", T: 12 /*ScalarType.BYTES*/ }
        ]);
    }
    create(value?: PartialMessage<SubscribeSingleInvoiceRequest>): SubscribeSingleInvoiceRequest {
        const message = { rHash: new Uint8Array(0) };
        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
        if (value !== undefined)
            reflectionMergePartial<SubscribeSingleInvoiceRequest>(this, message, value);
        return message;
    }
    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: SubscribeSingleInvoiceRequest): SubscribeSingleInvoiceRequest {
        let message = target ?? this.create(), end = reader.pos + length;
        while (reader.pos < end) {
            let [fieldNo, wireType] = reader.tag();
            switch (fieldNo) {
                case /* bytes r_hash */ 2:
                    message.rHash = reader.bytes();
                    break;
                default:
                    // Unknown fields: throw, drop, or hand to a handler per options.readUnknownField.
                    let u = options.readUnknownField;
                    if (u === "throw")
                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
                    let d = reader.skip(wireType);
                    if (u !== false)
                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
            }
        }
        return message;
    }
    internalBinaryWrite(message: SubscribeSingleInvoiceRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
        /* bytes r_hash = 2; */
        if (message.rHash.length)
            writer.tag(2, WireType.LengthDelimited).bytes(message.rHash);
        let u = options.writeUnknownFields;
        if (u !== false)
            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
        return writer;
    }
}
/**
 * @generated MessageType for protobuf message invoicesrpc.SubscribeSingleInvoiceRequest
 */
export const SubscribeSingleInvoiceRequest = new SubscribeSingleInvoiceRequest$Type();
|
||||
/**
 * @generated ServiceType for protobuf service invoicesrpc.Invoices
 *
 * NOTE(review): method order here defines the indices used by
 * InvoicesClient (this.methods[0..3]) — keep the two in sync.
 */
export const Invoices = new ServiceType("invoicesrpc.Invoices", [
    { name: "SubscribeSingleInvoice", serverStreaming: true, options: {}, I: SubscribeSingleInvoiceRequest, O: Invoice },
    { name: "CancelInvoice", options: {}, I: CancelInvoiceMsg, O: CancelInvoiceResp },
    { name: "AddHoldInvoice", options: {}, I: AddHoldInvoiceRequest, O: AddHoldInvoiceResp },
    { name: "SettleInvoice", options: {}, I: SettleInvoiceMsg, O: SettleInvoiceResp }
]);
|
||||
418
proto/lnd/router.client.ts
Normal file
418
proto/lnd/router.client.ts
Normal file
|
|
@ -0,0 +1,418 @@
|
|||
// @generated by protobuf-ts 2.5.0 with parameter long_type_string
|
||||
// @generated from protobuf file "router.proto" (package "routerrpc", syntax proto3)
|
||||
// tslint:disable
|
||||
import type { RpcTransport } from "@protobuf-ts/runtime-rpc";
|
||||
import type { ServiceInfo } from "@protobuf-ts/runtime-rpc";
|
||||
import { Router } from "./router";
|
||||
import type { UpdateChanStatusResponse } from "./router";
|
||||
import type { UpdateChanStatusRequest } from "./router";
|
||||
import type { ForwardHtlcInterceptRequest } from "./router";
|
||||
import type { ForwardHtlcInterceptResponse } from "./router";
|
||||
import type { DuplexStreamingCall } from "@protobuf-ts/runtime-rpc";
|
||||
import type { PaymentStatus } from "./router";
|
||||
import type { HtlcEvent } from "./router";
|
||||
import type { SubscribeHtlcEventsRequest } from "./router";
|
||||
import type { BuildRouteResponse } from "./router";
|
||||
import type { BuildRouteRequest } from "./router";
|
||||
import type { QueryProbabilityResponse } from "./router";
|
||||
import type { QueryProbabilityRequest } from "./router";
|
||||
import type { SetMissionControlConfigResponse } from "./router";
|
||||
import type { SetMissionControlConfigRequest } from "./router";
|
||||
import type { GetMissionControlConfigResponse } from "./router";
|
||||
import type { GetMissionControlConfigRequest } from "./router";
|
||||
import type { XImportMissionControlResponse } from "./router";
|
||||
import type { XImportMissionControlRequest } from "./router";
|
||||
import type { QueryMissionControlResponse } from "./router";
|
||||
import type { QueryMissionControlRequest } from "./router";
|
||||
import type { ResetMissionControlResponse } from "./router";
|
||||
import type { ResetMissionControlRequest } from "./router";
|
||||
import type { HTLCAttempt } from "./rpc";
|
||||
import type { SendToRouteResponse } from "./router";
|
||||
import type { SendToRouteRequest } from "./router";
|
||||
import type { RouteFeeResponse } from "./router";
|
||||
import type { RouteFeeRequest } from "./router";
|
||||
import type { UnaryCall } from "@protobuf-ts/runtime-rpc";
|
||||
import type { TrackPaymentRequest } from "./router";
|
||||
import { stackIntercept } from "@protobuf-ts/runtime-rpc";
|
||||
import type { Payment } from "./rpc";
|
||||
import type { SendPaymentRequest } from "./router";
|
||||
import type { ServerStreamingCall } from "@protobuf-ts/runtime-rpc";
|
||||
import type { RpcOptions } from "@protobuf-ts/runtime-rpc";
|
||||
/**
|
||||
* Router is a service that offers advanced interaction with the router
|
||||
* subsystem of the daemon.
|
||||
*
|
||||
* @generated from protobuf service routerrpc.Router
|
||||
*/
|
||||
export interface IRouterClient {
|
||||
/**
|
||||
*
|
||||
* SendPaymentV2 attempts to route a payment described by the passed
|
||||
* PaymentRequest to the final destination. The call returns a stream of
|
||||
* payment updates.
|
||||
*
|
||||
* @generated from protobuf rpc: SendPaymentV2(routerrpc.SendPaymentRequest) returns (stream lnrpc.Payment);
|
||||
*/
|
||||
sendPaymentV2(input: SendPaymentRequest, options?: RpcOptions): ServerStreamingCall<SendPaymentRequest, Payment>;
|
||||
/**
|
||||
*
|
||||
* TrackPaymentV2 returns an update stream for the payment identified by the
|
||||
* payment hash.
|
||||
*
|
||||
* @generated from protobuf rpc: TrackPaymentV2(routerrpc.TrackPaymentRequest) returns (stream lnrpc.Payment);
|
||||
*/
|
||||
trackPaymentV2(input: TrackPaymentRequest, options?: RpcOptions): ServerStreamingCall<TrackPaymentRequest, Payment>;
|
||||
/**
|
||||
*
|
||||
* EstimateRouteFee allows callers to obtain a lower bound w.r.t how much it
|
||||
* may cost to send an HTLC to the target end destination.
|
||||
*
|
||||
* @generated from protobuf rpc: EstimateRouteFee(routerrpc.RouteFeeRequest) returns (routerrpc.RouteFeeResponse);
|
||||
*/
|
||||
estimateRouteFee(input: RouteFeeRequest, options?: RpcOptions): UnaryCall<RouteFeeRequest, RouteFeeResponse>;
|
||||
/**
|
||||
*
|
||||
* Deprecated, use SendToRouteV2. SendToRoute attempts to make a payment via
|
||||
* the specified route. This method differs from SendPayment in that it
|
||||
* allows users to specify a full route manually. This can be used for
|
||||
* things like rebalancing, and atomic swaps. It differs from the newer
|
||||
* SendToRouteV2 in that it doesn't return the full HTLC information.
|
||||
*
|
||||
* @deprecated
|
||||
* @generated from protobuf rpc: SendToRoute(routerrpc.SendToRouteRequest) returns (routerrpc.SendToRouteResponse);
|
||||
*/
|
||||
sendToRoute(input: SendToRouteRequest, options?: RpcOptions): UnaryCall<SendToRouteRequest, SendToRouteResponse>;
|
||||
/**
|
||||
*
|
||||
* SendToRouteV2 attempts to make a payment via the specified route. This
|
||||
* method differs from SendPayment in that it allows users to specify a full
|
||||
* route manually. This can be used for things like rebalancing, and atomic
|
||||
* swaps.
|
||||
*
|
||||
* @generated from protobuf rpc: SendToRouteV2(routerrpc.SendToRouteRequest) returns (lnrpc.HTLCAttempt);
|
||||
*/
|
||||
sendToRouteV2(input: SendToRouteRequest, options?: RpcOptions): UnaryCall<SendToRouteRequest, HTLCAttempt>;
|
||||
/**
|
||||
*
|
||||
* ResetMissionControl clears all mission control state and starts with a clean
|
||||
* slate.
|
||||
*
|
||||
* @generated from protobuf rpc: ResetMissionControl(routerrpc.ResetMissionControlRequest) returns (routerrpc.ResetMissionControlResponse);
|
||||
*/
|
||||
resetMissionControl(input: ResetMissionControlRequest, options?: RpcOptions): UnaryCall<ResetMissionControlRequest, ResetMissionControlResponse>;
|
||||
/**
|
||||
*
|
||||
* QueryMissionControl exposes the internal mission control state to callers.
|
||||
* It is a development feature.
|
||||
*
|
||||
* @generated from protobuf rpc: QueryMissionControl(routerrpc.QueryMissionControlRequest) returns (routerrpc.QueryMissionControlResponse);
|
||||
*/
|
||||
queryMissionControl(input: QueryMissionControlRequest, options?: RpcOptions): UnaryCall<QueryMissionControlRequest, QueryMissionControlResponse>;
|
||||
/**
|
||||
*
|
||||
* XImportMissionControl is an experimental API that imports the state provided
|
||||
* to the internal mission control's state, using all results which are more
|
||||
* recent than our existing values. These values will only be imported
|
||||
* in-memory, and will not be persisted across restarts.
|
||||
*
|
||||
* @generated from protobuf rpc: XImportMissionControl(routerrpc.XImportMissionControlRequest) returns (routerrpc.XImportMissionControlResponse);
|
||||
*/
|
||||
xImportMissionControl(input: XImportMissionControlRequest, options?: RpcOptions): UnaryCall<XImportMissionControlRequest, XImportMissionControlResponse>;
|
||||
/**
|
||||
*
|
||||
* GetMissionControlConfig returns mission control's current config.
|
||||
*
|
||||
* @generated from protobuf rpc: GetMissionControlConfig(routerrpc.GetMissionControlConfigRequest) returns (routerrpc.GetMissionControlConfigResponse);
|
||||
*/
|
||||
getMissionControlConfig(input: GetMissionControlConfigRequest, options?: RpcOptions): UnaryCall<GetMissionControlConfigRequest, GetMissionControlConfigResponse>;
|
||||
/**
|
||||
*
|
||||
* SetMissionControlConfig will set mission control's config, if the config
|
||||
* provided is valid.
|
||||
*
|
||||
* @generated from protobuf rpc: SetMissionControlConfig(routerrpc.SetMissionControlConfigRequest) returns (routerrpc.SetMissionControlConfigResponse);
|
||||
*/
|
||||
setMissionControlConfig(input: SetMissionControlConfigRequest, options?: RpcOptions): UnaryCall<SetMissionControlConfigRequest, SetMissionControlConfigResponse>;
|
||||
/**
|
||||
*
|
||||
* QueryProbability returns the current success probability estimate for a
|
||||
* given node pair and amount.
|
||||
*
|
||||
* @generated from protobuf rpc: QueryProbability(routerrpc.QueryProbabilityRequest) returns (routerrpc.QueryProbabilityResponse);
|
||||
*/
|
||||
queryProbability(input: QueryProbabilityRequest, options?: RpcOptions): UnaryCall<QueryProbabilityRequest, QueryProbabilityResponse>;
|
||||
/**
|
||||
*
|
||||
* BuildRoute builds a fully specified route based on a list of hop public
|
||||
* keys. It retrieves the relevant channel policies from the graph in order to
|
||||
* calculate the correct fees and time locks.
|
||||
*
|
||||
* @generated from protobuf rpc: BuildRoute(routerrpc.BuildRouteRequest) returns (routerrpc.BuildRouteResponse);
|
||||
*/
|
||||
buildRoute(input: BuildRouteRequest, options?: RpcOptions): UnaryCall<BuildRouteRequest, BuildRouteResponse>;
|
||||
/**
|
||||
*
|
||||
* SubscribeHtlcEvents creates a uni-directional stream from the server to
|
||||
* the client which delivers a stream of htlc events.
|
||||
*
|
||||
* @generated from protobuf rpc: SubscribeHtlcEvents(routerrpc.SubscribeHtlcEventsRequest) returns (stream routerrpc.HtlcEvent);
|
||||
*/
|
||||
subscribeHtlcEvents(input: SubscribeHtlcEventsRequest, options?: RpcOptions): ServerStreamingCall<SubscribeHtlcEventsRequest, HtlcEvent>;
|
||||
/**
|
||||
*
|
||||
* Deprecated, use SendPaymentV2. SendPayment attempts to route a payment
|
||||
* described by the passed PaymentRequest to the final destination. The call
|
||||
* returns a stream of payment status updates.
|
||||
*
|
||||
* @deprecated
|
||||
* @generated from protobuf rpc: SendPayment(routerrpc.SendPaymentRequest) returns (stream routerrpc.PaymentStatus);
|
||||
*/
|
||||
sendPayment(input: SendPaymentRequest, options?: RpcOptions): ServerStreamingCall<SendPaymentRequest, PaymentStatus>;
|
||||
/**
|
||||
*
|
||||
* Deprecated, use TrackPaymentV2. TrackPayment returns an update stream for
|
||||
* the payment identified by the payment hash.
|
||||
*
|
||||
* @deprecated
|
||||
* @generated from protobuf rpc: TrackPayment(routerrpc.TrackPaymentRequest) returns (stream routerrpc.PaymentStatus);
|
||||
*/
|
||||
trackPayment(input: TrackPaymentRequest, options?: RpcOptions): ServerStreamingCall<TrackPaymentRequest, PaymentStatus>;
|
||||
/**
|
||||
* *
|
||||
* HtlcInterceptor dispatches a bi-directional streaming RPC in which
|
||||
* Forwarded HTLC requests are sent to the client and the client responds with
|
||||
* a boolean that tells LND if this htlc should be intercepted.
|
||||
* In case of interception, the htlc can be either settled, cancelled or
|
||||
* resumed later by using the ResolveHoldForward endpoint.
|
||||
*
|
||||
* @generated from protobuf rpc: HtlcInterceptor(stream routerrpc.ForwardHtlcInterceptResponse) returns (stream routerrpc.ForwardHtlcInterceptRequest);
|
||||
*/
|
||||
htlcInterceptor(options?: RpcOptions): DuplexStreamingCall<ForwardHtlcInterceptResponse, ForwardHtlcInterceptRequest>;
|
||||
/**
|
||||
*
|
||||
* UpdateChanStatus attempts to manually set the state of a channel
|
||||
* (enabled, disabled, or auto). A manual "disable" request will cause the
|
||||
* channel to stay disabled until a subsequent manual request of either
|
||||
* "enable" or "auto".
|
||||
*
|
||||
* @generated from protobuf rpc: UpdateChanStatus(routerrpc.UpdateChanStatusRequest) returns (routerrpc.UpdateChanStatusResponse);
|
||||
*/
|
||||
updateChanStatus(input: UpdateChanStatusRequest, options?: RpcOptions): UnaryCall<UpdateChanStatusRequest, UpdateChanStatusResponse>;
|
||||
}
|
||||
/**
|
||||
* Router is a service that offers advanced interaction with the router
|
||||
* subsystem of the daemon.
|
||||
*
|
||||
* @generated from protobuf service routerrpc.Router
|
||||
*/
|
||||
export class RouterClient implements IRouterClient, ServiceInfo {
|
||||
typeName = Router.typeName;
|
||||
methods = Router.methods;
|
||||
options = Router.options;
|
||||
constructor(private readonly _transport: RpcTransport) {
|
||||
}
|
||||
/**
|
||||
*
|
||||
* SendPaymentV2 attempts to route a payment described by the passed
|
||||
* PaymentRequest to the final destination. The call returns a stream of
|
||||
* payment updates.
|
||||
*
|
||||
* @generated from protobuf rpc: SendPaymentV2(routerrpc.SendPaymentRequest) returns (stream lnrpc.Payment);
|
||||
*/
|
||||
sendPaymentV2(input: SendPaymentRequest, options?: RpcOptions): ServerStreamingCall<SendPaymentRequest, Payment> {
|
||||
const method = this.methods[0], opt = this._transport.mergeOptions(options);
|
||||
return stackIntercept<SendPaymentRequest, Payment>("serverStreaming", this._transport, method, opt, input);
|
||||
}
|
||||
/**
|
||||
*
|
||||
* TrackPaymentV2 returns an update stream for the payment identified by the
|
||||
* payment hash.
|
||||
*
|
||||
* @generated from protobuf rpc: TrackPaymentV2(routerrpc.TrackPaymentRequest) returns (stream lnrpc.Payment);
|
||||
*/
|
||||
trackPaymentV2(input: TrackPaymentRequest, options?: RpcOptions): ServerStreamingCall<TrackPaymentRequest, Payment> {
|
||||
const method = this.methods[1], opt = this._transport.mergeOptions(options);
|
||||
return stackIntercept<TrackPaymentRequest, Payment>("serverStreaming", this._transport, method, opt, input);
|
||||
}
|
||||
/**
|
||||
*
|
||||
* EstimateRouteFee allows callers to obtain a lower bound w.r.t how much it
|
||||
* may cost to send an HTLC to the target end destination.
|
||||
*
|
||||
* @generated from protobuf rpc: EstimateRouteFee(routerrpc.RouteFeeRequest) returns (routerrpc.RouteFeeResponse);
|
||||
*/
|
||||
estimateRouteFee(input: RouteFeeRequest, options?: RpcOptions): UnaryCall<RouteFeeRequest, RouteFeeResponse> {
|
||||
const method = this.methods[2], opt = this._transport.mergeOptions(options);
|
||||
return stackIntercept<RouteFeeRequest, RouteFeeResponse>("unary", this._transport, method, opt, input);
|
||||
}
|
||||
/**
|
||||
*
|
||||
* Deprecated, use SendToRouteV2. SendToRoute attempts to make a payment via
|
||||
* the specified route. This method differs from SendPayment in that it
|
||||
* allows users to specify a full route manually. This can be used for
|
||||
* things like rebalancing, and atomic swaps. It differs from the newer
|
||||
* SendToRouteV2 in that it doesn't return the full HTLC information.
|
||||
*
|
||||
* @deprecated
|
||||
* @generated from protobuf rpc: SendToRoute(routerrpc.SendToRouteRequest) returns (routerrpc.SendToRouteResponse);
|
||||
*/
|
||||
sendToRoute(input: SendToRouteRequest, options?: RpcOptions): UnaryCall<SendToRouteRequest, SendToRouteResponse> {
|
||||
const method = this.methods[3], opt = this._transport.mergeOptions(options);
|
||||
return stackIntercept<SendToRouteRequest, SendToRouteResponse>("unary", this._transport, method, opt, input);
|
||||
}
|
||||
/**
|
||||
*
|
||||
* SendToRouteV2 attempts to make a payment via the specified route. This
|
||||
* method differs from SendPayment in that it allows users to specify a full
|
||||
* route manually. This can be used for things like rebalancing, and atomic
|
||||
* swaps.
|
||||
*
|
||||
* @generated from protobuf rpc: SendToRouteV2(routerrpc.SendToRouteRequest) returns (lnrpc.HTLCAttempt);
|
||||
*/
|
||||
sendToRouteV2(input: SendToRouteRequest, options?: RpcOptions): UnaryCall<SendToRouteRequest, HTLCAttempt> {
|
||||
const method = this.methods[4], opt = this._transport.mergeOptions(options);
|
||||
return stackIntercept<SendToRouteRequest, HTLCAttempt>("unary", this._transport, method, opt, input);
|
||||
}
|
||||
/**
|
||||
*
|
||||
* ResetMissionControl clears all mission control state and starts with a clean
|
||||
* slate.
|
||||
*
|
||||
* @generated from protobuf rpc: ResetMissionControl(routerrpc.ResetMissionControlRequest) returns (routerrpc.ResetMissionControlResponse);
|
||||
*/
|
||||
resetMissionControl(input: ResetMissionControlRequest, options?: RpcOptions): UnaryCall<ResetMissionControlRequest, ResetMissionControlResponse> {
|
||||
const method = this.methods[5], opt = this._transport.mergeOptions(options);
|
||||
return stackIntercept<ResetMissionControlRequest, ResetMissionControlResponse>("unary", this._transport, method, opt, input);
|
||||
}
|
||||
/**
|
||||
*
|
||||
* QueryMissionControl exposes the internal mission control state to callers.
|
||||
* It is a development feature.
|
||||
*
|
||||
* @generated from protobuf rpc: QueryMissionControl(routerrpc.QueryMissionControlRequest) returns (routerrpc.QueryMissionControlResponse);
|
||||
*/
|
||||
queryMissionControl(input: QueryMissionControlRequest, options?: RpcOptions): UnaryCall<QueryMissionControlRequest, QueryMissionControlResponse> {
|
||||
const method = this.methods[6], opt = this._transport.mergeOptions(options);
|
||||
return stackIntercept<QueryMissionControlRequest, QueryMissionControlResponse>("unary", this._transport, method, opt, input);
|
||||
}
|
||||
/**
|
||||
*
|
||||
* XImportMissionControl is an experimental API that imports the state provided
|
||||
* to the internal mission control's state, using all results which are more
|
||||
* recent than our existing values. These values will only be imported
|
||||
* in-memory, and will not be persisted across restarts.
|
||||
*
|
||||
* @generated from protobuf rpc: XImportMissionControl(routerrpc.XImportMissionControlRequest) returns (routerrpc.XImportMissionControlResponse);
|
||||
*/
|
||||
xImportMissionControl(input: XImportMissionControlRequest, options?: RpcOptions): UnaryCall<XImportMissionControlRequest, XImportMissionControlResponse> {
|
||||
const method = this.methods[7], opt = this._transport.mergeOptions(options);
|
||||
return stackIntercept<XImportMissionControlRequest, XImportMissionControlResponse>("unary", this._transport, method, opt, input);
|
||||
}
|
||||
/**
|
||||
*
|
||||
* GetMissionControlConfig returns mission control's current config.
|
||||
*
|
||||
* @generated from protobuf rpc: GetMissionControlConfig(routerrpc.GetMissionControlConfigRequest) returns (routerrpc.GetMissionControlConfigResponse);
|
||||
*/
|
||||
getMissionControlConfig(input: GetMissionControlConfigRequest, options?: RpcOptions): UnaryCall<GetMissionControlConfigRequest, GetMissionControlConfigResponse> {
|
||||
const method = this.methods[8], opt = this._transport.mergeOptions(options);
|
||||
return stackIntercept<GetMissionControlConfigRequest, GetMissionControlConfigResponse>("unary", this._transport, method, opt, input);
|
||||
}
|
||||
/**
|
||||
*
|
||||
* SetMissionControlConfig will set mission control's config, if the config
|
||||
* provided is valid.
|
||||
*
|
||||
* @generated from protobuf rpc: SetMissionControlConfig(routerrpc.SetMissionControlConfigRequest) returns (routerrpc.SetMissionControlConfigResponse);
|
||||
*/
|
||||
setMissionControlConfig(input: SetMissionControlConfigRequest, options?: RpcOptions): UnaryCall<SetMissionControlConfigRequest, SetMissionControlConfigResponse> {
|
||||
const method = this.methods[9], opt = this._transport.mergeOptions(options);
|
||||
return stackIntercept<SetMissionControlConfigRequest, SetMissionControlConfigResponse>("unary", this._transport, method, opt, input);
|
||||
}
|
||||
/**
|
||||
*
|
||||
* QueryProbability returns the current success probability estimate for a
|
||||
* given node pair and amount.
|
||||
*
|
||||
* @generated from protobuf rpc: QueryProbability(routerrpc.QueryProbabilityRequest) returns (routerrpc.QueryProbabilityResponse);
|
||||
*/
|
||||
queryProbability(input: QueryProbabilityRequest, options?: RpcOptions): UnaryCall<QueryProbabilityRequest, QueryProbabilityResponse> {
|
||||
const method = this.methods[10], opt = this._transport.mergeOptions(options);
|
||||
return stackIntercept<QueryProbabilityRequest, QueryProbabilityResponse>("unary", this._transport, method, opt, input);
|
||||
}
|
||||
/**
|
||||
*
|
||||
* BuildRoute builds a fully specified route based on a list of hop public
|
||||
* keys. It retrieves the relevant channel policies from the graph in order to
|
||||
* calculate the correct fees and time locks.
|
||||
*
|
||||
* @generated from protobuf rpc: BuildRoute(routerrpc.BuildRouteRequest) returns (routerrpc.BuildRouteResponse);
|
||||
*/
|
||||
buildRoute(input: BuildRouteRequest, options?: RpcOptions): UnaryCall<BuildRouteRequest, BuildRouteResponse> {
|
||||
const method = this.methods[11], opt = this._transport.mergeOptions(options);
|
||||
return stackIntercept<BuildRouteRequest, BuildRouteResponse>("unary", this._transport, method, opt, input);
|
||||
}
|
||||
/**
|
||||
*
|
||||
* SubscribeHtlcEvents creates a uni-directional stream from the server to
|
||||
* the client which delivers a stream of htlc events.
|
||||
*
|
||||
* @generated from protobuf rpc: SubscribeHtlcEvents(routerrpc.SubscribeHtlcEventsRequest) returns (stream routerrpc.HtlcEvent);
|
||||
*/
|
||||
subscribeHtlcEvents(input: SubscribeHtlcEventsRequest, options?: RpcOptions): ServerStreamingCall<SubscribeHtlcEventsRequest, HtlcEvent> {
|
||||
const method = this.methods[12], opt = this._transport.mergeOptions(options);
|
||||
return stackIntercept<SubscribeHtlcEventsRequest, HtlcEvent>("serverStreaming", this._transport, method, opt, input);
|
||||
}
|
||||
/**
|
||||
*
|
||||
* Deprecated, use SendPaymentV2. SendPayment attempts to route a payment
|
||||
* described by the passed PaymentRequest to the final destination. The call
|
||||
* returns a stream of payment status updates.
|
||||
*
|
||||
* @deprecated
|
||||
* @generated from protobuf rpc: SendPayment(routerrpc.SendPaymentRequest) returns (stream routerrpc.PaymentStatus);
|
||||
*/
|
||||
sendPayment(input: SendPaymentRequest, options?: RpcOptions): ServerStreamingCall<SendPaymentRequest, PaymentStatus> {
|
||||
const method = this.methods[13], opt = this._transport.mergeOptions(options);
|
||||
return stackIntercept<SendPaymentRequest, PaymentStatus>("serverStreaming", this._transport, method, opt, input);
|
||||
}
|
||||
/**
|
||||
*
|
||||
* Deprecated, use TrackPaymentV2. TrackPayment returns an update stream for
|
||||
* the payment identified by the payment hash.
|
||||
*
|
||||
* @deprecated
|
||||
* @generated from protobuf rpc: TrackPayment(routerrpc.TrackPaymentRequest) returns (stream routerrpc.PaymentStatus);
|
||||
*/
|
||||
trackPayment(input: TrackPaymentRequest, options?: RpcOptions): ServerStreamingCall<TrackPaymentRequest, PaymentStatus> {
|
||||
const method = this.methods[14], opt = this._transport.mergeOptions(options);
|
||||
return stackIntercept<TrackPaymentRequest, PaymentStatus>("serverStreaming", this._transport, method, opt, input);
|
||||
}
|
||||
/**
|
||||
* *
|
||||
* HtlcInterceptor dispatches a bi-directional streaming RPC in which
|
||||
* Forwarded HTLC requests are sent to the client and the client responds with
|
||||
* a boolean that tells LND if this htlc should be intercepted.
|
||||
* In case of interception, the htlc can be either settled, cancelled or
|
||||
* resumed later by using the ResolveHoldForward endpoint.
|
||||
*
|
||||
* @generated from protobuf rpc: HtlcInterceptor(stream routerrpc.ForwardHtlcInterceptResponse) returns (stream routerrpc.ForwardHtlcInterceptRequest);
|
||||
*/
|
||||
htlcInterceptor(options?: RpcOptions): DuplexStreamingCall<ForwardHtlcInterceptResponse, ForwardHtlcInterceptRequest> {
|
||||
const method = this.methods[15], opt = this._transport.mergeOptions(options);
|
||||
return stackIntercept<ForwardHtlcInterceptResponse, ForwardHtlcInterceptRequest>("duplex", this._transport, method, opt);
|
||||
}
|
||||
/**
|
||||
*
|
||||
* UpdateChanStatus attempts to manually set the state of a channel
|
||||
* (enabled, disabled, or auto). A manual "disable" request will cause the
|
||||
* channel to stay disabled until a subsequent manual request of either
|
||||
* "enable" or "auto".
|
||||
*
|
||||
* @generated from protobuf rpc: UpdateChanStatus(routerrpc.UpdateChanStatusRequest) returns (routerrpc.UpdateChanStatusResponse);
|
||||
*/
|
||||
updateChanStatus(input: UpdateChanStatusRequest, options?: RpcOptions): UnaryCall<UpdateChanStatusRequest, UpdateChanStatusResponse> {
|
||||
const method = this.methods[16], opt = this._transport.mergeOptions(options);
|
||||
return stackIntercept<UpdateChanStatusRequest, UpdateChanStatusResponse>("unary", this._transport, method, opt, input);
|
||||
}
|
||||
}
|
||||
3230
proto/lnd/router.ts
Normal file
3230
proto/lnd/router.ts
Normal file
File diff suppressed because it is too large
Load diff
1530
proto/lnd/rpc.client.ts
Normal file
1530
proto/lnd/rpc.client.ts
Normal file
File diff suppressed because it is too large
Load diff
17827
proto/lnd/rpc.ts
Normal file
17827
proto/lnd/rpc.ts
Normal file
File diff suppressed because it is too large
Load diff
|
|
@ -426,9 +426,8 @@ service Lightning {
|
|||
/* lncli: `fwdinghistory`
|
||||
ForwardingHistory allows the caller to query the htlcswitch for a record of
|
||||
all HTLCs forwarded within the target time range, and integer offset
|
||||
within that time range, for a maximum number of events. If no maximum number
|
||||
of events is specified, up to 100 events will be returned. If no time-range
|
||||
is specified, then events will be returned in the order that they occured.
|
||||
within that time range. If no time-range is specified, then the first chunk
|
||||
of the past 24 hrs of forwarding history are returned.
|
||||
|
||||
A list of forwarding events are returned. The size of each forwarding event
|
||||
is 40 bytes, and the max message size able to be returned in gRPC is 4 MiB.
|
||||
BIN
proto/protoc-gen-pub.exe
Normal file
BIN
proto/protoc-gen-pub.exe
Normal file
Binary file not shown.
80
proto/service/methods.proto
Normal file
80
proto/service/methods.proto
Normal file
|
|
@ -0,0 +1,80 @@
|
|||
syntax = "proto3";
|
||||
|
||||
package methods;
|
||||
|
||||
import "google/protobuf/descriptor.proto";
|
||||
import "structs.proto";
|
||||
|
||||
option go_package = "github.com/shocknet/lightning.pub";
|
||||
option (file_options) = {
|
||||
supported_http_methods:["post", "get"];
|
||||
supported_auths:[
|
||||
{
|
||||
id: "no_auth"
|
||||
name: "NoAuth"
|
||||
context:[]
|
||||
},
|
||||
{
|
||||
id: "guest"
|
||||
name: "Guest",
|
||||
context:{
|
||||
key:"token",
|
||||
value:"string"
|
||||
}
|
||||
},
|
||||
{
|
||||
id: "admin",
|
||||
name: "Admin",
|
||||
encrypted:true,
|
||||
context:{
|
||||
key:"pub",
|
||||
value:"string"
|
||||
}
|
||||
}
|
||||
];
|
||||
};
|
||||
|
||||
message MethodQueryOptions {
|
||||
repeated string items = 1;
|
||||
}
|
||||
|
||||
extend google.protobuf.MethodOptions { // TODO: move this stuff to dep repo?
|
||||
string auth_type = 50003;
|
||||
string http_method = 50004;
|
||||
string http_route = 50005;
|
||||
MethodQueryOptions query = 50006;
|
||||
|
||||
}
|
||||
|
||||
message ProtoFileOptions {
|
||||
message SupportedAuth {
|
||||
string id = 1;
|
||||
string name = 2;
|
||||
bool encrypted = 3;
|
||||
map<string,string> context = 4;
|
||||
}
|
||||
repeated SupportedAuth supported_auths = 1;
|
||||
repeated string supported_http_methods = 2;
|
||||
}
|
||||
|
||||
extend google.protobuf.FileOptions {
|
||||
ProtoFileOptions file_options = 50004;
|
||||
}
|
||||
|
||||
service LightningPub {
|
||||
rpc Health(structs.Empty) returns (structs.Empty){
|
||||
option (auth_type) = "NoAuth";
|
||||
option (http_method) = "get";
|
||||
option (http_route) = "/health";
|
||||
};
|
||||
rpc EncryptionExchange(structs.EncryptionExchangeRequest) returns (structs.Empty){
|
||||
option (auth_type) = "NoAuth";
|
||||
option (http_method) = "post";
|
||||
option (http_route) = "/api/encryption/exchange";
|
||||
};
|
||||
rpc LndGetInfo(structs.Empty) returns (structs.LndGetInfoResponse){
|
||||
option (auth_type) = "NoAuth";
|
||||
option (http_method) = "get";
|
||||
option (http_route) = "/api/lnd/getinfo";
|
||||
};
|
||||
}
|
||||
17
proto/service/structs.proto
Normal file
17
proto/service/structs.proto
Normal file
|
|
@ -0,0 +1,17 @@
|
|||
syntax = "proto3";
|
||||
|
||||
package structs;
|
||||
|
||||
option go_package = "github.com/shocknet/lightning.pub";
|
||||
|
||||
message Empty {}
|
||||
|
||||
|
||||
message EncryptionExchangeRequest {
|
||||
string public_key = 1;
|
||||
string device_id = 2;
|
||||
}
|
||||
|
||||
message LndGetInfoResponse {
|
||||
string alias = 1;
|
||||
}
|
||||
|
|
@ -1,78 +0,0 @@
|
|||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||
<title>Document</title>
|
||||
<script src="/qrCodeGenerator"></script>
|
||||
</head>
|
||||
<body>
|
||||
<p id="errorContainer"></p>
|
||||
<div>
|
||||
<h3>Tunnel</h3>
|
||||
<p id="tunnelState"></p>
|
||||
</div>
|
||||
<div>
|
||||
<h3>Access Secret</h3>
|
||||
<p id="accessSecretState"></p>
|
||||
</div>
|
||||
<div id="qrcode"></div>
|
||||
<script>
|
||||
fetch(`${window.location.origin}/api/accessInfo`)
|
||||
.then(res => res.json())
|
||||
.then(j => {
|
||||
console.log(j)
|
||||
if(j.field){
|
||||
document.querySelector('#errorContainer').innerHTML ='there was an error, unable to load access information, reason: '+ j.message
|
||||
return
|
||||
}
|
||||
|
||||
const tunnelUrl = handleTunnelInfo(j)
|
||||
const accessCode = handleAccessCode(j)
|
||||
|
||||
const baseUrl = tunnelUrl ? tunnelUrl : window.location.host
|
||||
const finalUrl = accessCode ? `${accessCode}#${baseUrl}` : baseUrl
|
||||
new QRCode(document.getElementById("qrcode"), finalUrl);
|
||||
|
||||
})
|
||||
.catch(e => {
|
||||
console.log(e.message)
|
||||
})
|
||||
|
||||
const handleTunnelInfo = (res) => {
|
||||
|
||||
|
||||
const tunnelState = document.querySelector("#tunnelState")
|
||||
if(res.tunnelDisabled){
|
||||
tunnelState.innerHTML = 'The tunnel service is disabled'
|
||||
return
|
||||
}
|
||||
if(res.relayNotFound) {
|
||||
tunnelState.innerHTML = 'The tunnel service seems broken'
|
||||
return
|
||||
}
|
||||
tunnelState.innerHTML = `Tunnel URL: ${res.relayId}@${res.relayUrl}`
|
||||
return `${res.relayId}@${res.relayUrl}`
|
||||
}
|
||||
|
||||
const handleAccessCode = (res) => {
|
||||
const accessSecretState = document.querySelector("#accessSecretState")
|
||||
if(res.accessSecretDisabled){
|
||||
accessSecretState.innerHTML = 'The access secret is disabled'
|
||||
return
|
||||
}
|
||||
if(res.accessCodeNotFound){
|
||||
accessSecretState.innerHTML = 'The access secret seems broken'
|
||||
return
|
||||
}
|
||||
if(res.accessCodeUsed){
|
||||
accessSecretState.innerHTML = 'The access secret was already used'
|
||||
return
|
||||
}
|
||||
accessSecretState.innerHTML = `Access Secret: ${res.accessCode}`
|
||||
return res.accessCode
|
||||
}
|
||||
</script>
|
||||
</body>
|
||||
</html>
|
||||
|
|
@ -1,96 +0,0 @@
|
|||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||
<title>Document</title>
|
||||
<style>
|
||||
html,
|
||||
body{
|
||||
height: 100%;
|
||||
margin: 0;
|
||||
}
|
||||
.main{
|
||||
display: flex;
|
||||
justify-content: center;
|
||||
align-items: center;
|
||||
height: 100%;
|
||||
|
||||
}
|
||||
.content{
|
||||
font-size: 4rem;
|
||||
background: #333333;
|
||||
padding: 1rem;
|
||||
border-radius: 0.5rem;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
align-items: center;
|
||||
}
|
||||
#content-name{
|
||||
color: #FFFFFF;
|
||||
text-shadow: 0 0 10px #FFFFFF;
|
||||
margin:0
|
||||
}
|
||||
#content-message{
|
||||
color: #FFFFFF;
|
||||
text-shadow: 4px 3px 0 #7A7A7A;
|
||||
margin:0
|
||||
|
||||
}
|
||||
#content-amount{
|
||||
color: #FFFFFF;
|
||||
text-shadow: 0 -1px 4px #FFF, 0 -2px 10px #ff0, 0 -10px 20px #ff8000, 0 -18px 40px #F00;
|
||||
margin:0
|
||||
}
|
||||
.hide{
|
||||
display:none
|
||||
}
|
||||
</style>
|
||||
<script src="https://cdnjs.cloudflare.com/ajax/libs/socket.io/4.0.1/socket.io.js" integrity="sha512-q/dWJ3kcmjBLU4Qc47E4A9kTB4m3wuTY7vkFJDTZKjTs8jhyGQnaUrxa0Ytd0ssMZhbNua9hE+E7Qv1j+DyZwA==" crossorigin="anonymous"></script>
|
||||
<script src="https://cdnjs.cloudflare.com/ajax/libs/socket.io/3.1.3/socket.io.msgpack.min.js" crossorigin="anonymous"></script>
|
||||
</head>
|
||||
<body>
|
||||
<div class="main">
|
||||
<div class="content hide">
|
||||
<p id="content-name">some random name i dont know</p>
|
||||
<p id="content-message">JUST TIPPED YOU!</p>
|
||||
<p id="content-amount">100sats</p>
|
||||
</div>
|
||||
</div>
|
||||
<script>
|
||||
const queryString = window.location.search;
|
||||
const urlParams = new URLSearchParams(queryString);
|
||||
const accessId = urlParams.get("accessId")
|
||||
const relayId = urlParams.get("x-shock-hybrid-relay-id-x")
|
||||
|
||||
const socketSetting = {
|
||||
reconnection: true,
|
||||
rejectUnauthorized: false,
|
||||
withCredentials: true,
|
||||
transports: ["websocket"]
|
||||
}
|
||||
var socket = io(`${location.origin}/streams`,socketSetting);
|
||||
socket.emit('hybridRelayId',{id:relayId})
|
||||
socket.on("connect", () => {
|
||||
setTimeout(()=>{socket.emit("accessId",accessId)},500)
|
||||
})
|
||||
let latestTimeout = null
|
||||
socket.on("update",(update)=>{
|
||||
const name = document.querySelector("#content-name")
|
||||
name.innerHTML = update.name
|
||||
const message = document.querySelector("#content-message")
|
||||
message.innerHTML = update.message
|
||||
const amount = document.querySelector("#content-amount")
|
||||
amount.innerHTML = update.amount
|
||||
|
||||
const content = document.querySelector(".content")
|
||||
content.classList.remove("hide")
|
||||
clearTimeout(latestTimeout)
|
||||
latestTimeout = setTimeout(()=>{
|
||||
content.classList.add("hide")
|
||||
},5000)
|
||||
})
|
||||
</script>
|
||||
</body>
|
||||
</html>
|
||||
1
public/qrcode.min.js
vendored
1
public/qrcode.min.js
vendored
File diff suppressed because one or more lines are too long
|
|
@ -1,79 +0,0 @@
|
|||
/*
 * @prettier
 */
// @ts-nocheck
const jwt = require('jsonwebtoken')
const uuidv1 = require('uuid/v1')
const jsonfile = require('jsonfile')
const path = require('path')
const logger = require('../../config/log')
const Storage = require('node-persist')
const FS = require('../../utils/fs')

const rootFolder = process.resourcesPath || __dirname

/**
 * Issues and validates JWT access tokens. Each token is signed with a
 * freshly generated uuid secret, persisted keyed by the token's creation
 * timestamp so the same secret can be looked up at verification time.
 */
class Auth {
  /**
   * Reads the persisted secrets map, initializing it to an empty object on
   * first use.
   * @returns {Promise<Record<string, string>>} Map of timestamp -> secret.
   */
  readSecrets = async () => {
    const secrets = await Storage.get('auth/secrets')

    if (secrets) {
      return secrets
    }

    // FIX: Storage.set() resolves to a write-result descriptor, not the
    // stored value; returning it here later corrupted the object spread in
    // writeSecrets(). Initialize the store, then return the actual value.
    await Storage.set('auth/secrets', {})
    return {}
  }

  /**
   * Persists `value` under `key` inside the secrets map.
   * @param {string|number} key Token creation timestamp.
   * @param {string} value The uuid secret that signed that token.
   * @returns {Promise<Record<string, string>>} The updated secrets map.
   */
  async writeSecrets(key, value) {
    const allSecrets = await this.readSecrets()
    const updated = {
      ...allSecrets,
      [key]: value
    }
    await Storage.set('auth/secrets', updated)
    // FIX: return the map itself rather than Storage.set()'s write result.
    return updated
  }

  /**
   * Creates a new JWT (expires in 500h) signed with a one-off uuid secret,
   * which is persisted keyed by the token's creation timestamp.
   * @returns {Promise<string>} The signed token.
   */
  async generateToken() {
    const timestamp = Date.now()
    const secret = uuidv1()
    logger.info('Generating new secret...')
    const token = jwt.sign(
      {
        data: { timestamp }
      },
      secret,
      { expiresIn: '500h' }
    )
    logger.info('Saving secret...')
    await this.writeSecrets(timestamp, secret)
    return token
  }

  /**
   * Verifies a token previously issued by generateToken().
   * @param {string} token
   * @returns {Promise<{ valid: true }>}
   * @throws {Error} If the token is malformed, its signing secret is
   * unknown, or jwt verification fails (e.g. expired).
   */
  async validateToken(token) {
    try {
      const key = jwt.decode(token).data.timestamp
      const secrets = await this.readSecrets()
      const secret = secrets[key]
      if (!secret) {
        // FIX: was `throw { valid: false }` — throwing a plain object loses
        // the stack trace and breaks `instanceof Error` checks upstream.
        throw new Error('Unknown token secret')
      }
      return new Promise((resolve, reject) => {
        jwt.verify(token, secret, (err, decoded) => {
          if (err) {
            logger.info('validateToken err', err)
            reject(err)
          } else {
            // logger.info('decoded', decoded)
            resolve({ valid: true })
          }
        })
      })
    } catch (err) {
      logger.error(err)
      throw err
    }
  }
}

module.exports = new Auth()
|
||||
|
|
@ -1,486 +0,0 @@
|
|||
/**
|
||||
* @format
|
||||
*/
|
||||
const Common = require('shock-common')
|
||||
const Gun = require('../../../utils/GunSmith')
|
||||
// @ts-ignore
|
||||
require('gun/nts')
|
||||
const logger = require('../../../config/log')
|
||||
// @ts-ignore
|
||||
// Gun.log = () => {}
|
||||
// @ts-ignore
|
||||
require('gun/lib/open')
|
||||
// @ts-ignore
|
||||
require('gun/lib/load')
|
||||
//@ts-ignore
|
||||
const { encryptedEmit, encryptedOn } = require('../../../utils/ECC/socket')
|
||||
const Key = require('../contact-api/key')
|
||||
const Config = require('../config')
|
||||
|
||||
/** @type {import('../contact-api/SimpleGUN').ISEA} */
|
||||
// @ts-ignore
|
||||
const SEAx = require('gun/sea')
|
||||
// Re-enable in the future, when SEA errors inside user.auth/etc actually
|
||||
// propagate up.
|
||||
// SEAx.throw = true
|
||||
|
||||
/** @type {import('../contact-api/SimpleGUN').ISEA} */
|
||||
const mySEA = {}
|
||||
|
||||
// Avoid this: https://github.com/amark/gun/issues/804 and any other issues
|
||||
const $$__SHOCKWALLET__ENCRYPTED__ = '$$_SHOCKWALLET__ENCRYPTED__'
|
||||
const $$__SHOCKWALLET__MSG__ = '$$__SHOCKWALLET__MSG__'
|
||||
const $$__SHOCKWALLET__NUMBER__ = '$$__SHOCKWALLET__NUMBER__'
|
||||
const $$__SHOCKWALLET__BOOLEAN__ = '$$__SHOCKWALLET__BOOLEAN__'
|
||||
|
||||
/**
 * Encrypts a string/number/boolean, prepending a type tag so the original
 * JS type can be restored on decrypt, and prefixing the ciphertext with
 * $$__SHOCKWALLET__ENCRYPTED__ (workaround for gun issue #804).
 * @param {string|number|boolean} msg Must be a populated string, a number,
 * or a boolean.
 * @param {string} secret Populated string used as the encryption secret.
 * @returns {Promise<string>} Prefixed ciphertext.
 */
mySEA.encrypt = (msg, secret) => {
  if (typeof secret !== 'string') {
    // SECURITY FIX: the original interpolated JSON.stringify(secret) into
    // this error message, leaking key material into logs/stack traces.
    // Report only the offending type instead.
    throw new TypeError(
      `mySEA.encrypt() -> expected secret to be a string, instead got: ${typeof secret}`
    )
  }

  if (secret.length < 1) {
    throw new TypeError(
      `mySEA.encrypt() -> expected secret to be a populated string`
    )
  }

  let strToEncode = ''

  if (typeof msg === 'string') {
    if (msg.length === 0) {
      throw new TypeError(
        'mySEA.encrypt() -> expected msg to be a populated string'
      )
    }

    strToEncode = $$__SHOCKWALLET__MSG__ + msg
  } else if (typeof msg === 'boolean') {
    strToEncode = $$__SHOCKWALLET__BOOLEAN__ + msg
  } else if (typeof msg === 'number') {
    strToEncode = $$__SHOCKWALLET__NUMBER__ + msg
  } else {
    throw new TypeError('mySea.encrypt() -> Not a valid msg type.')
  }

  return SEAx.encrypt(strToEncode, secret).then(encMsg => {
    return $$__SHOCKWALLET__ENCRYPTED__ + encMsg
  })
}
|
||||
|
||||
/**
 * Shared decrypt routine: validates arguments, strips the SHOCKWALLET
 * encryption prefix, decrypts, then restores the original JS type from the
 * type tag that encrypt() embedded.
 * @param {string} encMsg
 * @param {string} secret
 * @returns {Promise<any>}
 */
const decryptBase = (encMsg, secret) => {
  // Validation stays synchronous on purpose: bad arguments throw
  // immediately rather than producing a rejected promise.
  if (typeof encMsg !== 'string') {
    throw new TypeError(
      'mySEA.encrypt() -> expected encMsg to be an string instead got: ' +
        typeof encMsg
    )
  }

  if (encMsg.length === 0) {
    throw new TypeError(
      'mySEA.encrypt() -> expected encMsg to be a populated string'
    )
  }

  if (typeof secret !== 'string') {
    throw new TypeError('mySea.decrypt() -> expected secret to be an string')
  }

  if (secret.length === 0) {
    throw new TypeError(
      'mySea.decrypt() -> expected secret to be a populated string'
    )
  }

  if (encMsg.indexOf($$__SHOCKWALLET__ENCRYPTED__) !== 0) {
    throw new TypeError(
      'Trying to pass a non prefixed encrypted string to mySea.decrypt(): ' +
        encMsg
    )
  }

  const ciphertext = encMsg.slice($$__SHOCKWALLET__ENCRYPTED__.length)

  return SEAx.decrypt(ciphertext, secret).then(plain => {
    if (typeof plain !== 'string') {
      throw new TypeError('Could not decrypt')
    }

    // Restore the original type from the tag prepended by encrypt().
    if (plain.startsWith($$__SHOCKWALLET__MSG__)) {
      return plain.slice($$__SHOCKWALLET__MSG__.length)
    }

    if (plain.startsWith($$__SHOCKWALLET__BOOLEAN__)) {
      const tail = plain.slice($$__SHOCKWALLET__BOOLEAN__.length)
      if (tail === 'true') {
        return true
      }
      if (tail === 'false') {
        return false
      }
      throw new Error('Could not decrypt boolean value.')
    }

    if (plain.startsWith($$__SHOCKWALLET__NUMBER__)) {
      return Number(plain.slice($$__SHOCKWALLET__NUMBER__.length))
    }

    throw new TypeError(
      `mySea.encrypt() -> Unexpected type of prefix found inside decrypted value, first 20 characters: ${plain.slice(
        0,
        20
      )}`
    )
  })
}
|
||||
|
||||
// The three public decrypt entry points share one implementation; the
// distinct names exist so call sites document the expected result type.
mySEA.decrypt = (encMsg, secret) => decryptBase(encMsg, secret)

mySEA.decryptNumber = (encMsg, secret) => decryptBase(encMsg, secret)

mySEA.decryptBoolean = (encMsg, secret) => decryptBase(encMsg, secret)
|
||||
|
||||
/**
 * Derives a shared ECDH secret between our own SEA pair and a peer's
 * encryption public key.
 * @param {string} recipientOrSenderEpub The other party's epub (public).
 * @param {object} recipientOrSenderSEA Our own SEA key pair (contains
 * private key material — must never be logged).
 * @returns {Promise<string>} The derived shared secret.
 */
mySEA.secret = async (recipientOrSenderEpub, recipientOrSenderSEA) => {
  // SECURITY FIX: every error message below used to JSON.stringify() the
  // full SEA pair (including priv/epriv), leaking private key material
  // into thrown errors and any log that captures them. Error messages now
  // mention only the (public) epub and argument types.
  if (typeof recipientOrSenderEpub !== 'string') {
    throw new TypeError(
      `epub has to be an string, instead got: ${typeof recipientOrSenderEpub}`
    )
  }
  if (recipientOrSenderEpub.length === 0) {
    throw new TypeError('epub has to be populated string')
  }
  if (typeof recipientOrSenderSEA !== 'object') {
    throw new TypeError(
      `sea has to be an object, instead got: ${typeof recipientOrSenderSEA}`
    )
  }

  if (recipientOrSenderSEA === null) {
    throw new TypeError('sea has to be non null')
  }

  // Guard against the common mistake of passing the signing pub instead of
  // the encryption epub.
  if (recipientOrSenderEpub === recipientOrSenderSEA.pub) {
    throw new Error(
      `Do not use pub for mySecret, epub: ${recipientOrSenderEpub}`
    )
  }

  const sec = await SEAx.secret(recipientOrSenderEpub, recipientOrSenderSEA)

  if (typeof sec !== 'string') {
    throw new TypeError(
      `Could not generate secret, epub: ${recipientOrSenderEpub}`
    )
  }

  if (sec.length === 0) {
    throw new TypeError(
      `SEA.secret returned an empty string!, epub: ${recipientOrSenderEpub}`
    )
  }

  return sec
}
|
||||
|
||||
const { Constants } = require('shock-common')
|
||||
|
||||
const API = require('../contact-api/index')
|
||||
|
||||
/**
|
||||
* @typedef {import('../contact-api/SimpleGUN').GUNNode} GUNNode
|
||||
* @typedef {import('../contact-api/SimpleGUN').UserGUNNode} UserGUNNode
|
||||
* @typedef {import('../contact-api/SimpleGUN').ValidDataValue} ValidDataValue
|
||||
*/
|
||||
|
||||
// TO DO: move to common repo
|
||||
/**
|
||||
* @typedef {object} Emission
|
||||
* @prop {boolean} ok
|
||||
* @prop {any} msg
|
||||
* @prop {Record<string, any>} origBody
|
||||
*/
|
||||
|
||||
/**
|
||||
* @typedef {object} EncryptedEmissionLegacy
|
||||
* @prop {string} encryptedData
|
||||
* @prop {string} encryptedKey
|
||||
* @prop {string} iv
|
||||
*/
|
||||
|
||||
/**
|
||||
* @typedef {object} EncryptedEmission
|
||||
* @prop {string} ciphertext
|
||||
* @prop {string} mac
|
||||
* @prop {string} iv
|
||||
* @prop {string} ephemPublicKey
|
||||
*/
|
||||
|
||||
// TO DO: move to common repo
|
||||
/**
|
||||
* @typedef {object} SimpleSocket
|
||||
* @prop {(eventName: string, data?: Emission|EncryptedEmissionLegacy|EncryptedEmission|ValidDataValue) => void} emit
|
||||
* @prop {(eventName: string, handler: (data: any, callback: (err?: any, data?: any) => void) => void) => void} on
|
||||
* @prop {{ auth: { [key: string]: any } }} handshake
|
||||
*/
|
||||
|
||||
/* eslint-disable init-declarations */

// Single gun instance for this process. AXE relay discovery is disabled;
// peers come from configuration.
const gun = Gun({
  axe: false,
  peers: Config.PEERS
})

const user = gun.user()

/* eslint-enable init-declarations */

/** @type {string|null} */
let mySec = null

/**
 * Our self-secret, derived during authenticate().
 * @returns {string}
 */
const getMySecret = () => /** @type {string} */ (mySec)

let _isAuthenticating = false
let _isRegistering = false

// gun sets user.is only while a user session is active.
const isAuthenticated = () => typeof user.is === 'object' && user.is !== null
const isAuthenticating = () => _isAuthenticating
const isRegistering = () => _isRegistering

// Deliberately disabled: direct access to the root gun node is not allowed.
const getGun = () => {
  throw new Error('NO GUNS')
}

/**
 * Returns the authenticated user node.
 * @throws {Error} NOT_AUTH when no session is active.
 */
const getUser = () => {
  if (!user.is) {
    logger.warn('called getUser() without being authenticated')
    throw new Error(Constants.ErrorCode.NOT_AUTH)
  }
  return user
}
|
||||
|
||||
/**
 * Returns a promise containing the public key of the newly created user.
 * Also derives the self-secret, seeds the follows record, and starts the
 * background jobs tied to the session.
 * @param {string} alias
 * @param {string} pass
 * @returns {Promise<string>}
 */
const authenticate = async (alias, pass) => {
  if (!Common.isPopulatedString(alias)) {
    throw new TypeError(
      `Expected alias to be a populated string, instead got: ${alias}`
    )
  }
  if (!Common.isPopulatedString(pass)) {
    throw new TypeError(
      `Expected pass to be a populated string, instead got: ${pass}`
    )
  }

  if (isAuthenticating()) {
    throw new Error(
      'Cannot authenticate while another authentication attempt is going on'
    )
  }

  _isAuthenticating = true

  // user.auth() reports only through its callback; promisify it.
  const ack = await new Promise(res => {
    user.auth(alias, pass, _ack => {
      res(_ack)
    })
  })

  _isAuthenticating = false

  if (typeof ack.err === 'string') {
    throw new Error(ack.err)
  }

  if (typeof ack.sea !== 'object') {
    logger.info(ack)
    logger.error(
      `Unknown error, wrong password? Ack looks like: ${JSON.stringify(ack)}`
    )
    throw new Error(`Didn't work, bad password?`)
  }

  mySec = await mySEA.secret(user._.sea.epub, user._.sea)
  // clock skew
  await new Promise(res => setTimeout(res, 2000))

  // Seed the follows record so later reads don't hit an empty node.
  await /** @type {Promise<void>} */ (new Promise((res, rej) => {
    user.get(Key.FOLLOWS).put(
      {
        unused: null
      },
      putAck => {
        // gun sometimes reports numeric "errors" that are not failures.
        if (putAck.err && typeof putAck.err !== 'number') {
          rej(
            new Error(
              `Error initializing follows: ${JSON.stringify(
                putAck.err,
                null,
                4
              )}`
            )
          )
        } else {
          res()
        }
      }
    )
  }))

  // move this to a subscription; implement off() ? todo
  API.Jobs.onOrders(user, gun, mySEA)
  API.Jobs.lastSeenNode(user)
  API.Events.onSeedBackup(() => {}, user, mySEA)

  return ack.sea.pub
}
|
||||
|
||||
/** Ends the current gun user session, if any. */
const logoff = () => {
  user.leave()
}
|
||||
|
||||
/**
 * Creates an user for gun. Returns a promise containing the public key of
 * the newly created user.
 * @param {string} alias
 * @param {string} pass
 * @throws {Error} If gun is authenticated or is in the process of
 * authenticating. Use `isAuthenticating()` and `isAuthenticated()` to check
 * for this first. It can also throw if the alias is already registered on
 * gun.
 * @returns {Promise<string>}
 */
const register = async (alias, pass) => {
  if (isRegistering()) {
    throw new Error('Already registering.')
  }

  if (isAuthenticating()) {
    throw new Error(
      'Cannot register while gun is being authenticated (reminder: there should only be one user created for each node).'
    )
  }

  if (isAuthenticated()) {
    throw new Error(
      'Cannot register if gun is already authenticated (reminder: there should only be one user created for each node).'
    )
  }

  /**
   * Peers provided to gun.
   */
  const peers = Object.values(gun._.opt.peers)

  const theresPeers = peers.length > 0
  const atLeastOneIsConnected = peers.some(
    p => p.wire && p.wire.readyState === 1
  )

  if (theresPeers && !atLeastOneIsConnected) {
    throw new Error(
      'Not connected to any peers for checking of duplicate aliases'
    )
  }

  // Best-effort duplicate-alias check against connected peers.
  if (theresPeers && atLeastOneIsConnected) {
    await new Promise(res => setTimeout(res, 300))

    const userData = await new Promise(res => {
      gun.get(`~@${alias}`).once(ud => res(ud), {
        // https://github.com/amark/gun/pull/971#issue-438630761
        wait: 1500
      })
    })

    if (userData) {
      throw new Error(
        'The given alias has been used before, use a unique alias instead. (Caught at 2nd try)'
      )
    }
  }

  _isRegistering = true

  /** @type {import('../contact-api/SimpleGUN').CreateAck} */
  const ack = await new Promise(res =>
    user.create(alias, pass, _ack => res(_ack))
  )

  // FIX: clear the flag immediately after user.create() settles. Previously
  // it was cleared only after the empty-ack check below, so a duplicate
  // alias (empty ack) left _isRegistering stuck at true, permanently
  // blocking all further registration attempts.
  _isRegistering = false

  // An empty ack object seems to be caused by a duplicate alias sign up
  if ('{}' === JSON.stringify(ack)) {
    throw new Error(
      'The given alias has been used before, use an unique alias instead. (Empty ack)'
    )
  }

  if (typeof ack.err === 'string') {
    throw new Error(ack.err)
  } else if (typeof ack.pub === 'string' || typeof user._.sea === 'object') {
    // OK
  } else {
    throw new Error('unknown error, ack: ' + JSON.stringify(ack))
  }

  // restart instances so write to user graph work, there's an issue with gun
  // (at least on node) where after initial user creation, writes to user graph
  // don't work
  // instantiateGun()

  logoff()

  return authenticate(alias, pass)
}
|
||||
|
||||
module.exports = {
|
||||
authenticate,
|
||||
isAuthenticated,
|
||||
isAuthenticating,
|
||||
isRegistering,
|
||||
gun,
|
||||
user,
|
||||
register,
|
||||
getGun,
|
||||
getUser,
|
||||
mySEA,
|
||||
getMySecret,
|
||||
logoff,
|
||||
$$__SHOCKWALLET__ENCRYPTED__
|
||||
}
|
||||
|
|
@ -1,25 +0,0 @@
|
|||
/**
|
||||
* @format
|
||||
*/
|
||||
/* eslint-disable no-process-env */
|
||||
|
||||
const dotenv = require('dotenv')
|
||||
const defaults = require('../../config/defaults')(false)
|
||||
|
||||
dotenv.config()
|
||||
|
||||
// @ts-ignore Let it crash if undefined
|
||||
exports.DATA_FILE_NAME = process.env.DATA_FILE_NAME || defaults.dataFileName
|
||||
|
||||
/**
|
||||
* @type {string[]}
|
||||
*/
|
||||
exports.PEERS = process.env.PEERS
|
||||
? JSON.parse(process.env.PEERS)
|
||||
: defaults.peers
|
||||
|
||||
exports.MS_TO_TOKEN_EXPIRATION = Number(
|
||||
process.env.MS_TO_TOKEN_EXPIRATION || defaults.tokenExpirationMS
|
||||
)
|
||||
|
||||
exports.SHOW_LOG = process.env.SHOW_GUN_DB_LOG === 'true'
|
||||
|
|
@ -1,149 +0,0 @@
|
|||
/**
|
||||
* @prettier
|
||||
*/
|
||||
export type Primitive = boolean | string | number
|
||||
|
||||
export interface Data {
|
||||
[K: string]: ValidDataValue
|
||||
}
|
||||
|
||||
export type ValidDataValue = Primitive | null | Data
|
||||
|
||||
export interface Ack {
|
||||
err: string | undefined
|
||||
}
|
||||
|
||||
type ListenerObjSoul = {
|
||||
'#': string
|
||||
}
|
||||
|
||||
export type ListenerObj = Record<string, ListenerObjSoul | Primitive | null> & {
|
||||
_: ListenerObjSoul
|
||||
}
|
||||
|
||||
export type ListenerData = Primitive | null | ListenerObj | undefined
|
||||
|
||||
interface OpenListenerDataObj {
|
||||
[k: string]: OpenListenerData
|
||||
}
|
||||
|
||||
export type Listener = (data: ListenerData, key: string) => void
|
||||
export type Callback = (ack: Ack) => void
|
||||
|
||||
export interface Peer {
|
||||
url: string
|
||||
id: string
|
||||
wire?: {
|
||||
readyState: number
|
||||
}
|
||||
}
|
||||
|
||||
export interface Soul {
|
||||
get: string
|
||||
put: Primitive | null | object | undefined
|
||||
opt: {
|
||||
peers: Record<string, Peer>
|
||||
}
|
||||
}
|
||||
|
||||
export type OpenListenerData = Primitive | null | OpenListenerDataObj
|
||||
export type OpenListener = (data: OpenListenerData, key: string) => void
|
||||
|
||||
export type LoadListenerData = OpenListenerData
|
||||
export type LoadListener = (data: LoadListenerData, key: string) => void
|
||||
|
||||
export interface GUNNodeBase {
|
||||
_: Soul
|
||||
|
||||
map(): GUNNode
|
||||
|
||||
on(this: GUNNode, cb: Listener): void
|
||||
once(this: GUNNode, cb?: Listener, opts?: { wait: number }): GUNNode
|
||||
|
||||
open(this: GUNNode, cb?: OpenListener): GUNNode
|
||||
load(this: GUNNode, cb?: OpenListener): GUNNode
|
||||
|
||||
load(this: GUNNode, cb?: LoadListener): GUNNode
|
||||
|
||||
off(): void
|
||||
user(): UserGUNNode
|
||||
user(epub: string): GUNNode
|
||||
|
||||
then(): Promise<ListenerData>
|
||||
then<T>(cb: (v: ListenerData) => T): Promise<ListenerData>
|
||||
}
|
||||
|
||||
export interface GUNNode extends GUNNodeBase {
|
||||
get(key: string): GUNNode
|
||||
put(data: ValidDataValue | GUNNode, cb?: Callback): GUNNode
|
||||
set(data: ValidDataValue | GUNNode, cb?: Callback): GUNNode
|
||||
}
|
||||
|
||||
export interface CreateAck {
|
||||
pub: string | undefined
|
||||
err: string | undefined
|
||||
}
|
||||
|
||||
export type CreateCB = (ack: CreateAck) => void
|
||||
|
||||
export interface AuthAck {
|
||||
err: string | undefined
|
||||
sea:
|
||||
| {
|
||||
pub: string
|
||||
}
|
||||
| undefined
|
||||
}
|
||||
|
||||
export type AuthCB = (ack: AuthAck) => void
|
||||
|
||||
export interface UserPair {
|
||||
epriv: string
|
||||
epub: string
|
||||
priv: string
|
||||
pub: string
|
||||
}
|
||||
|
||||
export interface UserSoul extends Soul {
|
||||
sea: UserPair
|
||||
}
|
||||
|
||||
export interface UserGUNNode extends GUNNode {
|
||||
_: UserSoul
|
||||
auth(user: string, pass: string, cb: AuthCB): void
|
||||
is?: {
|
||||
alias: string
|
||||
pub: string
|
||||
}
|
||||
create(user: string, pass: string, cb: CreateCB): void
|
||||
leave(): void
|
||||
}
|
||||
|
||||
export interface ISEA {
|
||||
encrypt(
|
||||
message: string | number | boolean,
|
||||
senderSecret: string
|
||||
): Promise<string>
|
||||
decrypt(encryptedMessage: string, recipientSecret: string): Promise<string>
|
||||
decryptNumber(
|
||||
encryptedMessage: string,
|
||||
recipientSecret: string
|
||||
): Promise<number>
|
||||
decryptBoolean(
|
||||
encryptedMessage: string,
|
||||
recipientSecret: string
|
||||
): Promise<boolean>
|
||||
secret(
|
||||
recipientOrSenderEpub: string,
|
||||
recipientOrSenderUserPair: UserPair
|
||||
): Promise<string>
|
||||
}
|
||||
|
||||
export interface MySEA {
|
||||
encrypt(message: string, senderSecret: string): Promise<string>
|
||||
decrypt(encryptedMessage: string, recipientSecret: string): Promise<string>
|
||||
secret(
|
||||
recipientOrSenderEpub: string,
|
||||
recipientOrSenderUserPair: UserPair
|
||||
): Promise<string>
|
||||
}
|
||||
File diff suppressed because it is too large
Load diff
Some files were not shown because too many files have changed in this diff Show more
Loading…
Add table
Add a link
Reference in a new issue