Merge branch 'master' into dependabot/npm_and_yarn/method-override-3.0.0

This commit is contained in:
CapDog 2022-10-12 18:04:02 -04:00 committed by GitHub
commit 0099b6678d
89 changed files with 10386 additions and 16052 deletions

View file

@ -1,18 +1,20 @@
# Gun db storage
DATA_FILE_NAME=radata2
# Gun peer
PEERS=["http://gun.shock.network:8765/gun"]
PEERS=["https://gun.shock.network/gun","https://gun-eu.shock.network/gun"]
# API Device Token
MS_TO_TOKEN_EXPIRATION=4500000
# E2EE
DISABLE_SHOCK_ENCRYPTION=false
SHOCK_ENCRYPTION_ECC=true
CACHE_HEADERS_MANDATORY=true
SHOCK_CACHE=true
# Use only if disabling LND encrypt phrase (security risk)
TRUSTED_KEYS=true
# SSH Tunnel Provider
LOCAL_TUNNEL_SERVER=https://tunnel.rip
# Default content to your own seed server
TORRENT_SEED_URL=https://webtorrent.shock.network
# Admin token for your own seed server
TORRENT_SEED_TOKEN=jibberish
# "default" or "hosting"
DEPLOYMENT_TYPE=hosting
# allow to create a user with unlocked lnd
ALLOW_UNLOCKED_LND=false

View file

@ -1 +1,2 @@
*.ts
/public/*.min.js

View file

@ -1,10 +1,14 @@
{
"extends": ["eslint:all", "prettier", "plugin:jest/all"],
"plugins": ["prettier", "jest", "babel"],
"extends": ["eslint:all", "prettier", "plugin:mocha/recommended"],
"plugins": ["prettier", "mocha", "babel"],
"rules": {
"prettier/prettier": "error",
"strict": "off",
"mocha/no-mocha-arrows": "off",
"max-statements-per-line": "off",
"no-empty-function": "off",
"no-console": "off",

1
.github/FUNDING.yml vendored
View file

@ -1,4 +1,3 @@
# These are supported funding model platforms
github: [shocknet,]
custom: ['https://shock.pub/qsgziGQS99sPUxV1CRwwRckn9cG6cJ3prbDsrbL7qko.oRbCaVKwJFQURWrS1pFhkfAzrkEvkQgBRIUz9uoWtrg',]

30
.github/dependabot.yml vendored Normal file
View file

@ -0,0 +1,30 @@
version: 2
updates:
- package-ecosystem: npm
directory: "/"
schedule:
interval: daily
open-pull-requests-limit: 10
ignore:
- dependency-name: bitcore-lib
versions:
- 8.24.2
- 8.25.0
- 8.25.2
- 8.25.3
- 8.25.4
- 8.25.7
- 9.0.0
- dependency-name: socket.io
versions:
- 3.1.0
- dependency-name: commander
versions:
- 7.0.0
- 7.1.0
- dependency-name: lint-staged
versions:
- 10.5.3
- dependency-name: eslint-plugin-prettier
versions:
- 3.3.1

40
.github/workflows/dockerhub.yml vendored Normal file
View file

@ -0,0 +1,40 @@
name: Publish Docker image
on:
release:
types: [published]
jobs:
push_to_registry:
name: Push Docker image to Docker Hub
runs-on: ubuntu-latest
steps:
- name: Check out the repo
uses: actions/checkout@v2
- name: Set up QEMU
uses: docker/setup-qemu-action@v1
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v1
- name: Log in to Docker Hub
uses: docker/login-action@f054a8b539a109f9f41c372932f1ae047eff08c9
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Extract metadata (tags, labels) for Docker
id: meta
uses: docker/metadata-action@98669ae865ea3cffbcbaa878cf57c20bbf1c6c38
with:
images: shockwallet/api
- name: Build and push Docker image
uses: docker/build-push-action@ad44023a93711e3deb337508980b4b5e9bcdc5dc
with:
context: .
platforms: linux/amd64,linux/arm64
push: true
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}

View file

@ -19,9 +19,22 @@ jobs:
repo: ['shocknet/Wizard']
runs-on: ubuntu-latest
steps:
- name: Repository Dispatch
- name: 🛎️ Checkout
uses: actions/checkout@v2.3.1
with:
persist-credentials: false
ref: ${{ github.ref }}
- name: ⚙️ Install Dependencies
run: yarn install
- name: 📝 Run Tests
run: yarn test
- name: 📯 Repository Dispatch
uses: peter-evans/repository-dispatch@v1
with:
token: ${{ secrets.REPO_ACCESS_TOKEN }}
repository: ${{ matrix.repo }}
event-type: api-update
client-payload: '{"ref": "${{ github.ref }}", "sha": "${{ github.sha }}"}'

10
.gitignore vendored
View file

@ -6,10 +6,20 @@ services/auth/secrets.json
# New logger date format
*.log.*
.directory
.DS_Store
test-radata/
radata/
radata-*.tmp
*.cert
*.key
*-audit.json
# Yarn v2
.pnp.*
.yarn/*
!.yarn/patches
!.yarn/plugins
!.yarn/releases
!.yarn/sdks
!.yarn/versions

1
.npmrc Normal file
View file

@ -0,0 +1 @@
engine-strict = true

1
.nvmrc Normal file
View file

@ -0,0 +1 @@
v14.18.3

69
.vscode/launch.json vendored
View file

@ -1,14 +1,77 @@
{
"configurations": [
{
"name": "Launch Program",
"name": "Attach",
"port": 9229,
"request": "attach",
"skipFiles": ["<node_internals>/**"],
"type": "pwa-node"
},
{
"name": "Nodemon",
"program": "${workspaceFolder}/main.js",
"args": ["-h", "0.0.0.0", "-c"],
"args": ["--", "-h", "0.0.0.0", "-c"],
"request": "launch",
"skipFiles": ["<node_internals>/**"],
"type": "node",
"envFile": "${workspaceFolder}/.env",
"outputCapture": "std"
"outputCapture": "std",
// https://code.visualstudio.com/docs/nodejs/nodejs-debugging#_restarting-debug-sessions-automatically-when-source-is-edited
// Tip: Pressing the Stop button stops the debug session and disconnects
// from Node.js, but nodemon (and Node.js) will continue to run. To stop
// nodemon, you will have to kill it from the command line (which is
// easily possible if you use the integratedTerminal as shown above).
// Tip: In case of syntax errors, nodemon will not be able to start
// Node.js successfully until the error has been fixed. In this case, VS
// Code will continue trying to attach to Node.js but eventually give up
// (after 10 seconds). To avoid this, you can increase the timeout by
// adding a timeout attribute with a larger value (in milliseconds).
"runtimeExecutable": "${workspaceFolder}/node_modules/nodemon/bin/nodemon.js",
"console": "integratedTerminal",
"internalConsoleOptions": "neverOpen",
"restart": true
},
{
"name": "Nodemon+Polar",
"program": "${workspaceFolder}/main.js",
"args": [
"--",
"-h",
"0.0.0.0",
"--trace-warnings",
"--max-old-space-size=4096",
"-c",
"-d",
"C:\\Users\\Predator\\AppData\\Local\\Lnd\\tls.cert",
"-m",
"C:\\Users\\Predator\\AppData\\Local\\Lnd\\data\\chain\\bitcoin\\mainnet\\admin.macaroon",
"--tunnel"
],
"request": "launch",
"skipFiles": ["<node_internals>/**"],
"type": "node",
"envFile": "${workspaceFolder}/.env",
"outputCapture": "std",
// https://code.visualstudio.com/docs/nodejs/nodejs-debugging#_restarting-debug-sessions-automatically-when-source-is-edited
// Tip: Pressing the Stop button stops the debug session and disconnects
// from Node.js, but nodemon (and Node.js) will continue to run. To stop
// nodemon, you will have to kill it from the command line (which is
// easily possible if you use the integratedTerminal as shown above).
// Tip: In case of syntax errors, nodemon will not be able to start
// Node.js successfully until the error has been fixed. In this case, VS
// Code will continue trying to attach to Node.js but eventually give up
// (after 10 seconds). To avoid this, you can increase the timeout by
// adding a timeout attribute with a larger value (in milliseconds).
"runtimeExecutable": "${workspaceFolder}/node_modules/nodemon/bin/nodemon.js",
"console": "integratedTerminal",
"internalConsoleOptions": "neverOpen",
"restart": true
}
]
}

78
.vscode/settings.json vendored
View file

@ -3,5 +3,81 @@
"typescript.tsdk": "node_modules/typescript/lib",
"debug.node.autoAttach": "on",
"editor.formatOnSave": true,
"editor.defaultFormatter": "esbenp.prettier-vscode"
"editor.defaultFormatter": "esbenp.prettier-vscode",
"cSpell.words": [
"acked",
"addinvoice",
"Authing",
"channelbalance",
"ciphertext",
"closechannel",
"closedchannels",
"Cltv",
"connectpeer",
"disconnectpeer",
"eccrypto",
"endregion",
"ephem",
"epriv",
"Epub",
"estimatefee",
"estimateroutefee",
"exportallchanbackups",
"exportchanbackup",
"falsey",
"forwardinghistory",
"getchaninfo",
"getinfo",
"getnetworkinfo",
"getnodeinfo",
"GUNRPC",
"Healthz",
"initwall",
"ISEA",
"keysend",
"kubernetes",
"listchannels",
"listinvoices",
"listpayments",
"listpeers",
"listunspent",
"lndchanbackups",
"LNDRPC",
"lndstreaming",
"lnrpc",
"lres",
"msgpack",
"newaddress",
"openchannel",
"otheruser",
"payreq",
"pendingchannels",
"preimage",
"PUBKEY",
"qrcode",
"queryroute",
"radata",
"Reqs",
"resave",
"satoshis",
"sendcoins",
"sendmany",
"sendpayment",
"sendtoroute",
"serverhost",
"serverport",
"shockping",
"SHOCKWALLET",
"signmessage",
"thenables",
"trackpayment",
"txid",
"unfollow",
"Unlocker",
"unsubscription",
"utxos",
"uuidv",
"verifymessage",
"walletbalance"
]
}

View file

@ -1,29 +1,18 @@
FROM node:12.18.0-alpine3.9
FROM node:14-buster-slim
WORKDIR /usr/src/app
ADD ./package.json /usr/src/app/package.json
ADD ./yarn.lock /usr/src/app/yarn.lock
#RUN useradd app && \
# mkdir -p /home/app/.lnd
RUN apk update && apk upgrade && \
apk add --no-cache bash git openssh
RUN yarn install
ADD . /usr/src/app
RUN ls /usr/src/app
RUN chmod +x ./docker-start.sh
#ADD ./tls.cert /usr/src/app/tls.cert
#ADD ./admin.macaroon /usr/src/app/admin.macaroon
# && \
# chown -R app:app /home/app && \
# chown -R app:app /usr/src/app && \
# chown -R app:app /start.sh
#ARG lnd_address
#ENV LND_ADDR=$lnd_address
EXPOSE 9835
CMD ["./docker-start.sh"]
VOLUME [ "/root/.lnd", "/data" ]
RUN apt-get update && apt-get install -y apt-transport-https git
WORKDIR /app
ADD ./package.json /app/package.json
ADD ./yarn.lock /app/yarn.lock
RUN yarn
ADD . /app
ENTRYPOINT [ "node", "main.js" ]

View file

@ -1,53 +1,80 @@
<h1>ShockAPI</h1>
<h1>Lightning.Pub</h1>
![GitHub last commit](https://img.shields.io/github/last-commit/shocknet/api?style=flat-square)
![GitHub last commit](https://img.shields.io/github/last-commit/shocknet/Lightning.Pub?style=flat-square)
[![PRs Welcome](https://img.shields.io/badge/PRs-welcome-brightgreen.svg?style=flat-square)](http://makeapullrequest.com)
[![Chat](https://img.shields.io/badge/chat-on%20Telegram-blue?style=flat-square)](https://t.me/Shockwallet)
[![Twitter Follow](https://img.shields.io/twitter/follow/ShockBTC?style=flat-square)](https://twitter.com/shockbtc)
[![Chat](https://img.shields.io/badge/chat-on%20Telegram-blue?style=flat-square)](https://t.me/ShockBTC)
[![Twitter Follow](https://img.shields.io/twitter/follow/ShockBTC?style=flat-square)](https://twitter.com/ShockBTC)
<p></p>
This is an alpha release of the Shockwallet backend service, providing a wrapper for [LND](https://github.com/shocknet/lnd/releases) and a daemon for a decentralized social graph over [GUN](https://gun.eco/).<br>
`Pub` enables your Lightning node with public Web API's, providing a framework for permissionless applications that depend on Lightning.
- As a wrapper for [`LND`](https://github.com/lightningnetwork/lnd/releases), `Pub` also offers node operators Enterprise-class management capabilities.
- An optional SSL proxy service is included for ease of use through zero-configuration networking.<br>
Run this service on your Lightning node and connect with a mobile device or desktop browser.
#### This repository is under rapid iteration and should only be used in development.
### Easy Installation
For easy setup on your Laptop/Desktop, [a node wizard is available here.](https://github.com/shocknet/wizard)
---
<!-- - [Easy Installation](#easy-installation)-->
- [Manual Installation](#manual-installation)
- [Docker Usage](#docker-usage)
- [Node Security](#node-security)
<!--- - [Docker for Raspberry Pi](#docker-for-raspberry-pi) -->
---
<!--- - ### Easy Installation
For easy setup on your Laptop/Desktop, [a node wizard is available here.](https://github.com/shocknet/wizard)-->
### Manual Installation
#### Notes:
* The service defaults to port `9835`
* Looks for local LND in its default path
* Default gun peer is `gun.shock.network`
* Change defaults in `defaults.js`
* Requires [Node.js](https://nodejs.org) 14
* Requires [Node.js](https://nodejs.org) 16
#### Steps:
1) Run [LND](https://github.com/shocknet/lnd/releases) - *Example mainnet startup*:
(Neutrino example requires builds with experimental flags, [our binaries](https://github.com/shocknet/lnd/releases) include them.)
1) Run [LND](https://github.com/lightningnetwork/lnd/releases) - *Example mainnet startup*:
```
./lnd --bitcoin.active --bitcoin.mainnet --bitcoin.node=neutrino --neutrino.connect=neutrino.shock.network --routing.assumechanvalid --accept-keysend --allow-circular-route --feeurl=https://nodes.lightning.computer/fees/v1/btc-fee-estimates.json
```
2) Download and Install API
2) Download and Install Lightning.Pub
```
git clone https://github.com/shocknet/api
cd api
git clone https://github.com/shocknet/Lightning.Pub
cd Lightning.Pub
yarn install
```
3) Run with `yarn start`
4) Connect with Shockwallet *(Provide your nodes IP manually or scan QR from ShockWizard)*
*Optionally, add the `--tunnel` flag to create an ssh connection through a tunnel.rip webserver for zero-configuration networking. All communication between the api and wallet is end-to-end encrypted and your privacy is protected.*
3) Run with `yarn start -t` *(`-t` is recommended but [not required](#node-security))*
4) Connect with Dashboard
### Docker for Raspberry Pi
### Docker Usage
To run `Pub` in a fully isolated environment you can use the Docker image
provided on the Docker Hub and easily interact with API's CLI interface and flags.
* [Instructions](https://gist.github.com/boufni95/3f4e1f19cf9525c3b7741b7a29f122bc)
#### Prerequisites
To use `Pub` Docker images you will need an instance of LND running, and
also if your LND related files are located in a container file system, you'll need to mount **Docker Volumes** pointed to them while starting the container.
Example of listing available configuration flags:
```
docker run --rm shockwallet/Lightning.Pub:latest --help
```
Example of running an local instance with mounted volumes:
```
docker run -v /home/$USER/.lnd:/root/.lnd --network host shockwallet/Lightning.Pub:latest
```
### Node Security
`Pub` administration API's use E2E encryption bootstrapped with PAKE to prevent interception by the proxy. There are advanced or testing scenarios where you may wish to bypass this security, to do so pass the env `TRUSTED_KEYS=false`
Communication between the administrator Dashboard and Lightning.Pub is otherwise encrypted, regardless of whether or not SSL is used, though an SSL equipped reverse proxy is recommended for better usability with web browsers.
Running with `-t` enables the built-in SSL proxy provider for ease of use via zero-configuration networking.

View file

@ -0,0 +1,16 @@
version: "3.8"
networks:
default:
external: true
name: 2_default
services:
web:
image: shockwallet/api:latest
command: -c -h 0.0.0.0 -l polar-n2-alice:10009 -m /root/.lnd/data/chain/bitcoin/regtest/admin.macaroon -d /root/.lnd/tls.cert
restart: on-failure
stop_grace_period: 1m
ports:
- 9835:9835
volumes:
- C:\Users\boufn\.polar\networks\2\volumes\lnd\alice:/root/.lnd

View file

@ -47,7 +47,7 @@ module.exports = (mainnet = false) => {
logfile: "shockapi.log",
lndLogFile: parsePath(`${lndDirectory}/logs/bitcoin/${network}/lnd.log`),
lndDirPath: lndDirectory,
peers: ['https://gun.shock.network:8765/gun'],
peers: ['https://gun.shock.network/gun','https://gun-eu.shock.network/gun'],
useTLS: false,
tokenExpirationMS: 259200000,
localtunnelHost:'https://tunnel.rip'

View file

@ -1,44 +1,58 @@
// config/log.js
/** @prettier */
const winston = require("winston");
require("winston-daily-rotate-file");
const { createLogger, transports, format } = require('winston')
const util = require('util')
require('winston-daily-rotate-file')
const winstonAttached = new Map();
// @ts-ignore
const transform = info => {
const args = info[Symbol.for('splat')]
if (args) {
return { ...info, message: util.format(info.message, ...args) }
}
return info
}
/**
* @param {string} logFileName
* @param {string} logLevel
* @returns {import("winston").Logger}
*/
module.exports = (logFileName, logLevel) => {
if (!winstonAttached.has(logFileName)) {
winston.add(new (winston.transports.DailyRotateFile)({
filename: logFileName,
datePattern: "yyyy-MM-DD",
const logFormatter = () => ({ transform })
const formatter = format.combine(
format.colorize(),
format.errors({ stack: true }),
logFormatter(),
format.prettyPrint(),
format.timestamp(),
format.simple(),
format.align(),
format.printf(info => {
const { timestamp, level, message, stack, exception } = info
const ts = timestamp.slice(0, 19).replace('T', ' ')
const isObject = typeof message === 'object'
const formattedJson = isObject ? JSON.stringify(message, null, 2) : message
const formattedException = exception ? exception.stack : ''
const errorMessage = stack || formattedException
const formattedMessage = errorMessage ? errorMessage : formattedJson
return `${ts} [${level}]: ${formattedMessage}`
})
)
const Logger = createLogger({
format: formatter,
transports: [
new transports.DailyRotateFile({
filename: 'shockapi.log',
datePattern: 'yyyy-MM-DD',
// https://github.com/winstonjs/winston-daily-rotate-file/issues/188
json: true,
json: false,
maxSize: 1000000,
maxFiles: 7,
level: logLevel
}))
winston.add(new winston.transports.Console({
format: winston.format.combine(
winston.format.colorize(),
winston.format.timestamp(),
winston.format.align(),
winston.format.printf((info) => {
const {
timestamp, level, message, ...args
} = info;
const ts = timestamp.slice(0, 19).replace('T', ' ');
return `${ts} [${level}]: ${message} ${Object.keys(args).length ? JSON.stringify(args, null, 2) : ''}`;
handleExceptions: true
}),
)
}))
winston.level = logLevel
winstonAttached.set(logFileName, winston)
}
new transports.Console({
handleExceptions: true
})
]
})
return winstonAttached.get(logFileName)
}
module.exports = Logger

View file

@ -40,9 +40,10 @@ service Router {
}
/*
SendToRouteV2 attempts to make a payment via the specified route. This method
differs from SendPayment in that it allows users to specify a full route
manually. This can be used for things like rebalancing, and atomic swaps.
SendToRouteV2 attempts to make a payment via the specified route. This
method differs from SendPayment in that it allows users to specify a full
route manually. This can be used for things like rebalancing, and atomic
swaps.
*/
rpc SendToRouteV2 (SendToRouteRequest) returns (lnrpc.HTLCAttempt);
@ -60,6 +61,28 @@ service Router {
rpc QueryMissionControl (QueryMissionControlRequest)
returns (QueryMissionControlResponse);
/*
XImportMissionControl is an experimental API that imports the state provided
to the internal mission control's state, using all results which are more
recent than our existing values. These values will only be imported
in-memory, and will not be persisted across restarts.
*/
rpc XImportMissionControl (XImportMissionControlRequest)
returns (XImportMissionControlResponse);
/*
GetMissionControlConfig returns mission control's current config.
*/
rpc GetMissionControlConfig (GetMissionControlConfigRequest)
returns (GetMissionControlConfigResponse);
/*
SetMissionControlConfig will set mission control's config, if the config
provided is valid.
*/
rpc SetMissionControlConfig (SetMissionControlConfigRequest)
returns (SetMissionControlConfigResponse);
/*
QueryProbability returns the current success probability estimate for a
given node pair and amount.
@ -97,6 +120,25 @@ service Router {
rpc TrackPayment (TrackPaymentRequest) returns (stream PaymentStatus) {
option deprecated = true;
}
/**
HtlcInterceptor dispatches a bi-directional streaming RPC in which
Forwarded HTLC requests are sent to the client and the client responds with
a boolean that tells LND if this htlc should be intercepted.
In case of interception, the htlc can be either settled, cancelled or
resumed later by using the ResolveHoldForward endpoint.
*/
rpc HtlcInterceptor (stream ForwardHtlcInterceptResponse)
returns (stream ForwardHtlcInterceptRequest);
/*
UpdateChanStatus attempts to manually set the state of a channel
(enabled, disabled, or auto). A manual "disable" request will cause the
channel to stay disabled until a subsequent manual request of either
"enable" or "auto".
*/
rpc UpdateChanStatus (UpdateChanStatusRequest)
returns (UpdateChanStatusResponse);
}
message SendPaymentRequest {
@ -126,6 +168,9 @@ message SendPaymentRequest {
*/
int32 final_cltv_delta = 4;
// An optional payment addr to be included within the last hop of the route.
bytes payment_addr = 20;
/*
A bare-bones invoice for a payment within the Lightning Network. With the
details of the invoice, the sender has all the data necessary to send a
@ -226,6 +271,19 @@ message SendPaymentRequest {
that show which htlcs are still in flight are suppressed.
*/
bool no_inflight_updates = 18;
/*
The largest payment split that should be attempted when making a payment if
splitting is necessary. Setting this value will effectively cause lnd to
split more aggressively, vs only when it thinks it needs to. Note that this
value is in milli-satoshis.
*/
uint64 max_shard_size_msat = 21;
/*
If set, an AMP-payment will be attempted.
*/
bool amp = 22;
}
message TrackPaymentRequest {
@ -299,6 +357,14 @@ message QueryMissionControlResponse {
repeated PairHistory pairs = 2;
}
message XImportMissionControlRequest {
// Node pair-level mission control state to be imported.
repeated PairHistory pairs = 1;
}
message XImportMissionControlResponse {
}
// PairHistory contains the mission control state for a particular node pair.
message PairHistory {
// The source node pubkey of the pair.
@ -340,6 +406,67 @@ message PairData {
int64 success_amt_msat = 7;
}
message GetMissionControlConfigRequest {
}
message GetMissionControlConfigResponse {
/*
Mission control's currently active config.
*/
MissionControlConfig config = 1;
}
message SetMissionControlConfigRequest {
/*
The config to set for mission control. Note that all values *must* be set,
because the full config will be applied.
*/
MissionControlConfig config = 1;
}
message SetMissionControlConfigResponse {
}
message MissionControlConfig {
/*
The amount of time mission control will take to restore a penalized node
or channel back to 50% success probability, expressed in seconds. Setting
this value to a higher value will penalize failures for longer, making
mission control less likely to route through nodes and channels that we
have previously recorded failures for.
*/
uint64 half_life_seconds = 1;
/*
The probability of success mission control should assign to hop in a route
where it has no other information available. Higher values will make mission
control more willing to try hops that we have no information about, lower
values will discourage trying these hops.
*/
float hop_probability = 2;
/*
The importance that mission control should place on historical results,
expressed as a value in [0;1]. Setting this value to 1 will ignore all
historical payments and just use the hop probability to assess the
probability of success for each hop. A zero value ignores hop probability
completely and relies entirely on historical results, unless none are
available.
*/
float weight = 3;
/*
The maximum number of payment results that mission control will store.
*/
uint32 maximum_payment_results = 4;
/*
The minimum time that must have passed since the previously recorded failure
before we raise the failure amount.
*/
uint64 minimum_failure_relax_interval = 5;
}
message QueryProbabilityRequest {
// The source node pubkey of the pair.
bytes from_node = 1;
@ -383,6 +510,9 @@ message BuildRouteRequest {
pubkey.
*/
repeated bytes hop_pubkeys = 4;
// An optional payment addr to be included within the last hop of the route.
bytes payment_addr = 5;
}
message BuildRouteResponse {
@ -579,3 +709,90 @@ message PaymentStatus {
repeated lnrpc.HTLCAttempt htlcs = 4;
}
message CircuitKey {
/// The id of the channel that the is part of this circuit.
uint64 chan_id = 1;
/// The index of the incoming htlc in the incoming channel.
uint64 htlc_id = 2;
}
message ForwardHtlcInterceptRequest {
/*
The key of this forwarded htlc. It defines the incoming channel id and
the index in this channel.
*/
CircuitKey incoming_circuit_key = 1;
// The incoming htlc amount.
uint64 incoming_amount_msat = 5;
// The incoming htlc expiry.
uint32 incoming_expiry = 6;
/*
The htlc payment hash. This value is not guaranteed to be unique per
request.
*/
bytes payment_hash = 2;
// The requested outgoing channel id for this forwarded htlc. Because of
// non-strict forwarding, this isn't necessarily the channel over which the
// packet will be forwarded eventually. A different channel to the same peer
// may be selected as well.
uint64 outgoing_requested_chan_id = 7;
// The outgoing htlc amount.
uint64 outgoing_amount_msat = 3;
// The outgoing htlc expiry.
uint32 outgoing_expiry = 4;
// Any custom records that were present in the payload.
map<uint64, bytes> custom_records = 8;
// The onion blob for the next hop
bytes onion_blob = 9;
}
/**
ForwardHtlcInterceptResponse enables the caller to resolve a previously hold
forward. The caller can choose either to:
- `Resume`: Execute the default behavior (usually forward).
- `Reject`: Fail the htlc backwards.
- `Settle`: Settle this htlc with a given preimage.
*/
message ForwardHtlcInterceptResponse {
/**
The key of this forwarded htlc. It defines the incoming channel id and
the index in this channel.
*/
CircuitKey incoming_circuit_key = 1;
// The resolve action for this intercepted htlc.
ResolveHoldForwardAction action = 2;
// The preimage in case the resolve action is Settle.
bytes preimage = 3;
}
enum ResolveHoldForwardAction {
SETTLE = 0;
FAIL = 1;
RESUME = 2;
}
message UpdateChanStatusRequest {
lnrpc.ChannelPoint chan_point = 1;
ChanStatusAction action = 2;
}
enum ChanStatusAction {
ENABLE = 0;
DISABLE = 1;
AUTO = 2;
}
message UpdateChanStatusResponse {
}

View file

@ -32,8 +32,9 @@ service Lightning {
rpc WalletBalance (WalletBalanceRequest) returns (WalletBalanceResponse);
/* lncli: `channelbalance`
ChannelBalance returns the total funds available across all open channels
in satoshis.
ChannelBalance returns a report on the total funds across all open channels,
categorized in local/remote, pending local/remote and unsettled local/remote
balances.
*/
rpc ChannelBalance (ChannelBalanceRequest) returns (ChannelBalanceResponse);
@ -46,13 +47,18 @@ service Lightning {
/* lncli: `estimatefee`
EstimateFee asks the chain backend to estimate the fee rate and total fees
for a transaction that pays to multiple specified outputs.
When using REST, the `AddrToAmount` map type can be set by appending
`&AddrToAmount[<address>]=<amount_to_send>` to the URL. Unfortunately this
map type doesn't appear in the REST API documentation because of a bug in
the grpc-gateway library.
*/
rpc EstimateFee (EstimateFeeRequest) returns (EstimateFeeResponse);
/* lncli: `sendcoins`
SendCoins executes a request to send coins to a particular address. Unlike
SendMany, this RPC call only allows creating a single output at a time. If
neither target_conf, or sat_per_byte are set, then the internal wallet will
neither target_conf, or sat_per_vbyte are set, then the internal wallet will
consult its fee model to determine a fee for the default confirmation
target.
*/
@ -76,7 +82,7 @@ service Lightning {
/* lncli: `sendmany`
SendMany handles a request for a transaction that creates multiple specified
outputs in parallel. If neither target_conf, or sat_per_byte are set, then
outputs in parallel. If neither target_conf, or sat_per_vbyte are set, then
the internal wallet will consult its fee model to determine a fee for the
default confirmation target.
*/
@ -135,6 +141,14 @@ service Lightning {
*/
rpc GetInfo (GetInfoRequest) returns (GetInfoResponse);
/** lncli: `getrecoveryinfo`
GetRecoveryInfo returns information concerning the recovery mode including
whether it's in a recovery mode, whether the recovery is finished, and the
progress made so far.
*/
rpc GetRecoveryInfo (GetRecoveryInfoRequest)
returns (GetRecoveryInfoResponse);
// TODO(roasbeef): merge with below with bool?
/* lncli: `pendingchannels`
PendingChannels returns a list of all the channels that are currently
@ -222,8 +236,10 @@ service Lightning {
/* lncli: `abandonchannel`
AbandonChannel removes all channel state from the database except for a
close summary. This method can be used to get rid of permanently unusable
channels due to bugs fixed in newer versions of lnd. Only available
when in debug builds of lnd.
channels due to bugs fixed in newer versions of lnd. This method can also be
used to remove externally funded channels where the funding transaction was
never broadcast. Only available for non-externally funded channels in dev
build.
*/
rpc AbandonChannel (AbandonChannelRequest) returns (AbandonChannelResponse);
@ -355,6 +371,11 @@ service Lightning {
satoshis. The returned route contains the full details required to craft and
send an HTLC, also including the necessary information that should be
present within the Sphinx packet encapsulated within the HTLC.
When using REST, the `dest_custom_records` map type can be set by appending
`&dest_custom_records[<record_number>]=<record_data_base64_url_encoded>`
to the URL. Unfortunately this map type doesn't appear in the REST API
documentation because of a bug in the grpc-gateway library.
*/
rpc QueryRoutes (QueryRoutesRequest) returns (QueryRoutesResponse);
@ -405,8 +426,9 @@ service Lightning {
/* lncli: `fwdinghistory`
ForwardingHistory allows the caller to query the htlcswitch for a record of
all HTLCs forwarded within the target time range, and integer offset
within that time range. If no time-range is specified, then the first chunk
of the past 24 hrs of forwarding history are returned.
within that time range, for a maximum number of events. If no maximum number
of events is specified, up to 100 events will be returned. If no time-range
is specified, then events will be returned in the order that they occured.
A list of forwarding events are returned. The size of each forwarding event
is 40 bytes, and the max message size able to be returned in gRPC is 4 MiB.
@ -473,6 +495,26 @@ service Lightning {
offline.
*/
rpc BakeMacaroon (BakeMacaroonRequest) returns (BakeMacaroonResponse);
/* lncli: `listmacaroonids`
ListMacaroonIDs returns all root key IDs that are in use.
*/
rpc ListMacaroonIDs (ListMacaroonIDsRequest)
returns (ListMacaroonIDsResponse);
/* lncli: `deletemacaroonid`
DeleteMacaroonID deletes the specified macaroon ID and invalidates all
macaroons derived from that ID.
*/
rpc DeleteMacaroonID (DeleteMacaroonIDRequest)
returns (DeleteMacaroonIDResponse);
/* lncli: `listpermissions`
ListPermissions lists all RPC method URIs and their required macaroon
permissions to access them.
*/
rpc ListPermissions (ListPermissionsRequest)
returns (ListPermissionsResponse);
}
message Utxo {
@ -541,6 +583,9 @@ message GetTransactionsRequest {
default to this option.
*/
int32 end_height = 2;
// An optional filter to only include transactions relevant to an account.
string account = 3;
}
message TransactionDetails {
@ -667,6 +712,11 @@ message SendRequest {
fallback.
*/
repeated FeatureBit dest_features = 15;
/*
The payment address of the generated invoice.
*/
bytes payment_addr = 16;
}
message SendResponse {
@ -750,6 +800,58 @@ message ChannelAcceptResponse {
// The pending channel id to which this response applies.
bytes pending_chan_id = 2;
/*
An optional error to send the initiating party to indicate why the channel
was rejected. This field *should not* contain sensitive information, it will
be sent to the initiating party. This field should only be set if accept is
false, the channel will be rejected if an error is set with accept=true
because the meaning of this response is ambiguous. Limited to 500
characters.
*/
string error = 3;
/*
The upfront shutdown address to use if the initiating peer supports option
upfront shutdown script (see ListPeers for the features supported). Note
that the channel open will fail if this value is set for a peer that does
not support this feature bit.
*/
string upfront_shutdown = 4;
/*
The csv delay (in blocks) that we require for the remote party.
*/
uint32 csv_delay = 5;
/*
The reserve amount in satoshis that we require the remote peer to adhere to.
We require that the remote peer always have some reserve amount allocated to
them so that there is always a disincentive to broadcast old state (if they
hold 0 sats on their side of the channel, there is nothing to lose).
*/
uint64 reserve_sat = 6;
/*
The maximum amount of funds in millisatoshis that we allow the remote peer
to have in outstanding htlcs.
*/
uint64 in_flight_max_msat = 7;
/*
The maximum number of htlcs that the remote peer can offer us.
*/
uint32 max_htlc_count = 8;
/*
The minimum value in millisatoshis for incoming htlcs on the channel.
*/
uint64 min_htlc_in = 9;
/*
The number of confirmations we require before we consider the channel open.
*/
uint32 min_accept_depth = 10;
}
message ChannelPoint {
@ -798,14 +900,25 @@ message EstimateFeeRequest {
// The target number of blocks that this transaction should be confirmed
// by.
int32 target_conf = 2;
// The minimum number of confirmations each one of your outputs used for
// the transaction must satisfy.
int32 min_confs = 3;
// Whether unconfirmed outputs should be used as inputs for the transaction.
bool spend_unconfirmed = 4;
}
message EstimateFeeResponse {
// The total fee in satoshis.
int64 fee_sat = 1;
// The fee rate in satoshi/byte.
int64 feerate_sat_per_byte = 2;
// Deprecated, use sat_per_vbyte.
// The fee rate in satoshi/vbyte.
int64 feerate_sat_per_byte = 2 [deprecated = true];
// The fee rate in satoshi/vbyte.
uint64 sat_per_vbyte = 3;
}
message SendManyRequest {
@ -816,12 +929,24 @@ message SendManyRequest {
// by.
int32 target_conf = 3;
// A manual fee rate set in sat/byte that should be used when crafting the
// A manual fee rate set in sat/vbyte that should be used when crafting the
// transaction.
int64 sat_per_byte = 5;
uint64 sat_per_vbyte = 4;
// Deprecated, use sat_per_vbyte.
// A manual fee rate set in sat/vbyte that should be used when crafting the
// transaction.
int64 sat_per_byte = 5 [deprecated = true];
// An optional label for the transaction, limited to 500 characters.
string label = 6;
// The minimum number of confirmations each one of your outputs used for
// the transaction must satisfy.
int32 min_confs = 7;
// Whether unconfirmed outputs should be used as inputs for the transaction.
bool spend_unconfirmed = 8;
}
message SendManyResponse {
// The id of the transaction
@ -839,9 +964,14 @@ message SendCoinsRequest {
// by.
int32 target_conf = 3;
// A manual fee rate set in sat/byte that should be used when crafting the
// A manual fee rate set in sat/vbyte that should be used when crafting the
// transaction.
int64 sat_per_byte = 5;
uint64 sat_per_vbyte = 4;
// Deprecated, use sat_per_vbyte.
// A manual fee rate set in sat/vbyte that should be used when crafting the
// transaction.
int64 sat_per_byte = 5 [deprecated = true];
/*
If set, then the amount field will be ignored, and lnd will attempt to
@ -852,6 +982,13 @@ message SendCoinsRequest {
// An optional label for the transaction, limited to 500 characters.
string label = 7;
// The minimum number of confirmations each one of your outputs used for
// the transaction must satisfy.
int32 min_confs = 8;
// Whether unconfirmed outputs should be used as inputs for the transaction.
bool spend_unconfirmed = 9;
}
message SendCoinsResponse {
// The transaction ID of the transaction
@ -864,6 +1001,9 @@ message ListUnspentRequest {
// The maximum number of confirmations to be included.
int32 max_confs = 2;
// An optional filter to only include outputs belonging to an account.
string account = 3;
}
message ListUnspentResponse {
// A list of utxos
@ -884,8 +1024,14 @@ enum AddressType {
}
message NewAddressRequest {
// The address type
// The type of address to generate.
AddressType type = 1;
/*
The name of the account to generate a new address for. If empty, the
default wallet account is used.
*/
string account = 2;
}
message NewAddressResponse {
// The newly generated wallet address
@ -929,6 +1075,12 @@ message ConnectPeerRequest {
/* If set, the daemon will attempt to persistently connect to the target
* peer. Otherwise, the call will be synchronous. */
bool perm = 2;
/*
The connection timeout value (in seconds) for this request. It won't affect
other requests.
*/
uint64 timeout = 3;
}
message ConnectPeerResponse {
}
@ -945,6 +1097,21 @@ message HTLC {
int64 amount = 2;
bytes hash_lock = 3;
uint32 expiration_height = 4;
// Index identifying the htlc on the channel.
uint64 htlc_index = 5;
// If this HTLC is involved in a forwarding operation, this field indicates
// the forwarding channel. For an outgoing htlc, it is the incoming channel.
// For an incoming htlc, it is the outgoing channel. When the htlc
// originates from this node or this node is the final destination,
// forwarding_channel will be zero. The forwarding channel will also be zero
// for htlcs that need to be forwarded but don't have a forwarding decision
// persisted yet.
uint64 forwarding_channel = 6;
// Index identifying the htlc on the forwarding channel.
uint64 forwarding_htlc_index = 7;
}
enum CommitmentType {
@ -975,6 +1142,30 @@ enum CommitmentType {
UNKNOWN_COMMITMENT_TYPE = 999;
}
message ChannelConstraints {
/*
The CSV delay expressed in relative blocks. If the channel is force closed,
we will need to wait for this many blocks before we can regain our funds.
*/
uint32 csv_delay = 1;
// The minimum satoshis this node is required to reserve in its balance.
uint64 chan_reserve_sat = 2;
// The dust limit (in satoshis) of the initiator's commitment tx.
uint64 dust_limit_sat = 3;
// The maximum amount of coins in millisatoshis that can be pending in this
// channel.
uint64 max_pending_amt_msat = 4;
// The smallest HTLC in millisatoshis that the initiator will accept.
uint64 min_htlc_msat = 5;
// The total number of incoming HTLC's that the initiator will accept.
uint32 max_accepted_htlcs = 6;
}
message Channel {
// Whether this channel is active or not
bool active = 1;
@ -1047,10 +1238,11 @@ message Channel {
repeated HTLC pending_htlcs = 15;
/*
The CSV delay expressed in relative blocks. If the channel is force closed,
we will need to wait for this many blocks before we can regain our funds.
Deprecated. The CSV delay expressed in relative blocks. If the channel is
force closed, we will need to wait for this many blocks before we can regain
our funds.
*/
uint32 csv_delay = 16;
uint32 csv_delay = 16 [deprecated = true];
// Whether this channel is advertised to the network or not.
bool private = 17;
@ -1061,13 +1253,15 @@ message Channel {
// A set of flags showing the current state of the channel.
string chan_status_flags = 19;
// The minimum satoshis this node is required to reserve in its balance.
int64 local_chan_reserve_sat = 20;
// Deprecated. The minimum satoshis this node is required to reserve in its
// balance.
int64 local_chan_reserve_sat = 20 [deprecated = true];
/*
The minimum satoshis the other node is required to reserve in its balance.
Deprecated. The minimum satoshis the other node is required to reserve in
its balance.
*/
int64 remote_chan_reserve_sat = 21;
int64 remote_chan_reserve_sat = 21 [deprecated = true];
// Deprecated. Use commitment_type.
bool static_remote_key = 22 [deprecated = true];
@ -1112,9 +1306,17 @@ message Channel {
frozen channel doest not allow a cooperative channel close by the
initiator. The thaw_height is the height that this restriction stops
applying to the channel. This field is optional, not setting it or using a
value of zero will mean the channel has no additional restrictions.
value of zero will mean the channel has no additional restrictions. The
height can be interpreted in two ways: as a relative height if the value is
less than 500,000, or as an absolute height otherwise.
*/
uint32 thaw_height = 28;
// List constraints for the local node.
ChannelConstraints local_constraints = 29;
// List constraints for the remote node.
ChannelConstraints remote_constraints = 30;
}
message ListChannelsRequest {
@ -1196,6 +1398,79 @@ message ChannelCloseSummary {
force closes, although only one party's close will be confirmed on chain.
*/
Initiator close_initiator = 12;
repeated Resolution resolutions = 13;
}
enum ResolutionType {
TYPE_UNKNOWN = 0;
// We resolved an anchor output.
ANCHOR = 1;
/*
We are resolving an incoming htlc on chain. This if this htlc is
claimed, we swept the incoming htlc with the preimage. If it is timed
out, our peer swept the timeout path.
*/
INCOMING_HTLC = 2;
/*
We are resolving an outgoing htlc on chain. If this htlc is claimed,
the remote party swept the htlc with the preimage. If it is timed out,
we swept it with the timeout path.
*/
OUTGOING_HTLC = 3;
// We force closed and need to sweep our time locked commitment output.
COMMIT = 4;
}
enum ResolutionOutcome {
// Outcome unknown.
OUTCOME_UNKNOWN = 0;
// An output was claimed on chain.
CLAIMED = 1;
// An output was left unclaimed on chain.
UNCLAIMED = 2;
/*
ResolverOutcomeAbandoned indicates that an output that we did not
claim on chain, for example an anchor that we did not sweep and a
third party claimed on chain, or a htlc that we could not decode
so left unclaimed.
*/
ABANDONED = 3;
/*
If we force closed our channel, our htlcs need to be claimed in two
stages. This outcome represents the broadcast of a timeout or success
transaction for this two stage htlc claim.
*/
FIRST_STAGE = 4;
// A htlc was timed out on chain.
TIMEOUT = 5;
}
message Resolution {
// The type of output we are resolving.
ResolutionType resolution_type = 1;
// The outcome of our on chain action that resolved the outpoint.
ResolutionOutcome outcome = 2;
// The outpoint that was spent by the resolution.
OutPoint outpoint = 3;
// The amount that was claimed by the resolution.
uint64 amount_sat = 4;
// The hex-encoded transaction ID of the sweep transaction that spent the
// output.
string sweep_txid = 5;
}
message ClosedChannelsRequest {
@ -1251,6 +1526,11 @@ message Peer {
Denotes that we are not receiving new graph updates from the peer.
*/
PASSIVE_SYNC = 2;
/*
Denotes that this peer is pinned into an active sync.
*/
PINNED_SYNC = 3;
}
// The type of sync we are currently performing with this peer.
@ -1267,6 +1547,20 @@ message Peer {
spamming us with errors at no cost.
*/
repeated TimestampedError errors = 12;
/*
The number of times we have recorded this peer going offline or coming
online, recorded across restarts. Note that this value is decreased over
time if the peer has not recently flapped, so that we can forgive peers
with historically high flap counts.
*/
int32 flap_count = 13;
/*
The timestamp of the last flap we observed for this peer. If this value is
zero, we have not observed any flaps for this peer.
*/
int64 last_flap_ns = 14;
}
message TimestampedError {
@ -1371,6 +1665,19 @@ message GetInfoResponse {
map<uint32, Feature> features = 19;
}
message GetRecoveryInfoRequest {
}
message GetRecoveryInfoResponse {
// Whether the wallet is in recovery mode
bool recovery_mode = 1;
// Whether the wallet recovery progress is finished
bool recovery_finished = 2;
// The recovery progress, ranging from 0 to 1.
double progress = 3;
}
message Chain {
// The blockchain the node is on (eg bitcoin, litecoin)
string chain = 1;
@ -1412,9 +1719,10 @@ message CloseChannelRequest {
// confirmed by.
int32 target_conf = 3;
// A manual fee rate set in sat/byte that should be used when crafting the
// Deprecated, use sat_per_vbyte.
// A manual fee rate set in sat/vbyte that should be used when crafting the
// closure transaction.
int64 sat_per_byte = 4;
int64 sat_per_byte = 4 [deprecated = true];
/*
An optional address to send funds to in the case of a cooperative close.
@ -1423,6 +1731,10 @@ message CloseChannelRequest {
to the upfront shutdown addresss.
*/
string delivery_address = 5;
// A manual fee rate set in sat/vbyte that should be used when crafting the
// closure transaction.
uint64 sat_per_vbyte = 6;
}
message CloseStatusUpdate {
@ -1460,6 +1772,10 @@ message ReadyForPsbtFunding {
}
message OpenChannelRequest {
// A manual fee rate set in sat/vbyte that should be used when crafting the
// funding transaction.
uint64 sat_per_vbyte = 1;
/*
The pubkey of the node to open a channel with. When using REST, this field
must be encoded as base64.
@ -1483,9 +1799,10 @@ message OpenChannelRequest {
// confirmed by.
int32 target_conf = 6;
// A manual fee rate set in sat/byte that should be used when crafting the
// Deprecated, use sat_per_vbyte.
// A manual fee rate set in sat/vbyte that should be used when crafting the
// funding transaction.
int64 sat_per_byte = 7;
int64 sat_per_byte = 7 [deprecated = true];
// Whether this channel should be private, not announced to the greater
// network.
@ -1527,6 +1844,24 @@ message OpenChannelRequest {
carried out in an interactive manner (PSBT based).
*/
FundingShim funding_shim = 14;
/*
The maximum amount of coins in millisatoshi that can be pending within
the channel. It only applies to the remote party.
*/
uint64 remote_max_value_in_flight_msat = 15;
/*
The maximum number of concurrent HTLCs we will allow the remote party to add
to the commitment transaction.
*/
uint32 remote_max_htlcs = 16;
/*
Max local csv is the maximum csv delay we will allow for our own commitment
transaction.
*/
uint32 max_local_csv = 17;
}
message OpenStatusUpdate {
oneof update {
@ -1601,10 +1936,11 @@ message ChanPointShim {
bytes pending_chan_id = 5;
/*
This uint32 indicates if this channel is to be considered 'frozen'. A
frozen channel does not allow a cooperative channel close by the
initiator. The thaw_height is the height that this restriction stops
applying to the channel.
This uint32 indicates if this channel is to be considered 'frozen'. A frozen
channel does not allow a cooperative channel close by the initiator. The
thaw_height is the height that this restriction stops applying to the
channel. The height can be interpreted in two ways: as a relative height if
the value is less than 500,000, or as an absolute height otherwise.
*/
uint32 thaw_height = 6;
}
@ -1622,6 +1958,16 @@ message PsbtShim {
non-empty, it must be a binary serialized PSBT.
*/
bytes base_psbt = 2;
/*
If a channel should be part of a batch (multiple channel openings in one
transaction), it can be dangerous if the whole batch transaction is
published too early before all channel opening negotiations are completed.
This flag prevents this particular channel from broadcasting the transaction
after the negotiation with the remote peer. In a batch of channel openings
this flag should be set to true for every channel but the very last.
*/
bool no_publish = 3;
}
message FundingShim {
@ -1661,12 +2007,19 @@ message FundingPsbtFinalize {
/*
The funded PSBT that contains all witness data to send the exact channel
capacity amount to the PK script returned in the open channel message in a
previous step.
previous step. Cannot be set at the same time as final_raw_tx.
*/
bytes signed_psbt = 1;
// The pending channel ID of the channel to get the PSBT for.
bytes pending_chan_id = 2;
/*
As an alternative to the signed PSBT with all witness data, the final raw
wire format transaction can also be specified directly. Cannot be set at the
same time as signed_psbt.
*/
bytes final_raw_tx = 3;
}
message FundingTransitionMsg {
@ -1909,8 +2262,17 @@ message ChannelEventUpdate {
UpdateType type = 5;
}
message WalletAccountBalance {
// The confirmed balance of the account (with >= 1 confirmations).
int64 confirmed_balance = 1;
// The unconfirmed balance of the account (with 0 confirmations).
int64 unconfirmed_balance = 2;
}
message WalletBalanceRequest {
}
message WalletBalanceResponse {
// The balance of the wallet
int64 total_balance = 1;
@ -1920,16 +2282,45 @@ message WalletBalanceResponse {
// The unconfirmed balance of a wallet(with 0 confirmations)
int64 unconfirmed_balance = 3;
// A mapping of each wallet account's name to its balance.
map<string, WalletAccountBalance> account_balance = 4;
}
message Amount {
// Value denominated in satoshis.
uint64 sat = 1;
// Value denominated in milli-satoshis.
uint64 msat = 2;
}
message ChannelBalanceRequest {
}
message ChannelBalanceResponse {
// Sum of channels balances denominated in satoshis
int64 balance = 1;
// Deprecated. Sum of channels balances denominated in satoshis
int64 balance = 1 [deprecated = true];
// Sum of channels pending balances denominated in satoshis
int64 pending_open_balance = 2;
// Deprecated. Sum of channels pending balances denominated in satoshis
int64 pending_open_balance = 2 [deprecated = true];
// Sum of channels local balances.
Amount local_balance = 3;
// Sum of channels remote balances.
Amount remote_balance = 4;
// Sum of channels local unsettled balances.
Amount unsettled_local_balance = 5;
// Sum of channels remote unsettled balances.
Amount unsettled_remote_balance = 6;
// Sum of channels pending local balances.
Amount pending_open_local_balance = 7;
// Sum of channels pending remote balances.
Amount pending_open_remote_balance = 8;
}
message QueryRoutesRequest {
@ -2088,7 +2479,7 @@ message Hop {
output index for the channel.
*/
uint64 chan_id = 1 [jstype = JS_STRING];
int64 chan_capacity = 2;
int64 chan_capacity = 2 [deprecated = true];
int64 amt_to_forward = 3 [deprecated = true];
int64 fee = 4 [deprecated = true];
uint32 expiry = 5;
@ -2110,12 +2501,22 @@ message Hop {
/*
An optional TLV record that signals the use of an MPP payment. If present,
the receiver will enforce that that the same mpp_record is included in the
final hop payload of all non-zero payments in the HTLC set. If empty, a
regular single-shot payment is or was attempted.
the receiver will enforce that the same mpp_record is included in the final
hop payload of all non-zero payments in the HTLC set. If empty, a regular
single-shot payment is or was attempted.
*/
MPPRecord mpp_record = 10;
/*
An optional TLV record that signals the use of an AMP payment. If present,
the receiver will treat all received payments including the same
(payment_addr, set_id) pair as being part of one logical payment. The
payment will be settled by XORing the root_share's together and deriving the
child hashes and preimages according to BOLT XX. Must be used in conjunction
with mpp_record.
*/
AMPRecord amp_record = 12;
/*
An optional set of key-value TLV records. This is useful within the context
of the SendToRoute call as it allows callers to specify arbitrary K-V pairs
@ -2142,6 +2543,14 @@ message MPPRecord {
int64 total_amt_msat = 10;
}
message AMPRecord {
bytes root_share = 1;
bytes set_id = 2;
uint32 child_index = 3;
}
/*
A path through the channel graph which runs over one or more channels in
succession. This struct carries all the information required to craft the
@ -2367,11 +2776,27 @@ message GraphTopologyUpdate {
repeated ClosedChannelUpdate closed_chans = 3;
}
message NodeUpdate {
repeated string addresses = 1;
/*
Deprecated, use node_addresses.
*/
repeated string addresses = 1 [deprecated = true];
string identity_key = 2;
bytes global_features = 3;
/*
Deprecated, use features.
*/
bytes global_features = 3 [deprecated = true];
string alias = 4;
string color = 5;
repeated NodeAddress node_addresses = 7;
/*
Features that the node has advertised in the init message, node
announcements and invoices.
*/
map<uint32, Feature> features = 6;
}
message ChannelEdgeUpdate {
/*
@ -2572,6 +2997,18 @@ message Invoice {
[EXPERIMENTAL].
*/
bool is_keysend = 25;
/*
The payment address of this invoice. This value will be used in MPP
payments, and also for newer invoies that always require the MPP paylaod
for added end-to-end security.
*/
bytes payment_addr = 26;
/*
Signals whether or not this is an AMP invoice.
*/
bool is_amp = 27;
}
enum InvoiceHTLCState {
@ -2611,6 +3048,31 @@ message InvoiceHTLC {
// The total amount of the mpp payment in msat.
uint64 mpp_total_amt_msat = 10;
// Details relevant to AMP HTLCs, only populated if this is an AMP HTLC.
AMP amp = 11;
}
// Details specific to AMP HTLCs.
message AMP {
// An n-of-n secret share of the root seed from which child payment hashes
// and preimages are derived.
bytes root_share = 1;
// An identifier for the HTLC set that this HTLC belongs to.
bytes set_id = 2;
// A nonce used to randomize the child preimage and child hash from a given
// root_share.
uint32 child_index = 3;
// The payment hash of the AMP HTLC.
bytes hash = 4;
// The preimage used to settle this AMP htlc. This field will only be
// populated if the invoice is in InvoiceState_ACCEPTED or
// InvoiceState_SETTLED.
bytes preimage = 5;
}
message AddInvoiceResponse {
@ -2630,6 +3092,13 @@ message AddInvoiceResponse {
invoices with an add_index greater than this one.
*/
uint64 add_index = 16;
/*
The payment address of the generated invoice. This value should be used
in all payments for this invoice as we require it for end to end
security.
*/
bytes payment_addr = 17;
}
message PaymentHash {
/*
@ -2801,6 +3270,9 @@ message Payment {
}
message HTLCAttempt {
// The unique ID that is used for this attempt.
uint64 attempt_id = 7;
enum HTLCStatus {
IN_FLIGHT = 0;
SUCCEEDED = 1;
@ -2876,6 +3348,13 @@ message ListPaymentsResponse {
}
message DeleteAllPaymentsRequest {
// Only delete failed payments.
bool failed_payments_only = 1;
/*
Only delete failed HTLCs from payments, not the payment itself.
*/
bool failed_htlcs_only = 2;
}
message DeleteAllPaymentsResponse {
@ -2883,6 +3362,8 @@ message DeleteAllPaymentsResponse {
message AbandonChannelRequest {
ChannelPoint channel_point = 1;
bool pending_funding_shim_only = 2;
}
message AbandonChannelResponse {
@ -2934,6 +3415,14 @@ enum FeatureBit {
PAYMENT_ADDR_OPT = 15;
MPP_REQ = 16;
MPP_OPT = 17;
WUMBO_CHANNELS_REQ = 18;
WUMBO_CHANNELS_OPT = 19;
ANCHORS_REQ = 20;
ANCHORS_OPT = 21;
ANCHORS_ZERO_FEE_HTLC_REQ = 22;
ANCHORS_ZERO_FEE_HTLC_OPT = 23;
AMP_REQ = 30;
AMP_OPT = 31;
}
message Feature {
@ -3034,8 +3523,8 @@ message ForwardingHistoryRequest {
}
message ForwardingEvent {
// Timestamp is the time (unix epoch offset) that this circuit was
// completed.
uint64 timestamp = 1;
// completed. Deprecated by timestamp_ns.
uint64 timestamp = 1 [deprecated = true];
// The incoming channel ID that carried the HTLC that created the circuit.
uint64 chan_id_in = 2 [jstype = JS_STRING];
@ -3066,6 +3555,10 @@ message ForwardingEvent {
// the second half of the circuit.
uint64 amt_out_msat = 10;
// The number of nanoseconds elapsed since January 1, 1970 UTC when this
// circuit was completed.
uint64 timestamp_ns = 11;
// TODO(roasbeef): add settlement latency?
// * use FPE on the chan id?
// * also list failures?
@ -3171,12 +3664,46 @@ message MacaroonPermission {
message BakeMacaroonRequest {
// The list of permissions the new macaroon should grant.
repeated MacaroonPermission permissions = 1;
// The root key ID used to create the macaroon, must be a positive integer.
uint64 root_key_id = 2;
}
message BakeMacaroonResponse {
// The hex encoded macaroon, serialized in binary format.
string macaroon = 1;
}
message ListMacaroonIDsRequest {
}
message ListMacaroonIDsResponse {
// The list of root key IDs that are in use.
repeated uint64 root_key_ids = 1;
}
message DeleteMacaroonIDRequest {
// The root key ID to be removed.
uint64 root_key_id = 1;
}
message DeleteMacaroonIDResponse {
// A boolean indicates that the deletion is successful.
bool deleted = 1;
}
message MacaroonPermissionList {
// A list of macaroon permissions.
repeated MacaroonPermission permissions = 1;
}
message ListPermissionsRequest {
}
message ListPermissionsResponse {
/*
A map between all RPC method URIs and their required macaroon permissions to
access them.
*/
map<string, MacaroonPermissionList> method_permissions = 1;
}
message Failure {
enum FailureCode {
/*
@ -3209,6 +3736,7 @@ message Failure {
PERMANENT_CHANNEL_FAILURE = 21;
EXPIRY_TOO_FAR = 22;
MPP_TIMEOUT = 23;
INVALID_ONION_PAYLOAD = 24;
/*
An internal error occurred.
@ -3339,3 +3867,14 @@ message ChannelUpdate {
*/
bytes extra_opaque_data = 12;
}
message MacaroonId {
bytes nonce = 1;
bytes storageId = 2;
repeated Op ops = 3;
}
message Op {
string entity = 1;
repeated string actions = 2;
}

File diff suppressed because it is too large Load diff

View file

@ -141,8 +141,24 @@ message InitWalletRequest {
recover the funds in each channel from a remote force closed transaction.
*/
ChanBackupSnapshot channel_backups = 5;
/*
stateless_init is an optional argument instructing the daemon NOT to create
any *.macaroon files in its filesystem. If this parameter is set, then the
admin macaroon returned in the response MUST be stored by the caller of the
RPC as otherwise all access to the daemon will be lost!
*/
bool stateless_init = 6;
}
message InitWalletResponse {
/*
The binary serialized admin macaroon that can be used to access the daemon
after creating the wallet. If the stateless_init parameter was set to true,
this is the ONLY copy of the macaroon and MUST be stored safely by the
caller. Otherwise a copy of this macaroon is also persisted on disk by the
daemon, together with other macaroon files.
*/
bytes admin_macaroon = 1;
}
message UnlockWalletRequest {
@ -171,6 +187,12 @@ message UnlockWalletRequest {
recover the funds in each channel from a remote force closed transaction.
*/
ChanBackupSnapshot channel_backups = 3;
/*
stateless_init is an optional argument instructing the daemon NOT to create
any *.macaroon files in its file system.
*/
bool stateless_init = 4;
}
message UnlockWalletResponse {
}
@ -187,6 +209,30 @@ message ChangePasswordRequest {
daemon. When using REST, this field must be encoded as base64.
*/
bytes new_password = 2;
/*
stateless_init is an optional argument instructing the daemon NOT to create
any *.macaroon files in its filesystem. If this parameter is set, then the
admin macaroon returned in the response MUST be stored by the caller of the
RPC as otherwise all access to the daemon will be lost!
*/
bool stateless_init = 3;
/*
new_macaroon_root_key is an optional argument instructing the daemon to
rotate the macaroon root key when set to true. This will invalidate all
previously generated macaroons.
*/
bool new_macaroon_root_key = 4;
}
message ChangePasswordResponse {
/*
The binary serialized admin macaroon that can be used to access the daemon
after rotating the macaroon root key. If both the stateless_init and
new_macaroon_root_key parameter were set to true, this is the ONLY copy of
the macaroon that was created from the new root key and MUST be stored
safely by the caller. Otherwise a copy of this macaroon is also persisted on
disk by the daemon, together with other macaroon files.
*/
bytes admin_macaroon = 1;
}

View file

@ -21,19 +21,31 @@
<script>
gun = Gun({
peers: [
'https://gun.shock.network:8765/gun',
//'http://gun2.shock.network:8765/gun'
],
peers: ['https://gun.shock.network/gun','https://gun-eu.shock.network/gun'],
axe: false
})
setInterval(() => {
console.log('peers', Object.keys(gun.back('opt').peers))
},5000)
user = gun.user()
node = gun.get('foo').get('bar')
capdog = gun.user('qsgziGQS99sPUxV1CRwwRckn9cG6cJ3prbDsrbL7qko.oRbCaVKwJFQURWrS1pFhkfAzrkEvkQgBRIUz9uoWtrg')
explorador = gun.user(`zBQkPb1ohbdjVp_29TKFXyv_0g3amKgRJRqKr0E-Oyk.yB1P4UmOrzkGuPEL5zUgLETJWyYpM9K3l2ycNlt8jiY`)
pleb = gun.user(`e1C60yZ1Cm3Mkceq7L9SmH6QQ7zsDdbibPFeQz7tNsk._1VlqJNo8BIJmzz2D5WELiMiRjBh3DBlDvzC6fNltZw`)
boblazar = gun.user(`g6fcZ_1zyFwV1jR1eNK1GTUr2sSlEDL1D5vBsSvKoKg.2OA9MQHO2c1wjv6L-VPBFf36EZXjgQ1nnZFbOE9_5-o`)
const UPPER = 100
clearSet = (node) => {
node.once((map) => {
Object.keys(map).forEach(key => node.get(key).put(null))
}, { wait: 1500 })
}
put = async () => {
const res = await fetch(`https://jsonplaceholder.typicode.com/posts`)
/** @type {Array<any>} */

View file

@ -18,6 +18,9 @@ program
.option("-c, --mainnet", "run server on mainnet mode")
.option("-t, --tunnel","create a localtunnel to listen behind a firewall")
.option('-r, --lndaddress', 'Lnd address, defaults to 127.0.0.1:9735')
.option('-a, --use-TLS', 'use TLS')
.option('-i, --https-cert [path]', 'HTTPS certificate path')
.option('-y, --https-cert-key [path]', 'HTTPS certificate key path')
.parse(process.argv);
// load server

6
nodemon.json Normal file
View file

@ -0,0 +1,6 @@
{
"watch": ["src/", "services/", "utils/", "constants/", "config/"],
"ignore": ["node_modules/", ".git", "radata/", ".storage/", "*.log.*"],
"verbose": true,
"ext": "js"
}

View file

@ -1,18 +1,19 @@
{
"name": "shockapi",
"version": "2021.04.10",
"version": "2021.9.19",
"description": "",
"main": "src/server.js",
"scripts": {
"start": "node main.js -h 0.0.0.0 -c",
"dev": "node --trace-warnings --max-old-space-size=4096 main.js -h 0.0.0.0",
"dev:watch": "nodemon main.js -- -h 0.0.0.0",
"test": "jest --no-cache",
"test:watch": "jest --no-cache --watch",
"dev:attach": "node --inspect --trace-warnings --max-old-space-size=4096 main.js -h 0.0.0.0",
"test": "mocha ./utils -b -t 50000 --recursive",
"typecheck": "tsc",
"lint": "eslint \"services/gunDB/**/*.js\"",
"format": "prettier --write \"./**/*.js\"",
"test:gun": "ts-node src/__gun__tests__/*.ts && rimraf -rf GUN-TEST-*"
"test:gun": "ts-node src/__gun__tests__/*.ts && rimraf -rf GUN-TEST-*",
"test:gun:epub": "node testscript.js on capdog.epub"
},
"author": "",
"license": "ISC",
@ -20,7 +21,7 @@
"@grpc/grpc-js": "^1.2.2",
"@grpc/proto-loader": "^0.5.5",
"assert-never": "^1.2.1",
"axios": "^0.21.1",
"axios": "^1.1.2",
"basic-auth": "^2.0.0",
"big.js": "^5.2.2",
"bitcore-lib": "^0.15.0",
@ -30,7 +31,7 @@
"command-exists": "^1.2.6",
"commander": "^2.9.0",
"compression": "^1.7.4",
"cors": "^2.8.4",
"cors": "^2.8.5",
"debug": "^3.1.0",
"dotenv": "^8.1.0",
"eccrypto": "^1.1.6",
@ -38,9 +39,9 @@
"express-session": "^1.17.1",
"google-proto-files": "^1.0.3",
"graphviz": "0.0.8",
"grpc": "1.24.4",
"gun": "git://github.com/amark/gun#97aa976c97e6219a9f93095d32c220dcd371ca62",
"gun": "amark/gun#77162fcb68eb61f24d980fa3f3653598f56ee593",
"husky": "^4.2.5",
"hybrid-relay-client": "git://github.com/shocknet/hybridRelayClient#a99e57794cf7a62f0f5b6aef53a35d6b77d0a889",
"jsonfile": "^4.0.0",
"jsonwebtoken": "^8.3.0",
"localtunnel": "git://github.com/shocknet/localtunnel#40cc2c2a46b05da2217bf2e20da11a5343a5cce7",
@ -55,11 +56,12 @@
"request-promise": "^4.2.6",
"response-time": "^2.3.2",
"shelljs": "^0.8.2",
"shock-common": "^34.0.0",
"shock-common": "^37.0.0",
"socket.io": "4.0.1",
"socket.io-msgpack-parser": "^3.0.1",
"text-encoding": "^0.7.0",
"tingodb": "^0.6.1",
"uuid": "3.x.x",
"winston": "^3.3.3",
"winston-daily-rotate-file": "^4.5.0"
},
@ -70,29 +72,32 @@
"@types/eccrypto": "^1.1.2",
"@types/express": "^4.17.1",
"@types/gun": "^0.9.2",
"@types/jest": "^24.0.18",
"@types/jsonwebtoken": "^8.3.7",
"@types/lodash": "^4.14.168",
"@types/mocha": "^9.0.0",
"@types/node-fetch": "^2.5.8",
"@types/node-persist": "^3.1.1",
"@types/ramda": "types/npm-ramda#dist",
"@types/random-words": "^1.1.2",
"@types/react": "16.x.x",
"@types/uuid": "^3.4.5",
"@types/uuid": "3.x.x",
"babel-eslint": "^10.1.0",
"babel-plugin-transform-es2015-modules-commonjs": "^6.26.2",
"eslint": "^6.6.0",
"eslint": "^8.25.0",
"eslint-config-prettier": "^6.5.0",
"eslint-plugin-babel": "^5.3.1",
"eslint-plugin-jest": "^22.20.1",
"eslint-plugin-mocha": "^9.0.0",
"eslint-plugin-prettier": "^3.1.4",
"jest": "^24.9.0",
"expect": "^27.2.1",
"lint-staged": "^10.2.2",
"nodemon": "^1.19.3",
"mocha": "^9.1.1",
"nodemon": "^2.0.7",
"prettier": "^1.18.2",
"random-words": "^1.1.1",
"rimraf": "^3.0.2",
"ts-node": "^9.1.1",
"ts-type": "^1.2.16",
"typescript": "latest"
"ts-type": "^3.0.1",
"typescript": "^4.5.4"
},
"lint-staged": {
"*.js": [
@ -104,8 +109,10 @@
]
},
"husky": {
"hooks": {
"pre-commit": "yarn lint && yarn typecheck && yarn lint-staged"
}
}
"hooks": {}
},
"engines": {
"npm": "Use yarn!"
},
"packageManager": "yarn@3.1.1"
}

78
public/localHomepage.html Normal file
View file

@ -0,0 +1,78 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Document</title>
<script src="/qrCodeGenerator"></script>
</head>
<body>
<p id="errorContainer"></p>
<div>
<h3>Tunnel</h3>
<p id="tunnelState"></p>
</div>
<div>
<h3>Access Secret</h3>
<p id="accessSecretState"></p>
</div>
<div id="qrcode"></div>
<script>
// Fetches this node's access information and renders the tunnel state, the
// access secret, and a QR code encoding the resulting connection string.
// All API-provided strings are written with textContent (not innerHTML) so a
// malformed/hostile response cannot inject markup into the page.
fetch(`${window.location.origin}/api/accessInfo`)
    .then(res => res.json())
    .then(j => {
        console.log(j)
        if(j.field){
            // NOTE(review): `j.field` appears to mark an error response —
            // confirm against the /api/accessInfo handler.
            document.querySelector('#errorContainer').textContent ='there was an error, unable to load access information, reason: '+ j.message
            return
        }
        const tunnelUrl = handleTunnelInfo(j)
        const accessCode = handleAccessCode(j)
        // Prefer the tunnel URL; fall back to the host the page was served from.
        const baseUrl = tunnelUrl ? tunnelUrl : window.location.host
        const finalUrl = accessCode ? `${accessCode}#${baseUrl}` : baseUrl
        new QRCode(document.getElementById("qrcode"), finalUrl);
    })
    .catch(e => {
        console.log(e.message)
    })
// Renders the tunnel paragraph; returns the "relayId@relayUrl" string when a
// tunnel is available, undefined otherwise.
const handleTunnelInfo = (res) => {
    const tunnelState = document.querySelector("#tunnelState")
    if(res.tunnelDisabled){
        tunnelState.textContent = 'The tunnel service is disabled'
        return
    }
    if(res.relayNotFound) {
        tunnelState.textContent = 'The tunnel service seems broken'
        return
    }
    tunnelState.textContent = `Tunnel URL: ${res.relayId}@${res.relayUrl}`
    return `${res.relayId}@${res.relayUrl}`
}
// Renders the access-secret paragraph; returns the access code when one is
// available and unused, undefined otherwise.
const handleAccessCode = (res) => {
    const accessSecretState = document.querySelector("#accessSecretState")
    if(res.accessSecretDisabled){
        accessSecretState.textContent = 'The access secret is disabled'
        return
    }
    if(res.accessCodeNotFound){
        accessSecretState.textContent = 'The access secret seems broken'
        return
    }
    if(res.accessCodeUsed){
        accessSecretState.textContent = 'The access secret was already used'
        return
    }
    accessSecretState.textContent = `Access Secret: ${res.accessCode}`
    return res.accessCode
}
</script>
</body>
</html>

View file

@ -48,22 +48,33 @@
}
</style>
<script src="https://cdnjs.cloudflare.com/ajax/libs/socket.io/4.0.1/socket.io.js" integrity="sha512-q/dWJ3kcmjBLU4Qc47E4A9kTB4m3wuTY7vkFJDTZKjTs8jhyGQnaUrxa0Ytd0ssMZhbNua9hE+E7Qv1j+DyZwA==" crossorigin="anonymous"></script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/socket.io/3.1.3/socket.io.msgpack.min.js" crossorigin="anonymous"></script>
</head>
<body>
<div class="main">
<div class="content hide">
<p id="content-name">fdsigfudfsbigbfduigbdfb</p>
<p id="content-name">some random name i dont know</p>
<p id="content-message">JUST TIPPED YOU!</p>
<p id="content-amount">100sats</p>
</div>
</div>
<script>
console.log(location.origin)
const queryString = window.location.search;
const urlParams = new URLSearchParams(queryString);
const postID = urlParams.get("postID")
var socket = io(`${location.origin}/streams`);
socket.emit("postID",postID)
const accessId = urlParams.get("accessId")
const relayId = urlParams.get("x-shock-hybrid-relay-id-x")
const socketSetting = {
reconnection: true,
rejectUnauthorized: false,
withCredentials: true,
transports: ["websocket"]
}
var socket = io(`${location.origin}/streams`,socketSetting);
socket.emit('hybridRelayId',{id:relayId})
socket.on("connect", () => {
setTimeout(()=>{socket.emit("accessId",accessId)},500)
})
let latestTimeout = null
socket.on("update",(update)=>{
const name = document.querySelector("#content-name")

1
public/qrcode.min.js vendored Normal file

File diff suppressed because one or more lines are too long

View file

@ -6,7 +6,7 @@ const jwt = require('jsonwebtoken')
const uuidv1 = require('uuid/v1')
const jsonfile = require('jsonfile')
const path = require('path')
const logger = require('winston')
const logger = require('../../config/log')
const Storage = require('node-persist')
const FS = require('../../utils/fs')

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

View file

@ -2,598 +2,23 @@
* @prettier
*/
const debounce = require('lodash/debounce')
const logger = require('winston')
const {
Constants: { ErrorCode },
Schema,
Utils: CommonUtils
Constants: { ErrorCode }
} = require('shock-common')
const Key = require('../key')
const Utils = require('../utils')
/**
* @typedef {import('../SimpleGUN').UserGUNNode} UserGUNNode
* @typedef {import('../SimpleGUN').GUNNode} GUNNode
* @typedef {import('../SimpleGUN').ISEA} ISEA
* @typedef {import('../SimpleGUN').ListenerData} ListenerData
* @typedef {import('shock-common').Schema.HandshakeRequest} HandshakeRequest
* @typedef {import('shock-common').Schema.Message} Message
* @typedef {import('shock-common').Schema.Outgoing} Outgoing
* @typedef {import('shock-common').Schema.PartialOutgoing} PartialOutgoing
* @typedef {import('shock-common').Schema.Chat} Chat
* @typedef {import('shock-common').Schema.ChatMessage} ChatMessage
* @typedef {import('shock-common').Schema.SimpleSentRequest} SimpleSentRequest
* @typedef {import('shock-common').Schema.SimpleReceivedRequest} SimpleReceivedRequest
*/
/// <reference path="../../../utils/GunSmith/Smith.ts" />
// Wait time, in milliseconds, used to debounce listener notifications.
const DEBOUNCE_WAIT_TIME = 500
/**
 * Subscribes to the authed user's user-to-incoming map (peer public key ->
 * encrypted incoming feed ID), decrypts each entry with the user's secret and
 * invokes `cb` (debounced) with the accumulated plaintext map.
 * @param {(userToIncoming: Record<string, string>) => void} cb
 * @param {UserGUNNode} user Pass only for testing purposes.
 * @param {ISEA} SEA
 * @returns {void}
 */
const __onUserToIncoming = (cb, user, SEA) => {
  if (!user.is) {
    throw new Error(ErrorCode.NOT_AUTH)
  }
  const callb = debounce(cb, DEBOUNCE_WAIT_TIME)
  /** @type {Record<string, string>} */
  const userToIncoming = {}
  const mySecret = require('../../Mediator').getMySecret()
  user
    .get(Key.USER_TO_INCOMING)
    .map()
    .on(async (encryptedIncomingID, userPub) => {
      if (typeof encryptedIncomingID !== 'string') {
        if (encryptedIncomingID === null) {
          // on disconnect
          delete userToIncoming[userPub]
        } else {
          logger.error(
            'got a non string non null value inside user to incoming'
          )
        }
        return
      }
      if (encryptedIncomingID.length === 0) {
        logger.error('got an empty string value')
        return
      }
      const incomingID = await SEA.decrypt(encryptedIncomingID, mySecret)
      // SEA.decrypt yields undefined when decryption fails; skip the entry.
      if (typeof incomingID === 'undefined') {
        logger.warn('could not decrypt incomingID inside __onUserToIncoming')
        return
      }
      userToIncoming[userPub] = incomingID
      callb(userToIncoming)
    })
}
/** @type {Set<(av: string|null) => void>} */
const avatarListeners = new Set()
/** @type {string|null} */
let currentAvatar = null
// Returns the last avatar value cached from the authed user's profile.
const getAvatar = () => currentAvatar
/**
 * Caches the new avatar and notifies every registered avatar listener.
 * @param {string|null} av
 */
const setAvatar = av => {
  currentAvatar = av
  avatarListeners.forEach(l => l(currentAvatar))
}
// True once the gun subscription backing onAvatar() has been set up.
let avatarSubbed = false
/**
 * Registers `cb` for avatar updates; fires immediately with the cached value
 * and lazily sets up the single underlying gun subscription on first call.
 * @param {(avatar: string|null) => void} cb
 * @param {UserGUNNode} user Pass only for testing purposes.
 * @throws {Error} If user hasn't been auth.
 * @returns {() => void} Unsubscribe function.
 */
const onAvatar = (cb, user) => {
  if (!user.is) {
    throw new Error(ErrorCode.NOT_AUTH)
  }
  avatarListeners.add(cb)
  cb(currentAvatar)
  if (!avatarSubbed) {
    avatarSubbed = true
    user
      .get(Key.PROFILE_BINARY)
      .get(Key.AVATAR)
      .on(avatar => {
        // Ignore anything that isn't a string or an explicit null.
        if (typeof avatar === 'string' || avatar === null) {
          setAvatar(avatar)
        }
      })
  }
  return () => {
    avatarListeners.delete(cb)
  }
}
/**
 * Subscribes to the authed user's blacklist and invokes `cb` (debounced) with
 * the accumulated list of blacklisted public keys.
 * @param {(blacklist: string[]) => void} cb
 * @param {UserGUNNode} user
 * @throws {Error} If user hasn't been auth.
 * @returns {void}
 */
const onBlacklist = (cb, user) => {
  /** @type {string[]} */
  const blacklist = []
  if (!user.is) {
    throw new Error(ErrorCode.NOT_AUTH)
  }
  const callb = debounce(cb, DEBOUNCE_WAIT_TIME)
  // Initial value if no items are in blacklist in gun
  callb(blacklist)
  user
    .get(Key.BLACKLIST)
    .map()
    .on(publicKey => {
      if (typeof publicKey === 'string' && publicKey.length > 0) {
        // Gun's .on re-emits values on updates; without this guard the same
        // public key would be pushed (and reported) multiple times.
        if (!blacklist.includes(publicKey)) {
          blacklist.push(publicKey)
          callb(blacklist)
        }
      } else {
        logger.warn('Invalid public key received for blacklist')
      }
    })
}
/** @type {Set<(addr: string|null) => void>} */
const addressListeners = new Set()
/** @type {string|null} */
let currentAddress = null
// Returns the last handshake address cached for the authed user.
const getHandshakeAddress = () => currentAddress
/**
 * Caches the new handshake address and notifies every registered listener.
 * @param {string|null} addr
 */
const setAddress = addr => {
  currentAddress = addr
  addressListeners.forEach(l => l(currentAddress))
}
// True once the gun subscription backing onCurrentHandshakeAddress() is live.
let addrSubbed = false
/**
 * Registers `cb` for handshake-address updates; fires immediately with the
 * cached value and lazily sets up the single gun subscription on first call.
 * @param {(currentHandshakeAddress: string|null) => void} cb
 * @param {UserGUNNode} user
 * @throws {Error} If user hasn't been auth.
 * @returns {() => void} Unsubscribe function.
 */
const onCurrentHandshakeAddress = (cb, user) => {
  if (!user.is) {
    throw new Error(ErrorCode.NOT_AUTH)
  }
  addressListeners.add(cb)
  cb(currentAddress)
  if (!addrSubbed) {
    addrSubbed = true
    user.get(Key.CURRENT_HANDSHAKE_ADDRESS).on(addr => {
      // Non-string values are treated as "no address".
      if (typeof addr !== 'string') {
        logger.error('expected handshake address to be an string')
        setAddress(null)
        return
      }
      setAddress(addr)
    })
  }
  return () => {
    addressListeners.delete(cb)
  }
}
/** @type {Set<(dn: string|null) => void>} */
const dnListeners = new Set()
/** @type {string|null} */
let currentDn = null
// Returns the last display name cached from the authed user's profile.
const getDisplayName = () => currentDn
/**
 * Caches the new display name and notifies every registered listener.
 * @param {string|null} dn
 */
const setDn = dn => {
  currentDn = dn
  dnListeners.forEach(l => l(currentDn))
}
// True once the gun subscription backing onDisplayName() has been set up.
let dnSubbed = false
/**
 * Registers `cb` for display-name updates. Fires immediately with the cached
 * value and, on the first call only, wires up the single gun subscription.
 * @param {(displayName: string|null) => void} cb
 * @param {UserGUNNode} user Pass only for testing purposes.
 * @throws {Error} If user hasn't been auth.
 * @returns {() => void} Unsubscribe function.
 */
const onDisplayName = (cb, user) => {
  if (!user.is) {
    throw new Error(ErrorCode.NOT_AUTH)
  }
  // Deliver the cached value right away, then register for future updates.
  cb(currentDn)
  dnListeners.add(cb)
  if (!dnSubbed) {
    dnSubbed = true
    const handleUpdate = displayName => {
      // Only strings and explicit nulls are valid display-name values.
      const isValid = typeof displayName === 'string' || displayName === null
      if (isValid) {
        setDn(displayName)
      }
    }
    user
      .get(Key.PROFILE)
      .get(Key.DISPLAY_NAME)
      .on(handleUpdate)
  }
  return () => {
    dnListeners.delete(cb)
  }
}
/**
 * Subscribes to the messages inside one of a peer's outgoing feeds, decrypts
 * each message body with the shared secret, and invokes `cb` (debounced) with
 * the accumulated map of messages.
 * @param {(messages: Record<string, Message>) => void} cb
 * @param {string} userPK Public key of the user from whom the incoming
 * messages will be obtained.
 * @param {string} incomingFeedID ID of the outgoing feed from which the
 * incoming messages will be obtained.
 * @param {GUNNode} gun (Pass only for testing purposes)
 * @param {UserGUNNode} user
 * @param {ISEA} SEA
 * @returns {void}
 */
const onIncomingMessages = (cb, userPK, incomingFeedID, gun, user, SEA) => {
  if (!user.is) {
    throw new Error(ErrorCode.NOT_AUTH)
  }
  const callb = debounce(cb, DEBOUNCE_WAIT_TIME)
  const otherUser = gun.user(userPK)
  /**
   * Accumulator for the decrypted messages, keyed by message id.
   * @type {Record<string, Message>}
   */
  const messages = {}
  callb(messages)
  otherUser
    .get(Key.OUTGOINGS)
    .get(incomingFeedID)
    .get(Key.MESSAGES)
    .map()
    .on(async (data, key) => {
      if (!Schema.isMessage(data)) {
        logger.warn('non-message received')
        return
      }
      // Derive the shared secret from the peer's encryption pub key and our
      // key pair, then decrypt the message body with it.
      /** @type {string} */
      const recipientEpub = await Utils.pubToEpub(userPK)
      const secret = await SEA.secret(recipientEpub, user._.sea)
      let { body } = data
      body = await SEA.decrypt(body, secret)
      messages[key] = {
        body,
        timestamp: data.timestamp
      }
      callb(messages)
    })
}
/**
* @typedef {Record<string, Outgoing|null>} Outgoings
* @typedef {(outgoings: Outgoings) => void} OutgoingsListener
*/
/**
* @type {Outgoings}
*/
let currentOutgoings = {}
const getCurrentOutgoings = () => currentOutgoings
/** @type {Set<OutgoingsListener>} */
const outgoingsListeners = new Set()
outgoingsListeners.add(o => {
const values = Object.values(o)
const nulls = values.filter(x => x === null).length
const nonNulls = values.length - nulls
logger.info(`new outgoings, ${nulls} nulls and ${nonNulls} nonNulls`)
})
const notifyOutgoingsListeners = () => {
outgoingsListeners.forEach(l => l(currentOutgoings))
}
let outSubbed = false
/**
* @param {OutgoingsListener} cb
* @returns {() => void}
*/
const onOutgoing = cb => {
outgoingsListeners.add(cb)
cb(currentOutgoings)
if (!outSubbed) {
const user = require('../../Mediator').getUser()
user.get(Key.OUTGOINGS).open(
debounce(async data => {
try {
if (typeof data !== 'object' || data === null) {
currentOutgoings = {}
notifyOutgoingsListeners()
return
}
/** @type {Record<string, Outgoing|null>} */
const newOuts = {}
const SEA = require('../../Mediator').mySEA
const mySecret = await Utils.mySecret()
await CommonUtils.asyncForEach(
Object.entries(data),
async ([id, out]) => {
if (typeof out !== 'object') {
return
}
if (out === null) {
newOuts[id] = null
return
}
const { with: encPub, messages } = out
if (typeof encPub !== 'string') {
return
}
const pub = await SEA.decrypt(encPub, mySecret)
if (!newOuts[id]) {
newOuts[id] = {
with: pub,
messages: {}
}
}
const ourSec = await SEA.secret(
await Utils.pubToEpub(pub),
user._.sea
)
if (typeof messages === 'object' && messages !== null) {
await CommonUtils.asyncForEach(
Object.entries(messages),
async ([mid, msg]) => {
if (typeof msg === 'object' && msg !== null) {
if (
typeof msg.body === 'string' &&
typeof msg.timestamp === 'number'
) {
const newOut = newOuts[id]
if (!newOut) {
return
}
newOut.messages[mid] = {
body: await SEA.decrypt(msg.body, ourSec),
timestamp: msg.timestamp
}
}
}
}
)
}
}
)
currentOutgoings = newOuts
notifyOutgoingsListeners()
} catch (e) {
logger.info('--------------------------')
logger.info('Events -> onOutgoing')
logger.info(e)
logger.info('--------------------------')
}
}, 400)
)
outSubbed = true
}
return () => {
outgoingsListeners.delete(cb)
}
}
////////////////////////////////////////////////////////////////////////////////
/**
 * @typedef {(chats: Chat[]) => void} ChatsListener
 */
/**
 * Cache of the massaged Chat list produced by processChats().
 * @type {Chat[]}
 */
let currentChats = []
const getChats = () => currentChats
/** @type {Set<ChatsListener>} */
const chatsListeners = new Set()
// Permanent listener that logs how many chats are cached.
chatsListeners.add(c => {
  logger.info(`Chats: ${c.length}`)
})
// Pushes the current chats snapshot to every registered listener.
const notifyChatsListeners = () => {
  chatsListeners.forEach(l => l(currentChats))
}
// Recomputes the Chat list by joining the decrypted outgoing feeds with the
// incoming feeds and the avatar/display-name/last-seen streams. Debounced so
// bursts of stream updates coalesce into a single recomputation.
const processChats = debounce(() => {
  const Streams = require('../streams')
  const pubToAvatar = Streams.getPubToAvatar()
  const pubToDn = Streams.getPubToDn()
  const pubToLastSeenApp = Streams.getPubToLastSeenApp()
  const existingOutgoings = /** @type {[string, Outgoing][]} */ (Object.entries(
    getCurrentOutgoings()
  ).filter(([_, o]) => o !== null))
  const pubToFeed = Streams.getPubToFeed()
  /** @type {Chat[]} */
  const newChats = []
  for (const [outID, out] of existingOutgoings) {
    // Ask the streams to start tracking this counterparty if not yet tracked
    // (the no-op callbacks just trigger the underlying subscriptions).
    if (typeof pubToAvatar[out.with] === 'undefined') {
      // eslint-disable-next-line no-empty-function
      Streams.onAvatar(() => {}, out.with)()
    }
    if (typeof pubToDn[out.with] === 'undefined') {
      // eslint-disable-next-line no-empty-function
      Streams.onDisplayName(() => {}, out.with)()
    }
    if (typeof pubToLastSeenApp[out.with] === 'undefined') {
      // eslint-disable-next-line no-empty-function
      Streams.onPubToLastSeenApp(() => {}, out.with)()
    }
    /** @type {ChatMessage[]} */
    let msgs = Object.entries(out.messages)
      .map(([mid, m]) => ({
        id: mid,
        outgoing: true,
        body: m.body,
        timestamp: m.timestamp
      }))
      // filter out null messages
      .filter(m => typeof m.body === 'string')
    const incoming = pubToFeed[out.with]
    if (Array.isArray(incoming)) {
      msgs = [...msgs, ...incoming]
    }
    /** @type {Chat} */
    const chat = {
      recipientPublicKey: out.with,
      didDisconnect: pubToFeed[out.with] === 'disconnected',
      id: out.with + outID,
      messages: msgs,
      recipientAvatar: null,
      recipientDisplayName: null,
      lastSeenApp: pubToLastSeenApp[out.with] || null
    }
    newChats.push(chat)
  }
  // Only keep chats whose counterparty feed state is known (message list or
  // explicit disconnect); drop those still indeterminate.
  currentChats = newChats.filter(
    c =>
      Array.isArray(pubToFeed[c.recipientPublicKey]) ||
      pubToFeed[c.recipientPublicKey] === 'disconnected'
  )
  notifyChatsListeners()
}, 750)
let onChatsSubbed = false
/**
 * Massages all of the more primitive data structures into a more manageable
 * 'Chat' paradigm.
 * @param {ChatsListener} cb
 * @throws {Error} If `cb` is already subscribed.
 * @returns {() => void} Unsubscribe function.
 */
const onChats = cb => {
  // Set#add returns the Set itself (always truthy), so the previous
  // `if (!chatsListeners.add(cb))` guard could never detect a double
  // subscription. Check membership explicitly instead.
  if (chatsListeners.has(cb)) {
    throw new Error('Tried to subscribe twice')
  }
  chatsListeners.add(cb)
  cb(currentChats)
  if (!onChatsSubbed) {
    const Streams = require('../streams')
    // Any change in these sources triggers a (debounced) rebuild of the list.
    onOutgoing(processChats)
    Streams.onAvatar(processChats)
    Streams.onDisplayName(processChats)
    Streams.onPubToFeed(processChats)
    Streams.onPubToLastSeenApp(processChats)
    onChatsSubbed = true
  }
  return () => {
    // Set#delete does return a boolean, so this guard works as intended.
    if (!chatsListeners.delete(cb)) {
      throw new Error('Tried to unsubscribe twice')
    }
  }
}
/** @type {string|null} */
let currentBio = null
/**
 * Subscribes to the authed user's bio and invokes `cb` (debounced) with the
 * current value and whenever it changes.
 * @param {(bio: string|null) => void} cb
 * @param {UserGUNNode} user Pass only for testing purposes.
 * @throws {Error} If user hasn't been auth.
 * @returns {void}
 */
const onBio = (cb, user) => {
  if (!user.is) {
    throw new Error(ErrorCode.NOT_AUTH)
  }
  const callb = debounce(cb, DEBOUNCE_WAIT_TIME)
  // Initial value if bio is undefined in gun
  callb(currentBio)
  user.get(Key.BIO).on(bio => {
    if (typeof bio === 'string' || bio === null) {
      currentBio = bio
      callb(bio)
    }
  })
}
/** @type {string|null} */
let currentSeedBackup = null
/**
* @param {(seedBackup: string|null) => void} cb
* @param {UserGUNNode} user
* @param {ISEA} SEA
* @param {Smith.UserSmithNode} user
* @param {import('../SimpleGUN').ISEA} SEA
* @throws {Error} If user hasn't been auth.
* @returns {void}
*/
@ -616,23 +41,5 @@ const onSeedBackup = (cb, user, SEA) => {
}
module.exports = {
__onUserToIncoming,
onAvatar,
onBlacklist,
onCurrentHandshakeAddress,
onDisplayName,
onIncomingMessages,
onOutgoing,
getCurrentOutgoings,
onSimplerReceivedRequests: require('./onReceivedReqs').onReceivedReqs,
onSimplerSentRequests: require('./onSentReqs').onSentReqs,
getCurrentSentReqs: require('./onSentReqs').getCurrentSentReqs,
getCurrentReceivedReqs: require('./onReceivedReqs').getReceivedReqs,
onBio,
onSeedBackup,
onChats,
getAvatar,
getDisplayName,
getHandshakeAddress,
getChats
onSeedBackup
}

View file

@ -1,151 +0,0 @@
/** @format */
const debounce = require('lodash/debounce')
const logger = require('winston')
const { Schema } = require('shock-common')
const size = require('lodash/size')
const Key = require('../key')
const Streams = require('../streams')
/**
* @typedef {Readonly<import('shock-common').Schema.SimpleReceivedRequest>} SimpleReceivedRequest
* @typedef {(reqs: ReadonlyArray<SimpleReceivedRequest>) => void} Listener
*/
/** @type {Set<Listener>} */
const listeners = new Set()
/** @type {string|null} */
let currentAddress = null
/** @type {Record<string, SimpleReceivedRequest>} */
let currReceivedReqsMap = {}
/**
 * Unprocessed requests in current handshake node.
 * @type {Record<string, import('shock-common').Schema.HandshakeRequest>}
 */
let currAddressData = {}
/** @returns {SimpleReceivedRequest[]} */
const getReceivedReqs = () => Object.values(currReceivedReqsMap)
/**
 * Replaces the cached received-requests map and notifies every listener.
 * @param {Record<string, SimpleReceivedRequest>} reqs
 */
const setReceivedReqsMap = reqs => {
  currReceivedReqsMap = reqs
  listeners.forEach(l => l(getReceivedReqs()))
}
// Permanent listener that logs how many received requests are cached.
listeners.add(() => {
  logger.info(`new received reqs: ${size(getReceivedReqs())}`)
})
// Recomputes the received-requests map from the current handshake node data
// and the avatar/display-name/feed streams. Debounced so bursts of stream
// updates coalesce into a single recomputation.
const react = debounce(() => {
  /** @type {Record<string, SimpleReceivedRequest>} */
  const newReceivedReqsMap = {}
  const pubToFeed = Streams.getPubToFeed()
  const pubToAvatar = Streams.getPubToAvatar()
  const pubToDn = Streams.getPubToDn()
  for (const [id, req] of Object.entries(currAddressData)) {
    const inContact = Array.isArray(pubToFeed[req.from])
    const isDisconnected = pubToFeed[req.from] === 'disconnected'
    // Ask the streams to start tracking this requestor if not yet tracked
    // (the no-op callbacks just trigger the underlying subscriptions).
    if (typeof pubToAvatar[req.from] === 'undefined') {
      // eslint-disable-next-line no-empty-function
      Streams.onAvatar(() => {}, req.from)()
    }
    if (typeof pubToDn[req.from] === 'undefined') {
      // eslint-disable-next-line no-empty-function
      Streams.onDisplayName(() => {}, req.from)()
    }
    // Only surface requests from users we are not already connected to.
    if (!inContact && !isDisconnected) {
      newReceivedReqsMap[req.from] = {
        id,
        requestorAvatar: null,
        requestorDisplayName: null,
        requestorPK: req.from,
        timestamp: req.timestamp
      }
    }
  }
  setReceivedReqsMap(newReceivedReqsMap)
}, 750)
/**
 * Builds an open() listener for the handshake node at `addr`. Accumulates
 * valid handshake requests into `currAddressData` (write-once per id) and
 * triggers a debounced recomputation of the received-requests list.
 * @param {string} addr
 * @returns {(data: import('../SimpleGUN').OpenListenerData) => void}
 */
const listenerForAddr = addr => data => {
  // did invalidate
  if (addr !== currentAddress) {
    return
  }
  if (typeof data !== 'object' || data === null) {
    currAddressData = {}
  } else {
    for (const [id, req] of Object.entries(data)) {
      // no need to update them just write them once
      if (Schema.isHandshakeRequest(req) && !currAddressData[id]) {
        currAddressData[id] = req
      }
    }
  }
  // Log the number of accumulated requests. The previous code computed
  // size(addr) — the length of the address string — which did not match
  // what the message claims to report.
  logger.info('data for address length: ' + size(currAddressData))
  react()
}
// True once the handshake-address subscription has been set up.
let subbed = false
/**
 * Subscribes `cb` to the simplified received handshake requests. Fires
 * immediately with the current snapshot; on first call, follows the user's
 * current handshake address and opens the corresponding handshake node.
 * @param {Listener} cb
 * @returns {() => void} Unsubscribe function.
 */
const onReceivedReqs = cb => {
  listeners.add(cb)
  cb(getReceivedReqs())
  if (!subbed) {
    const user = require('../../Mediator').getUser()
    if (!user.is) {
      logger.warn('Tried subscribing to onReceivedReqs without authing')
    }
    require('./index').onCurrentHandshakeAddress(addr => {
      if (currentAddress === addr) {
        return
      }
      // Address changed: drop all cached request data for the old address.
      currentAddress = addr
      currAddressData = {}
      setReceivedReqsMap({})
      if (typeof addr === 'string') {
        require('../../Mediator')
          .getGun()
          .get(Key.HANDSHAKE_NODES)
          .get(addr)
          .open(listenerForAddr(addr))
      }
    }, user)
    Streams.onAvatar(react)
    Streams.onDisplayName(react)
    Streams.onPubToFeed(react)
    subbed = true
  }
  return () => {
    listeners.delete(cb)
  }
}
// Public surface: snapshot the current requests or subscribe to changes.
module.exports = {
  getReceivedReqs,
  onReceivedReqs
}

View file

@ -1,146 +0,0 @@
/** @format */
const debounce = require('lodash/debounce')
const logger = require('winston')
const size = require('lodash/size')
const Streams = require('../streams')
/**
* @typedef {import('../SimpleGUN').UserGUNNode} UserGUNNode
* @typedef {import('../SimpleGUN').GUNNode} GUNNode
* @typedef {import('../SimpleGUN').ISEA} ISEA
* @typedef {import('../SimpleGUN').ListenerData} ListenerData
* @typedef {import('shock-common').Schema.HandshakeRequest} HandshakeRequest
* @typedef {import('shock-common').Schema.Message} Message
* @typedef {import('shock-common').Schema.Outgoing} Outgoing
* @typedef {import('shock-common').Schema.PartialOutgoing} PartialOutgoing
* @typedef {import('shock-common').Schema.Chat} Chat
* @typedef {import('shock-common').Schema.ChatMessage} ChatMessage
* @typedef {import('shock-common').Schema.SimpleSentRequest} SimpleSentRequest
* @typedef {import('shock-common').Schema.SimpleReceivedRequest} SimpleReceivedRequest
*/
/**
* @typedef {(chats: SimpleSentRequest[]) => void} Listener
*/
/** @type {Set<Listener>} */
const listeners = new Set()
/** @type {SimpleSentRequest[]} */
let currentReqs = []
// Permanent listener that logs how many sent requests are cached.
listeners.add(() => {
  logger.info(`new sent reqs length: ${size(currentReqs)}`)
})
// Returns the current snapshot of simplified sent requests.
const getCurrentSentReqs = () => currentReqs
// any time any of the streams we use notifies us that it changed, we fire up
// react()
// Rebuilds the simplified sent-requests list from the stored requests and the
// address/feed/avatar/display-name streams, then notifies every listener.
// Debounced so bursts of stream updates coalesce into one recomputation.
const react = debounce(() => {
  /** @type {SimpleSentRequest[]} */
  const newReqs = []
  // reactive streams
  // maps a pk to its current handshake address
  const pubToHAddr = Streams.getAddresses()
  // a set or list containing copies of sent requests
  const storedReqs = Streams.getStoredReqs()
  // maps a pk to the last request sent to it (so old stored reqs are invalidated)
  const pubToLastSentReqID = Streams.getSentReqIDs()
  // maps a pk to a feed, messages if subbed and pk is pubbing, null /
  // 'disconnected' otherwise
  const pubToFeed = Streams.getPubToFeed()
  // pk to avatar
  const pubToAvatar = Streams.getPubToAvatar()
  // pk to display name
  const pubToDN = Streams.getPubToDn()
  logger.info(`pubToLastSentREqID length: ${size(pubToLastSentReqID)}`)
  for (const storedReq of storedReqs) {
    const { handshakeAddress, recipientPub, sentReqID, timestamp } = storedReq
    const currAddress = pubToHAddr[recipientPub]
    const lastReqID = pubToLastSentReqID[recipientPub]
    // invalidate if this stored request is not the last one sent to this
    // particular pk
    const isStale = typeof lastReqID !== 'undefined' && lastReqID !== sentReqID
    // invalidate if we are in a pub/sub state to this pk (handshake in place)
    const isConnected = Array.isArray(pubToFeed[recipientPub])
    if (isStale || isConnected) {
      // eslint-disable-next-line no-continue
      continue
    }
    // no address for this pk? let's ask the corresponding stream to sub to
    // gun.user(pk).get('currentAddr')
    if (typeof currAddress === 'undefined') {
      // eslint-disable-next-line no-empty-function
      Streams.onAddresses(() => {}, recipientPub)()
    }
    // no avatar for this pk? let's ask the corresponding stream to sub to
    // gun.user(pk).get('avatar')
    if (typeof pubToAvatar[recipientPub] === 'undefined') {
      // eslint-disable-next-line no-empty-function
      Streams.onAvatar(() => {}, recipientPub)()
    }
    // no display name for this pk? let's ask the corresponding stream to sub to
    // gun.user(pk).get('displayName')
    if (typeof pubToDN[recipientPub] === 'undefined') {
      // eslint-disable-next-line no-empty-function
      Streams.onDisplayName(() => {}, recipientPub)()
    }
    newReqs.push({
      id: sentReqID,
      recipientAvatar: null,
      recipientChangedRequestAddress:
        // if we haven't received the other's user current handshake address,
        // let's assume he hasn't changed it and that this request is still
        // valid
        typeof currAddress !== 'undefined' && handshakeAddress !== currAddress,
      recipientDisplayName: null,
      recipientPublicKey: recipientPub,
      timestamp
    })
  }
  currentReqs = newReqs
  listeners.forEach(l => l(currentReqs))
}, 750)
let subbed = false
/**
 * Subscribes `cb` to the simplified sent handshake requests. Fires
 * immediately with the current snapshot and, on the first call only, wires up
 * the underlying streams that feed react().
 * @param {Listener} cb
 * @returns {() => void} Unsubscribe function.
 */
const onSentReqs = cb => {
  listeners.add(cb)
  cb(currentReqs)
  if (!subbed) {
    // Recompute whenever any stream react() depends on reports a change.
    const sources = [
      Streams.onAddresses,
      Streams.onStoredReqs,
      Streams.onLastSentReqIDs,
      Streams.onPubToFeed,
      Streams.onAvatar,
      Streams.onDisplayName
    ]
    for (const subscribe of sources) {
      subscribe(react)
    }
    subbed = true
  }
  return () => {
    listeners.delete(cb)
  }
}
// Public surface: subscribe to, or snapshot, the sent-requests list.
module.exports = {
  onSentReqs,
  getCurrentSentReqs
}

View file

@ -1,89 +0,0 @@
/**
* @format
*/
//@ts-ignore
const Common = require('shock-common')
const isFinite = require('lodash/isFinite')
const shuffle = require('lodash/shuffle')
const R = require('ramda')
const { asyncFilter } = require('../../../../utils')
const Follows = require('./follows')
const Wall = require('./wall')
/**
 * Maps a global feed page number onto a (group index, per-group page) pair,
 * cycling through the public-key groups round-robin.
 * @param {number} numberOfPublicKeyGroups
 * @param {number} pageRequested
 * @returns {[ number , number ]} Tuple of [group index, page within group].
 */
const calculateWallRequest = (numberOfPublicKeyGroups, pageRequested) => {
  // thanks to sebassdc
  const groupIdx = (pageRequested - 1) % numberOfPublicKeyGroups
  const pageWithinGroup = Math.ceil(pageRequested / numberOfPublicKeyGroups)
  return [groupIdx, pageWithinGroup]
}
/**
 * Returns one page of the user's feed: posts pulled from the walls of the
 * users they follow, sorted newest first.
 * @param {number} page
 * @throws {TypeError}
 * @throws {RangeError}
 * @returns {Promise<any>}
 */
//@returns {Promise<Common.SchemaTypes.Post[]>}
const getFeedPage = async page => {
  if (!isFinite(page)) {
    throw new TypeError(`Please provide an actual number for [page]`)
  }
  if (page <= 0) {
    throw new RangeError(`Please provide only positive numbers for [page]`)
  }
  const subbedPublicKeys = Object.values(await Follows.currentFollows()).map(
    f => f.user
  )
  if (subbedPublicKeys.length === 0) {
    return []
  }
  // say there are 20 public keys total
  // page 1: page 1 from first 10 public keys
  // page 2: page 1 from second 10 public keys
  // page 3: page 2 from first 10 public keys
  // page 4: page 2 from second 10 public keys
  // etc
  // thanks to sebassdc (github)
  const pagedPublicKeys = R.splitEvery(10, shuffle(subbedPublicKeys))
  const [publicKeyGroupIdx, pageToRequest] = calculateWallRequest(
    pagedPublicKeys.length,
    page
  )
  const publicKeysRaw = pagedPublicKeys[publicKeyGroupIdx]
  const publicKeys = await asyncFilter(
    publicKeysRaw,
    // reject public keys for which the page to request would result in an out
    // of bounds error
    async pk => pageToRequest <= (await Wall.getWallTotalPages(pk))
  )
  const fetchedPages = await Promise.all(
    publicKeys.map(pk => Wall.getWallPage(pageToRequest, pk))
  )
  const fetchedPostsGroups = fetchedPages.map(wp => Object.values(wp.posts))
  const fetchedPosts = R.flatten(fetchedPostsGroups)
  // Sort newest-first by post date.
  const sortered = R.sort((a, b) => b.date - a.date, fetchedPosts)
  return sortered
}
// Public surface of the feed namespace.
module.exports = {
  getFeedPage
}

View file

@ -1,66 +0,0 @@
/**
* @format
*/
const Common = require('shock-common')
const Logger = require('winston')
const size = require('lodash/size')
const Utils = require('../utils')
const Key = require('../key')
/**
 * @typedef {Common.Schema.Follow} Follow
 */
/**
 * Loads the authed user's follows from gun, retrying until a plausibly
 * complete value arrives, and strips out entries that are not valid Follow
 * records.
 * @throws {TypeError}
 * @returns {Promise<Record<string, Common.Schema.Follow>>}
 */
exports.currentFollows = async () => {
  /**
   * @type {Record<string, Common.Schema.Follow>}
   */
  const raw = await Utils.tryAndWait(
    (_, user) =>
      new Promise(res =>
        // @ts-expect-error
        user.get(Key.FOLLOWS).load(res)
      ),
    v => {
      if (typeof v !== 'object' || v === null) {
        return true
      }
      // load sometimes returns an empty set on the first try
      if (size(v) === 0) {
        return true
      }
      // sometimes it returns empty sub objects
      return Object.values(v)
        .filter(Common.Schema.isObj)
        .some(obj => size(obj) === 0)
    }
  )
  if (typeof raw !== 'object' || raw === null) {
    Logger.error(
      `Expected user.follows to be an object but instead got: ${JSON.stringify(
        raw
      )}`
    )
    throw new TypeError('Could not get follows, not an object')
  }
  // Shallow-copy before deleting invalid entries so the raw value from gun
  // is not mutated.
  const clean = {
    ...raw
  }
  for (const [key, followOrNull] of Object.entries(clean)) {
    if (!Common.Schema.isFollow(followOrNull)) {
      delete clean[key]
    }
  }
  return clean
}

View file

@ -1,27 +1,19 @@
/**
* @format
*/
const Common = require('shock-common')
const Key = require('../key')
const Utils = require('../utils')
const Wall = require('./wall')
const Feed = require('./feed')
const User = require('./user')
const { size } = require('lodash')
/**
* @param {string} pub
* @returns {Promise<string>}
*/
exports.currentOrderAddress = async pub => {
const currAddr = await Utils.tryAndWait(gun =>
gun
const currAddr = await require('../../Mediator')
.getGun()
.user(pub)
.get(Key.CURRENT_ORDER_ADDRESS)
.then()
)
.specialThen()
if (typeof currAddr !== 'string') {
throw new TypeError('Expected user.currentOrderAddress to be an string')
@ -29,118 +21,3 @@ exports.currentOrderAddress = async pub => {
return currAddr
}
/**
* @param {string} pub
* @returns {Promise<string|null>}
*/
exports.userToIncomingID = async pub => {
const incomingID = await require('../../Mediator')
.getUser()
.get(Key.USER_TO_INCOMING)
.get(pub)
.then()
if (typeof incomingID === 'string') return incomingID
return null
}
/**
 * Assembles the authed user's profile (avatar, display name, bio, last-seen
 * timestamps and public key) from several gun nodes, retrying each read until
 * a plausible value arrives.
 * @returns {Promise<any>}
 */
//@returns {Promise<Common.SchemaTypes.User>}
const getMyUser = async () => {
  const oldProfile = await Utils.tryAndWait(
    (_, user) => new Promise(res => user.get(Key.PROFILE).load(res)),
    v => {
      if (typeof v !== 'object') {
        return true
      }
      if (v === null) {
        return true
      }
      // load sometimes returns an empty set on the first try
      return size(v) === 0
    }
  )
  const bio = await Utils.tryAndWait(
    (_, user) => user.get(Key.BIO).then(),
    v => typeof v !== 'string'
  )
  const lastSeenApp = await Utils.tryAndWait(
    (_, user) => user.get(Key.LAST_SEEN_APP).then(),
    v => typeof v !== 'number'
  )
  const lastSeenNode = await Utils.tryAndWait(
    (_, user) => user.get(Key.LAST_SEEN_NODE).then(),
    v => typeof v !== 'number'
  )
  const publicKey = await Utils.tryAndWait(
    (_, user) => Promise.resolve(user.is && user.is.pub),
    v => typeof v !== 'string'
  )
  //@ts-ignore
  /** @type {Common.SchemaTypes.User} */
  const u = {
    avatar: oldProfile.avatar,
    // @ts-ignore
    bio,
    displayName: oldProfile.displayName,
    // @ts-ignore
    lastSeenApp,
    // @ts-ignore
    lastSeenNode,
    // @ts-ignore
    publicKey
  }
  return u
}
/**
 * Fetches the profile (avatar and display name) of the user with the given
 * public key.
 * @param {string} publicKey
 */
const getUserInfo = async publicKey => {
  // Retry while the value is missing, null, or an empty set (gun's load
  // sometimes returns an empty set on the first try).
  const shouldRetry = v => typeof v !== 'object' || v === null || size(v) === 0
  const loadProfile = gun =>
    new Promise(res =>
      gun
        .user(publicKey)
        .get(Key.PROFILE)
        .load(res)
    )
  const userInfo = await Utils.tryAndWait(loadProfile, shouldRetry)
  return {
    publicKey,
    avatar: userInfo.avatar,
    displayName: userInfo.displayName
  }
}
// Public API of the users namespace: profile getters plus wall/feed paging.
module.exports.getMyUser = getMyUser
module.exports.getUserInfo = getUserInfo
module.exports.Follows = require('./follows')
module.exports.getWallPage = Wall.getWallPage
module.exports.getWallTotalPages = Wall.getWallTotalPages
module.exports.getFeedPage = Feed.getFeedPage
module.exports.getAnUser = User.getAnUser

View file

@ -1,129 +0,0 @@
/**
* @format
*/
const Common = require('shock-common')
const size = require('lodash/size')
const Key = require('../key')
const Utils = require('../utils')
/**
 * Assembles the profile (avatar, display name, bio, last-seen timestamps) of
 * the user with the given public key, substituting defaults when a field
 * never resolves.
 * @param {string} publicKey
 * @returns {Promise<any>}
 */
//@returns {Promise<Common.SchemaTypes.User>}
const getAnUser = async publicKey => {
  const oldProfile = await Utils.tryAndWait(
    (g, u) => {
      // Read from our own user node when asking about ourselves.
      const user = u._.sea.pub === publicKey ? u : g.user(publicKey)
      return new Promise(res => user.get(Key.PROFILE).load(res))
    },
    v => typeof v !== 'object'
  )
  const bio = await Utils.tryAndWait(
    (g, u) => {
      const user = u._.sea.pub === publicKey ? u : g.user(publicKey)
      return user.get(Key.BIO).then()
    },
    v => typeof v !== 'string'
  )
  const lastSeenApp = await Utils.tryAndWait(
    (g, u) => {
      const user = u._.sea.pub === publicKey ? u : g.user(publicKey)
      return user.get(Key.LAST_SEEN_APP).then()
    },
    v => typeof v !== 'number'
  )
  const lastSeenNode = await Utils.tryAndWait(
    (g, u) => {
      const user = u._.sea.pub === publicKey ? u : g.user(publicKey)
      return user.get(Key.LAST_SEEN_NODE).then()
    },
    v => typeof v !== 'number'
  )
  //@ts-ignore
  /** @type {Common.SchemaTypes.User} */
  const u = {
    avatar: oldProfile.avatar || null,
    // @ts-ignore
    bio: bio || null,
    displayName: oldProfile.displayName || null,
    // @ts-ignore
    lastSeenApp: lastSeenApp || 0,
    // @ts-ignore
    lastSeenNode: lastSeenNode || 0,
    // @ts-ignore
    publicKey
  }
  return u
}
module.exports.getAnUser = getAnUser
/**
 * Assembles the authed user's profile (avatar, display name, bio, last-seen
 * timestamps and public key) from several gun nodes, retrying each read until
 * a plausible value arrives.
 * NOTE(review): near-duplicate of getMyUser in the sibling index module —
 * consider consolidating into one implementation.
 * @returns {Promise<any>}
 */
//@returns {Promise<Common.SchemaTypes.User>}
const getMyUser = async () => {
  const oldProfile = await Utils.tryAndWait(
    (_, user) => new Promise(res => user.get(Key.PROFILE).load(res)),
    v => {
      if (typeof v !== 'object') {
        return true
      }
      if (v === null) {
        return true
      }
      // load sometimes returns an empty set on the first try
      return size(v) === 0
    }
  )
  const bio = await Utils.tryAndWait(
    (_, user) => user.get(Key.BIO).then(),
    v => typeof v !== 'string'
  )
  const lastSeenApp = await Utils.tryAndWait(
    (_, user) => user.get(Key.LAST_SEEN_APP).then(),
    v => typeof v !== 'number'
  )
  const lastSeenNode = await Utils.tryAndWait(
    (_, user) => user.get(Key.LAST_SEEN_NODE).then(),
    v => typeof v !== 'number'
  )
  const publicKey = await Utils.tryAndWait(
    (_, user) => Promise.resolve(user.is && user.is.pub),
    v => typeof v !== 'string'
  )
  //@ts-ignore
  /** @type {Common.SchemaTypes.User} */
  const u = {
    avatar: oldProfile.avatar,
    // @ts-ignore
    bio,
    displayName: oldProfile.displayName,
    // @ts-ignore
    lastSeenApp,
    // @ts-ignore
    lastSeenNode,
    // @ts-ignore
    publicKey
  }
  return u
}
module.exports.getMyUser = getMyUser

View file

@ -1,208 +0,0 @@
/**
* @format
*/
const Common = require('shock-common')
const pickBy = require('lodash/pickBy')
const size = require('lodash/size')
const mapValues = require('lodash/mapValues')
const Utils = require('../utils')
const Key = require('../key')
const User = require('./user')
/**
 * Reads the total number of wall pages for a user. Defaults to the
 * authed user when no public key is supplied.
 * @param {string=} publicKey
 * @returns {Promise<number>} 0 when the stored value is missing or not
 * a number.
 */
const getWallTotalPages = async publicKey => {
  const fetched = await Utils.tryAndWait(
    (gun, authedUser) => {
      // Read from the foreign user's node only when the requested key
      // differs from our own.
      const isForeign =
        Boolean(publicKey) && authedUser._.sea.pub !== publicKey
      const node = isForeign ? gun.user(publicKey) : authedUser
      return node
        .get(Key.WALL)
        .get(Key.NUM_OF_PAGES)
        .then()
    },
    v => typeof v !== 'number'
  )
  return typeof fetched === 'number' ? fetched : 0
}
/**
 * Fetch one page of a user's wall (defaults to the authed user).
 *
 * Pages are 1-indexed; negative pages count from the end (-1 is the
 * last page). Page 0 is invalid.
 * @param {number} page
 * @param {string=} publicKey
 * @throws {TypeError}
 * @throws {RangeError}
 * @returns {Promise<any>}
 */
////@returns {Promise<Common.SchemaTypes.WallPage>}
const getWallPage = async (page, publicKey) => {
  const totalPages = await getWallTotalPages(publicKey)
  if (page === 0) {
    throw new RangeError(
      `Page number cannot be zero, only positive and negative integers are allowed.`
    )
  }
  // Shape returned when there is nothing to fetch.
  const empty = {
    count: 0,
    posts: {}
  }
  if (totalPages === 0) {
    return empty
  }
  // Map the 1-indexed (or negative, from-the-end) page number onto the
  // 0-indexed page list.
  const actualPageIdx = page < 0 ? totalPages + page : page - 1
  if (actualPageIdx > totalPages - 1) {
    throw new RangeError(`Requested a page out of bounds`)
  }
  // Post count stored alongside the page; used to short-circuit empty pages.
  /**
   * @type {number}
   */
  // @ts-ignore
  const count = await Utils.tryAndWait(
    (g, u) => {
      /**
       * @type {import('../SimpleGUN').GUNNode}
       */
      let user = u
      if (publicKey && u._.sea.pub !== publicKey) {
        user = g.user(publicKey)
      }
      return user
        .get(Key.WALL)
        .get(Key.PAGES)
        .get(actualPageIdx.toString())
        .get(Key.COUNT)
        .then()
    },
    v => typeof v !== 'number'
  )
  if (count === 0) {
    return empty
  }
  /**
   * We just use it so Common.Schema.isWallPage passes.
   */
  const mockUser = await User.getMyUser()
  /*
   * @type {Common.SchemaTypes.WallPage}
   */
  //@ts-ignore
  const thePage = await Utils.tryAndWait(
    (g, u) => {
      /**
       * @type {import('../SimpleGUN').GUNNode}
       */
      let user = u
      if (publicKey && u._.sea.pub !== publicKey) {
        user = g.user(publicKey)
      }
      return new Promise(res => {
        // forces data fetch
        user
          .get(Key.WALL)
          .get(Key.PAGES)
          .get(actualPageIdx.toString())
          // @ts-ignore
          .load(() => {})
        // Second load() on the next tick resolves with the (hopefully
        // now cached) data.
        process.nextTick(() => {
          user
            .get(Key.WALL)
            .get(Key.PAGES)
            .get(actualPageIdx.toString())
            // @ts-ignore
            .load(res)
        })
      })
    },
    // Validator doubles as a normalizer: it mutates maybePage in place
    // (filtering nulls, injecting ids/author) before deciding whether to
    // retry. Returning true makes tryAndWait retry.
    maybePage => {
      // sometimes load() returns an empty object on the first call
      if (size(/** @type {any} */ (maybePage)) === 0) {
        return true
      }
      const page = /** @type {Common.Schema.WallPage} */ (maybePage)
      if (typeof page.count !== 'number') {
        return true
      }
      // removes 'unused' initializer and aborted writes
      page.posts = pickBy(page.posts, v => v !== null)
      // .load() sometimes doesn't load all data on first call
      if (size(page.posts) === 0) {
        return true
      }
      // Give ids based on keys
      page.posts = mapValues(page.posts, (v, k) => ({
        ...v,
        id: k
      }))
      page.posts = mapValues(page.posts, v => ({
        ...v,
        // isWallPage() would otherwise not pass
        author: mockUser
      }))
      return !Common.Schema.isWallPage(page)
    }
  )
  // NOTE(review): this spread is shallow — clean.posts is the SAME
  // object as thePage.posts, so the deletions below also mutate thePage.
  const clean = {
    ...thePage
  }
  for (const [key, post] of Object.entries(clean.posts)) {
    // delete unsuccessful writes
    if (post === null) {
      delete clean.posts[key]
      clean.count--
    } else {
      // Replace the mock author with the real (fetched) user record.
      post.author = publicKey
        ? // eslint-disable-next-line no-await-in-loop
          await User.getAnUser(publicKey)
        : // eslint-disable-next-line no-await-in-loop
          await User.getMyUser()
      post.id = key
    }
  }
  if (!Common.Schema.isWallPage(clean)) {
    throw new Error(
      `Fetched page not a wall page, instead got: ${JSON.stringify(clean)}`
    )
  }
  return clean
}
module.exports = {
  getWallTotalPages,
  getWallPage
}

View file

@ -9,12 +9,10 @@
* tasks accept factories that are homonymous to the events on this same module.
*/
const onAcceptedRequests = require('./onAcceptedRequests')
const onOrders = require('./onOrders')
const lastSeenNode = require('./lastSeenNode')
module.exports = {
onAcceptedRequests,
onOrders,
lastSeenNode
}

View file

@ -2,7 +2,7 @@
* @format
*/
const logger = require('winston')
const logger = require('../../../../config/log')
const {
Constants: {
@ -11,12 +11,13 @@ const {
}
} = require('shock-common')
const Key = require('../key')
/// <reference path="../../../utils/GunSmith/Smith.ts" />
/**
* @typedef {import('../SimpleGUN').GUNNode} GUNNode
* @typedef {import('../SimpleGUN').ListenerData} ListenerData
* @typedef {Smith.GunSmithNode} GUNNode
* @typedef {GunT.ListenerData} ListenerData
* @typedef {import('../SimpleGUN').ISEA} ISEA
* @typedef {import('../SimpleGUN').UserGUNNode} UserGUNNode
* @typedef {Smith.UserSmithNode} UserGUNNode
*/
/**
@ -26,27 +27,34 @@ const Key = require('../key')
*/
const lastSeenNode = user => {
if (!user.is) {
logger.warn('onOrders() -> tried to sub without authing')
logger.warn('lastSeenNode() -> tried to sub without authing')
throw new Error(ErrorCode.NOT_AUTH)
}
setInterval(() => {
if (user.is) {
user.get(Key.LAST_SEEN_NODE).put(Date.now(), ack => {
if (ack.err && typeof ack.err !== 'number') {
logger.error(`Error inside lastSeenNode job: ${ack.err}`)
}
})
let gotLatestProfileAck = true
setInterval(() => {
if (!user.is) {
return
}
if (!gotLatestProfileAck) {
logger.error(`lastSeenNode profile job: didnt get latest ack`)
return
}
gotLatestProfileAck = false
user
.get(Key.PROFILE)
.get(Key.LAST_SEEN_NODE)
.put(Date.now(), ack => {
if (ack.err && typeof ack.err !== 'number') {
logger.error(`Error inside lastSeenNode job: ${ack.err}`)
if (
ack.err &&
typeof ack.err !== 'number' &&
typeof ack.err !== 'object'
) {
logger.error(`Error inside lastSeenNode profile job: ${ack.err}`)
}
gotLatestProfileAck = true
})
}
}, LAST_SEEN_NODE_INTERVAL)
}

View file

@ -1,191 +0,0 @@
/**
* @format
*/
const logger = require('winston')
const {
Constants: { ErrorCode },
Schema
} = require('shock-common')
const size = require('lodash/size')
const Key = require('../key')
const Utils = require('../utils')
/**
* @typedef {import('../SimpleGUN').GUNNode} GUNNode
* @typedef {import('../SimpleGUN').ISEA} ISEA
* @typedef {import('../SimpleGUN').UserGUNNode} UserGUNNode
*/
// Monotonic counter bumped on each invocation; printed by the handler
// below to help spot duplicated job subscriptions (memory-leak debugging).
let procid = 0
/**
 * Subscribes to our stored (outgoing) handshake requests. For each one it
 * decrypts the recipient/address/request-id, watches the corresponding
 * handshake node, and — once the recipient's feed id is visible — records
 * the encrypted incoming-feed id and nulls out the stored request.
 * @throws {Error} NOT_AUTH
 * @param {UserGUNNode} user
 * @param {ISEA} SEA
 * @returns {void}
 */
const onAcceptedRequests = (user, SEA) => {
  if (!user.is) {
    logger.warn('onAcceptedRequests() -> tried to sub without authing')
    throw new Error(ErrorCode.NOT_AUTH)
  }
  procid++
  user
    .get(Key.STORED_REQS)
    .map()
    .once(async (storedReq, id) => {
      logger.info(
        `------------------------------------\nPROCID:${procid} (used for debugging memory leaks in jobs)\n---------------------------------------`
      )
      const mySecret = require('../../Mediator').getMySecret()
      try {
        if (!Schema.isStoredRequest(storedReq)) {
          throw new Error(
            'Stored request not an StoredRequest, instead got: ' +
              JSON.stringify(storedReq) +
              ' this can be due to nulling out an old request (if null) or something else happened (please look at the output)'
          )
        }
        // get the recipient pub from the stored request to avoid an attacker
        // overwriting the handshake request in the root graph
        const recipientPub = await SEA.decrypt(storedReq.recipientPub, mySecret)
        if (typeof recipientPub !== 'string') {
          throw new TypeError(
            `Expected storedReq.recipientPub to be an string, instead got: ${recipientPub}`
          )
        }
        // Nothing to do if the handshake with this recipient already completed.
        if (await Utils.successfulHandshakeAlreadyExists(recipientPub)) {
          return
        }
        const requestAddress = await SEA.decrypt(
          storedReq.handshakeAddress,
          mySecret
        )
        if (typeof requestAddress !== 'string') {
          throw new TypeError()
        }
        const sentReqID = await SEA.decrypt(storedReq.sentReqID, mySecret)
        if (typeof sentReqID !== 'string') {
          throw new TypeError()
        }
        const latestReqSentID = await Utils.recipientPubToLastReqSentID(
          recipientPub
        )
        // Only the most recently sent request to this recipient matters;
        // older ones are ignored.
        const isStaleRequest = latestReqSentID !== sentReqID
        if (isStaleRequest) {
          return
        }
        const gun = require('../../Mediator').getGun()
        // NOTE(review): this shadows the `user` parameter — presumably
        // intentional, so writes below always hit the CURRENT authed
        // node rather than the one captured at subscribe time; confirm.
        const user = require('../../Mediator').getUser()
        const recipientEpub = await Utils.pubToEpub(recipientPub)
        const ourSecret = await SEA.secret(recipientEpub, user._.sea)
        await /** @type {Promise<void>} */ (new Promise((res, rej) => {
          gun
            .get(Key.HANDSHAKE_NODES)
            .get(requestAddress)
            .get(sentReqID)
            .on(async sentReq => {
              if (!Schema.isHandshakeRequest(sentReq)) {
                rej(
                  new Error(
                    'sent request found in handshake node not a handshake request'
                  )
                )
                return
              }
              // The response can be decrypted with the same secret regardless
              // of who wrote to it last (see HandshakeRequest definition). This
              // could be our feed ID for the recipient, or the recipient's feed
              // id if he accepted the request.
              const feedID = await SEA.decrypt(sentReq.response, ourSecret)
              if (typeof feedID !== 'string') {
                throw new TypeError("typeof feedID !== 'string'")
              }
              logger.info(`onAcceptedRequests -> decrypted feed ID: ${feedID}`)
              logger.info(
                'Will now try to access the other users outgoing feed'
              )
              const maybeFeedOnRecipientsOutgoings = await Utils.tryAndWait(
                gun =>
                  new Promise(res => {
                    gun
                      .user(recipientPub)
                      .get(Key.OUTGOINGS)
                      .get(feedID)
                      .once(feed => {
                        res(feed)
                      })
                  }),
                // @ts-ignore
                v => size(v) === 0
              )
              // The feed id appearing on the recipient's outgoings means
              // they accepted the request.
              const feedIDExistsOnRecipientsOutgoings =
                typeof maybeFeedOnRecipientsOutgoings === 'object' &&
                maybeFeedOnRecipientsOutgoings !== null
              if (!feedIDExistsOnRecipientsOutgoings) {
                return
              }
              // Persist the incoming feed id for this contact, encrypted
              // to ourselves.
              const encryptedForMeIncomingID = await SEA.encrypt(
                feedID,
                mySecret
              )
              await /** @type {Promise<void>} */ (new Promise((res, rej) => {
                user
                  .get(Key.USER_TO_INCOMING)
                  .get(recipientPub)
                  .put(encryptedForMeIncomingID, ack => {
                    if (ack.err && typeof ack.err !== 'number') {
                      rej(new Error(ack.err))
                    } else {
                      res()
                    }
                  })
              }))
              // Null out the stored request now that it has been handled.
              await /** @type {Promise<void>} */ (new Promise((res, rej) => {
                user
                  .get(Key.STORED_REQS)
                  .get(id)
                  .put(null, ack => {
                    if (ack.err && typeof ack.err !== 'number') {
                      rej(new Error(ack.err))
                    } else {
                      res()
                    }
                  })
              }))
              // ensure this listeners gets called at least once
              res()
            })
        }))
      } catch (err) {
        logger.warn(`Jobs.onAcceptedRequests() -> ${err.message}`)
        logger.error(err)
      }
    })
}
module.exports = onAcceptedRequests

View file

@ -2,7 +2,7 @@
* @format
*/
// @ts-check
const logger = require('winston')
const logger = require('../../../../config/log')
const isFinite = require('lodash/isFinite')
const isNumber = require('lodash/isNumber')
const isNaN = require('lodash/isNaN')
@ -15,9 +15,8 @@ const SchemaManager = require('../../../schema')
const LightningServices = require('../../../../utils/lightningServices')
const Key = require('../key')
const Utils = require('../utils')
const Gun = require('gun')
const { selfContentToken, enrollContentTokens } = require('../../../seed')
/// <reference path="../../../utils/GunSmith/Smith.ts" />
const TipForwarder = require('../../../tipsCallback')
const getUser = () => require('../../Mediator').getUser()
@ -28,10 +27,10 @@ const getUser = () => require('../../Mediator').getUser()
const ordersProcessed = new Set()
/**
* @typedef {import('../SimpleGUN').GUNNode} GUNNode
* @typedef {import('../SimpleGUN').ListenerData} ListenerData
* @typedef {Smith.GunSmithNode} GUNNode
* @typedef {GunT.ListenerData} ListenerData
* @typedef {import('../SimpleGUN').ISEA} ISEA
* @typedef {import('../SimpleGUN').UserGUNNode} UserGUNNode
* @typedef {Smith.UserSmithNode} UserGUNNode
*/
/**
@ -89,32 +88,41 @@ const listenerForAddr = (addr, SEA) => async (order, orderID) => {
try {
if (addr !== currentOrderAddr) {
logger.info(
orderID,
`order address: ${addr} invalidated (current address: ${currentOrderAddr})`
)
return
}
if (!Schema.isOrder(order)) {
logger.info(`Expected an order instead got: ${JSON.stringify(order)}`)
// Was recycled
if (order === null) {
return
}
if (ordersProcessed.has(orderID)) {
logger.warn(
`skipping already processed order: ${orderID}, this means orders are being processed twice!`
if (!Schema.isOrder(order)) {
logger.info(
orderID,
`Expected an order instead got: ${JSON.stringify(order)}`
)
return
}
// Gun might callback several times for the same order, avoid dupe
// processing.
if (ordersProcessed.has(orderID)) {
return
}
//const listenerStartTime = performance.now()
ordersProcessed.add(orderID)
logger.info(
`onOrders() -> processing order: ${orderID} -- ${JSON.stringify(
order
)} -- addr: ${addr}`
)
if (Date.now() - order.timestamp > 66000) {
logger.info('Not processing old order', orderID)
return
}
logger.info('processing order ', orderID)
const alreadyAnswered = await getUser()
.get(Key.ORDER_TO_RESPONSE)
@ -122,10 +130,12 @@ const listenerForAddr = (addr, SEA) => async (order, orderID) => {
.then()
if (alreadyAnswered) {
logger.info('this order is already answered, quitting')
logger.info(orderID, 'alreadyAnswered')
return
}
logger.info(orderID, ' was not answered, will now answer')
const senderEpub = await Utils.pubToEpub(order.from)
const secret = await SEA.secret(senderEpub, getUser()._.sea)
@ -138,19 +148,19 @@ const listenerForAddr = (addr, SEA) => async (order, orderID) => {
if (!isNumber(amount)) {
throw new TypeError(
`Could not parse decrypted amount as a number, not a number?, decryptedAmount: ${decryptedAmount}`
`${orderID} Could not parse decrypted amount as a number, not a number?, decryptedAmount: ${decryptedAmount}`
)
}
if (isNaN(amount)) {
throw new TypeError(
`Could not parse decrypted amount as a number, got NaN, decryptedAmount: ${decryptedAmount}`
`${orderID} Could not parse decrypted amount as a number, got NaN, decryptedAmount: ${decryptedAmount}`
)
}
if (!isFinite(amount)) {
throw new TypeError(
`Amount was correctly decrypted, but got a non finite number, decryptedAmount: ${decryptedAmount}`
`${orderID} Amount was correctly decrypted, but got a non finite number, decryptedAmount: ${decryptedAmount}`
)
}
const mySecret = require('../../Mediator').getMySecret()
@ -163,32 +173,34 @@ const listenerForAddr = (addr, SEA) => async (order, orderID) => {
*/
let serviceOrderContentSeedInfo = null //in case the service is of type 'torrentSeed' this is {seedUrl,seedToken}, can be omitted, in that case, it will be taken from env
if (order.targetType === 'service') {
console.log('General Service')
logger.info(orderID, 'General Service')
const { ackInfo: serviceID } = order
console.log('ACK INFO')
console.log(serviceID)
logger.info(orderID, 'ACK INFO')
logger.info(orderID, serviceID)
if (!Common.isPopulatedString(serviceID)) {
throw new TypeError(`no serviceID provided to orderAck`)
throw new TypeError(`${orderID} no serviceID provided to orderAck`)
}
const selectedService = await new Promise(res => {
getUser()
const selectedService = await getUser()
.get(Key.OFFERED_SERVICES)
.get(serviceID)
.load(res)
})
console.log(selectedService)
if (!selectedService) {
throw new TypeError(`invalid serviceID provided to orderAck`)
.then()
logger.info(orderID, selectedService)
if (!Common.isObj(selectedService)) {
throw new TypeError(
`${orderID} invalid serviceID provided to orderAck or service is not an object`
)
}
const {
serviceType,
servicePrice,
serviceSeedUrl: encSeedUrl, //=
serviceSeedToken: encSeedToken //=
} = selectedService
} = /** @type {Record<string, any>} */ (selectedService)
if (Number(amount) !== Number(servicePrice)) {
throw new TypeError(
`service price mismatch ${amount} : ${servicePrice}`
`${orderID} service price mismatch ${amount} : ${servicePrice}`
)
}
if (serviceType === 'torrentSeed') {
@ -209,23 +221,19 @@ const listenerForAddr = (addr, SEA) => async (order, orderID) => {
private: true
}
logger.info(
`onOrders() -> Will now create an invoice : ${JSON.stringify(invoiceReq)}`
)
const invoice = await _addInvoice(invoiceReq)
logger.info(
'onOrders() -> Successfully created the invoice, will now encrypt it'
`${orderID} onOrders() -> Successfully created the invoice, will now encrypt it`
)
const encInvoice = await SEA.encrypt(invoice.payment_request, secret)
logger.info(
`onOrders() -> Will now place the encrypted invoice in order to response usergraph: ${addr}`
`${orderID} onOrders() -> Will now place the encrypted invoice in order to response usergraph: ${addr}`
)
//@ts-expect-error
const ackNode = Gun.text.random()
const ackNode = Utils.gunID()
/** @type {import('shock-common').Schema.OrderResponse} */
const orderResponse = {
@ -240,10 +248,14 @@ const listenerForAddr = (addr, SEA) => async (order, orderID) => {
.get(orderID)
// @ts-expect-error
.put(orderResponse, ack => {
if (ack.err && typeof ack.err !== 'number') {
if (
ack.err &&
typeof ack.err !== 'number' &&
typeof ack.err !== 'object'
) {
rej(
new Error(
`Error saving encrypted invoice to order to response usergraph: ${ack}`
`${orderID} Error saving encrypted invoice to order to response usergraph: ${ack}`
)
)
} else {
@ -255,38 +267,76 @@ const listenerForAddr = (addr, SEA) => async (order, orderID) => {
//logger.info(`[PERF] Added invoice to GunDB in ${invoicePutEndTime}ms`)
/**
*
* @param {Common.Schema.InvoiceWhenListed & {r_hash:Buffer,payment_addr:string}} paidInvoice
* @param {Common.Schema.InvoiceWhenListed & {r_hash:Buffer,payment_addr:Buffer}} paidInvoice
*/
const invoicePaidCb = async paidInvoice => {
console.log('INVOICE PAID')
logger.info(orderID, 'INVOICE PAID')
// Recycle
require('../../Mediator')
.getGun()
.get('orderNodes')
.get(addr)
.get(orderID)
.put(null)
let breakError = null
let orderMetadata //eslint-disable-line init-declarations
const hashString = paidInvoice.r_hash.toString('hex')
const {
amt_paid_sat: amt,
add_index: addIndex,
payment_addr: paymentAddr
payment_addr
} = paidInvoice
const paymentAddr = payment_addr.toString('hex')
const orderType = serviceOrderType || order.targetType
const { ackInfo } = order //a string representing what has been requested
switch (orderType) {
case 'tip': {
const postID = ackInfo
if (!Common.isPopulatedString(postID)) {
breakError = 'invalid ackInfo provided for postID'
breakError = orderID + ' invalid ackInfo provided for postID'
break //create the coordinate, but stop because of the invalid id
}
getUser()
.get('postToTipCount')
.get(Key.POSTS_NEW)
.get(postID)
.set(null) // each item in the set is a tip
.get('tipsSet')
.set(amt) // each item in the set is a tip
TipForwarder.notifySocketIfAny(
postID,
order.from,
'TIPPED YOU',
paidInvoice.memo || 'TIPPED YOU',
amt + ' sats'
)
const ackData = { tippedPost: postID }
const toSend = JSON.stringify(ackData)
const encrypted = await SEA.encrypt(toSend, secret)
const ordResponse = {
type: 'orderAck',
response: encrypted
}
await new Promise((res, rej) => {
getUser()
.get(Key.ORDER_TO_RESPONSE)
.get(ackNode)
.put(ordResponse, ack => {
if (
ack.err &&
typeof ack.err !== 'number' &&
typeof ack.err !== 'object'
) {
rej(
new Error(
`${orderID} Error saving encrypted orderAck to order to response usergraph: ${ack}`
)
)
} else {
res(null)
}
})
})
orderMetadata = JSON.stringify(ackData)
break
}
case 'spontaneousPayment': {
@ -294,24 +344,28 @@ const listenerForAddr = (addr, SEA) => async (order, orderID) => {
break
}
case 'contentReveal': {
console.log('cONTENT REVEAL')
logger.info(orderID, 'CONTENT REVEAL')
//assuming digital product that only requires to be unlocked
const postID = ackInfo
console.log('ACK INFO')
console.log(ackInfo)
logger.info(orderID, 'ACK INFO')
logger.info(ackInfo)
if (!Common.isPopulatedString(postID)) {
breakError = 'invalid ackInfo provided for postID'
break //create the coordinate, but stop because of the invalid id
}
console.log('IS STRING')
const selectedPost = await new Promise(res => {
getUser()
logger.info(orderID, 'IS STRING')
const selectedPost = /** @type {Record<string, any>} */ (await getUser()
.get(Key.POSTS_NEW)
.get(postID)
.load(res)
})
console.log('LOAD ok')
console.log(selectedPost)
.then())
const selectedPostContent = /** @type {Record<string, any>} */ (await getUser()
.get(Key.POSTS_NEW)
.get(postID)
.get(Key.CONTENT_ITEMS)
.then())
logger.info(orderID, 'LOAD ok')
logger.info(selectedPost)
if (
!selectedPost ||
!selectedPost.status ||
@ -320,15 +374,15 @@ const listenerForAddr = (addr, SEA) => async (order, orderID) => {
breakError = 'ackInfo provided does not correspond to a valid post'
break //create the coordinate, but stop because of the invalid post
}
console.log('IS POST')
logger.info(orderID, 'IS POST')
/**
* @type {Record<string,string>} <contentID,decryptedRef>
*/
const contentsToSend = {}
console.log('SECRET OK')
logger.info(orderID, 'SECRET OK')
let privateFound = false
await Common.Utils.asyncForEach(
Object.entries(selectedPost.contentItems),
Object.entries(selectedPostContent),
async ([contentID, item]) => {
if (
item.type !== 'image/embedded' &&
@ -356,14 +410,18 @@ const listenerForAddr = (addr, SEA) => async (order, orderID) => {
type: 'orderAck',
response: encrypted
}
console.log('RES READY')
logger.info(orderID, 'RES READY')
await new Promise((res, rej) => {
getUser()
.get(Key.ORDER_TO_RESPONSE)
.get(ackNode)
.put(ordResponse, ack => {
if (ack.err && typeof ack.err !== 'number') {
if (
ack.err &&
typeof ack.err !== 'number' &&
typeof ack.err !== 'object'
) {
rej(
new Error(
`Error saving encrypted orderAck to order to response usergraph: ${ack}`
@ -374,12 +432,12 @@ const listenerForAddr = (addr, SEA) => async (order, orderID) => {
}
})
})
console.log('RES SENT CONTENT')
orderMetadata = JSON.stringify(ordResponse)
logger.info(orderID, 'RES SENT CONTENT')
orderMetadata = JSON.stringify(ackData)
break
}
case 'torrentSeed': {
console.log('TORRENT')
logger.info(orderID, 'TORRENT')
const numberOfTokens = Number(ackInfo) || 1
const seedInfo = selfContentToken()
if (!seedInfo && !serviceOrderContentSeedInfo) {
@ -396,7 +454,7 @@ const listenerForAddr = (addr, SEA) => async (order, orderID) => {
numberOfTokens,
seedInfoReady
)
console.log('RES SEED OK')
logger.info(orderID, 'RES SEED OK')
const ackData = { seedUrl, tokens, ackInfo }
const toSend = JSON.stringify(ackData)
const encrypted = await SEA.encrypt(toSend, secret)
@ -404,13 +462,17 @@ const listenerForAddr = (addr, SEA) => async (order, orderID) => {
type: 'orderAck',
response: encrypted
}
console.log('RES SEED SENT')
logger.info(orderID, 'RES SEED SENT')
await new Promise((res, rej) => {
getUser()
.get(Key.ORDER_TO_RESPONSE)
.get(ackNode)
.put(serviceResponse, ack => {
if (ack.err && typeof ack.err !== 'number') {
if (
ack.err &&
typeof ack.err !== 'number' &&
typeof ack.err !== 'object'
) {
rej(
new Error(
`Error saving encrypted orderAck to order to response usergraph: ${ack}`
@ -421,8 +483,8 @@ const listenerForAddr = (addr, SEA) => async (order, orderID) => {
}
})
})
console.log('RES SENT SEED')
orderMetadata = JSON.stringify(serviceResponse)
logger.info(orderID, 'RES SENT SEED')
orderMetadata = JSON.stringify(ackData)
break
}
case 'other': //not implemented yet but save them as a coordinate anyways
@ -451,17 +513,18 @@ const listenerForAddr = (addr, SEA) => async (order, orderID) => {
throw new Error(breakError)
}
}
console.log('WAITING INVOICE TO BE PAID')
logger.info(orderID, 'Waiting for invoice to be paid for order ' + orderID)
new Promise(res => SchemaManager.addListenInvoice(invoice.r_hash, res))
.then(invoicePaidCb)
.catch(err => {
logger.error(
orderID,
`error inside onOrders, orderAddr: ${addr}, orderID: ${orderID}, order: ${JSON.stringify(
order
)}`
)
logger.error(err)
console.log(err)
logger.error(orderID, err)
logger.info(orderID, err)
/** @type {import('shock-common').Schema.OrderResponse} */
const orderResponse = {
@ -474,21 +537,27 @@ const listenerForAddr = (addr, SEA) => async (order, orderID) => {
.get(orderID)
// @ts-expect-error
.put(orderResponse, ack => {
if (ack.err && typeof ack.err !== 'number') {
if (
ack.err &&
typeof ack.err !== 'number' &&
typeof ack.err !== 'object'
) {
logger.error(
orderID,
`Error saving encrypted invoice to order to response usergraph: ${ack}`
)
}
})
})
} catch (err) {
} catch (/** @type {any} */ err) {
logger.error(
orderID,
`error inside onOrders, orderAddr: ${addr}, orderID: ${orderID}, order: ${JSON.stringify(
order
)}`
)
logger.error(err)
console.log(err)
logger.error(orderID, err)
logger.info(orderID, err)
/** @type {import('shock-common').Schema.OrderResponse} */
const orderResponse = {
@ -501,8 +570,13 @@ const listenerForAddr = (addr, SEA) => async (order, orderID) => {
.get(orderID)
// @ts-expect-error
.put(orderResponse, ack => {
if (ack.err && typeof ack.err !== 'number') {
if (
ack.err &&
typeof ack.err !== 'number' &&
typeof ack.err !== 'object'
) {
logger.error(
orderID,
`Error saving encrypted invoice to order to response usergraph: ${ack}`
)
}
@ -530,6 +604,11 @@ const onOrders = (user, gun, SEA) => {
return
}
if (currentOrderAddr === addr) {
// Already subscribed
return
}
currentOrderAddr = addr
logger.info(`listening to address: ${addr}`)

View file

@ -1,56 +0,0 @@
/** @format */
const logger = require('winston')
const size = require('lodash/size')
const Key = require('../key')
/**
* @typedef {Record<string, string|null|undefined>} Addresses
*/
/** @type {Addresses} */
const pubToAddress = {}
/** @type {Set<() => void>} */
const listeners = new Set()
listeners.add(() => {
logger.info(`pubToAddress length: ${size(pubToAddress)}`)
})
const notify = () => listeners.forEach(l => l())
/** @type {Set<string>} */
const subbedPublicKeys = new Set()
/**
* @param {() => void} cb
* @param {string=} pub
*/
const onAddresses = (cb, pub) => {
listeners.add(cb)
cb()
if (pub && subbedPublicKeys.add(pub)) {
require('../../Mediator')
.getGun()
.user(pub)
.get(Key.CURRENT_HANDSHAKE_ADDRESS)
.on(addr => {
if (typeof addr === 'string' || addr === null) {
pubToAddress[pub] = addr
} else {
pubToAddress[pub] = null
}
notify()
})
}
return () => {
listeners.delete(cb)
}
}
const getAddresses = () => pubToAddress
module.exports = {
onAddresses,
getAddresses
}

View file

@ -1,196 +1,6 @@
/** @format */
const { Schema, Utils: CommonUtils } = require('shock-common')
const Key = require('../key')
const Utils = require('../utils')
/**
 * @typedef {Record<string, string|null|undefined>} Avatars
 * @typedef {(avatars: Avatars) => void} AvatarListener
 */
/** @type {Avatars} */
const pubToAvatar = {}
const getPubToAvatar = () => pubToAvatar
/** @type {Set<AvatarListener>} */
const avatarListeners = new Set()
const notifyAvatarListeners = () => {
  avatarListeners.forEach(l => l(pubToAvatar))
}
/** @type {Set<string>} */
const pubsWithAvatarListeners = new Set()
/**
 * Subscribe to avatar updates. `cb` fires immediately with the current
 * map and again whenever any subscribed pub's avatar changes. When `pub`
 * is given, lazily (once per pub) opens a gun subscription feeding
 * `pubToAvatar`.
 * @param {AvatarListener} cb
 * @param {string=} pub
 * @returns {() => void} Unsubscribe (removes `cb` only).
 */
const onAvatar = (cb, pub) => {
  avatarListeners.add(cb)
  cb(pubToAvatar)
  // Bug fix: Set#add() returns the set itself (always truthy), so the
  // original guard `pub && pubsWithAvatarListeners.add(pub)` attached a
  // new gun listener on EVERY call. Subscribe only once per pub.
  if (pub && !pubsWithAvatarListeners.has(pub)) {
    pubsWithAvatarListeners.add(pub)
    require('../../Mediator')
      .getGun()
      .user(pub)
      .get(Key.PROFILE_BINARY)
      .get(Key.AVATAR)
      .on(av => {
        // Normalize: empty string and non-strings become null.
        if (typeof av === 'string' || av === null) {
          pubToAvatar[pub] = av || null
        } else {
          pubToAvatar[pub] = null
        }
        notifyAvatarListeners()
      })
  }
  return () => {
    avatarListeners.delete(cb)
  }
}
/**
 * @typedef {Record<string, string|null|undefined>} DisplayNames
 * @typedef {(avatars: Avatars) => void} DisplayNameListener
 */
/** @type {DisplayNames} */
const pubToDisplayName = {}
const getPubToDn = () => pubToDisplayName
/** @type {Set<DisplayNameListener>} */
const displayNameListeners = new Set()
const notifyDisplayNameListeners = () => {
  displayNameListeners.forEach(l => l(pubToDisplayName))
}
/** @type {Set<string>} */
const pubsWithDisplayNameListeners = new Set()
/**
 * Subscribe to display-name updates. `cb` fires immediately with the
 * current map and again whenever any subscribed pub's name changes. When
 * `pub` is given, lazily (once per pub) opens a gun subscription feeding
 * `pubToDisplayName`.
 * @param {DisplayNameListener} cb
 * @param {string=} pub
 * @returns {() => void} Unsubscribe (removes `cb` only).
 */
const onDisplayName = (cb, pub) => {
  displayNameListeners.add(cb)
  cb(pubToDisplayName)
  // Bug fix: Set#add() returns the set itself (always truthy), so the
  // original guard `pub && pubsWithDisplayNameListeners.add(pub)`
  // attached a new gun listener on EVERY call. Subscribe once per pub.
  if (pub && !pubsWithDisplayNameListeners.has(pub)) {
    pubsWithDisplayNameListeners.add(pub)
    require('../../Mediator')
      .getGun()
      .user(pub)
      .get(Key.PROFILE)
      .get(Key.DISPLAY_NAME)
      .on(dn => {
        // Normalize: empty string and non-strings become null.
        if (typeof dn === 'string' || dn === null) {
          pubToDisplayName[pub] = dn || null
        } else {
          pubToDisplayName[pub] = null
        }
        notifyDisplayNameListeners()
      })
  }
  return () => {
    displayNameListeners.delete(cb)
  }
}
/**
 * @typedef {import('shock-common').Schema.StoredRequest} StoredRequest
 * @typedef {(reqs: StoredRequest[]) => void} StoredRequestsListener
 */
/** @type {Set<StoredRequestsListener>} */
const storedRequestsListeners = new Set()
/**
 * Raw (still encrypted) requests buffered from gun, waiting to be
 * decrypted by processStoredReqs().
 * @type {StoredRequest[]}
 */
let encryptedStoredReqs = []
/**
 * Last fully decrypted batch; what subscribers receive.
 * @type {StoredRequest[]}
 */
let currentStoredReqs = []
const getStoredReqs = () => currentStoredReqs
// Decrypts the buffered batch and notifies subscribers. The buffer is
// cleared up front so a gun update arriving mid-decrypt starts a fresh
// batch instead of being re-processed.
const processStoredReqs = async () => {
  const ereqs = encryptedStoredReqs
  encryptedStoredReqs = []
  const mySecret = await Utils.mySecret()
  const SEA = require('../../Mediator').mySEA
  const finalReqs = await CommonUtils.asyncMap(ereqs, async er => {
    /** @type {StoredRequest} */
    const r = {
      handshakeAddress: await SEA.decrypt(er.handshakeAddress, mySecret),
      recipientPub: await SEA.decrypt(er.recipientPub, mySecret),
      sentReqID: await SEA.decrypt(er.sentReqID, mySecret),
      timestamp: er.timestamp
    }
    return r
  })
  currentStoredReqs = finalReqs
  storedRequestsListeners.forEach(l => l(currentStoredReqs))
}
// Ensures the gun .open() subscription below is created only once.
let storedReqsSubbed = false
/**
 * Subscribe to the decrypted stored-requests list. Fires `cb` once
 * immediately with the current batch, then again after each decrypt.
 * @param {StoredRequestsListener} cb
 */
const onStoredReqs = cb => {
  storedRequestsListeners.add(cb)
  if (!storedReqsSubbed) {
    require('../../Mediator')
      .getUser()
      .get(Key.STORED_REQS)
      .open(d => {
        if (typeof d === 'object' && d !== null) {
          //@ts-ignore
          encryptedStoredReqs = /** @type {StoredRequest[]} */ (Object.values(
            d
          ).filter(i => Schema.isStoredRequest(i)))
        }
        processStoredReqs()
      })
    storedReqsSubbed = true
  }
  cb(currentStoredReqs)
  return () => {
    storedRequestsListeners.delete(cb)
  }
}
// Public surface of the contact-events module; several sibling modules
// are re-exported here so consumers have a single import point.
module.exports = {
  onAvatar,
  getPubToAvatar,
  onDisplayName,
  getPubToDn,
  onPubToIncoming: require('./pubToIncoming').onPubToIncoming,
  getPubToIncoming: require('./pubToIncoming').getPubToIncoming,
  setPubToIncoming: require('./pubToIncoming').setPubToIncoming,
  onPubToFeed: require('./pubToFeed').onPubToFeed,
  getPubToFeed: require('./pubToFeed').getPubToFeed,
  onStoredReqs,
  getStoredReqs,
  onAddresses: require('./addresses').onAddresses,
  getAddresses: require('./addresses').getAddresses,
  onLastSentReqIDs: require('./lastSentReqID').onLastSentReqIDs,
  getSentReqIDs: require('./lastSentReqID').getSentReqIDs,
  PubToIncoming: require('./pubToIncoming'),
  getPubToLastSeenApp: require('./pubToLastSeenApp').getPubToLastSeenApp,
  onPubToLastSeenApp: require('./pubToLastSeenApp').on
}

View file

@ -1,56 +0,0 @@
/** @format */
const logger = require('winston')
const { Constants } = require('shock-common')
const Key = require('../key')
/** @type {Record<string, string|null|undefined>} */
let pubToLastSentReqID = {}
/** @type {Set<() => void>} */
const listeners = new Set()
const notify = () => listeners.forEach(l => l())
let subbed = false
/**
* @param {() => void} cb
*/
const onLastSentReqIDs = cb => {
listeners.add(cb)
cb()
if (!subbed) {
const user = require('../../Mediator').getUser()
if (!user.is) {
logger.warn('lastSentReqID() -> tried to sub without authing')
throw new Error(Constants.ErrorCode.NOT_AUTH)
}
user.get(Key.USER_TO_LAST_REQUEST_SENT).open(data => {
if (typeof data === 'object' && data !== null) {
for (const [pub, id] of Object.entries(data)) {
if (typeof id === 'string' || id === null) {
pubToLastSentReqID[pub] = id
}
}
} else {
pubToLastSentReqID = {}
}
notify()
})
subbed = true
}
return () => {
listeners.delete(cb)
}
}
const getSentReqIDs = () => pubToLastSentReqID
module.exports = {
onLastSentReqIDs,
getSentReqIDs
}

View file

@ -1,260 +0,0 @@
/** @format */
const uuidv1 = require('uuid/v1')
const logger = require('winston')
const debounce = require('lodash/debounce')
const { Schema, Utils: CommonUtils } = require('shock-common')
const size = require('lodash/size')
const Key = require('../key')
const Utils = require('../utils')
/**
* @typedef {import('shock-common').Schema.ChatMessage} Message
* @typedef {import('../SimpleGUN').OpenListenerData} OpenListenerData
*/
const PubToIncoming = require('./pubToIncoming')
/**
* @typedef {Record<string, Message[]|null|undefined|'disconnected'>} Feeds
* @typedef {(feeds: Feeds) => void} FeedsListener
*/
/** @type {Set<FeedsListener>} */
const feedsListeners = new Set()
/**
* @type {Feeds}
*/
let pubToFeed = {}
const getPubToFeed = () => pubToFeed
feedsListeners.add(() => {
logger.info(`new pubToFeed length: ${size(getPubToFeed())}`)
})
/**
 * Replaces the cached pub -> feed map and notifies every subscriber with the
 * new snapshot.
 * @param {Feeds} ptf
 */
const setPubToFeed = ptf => {
  pubToFeed = ptf
  for (const listener of feedsListeners) {
    listener(pubToFeed)
  }
}
/**
* If at one point we subscribed to a feed, record it here. Keeps track of it
* for unsubbing.
*
* Since we can't really unsub in GUN, what we do is that each listener created
* checks the last incoming feed, if it was created for other feed that is not
* the latest, it becomes inactive.
* @type {Record<string, string|undefined|null>}
*/
const pubToLastIncoming = {}
/**
* Any pub-feed pair listener will write its update id here when fired up. Avoid
* race conditions between different listeners and between different invocations
* of the same listener.
* @type {Record<string, string>}
*/
const pubToLastUpdate = {}
/**
* Performs a sub to a pub feed pair that will only emit if it is the last
* subbed feed for that pub, according to `pubToLastIncoming`. This listener is
* not in charge of writing to the cache.
* @param {[ string , string ]} param0
* @returns {(data: OpenListenerData) => void}
*/
const onOpenForPubFeedPair = ([pub, feed]) =>
  // Debounced 750ms: gun's open() can fire in bursts for a single change.
  debounce(async data => {
    try {
      // did invalidate
      if (pubToLastIncoming[pub] !== feed) {
        return
      }
      if (
        // did disconnect
        data === null ||
        // interpret as disconnect
        typeof data !== 'object'
      ) {
        // invalidate this listener. If a reconnection happens it will be for a
        // different pub-feed pair.
        pubToLastIncoming[pub] = null
        setImmediate(() => {
          logger.info(
            `onOpenForPubFeedPair -> didDisconnect -> pub: ${pub} - feed: ${feed}`
          )
        })
        // signal disconnect to listeners listeners should rely on pubToFeed for
        // disconnect status instead of pub-to-incoming. Only the latter will
        // detect remote disconnection
        setPubToFeed({
          ...getPubToFeed(),
          [pub]: /** @type {'disconnected'} */ ('disconnected')
        })
        return
      }
      //@ts-ignore
      const incoming = /** @type {import('shock-common').Schema.Outgoing} */ (data)
      // incomplete data, let's not assume anything
      if (
        typeof incoming.with !== 'string' ||
        typeof incoming.messages !== 'object'
      ) {
        return
      }
      /** @type {import('shock-common').Schema.ChatMessage[]} */
      const newMsgs = Object.entries(incoming.messages)
        // filter out messages with incomplete data
        .filter(([_, msg]) => Schema.isMessage(msg))
        .map(([id, msg]) => {
          /** @type {import('shock-common').Schema.ChatMessage} */
          const m = {
            // we'll decrypt later
            body: msg.body,
            id,
            outgoing: false,
            timestamp: msg.timestamp
          }
          return m
        })
      if (newMsgs.length === 0) {
        setPubToFeed({
          ...getPubToFeed(),
          [pub]: []
        })
        return
      }
      // Stamp this run so that slower, older invocations can detect they have
      // been superseded (see the pubToLastUpdate check after the awaits).
      const thisUpdate = uuidv1()
      pubToLastUpdate[pub] = thisUpdate
      const user = require('../../Mediator').getUser()
      if (!user.is) {
        // NOTE(review): execution continues even when unauthenticated; the
        // SEA.secret() call below will presumably fail — confirm intended.
        logger.warn('pubToFeed -> onOpenForPubFeedPair() -> user is not auth')
      }
      const SEA = require('../../Mediator').mySEA
      // Shared secret between us and the sender, used to decrypt bodies below.
      const ourSecret = await SEA.secret(await Utils.pubToEpub(pub), user._.sea)
      const decryptedMsgs = await CommonUtils.asyncMap(newMsgs, async m => {
        /** @type {import('shock-common').Schema.ChatMessage} */
        const decryptedMsg = {
          ...m,
          body: await SEA.decrypt(m.body, ourSecret)
        }
        return decryptedMsg
      })
      // this listener got invalidated while we were awaiting the async operations
      // above.
      if (pubToLastUpdate[pub] !== thisUpdate) {
        return
      }
      setPubToFeed({
        ...getPubToFeed(),
        [pub]: decryptedMsgs
      })
    } catch (err) {
      logger.warn(`error inside pub to pk-feed pair: ${pub} -- ${feed}`)
      logger.error(err)
    }
  }, 750)
// Reconciles the pub -> feed map against the latest pub -> incoming-feed-id
// map: (re)subscribes to any pub whose incoming feed id changed and
// invalidates the listeners / pending writes for the old feed id.
const react = () => {
  const pubToIncoming = PubToIncoming.getPubToIncoming()
  const gun = require('../../Mediator').getGun()
  /** @type {Feeds} */
  const newPubToFeed = {}
  for (const [pub, inc] of Object.entries(pubToIncoming)) {
    /**
     * empty string -> null
     * @type {string|null}
     */
    const newIncoming = inc || null
    if (
      // if disconnected, the same incoming feed will try to overwrite the
      // nulled out pubToLastIncoming[pub] entry. Making the listener for that
      // pub feed pair fire up again, etc. Now. When the user disconnects from
      // this side of things. He will overwrite the pub to incoming with null.
      // Let's allow that.
      newIncoming === pubToLastIncoming[pub] &&
      !(pubToFeed[pub] === 'disconnected' && newIncoming === null)
    ) {
      // Nothing changed for this pub.
      // eslint-disable-next-line no-continue
      continue
    }
    // will invalidate stale listeners (a listener for an outdated incoming feed
    // id)
    pubToLastIncoming[pub] = newIncoming
    // Invalidate pending writes from stale listener(s) for the old incoming
    // address.
    pubToLastUpdate[pub] = uuidv1()
    // A string feed id gets an empty feed until messages arrive; a null feed
    // id (local disconnect) nulls out the feed.
    newPubToFeed[pub] = newIncoming ? [] : null
    // sub to this incoming feed
    if (typeof newIncoming === 'string') {
      // perform sub to pub-incoming_feed pair
      // leave all of the sideffects from this for the next tick
      setImmediate(() => {
        gun
          .user(pub)
          .get(Key.OUTGOINGS)
          .get(newIncoming)
          .open(onOpenForPubFeedPair([pub, newIncoming]))
      })
    }
  }
  if (Object.keys(newPubToFeed).length > 0) {
    setPubToFeed({
      ...getPubToFeed(),
      ...newPubToFeed
    })
  }
}
// Whether react() has already been wired to the pub -> incoming map.
let wiredToIncoming = false
/**
 * Subscribe to the pub -> message-feed map.
 *
 * Array.isArray(pubToFeed[pub]) means a Handshake is in place, look for
 * incoming messages here.
 * pubToIncoming[pub] === null means a disconnection took place.
 * typeof pubToIncoming[pub] === 'undefined' means none of the above.
 * @param {FeedsListener} cb Called right away with the current snapshot, then
 * on every change.
 * @returns {() => void} Unsubscribe function.
 */
const onPubToFeed = cb => {
  feedsListeners.add(cb)
  cb(getPubToFeed())
  if (!wiredToIncoming) {
    wiredToIncoming = true
    PubToIncoming.onPubToIncoming(react)
  }
  return () => {
    feedsListeners.delete(cb)
  }
}
module.exports = {
getPubToFeed,
setPubToFeed,
onPubToFeed
}

View file

@ -1,105 +0,0 @@
/** @format */
const uuidv1 = require('uuid/v1')
const debounce = require('lodash/debounce')
const logger = require('winston')
const { Utils: CommonUtils } = require('shock-common')
const size = require('lodash/size')
const { USER_TO_INCOMING } = require('../key')
/** @typedef {import('../SimpleGUN').OpenListenerData} OpenListenerData */
/**
* @typedef {Record<string, string|null|undefined>} PubToIncoming
*/
/** @type {Set<() => void>} */
const listeners = new Set()
/** @type {PubToIncoming} */
let pubToIncoming = {}
const getPubToIncoming = () => pubToIncoming
/**
 * Replaces the cached pub -> incoming-feed-id map and fires every listener.
 * @param {PubToIncoming} pti
 * @returns {void}
 */
const setPubToIncoming = pti => {
  pubToIncoming = pti
  for (const listener of listeners) {
    listener()
  }
}
let latestUpdate = uuidv1()
listeners.add(() => {
logger.info(`new pubToIncoming length: ${size(getPubToIncoming())}`)
})
// Debounced (750ms) gun open() handler for the user-to-incoming node: decrypts
// each feed id with our own secret and atomically swaps in the new map.
const onOpen = debounce(async uti => {
  const SEA = require('../../Mediator').mySEA
  const mySec = require('../../Mediator').getMySecret()
  // Stamp this invocation; an invocation for newer data will overwrite
  // latestUpdate and void our pending write (see check at the bottom).
  const thisUpdate = uuidv1()
  latestUpdate = thisUpdate
  if (typeof uti !== 'object' || uti === null) {
    // Non-object data means there is no incoming map at all.
    setPubToIncoming({})
    return
  }
  /** @type {PubToIncoming} */
  const newPubToIncoming = {}
  await CommonUtils.asyncForEach(
    Object.entries(uti),
    async ([pub, encFeedID]) => {
      if (encFeedID === null) {
        // An explicit null records a disconnection; keep it as-is.
        newPubToIncoming[pub] = null
        return
      }
      if (typeof encFeedID === 'string') {
        newPubToIncoming[pub] = await SEA.decrypt(encFeedID, mySec)
      }
    }
  )
  // avoid old data from overwriting new data if decrypting took longer to
  // process for the older open() call than for the newer open() call
  if (latestUpdate === thisUpdate) {
    setPubToIncoming(newPubToIncoming)
  }
}, 750)
let subbed = false
/**
 * Subscribes to the pub -> incoming-feed-id map. The callback fires once
 * immediately; the underlying gun listener is attached lazily on the first
 * subscription.
 * @param {() => void} cb
 * @returns {() => void} Unsubscribe function; throws if called twice.
 * @throws {Error} When `cb` is already subscribed.
 */
const onPubToIncoming = cb => {
  // BUG FIX: this previously tested `!listeners.add(cb)`, but Set#add returns
  // the Set itself (always truthy), so the duplicate-subscription guard never
  // fired. Check membership explicitly instead.
  if (listeners.has(cb)) {
    throw new Error('Tried to subscribe twice')
  }
  listeners.add(cb)
  cb()
  if (!subbed) {
    const user = require('../../Mediator').getUser()
    if (!user.is) {
      logger.warn(`subscribing to pubToIncoming on a unauth user`)
    }
    user.get(USER_TO_INCOMING).open(onOpen)
    subbed = true
  }
  return () => {
    // Set#delete does return a useful boolean, so this guard works as-is.
    if (!listeners.delete(cb)) {
      throw new Error('Tried to unsubscribe twice')
    }
  }
}
module.exports = {
getPubToIncoming,
setPubToIncoming,
onPubToIncoming
}

View file

@ -27,14 +27,15 @@ const on = (cb, pub) => {
listeners.add(cb)
cb(pubToLastSeenApp)
if (pub && pubsWithListeners.add(pub)) {
pubToLastSeenApp[pub] = null;
pubToLastSeenApp[pub] = null
notifyListeners()
require('../../Mediator')
.getGun()
.user(pub)
.get(Key.LAST_SEEN_APP)
.on(timestamp => {
pubToLastSeenApp[pub] = typeof timestamp === 'number' ? timestamp : undefined
pubToLastSeenApp[pub] =
typeof timestamp === 'number' ? timestamp : undefined
notifyListeners()
})
}
@ -45,5 +46,5 @@ const on = (cb, pub) => {
module.exports = {
getPubToLastSeenApp,
on,
on
}

View file

@ -2,15 +2,16 @@
* @format
*/
/* eslint-disable init-declarations */
const logger = require('winston')
const logger = require('../../../../config/log')
const { Constants, Utils: CommonUtils } = require('shock-common')
const Key = require('../key')
/// <reference path="../../../../utils/GunSmith/Smith.ts" />
/**
* @typedef {import('../SimpleGUN').GUNNode} GUNNode
* @typedef {Smith.GunSmithNode} GUNNode
* @typedef {import('../SimpleGUN').ISEA} ISEA
* @typedef {import('../SimpleGUN').UserGUNNode} UserGUNNode
* @typedef {Smith.UserSmithNode} UserGUNNode
*/
/**
@ -25,73 +26,54 @@ const delay = ms => new Promise(res => setTimeout(res, ms))
const mySecret = () => Promise.resolve(require('../../Mediator').getMySecret())
/**
* @template T
* @param {Promise<T>} promise
* @returns {Promise<T>}
* Just a pointer.
*/
const timeout10 = promise => {
const TIMEOUT_PTR = {}
/**
* @param {number} ms Milliseconds
* @returns {<T>(promise: Promise<T>) => Promise<T>}
*/
const timeout = ms => async promise => {
/** @type {NodeJS.Timeout} */
// @ts-ignore
let timeoutID
return Promise.race([
const result = await Promise.race([
promise.then(v => {
clearTimeout(timeoutID)
return v
}),
new Promise((_, rej) => {
CommonUtils.makePromise(res => {
timeoutID = setTimeout(() => {
rej(new Error(Constants.ErrorCode.TIMEOUT_ERR))
}, 10000)
clearTimeout(timeoutID)
res(TIMEOUT_PTR)
}, ms)
})
])
if (result === TIMEOUT_PTR) {
throw new Error(Constants.TIMEOUT_ERR)
}
return result
}
/**
* @template T
* @param {Promise<T>} promise
* @returns {Promise<T>}
* Time outs at 10 seconds.
*/
const timeout5 = promise => {
/** @type {NodeJS.Timeout} */
// @ts-ignore
let timeoutID
return Promise.race([
promise.then(v => {
clearTimeout(timeoutID)
return v
}),
new Promise((_, rej) => {
timeoutID = setTimeout(() => {
rej(new Error(Constants.ErrorCode.TIMEOUT_ERR))
}, 5000)
})
])
}
const timeout10 = timeout(10)
/**
* @template T
* @param {Promise<T>} promise
* @returns {Promise<T>}
* Time outs at 5 seconds.
*/
const timeout2 = promise => {
/** @type {NodeJS.Timeout} */
// @ts-ignore
let timeoutID
return Promise.race([
promise.then(v => {
clearTimeout(timeoutID)
return v
}),
const timeout5 = timeout(5)
new Promise((_, rej) => {
timeoutID = setTimeout(() => {
rej(new Error(Constants.ErrorCode.TIMEOUT_ERR))
}, 2000)
})
])
}
/**
* Time outs at 2 seconds.
*/
const timeout2 = timeout(2)
/**
* @template T
@ -101,7 +83,6 @@ const timeout2 = promise => {
* @returns {Promise<T>}
*/
const tryAndWait = async (promGen, shouldRetry = () => false) => {
/* eslint-disable no-empty */
/* eslint-disable init-declarations */
// If hang stop at 10, wait 3, retry, if hang stop at 5, reinstate, warm for
@ -118,27 +99,15 @@ const tryAndWait = async (promGen, shouldRetry = () => false) => {
)
)
if (shouldRetry(resolvedValue)) {
logger.info(
'force retrying' +
` args: ${promGen.toString()} -- ${shouldRetry.toString()} \n resolvedValue: ${resolvedValue}, type: ${typeof resolvedValue}`
)
} else {
if (!shouldRetry(resolvedValue)) {
return resolvedValue
}
} catch (e) {
logger.error(e)
logger.info(JSON.stringify(e))
if (e.message === Constants.ErrorCode.NOT_AUTH) {
if (e.message !== Constants.ErrorCode.TIMEOUT_ERR) {
throw e
}
}
logger.info(
`\n retrying \n` +
` args: ${promGen.toString()} -- ${shouldRetry.toString()}`
)
await delay(200)
try {
@ -149,26 +118,15 @@ const tryAndWait = async (promGen, shouldRetry = () => false) => {
)
)
if (shouldRetry(resolvedValue)) {
logger.info(
'force retrying' +
` args: ${promGen.toString()} -- ${shouldRetry.toString()} \n resolvedValue: ${resolvedValue}, type: ${typeof resolvedValue}`
)
} else {
if (!shouldRetry(resolvedValue)) {
return resolvedValue
}
} catch (e) {
logger.error(e)
if (e.message === Constants.ErrorCode.NOT_AUTH) {
if (e.message !== Constants.ErrorCode.TIMEOUT_ERR) {
throw e
}
}
logger.info(
`\n retrying \n` +
` args: ${promGen.toString()} -- ${shouldRetry.toString()}`
)
await delay(3000)
try {
@ -179,30 +137,22 @@ const tryAndWait = async (promGen, shouldRetry = () => false) => {
)
)
if (shouldRetry(resolvedValue)) {
logger.info(
'force retrying' +
` args: ${promGen.toString()} -- ${shouldRetry.toString()} \n resolvedValue: ${resolvedValue}, type: ${typeof resolvedValue}`
)
} else {
if (!shouldRetry(resolvedValue)) {
return resolvedValue
}
} catch (e) {
logger.error(e)
if (e.message === Constants.ErrorCode.NOT_AUTH) {
if (e.message !== Constants.ErrorCode.TIMEOUT_ERR) {
throw e
}
}
logger.info(
`\n NOT recreating a fresh gun but retrying one last time \n` +
` args: ${promGen.toString()} -- ${shouldRetry.toString()}`
return timeout10(
promGen(
require('../../Mediator/index').getGun(),
require('../../Mediator/index').getUser()
)
)
const { gun, user } = require('../../Mediator/index').freshGun()
return timeout10(promGen(gun, user))
/* eslint-enable no-empty */
/* eslint-enable init-declarations */
}
@ -212,99 +162,22 @@ const tryAndWait = async (promGen, shouldRetry = () => false) => {
*/
const pubToEpub = async pub => {
try {
const epub = await timeout10(
CommonUtils.makePromise(res => {
require('../../Mediator/index')
const epub = await require('../../Mediator/index')
.getGun()
.user(pub)
.get('epub')
.on(data => {
if (typeof data === 'string') {
res(data)
}
})
})
)
.specialThen()
return epub
return /** @type {string} */ (epub)
} catch (err) {
logger.error(
`Error inside pubToEpub for pub ${pub.slice(0, 8)}...${pub.slice(-8)}:`
)
logger.error(err)
throw new Error(`pubToEpub() -> ${err.message}`)
throw err
}
}
/**
* Should only be called with a recipient pub that has already been contacted.
* If returns null, a disconnect happened.
* @param {string} recipientPub
* @returns {Promise<string|null>}
*/
const recipientPubToLastReqSentID = async recipientPub => {
const maybeLastReqSentID = await tryAndWait(
(_, user) => {
const userToLastReqSent = user.get(Key.USER_TO_LAST_REQUEST_SENT)
return userToLastReqSent.get(recipientPub).then()
},
// retry on undefined, in case it is a false negative
v => typeof v === 'undefined'
)
if (typeof maybeLastReqSentID !== 'string') {
return null
}
return maybeLastReqSentID
}
/**
* @param {string} recipientPub
* @returns {Promise<boolean>}
*/
const successfulHandshakeAlreadyExists = async recipientPub => {
const maybeIncomingID = await tryAndWait((_, user) => {
const userToIncoming = user.get(Key.USER_TO_INCOMING)
return userToIncoming.get(recipientPub).then()
})
const maybeOutgoingID = await tryAndWait((_, user) => {
const recipientToOutgoing = user.get(Key.RECIPIENT_TO_OUTGOING)
return recipientToOutgoing.get(recipientPub).then()
})
return (
typeof maybeIncomingID === 'string' && typeof maybeOutgoingID === 'string'
)
}
/**
* @param {string} recipientPub
* @returns {Promise<string|null>}
*/
const recipientToOutgoingID = async recipientPub => {
const maybeEncryptedOutgoingID = await tryAndWait(
(_, user) =>
user
.get(Key.RECIPIENT_TO_OUTGOING)
.get(recipientPub)
.then(),
// force retry in case undefined is a false negative
v => typeof v === 'undefined'
)
if (typeof maybeEncryptedOutgoingID === 'string') {
const outgoingID = await require('../../Mediator/index').mySEA.decrypt(
maybeEncryptedOutgoingID,
await mySecret()
)
return outgoingID || null
}
return null
}
/**
* @param {import('../SimpleGUN').ListenerData} listenerData
* @returns {listenerData is import('../SimpleGUN').ListenerObj}
@ -341,17 +214,30 @@ const isNodeOnline = async pub => {
)
}
/**
* @returns {string}
*/
const gunID = () => {
// Copied from gun internals
let s = ''
let l = 24 // you are not going to make a 0 length random number, so no need to check type
const c = '0123456789ABCDEFGHIJKLMNOPQRSTUVWXZabcdefghijklmnopqrstuvwxyz'
while (l > 0) {
s += c.charAt(Math.floor(Math.random() * c.length))
l--
}
return s
}
module.exports = {
dataHasSoul,
delay,
pubToEpub,
recipientPubToLastReqSentID,
successfulHandshakeAlreadyExists,
recipientToOutgoingID,
tryAndWait,
mySecret,
promisifyGunNode: require('./promisifygun'),
timeout5,
timeout2,
isNodeOnline
isNodeOnline,
gunID
}

View file

@ -0,0 +1,14 @@
/**
 * @format
 */
const expect = require('expect')

const { gunID } = require('./index')

describe('gunID()', () => {
  it('generates 24-chars-long unique IDs', () => {
    const id = gunID()
    expect(id).toBeTruthy()
    expect(id.length).toBe(24)
    // IDs are drawn from gun's base-62-ish alphabet (alphanumerics only).
    expect(id).toMatch(/^[0-9A-Za-z]{24}$/u)
    // The test's name promises uniqueness, so actually exercise it: two
    // consecutive calls colliding is astronomically unlikely (62^24 space).
    expect(gunID()).not.toBe(id)
  })
})

View file

@ -20,7 +20,11 @@ const promisify = node => {
pnode.put = data =>
new Promise((res, rej) => {
oldPut(data, ack => {
if (ack.err && typeof ack.err !== 'number') {
if (
ack.err &&
typeof ack.err !== 'number' &&
typeof ack.err !== 'object'
) {
rej(new Error(ack.err))
} else {
res()
@ -31,7 +35,11 @@ const promisify = node => {
pnode.set = data =>
new Promise((res, rej) => {
oldSet(data, ack => {
if (ack.err && typeof ack.err !== 'number') {
if (
ack.err &&
typeof ack.err !== 'number' &&
typeof ack.err !== 'object'
) {
rej(new Error(ack.err))
} else {
res()

View file

@ -6,7 +6,6 @@
const { makePromise, Constants, Schema } = require('shock-common')
const mapValues = require('lodash/mapValues')
const Bluebird = require('bluebird')
const Gun = require('gun')
const { pubToEpub } = require('../contact-api/utils')
const {
@ -16,6 +15,8 @@ const {
getMySecret,
$$__SHOCKWALLET__ENCRYPTED__
} = require('../Mediator')
const logger = require('../../../config/log')
const Utils = require('../contact-api/utils')
/**
* @typedef {import('../contact-api/SimpleGUN').ValidDataValue} ValidDataValue
* @typedef {import('./types').ValidRPCDataValue} ValidRPCDataValue
@ -27,12 +28,15 @@ const PATH_SEPARATOR = '>'
/**
* @param {ValidDataValue} value
* @param {string} publicKey
* @param {string=} epubForDecryption
* @returns {Promise<ValidDataValue>}
*/
const deepDecryptIfNeeded = async (value, publicKey) => {
const deepDecryptIfNeeded = async (value, publicKey, epubForDecryption) => {
if (Schema.isObj(value)) {
return Bluebird.props(
mapValues(value, o => deepDecryptIfNeeded(o, publicKey))
mapValues(value, o =>
deepDecryptIfNeeded(o, publicKey, epubForDecryption)
)
)
}
@ -46,10 +50,16 @@ const deepDecryptIfNeeded = async (value, publicKey) => {
}
let sec = ''
if (user.is.pub === publicKey) {
if (user.is.pub === publicKey || 'me' === publicKey) {
sec = getMySecret()
} else {
sec = await SEA.secret(await pubToEpub(publicKey), user._.sea)
let epub = epubForDecryption
if (!epub) {
epub = await pubToEpub(publicKey)
}
sec = await SEA.secret(epub, user._.sea)
}
const decrypted = SEA.decrypt(value, sec)
@ -81,6 +91,7 @@ async function deepEncryptIfNeeded(value) {
}
const pk = /** @type {string|undefined} */ (value.$$__ENCRYPT__FOR)
const epub = /** @type {string|undefined} */ (value.$$__EPUB__FOR)
if (!pk) {
return Bluebird.props(mapValues(value, deepEncryptIfNeeded))
@ -93,7 +104,15 @@ async function deepEncryptIfNeeded(value) {
if (pk === u.is.pub || pk === 'me') {
encryptedValue = await SEA.encrypt(actualValue, getMySecret())
} else {
const sec = await SEA.secret(await pubToEpub(pk), u._.sea)
const sec = await SEA.secret(
await (() => {
if (epub) {
return epub
}
return pubToEpub(pk)
})(),
u._.sea
)
encryptedValue = await SEA.encrypt(actualValue, sec)
}
@ -186,8 +205,18 @@ const put = async (rawPath, value) => {
} /* is primitive */ else {
await makePromise((res, rej) => {
node.put(/** @type {ValidDataValue} */ (theValue), ack => {
if (ack.err && typeof ack.err !== 'number') {
if (
ack.err &&
typeof ack.err !== 'number' &&
typeof ack.err !== 'object'
) {
if (typeof ack.err === 'string') {
rej(new Error(ack.err))
} else {
logger.info(`NON STANDARD GUN ERROR:`)
logger.info(ack)
rej(new Error(JSON.stringify(ack.err, null, 4)))
}
} else {
res()
}
@ -237,8 +266,7 @@ async function set(rawPath, value) {
if (Array.isArray(theValue)) {
// we'll create a set of sets
// @ts-expect-error
const uuid = Gun.text.random()
const uuid = Utils.gunID()
// here we are simulating the top-most set()
const subPath = rawPath + PATH_SEPARATOR + uuid
@ -249,8 +277,7 @@ async function set(rawPath, value) {
return uuid
} else if (Schema.isObj(theValue)) {
// @ts-expect-error
const uuid = Gun.text.random() // we'll handle UUID ourselves
const uuid = Utils.gunID() // we'll handle UUID ourselves
// so we can use our own put()
@ -265,7 +292,11 @@ async function set(rawPath, value) {
const id = await makePromise((res, rej) => {
const subNode = node.set(theValue, ack => {
if (ack.err && typeof ack.err !== 'number') {
if (
ack.err &&
typeof ack.err !== 'number' &&
typeof ack.err !== 'object'
) {
rej(new Error(ack.err))
} else {
res(subNode._.get)

View file

@ -0,0 +1,254 @@
/**
* @format
*/
const logger = require('../../../config/log')
const Common = require('shock-common')
const uuidv4 = require('uuid/v4')
const { getGun, getUser, isAuthenticated } = require('../Mediator')
const { deepDecryptIfNeeded } = require('../rpc')
const Subscriptions = require('./subscriptions')
const GunActions = require('../../gunDB/contact-api/actions')
const {
encryptedEmit,
encryptedOn,
encryptedCallback
} = require('../../../utils/ECC/socket')
/// <reference path="../../../utils/GunSmith/Smith.ts" />
const ALLOWED_GUN_METHODS = [
'map',
'map.on',
'on',
'once',
'load',
'then',
'open'
]
/**
* @typedef {import('../contact-api/SimpleGUN').ValidDataValue} ValidDataValue
*/
/**
* @typedef {(data: ValidDataValue, key?: string, _msg?: any, event?: any) => (void | Promise<void>)} GunListener
* @typedef {{ reconnect: boolean, token: string }} SubscriptionOptions
*/
/**
 * Resolves a rifle query root to a gun node: `$gun` is the root graph,
 * `$user` is our own user graph, anything else is treated as a public key.
 * @param {string} root
 */
const getNode = root => {
  switch (root) {
    case '$gun':
      return getGun()
    case '$user':
      return getUser()
    default:
      return getGun().user(root)
  }
}
/**
 * Walks a `>`-separated path down from a root node, one `get()` per segment.
 * @param {Smith.GunSmithNode} node Root node to start traversal from.
 * @param {string} path e.g. `"posts>someId>title"`.
 */
const getGunQuery = (node, path) => {
  let query = node
  for (const bit of path.split('>')) {
    query = query.get(bit)
  }
  return query
}
/**
 * Executes a GunDB query call using the specified method.
 * @param {any} query Node returned by getGunQuery().
 * @param {string} method One of ALLOWED_GUN_METHODS.
 * @param {GunListener} listener
 * @throws {{field: string, message: string}} When the method is not allowed
 * or cannot be dispatched.
 */
const executeGunQuery = (query, method, listener) => {
  if (!ALLOWED_GUN_METHODS.includes(method)) {
    // Kept as a plain object (not an Error) for backwards compatibility with
    // existing handlers that read `field`/`message`.
    throw {
      field: 'method',
      message: `Invalid GunDB method specified (${method}). `
    }
  }
  if (method === 'on') {
    return query.on(listener)
  }
  if (method === 'open') {
    return query.open(listener)
  }
  if (method === 'map.on') {
    return query.map().on(listener)
  }
  // BUG FIX: 'once', 'load' and 'then' are in ALLOWED_GUN_METHODS but were
  // previously unhandled — the function returned undefined without ever
  // querying, so the client believed it had subscribed and received nothing.
  // (The old 'map.once' branch was unreachable: it is not in the allow-list.)
  if (method === 'once') {
    return query.once(listener)
  }
  if (method === 'load') {
    return query.load(listener)
  }
  if (method === 'then') {
    // gun nodes are thenable: resolve once with the current data.
    return query.then(listener)
  }
  // 'map' alone carries no read verb; reject loudly instead of silently
  // succeeding as before.
  throw {
    field: 'method',
    message: `Unsupported GunDB method specified (${method}). `
  }
}
/**
* @param {Object} queryData
* @param {(eventName: string, ...args: any[]) => Promise<void>} queryData.emit
* @param {string} queryData.publicKeyForDecryption
* @param {string} queryData.subscriptionId
* @param {string} queryData.deviceId
* @param {string=} queryData.epubForDecryption
* @param {string=} queryData.epubField If the epub is included in the received
* data itself. Handshake requests for example, have an epub field.
* @returns {GunListener}
*/
// Builds the gun listener for one subscription: lazily records the
// unsubscribe handle, optionally decrypts the payload, then forwards it to
// the device over the encrypted socket.
const queryListenerCallback = ({
  emit,
  publicKeyForDecryption,
  subscriptionId,
  deviceId,
  epubForDecryption,
  epubField
}) => async (data, key, _msg, event) => {
  try {
    const subscription = Subscriptions.get({
      deviceId,
      subscriptionId
    })
    // The gun event handle only exists once the listener has fired; attach
    // the unsubscribe on first fire so `remove`/`removeDevice` can tear down.
    if (subscription && !subscription.unsubscribe && event) {
      Subscriptions.attachUnsubscribe({
        deviceId,
        subscriptionId,
        unsubscribe: () => event.off()
      })
    }
    const eventName = `query:data`
    if (publicKeyForDecryption?.length > 0 || epubForDecryption || epubField) {
      const decData = await deepDecryptIfNeeded(
        data,
        publicKeyForDecryption,
        // Pick the epub used for decryption: prefer one embedded in the data
        // itself (epubField, e.g. handshake requests), falling back to the
        // explicitly supplied epubForDecryption.
        (() => {
          if (epubField) {
            if (Common.isObj(data)) {
              const epub = data[epubField]
              if (Common.isPopulatedString(epub)) {
                return epub
              }
              logger.error(
                `Got epubField in a rifle query, but the resulting value obtained is not an string -> `,
                {
                  data,
                  epub
                }
              )
            } else {
              logger.warn(
                `Got epubField in a rifle query for a non-object data -> `,
                {
                  epubField,
                  data
                }
              )
            }
          }
          return epubForDecryption
        })()
      )
      emit(eventName, { subscriptionId, response: { data: decData, key } })
      return
    }
    // No decryption requested: forward the raw payload.
    // NOTE(review): emit's returned promise is not awaited here or above —
    // emit failures are not caught by this try/catch; confirm intended.
    emit(eventName, { subscriptionId, response: { data, key } })
  } catch (err) {
    logger.error(`Error for gun rpc socket: ${err.message}`)
  }
}
/** @param {import('socket.io').Socket} socket */
// Wires one socket.io connection to the gun RPC layer: subscribe/unsubscribe
// handlers plus per-device cleanup on disconnect. All traffic goes through
// the device-keyed encrypted emit/on/callback wrappers.
const startSocket = socket => {
  try {
    const emit = encryptedEmit(socket)
    const on = encryptedOn(socket)
    const { encryptionId } = socket.handshake.auth
    if (!isAuthenticated()) {
      // NOTE(review): NOT_AUTH is emitted as the event *name* with no
      // payload — confirm clients listen for it this way.
      logger.warn('GunDB is not yet authenticated')
      socket.emit(Common.Constants.ErrorCode.NOT_AUTH)
    }
    if (isAuthenticated()) {
      // Any inbound event doubles as an app heartbeat (best-effort).
      socket.onAny(async () => {
        try {
          await GunActions.setLastSeenApp()
        } catch (err) {
          logger.info('error setting last seen app', err)
        }
      })
    }
    on('subscribe:query', (query, response) => {
      const { $shock, publicKey, epubForDecryption, epubField } = query
      const subscriptionId = uuidv4()
      try {
        if (!isAuthenticated()) {
          socket.emit(Common.Constants.ErrorCode.NOT_AUTH)
          return
        }
        // $shock format: "<root>::<path>::<method>".
        const [root, path, method] = $shock.split('::')
        const socketCallback = encryptedCallback(socket, response)
        if (!ALLOWED_GUN_METHODS.includes(method)) {
          socketCallback(
            `Invalid method for gun rpc call: ${method}, query: ${$shock}`
          )
          return
        }
        // Register before querying so the listener's first fire can attach
        // its unsubscribe handle to an existing record.
        Subscriptions.add({
          deviceId: encryptionId,
          subscriptionId
        })
        const queryCallback = queryListenerCallback({
          emit,
          publicKeyForDecryption: publicKey,
          subscriptionId,
          deviceId: encryptionId,
          epubForDecryption,
          epubField
        })
        // Acknowledge with the id first; data flows via `query:data` events.
        socketCallback(null, {
          subscriptionId
        })
        const node = getNode(root)
        // Note: this `query` shadows the handler's `query` parameter above.
        const query = getGunQuery(node, path)
        executeGunQuery(query, method, queryCallback)
      } catch (error) {
        emit(`query:error`, { subscriptionId, response: { data: error } })
      }
    })
    on('unsubscribe', ({ subscriptionId }, response) => {
      const callback = encryptedCallback(socket, response)
      Subscriptions.remove({ deviceId: encryptionId, subscriptionId })
      callback(null, {
        message: 'Unsubscribed successfully!',
        success: true
      })
    })
    socket.on('disconnect', () => {
      Subscriptions.removeDevice({ deviceId: encryptionId })
    })
  } catch (err) {
    logger.error('GUNRPC: ' + err.message)
  }
}
module.exports = startSocket

View file

@ -0,0 +1,123 @@
/**
 * @typedef {() => void} Unsubscribe
 */

/**
 * deviceId -> (subscriptionId -> subscription record).
 * @type {Map<string, Map<string, { subscriptionId: string, unsubscribe?: () => void, metadata?: object }>>}
 */
const userSubscriptions = new Map()

/**
 * Adds a new subscription for a device, creating the device bucket on demand.
 * @param {Object} subscription
 * @param {string} subscription.deviceId
 * @param {string} subscription.subscriptionId
 * @param {(Unsubscribe)=} subscription.unsubscribe
 * @param {(object)=} subscription.metadata
 */
const add = ({ deviceId, subscriptionId, unsubscribe, metadata }) => {
  const subscriptions = userSubscriptions.get(deviceId) ?? new Map()
  subscriptions.set(subscriptionId, {
    subscriptionId,
    unsubscribe,
    metadata
  })
  userSubscriptions.set(deviceId, subscriptions)
}

/**
 * Attaches an unsubscribe handler to an existing subscription. No-op when the
 * device or subscription is unknown. (Doc fixed: it previously repeated
 * add()'s "Adds a new Subscription".)
 * @param {Object} subscription
 * @param {string} subscription.deviceId
 * @param {string} subscription.subscriptionId
 * @param {Unsubscribe} subscription.unsubscribe
 */
const attachUnsubscribe = ({ deviceId, subscriptionId, unsubscribe }) => {
  const subscriptions = userSubscriptions.get(deviceId)
  const subscription = subscriptions?.get(subscriptionId)
  if (!subscription) {
    return
  }
  subscriptions.set(subscriptionId, {
    ...subscription,
    unsubscribe
  })
}

/**
 * Unsubscribes from a GunDB query (if an unsubscribe handler was attached)
 * and forgets the subscription.
 * @param {Object} subscription
 * @param {string} subscription.deviceId
 * @param {string} subscription.subscriptionId
 */
const remove = ({ deviceId, subscriptionId }) => {
  const subscriptions = userSubscriptions.get(deviceId)
  if (!subscriptions) {
    // Unknown device: nothing to tear down. (Previously this allocated and
    // stored an empty Map as a side effect.)
    return
  }
  subscriptions.get(subscriptionId)?.unsubscribe?.()
  subscriptions.delete(subscriptionId)
}

/**
 * Unsubscribes from all GunDB queries for a specific device.
 * @param {Object} subscription
 * @param {string} subscription.deviceId
 */
const removeDevice = ({ deviceId }) => {
  const deviceSubscriptions = userSubscriptions.get(deviceId)
  if (!deviceSubscriptions) {
    return
  }
  // Side-effect-only iteration: forEach, not map (which built a thrown-away
  // array before).
  deviceSubscriptions.forEach(subscription => {
    subscription?.unsubscribe?.()
  })
  userSubscriptions.set(deviceId, new Map())
}

/**
 * Retrieves the specified subscription's info if it exists.
 * @param {Object} subscription
 * @param {string} subscription.deviceId
 * @param {string} subscription.subscriptionId
 * @returns {{ subscriptionId: string, unsubscribe?: () => void, metadata?: object }|undefined}
 * The record, or undefined when unknown. (Previously returned `false` for an
 * unknown device but `undefined` for an unknown subscription; both are falsy,
 * so existing truthiness checks are unaffected.)
 */
const get = ({ deviceId, subscriptionId }) =>
  userSubscriptions.get(deviceId)?.get(subscriptionId)
module.exports = {
add,
attachUnsubscribe,
get,
remove,
removeDevice
}

12
services/initializer.js Normal file
View file

@ -0,0 +1,12 @@
const API = require('./gunDB/contact-api')

/**
 * Seeds a freshly created user's graph with default profile data.
 * The writes run sequentially, preserving the original ordering.
 * @param {*} user Authenticated gun user node (`user._.sea.pub` is read for
 * the default display name).
 */
module.exports.InitUserData = async user => {
  const { Actions } = API
  const defaultName = `anon${user._.sea.pub.slice(0, 8)}`
  await Actions.setDisplayName(defaultName, user)
  await Actions.generateHandshakeAddress()
  await Actions.generateOrderAddress(user)
  await Actions.initWall()
  await Actions.setBio('A little bit about myself.', user)
  await Actions.setDefaultSeedProvider('', user)
  await Actions.setSeedServiceData('', user)
  await Actions.setCurrentStreamInfo('', user)
}

View file

@ -1,5 +1,5 @@
const Crypto = require('crypto')
const logger = require('winston')
const logger = require('../../config/log')
const Common = require('shock-common')
const getGunUser = () => require('../gunDB/Mediator').getUser()
const isAuthenticated = () => require('../gunDB/Mediator').isAuthenticated()
@ -201,7 +201,7 @@ const AddTmpChainOrder = async (address, orderInfo) => {
.get(Key.TMP_CHAIN_COORDINATE)
.get(addressSHA256)
.put(encryptedOrderString, ack => {
if (ack.err && typeof ack.err !== 'number') {
if (ack.err && typeof ack.err !== 'number' && typeof ack.err !== 'object') {
rej(
new Error(
`Error saving tmp chain coordinate order to user-graph: ${ack}`
@ -268,7 +268,7 @@ const clearTmpChainOrder = async (address) => {
.get(Key.TMP_CHAIN_COORDINATE)
.get(addressSHA256)
.put(null, ack => {
if (ack.err && typeof ack.err !== 'number') {
if (ack.err && typeof ack.err !== 'number' && typeof ack.err !== 'object') {
rej(
new Error(
`Error nulling tmp chain coordinate order to user-graph: ${ack}`
@ -370,8 +370,8 @@ class SchemaManager {
.get(Key.COORDINATES)
.get(coordinateSHA256)
.put(encryptedOrderString, ack => {
if (ack.err && typeof ack.err !== 'number') {
console.log(ack)
if (ack.err && typeof ack.err !== 'number' && typeof ack.err !== 'object') {
logger.info(ack)
rej(
new Error(
`Error saving coordinate order to user-graph: ${ack}`
@ -429,7 +429,7 @@ return orderedOrders
}*/
/**
* @typedef {Common.Schema.InvoiceWhenListed & {r_hash:Buffer,payment_addr:string}} Invoice
* @typedef {Common.Schema.InvoiceWhenListed & {r_hash:Buffer,payment_addr:Buffer}} Invoice
*/
/**
* @type {Record<string,(invoice:Invoice) =>void>}
@ -448,7 +448,7 @@ return orderedOrders
/**
*
* @param {Common.Schema.InvoiceWhenListed & {r_hash:Buffer,payment_addr:string}} data
* @param {Common.Schema.InvoiceWhenListed & {r_hash:Buffer,payment_addr:Buffer}} data
*/
invoiceStreamDataCb(data) {
if (!data.settled) {
@ -468,7 +468,7 @@ return orderedOrders
coordinateIndex: parseInt(data.add_index, 10),
inbound: true,
amount: amt,
toLndPub: data.payment_addr,
toLndPub: data.payment_addr.toString('hex'),
invoiceMemo: data.memo
})
}

150
services/streams.js Normal file
View file

@ -0,0 +1,150 @@
/**
 * Live-stream lifecycle manager.
 *
 * `startedStream`/`endStream` are the public entry points; they emit onto an
 * internal EventEmitter whose handlers (registered further down this file)
 * poll the streaming backend and mirror stream status into the user's Gun
 * graph.
 */
const EventEmitter = require('events')
const fetch = require('node-fetch')
const Key = require('./gunDB/contact-api/key')
// Internal event bus that drives the polling phases registered below.
const StreamLiveManager = new EventEmitter()
/**
 * Announce a newly started stream; kicks off polling until it goes live.
 * @param {{postId: string, contentId: string, statusUrl: string}} data
 *   Field names taken from the 'awaitStream' handler's destructuring below.
 */
const startedStream = (data) => {
  StreamLiveManager.emit('awaitStream',data)
}
/**
 * Announce that a stream has ended; triggers cleanup and recording pickup.
 * @param {{postId: string, contentId: string, endUrl: string, urlForMagnet: string, obsToken: string}} data
 *   Field names taken from the 'endStream' handler's destructuring below.
 */
const endStream = (data) => {
  StreamLiveManager.emit('endStream',data)
}
module.exports = {startedStream,endStream}
//-----------------------------------------
/**
 * Interval handles keyed by post id, one registry per polling phase:
 * waiting for the stream to go live, tracking viewer counts while live,
 * and waiting for the recorded mp4's magnet link after the stream ends.
 */
const intervalsWaitingAlive = {}
const intervalsStreamingViewers = {}
const intervalsWaitingMp4 = {}
/**
 * Stop and forget the polling interval registered for `postId` in the
 * registry named by `map`. A falsy `postId`, an unknown registry name, or
 * a missing entry are all ignored silently.
 * @param {string} postId
 * @param {'intervalsWaitingAlive'|'intervalsStreamingViewers'|'intervalsWaitingMp4'} map
 */
const clearStreamInterval = (postId, map) => {
  if (!postId) {
    return
  }
  const registries = {
    intervalsWaitingAlive,
    intervalsStreamingViewers,
    intervalsWaitingMp4
  }
  if (!Object.prototype.hasOwnProperty.call(registries, map)) {
    return
  }
  const registry = registries[map]
  if (!registry[postId]) {
    return
  }
  clearInterval(registry[postId])
  delete registry[postId]
}
/**
 * Phase 1: after a stream is announced, poll `statusUrl` every 2 seconds
 * until the backend reports it live, then flip the post's content item to
 * 'live' in the user graph and hand off to the 'followStream' phase. The
 * poller is force-stopped after 10 minutes if the stream never goes live.
 * Fetch/JSON errors are deliberately swallowed so polling keeps retrying.
 */
StreamLiveManager.on('awaitStream', data => {
  const { postId, contentId, statusUrl } = data
  // Replace any poller already registered for this post.
  // Fix: previously called clearStreamInterval(intervalsWaitingAlive[postId]),
  // passing the interval handle as `postId` and omitting the registry name,
  // so the stale interval was never actually cleared.
  if (intervalsWaitingAlive[postId]) {
    clearStreamInterval(postId, "intervalsWaitingAlive")
  }
  const user = require('../services/gunDB/Mediator').getUser()
  intervalsWaitingAlive[postId] = setInterval(async () => {
    try {
      const res = await fetch(statusUrl)
      const j = await res.json()
      if (!j.isLive) {
        return
      }
      user
        .get(Key.POSTS_NEW)
        .get(postId)
        .get('contentItems')
        .get(contentId)
        .get('liveStatus')
        .put('live')
      clearStreamInterval(postId, "intervalsWaitingAlive")
      StreamLiveManager.emit('followStream', data)
      //eslint-disable-next-line no-empty
    } catch {}
  }, 2 * 1000)
  // Kill the poller after 10 minutes regardless of outcome.
  setTimeout(() => {
    clearStreamInterval(postId, "intervalsWaitingAlive")
  }, 10 * 60 * 1000)
})
/**
 * Phase 2: while a stream is live, poll `statusUrl` every 5 seconds and
 * mirror the reported viewer count into the post's content item in the
 * user graph. Non-numeric viewer counts are ignored; fetch/JSON errors are
 * swallowed so polling keeps retrying.
 */
StreamLiveManager.on('followStream', ({ postId, contentId, statusUrl }) => {
  if (intervalsStreamingViewers[postId]) {
    clearStreamInterval(postId, "intervalsStreamingViewers")
  }
  const user = require('../services/gunDB/Mediator').getUser()
  const pollViewers = async () => {
    try {
      const response = await fetch(statusUrl)
      const status = await response.json()
      if (typeof status.viewers === 'number') {
        user
          .get(Key.POSTS_NEW)
          .get(postId)
          .get('contentItems')
          .get(contentId)
          .get('viewersCounter')
          .put(status.viewers)
      }
      //eslint-disable-next-line no-empty
    } catch {}
  }
  intervalsStreamingViewers[postId] = setInterval(pollViewers, 5 * 1000)
})
/**
 * Phase 3: when a stream ends, stop the viewer-count poller, flip the
 * content item back to 'waiting', notify the transcoder to finalize the
 * recording (`endUrl`), then poll `urlForMagnet` every 5 seconds until the
 * recording's magnet link is available; publish it and mark the item
 * 'wasLive'. Poll errors are swallowed so polling keeps retrying.
 */
StreamLiveManager.on('endStream', data => {
  const { postId, contentId, endUrl, urlForMagnet, obsToken } = data
  // Consistency: use the shared logger instead of console.log, matching the
  // rest of the services.
  const logger = require('../config/log')
  logger.info("ending stream!")
  clearStreamInterval(postId, "intervalsStreamingViewers")
  if (intervalsWaitingMp4[postId]) {
    clearStreamInterval(postId, "intervalsWaitingMp4")
  }
  const user = require('../services/gunDB/Mediator').getUser()
  user
    .get(Key.POSTS_NEW)
    .get(postId)
    .get('contentItems')
    .get(contentId)
    .get('liveStatus')
    .put('waiting')
  // Fix: this fetch's promise was floating — an unreachable endUrl would
  // surface as an unhandled rejection. Failures are now logged instead.
  fetch(endUrl, {
    headers: {
      'Authorization': `Bearer ${obsToken}`
    }
  }).catch(e => {
    logger.error('endStream: failed to notify endUrl: ' + e.message)
  })
  intervalsWaitingMp4[postId] = setInterval(async () => {
    try {
      const res = await fetch(urlForMagnet)
      const j = await res.json()
      if (!j.magnet) {
        return
      }
      user
        .get(Key.POSTS_NEW)
        .get(postId)
        .get('contentItems')
        .get(contentId)
        .get('liveStatus')
        .put('wasLive')
      user
        .get(Key.POSTS_NEW)
        .get(postId)
        .get('contentItems')
        .get(contentId)
        .get('playbackMagnet')
        .put(j.magnet)
      clearStreamInterval(postId, "intervalsWaitingMp4")
      //eslint-disable-next-line no-empty
    } catch {}
  }, 5 * 1000)
})

View file

@ -1,9 +1,23 @@
//@ts-nocheck TODO- fix types
const { gunUUID } = require("../utils")
const logger = require('../config/log')
class TipsCB {
listeners = {}
addSocket(postID,socket){
console.log("subbing new socket for post: "+postID)
postsEnabled = {}
enablePostNotifications(postID){
const accessId = gunUUID()
this.postsEnabled[accessId] = postID
return accessId
}
addSocket(accessId,socket){
if(!this.postsEnabled[accessId]){
return "invalid access id"
}
const postID = this.postsEnabled[accessId]
logger.info("subbing new socket for post: "+postID)
if(!this.listeners[postID]){
this.listeners[postID] = []

View file

@ -3,7 +3,7 @@ const setAccessControlHeaders = (req, res) => {
res.header("Access-Control-Allow-Methods", "OPTIONS,POST,GET,PUT,DELETE")
res.header(
"Access-Control-Allow-Headers",
"Origin, X-Requested-With, Content-Type, Accept, Authorization, public-key-for-decryption, encryption-device-id, public-key-for-decryption"
"Origin, X-Requested-With, Content-Type, Accept, Authorization, public-key-for-decryption, encryption-device-id, public-key-for-decryption,x-shock-hybrid-relay-id-x"
);
};

File diff suppressed because it is too large Load diff

View file

@ -1,12 +1,32 @@
/**
* @prettier
*/
// @ts-check
const ECCrypto = require('eccrypto')
const ECC = require('../utils/ECC')
/**
* This API run's private key.
*/
const runPrivateKey = ECCrypto.generatePrivate()
/**
* This API run's public key.
*/
const runPublicKey = ECCrypto.getPublic(runPrivateKey)
process.on('uncaughtException', e => {
console.log('something bad happened!')
console.log(e)
})
/**
* Module dependencies.
*/
const server = program => {
const Http = require('http')
const Https = require('https')
const FS = require('fs')
const Express = require('express')
const Crypto = require('crypto')
const Dotenv = require('dotenv')
@ -14,12 +34,8 @@ const server = program => {
const Path = require('path')
const { Logger: CommonLogger } = require('shock-common')
const binaryParser = require('socket.io-msgpack-parser')
const { fork } = require('child_process')
const EventEmitter = require('events')
const ECC = require('../utils/ECC')
const LightningServices = require('../utils/lightningServices')
const Encryption = require('../utils/encryptionStore')
const app = Express()
const compression = require('compression')
@ -27,12 +43,20 @@ const server = program => {
const session = require('express-session')
const methodOverride = require('method-override')
const qrcode = require('qrcode-terminal')
const relayClient = require('hybrid-relay-client/build')
const {
unprotectedRoutes,
sensitiveRoutes,
nonEncryptedRoutes
} = require('../utils/protectedRoutes')
/**
* An offline-only private key used for authenticating a client's key
* exchange. Neither the tunnel nor the WWW should see this private key, it
* should only be served through STDOUT (via QR or else).
*/
const accessSecret = ECCrypto.generatePrivate()
const accessSecretBase64 = accessSecret.toString('base64')
// load app default configuration data
const defaults = require('../config/defaults')(program.mainnet)
const rootFolder = program.rootPath || process.resourcesPath || __dirname
@ -45,10 +69,7 @@ const server = program => {
const tunnelHost = process.env.LOCAL_TUNNEL_SERVER || defaults.localtunnelHost
// setup winston logging ==========
const logger = require('../config/log')(
program.logfile || defaults.logfile,
program.loglevel || defaults.loglevel
)
const logger = require('../config/log')
CommonLogger.setLogger(logger)
@ -56,33 +77,8 @@ const server = program => {
require('../utils/server-utils')(module)
logger.info('Mainnet Mode:', !!program.mainnet)
const tunnelTimeout = 5000
let latestAliveTunnel = 0
let tunnelHealthInterval = null
const tunnelHealthManager = new EventEmitter()
tunnelHealthManager.on('fork', ({ params, cb }) => {
if (latestAliveTunnel !== 0 && latestAliveTunnel < tunnelTimeout) {
return
}
clearInterval(tunnelHealthInterval)
tunnelHealthInterval = setInterval(() => {
if (Date.now() - latestAliveTunnel > tunnelTimeout) {
console.log('oh no! tunnel is dead, will restart it now')
tunnelHealthManager.emit('fork', { params, cb })
}
}, 2000)
const forked = fork('src/tunnel.js')
forked.on('message', msg => {
//console.log('Message from child', msg);
if (msg && msg.type === 'info') {
cb(msg.tunnel)
}
latestAliveTunnel = Date.now()
})
forked.send(params)
})
if (process.env.DISABLE_SHOCK_ENCRYPTION === 'true') {
if (process.env.SHOCK_ENCRYPTION_ECC === 'false') {
logger.error('Encryption Mode: false')
} else {
logger.info('Encryption Mode: true')
@ -107,89 +103,30 @@ const server = program => {
.digest('hex')
}
const cacheCheck = ({ req, res, args, send }) => {
if (
(process.env.SHOCK_CACHE === 'true' || !process.env.SHOCK_CACHE) &&
req.method === 'GET'
) {
const dataHash = hashData(args[0]).slice(-8)
res.set('shock-cache-hash', dataHash)
logger.debug('shock-cache-hash:', req.headers['shock-cache-hash'])
logger.debug('Data Hash:', dataHash)
if (
!req.headers['shock-cache-hash'] &&
(process.env.CACHE_HEADERS_MANDATORY === 'true' ||
!process.env.CACHE_HEADERS_MANDATORY)
) {
logger.warn(
"Request is missing 'shock-cache-hash' header, please make sure to include that in each GET request in order to benefit from reduced data usage"
)
return { cached: false, hash: dataHash }
}
if (req.headers['shock-cache-hash'] === dataHash) {
logger.debug('Same Hash Detected!')
args[0] = null
res.status(304)
send.apply(res, args)
return { cached: true, hash: dataHash }
}
return { cached: false, hash: dataHash }
}
return { cached: false, hash: null }
}
/**
* @param {Express.Request} req
* @param {Express.Response} res
* @param {(() => void)} next
*/
const modifyResponseBody = (req, res, next) => {
const legacyDeviceId = req.headers['x-shockwallet-device-id']
const deviceId = req.headers['encryption-device-id']
const oldSend = res.send
if (nonEncryptedRoutes.includes(req.path)) {
console.log({
deviceId,
encryptionDisabled: process.env.SHOCK_ENCRYPTION_ECC === 'false',
unprotectedRoute: nonEncryptedRoutes.includes(req.path)
})
if (
nonEncryptedRoutes.includes(req.path) ||
process.env.SHOCK_ENCRYPTION_ECC === 'false'
) {
next()
return
}
if (legacyDeviceId) {
res.send = (...args) => {
if (args[0] && args[0].encryptedData && args[0].encryptionKey) {
logger.warn('Response loop detected!')
oldSend.apply(res, args)
return
}
const { cached, hash } = cacheCheck({ req, res, args, send: oldSend })
if (cached) {
return
}
// arguments[0] (or `data`) contains the response body
const authorized = Encryption.isAuthorizedDevice({
deviceId: legacyDeviceId
})
const encryptedMessage = authorized
? Encryption.encryptMessage({
message: args[0] ? args[0] : {},
deviceId: legacyDeviceId,
metadata: {
hash
}
})
: args[0]
args[0] = JSON.stringify(encryptedMessage)
oldSend.apply(res, args)
}
}
if (deviceId) {
// @ts-expect-error
res.send = (...args) => {
if (args[0] && args[0].ciphertext && args[0].iv) {
logger.warn('Response loop detected!')
@ -197,28 +134,40 @@ const server = program => {
return
}
const authorized = ECC.isAuthorizedDevice({
deviceId
})
if (typeof deviceId !== 'string' || !deviceId) {
// TODO
}
const authorized = ECC.devicePublicKeys.has(deviceId)
// Using classic promises syntax to avoid
// modifying res.send's return type
if (authorized) {
ECC.encryptMessage({
deviceId,
message: args[0]
}).then(encryptedMessage => {
if (authorized && process.env.SHOCK_ENCRYPTION_ECC !== 'false') {
const devicePub = Buffer.from(ECC.devicePublicKeys.get(deviceId))
ECCrypto.encrypt(devicePub, Buffer.from(args[0], 'utf-8')).then(
encryptedMessage => {
args[0] = JSON.stringify(encryptedMessage)
oldSend.apply(res, args)
})
}
)
}
if (!authorized || process.env.SHOCK_ENCRYPTION_ECC === 'false') {
if (!authorized) {
logger.warn(
`An unauthorized Device ID is contacting the API: ${deviceId}`
)
logger.warn(
`Authorized Device IDs: ${[...ECC.devicePublicKeys.keys()].join(
', '
)}`
)
}
args[0] = JSON.stringify(args[0])
oldSend.apply(res, args)
}
}
}
next()
}
@ -236,17 +185,19 @@ const server = program => {
await LightningServices.init()
}
await new Promise((resolve, reject) => {
await /** @type {Promise<void>} */ (new Promise((resolve, reject) => {
LightningServices.services.lightning.getInfo({}, (err, res) => {
if (err && err.code !== 12) {
if (
err &&
!err.details.includes('wallet not created') &&
!err.details.includes('wallet locked')
) {
reject(err)
} else {
resolve()
}
})
})
const auth = require('../services/auth/auth')
}))
app.use(compression())
@ -291,53 +242,12 @@ const server = program => {
await Storage.init({
dir: storageDirectory
})
if (program.tunnel) {
// setup localtunnel ==========
const [tunnelToken, tunnelSubdomain, tunnelUrl] = await Promise.all([
Storage.getItem('tunnel/token'),
Storage.getItem('tunnel/subdomain'),
Storage.getItem('tunnel/url')
])
const tunnelOpts = { port: serverPort, host: tunnelHost }
if (tunnelToken && tunnelSubdomain) {
tunnelOpts.tunnelToken = tunnelToken
tunnelOpts.subdomain = tunnelSubdomain
logger.info('Recreating tunnel... with subdomain: ' + tunnelSubdomain)
} else {
logger.info('Creating new tunnel... ')
}
tunnelHealthManager.emit('fork', {
params: tunnelOpts,
cb: async tunnel => {
logger.info('Tunnel created! connect to: ' + tunnel.url)
const dataToQr = JSON.stringify({
internalIP: tunnel.url,
walletPort: 443,
externalIP: tunnel.url
})
qrcode.generate(dataToQr, { small: true })
if (!tunnelToken) {
await Promise.all([
Storage.setItem('tunnel/token', tunnel.token),
Storage.setItem('tunnel/subdomain', tunnel.clientId),
Storage.setItem('tunnel/url', tunnel.url)
])
}
if (tunnelUrl && tunnel.url !== tunnelUrl) {
logger.error('New tunnel URL different from OLD tunnel url')
logger.error('OLD: ' + tunnelUrl + ':80')
logger.error('NEW: ' + tunnel.url + ':80')
logger.error('New pair required')
await Promise.all([
Storage.setItem('tunnel/token', tunnel.token),
Storage.setItem('tunnel/subdomain', tunnel.clientId),
Storage.setItem('tunnel/url', tunnel.url)
])
}
}
})
}
}) /*
if (false) {
await Storage.removeItem('tunnel/token')
await Storage.removeItem('tunnel/subdomain')
await Storage.removeItem('tunnel/url')
}*/
const storePersistentRandomField = async ({ fieldName, length = 16 }) => {
const randomField = await Storage.getItem(fieldName)
@ -346,7 +256,7 @@ const server = program => {
return randomField
}
const newValue = await Encryption.generateRandomString()
const newValue = await ECC.generateRandomString(length)
await Storage.setItem(fieldName, newValue)
return newValue
}
@ -371,7 +281,7 @@ const server = program => {
})
)
app.use(bodyParser.urlencoded({ extended: 'true' }))
app.use(bodyParser.json())
app.use(bodyParser.json({ limit: '500kb' }))
app.use(bodyParser.json({ type: 'application/vnd.api+json' }))
app.use(methodOverride())
// WARNING
@ -384,22 +294,21 @@ const server = program => {
res.status(500).send({ status: 500, errorMessage: 'internal error' })
})
const CA = LightningServices.servicesConfig.lndCertPath
const CA_KEY = CA.replace('cert', 'key')
const CA = program.httpsCert
const CA_KEY = program.httpsCertKey
const createServer = () => {
try {
// if (LightningServices.servicesConfig.lndCertPath && program.usetls) {
// const [key, cert] = await Promise.all([
// FS.readFile(CA_KEY),
// FS.readFile(CA)
// ])
// const httpsServer = Https.createServer({ key, cert }, app)
if (program.useTLS) {
const key = FS.readFileSync(CA_KEY, 'utf-8')
const cert = FS.readFileSync(CA, 'utf-8')
// return httpsServer
// }
const httpsServer = Https.createServer({ key, cert }, app)
const httpServer = Http.Server(app)
return httpsServer
}
const httpServer = new Http.Server(app)
return httpServer
} catch (err) {
logger.error(err.message)
@ -407,7 +316,7 @@ const server = program => {
'An error has occurred while finding an LND cert to use to open an HTTPS server'
)
logger.warn('Falling back to opening an HTTP server...')
const httpServer = Http.Server(app)
const httpServer = new Http.Server(app)
return httpServer
}
}
@ -434,21 +343,21 @@ const server = program => {
}
})
const Sockets = require('./sockets')(io)
require('./routes')(
app,
{
...defaults,
lndAddress: program.lndAddress
lndAddress: program.lndAddress,
cliArgs: program
},
Sockets,
{
serverHost,
serverPort,
usetls: program.usetls,
useTLS: program.useTLS,
CA,
CA_KEY
CA_KEY,
runPrivateKey,
runPublicKey,
accessSecret
}
)
@ -457,14 +366,63 @@ const server = program => {
// app.use(bodyParser.json({limit: '100000mb'}));
app.use(bodyParser.json({ limit: '50mb' }))
app.use(bodyParser.urlencoded({ limit: '50mb', extended: true }))
if (process.env.DISABLE_SHOCK_ENCRYPTION !== 'true') {
if (process.env.SHOCK_ENCRYPTION_ECC !== 'false') {
app.use(modifyResponseBody)
}
if (program.tunnel) {
const [relayToken, relayId, relayUrl] = await Promise.all([
Storage.getItem('relay/token'),
Storage.getItem('relay/id'),
Storage.getItem('relay/url')
])
const opts = {
relayId,
relayToken,
address: tunnelHost,
port: serverPort
}
logger.info(opts)
relayClient.default(opts, async (connected, params) => {
if (connected) {
const noProtocolAddress = params.address.replace(
/^http(?<secure>s)?:\/\//giu,
''
)
await Promise.all([
Storage.setItem('relay/token', params.relayToken),
Storage.setItem('relay/id', params.relayId),
Storage.setItem('relay/url', noProtocolAddress)
])
const dataToQr = JSON.stringify({
URI: `https://${params.relayId}@${noProtocolAddress}`,
// Null-check is just to please typescript
accessSecret: accessSecretBase64
})
qrcode.generate(dataToQr, { small: false })
logger.info(`connect to ${params.relayId}@${noProtocolAddress}:443`)
console.log('\n')
console.log(`Here's your access secret:`)
console.log('\n')
console.log(accessSecretBase64)
console.log('\n')
console.log('\n')
} else {
logger.error('!! Relay did not connect to server !!')
}
})
} else {
console.log('\n')
console.log(`Here's your access secret:`)
console.log('\n')
console.log(accessSecretBase64)
console.log('\n')
console.log('\n')
}
serverInstance.listen(serverPort, serverHost)
logger.info('App listening on ' + serverHost + ' port ' + serverPort)
// @ts-expect-error
module.server = serverInstance
} catch (err) {
logger.error({ exception: err, message: err.message, code: err.code })

View file

@ -3,21 +3,14 @@
*/
// @ts-check
const logger = require('winston')
const logger = require('../config/log')
const Common = require('shock-common')
const mapValues = require('lodash/mapValues')
const auth = require('../services/auth/auth')
const Encryption = require('../utils/encryptionStore')
const LightningServices = require('../utils/lightningServices')
const {
getGun,
getUser,
isAuthenticated
} = require('../services/gunDB/Mediator')
const { deepDecryptIfNeeded } = require('../services/gunDB/rpc')
const GunEvents = require('../services/gunDB/contact-api/events')
const SchemaManager = require('../services/schema')
const { isAuthenticated } = require('../services/gunDB/Mediator')
const initGunDBSocket = require('../services/gunDB/sockets')
const { encryptedEmit, encryptedOn } = require('../utils/ECC/socket')
const TipsForwarder = require('../services/tipsCallback')
/**
@ -29,204 +22,7 @@ module.exports = (
/** @type {import('socket.io').Server} */
io
) => {
// This should be used for encrypting and emitting your data
const encryptedEmitLegacy = ({ eventName, data, socket }) => {
try {
if (Encryption.isNonEncrypted(eventName)) {
return socket.emit(eventName, data)
}
const deviceId = socket.handshake.auth['x-shockwallet-device-id']
const authorized = Encryption.isAuthorizedDevice({ deviceId })
if (!deviceId) {
throw {
field: 'deviceId',
message: 'Please specify a device ID'
}
}
if (!authorized) {
throw {
field: 'deviceId',
message: 'Please exchange keys with the API before using the socket'
}
}
const encryptedMessage = Encryption.encryptMessage({
message: data,
deviceId
})
return socket.emit(eventName, encryptedMessage)
} catch (err) {
logger.error(
`[SOCKET] An error has occurred while encrypting an event (${eventName}):`,
err
)
return socket.emit('encryption:error', err)
}
}
const onNewInvoice = (socket, subID) => {
const { lightning } = LightningServices.services
logger.warn('Subscribing to invoices socket...' + subID)
const stream = lightning.subscribeInvoices({})
stream.on('data', data => {
logger.info('[SOCKET] New invoice data:', data)
encryptedEmitLegacy({ eventName: 'invoice:new', data, socket })
if (!data.settled) {
return
}
SchemaManager.AddOrder({
type: 'invoice',
amount: parseInt(data.amt_paid_sat, 10),
coordinateHash: data.r_hash.toString('hex'),
coordinateIndex: parseInt(data.add_index, 10),
inbound: true,
toLndPub: data.payment_addr
})
})
stream.on('end', () => {
logger.info('New invoice stream ended, starting a new one...')
// Prevents call stack overflow exceptions
//process.nextTick(() => onNewInvoice(socket))
})
stream.on('error', err => {
logger.error('New invoice stream error:' + subID, err)
})
stream.on('status', status => {
logger.warn('New invoice stream status:' + subID, status)
switch (status.code) {
case 0: {
logger.info('[event:invoice:new] stream ok')
break
}
case 1: {
logger.info(
'[event:invoice:new] stream canceled, probably socket disconnected'
)
break
}
case 2: {
logger.warn('[event:invoice:new] got UNKNOWN error status')
break
}
case 12: {
logger.warn(
'[event:invoice:new] LND locked, new registration in 60 seconds'
)
process.nextTick(() =>
setTimeout(() => onNewInvoice(socket, subID), 60000)
)
break
}
case 13: {
//https://grpc.github.io/grpc/core/md_doc_statuscodes.html
logger.error('[event:invoice:new] INTERNAL LND error')
break
}
case 14: {
logger.error(
'[event:invoice:new] LND disconnected, sockets reconnecting in 30 seconds...'
)
process.nextTick(() =>
setTimeout(() => onNewInvoice(socket, subID), 30000)
)
break
}
default: {
logger.error('[event:invoice:new] UNKNOWN LND error')
}
}
})
return () => {
stream.cancel()
}
}
const onNewTransaction = (socket, subID) => {
const { lightning } = LightningServices.services
const stream = lightning.subscribeTransactions({})
logger.warn('Subscribing to transactions socket...' + subID)
stream.on('data', data => {
logger.info('[SOCKET] New transaction data:', data)
Promise.all(data.dest_addresses.map(SchemaManager.isTmpChainOrder)).then(
responses => {
const hasOrder = responses.some(res => res !== false)
if (hasOrder && data.num_confirmations > 0) {
//buddy needs to manage this
} else {
//business as usual
encryptedEmitLegacy({ eventName: 'transaction:new', data, socket })
}
}
)
})
stream.on('end', () => {
logger.info('New transactions stream ended, starting a new one...')
//process.nextTick(() => onNewTransaction(socket))
})
stream.on('error', err => {
logger.error('New transactions stream error:' + subID, err)
})
stream.on('status', status => {
logger.info('New transactions stream status:' + subID, status)
switch (status.code) {
case 0: {
logger.info('[event:transaction:new] stream ok')
break
}
case 1: {
logger.info(
'[event:transaction:new] stream canceled, probably socket disconnected'
)
break
}
case 2: {
//Happens to fire when the grpc client lose access to macaroon file
logger.warn('[event:transaction:new] got UNKNOWN error status')
break
}
case 12: {
logger.warn(
'[event:transaction:new] LND locked, new registration in 60 seconds'
)
process.nextTick(() =>
setTimeout(() => onNewTransaction(socket, subID), 60000)
)
break
}
case 13: {
//https://grpc.github.io/grpc/core/md_doc_statuscodes.html
logger.error('[event:transaction:new] INTERNAL LND error')
break
}
case 14: {
logger.error(
'[event:transaction:new] LND disconnected, sockets reconnecting in 30 seconds...'
)
process.nextTick(() =>
setTimeout(() => onNewTransaction(socket, subID), 30000)
)
break
}
default: {
logger.error('[event:transaction:new] UNKNOWN LND error')
}
}
})
return () => {
stream.cancel()
}
}
io.on('connection', socket => {
logger.info(`io.onconnection`)
logger.info('socket.handshake', socket.handshake)
io.on('connect', socket => {
const isLNDSocket = !!socket.handshake.auth.IS_LND_SOCKET
const isNotificationsSocket = !!socket.handshake.auth
.IS_NOTIFICATIONS_SOCKET
@ -240,89 +36,6 @@ module.exports = (
const subID = Math.floor(Math.random() * 1000).toString()
const isNotifications = isNotificationsSocket ? 'notifications' : ''
logger.info('[LND] New LND Socket created:' + isNotifications + subID)
/* not used by wallet anymore
const cancelInvoiceStream = onNewInvoice(socket, subID)
const cancelTransactionStream = onNewTransaction(socket, subID)
socket.on('disconnect', () => {
logger.info('LND socket disconnected:' + isNotifications + subID)
cancelInvoiceStream()
cancelTransactionStream()
})*/
}
})
io.of('gun').on('connect', socket => {
// TODO: off()
try {
if (!isAuthenticated()) {
socket.emit(Common.Constants.ErrorCode.NOT_AUTH)
return
}
const emit = encryptedEmit(socket)
const { $shock, publicKeyForDecryption } = socket.handshake.auth
const [root, path, method] = $shock.split('::')
// eslint-disable-next-line init-declarations
let node
if (root === '$gun') {
node = getGun()
} else if (root === '$user') {
node = getUser()
} else {
node = getGun().user(root)
}
for (const bit of path.split('>')) {
node = node.get(bit)
}
/**
* @param {ValidDataValue} data
* @param {string} key
*/
const listener = async (data, key) => {
try {
if (
typeof publicKeyForDecryption === 'string' &&
publicKeyForDecryption !== 'undefined' &&
publicKeyForDecryption.length > 15
) {
const decData = await deepDecryptIfNeeded(
data,
publicKeyForDecryption
)
emit('$shock', decData, key)
} else {
emit('$shock', data, key)
}
} catch (err) {
logger.error(
`Error for gun rpc socket, query ${$shock} -> ${err.message}`
)
}
}
if (method === 'on') {
node.on(listener)
} else if (method === 'open') {
node.open(listener)
} else if (method === 'map.on') {
node.map().on(listener)
} else if (method === 'map.once') {
node.map().once(listener)
} else {
throw new TypeError(
`Invalid method for gun rpc call : ${method}, query: ${$shock}`
)
}
} catch (err) {
logger.error('GUNRPC: ' + err.message)
}
})
@ -334,6 +47,10 @@ module.exports = (
*/
try {
logger.info(
'Connect event for socket with handshake: ',
socket.handshake.auth
)
if (!isAuthenticated()) {
socket.emit(Common.Constants.ErrorCode.NOT_AUTH)
return
@ -344,7 +61,16 @@ module.exports = (
const { services } = LightningServices
const { service, method, args: unParsed } = socket.handshake.auth
const {
service,
method,
args: unParsed,
isInitial
} = socket.handshake.auth
if (isInitial) {
return
}
const args = JSON.parse(unParsed)
@ -387,10 +113,15 @@ module.exports = (
call.write(args)
})
} catch (err) {
logger.error(err)
logger.error('LNDRPC: ' + err.message)
}
})
io.of('gun').on('connect', socket => {
initGunDBSocket(socket)
})
/**
* @param {string} token
* @returns {Promise<boolean>}
@ -415,7 +146,7 @@ module.exports = (
/** @type {null|NodeJS.Timeout} */
let pingIntervalID = null
// TODO: Unused?
io.of('shockping').on(
'connect',
// TODO: make this sync
@ -467,231 +198,15 @@ module.exports = (
}
)
// TODO: do this through rpc
const emptyUnsub = () => {}
let chatsUnsub = emptyUnsub
io.of('chats').on('connect', async socket => {
const on = encryptedOn(socket)
const emit = encryptedEmit(socket)
try {
if (!isAuthenticated()) {
logger.info(
'not authenticated in gun for chats socket, will send NOT_AUTH'
)
emit(Common.Constants.ErrorCode.NOT_AUTH)
return
}
logger.info('now checking token for chats socket')
const { token } = socket.handshake.auth
const isAuth = await isValidToken(token)
if (!isAuth) {
logger.warn('invalid token for chats socket')
emit(Common.Constants.ErrorCode.NOT_AUTH)
return
}
if (chatsUnsub !== emptyUnsub) {
logger.error(
'Tried to set chats socket twice, this might be due to an app restart and the old socket not being recycled by socket.io in time, will disable the older subscription, which means the old socket wont work and data will be sent to this new socket instead'
)
chatsUnsub()
chatsUnsub = emptyUnsub
}
/**
* @param {Common.Schema.Chat[]} chats
*/
const onChats = chats => {
const processed = chats.map(
({
didDisconnect,
id,
lastSeenApp,
messages,
recipientPublicKey
}) => {
/** @type {Common.Schema.Chat} */
const stripped = {
didDisconnect,
id,
lastSeenApp,
messages,
recipientAvatar: null,
recipientDisplayName: null,
recipientPublicKey
}
return stripped
}
)
emit('$shock', processed)
}
chatsUnsub = GunEvents.onChats(onChats)
on('disconnect', () => {
chatsUnsub()
chatsUnsub = emptyUnsub
})
} catch (e) {
logger.error('Error inside chats socket connect: ' + e.message)
emit('$error', e.message)
}
})
let sentReqsUnsub = emptyUnsub
io.of('sentReqs').on('connect', async socket => {
const on = encryptedOn(socket)
const emit = encryptedEmit(socket)
try {
if (!isAuthenticated()) {
logger.info(
'not authenticated in gun for sentReqs socket, will send NOT_AUTH'
)
emit(Common.Constants.ErrorCode.NOT_AUTH)
return
}
logger.info('now checking token for sentReqs socket')
const { token } = socket.handshake.auth
const isAuth = await isValidToken(token)
if (!isAuth) {
logger.warn('invalid token for sentReqs socket')
emit(Common.Constants.ErrorCode.NOT_AUTH)
return
}
if (sentReqsUnsub !== emptyUnsub) {
logger.error(
'Tried to set sentReqs socket twice, this might be due to an app restart and the old socket not being recycled by io in time, will disable the older subscription, which means the old socket wont work and data will be sent to this new socket instead'
)
sentReqsUnsub()
sentReqsUnsub = emptyUnsub
}
/**
* @param {Common.Schema.SimpleSentRequest[]} sentReqs
*/
const onSentReqs = sentReqs => {
const processed = sentReqs.map(
({
id,
recipientChangedRequestAddress,
recipientPublicKey,
timestamp
}) => {
/**
* @type {Common.Schema.SimpleSentRequest}
*/
const stripped = {
id,
recipientAvatar: null,
recipientChangedRequestAddress,
recipientDisplayName: null,
recipientPublicKey,
timestamp
}
return stripped
}
)
emit('$shock', processed)
}
sentReqsUnsub = GunEvents.onSimplerSentRequests(onSentReqs)
on('disconnect', () => {
sentReqsUnsub()
sentReqsUnsub = emptyUnsub
})
} catch (e) {
logger.error('Error inside sentReqs socket connect: ' + e.message)
emit('$error', e.message)
}
})
let receivedReqsUnsub = emptyUnsub
io.of('receivedReqs').on('connect', async socket => {
const on = encryptedOn(socket)
const emit = encryptedEmit(socket)
try {
if (!isAuthenticated()) {
logger.info(
'not authenticated in gun for receivedReqs socket, will send NOT_AUTH'
)
emit(Common.Constants.ErrorCode.NOT_AUTH)
return
}
logger.info('now checking token for receivedReqs socket')
const { token } = socket.handshake.auth
const isAuth = await isValidToken(token)
if (!isAuth) {
logger.warn('invalid token for receivedReqs socket')
emit(Common.Constants.ErrorCode.NOT_AUTH)
return
}
if (receivedReqsUnsub !== emptyUnsub) {
logger.error(
'Tried to set receivedReqs socket twice, this might be due to an app restart and the old socket not being recycled by socket.io in time, will disable the older subscription, which means the old socket wont work and data will be sent to this new socket instead'
)
receivedReqsUnsub()
receivedReqsUnsub = emptyUnsub
}
/**
* @param {ReadonlyArray<Common.SimpleReceivedRequest>} receivedReqs
*/
const onReceivedReqs = receivedReqs => {
const processed = receivedReqs.map(({ id, requestorPK, timestamp }) => {
/** @type {Common.Schema.SimpleReceivedRequest} */
const stripped = {
id,
requestorAvatar: null,
requestorDisplayName: null,
requestorPK,
timestamp
}
return stripped
})
emit('$shock', processed)
}
receivedReqsUnsub = GunEvents.onSimplerReceivedRequests(onReceivedReqs)
on('disconnect', () => {
receivedReqsUnsub()
receivedReqsUnsub = emptyUnsub
})
} catch (e) {
logger.error('Error inside receivedReqs socket connect: ' + e.message)
emit('$error', e.message)
}
})
io.of('streams').on('connect', socket => {
console.log('a user connected')
socket.on('postID', postID => {
TipsForwarder.addSocket(postID, socket)
logger.info('a user connected')
socket.on('accessId', accessId => {
const err = TipsForwarder.addSocket(accessId, socket)
if (err) {
logger.info('err invalid socket for tips notifications ' + err)
socket.disconnect(true)
}
})
})
return io
}

View file

@ -1,14 +1,59 @@
/*
 * Child process that opens and supervises a localtunnel HTTP tunnel on
 * behalf of the parent API process. All communication is over the Node
 * IPC channel (process.send / the 'message' event).
 */
const localtunnel = require('localtunnel')
// Reference to the active tunnel so the shutdown handlers below can close it.
let tunnelRef = null
// The parent sends the tunnel options (port, host, subdomain, token, ...).
process.on('message', async (tunnelOpts) => {
  console.log('Message from parent:', tunnelOpts);
  const tunnel = await localtunnel(tunnelOpts)
  tunnelRef = tunnel
  console.log(tunnelOpts)
  const {subdomain:tunnelSubdomain} = tunnelOpts
  // Report the assigned URL/credentials back to the parent.
  process.send({ type: 'info', tunnel:{
    url:tunnel.url,
    token:tunnel.token,
    clientId:tunnel.clientId,
  } });
  // If a specific subdomain was requested but a different one was assigned
  // and no token came back, the reservation failed: close the tunnel and
  // exit so the parent can react.
  if(tunnelSubdomain !== tunnel.clientId && !tunnel.token){
    console.log("AM killing it yo!")
    console.log(tunnel.clientId)
    tunnel.close()
    // eslint-disable-next-line no-process-exit
    process.exit()
  }
});
// Heartbeat: lets the parent detect that this child is still alive.
setInterval(() => {
  process.send({ type: "ping" });
}, 1000);
// Best-effort cleanup: close the tunnel before the process dies.
process.on('uncaughtException', ()=> {
  if(tunnelRef){
    console.log("clogin yo")
    tunnelRef.close()
  }
  // eslint-disable-next-line no-process-exit
  process.exit()
});
process.on('SIGINT', ()=>{
  if(tunnelRef){
    console.log("clogin yo")
    tunnelRef.close()
  }
  // eslint-disable-next-line no-process-exit
  process.exit()})
// NOTE(review): 'exit' handlers run synchronously; tunnel.close() may not
// complete here — confirm whether this last-resort cleanup is effective.
process.on('exit', ()=> {
  if(tunnelRef){
    console.log("clogin yo")
    tunnelRef.close()
  }
});
/*
const f = async () => {
const tunnelOpts =
{ port: 9835, host: 'https://tunnel.rip' ,
tunnelToken:'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJkYXRhIjp7InRpbWVzdGFtcCI6MTYxODg2NTAxNjkzNywic3ViZG9tYWluIjoidGVycmlibGUtZWFyd2lnLTU2In0sImlhdCI6MTYxODg2NTAxNiwiZXhwIjo1MjE4ODY1MDE2fQ.m2H4B1NatErRqcriB9lRfusZmLdRee9-VXACfnKT-QY',
subdomain:'terrible-earwig-56'
}
const tunnel = await localtunnel(tunnelOpts)
console.log(tunnel)
tunnelRef = tunnel
}
f()*/

158
testscript.js Normal file
View file

@ -0,0 +1,158 @@
/**
* @format
* Example usage:
* ```bash
 * node testscript.js on [user|gun|capdog|{publicKey}].[path] [alias] [pass]
* ```
* If no alias/pass provided, new user will be created, otherwise gun will
* authenticate with the provided credentials.
*/
// @ts-check
const Gun = require('gun')
const randomWords = require('random-words')
/**
 * Wraps the random-words generator so the caller always receives a single
 * string.
 * @returns {string}
 * @throws {TypeError} If the generator yields anything other than a string.
 */
const randomWord = () => {
  const generated = randomWords()
  if (typeof generated === 'string') {
    return generated
  }
  throw new TypeError(`Not string`)
}
require('gun/nts')
require('gun/lib/open')
require('gun/lib/load')
const args = process.argv.slice(2)
// eslint-disable-next-line prefer-const
let [method, path, alias, pass] = args
const fileName = randomWord()
if (!alias) {
alias = '$$__GENERATE'
}
if (!pass) {
pass = '$$__GENERATE'
}
console.log('\n')
console.log(`method: ${method}`)
console.log(`path: ${path}`)
console.log(`fileName: ${fileName}`)
console.log('\n')
// @ts-expect-error
const gun = /** @type {import('./services/gunDB/contact-api/SimpleGUN').GUNNode} */ (Gun(
{
axe: false,
multicast: false,
peers: ['https://gun.shock.network/gun', 'https://gun-eu.shock.network'],
file: `TESTSCRIPT-RADATA/${fileName}`
}
))
const user = gun.user()
/**
 * Generic gun listener: logs a received graph value together with its key,
 * surrounded by blank lines for readability.
 * @param {any} data
 * @param {string} key
 */
const cb = (data, key) => {
  const spacer = '\n'
  console.log(spacer)
  console.log(`key: ${key}`)
  console.log(spacer)
  console.log(data)
  console.log(spacer)
}
// Entry point: authenticate (or create a throwaway user), resolve the
// requested graph path and attach the listener chosen via `method`.
;(async () => {
  try {
    // gun
    //   .get('handshakeNodes')
    //   .map()
    //   .once(cb)
    // wait for user data to be received
    // await new Promise(res => setTimeout(res, 10000))
    // Create a brand-new user when no credentials were supplied, otherwise
    // authenticate with the provided alias/pass.
    const ack = await new Promise(res => {
      if (alias === '$$__GENERATE' || pass === '$$__GENERATE') {
        alias = randomWord()
        pass = randomWord()
        console.log(`alias: ${alias}`)
        console.log(`pass: ${pass}`)
        user.create(alias, pass, _ack => {
          res(_ack)
        })
      } else {
        user.auth(alias, pass, _ack => {
          res(_ack)
        })
      }
    })
    if (typeof ack.err === 'string') {
      throw new Error(ack.err)
    } else if (typeof ack.pub === 'string' || typeof user._.sea === 'object') {
      console.log(`\n`)
      console.log(`public key:`)
      console.log(`\n`)
      // user.create() acks expose `pub` directly; auth acks expose it via
      // the user's SEA pair.
      console.log(ack.pub || user._.sea.pub)
      console.log(`\n`)
      // clock skew
      await new Promise(res => setTimeout(res, 2000))
    } else {
      throw new Error('unknown error, ack: ' + JSON.stringify(ack))
    }
    // First path segment selects the root node (gun / user / a well-known
    // alias / a raw public key); the rest are get() hops.
    const [root, ...keys] = path.split('.')
    let node = (() => {
      if (root === 'gun') {
        return gun
      }
      if (root === 'user') {
        return user
      }
      if (root === 'capdog') {
        return gun.user(
          'qsgziGQS99sPUxV1CRwwRckn9cG6cJ3prbDsrbL7qko.oRbCaVKwJFQURWrS1pFhkfAzrkEvkQgBRIUz9uoWtrg'
        )
      }
      if (root === 'explorador') {
        return gun.user(
          `zBQkPb1ohbdjVp_29TKFXyv_0g3amKgRJRqKr0E-Oyk.yB1P4UmOrzkGuPEL5zUgLETJWyYpM9K3l2ycNlt8jiY`
        )
      }
      if (root === 'pleb') {
        return gun.user(
          `e1C60yZ1Cm3Mkceq7L9SmH6QQ7zsDdbibPFeQz7tNsk._1VlqJNo8BIJmzz2D5WELiMiRjBh3DBlDvzC6fNltZw`
        )
      }
      if (root === 'boblazar') {
        return gun.user(
          `g6fcZ_1zyFwV1jR1eNK1GTUr2sSlEDL1D5vBsSvKoKg.2OA9MQHO2c1wjv6L-VPBFf36EZXjgQ1nnZFbOE9_5-o`
        )
      }
      // Anything else is treated as a raw public key.
      return gun.user(root)
    })()
    keys.forEach(key => (node = node.get(key)))
    // Attach the listener variant requested on the command line.
    if (method === 'once') node.once(cb)
    if (method === 'load') node.load(cb)
    if (method === 'on') node.on(cb)
    if (method === 'map.once') node.map().once(cb)
    if (method === 'map.on') node.map().on(cb)
  } catch (e) {
    console.log(`\nCaught error in app:\n`)
    console.log(e)
  }
})()

View file

@ -1,5 +1,6 @@
{
"include": ["./services/gunDB/**/*.*", "./utils/lightningServices/**/*.*"],
"exclude": ["./node_modules/**/*.*"],
"compilerOptions": {
/* Basic Options */
// "incremental": true, /* Enable incremental compilation */
@ -20,7 +21,7 @@
// "removeComments": true, /* Do not emit comments to output. */
"noEmit": true /* Do not emit outputs. */,
// "importHelpers": true, /* Import emit helpers from 'tslib'. */
// "downlevelIteration": true, /* Provide full support for iterables in 'for-of', spread, and destructuring when targeting 'ES5' or 'ES3'. */
"downlevelIteration": true /* Provide full support for iterables in 'for-of', spread, and destructuring when targeting 'ES5' or 'ES3'. */,
// "isolatedModules": true, /* Transpile each file as a separate module (similar to 'ts.transpileModule'). */
/* Strict Type-Checking Options */
@ -47,7 +48,7 @@
// "typeRoots": [], /* List of folders to include type definitions from. */
// "types": [], /* Type declaration files to be included in compilation. */
// "allowSyntheticDefaultImports": true, /* Allow default imports from modules with no default export. This does not affect code emit, just typechecking. */
"esModuleInterop": true /* Enables emit interoperability between CommonJS and ES Modules via creation of namespace objects for all imports. Implies 'allowSyntheticDefaultImports'. */
"esModuleInterop": true /* Enables emit interoperability between CommonJS and ES Modules via creation of namespace objects for all imports. Implies 'allowSyntheticDefaultImports'. */,
// "preserveSymlinks": true, /* Do not resolve the real path of symlinks. */
// "allowUmdGlobalAccess": true, /* Allow accessing UMD globals from modules. */
@ -60,5 +61,6 @@
/* Experimental Options */
// "experimentalDecorators": true, /* Enables experimental support for ES7 decorators. */
// "emitDecoratorMetadata": true, /* Enables experimental support for emitting type metadata for decorators. */
"useUnknownInCatchVariables": false
}
}

233
utils/ECC/ECC.js Normal file
View file

@ -0,0 +1,233 @@
/** @format */
const Storage = require('node-persist')
const { fork } = require('child_process')
const FieldError = require('../fieldError')
const logger = require('../../config/log')
const {
generateRandomString,
convertBufferToBase64,
processKey,
convertToEncryptedMessageResponse,
convertUTF8ToBuffer,
convertToEncryptedMessage,
convertBase64ToBuffer
} = require('./crypto')
const { invoke } = require('./subprocess')
const cryptoSubprocess = fork('utils/ECC/subprocess')
const nodeKeyPairs = new Map()
const devicePublicKeys = new Map()
/**
* @typedef {object} EncryptedMessage
* @prop {string} ciphertext
* @prop {string} iv
* @prop {string} mac
* @prop {string} ephemPublicKey
*/
/**
 * Checks if the message supplied is encrypted or not, i.e. whether every
 * field of an ECIES payload is present and truthy.
 * @param {EncryptedMessage} message
 * @returns {boolean} Always a real boolean (the previous version leaked the
 * last truthy field value, e.g. the ephemPublicKey string).
 */
const isEncryptedMessage = message =>
  Boolean(
    message &&
      message.ciphertext &&
      message.iv &&
      message.mac &&
      message.ephemPublicKey
  )
/**
* @typedef {object} Pair
* @prop {Buffer} privateKey
* @prop {Buffer} publicKey
* @prop {string} privateKeyBase64
* @prop {string} publicKeyBase64
*/
/**
 * Generates a new encryption key pair that will be used
 * when communicating with the deviceId specified. Reuses the cached pair
 * when one already exists for this device.
 * @param {string} deviceId
 * @returns {Promise<Pair>}
 * @throws {Error} If the crypto subprocess returns an invalid key pair.
 */
const generateKeyPair = async deviceId => {
  try {
    const existingKey = nodeKeyPairs.get(deviceId)
    if (existingKey) {
      logger.info('Device ID is already trusted')
      return {
        ...existingKey,
        publicKeyBase64: convertBufferToBase64(existingKey.publicKey),
        privateKeyBase64: convertBufferToBase64(existingKey.privateKey)
      }
    }
    // Key generation is delegated to a forked subprocess so the (CPU-bound)
    // EC math does not block the main event loop.
    const privateKey = await invoke('generatePrivate', [], cryptoSubprocess)
    const publicKey = await invoke('getPublic', [privateKey], cryptoSubprocess)
    // Validate BEFORE deriving the base64 forms: a non-Buffer reply would
    // otherwise be silently stringified by convertBufferToBase64 (any value
    // has a toString) before the guard could fire.
    if (!Buffer.isBuffer(privateKey) || !Buffer.isBuffer(publicKey)) {
      throw new Error('Invalid KeyPair Generated')
    }
    const privateKeyBase64 = convertBufferToBase64(privateKey)
    const publicKeyBase64 = convertBufferToBase64(publicKey)
    // Cache only validated pairs.
    nodeKeyPairs.set(deviceId, {
      privateKey,
      publicKey
    })
    return {
      privateKey,
      publicKey,
      privateKeyBase64,
      publicKeyBase64
    }
  } catch (err) {
    logger.error(
      '[ENCRYPTION] An error has occurred while generating a new KeyPair',
      err
    )
    logger.error('Device ID:', deviceId)
    throw err
  }
}
/**
 * Checks if the specified device has a keypair generated, i.e. whether this
 * node holds a public key for it.
 * @param {{ deviceId: string }} arg0
 * @returns {boolean}
 */
const isAuthorizedDevice = ({ deviceId }) => devicePublicKeys.has(deviceId)

/**
 * Generates a new keypair for the deviceId specified and
 * saves its publicKey locally (the key-exchange step of the handshake).
 * @param {{ deviceId: string, publicKey: string }} arg0 publicKey is base64.
 * @returns {Promise<{ success: true, APIPublicKey: string, hostId: any }>}
 */
const authorizeDevice = async ({ deviceId, publicKey }) => {
  // hostId identifies this API instance to clients across restarts.
  // NOTE(review): assumes Storage.init() has already run — verify at startup.
  const hostId = await Storage.get('encryption/hostId')
  devicePublicKeys.set(deviceId, convertBase64ToBuffer(publicKey))
  const keyPair = await generateKeyPair(deviceId)
  return {
    success: true,
    APIPublicKey: keyPair.publicKeyBase64,
    hostId
  }
}
/**
 * Encrypts the specified message using the specified deviceId's
 * public key.
 * @param {{ deviceId: string, message: string | number | boolean }} arg0
 * @returns {Promise<import('./crypto').EncryptedMessageResponse>}
 * @throws {FieldError} When no public key is known for the device.
 */
const encryptMessage = async ({ message = '', deviceId }) => {
  // Numbers/booleans are stringified before encryption.
  const parsedMessage = message.toString()
  // decryptMessage checks for known devices while this one checks for
  // authorized ones instead, why?
  const publicKey = devicePublicKeys.get(deviceId)
  if (!publicKey) {
    throw new FieldError({
      field: 'deviceId',
      message: 'encryptMessage() -> Unauthorized Device ID detected'
    })
  }
  // processKey() accepts either a Buffer or a base64 string.
  const processedPublicKey = processKey(publicKey)
  const messageBuffer = convertUTF8ToBuffer(parsedMessage)
  // The ECIES encryption itself runs in the forked crypto subprocess.
  const encryptedMessage = await invoke(
    'encrypt',
    [processedPublicKey, messageBuffer],
    cryptoSubprocess
  )
  const encryptedMessageResponse = {
    ciphertext: encryptedMessage.ciphertext,
    iv: encryptedMessage.iv,
    mac: encryptedMessage.mac,
    ephemPublicKey: encryptedMessage.ephemPublicKey,
    // Debug metadata; underscore-prefixed so clients can ignore it.
    metadata: {
      _deviceId: deviceId,
      _publicKey: publicKey
    }
  }
  // Normalizes Buffers to base64 strings for transport.
  return convertToEncryptedMessageResponse(encryptedMessageResponse)
}
/**
 * Decrypts the specified message using the API keypair
 * associated with the specified deviceId.
 * @param {{ encryptedMessage: import('./crypto').EncryptedMessageResponse, deviceId: string }} arg0
 * @returns {Promise<string>} The decrypted plaintext, UTF-8 decoded.
 * @throws {FieldError} When no keypair exists for the device.
 */
const decryptMessage = async ({ encryptedMessage, deviceId }) => {
  // encryptMessages checks for authorized devices while this one checks for
  // known ones, why?
  // Looked up outside the try block so the catch below can report whether a
  // keypair was present when a "bad mac" occurs.
  const keyPair = nodeKeyPairs.get(deviceId)
  try {
    if (!keyPair) {
      throw new FieldError({
        field: 'deviceId',
        message: 'decryptMessage() -> Unknown Device ID detected'
      })
    }
    const processedPrivateKey = processKey(keyPair.privateKey)
    // Decryption is delegated to the forked crypto subprocess.
    const decryptedMessage = await invoke(
      'decrypt',
      [processedPrivateKey, convertToEncryptedMessage(encryptedMessage)],
      cryptoSubprocess
    )
    const parsedMessage = decryptedMessage.toString('utf8')
    return parsedMessage
  } catch (err) {
    logger.error(err)
    // "bad mac" usually means the payload was encrypted against a different
    // keypair (e.g. stale keys after a restart) — log extra context.
    if (err.message?.toLowerCase() === 'bad mac') {
      logger.error(
        'Bad Mac!',
        err,
        convertToEncryptedMessage(encryptedMessage),
        !!keyPair
      )
    }
    throw err
  }
}
/**
 * Generates a fresh ECC private key in the crypto subprocess.
 * @returns {Promise<Buffer>}
 */
const generatePrivate = () => invoke('generatePrivate', [], cryptoSubprocess)

/**
 * Derives the public key for the given private key in the crypto subprocess.
 * @param {Buffer} priv
 * @returns {Promise<Buffer>}
 */
const getPublic = priv => invoke('getPublic', [priv], cryptoSubprocess)

module.exports = {
  isAuthorizedDevice,
  isEncryptedMessage,
  generateKeyPair,
  encryptMessage,
  decryptMessage,
  authorizeDevice,
  generateRandomString,
  // Exposed for inspection/tests; mutating these maps bypasses authorization.
  nodeKeyPairs,
  devicePublicKeys,
  generatePrivate,
  getPublic,
  /**
   * Used for tests.
   */
  killECCCryptoSubprocess() {
    cryptoSubprocess.kill()
  }
}

147
utils/ECC/ECC.spec.js Normal file
View file

@ -0,0 +1,147 @@
/**
* @format
*/
// @ts-check
const Path = require('path')
const Storage = require('node-persist')
const expect = require('expect')
const words = require('random-words')
const {
authorizeDevice,
decryptMessage,
encryptMessage,
generateKeyPair,
isAuthorizedDevice,
killECCCryptoSubprocess,
generatePrivate,
getPublic
} = require('./ECC')
// Pseudo-unique device-id generator for tests: 24 random words joined by
// dashes (collision chance is negligible for test purposes).
const uuid = () => {
  const arr = /** @type {string[]} */ (words({ exactly: 24 }))
  return arr.join('-')
}

// node-persist storage lives next to this spec so runs don't pollute the
// real data directory.
const storageDirectory = Path.resolve(__dirname, `./.test-storage`)

console.log(`Storage directory: ${storageDirectory}`)

describe('ECC', () => {
  describe('generateKeyPair()', () => {
    it('generates a keypair', async () => {
      expect.hasAssertions()
      const pair = await generateKeyPair(uuid())
      expect(pair.privateKey).toBeInstanceOf(Buffer)
      expect(typeof pair.privateKeyBase64 === 'string').toBeTruthy()
      expect(pair.publicKey).toBeInstanceOf(Buffer)
      expect(typeof pair.publicKeyBase64 === 'string').toBeTruthy()
    })

    // Pairs are cached per device id, so a second call must be a cache hit.
    it('returns the same pair for the same device', async () => {
      expect.hasAssertions()
      const id = uuid()
      const pair = await generateKeyPair(id)
      const pairAgain = await generateKeyPair(id)
      expect(pairAgain).toStrictEqual(pair)
    })
  })

  describe('authorizeDevice()/isAuthorizedDevice()', () => {
    it('authorizes a device given its ID', async () => {
      expect.hasAssertions()
      // authorizeDevice() reads the persisted hostId, so storage must be
      // initialized first.
      await Storage.init({
        dir: storageDirectory
      })
      const deviceId = uuid()
      const pair = await generateKeyPair(deviceId)
      await authorizeDevice({ deviceId, publicKey: pair.publicKeyBase64 })
      expect(isAuthorizedDevice({ deviceId })).toBeTruthy()
    })
  })

  describe('encryptMessage()/decryptMessage()', () => {
    before(() =>
      Storage.init({
        dir: storageDirectory
      })
    )

    it('throws if provided with an unauthorized device id when encrypting', async () => {
      expect.hasAssertions()
      const deviceId = uuid()
      try {
        await encryptMessage({
          message: uuid(),
          deviceId
        })
        throw new Error('encryptMessage() did not throw')
      } catch (_) {
        expect(true).toBeTruthy()
      }
    })

    it('throws if provided with an unknown device id when decrypting', async () => {
      expect.hasAssertions()
      const deviceId = uuid()
      try {
        await decryptMessage({
          deviceId,
          encryptedMessage: {
            ciphertext: uuid(),
            ephemPublicKey: uuid(),
            iv: uuid(),
            mac: uuid(),
            metadata: uuid()
          }
        })
        throw new Error('decryptMessage() did not throw')
      } catch (_) {
        expect(true).toBeTruthy()
      }
    })

    // Full round-trip through the crypto subprocess.
    it('encrypts and decrypts messages when given a known device id', async () => {
      expect.hasAssertions()
      const deviceId = uuid()
      const pair = await generateKeyPair(deviceId)
      await authorizeDevice({ deviceId, publicKey: pair.publicKeyBase64 })
      const message = 'Bitcoin fixes this'
      const encryptedMessage = await encryptMessage({ deviceId, message })
      const decrypted = await decryptMessage({
        deviceId,
        encryptedMessage
      })
      expect(decrypted).toEqual(message)
    })
  })

  describe('generatePrivate()', () => {
    it('generates a private key', async () => {
      expect.hasAssertions()
      const priv = await generatePrivate()
      expect(priv).toBeInstanceOf(Buffer)
    })
  })

  describe('getPublic()', () => {
    it('derives a public key from a private key', async () => {
      expect.hasAssertions()
      const priv = await generatePrivate()
      const pub = await getPublic(priv)
      expect(pub).toBeInstanceOf(Buffer)
    })
  })

  // Stop the forked crypto subprocess so mocha can exit cleanly.
  after(killECCCryptoSubprocess)
})

View file

@ -1,5 +1,14 @@
const { Buffer } = require("buffer");
const FieldError = require("../fieldError")
/**
* @format
*/
const { Buffer } = require('buffer')
const { fork } = require('child_process')
const FieldError = require('../fieldError')
const { invoke } = require('./subprocess')
const cryptoSubprocess = fork('utils/ECC/subprocess')
/**
* @typedef {object} EncryptedMessageBuffer
@ -7,6 +16,7 @@ const FieldError = require("../fieldError")
* @prop {Buffer} iv
* @prop {Buffer} mac
* @prop {Buffer} ephemPublicKey
* @prop {any?} metadata
*/
/**
@ -15,96 +25,122 @@ const FieldError = require("../fieldError")
* @prop {string} iv
* @prop {string} mac
* @prop {string} ephemPublicKey
* @prop {any?} metadata
*/
const generateRandomString = async (length = 16) => {
if (length % 2 !== 0 || length < 2) {
throw new Error('Random string length must be an even number.')
}
const res = await invoke('generateRandomString', [length], cryptoSubprocess)
return res
}
/**
* @param {string} value
*/
const convertUTF8ToBuffer = (value) => Buffer.from(value, 'utf-8');
const convertUTF8ToBuffer = value => Buffer.from(value, 'utf-8')
/**
* @param {string} value
*/
const convertBase64ToBuffer = (value) => Buffer.from(value, 'base64');
const convertBase64ToBuffer = value => Buffer.from(value, 'base64')
/**
* @param {Buffer} buffer
*/
const convertBufferToBase64 = (buffer) => buffer.toString("base64");
const convertBufferToBase64 = buffer => buffer.toString('base64')
/**
* @param {Buffer | string} key
*/
const processKey = (key) => {
const processKey = key => {
if (Buffer.isBuffer(key)) {
return key;
return key
}
const convertedKey = convertBase64ToBuffer(key);
return convertedKey;
};
const convertedKey = convertBase64ToBuffer(key)
return convertedKey
}
/**
* @param {EncryptedMessageBuffer | EncryptedMessageResponse} encryptedMessage
* @returns {EncryptedMessageResponse}
*/
const convertToEncryptedMessageResponse = (encryptedMessage) => {
if (Buffer.isBuffer(encryptedMessage.ciphertext) &&
const convertToEncryptedMessageResponse = encryptedMessage => {
if (
Buffer.isBuffer(encryptedMessage.ciphertext) &&
Buffer.isBuffer(encryptedMessage.iv) &&
Buffer.isBuffer(encryptedMessage.mac) &&
Buffer.isBuffer(encryptedMessage.ephemPublicKey)) {
Buffer.isBuffer(encryptedMessage.ephemPublicKey)
) {
return {
ciphertext: convertBufferToBase64(encryptedMessage.ciphertext),
iv: convertBufferToBase64(encryptedMessage.iv),
mac: convertBufferToBase64(encryptedMessage.mac),
ephemPublicKey: convertBufferToBase64(encryptedMessage.ephemPublicKey)
};
ephemPublicKey: convertBufferToBase64(encryptedMessage.ephemPublicKey),
metadata: encryptedMessage.metadata
}
}
if (typeof encryptedMessage.ciphertext === "string") {
if (typeof encryptedMessage.ciphertext === 'string') {
// @ts-ignore
return encryptedMessage;
return encryptedMessage
}
throw new FieldError({
field: "encryptedMessage",
message: "Unknown encrypted message format"
});
};
field: 'encryptedMessage',
message: 'Unknown encrypted message format'
})
}
/**
* @param {EncryptedMessageBuffer | EncryptedMessageResponse} encryptedMessage
* @returns {EncryptedMessageBuffer}
*/
const convertToEncryptedMessage = (encryptedMessage) => {
if (encryptedMessage.ciphertext instanceof Buffer &&
const convertToEncryptedMessage = encryptedMessage => {
if (
encryptedMessage.ciphertext instanceof Buffer &&
encryptedMessage.iv instanceof Buffer &&
encryptedMessage.mac instanceof Buffer &&
encryptedMessage.ephemPublicKey instanceof Buffer) {
encryptedMessage.ephemPublicKey instanceof Buffer
) {
// @ts-ignore
return encryptedMessage;
return encryptedMessage
}
if (typeof encryptedMessage.ciphertext === "string" &&
typeof encryptedMessage.iv === "string" &&
typeof encryptedMessage.mac === "string" &&
typeof encryptedMessage.ephemPublicKey === "string") {
if (
typeof encryptedMessage.ciphertext === 'string' &&
typeof encryptedMessage.iv === 'string' &&
typeof encryptedMessage.mac === 'string' &&
typeof encryptedMessage.ephemPublicKey === 'string'
) {
return {
ciphertext: convertBase64ToBuffer(encryptedMessage.ciphertext),
iv: convertBase64ToBuffer(encryptedMessage.iv),
mac: convertBase64ToBuffer(encryptedMessage.mac),
ephemPublicKey: convertBase64ToBuffer(encryptedMessage.ephemPublicKey)
};
ephemPublicKey: convertBase64ToBuffer(encryptedMessage.ephemPublicKey),
metadata: encryptedMessage.metadata
}
}
throw new FieldError({
field: "encryptedMessage",
message: "Unknown encrypted message format"
});
};
field: 'encryptedMessage',
message: 'Unknown encrypted message format'
})
}
module.exports = {
generateRandomString,
convertUTF8ToBuffer,
convertBase64ToBuffer,
convertBufferToBase64,
convertToEncryptedMessage,
convertToEncryptedMessageResponse,
processKey
processKey,
/**
* Used for tests.
*/
killCryptoCryptoSubprocess() {
cryptoSubprocess.kill()
}
}

39
utils/ECC/crypto.spec.js Normal file
View file

@ -0,0 +1,39 @@
/**
* @format
*/
// @ts-check
const expect = require('expect')
const {
generateRandomString,
convertBase64ToBuffer,
convertBufferToBase64,
killCryptoCryptoSubprocess
} = require('./crypto')
describe('crypto', () => {
  describe('generateRandomString()', () => {
    it('creates a random string of the specified length', async () => {
      expect.hasAssertions()
      // Lengths must be even: the implementation generates len/2 random
      // bytes and hex-encodes them.
      const base = Math.ceil(Math.random() * 100)
      const len = base % 2 !== 0 ? base + 1 : base
      const result = await generateRandomString(len)
      expect(result.length).toEqual(len)
    })
  })

  describe('Buffer <> String <> Buffer', () => {
    // Round-trip: base64-safe string -> Buffer -> base64 string.
    it('preserves values', async () => {
      const rnd = await generateRandomString(24)
      const asBuffer = convertBase64ToBuffer(rnd)
      const asStringAgain = convertBufferToBase64(asBuffer)
      expect(asStringAgain).toEqual(rnd)
    })
  })

  // Stop the forked crypto subprocess so mocha can exit cleanly.
  after(killCryptoCryptoSubprocess)
})

View file

@ -1,152 +1,8 @@
/** @format */
const ECCrypto = require('eccrypto')
const Storage = require('node-persist')
const FieldError = require('../fieldError')
const {
convertBufferToBase64,
processKey,
convertToEncryptedMessageResponse,
convertUTF8ToBuffer,
convertToEncryptedMessage,
convertBase64ToBuffer
} = require('./crypto')
const nodeKeyPairs = new Map()
const devicePublicKeys = new Map()
/**
* @typedef {object} EncryptedMessage
* @prop {string} ciphertext
* @prop {string} iv
* @prop {string} mac
* @prop {string} ephemPublicKey
* @format
*/
//@ts-check
/**
* Checks if the message supplied is encrypted or not
* @param {EncryptedMessage} message
*/
const isEncryptedMessage = message =>
message &&
message.ciphertext &&
message.iv &&
message.mac &&
message.ephemPublicKey
module.exports = require('./ECC')
/**
* Generates a new encryption key pair that will be used
* when communicating with the deviceId specified
* @param {string} deviceId
*/
const generateKeyPair = deviceId => {
const privateKey = ECCrypto.generatePrivate()
const publicKey = ECCrypto.getPublic(privateKey)
const privateKeyBase64 = convertBufferToBase64(privateKey)
const publicKeyBase64 = convertBufferToBase64(publicKey)
nodeKeyPairs.set(deviceId, {
privateKey,
publicKey
})
return {
privateKey,
publicKey,
privateKeyBase64,
publicKeyBase64
}
}
/**
* Checks if the specified device has a keypair generated
* @param {{ deviceId: string }} arg0
*/
const isAuthorizedDevice = ({ deviceId }) => devicePublicKeys.has(deviceId)
/**
* Generates a new keypair for the deviceId specified and
* saves its publicKey locally
* @param {{ deviceId: string, publicKey: string }} arg0
*/
const authorizeDevice = async ({ deviceId, publicKey }) => {
const hostId = await Storage.get('encryption/hostId')
devicePublicKeys.set(deviceId, convertBase64ToBuffer(publicKey))
const keyPair = generateKeyPair(deviceId)
return {
success: true,
APIPublicKey: keyPair.publicKeyBase64,
hostId
}
}
/**
* Encrypts the specified message using the specified deviceId's
* public key
* @param {{ deviceId: string, message: string | number | boolean }} arg0
* @returns {Promise<import('./crypto').EncryptedMessageResponse>}
*/
const encryptMessage = async ({ message = '', deviceId }) => {
const parsedMessage = message.toString()
const publicKey = devicePublicKeys.get(deviceId)
if (!publicKey) {
throw new FieldError({
field: 'deviceId',
message: 'Unauthorized Device ID detected'
})
}
const processedPublicKey = processKey(publicKey)
const messageBuffer = convertUTF8ToBuffer(parsedMessage)
const encryptedMessage = await ECCrypto.encrypt(
processedPublicKey,
messageBuffer
)
const encryptedMessageResponse = {
ciphertext: encryptedMessage.ciphertext,
iv: encryptedMessage.iv,
mac: encryptedMessage.mac,
ephemPublicKey: encryptedMessage.ephemPublicKey
}
return convertToEncryptedMessageResponse(encryptedMessageResponse)
}
/**
* Decrypts the specified message using the API keypair
* associated with the specified deviceId
* @param {{ encryptedMessage: EncryptedMessage, deviceId: string }} arg0
*/
const decryptMessage = async ({ encryptedMessage, deviceId }) => {
try {
const keyPair = nodeKeyPairs.get(deviceId)
if (!keyPair) {
throw new FieldError({
field: 'deviceId',
message: 'Unauthorized Device ID detected'
})
}
const processedPrivateKey = processKey(keyPair.privateKey)
const decryptedMessage = await ECCrypto.decrypt(
processedPrivateKey,
convertToEncryptedMessage(encryptedMessage)
)
const parsedMessage = decryptedMessage.toString('utf8')
return parsedMessage
} catch (err) {
console.error(err)
throw err
}
}
module.exports = {
isAuthorizedDevice,
isEncryptedMessage,
generateKeyPair,
encryptMessage,
decryptMessage,
authorizeDevice
}
module.exports.convertToEncryptedMessage = require('./crypto').convertToEncryptedMessage

View file

@ -2,7 +2,7 @@
* @format
*/
const Common = require('shock-common')
const logger = require('winston')
const logger = require('../../config/log')
const { safeParseJSON } = require('../JSON')
const ECC = require('./index')
@ -20,12 +20,15 @@ const nonEncryptedEvents = [
* @typedef {import('../../services/gunDB/Mediator').EncryptedEmission} EncryptedEmission
* @typedef {import('../../services/gunDB/Mediator').EncryptedEmissionLegacy} EncryptedEmissionLegacy
* @typedef {import('../../services/gunDB/contact-api/SimpleGUN').ValidDataValue} ValidDataValue
* @typedef {(data: any, callback: (error?: any, data?: any) => void) => void} SocketOnListener
*/
/**
* @param {string} eventName
*/
const isNonEncrypted = eventName => nonEncryptedEvents.includes(eventName)
const isNonEncrypted = eventName =>
nonEncryptedEvents.includes(eventName) ||
process.env.SHOCK_ENCRYPTION_ECC === 'false'
/**
* @param {SimpleSocket} socket
@ -83,7 +86,7 @@ const encryptedEmit = socket => async (eventName, ...args) => {
/**
* @param {SimpleSocket} socket
* @returns {(eventName: string, callback: (data: any) => void) => void}
* @returns {(eventName: string, callback: SocketOnListener) => void}
*/
const encryptedOn = socket => (eventName, callback) => {
try {
@ -110,9 +113,10 @@ const encryptedOn = socket => (eventName, callback) => {
}
}
socket.on(eventName, async data => {
socket.on(eventName, async (data, response) => {
try {
if (isNonEncrypted(eventName)) {
callback(data)
callback(data, response)
return
}
@ -122,9 +126,11 @@ const encryptedOn = socket => (eventName, callback) => {
encryptedMessage: data
})
callback(safeParseJSON(decryptedMessage))
callback(safeParseJSON(decryptedMessage), response)
return
}
})
callback(data, response)
} catch (err) {
logger.error(
`[SOCKET] An error has occurred while decrypting an event (${eventName}):`,
@ -133,10 +139,70 @@ const encryptedOn = socket => (eventName, callback) => {
socket.emit('encryption:error', err)
}
})
} catch (err) {
socket.emit('encryption:error', err)
}
}
/**
* @param {SimpleSocket} socket
* @param {(error?: any, data?: any) => void} callback
* @returns {(...args: any[]) => Promise<void>}
*/
const encryptedCallback = (socket, callback) => async (...args) => {
try {
if (process.env.SHOCK_ENCRYPTION_ECC === 'false') {
return callback(...args)
}
const deviceId = socket.handshake.auth.encryptionId
if (!deviceId) {
throw {
field: 'deviceId',
message: 'Please specify a device ID'
}
}
const authorized = ECC.isAuthorizedDevice({ deviceId })
if (!authorized) {
throw {
field: 'deviceId',
message: 'Please exchange keys with the API before using the socket'
}
}
const encryptedArgs = await Promise.all(
args.map(async data => {
if (!data) {
return data
}
const encryptedMessage = await ECC.encryptMessage({
message: typeof data === 'object' ? JSON.stringify(data) : data,
deviceId
})
return encryptedMessage
})
)
return callback(...encryptedArgs)
} catch (err) {
logger.error(
`[SOCKET] An error has occurred while emitting an event response:`,
err
)
return socket.emit('encryption:error', err)
}
}
module.exports = {
isNonEncrypted,
encryptedOn,
encryptedEmit
encryptedEmit,
encryptedCallback
}

183
utils/ECC/subprocess.js Normal file
View file

@ -0,0 +1,183 @@
/**
* @format
*/
const Crypto = require('crypto')
const ECCrypto = require('eccrypto')
const uuid = require('uuid/v1')
const { Buffer } = require('buffer')
const mapValues = require('lodash/mapValues')
const logger = require('../../config/log')
logger.info('crypto subprocess invoked')
// Keep the subprocess alive on unexpected failures: errors are logged here
// and surfaced to the parent per-request via the `err` reply field, rather
// than crashing the whole crypto worker.
process.on('uncaughtException', e => {
  logger.error('Uncaught exception inside crypto subprocess:')
  logger.error(e)
})

process.on('unhandledRejection', e => {
  logger.error('Unhandled rejection inside crypto subprocess:')
  logger.error(e)
})
/**
* @typedef {'generateRandomString' | 'convertUTF8ToBuffer'
* | 'convertBase64ToBuffer' | 'convertBufferToBase64' | 'generatePrivate'
* | 'getPublic' | 'encrypt' | 'decrypt'
* } Method
*/
/**
 * Re-hydrates Buffers that IPC JSON serialization flattened into
 * `{ type: 'Buffer', data: [...] }`, recursing into nested containers.
 * @param {any} obj
 * @returns {any} Same structure with every serialized Buffer restored.
 */
const processBufferAfterSerialization = obj => {
  if (typeof obj === 'object' && obj !== null) {
    if (obj.type === 'Buffer') {
      return Buffer.from(obj.data)
    }
    // Preserve arrays: running them through an object-mapper (the previous
    // lodash mapValues) silently turned them into index-keyed plain objects.
    if (Array.isArray(obj)) {
      return obj.map(processBufferAfterSerialization)
    }
    return Object.fromEntries(
      Object.entries(obj).map(([key, value]) => [
        key,
        processBufferAfterSerialization(value)
      ])
    )
  }
  return obj
}
/**
 * @typedef {object} Msg
 * @prop {any[]} args
 * @prop {string} id
 * @prop {Method} method
 */

/**
 * Dispatches one request from the parent process to the matching crypto
 * primitive and replies via process.send() carrying the same `id`, so the
 * parent's invoke() listener can match the reply to its pending promise.
 * @param {Msg} msg
 * @returns {Promise<void>}
 */
const handleMsg = async msg => {
  if (typeof msg !== 'object' || msg === null) {
    logger.error('Msg in crypto subprocess not an object')
    // Bail out: the destructuring / args.map below would throw on a
    // non-object (the previous version fell through and crashed).
    return
  }
  const { id, method } = msg
  // Re-hydrate Buffers that the IPC channel flattened to plain objects.
  const args = msg.args.map(processBufferAfterSerialization)
  try {
    if (method === 'generateRandomString') {
      const [length] = args
      // length/2 bytes hex-encode to exactly `length` characters.
      Crypto.randomBytes(length / 2, (err, buffer) => {
        if (err) {
          // @ts-expect-error
          process.send({
            id,
            err: err.message
          })
          return
        }
        const token = buffer.toString('hex')
        // @ts-expect-error
        process.send({
          id,
          payload: token
        })
      })
    }
    if (method === 'convertUTF8ToBuffer') {
      const [value] = args
      // @ts-expect-error
      process.send({
        id,
        payload: Buffer.from(value, 'utf8')
      })
    }
    if (method === 'convertBase64ToBuffer') {
      const [value] = args
      // @ts-expect-error
      process.send({
        id,
        payload: Buffer.from(value, 'base64')
      })
    }
    if (method === 'convertBufferToBase64') {
      const [buffer] = args
      // @ts-expect-error
      process.send({
        id,
        payload: buffer.toString('base64')
      })
    }
    if (method === 'generatePrivate') {
      // @ts-expect-error
      process.send({
        id,
        payload: ECCrypto.generatePrivate()
      })
    }
    if (method === 'getPublic') {
      const [privateKey] = args
      // @ts-expect-error
      process.send({
        id,
        payload: ECCrypto.getPublic(privateKey)
      })
    }
    if (method === 'encrypt') {
      const [processedPublicKey, messageBuffer] = args
      // @ts-expect-error
      process.send({
        id,
        payload: await ECCrypto.encrypt(processedPublicKey, messageBuffer)
      })
    }
    if (method === 'decrypt') {
      const [processedPrivateKey, encryptedMessage] = args
      // @ts-expect-error
      process.send({
        id,
        payload: await ECCrypto.decrypt(processedPrivateKey, encryptedMessage)
      })
    }
  } catch (e) {
    // @ts-expect-error
    process.send({
      // Include the request id: without it the parent's invoke() listener
      // never matches this error and its promise hangs forever.
      id,
      err: e.message
    })
  }
}
process.on('message', handleMsg)
/**
 * Sends one request to the crypto subprocess and resolves with its reply.
 * Replies are matched by a per-request uuid carried on the message.
 * @param {Method} method
 * @param {any[]} args
 * @param {import('child_process').ChildProcess} cryptoSubprocess
 * @returns {Promise<any>} Rejects with Error(msg.err) on a failed request.
 */
const invoke = (method, args, cryptoSubprocess) =>
  new Promise((res, rej) => {
    const requestId = uuid()

    /** @param {any} msg */
    const onReply = msg => {
      // Ignore replies belonging to other in-flight requests.
      if (msg.id !== requestId) {
        return
      }
      // One-shot listener: detach as soon as our reply arrives.
      cryptoSubprocess.off('message', onReply)
      if (msg.err) {
        rej(new Error(msg.err))
        return
      }
      // Buffers come back JSON-flattened over IPC; restore them.
      res(processBufferAfterSerialization(msg.payload))
    }

    cryptoSubprocess.on('message', onReply)
    cryptoSubprocess.send({ args, id: requestId, method })
  })
module.exports = {
invoke
}

761
utils/GunSmith/GunSmith.js Normal file
View file

@ -0,0 +1,761 @@
/**
* @format
*/
/* eslint-disable no-use-before-define */
/* eslint-disable func-style */
// @ts-no-check TODO: Temporarily disabled TS checking due to new GunDB version
/// <reference path="Smith.ts" />
/// <reference path="GunT.ts" />
const uuid = require('uuid/v1')
const mapValues = require('lodash/mapValues')
const { fork } = require('child_process')
const logger = require('../../config/log')
const { mergePuts, isPopulated } = require('./misc')
/**
 * Generates a 24-character random id using gun's own alphabet.
 * Copied from gun internals — note the charset intentionally matches gun's
 * (it omits 'Y'); do not "fix" it or ids will diverge from gun's format.
 * @returns {string}
 */
const gunUUID = () => {
  const alphabet =
    '0123456789ABCDEFGHIJKLMNOPQRSTUVWXZabcdefghijklmnopqrstuvwxyz'
  let out = ''
  for (let remaining = 24; remaining > 0; remaining--) {
    out += alphabet.charAt(Math.floor(Math.random() * alphabet.length))
  }
  return out
}
/**
 * Maps a '>'-separated path to its `on()` listeners. Kept at module level so
 * subscriptions can be replayed into a freshly forged subprocess.
 * @type {Record<string, Set<GunT.Listener>|undefined>}
 */
const pathToListeners = {}
/**
 * Maps a '>'-separated path to its `map().on()` listeners.
 * @type {Record<string, Set<GunT.Listener>|undefined>}
 */
const pathToMapListeners = {}
/**
 * Maps a one-shot load request id to its listener; the entry is removed once
 * the reply arrives.
 * @type {Record<string, GunT.LoadListener>}
 */
const idToLoadListener = {}
/**
 * Path to pending puts. Oldest to newest. Entries stay here until the
 * subprocess acks them, so unacked puts survive a re-forge.
 * @type {Record<string, Smith.PendingPut[]>}
 */
const pendingPuts = {}
/**
 * Routes a message coming from the gun subprocess to the matching load
 * listener, on()/map().on() listener set, or pending-put callback(s).
 * @param {Smith.GunMsg} msg
 */
const handleMsg = msg => {
  if (msg.type === 'load') {
    const { data, id, key } = msg
    const listener = idToLoadListener[id]
    if (listener) {
      listener(data, key)
      // load() listeners are one-shot.
      delete idToLoadListener[id]
    }
  }
  if (msg.type === 'on') {
    const { data, path } = msg
    // eslint-disable-next-line no-multi-assign
    const listeners =
      pathToListeners[path] || (pathToListeners[path] = new Set())
    for (const l of listeners) {
      // Listeners receive the last key in the path as their key argument.
      l(data, path.split('>')[path.split('>').length - 1])
    }
  }
  if (msg.type === 'map.on') {
    const { data, key, path } = msg
    // eslint-disable-next-line no-multi-assign
    const listeners =
      pathToMapListeners[path] || (pathToMapListeners[path] = new Set())
    for (const l of listeners) {
      l(data, key)
    }
  }
  if (msg.type === 'put') {
    const { ack, id, path } = msg
    const pendingPutsForPath = pendingPuts[path] || (pendingPuts[path] = [])
    // Single findIndex() instead of the previous find() + findIndex() double
    // scan over the same array.
    const idx = pendingPutsForPath.findIndex(pp => pp.id === id)
    if (idx !== -1) {
      const [pendingPut] = pendingPutsForPath.splice(idx, 1)
      if (pendingPut.cb) {
        pendingPut.cb(ack)
      }
    } else {
      logger.error(
        `Could not find request for put message from gun subprocess. Data will be logged below.`
      )
      // Note: no pendingPut exists in this branch, so only the message and
      // the outstanding puts are useful to log.
      console.log({
        msg,
        allPendingPuts: pendingPuts
      })
    }
  }
  if (msg.type === 'multiPut') {
    const { ack, ids, path } = msg
    const pendingPutsForPath = pendingPuts[path] || (pendingPuts[path] = [])
    // Ack every pending put covered by this batch, then drop them.
    const ackedPuts = pendingPutsForPath.filter(pp => ids.includes(pp.id))
    pendingPuts[path] = pendingPuts[path].filter(pp => !ids.includes(pp.id))
    ackedPuts.forEach(pp => {
      if (pp.cb) {
        pp.cb(ack)
      }
    })
  }
}
/** @type {ReturnType<typeof fork>} */
// eslint-disable-next-line init-declarations
let currentGun
let lastAlias = ''
let lastPass = ''
/** @type {GunT.UserPair|null} */
let lastPair = null
/** @type {import('gun/types/gun/IGunConstructorOptions').IGunConstructorOptions} */
let lastOpts = {}
let isAuthing = false
/**
 * Authenticates against the gun subprocess. On success the credentials and
 * pair are cached (for auto-auth after re-forges); on failure the cache is
 * cleared.
 * @param {string} alias
 * @param {string} pass
 * @returns {Promise<GunT.UserPair>} Resolves with the user's SEA pair.
 * @throws {Error} If another auth is already in flight.
 */
const auth = (alias, pass) => {
  logger.info(`Authing with ${alias}`)
  if (isAuthing) {
    throw new Error(`Double auth?`)
  }
  isAuthing = true
  return new Promise((res, rej) => {
    /** @type {Smith.SmithMsgAuth} */
    const msg = {
      alias,
      pass,
      type: 'auth'
    }
    // One-shot reply listener: detaches itself on the first 'auth' message.
    /** @param {Smith.GunMsg} msg */
    const _cb = msg => {
      if (msg.type === 'auth') {
        logger.info(`Received ${msg.ack.sea ? 'ok' : 'bad'} auth reply.`)
        currentGun.off('message', _cb)
        isAuthing = false
        const { ack } = msg
        if (ack.err) {
          lastAlias = ''
          lastPass = ''
          lastPair = null
          logger.info('Auth unsuccessful, cached credentials cleared.')
          rej(new Error(ack.err))
        } else if (ack.sea) {
          lastAlias = alias
          lastPass = pass
          lastPair = ack.sea
          logger.info('Auth successful, credentials cached.')
          res(ack.sea)
        } else {
          // Neither err nor sea: treat as a failed auth.
          lastAlias = ''
          lastPass = ''
          lastPair = null
          logger.info('Auth unsuccessful, cached credentials cleared.')
          rej(new Error('Auth: ack.sea undefined'))
        }
      }
    }
    currentGun.on('message', _cb)
    currentGun.send(msg)
    logger.info('Sent auth message.')
  })
}
/**
 * Re-authenticates with the previously cached alias/pass, if any; no-op when
 * nothing is cached.
 */
const autoAuth = async () => {
  if (lastAlias && lastPass) {
    logger.info('Credentials cached, will auth.')
    await auth(lastAlias, lastPass)
  } else {
    logger.info('No credentials cached, will not auto-auth')
  }
}
/**
 * Sends every pending (unacked) put to the current subprocess as one
 * multiPut message per path, merging each path's queued writes (oldest to
 * newest) into a single payload. The queue itself is only cleared when the
 * subprocess acks (see the 'multiPut' branch of handleMsg).
 * @throws {Error} If called while forging or authing.
 */
const flushPendingPuts = () => {
  if (isAuthing || isForging) {
    throw new Error('Tried to flush pending puts while authing or forging.')
  }
  const messages = Object.entries(pendingPuts).map(([path, puts]) => {
    /** @type {Smith.SmithMsgMultiPut} */
    const msg = {
      data: mergePuts(puts.map(pp => pp.data)),
      ids: puts.map(pp => pp.id),
      path,
      type: 'multiPut'
    }
    return msg
  })
  currentGun.send(messages)
  logger.info(`Sent ${messages.length} pending puts.`)
}
// True while a subprocess forge is in progress.
let isForging = false
/**
 * Resolves once neither a forge nor an auth is in flight, polling every
 * second.
 * @returns {Promise<void>}
 */
const isReady = () =>
  new Promise(res => {
    const poll = () => {
      if (isForging || isAuthing) {
        setTimeout(poll, 1000)
      } else {
        res()
      }
    }
    poll()
  })
// Number of subprocesses forged so far; exposed via _getProcCounter.
let procCounter = 0
// Set by kill(); forging after kill is a programming error.
let killed = false
/**
 * (Re)creates the gun subprocess: tears down the previous one if present,
 * sends the cached init options, replays every known on()/map().on()
 * subscription, re-authenticates with cached credentials and finally flushes
 * pending puts. Fire-and-forget: the async work runs in the background.
 */
const forge = () => {
  ;(async () => {
    if (killed) {
      throw new Error('Tried to forge after killing GunSmith')
    }
    logger.info(`Forging Gun # ${++procCounter}`)
    if (isForging) {
      throw new Error('Double forge?')
    }
    /** Used only for logs. */
    const isReforge = !!currentGun
    logger.info(isReforge ? 'Will reforge' : 'Will forge')
    isForging = true
    if (currentGun) {
      // Detach our handler first so teardown events aren't processed.
      currentGun.off('message', handleMsg)
      currentGun.disconnect()
      currentGun.kill()
      logger.info('Destroyed current gun')
    }
    const newGun = fork('utils/GunSmith/gun.js')
    currentGun = newGun
    logger.info('Forged new gun')
    // currentGun.on('', e => {
    //   logger.info('event from subprocess')
    //   logger.info(e)
    // })
    currentGun.on('message', handleMsg)
    /** @type {Smith.SmithMsgInit} */
    const initMsg = {
      // @ts-ignore TODO: Fix options typings
      opts: lastOpts,
      type: 'init'
    }
    // Wait for the subprocess to acknowledge initialization.
    await new Promise(res => {
      currentGun.on('message', msg => {
        if (msg.type === 'init') {
          // @ts-ignore
          res()
        }
      })
      currentGun.send(initMsg)
      logger.info('Sent init msg')
    })
    logger.info('Received init reply')
    // Replay every on() subscription into the fresh subprocess.
    const lastGunListeners = Object.keys(pathToListeners).map(path => {
      /** @type {Smith.SmithMsgOn} */
      const msg = {
        path,
        type: 'on'
      }
      return msg
    })
    if (lastGunListeners.length) {
      currentGun.send(lastGunListeners)
      logger.info(`Sent ${lastGunListeners.length} pending on() listeners`)
    }
    // Replay every map().on() subscription as well.
    const lastGunMapListeners = Object.keys(pathToMapListeners).map(path => {
      /** @type {Smith.SmithMsgMapOn} */
      const msg = {
        path,
        type: 'map.on'
      }
      return msg
    })
    if (lastGunMapListeners.length) {
      currentGun.send(lastGunMapListeners)
      logger.info(
        `Sent ${lastGunMapListeners.length} pending map().on() listeners`
      )
    }
    logger.info(
      isReforge
        ? 'Finished reforging, will now auto-auth'
        : 'Finished forging, will now auto-auth'
    )
    await autoAuth()
    // Eslint disable: This should be caught by a if (isForging) {throw} at the
    // beginning of this function
    // eslint-disable-next-line require-atomic-updates
    isForging = false
    flushPendingPuts()
  })()
}
/**
 * Builds a chainable proxy ("replica") of a gun node at the given
 * '>'-separated path (rooted at '$root', '$user' or a user public key).
 * Every operation waits for isReady() and is then forwarded over IPC to the
 * gun subprocess.
 * @param {string} path
 * @param {boolean=} afterMap Whether this replica was produced by map();
 * only on()/off() are meaningful on such a node.
 * @returns {Smith.GunSmithNode}
 */
function createReplica(path, afterMap = false) {
  // Listeners registered through THIS replica, so off() detaches only these.
  /** @type {(GunT.Listener|GunT.LoadListener)[]} */
  const listenersForThisRef = []
  return {
    _: {
      get get() {
        // Mimics gun's node._.get: the last key in the path.
        const keys = path.split('>')
        return keys[keys.length - 1]
      },
      opt: {
        // TODO
        peers: {}
      },
      put: {
        // TODO
      }
    },
    back() {
      throw new Error('Do not use back() on a GunSmith node.')
    },
    get(key) {
      if (afterMap) {
        throw new Error(
          'Cannot call get() after map() on a GunSmith node, you should only call on() after map()'
        )
      }
      return createReplica(path + '>' + key)
    },
    map() {
      if (afterMap) {
        throw new Error('Cannot call map() after map() on a GunSmith node')
      }
      return createReplica(path, true)
    },
    off() {
      // Remove this replica's listeners from both listener maps.
      for (const l of listenersForThisRef) {
        // eslint-disable-next-line no-multi-assign
        const listeners =
          pathToListeners[path] || (pathToListeners[path] = new Set())
        // eslint-disable-next-line no-multi-assign
        const mapListeners =
          pathToMapListeners[path] || (pathToMapListeners[path] = new Set())
        // @ts-expect-error
        listeners.delete(l)
        // @ts-expect-error
        mapListeners.delete(l)
      }
    },
    on(cb) {
      listenersForThisRef.push(cb)
      if (afterMap) {
        // eslint-disable-next-line no-multi-assign
        const listeners =
          pathToMapListeners[path] || (pathToMapListeners[path] = new Set())
        listeners.add(cb)
        /** @type {Smith.SmithMsgMapOn} */
        const msg = {
          path,
          type: 'map.on'
        }
        isReady().then(() => {
          currentGun.send(msg)
        })
      } else {
        // eslint-disable-next-line no-multi-assign
        const listeners =
          pathToListeners[path] || (pathToListeners[path] = new Set())
        listeners.add(cb)
        /** @type {Smith.SmithMsgOn} */
        const msg = {
          path,
          type: 'on'
        }
        isReady().then(() => {
          currentGun.send(msg)
        })
      }
      return this
    },
    once(cb, opts = { wait: 500 }) {
      if (afterMap) {
        throw new Error('Cannot call once() after map() on a GunSmith node')
      }
      // We could use this.on() but then we couldn't call .off()
      const tmp = createReplica(path, afterMap)
      /** @type {GunT.ListenerData} */
      let lastVal = null
      tmp.on(data => {
        lastVal = data
      })
      // Report whatever value arrived within the wait window, then detach.
      setTimeout(() => {
        tmp.off()
        const keys = path.split('>')
        // eslint-disable-next-line no-unused-expressions
        cb && cb(lastVal, keys[keys.length - 1])
      }, opts.wait)
      return this
    },
    put(data, cb) {
      const id = uuid()
      // Queue the put so re-forges can merge and re-send it until acked.
      const pendingPutsForPath = pendingPuts[path] || (pendingPuts[path] = [])
      /** @type {Smith.PendingPut} */
      const pendingPut = {
        cb: cb || (() => {}),
        data,
        id
      }
      pendingPutsForPath.push(pendingPut)
      /** @type {Smith.SmithMsgPut} */
      const msg = {
        data,
        id,
        path,
        type: 'put'
      }
      isReady().then(() => {
        currentGun.send(msg)
      })
      return this
    },
    set(data, cb) {
      if (afterMap) {
        throw new Error('Cannot call set() after map() on a GunSmith node')
      }
      // Like gun's set(): write under a fresh gun-style id, return its node.
      const id = gunUUID()
      this.put(
        {
          [id]: data
        },
        ack => {
          // eslint-disable-next-line no-unused-expressions
          cb && cb(ack)
        }
      )
      return this.get(id)
    },
    user(pub) {
      if (path !== '$root') {
        throw new ReferenceError(
          `Do not call user() on a non-root GunSmith node`
        )
      }
      if (!pub) {
        return createUserReplica()
      }
      // Read-only view of another user's graph: auth/create/leave and `_`
      // access are forbidden.
      const replica = createReplica(pub)
      // I don't know why Typescript insists on returning a UserGUNNode so here we go:
      return {
        ...replica,
        /** @returns {GunT.UserSoul} */
        get _() {
          throw new ReferenceError(
            `Do not access _ on another user's graph (${pub.slice(
              0,
              8
            )}...${pub.slice(-8)})`
          )
        },
        auth() {
          throw new Error(
            "Do not call auth() on another user's graph (gun.user(otherUserPub))"
          )
        },
        create() {
          throw new Error(
            "Do not call create() on another user's graph (gun.user(otherUserPub))"
          )
        },
        leave() {
          throw new Error(
            "Do not call leave() on another user's graph (gun.user(otherUserPub))"
          )
        }
      }
    },
    then() {
      return new Promise(res => {
        this.once(data => {
          res(data)
        })
      })
    },
    specialOn(cb) {
      // Re-forge gun every 30s until the listener has fired at least once.
      let canaryPeep = false
      const checkCanary = () =>
        setTimeout(() => {
          if (!canaryPeep) {
            isReady()
              .then(forge)
              .then(isReady)
              .then(checkCanary)
          }
        }, 30000)
      checkCanary()
      return this.on((data, key) => {
        canaryPeep = true
        cb(data, key)
      })
    },
    specialOnce(cb, _wait = 1000) {
      // Retry with x3 backoff, re-forging gun each time, until populated
      // data arrives or the wait exceeds 100s (then the last value is used).
      this.once(
        (data, key) => {
          if (isPopulated(data) || _wait > 100000) {
            cb(data, key)
          } else {
            isReady()
              .then(forge)
              .then(isReady)
              .then(() => {
                this.specialOnce(cb, _wait * 3)
              })
          }
        },
        { wait: _wait }
      )
      return this
    },
    specialThen() {
      return new Promise((res, rej) => {
        this.specialOnce(data => {
          if (isPopulated(data)) {
            res(data)
          } else {
            rej(new Error(`Could not fetch data at path ${path}`))
          }
        })
      })
    },
    pPut(data) {
      // Promise wrapper around put().
      return new Promise((res, rej) => {
        this.put(data, ack => {
          if (ack.err) {
            rej(new Error(ack.err))
          } else {
            res()
          }
        })
      })
    },
    pSet(data) {
      // Promise wrapper around set().
      return new Promise((res, rej) => {
        this.set(data, ack => {
          if (ack.err) {
            rej(new Error(ack.err))
          } else {
            res()
          }
        })
      })
    }
  }
}
// Guards the singleton below: the cached credential state is global.
let userReplicaCalled = false
/**
 * Builds the singleton replica for the authenticated user's graph, layering
 * auth()/create()/leave() and credential caching on top of a '$user'
 * replica.
 * @returns {Smith.UserSmithNode}
 */
function createUserReplica() {
  if (userReplicaCalled) {
    throw new Error('Please only call gun.user() (without a pub) once.')
  }
  userReplicaCalled = true
  const baseReplica = createReplica('$user')
  /** @type {Smith.UserSmithNode} */
  const completeReplica = {
    ...baseReplica,
    get _() {
      return {
        ...baseReplica._,
        // TODO
        sea: lastPair || {
          epriv: '',
          epub: '',
          priv: '',
          pub: ''
        }
      }
    },
    get is() {
      // Mirrors gun's user.is: defined only while authenticated.
      if (lastAlias && lastPair) {
        return {
          alias: lastAlias,
          pub: lastPair.pub
        }
      }
      return undefined
    },
    auth(alias, pass, cb) {
      // Callback adapter around the promise-based module-level auth().
      auth(alias, pass)
        .then(pair => {
          cb({
            err: undefined,
            sea: pair
          })
        })
        .catch(e => {
          cb({
            err: e.message,
            sea: undefined
          })
        })
    },
    create(alias, pass, cb) {
      // Clear cached credentials before attempting the create.
      lastAlias = ''
      lastPass = ''
      lastPair = null
      /** @type {Smith.SmithMsgCreate} */
      const msg = {
        alias,
        pass,
        type: 'create'
      }
      // One-shot reply listener, detached on the first 'create' message.
      /** @param {Smith.GunMsg} msg */
      const _cb = msg => {
        if (msg.type === 'create') {
          currentGun.off('message', _cb)
          const { ack } = msg
          if (ack.err) {
            cb(ack)
          } else if (ack.pub) {
            lastAlias = alias
            lastPass = pass
            lastPair = msg.pair
            cb(ack)
          } else {
            // NOTE(review): this throws inside an IPC 'message' listener, so
            // it surfaces as an uncaughtException instead of reaching cb —
            // confirm that is intended.
            throw (new Error('Auth: ack.pub undefined'))
          }
        }
      }
      currentGun.on('message', _cb)
      currentGun.send(msg)
    },
    leave() {
      // Clearing the cache is what logs the user out locally (see `is`
      // above); the subprocess is told to leave as well.
      lastAlias = ''
      lastPass = ''
      lastPair = null
      /** @type {Smith.SmithMsgLeave} */
      const msg = {
        type: 'leave'
      }
      currentGun.send(msg)
    }
  }
  return completeReplica
}
/**
 * Entry point mimicking the Gun() constructor: caches the options for later
 * forges and returns the root replica. Note the subprocess is NOT forged
 * here (the forge() call is commented out); forge() runs via the exported
 * _reforge hook or the specialOn()/specialOnce() retry helpers.
 * @param {import('gun/types/gun/IGunConstructorOptions').IGunConstructorOptions} opts
 * @returns {Smith.GunSmithNode}
 */
const Gun = opts => {
  lastOpts = opts
  // forge()
  return createReplica('$root')
}
module.exports = Gun
/**
 * Permanently shuts down the gun subprocess; forging afterwards throws.
 */
module.exports.kill = () => {
  if (currentGun) {
    // 'bye' tells the subprocess to stop handling and sending messages.
    currentGun.send('bye')
    currentGun.off('message', handleMsg)
    currentGun.disconnect()
    currentGun.kill()
    // @ts-ignore
    currentGun = null
    killed = true
    logger.info('Killed gunsmith.')
  }
}
// Hooks used by the test suite:
module.exports._reforge = forge
module.exports._isReady = isReady
module.exports._getProcCounter = () => {
  return procCounter
}

View file

@ -0,0 +1,411 @@
/**
* @format
*/
// @ts-check
const Gun = require('./GunSmith')
const words = require('random-words')
const fs = require('fs')
const debounce = require('lodash/debounce')
const once = require('lodash/once')
const expect = require('expect')
const logger = require('../../config/log')
const { removeBuiltInGunProps } = require('./misc')
// Each run gets its own randomly-named storage dir under ./test-radata.
if (!fs.existsSync('./test-radata')) {
  fs.mkdirSync('./test-radata')
}
const instance = Gun({
  axe: false,
  multicast: false,
  file: './test-radata/' + words({ exactly: 2 }).join('-')
})
const user = instance.user()
// Random credentials for the create()/auth() tests below.
const alias = words({ exactly: 2 }).join('')
const pass = words({ exactly: 2 }).join('')
/**
 * Returns a promise that resolves after `ms` milliseconds.
 * @param {number} ms
 */
const delay = ms => new Promise(resolve => setTimeout(resolve, ms))
describe('gun smith', () => {
  after(() => {
    Gun.kill()
  })
  // **************************************************************************
  // These tests are long but we run them first to detect if the re-forging
  // logic is flawed and affecting functionality.
  // **************************************************************************
  it('writes object items into sets and correctly populates item._.get with the newly created id', done => {
    const node = instance.get(words()).get(words())
    const obj = {
      a: 1,
      b: 'hello'
    }
    const item = node.set(obj)
    node.get(item._.get).once(data => {
      expect(removeBuiltInGunProps(data)).toEqual(obj)
      done()
    })
  })
  it('provides an special once() that restarts gun until a value is fetched', done => {
    const a = words()
    const b = words()
    const node = instance.get(a).get(b)
    const value = words()
    node.specialOnce(data => {
      expect(data).toEqual(value)
      done()
    })
    // The write only happens 30s in, so specialOnce must survive re-forges.
    setTimeout(() => {
      node.put(value)
    }, 30000)
  })
  it('provides an special then() that restarts gun until a value is fetched', async () => {
    const a = words()
    const b = words()
    const node = instance.get(a).get(b)
    const value = words()
    setTimeout(() => {
      node.put(value)
    }, 30000)
    const res = await node.specialThen()
    expect(res).toBe(value)
  })
  it('provides an special on() that restarts gun when a value has not been obtained in a determinate amount of time', done => {
    const node = instance.get(words()).get(words())
    const secondValue = words()
    // done() must only fire once even if the listener reports repeatedly.
    const onceDone = once(done)
    node.specialOn(
      debounce(data => {
        if (data === secondValue) {
          onceDone()
        }
      })
    )
    setTimeout(() => {
      node.put(secondValue)
    }, 32000)
  })
  it('puts a true and reads it with once()', done => {
    logger.info('puts a true and reads it with once()')
    const a = words()
    const b = words()
    instance
      .get(a)
      .get(b)
      .put(true)
    instance
      .get(a)
      .get(b)
      .once(val => {
        expect(val).toBe(true)
        done()
      })
  })
  it('puts a false and reads it with once()', done => {
    const a = words()
    const b = words()
    instance
      .get(a)
      .get(b)
      .put(false, ack => {
        if (ack.err) {
          throw new Error(ack.err)
        } else {
          instance
            .get(a)
            .get(b)
            .once(val => {
              expect(val).toBe(false)
              done()
            })
        }
      })
  })
  it('puts numbers and reads them with once()', done => {
    const a = words()
    const b = words()
    instance
      .get(a)
      .get(b)
      .put(5)
    instance
      .get(a)
      .get(b)
      .once(val => {
        expect(val).toBe(5)
        done()
      })
  })
  it('puts strings and reads them with once()', done => {
    const a = words()
    const b = words()
    const sentence = words({ exactly: 50 }).join(' ')
    instance
      .get(a)
      .get(b)
      .put(sentence)
    instance
      .get(a)
      .get(b)
      .once(val => {
        expect(val).toBe(sentence)
        done()
      })
  })
  it('merges puts', async () => {
    const a = {
      a: 1
    }
    const b = {
      b: 1
    }
    const c = { ...a, ...b }
    const node = instance.get('foo').get('bar')
    node.put(a)
    node.put(b)
    const data = await node.then()
    if (typeof data !== 'object' || data === null) {
      throw new Error('Data not an object')
    }
    expect(removeBuiltInGunProps(data)).toEqual(c)
  })
  it('writes primitive items into sets and correctly assigns the id to ._.get', done => {
    const node = instance.get(words()).get(words())
    const item = node.set('hello')
    node.once(data => {
      expect(removeBuiltInGunProps(data)).toEqual({
        [item._.get]: 'hello'
      })
      done()
    })
  })
  // TODO: find out why this test fucks up the previous one if it runs before
  // that one
  it('maps over a primitive set', done => {
    const node = instance.get(words()).get(words())
    const items = words({ exactly: 50 })
    const ids = items.map(i => node.set(i)._.get)
    let checked = 0
    node.map().on((data, id) => {
      expect(items).toContain(data)
      expect(ids).toContain(id)
      checked++
      if (checked === 50) {
        done()
      }
    })
  })
  it('maps over an object set', done => {
    const node = instance.get(words()).get(words())
    const items = words({ exactly: 50 }).map(w => ({
      word: w
    }))
    const ids = items.map(i => node.set(i)._.get)
    let checked = 0
    node.map().on((data, id) => {
      expect(items).toContainEqual(removeBuiltInGunProps(data))
      expect(ids).toContain(id)
      checked++
      if (checked === 50) {
        done()
      }
    })
  })
  it('offs `on()`s', async () => {
    const node = instance.get(words()).get(words())
    let called = false
    node.on(() => {
      called = true
    })
    node.off()
    await node.pPut('return')
    await delay(500)
    expect(called).toBe(false)
  })
  it('offs `map().on()`s', async () => {
    const node = instance.get(words()).get(words())
    let called = false
    const iterateeNode = node.map()
    iterateeNode.on(() => {
      called = true
    })
    iterateeNode.off()
    await node.pSet('return')
    await delay(500)
    expect(called).toBe(false)
  })
  it('provides an user node with create(), auth() and leave()', async () => {
    const ack = await new Promise(res => user.create(alias, pass, res))
    expect(ack.err).toBeUndefined()
    const { pub } = ack
    expect(pub).toBeTruthy()
    expect(user.is?.pub).toEqual(pub)
    user.leave()
    expect(user.is).toBeUndefined()
    /** @type {GunT.AuthAck} */
    const authAck = await new Promise(res =>
      user.auth(alias, pass, ack => res(ack))
    )
    expect(authAck.err).toBeUndefined()
    expect(authAck.sea?.pub).toEqual(pub)
    expect(user.is?.pub).toEqual(pub)
    user.leave()
  })
  it('reliably provides authentication information across re-forges', async () => {
    /** @type {GunT.AuthAck} */
    const authAck = await new Promise(res =>
      user.auth(alias, pass, ack => res(ack))
    )
    const pub = authAck.sea?.pub
    expect(pub).toBeTruthy()
    // The pub must be available both immediately and after the forge settles.
    Gun._reforge()
    expect(user.is?.pub).toEqual(pub)
    await Gun._isReady()
    expect(user.is?.pub).toEqual(pub)
    user.leave()
  })
  it('provides thenables for values', async () => {
    const a = words()
    const b = words()
    const node = instance.get(a).get(b)
    const value = words()
    await new Promise((res, rej) => {
      node.put(value, ack => {
        if (ack.err) {
          rej(new Error(ack.err))
        } else {
          // @ts-ignore
          res()
        }
      })
    })
    const fetch = await instance
      .get(a)
      .get(b)
      .then()
    expect(fetch).toEqual(value)
  })
  it('provides an special thenable put()', async () => {
    const a = words()
    const b = words()
    const node = instance.get(a).get(b)
    const value = words()
    await node.pPut(value)
    const res = await node.then()
    expect(res).toBe(value)
  })
  it('on()s and handles object>primitive>object transitions', done => {
    const a = {
      one: 1
    }
    const b = 'two'
    const lastPut = {
      three: 3
    }
    // Object puts merge with earlier object state, so after a>b>c the node
    // holds a merged with lastPut.
    const c = { ...a, ...lastPut }
    const node = instance.get(words()).get(words())
    let checked = 0
    node.on(
      debounce(data => {
        checked++
        if (checked === 1) {
          expect(removeBuiltInGunProps(data)).toEqual(a)
        } else if (checked === 2) {
          expect(data).toEqual(b)
        } else if (checked === 3) {
          expect(removeBuiltInGunProps(data)).toEqual(c)
          done()
        }
      })
    )
    node.put(a)
    setTimeout(() => {
      node.put(b)
    }, 800)
    setTimeout(() => {
      node.put(c)
    }, 1200)
  })
})

85
utils/GunSmith/GunT.ts Normal file
View file

@ -0,0 +1,85 @@
/**
 * @prettier
 */
namespace GunT {
  /** Primitive values gun can store at a leaf. */
  export type Primitive = boolean | string | number
  /** A plain object graph of storable values. */
  export interface Data {
    [K: string]: ValidDataValue
  }
  export type ValidDataValue = Primitive | null | Data
  /** Write acknowledgement; err is undefined on success. */
  export interface Ack {
    err: string | undefined
  }
  type ListenerObjSoul = {
    '#': string
  }
  /** Object payload delivered to on(): soul references plus `_` metadata. */
  export type ListenerObj = Record<
    string,
    ListenerObjSoul | Primitive | null
  > & {
    _: ListenerObjSoul
  }
  export type ListenerData = Primitive | null | ListenerObj | undefined
  interface OpenListenerDataObj {
    [k: string]: OpenListenerData
  }
  export type Listener = (data: ListenerData, key: string) => void
  export type Callback = (ack: Ack) => void
  /** A gun mesh peer as found in node._.opt.peers. */
  export interface Peer {
    url: string
    id: string
    wire?: {
      readyState: number
    }
  }
  /** The `_` metadata of a gun node. */
  export interface Soul {
    get: string
    put: Primitive | null | object | undefined
    opt: {
      peers: Record<string, Peer>
    }
  }
  /** Fully-resolved data: nested objects instead of soul references. */
  export type OpenListenerData = Primitive | null | OpenListenerDataObj
  export type OpenListener = (data: OpenListenerData, key: string) => void
  export type LoadListenerData = OpenListenerData
  export type LoadListener = (data: LoadListenerData, key: string) => void
  /** Acknowledgement for user.create(). */
  export interface CreateAck {
    pub: string | undefined
    err: string | undefined
  }
  export type CreateCB = (ack: CreateAck) => void
  /** Acknowledgement for user.auth(); sea is the key pair on success. */
  export interface AuthAck {
    err: string | undefined
    sea: UserPair | undefined
  }
  export type AuthCB = (ack: AuthAck) => void
  /** A SEA key pair. */
  export interface UserPair {
    epriv: string
    epub: string
    priv: string
    pub: string
  }
  export interface UserSoul extends Soul {
    sea: UserPair
  }
}

221
utils/GunSmith/Smith.ts Normal file
View file

@ -0,0 +1,221 @@
/**
 * @format
 */
/// <reference path="GunT.ts" />
namespace Smith {
  export interface GunSmithNode {
    _: GunT.Soul
    /**
     * Used only inside the subprocess.
     */
    back(
      path: 'opt'
    ): {
      peers: Record<
        string,
        {
          url: string
          id: string
          wire?: {
            readyState: number
          }
        }
      >
    }
    /**
     * Returns the child node at `key`.
     */
    get(key: string): GunSmithNode
    /**
     * Returns an iteratee node; only on() may be called on it.
     */
    map(): GunSmithNode
    /**
     * Detaches the listeners registered through this node.
     */
    off(): void
    /**
     * Subscribes to value changes at this node.
     */
    on(cb: GunT.Listener): void
    /**
     * Reads the value once, reporting after `opts.wait` milliseconds.
     */
    once(cb?: GunT.Listener, opts?: { wait?: number }): void
    /**
     * A promise version of put().
     * @throws
     */
    pPut(data: GunT.ValidDataValue): Promise<void>
    /**
     * A promise version of set().
     * @throws
     */
    pSet(data: GunT.ValidDataValue): Promise<void>
    /**
     * Writes `data` at this node.
     */
    put(data: GunT.ValidDataValue, cb?: GunT.Callback): void
    /**
     * Adds `data` under a fresh id and returns that item's node.
     */
    set(data: GunT.ValidDataValue, cb?: GunT.Callback): GunSmithNode
    /**
     * Gun will be restarted to force replication of data
     * if needed.
     * @param cb
     */
    specialOn(cb: GunT.Listener): void
    /**
     * Gun will be restarted to force replication of data
     * if needed.
     * @param cb
     * @param _wait
     */
    specialOnce(cb: GunT.Listener, _wait?: number): GunSmithNode
    /**
     * Gun will be restarted to force replication of data
     * if needed.
     */
    specialThen(): Promise<GunT.ListenerData>
    then(): Promise<GunT.ListenerData>
    user(): UserSmithNode
    user(pub: string): GunSmithNode
  }
  export interface UserSmithNode extends GunSmithNode {
    _: GunT.UserSoul
    auth(alias: string, pass: string, cb: GunT.AuthCB): void
    is?: {
      alias: string
      pub: string
    }
    create(user: string, pass: string, cb: GunT.CreateCB): void
    leave(): void
  }
  /** A put() awaiting its ack from the subprocess. */
  export interface PendingPut {
    cb: GunT.Callback
    data: GunT.ValidDataValue
    id: string
  }
  // Messages sent from the parent (GunSmith) to the gun subprocess:
  export interface SmithMsgInit {
    opts: Record<string, any>
    type: 'init'
  }
  export interface SmithMsgAuth {
    alias: string
    pass: string
    type: 'auth'
  }
  export interface SmithMsgCreate {
    alias: string
    pass: string
    type: 'create'
  }
  export interface SmithMsgLeave {
    type: 'leave'
  }
  export interface SmithMsgOn {
    path: string
    type: 'on'
  }
  export interface SmithMsgLoad {
    id: string
    path: string
    type: 'load'
  }
  export interface SmithMsgMapOn {
    path: string
    type: 'map.on'
  }
  export interface SmithMsgPut {
    id: string
    data: GunT.ValidDataValue
    path: string
    type: 'put'
  }
  export interface SmithMsgMultiPut {
    ids: string[]
    data: GunT.ValidDataValue
    path: string
    type: 'multiPut'
  }
  export type SmithMsg =
    | SmithMsgInit
    | SmithMsgAuth
    | SmithMsgCreate
    // Fixed: the union previously listed SmithMsgAuth twice and omitted
    // SmithMsgLeave, which user.leave() does send.
    | SmithMsgLeave
    | SmithMsgOn
    | SmithMsgLoad
    | SmithMsgMapOn
    | SmithMsgPut
    | SmithMsgMultiPut
    | BatchSmithMsg
  export type BatchSmithMsg = SmithMsg[]
  // Messages sent from the subprocess back to the parent:
  export interface GunMsgAuth {
    ack: GunT.AuthAck
    type: 'auth'
  }
  export interface GunMsgCreate {
    ack: GunT.CreateAck
    pair: GunT.UserPair
    type: 'create'
  }
  export interface GunMsgOn {
    data: GunT.ListenerData
    path: string
    type: 'on'
  }
  export interface GunMsgMapOn {
    data: GunT.ListenerData
    path: string
    key: string
    type: 'map.on'
  }
  export interface GunMsgLoad {
    id: string
    data: GunT.LoadListenerData
    key: string
    type: 'load'
  }
  export interface GunMsgPut {
    ack: GunT.Ack
    id: string
    path: string
    type: 'put'
  }
  export interface GunMsgMultiPut {
    ack: GunT.Ack
    ids: string[]
    path: string
    type: 'multiPut'
  }
  export type GunMsg =
    | GunMsgAuth
    | GunMsgCreate
    | GunMsgOn
    | GunMsgMapOn
    | GunMsgLoad
    | GunMsgPut
    | GunMsgMultiPut
}

251
utils/GunSmith/gun.js Normal file
View file

@ -0,0 +1,251 @@
/**
* @format
*/
// @ts-check
/// <reference path="Smith.ts" />
/// <reference path="GunT.ts" />
const Gun = require('gun')
require('gun/nts')
require('gun/lib/load')
const logger = require('../../config/log')
// Set once the parent sends 'bye'; no further messages are handled or sent.
let dead = false
/**
 * Forwards a message to the parent process, dropping it silently once this
 * subprocess has been told to die.
 * @param {any} msg
 */
const sendMsg = msg => {
  if (dead) {
    return
  }
  if (typeof process.send === 'function') {
    process.send(msg)
  } else {
    // process.send only exists when forked with an IPC channel.
    logger.error(
      'Fatal error: Could not send a message from inside the gun process.'
    )
  }
}
logger.info('subprocess invoked')
// Keep the subprocess alive on unexpected errors; just log them.
process.on('uncaughtException', e => {
  logger.error('Uncaught exception inside Gun subprocess:')
  logger.error(e)
})
process.on('unhandledRejection', e => {
  logger.error('Unhandled rejection inside Gun subprocess:')
  logger.error(e)
})
/**
 * @type {Smith.GunSmithNode}
 */
// eslint-disable-next-line init-declarations
let gun
/**
 * @type {Smith.UserSmithNode}
 */
// eslint-disable-next-line init-declarations
let user
/**
 * Resolves once the user node reports an authenticated pub, polling every
 * second.
 * @returns {Promise<void>}
 */
const waitForAuth = async () => {
  while (!(user.is && user.is.pub)) {
    // eslint-disable-next-line no-await-in-loop
    await new Promise(res => setTimeout(res, 1000))
  }
}
/**
 * Resolves a '>'-separated path to a node in this subprocess's gun graph.
 * The first segment selects the root: '$root' is the plain graph, '$user'
 * the authenticated user's graph, anything else is treated as a user public
 * key. (Extracted: this logic was previously duplicated in five branches.)
 * @param {string} path
 * @returns {Smith.GunSmithNode}
 */
const nodeForPath = path => {
  const [root, ...keys] = path.split('>')
  /** @type {Smith.GunSmithNode} */
  let node =
    {
      $root: gun,
      $user: user
    }[root] || gun.user(root)
  for (const key of keys) {
    node = node.get(key)
  }
  return node
}
/**
 * Dispatches a message (or batch of messages) from the parent process.
 * @param {Smith.SmithMsg} msg
 */
const handleMsg = async msg => {
  if (dead) {
    logger.error('Dead sub-process received msg: ', msg)
    return
  }
  // @ts-ignore
  if (msg === 'bye') {
    logger.info('KILLING')
    dead = true
  }
  if (Array.isArray(msg)) {
    // Batched messages are dispatched individually.
    msg.forEach(handleMsg)
    return
  }
  if (msg.type === 'init') {
    gun = /** @type {any} */ (new Gun(msg.opts))
    // Force gun to connect to peers
    gun
      .get('foo')
      .get('baz')
      .once()
    // Log the connected-peer set whenever it changes, and unconditionally
    // every 30 seconds.
    let currentPeers = ''
    setInterval(() => {
      const newPeers = JSON.stringify(
        Object.values(gun.back('opt').peers)
          .filter(p => p.wire && p.wire.readyState)
          .map(p => p.url)
      )
      if (newPeers !== currentPeers) {
        logger.info('Connected peers:', newPeers)
        currentPeers = newPeers
      }
    }, 2000)
    setInterval(() => {
      // Log regardless of change every 30 seconds
      logger.info('Connected peers:', currentPeers)
    }, 30000)
    user = gun.user()
    sendMsg({
      type: 'init'
    })
  }
  if (msg.type === 'auth') {
    const { alias, pass } = msg
    user.auth(alias, pass, ack => {
      /** @type {Smith.GunMsgAuth} */
      const reply = {
        ack: {
          err: ack.err,
          sea: ack.sea
        },
        type: 'auth'
      }
      sendMsg(reply)
    })
  }
  if (msg.type === 'create') {
    const { alias, pass } = msg
    user.create(alias, pass, ack => {
      /** @type {Smith.GunMsgCreate} */
      const reply = {
        ack: {
          err: ack.err,
          pub: ack.pub
        },
        // The parent caches this pair for auto-auth after re-forges.
        pair: user._.sea,
        type: 'create'
      }
      sendMsg(reply)
    })
  }
  if (msg.type === 'on') {
    nodeForPath(msg.path).on(data => {
      /** @type {Smith.GunMsgOn} */
      const res = {
        data,
        path: msg.path,
        type: 'on'
      }
      sendMsg(res)
    })
  }
  if (msg.type === 'map.on') {
    nodeForPath(msg.path)
      .map()
      .on((data, key) => {
        /** @type {Smith.GunMsgMapOn} */
        const res = {
          data,
          key,
          path: msg.path,
          type: 'map.on'
        }
        sendMsg(res)
      })
  }
  if (msg.type === 'put') {
    // Writes to the user graph must wait for authentication.
    if (msg.path.split('>')[0] === '$user') {
      await waitForAuth()
    }
    nodeForPath(msg.path).put(msg.data, ack => {
      /** @type {Smith.GunMsgPut} */
      const reply = {
        ack: {
          // Gun may put non-string values in ack.err; normalize for IPC.
          err: typeof ack.err === 'string' ? ack.err : undefined
        },
        id: msg.id,
        path: msg.path,
        type: 'put'
      }
      sendMsg(reply)
    })
  }
  if (msg.type === 'multiPut') {
    nodeForPath(msg.path).put(msg.data, ack => {
      /** @type {Smith.GunMsgMultiPut} */
      const reply = {
        ack: {
          // Normalized the same way as the single-put ack (this branch
          // previously forwarded ack.err unfiltered — inconsistent).
          err: typeof ack.err === 'string' ? ack.err : undefined
        },
        ids: msg.ids,
        path: msg.path,
        type: 'multiPut'
      }
      sendMsg(reply)
    })
  }
}
process.on('message', handleMsg)

1
utils/GunSmith/index.js Normal file
View file

@ -0,0 +1 @@
module.exports = require('./GunSmith')

78
utils/GunSmith/misc.js Normal file
View file

@ -0,0 +1,78 @@
/**
* @format
*/
// @ts-check
// TODO: Check if merge() is equivalent to what gun does. But it should be.
const merge = require('lodash/merge')
/// <reference path="./GunT.ts" />
/**
 * Replays a sequence of put() payloads (oldest first) and returns the value
 * the node would hold afterwards: object puts merge into the accumulated
 * object state, while a primitive (or null) put replaces the value outright
 * — but a later object put restores the merged object state.
 * @param {GunT.ValidDataValue[]} values
 * @returns {GunT.ValidDataValue}
 * @example
 * x.put({ a: 1 })
 * x.put('yo')
 * assertEquals(await x.then(), 'yo')
 * x.put({ b: 2 })
 * assertEquals(await x.then(), { a: 1 , b: 2 })
 */
const mergePuts = values => {
  // Accumulated object state; survives intervening primitive puts.
  const objectState = {}
  /** @type {GunT.ValidDataValue} */
  let result = {}
  values.forEach(value => {
    if (value !== null && typeof value === 'object') {
      merge(objectState, value)
      result = merge({}, objectState)
    } else {
      result = value
    }
  })
  return result
}
/**
 * Returns a shallow copy of `data` without gun's built-in metadata keys
 * (`_` and `#`).
 * @param {any} data
 * @returns {any}
 * @throws {TypeError} If `data` is not a non-null object. The offending
 * value is embedded in the error message (the stray debug console.log that
 * previously preceded the throw has been removed).
 */
const removeBuiltInGunProps = data => {
  if (typeof data === 'object' && data !== null) {
    const o = { ...data }
    delete o._
    delete o['#']
    return o
  }
  throw new TypeError(
    'Non object passed to removeBuiltInGunProps: ' + JSON.stringify(data)
  )
}
/**
 * Whether listener data actually carries a value: null/undefined are empty,
 * objects count only if they have keys besides gun's metadata, and any
 * primitive (including 0, '' and false) counts as populated.
 * @param {GunT.ListenerData} data
 */
const isPopulated = data => {
  if (data === null || typeof data === 'undefined') {
    return false
  }
  if (typeof data !== 'object') {
    return true
  }
  return Object.keys(removeBuiltInGunProps(data)).length > 0
}
// Public API of this helpers module.
module.exports = {
  mergePuts,
  removeBuiltInGunProps,
  isPopulated
}

View file

@ -1,181 +0,0 @@
/**
* @prettier
*/
const Crypto = require('crypto')
const { Buffer } = require('buffer')
const logger = require('winston')
// Per-device RSA key pairs (decryptKey reads .privateKey from entries);
// presumably populated by authorizeDevice — its continuation is not visible
// here, confirm against the full file.
const APIKeyPair = new Map()
// Device public keys, keyed by deviceId (set in authorizeDevice).
const authorizedDevices = new Map()
// Socket events exchanged without encryption.
const nonEncryptedEvents = [
  'ping',
  'disconnect',
  'IS_GUN_AUTH',
  'SET_LAST_SEEN_APP'
]
const Encryption = {
  /**
   * Whether the given event is exchanged in plain text.
   * @param {string} event
   * @returns {boolean}
   */
  isNonEncrypted: event => nonEncryptedEvents.includes(event),
  /**
   * RSA-encrypts a message with the device's registered public key.
   * @param {{ deviceId: string , message: string }} arg0
   * @returns {string} Base64 ciphertext.
   * @throws Plain object {field, message} if the device is unknown.
   */
  encryptKey: ({ deviceId, message }) => {
    if (!authorizedDevices.has(deviceId)) {
      throw { field: 'deviceId', message: 'Unknown Device ID' }
    }
    const devicePublicKey = authorizedDevices.get(deviceId)
    const data = Buffer.from(message)
    const encryptedData = Crypto.publicEncrypt(
      {
        key: devicePublicKey,
        padding: Crypto.constants.RSA_PKCS1_PADDING
      },
      data
    )
    return encryptedData.toString('base64')
  },
  /**
   * RSA-decrypts a message with the API private key held for the device.
   * @param {{ deviceId: string , message: string }} arg0
   * @returns {string} Decrypted plaintext.
   * @throws Plain object {field, message} if the device is unknown.
   */
  decryptKey: ({ deviceId, message }) => {
    if (!authorizedDevices.has(deviceId)) {
      throw { field: 'deviceId', message: 'Unknown Device ID' }
    }
    const data = Buffer.from(message, 'base64')
    const encryptedData = Crypto.privateDecrypt(
      {
        key: APIKeyPair.get(deviceId).privateKey,
        padding: Crypto.constants.RSA_PKCS1_PADDING
      },
      data
    )
    return encryptedData.toString()
  },
  /**
   * Hybrid-encrypts a message: the payload with AES-256-CBC under a random
   * key/iv, and the AES key itself RSA-encrypted for the device via
   * encryptKey above.
   * @param {{ deviceId: string , message: any , metadata?: any}} arg0
   * @returns {{encryptedData: string, encryptedKey: string, iv: string, metadata: any}}
   */
  encryptMessage: ({ deviceId, message, metadata = {} }) => {
    // Objects are serialized; anything else is passed to Buffer.from as-is.
    const parsedMessage =
      typeof message === 'object' ? JSON.stringify(message) : message
    const data = Buffer.from(parsedMessage)
    const key = Crypto.randomBytes(32)
    const iv = Crypto.randomBytes(16)
    const encryptedKey = Encryption.encryptKey({
      deviceId,
      message: key.toString('hex')
    })
    const cipher = Crypto.createCipheriv('aes-256-cbc', key, iv)
    const encryptedCipher = cipher.update(data)
    const encryptedBuffer = Buffer.concat([
      Buffer.from(encryptedCipher),
      Buffer.from(cipher.final())
    ])
    const encryptedData = encryptedBuffer.toString('base64')
    const encryptedMessage = {
      encryptedData,
      encryptedKey,
      iv: iv.toString('hex'),
      metadata
    }
    return encryptedMessage
  },
/**
* @param {{ message: string , key: string , iv: string }} arg0
*/
decryptMessage: ({ message, key, iv }) => {
const data = Buffer.from(message, 'base64')
const cipher = Crypto.createDecipheriv(
'aes-256-cbc',
Buffer.from(key, 'hex'),
Buffer.from(iv, 'hex')
)
const decryptedCipher = cipher.update(data)
const decryptedBuffer = Buffer.concat([
Buffer.from(decryptedCipher),
Buffer.from(cipher.final())
])
const decryptedData = decryptedBuffer.toString()
return decryptedData.toString()
},
/**
* @param {{ deviceId: string }} arg0
*/
isAuthorizedDevice: ({ deviceId }) => {
if (authorizedDevices.has(deviceId)) {
return true
}
return false
},
/**
* @param {{ deviceId: string , publicKey: string }} arg0
*/
authorizeDevice: ({ deviceId, publicKey }) =>
new Promise((resolve, reject) => {
authorizedDevices.set(deviceId, publicKey)
Crypto.generateKeyPair(
'rsa',
{
modulusLength: 2048,
privateKeyEncoding: {
type: 'pkcs1',
format: 'pem'
},
publicKeyEncoding: {
type: 'pkcs1',
format: 'pem'
}
},
(err, publicKey, privateKey) => {
if (err) {
// @ts-ignore
logger.error(err)
reject(err)
return
}
const exportedKey = {
publicKey,
privateKey
}
APIKeyPair.set(deviceId, exportedKey)
resolve({
success: true,
APIPublicKey: exportedKey.publicKey
})
}
)
}),
/**
* @param {{ deviceId: string }} arg0
*/
unAuthorizeDevice: ({ deviceId }) => {
authorizedDevices.delete(deviceId)
},
generateRandomString: (length = 16) =>
new Promise((resolve, reject) => {
Crypto.randomBytes(length, (err, buffer) => {
if (err) {
reject(err)
return
}
const token = buffer.toString('hex')
resolve(token)
})
})
}
module.exports = Encryption

View file

@ -1,6 +1,7 @@
/**
* @format
*/
const expect = require('expect')
const { asyncFilter } = require('./helpers')

View file

@ -1,7 +1,6 @@
/**
* @format
*/
const Gun = require('gun')
const { asyncFilter } = require('./helpers')
@ -9,10 +8,15 @@ const { asyncFilter } = require('./helpers')
* @returns {string}
*/
const gunUUID = () => {
  // Copied from gun internals (so we don't reach into gun's untyped API).
  // FIX: removed diff residue that left the old `Gun.Text.random()` call
  // returning before this code ran. NOTE: the alphabet deliberately matches
  // gun's, which omits the letter "Y".
  const alphabet =
    '0123456789ABCDEFGHIJKLMNOPQRSTUVWXZabcdefghijklmnopqrstuvwxyz'
  let id = ''
  for (let i = 0; i < 24; i++) {
    id += alphabet.charAt(Math.floor(Math.random() * alphabet.length))
  }
  return id
}
module.exports = {

View file

@ -0,0 +1,68 @@
const logger = require('../../config/log')
const fetch = require('node-fetch')
const Storage = require('node-persist')
const { listPeers, connectPeer,getInfo } = require('./v2')
// Remote service that opens channels for nodes redeeming hosting invites.
const handlerBaseUrl = "https://channels.shock.network:4444"
module.exports = async () => {
logger.info("DOING CHANNEL INVITE THING: START")
/**
* @type string | undefined
*/
const invite = process.env.HOSTING_INVITE
if(!invite) {
logger.info("DOING CHANNEL INVITE THING: NVM NO INVITE")
return
}
try {
/**
* @type string[]
*/
const invites = await Storage.getItem('processedInvites') || []
if(invites.includes(invite)){
logger.info("DOING CHANNEL INVITE THING: INVITE PROCESSED ALREADY")
return
}
const me = await getInfo()
const {identity_pubkey} = me
//@ts-expect-error
const connectReq = await fetch(`${handlerBaseUrl}/connect`)
if(connectReq.status !== 200 ){
logger.info("DOING CHANNEL INVITE THING: CONNECT FAILED")
return
}
const connJson = await connectReq.json()
const [uri] = connJson.uris
const [pub,host] = uri.split("@")
const peers = await listPeers()
if(peers.findIndex(peer => peer.pub_key === pub) === -1){
await connectPeer(pub,host)
}
const channelReq = {
userPubKey:identity_pubkey,
invite,
lndTo:pub,
}
//@ts-expect-error
const res = await fetch(`${handlerBaseUrl}/channel`,{
method:'POST',
headers: {
'Content-Type': 'application/json'
},
body:JSON.stringify(channelReq)
})
if(res.status !== 200 ){
logger.info("DOING CHANNEL INVITE THING: FAILED ")
return
}
invites.push(invite)
await Storage.setItem('processedInvites',invites)
logger.info("DOING CHANNEL INVITE THING: DONE!")
} catch(e){
logger.error("error sending invite to channels handler")
logger.info("DOING CHANNEL INVITE THING: :(")
logger.error(e)
}
}

View file

@ -50,7 +50,7 @@ class LNDErrorManager {
*/
const listener = (err, response) => {
if (err) {
if (err.code === 12) {
if (err.details.includes("wallet not created") || err.details.includes("wallet locked")) {
res({
service: 'walletUnlocker',
message: 'Wallet locked',

View file

@ -134,15 +134,15 @@ export interface Services {
}
/**
 * Request shape for LND's ListChannels RPC. All filters are optional;
 * omitting them returns every channel.
 * FIX: removed diff residue that left each field declared twice
 * (once required, once optional).
 */
export interface ListChannelsReq {
  active_only?: boolean
  inactive_only?: boolean
  public_only?: boolean
  private_only?: boolean
  /**
   * Filters the response for channels with a target peer's pubkey. If peer is
   * empty, all channels will be returned.
   */
  peer?: Common.Bytes
}
/**
@ -193,4 +193,8 @@ export interface AddInvoiceRes {
* all payments for this invoice as we require it for end to end security.
*/
payment_addr: Common.Bytes
/**
* Custom property, by us.
*/
liquidityCheck?: boolean
}

View file

@ -2,7 +2,7 @@
* @format
*/
const Crypto = require('crypto')
const logger = require('winston')
const logger = require('../../config/log')
const Common = require('shock-common')
const Ramda = require('ramda')
@ -578,7 +578,7 @@ const addInvoice = (value, memo = '', confidential = true, expiry = 180) =>
*
*/
/**
* @param {(invoice:Common.Schema.InvoiceWhenListed & {r_hash:Buffer,payment_addr:string}) => (boolean | undefined)} dataCb
* @param {(invoice:Common.Schema.InvoiceWhenListed & {r_hash:Buffer,payment_addr:Buffer}) => (boolean | undefined)} dataCb
* @param {(error:lndErr) => void} errorCb
*/
const subscribeInvoices = (dataCb, errorCb) => {
@ -631,6 +631,43 @@ const subscribeTransactions = (dataCb, errorCb) => {
})
}
/**
 * Promisified wrapper around LND's `GetInfo` RPC.
 *
 * @returns {Promise<any>} Resolves with the raw RPC response; rejects
 * with an `Error` carrying the RPC error message.
 */
const getInfo = () =>
  Common.makePromise((resolve, reject) => {
    const { lightning } = lightningServices.getServices()
    lightning.getInfo({}, (err, response) => {
      if (err) {
        reject(new Error(err.message))
        return
      }
      // Needs cast because typescript refuses to assign Record<string, any>
      // to an actual object :shrugs
      resolve(response)
    })
  })
/**
 * Promisified wrapper around LND's `ConnectPeer` RPC. Dials the peer as a
 * persistent connection (`perm: true`).
 *
 * @param {string} pubkey - The peer's identity pubkey.
 * @param {string} host - The peer's network address.
 * @returns {Promise<any>} Resolves with the raw RPC response; rejects
 * with an `Error` carrying the RPC error message.
 */
const connectPeer = (pubkey, host) =>
  Common.makePromise((resolve, reject) => {
    const { lightning } = lightningServices.getServices()
    lightning.connectPeer(
      {
        addr: { pubkey, host },
        perm: true
      },
      (err, response) => {
        if (err) {
          reject(new Error(err.message))
          return
        }
        // Needs cast because typescript refuses to assign Record<string, any>
        // to an actual object :shrugs
        resolve(response)
      }
    )
  })
module.exports = {
sendPaymentV2Keysend,
sendPaymentV2Invoice,
@ -644,5 +681,7 @@ module.exports = {
pendingChannels,
addInvoice,
subscribeInvoices,
subscribeTransactions
subscribeTransactions,
getInfo,
connectPeer
}

View file

@ -1,37 +1,61 @@
module.exports = {
unprotectedRoutes: {
GET: {
"/healthz": true,
"/ping": true,
"/tunnel/status": true,
'/healthz': true,
'/ping': true,
'/tunnel/status': true,
// Errors out when viewing an API page from the browser
"/favicon.ico": true,
"/api/lnd/connect": true,
"/api/lnd/wallet/status": true,
"/api/lnd/auth": true,
'/favicon.ico': true,
'/api/lnd/connect': true,
'/api/lnd/wallet/status': true,
//
"/api/gun/auth": true,
"/api/subscribeStream":true,
'/api/gunw': true,
'/api/subscribeStream': true,
'/': true,
'/api/accessInfo': true,
'/qrCodeGenerator': true
},
POST: {
"/api/lnd/connect": true,
"/api/lnd/wallet": true,
"/api/lnd/wallet/existing": true,
"/api/lnd/auth": true,
"/api/security/exchangeKeys": true,
"/api/encryption/exchange": true
'/api/lnd/connect': true,
'/api/lnd/wallet': true,
'/api/lnd/wallet/existing': true,
'/api/lnd/unlock': true,
'/api/security/exchangeKeys': true,
'/api/encryption/exchange': true
},
PUT: {},
DELETE: {}
DELETE: {},
// Preflight request (CORS)
get OPTIONS() {
return {
...this.POST,
...this.GET,
...this.PUT,
...this.DELETE
}
}
},
sensitiveRoutes: {
GET: {},
POST: {
"/api/lnd/connect": true,
"/api/lnd/wallet": true
'/api/lnd/connect': true,
'/api/lnd/wallet': true
},
PUT: {},
DELETE: {}
},
nonEncryptedRoutes: ['/api/security/exchangeKeys', "/api/encryption/exchange", '/healthz', '/ping', '/tunnel/status', '/api/lnd/wallet/status', '/api/gun/auth',"/api/subscribeStream"]
nonEncryptedRoutes: [
'/api/security/exchangeKeys',
'/api/encryption/exchange',
'/healthz',
'/ping',
'/tunnel/status',
'/api/lnd/wallet/status',
'/api/gun/auth',
'/api/subscribeStream',
'/',
'/api/accessInfo',
'/qrCodeGenerator',
'/gun'
]
}

6443
yarn.lock

File diff suppressed because it is too large Load diff