v12.0.0 - initial commit
This commit is contained in:
commit
e2c49ea43c
1145 changed files with 97211 additions and 0 deletions
57
packages/server/.sample.env
Normal file
57
packages/server/.sample.env
Normal file
|
|
@ -0,0 +1,57 @@
|
|||
NODE_ENV=
|
||||
|
||||
## Database variables
|
||||
|
||||
# Postgres related environment variables
|
||||
POSTGRES_USER=
|
||||
POSTGRES_PASSWORD=
|
||||
POSTGRES_HOST=
|
||||
POSTGRES_PORT=
|
||||
POSTGRES_DB=
|
||||
|
||||
## File paths
|
||||
|
||||
# Certificate-related variables
|
||||
CA_PATH=
|
||||
CERT_PATH=
|
||||
KEY_PATH=
|
||||
|
||||
# Full path to where the wallet's mnemonic is stored
|
||||
MNEMONIC_PATH=
|
||||
|
||||
## Directories
|
||||
|
||||
BLOCKCHAIN_DIR=
|
||||
OFAC_DATA_DIR=
|
||||
ID_PHOTO_CARD_DIR=
|
||||
FRONT_CAMERA_DIR=
|
||||
OPERATOR_DATA_DIR=
|
||||
|
||||
## URLs
|
||||
|
||||
COIN_ATM_RADAR_URL=
|
||||
|
||||
## Misc
|
||||
|
||||
HOSTNAME=
|
||||
LOG_LEVEL=
|
||||
LIGHTNING_NETWORK_DAEMON=
|
||||
|
||||
# Crypto nodes related variables
|
||||
|
||||
## Location info (can be local or remote)
|
||||
BTC_NODE_LOCATION=
|
||||
BTC_WALLET_LOCATION=
|
||||
|
||||
## Node connection info (remote node only)
|
||||
BTC_NODE_HOST=
|
||||
BTC_NODE_PORT=
|
||||
|
||||
## Node connection info (remote wallet only)
|
||||
BTC_NODE_RPC_HOST=
|
||||
BTC_NODE_RPC_PORT=
|
||||
BTC_NODE_USER=
|
||||
BTC_NODE_PASSWORD=
|
||||
|
||||
## Uncategorized variables
|
||||
WEBHOOK_URL=
|
||||
33
packages/server/Lamassu_CA.pem
Normal file
33
packages/server/Lamassu_CA.pem
Normal file
|
|
@ -0,0 +1,33 @@
|
|||
-----BEGIN CERTIFICATE-----
|
||||
MIIFxzCCA6+gAwIBAgIJAJKY0HTYYIToMA0GCSqGSIb3DQEBCwUAMEsxCzAJBgNV
|
||||
BAYTAklTMRIwEAYDVQQHEwlSZXlramF2aWsxEzARBgNVBAoTCkxhbWFzc3UgQ0Ex
|
||||
EzARBgNVBAMTCmxhbWFzc3UuaXMwHhcNMTcxMjEwMjI0ODA4WhcNMjcwOTA5MjI0
|
||||
ODA4WjBLMQswCQYDVQQGEwJJUzESMBAGA1UEBxMJUmV5a2phdmlrMRMwEQYDVQQK
|
||||
EwpMYW1hc3N1IENBMRMwEQYDVQQDEwpsYW1hc3N1LmlzMIICIjANBgkqhkiG9w0B
|
||||
AQEFAAOCAg8AMIICCgKCAgEAqtyxqhOYAp/nXyeUPezX4ojB5/Yh/Ut/4sScwnOP
|
||||
nlGcW6IhBZfd6G4EgSvskgReNwiLqDrqfLit00gp0SnJsA88jNslZDvp/X/POcwO
|
||||
lORn2mkjsBuCZG0hLAYzpql+fn3xxKPGkaCTLITo6LoX90e0Z6ApXqeB9XSlvybl
|
||||
BW3P1OSOv6LPG9n7nkBANV1rWgmYPBq15y4ddD33NAMpqXCmkB2i444bZQ2TUaNq
|
||||
J/6rul0btH1obLg6vR53ioDJxNBs0NEhHSev4YA6Cq8NxGZSpRdvygLFW3IQb5Np
|
||||
4qmfYptmA+KyU2/4pMjO3VFLUcDujOyEcguaBVK6eecrucSg8S6pNHodPo7Z3hTn
|
||||
HRUPSnPToNLisLOc2336dGKrfGaQTvBqLRihnQdNnmS5CRD9u9+Vzjz9VBe7C9lC
|
||||
V02aDV113npzjl/VeNVQWeiT8XchGI1TXPZD+MUXgymCOho0CxqwGpiNL5w+2XUC
|
||||
Rb9aWcdpxBHxeSPLhqvDRf1cEuokEOrE1JkHepGFJtZXKszkuznw/pzNdmv9Gjw1
|
||||
/5cvnmG/QGQ2rjkYEd/7wuDbH/Ta5hiqlZLYMniptH6kAldxqE5+CqmhTyI75BwS
|
||||
VLv0fZkM+QB5QxDbD5cQ2FJJetg1Q3J/Rkn8kzaIxI9b6slESph//kw1aFdj1Lwx
|
||||
JbcCAwEAAaOBrTCBqjAdBgNVHQ4EFgQUQkz413M5wHy53wcgYh4W7uWzboEwewYD
|
||||
VR0jBHQwcoAUQkz413M5wHy53wcgYh4W7uWzboGhT6RNMEsxCzAJBgNVBAYTAklT
|
||||
MRIwEAYDVQQHEwlSZXlramF2aWsxEzARBgNVBAoTCkxhbWFzc3UgQ0ExEzARBgNV
|
||||
BAMTCmxhbWFzc3UuaXOCCQCSmNB02GCE6DAMBgNVHRMEBTADAQH/MA0GCSqGSIb3
|
||||
DQEBCwUAA4ICAQAaRBasuUneGcSmCGZ/oCgdMTTBzMK79fMWr8yRt4VShuFpGAd1
|
||||
s8VoUNsZizgucUPTGwi3QK2KogZia2Rjq0jjk4OV12Cbsx8wTntnT0oYIBJL2Bvj
|
||||
r2uxEfJJQqs2AVZMTrje+NiFnSlbINpEhxSUuDZzTY0+nPMZ7kSYCW13SHdO86rR
|
||||
yHIwhd2iCiVLkjBcsUAyJHioPufbDvHUNiXyH2E5dbRLsvhrpluPM6JtlBmUBU/E
|
||||
kK3Bq4+P4ZQ/VIfy8xuL8+hXWgB9lTrN8LZ/B40wGoRsZT2pq10xDVdmvYDseuAD
|
||||
2GiAnY7eP+AftTV6My7oBWG2IZYpy73qKlUundNt3b9gIAPPMpjAC/Scpq4vslBl
|
||||
rR/dMw8C5qsVdk9Ek85SO95y/4jJn1SMLQ0udcKO8G97h5JifrGUxdWH+sIkZTkN
|
||||
zDAz+K+3HpVeGGYeue+QvF+fQ7Fxj+h6bnMbHe0wc4Q8ZlOb5THj1Fq5YFOx3BoD
|
||||
Qzn9vuWQ0wCGN4uDG6zqwhhaXY7pt+jTproBwQCULy0UR7MFGzJ+WjwDcJkx3oGB
|
||||
WU93wi+56O/DYU4u/3wSqFfGTYQQRVl55hS0heWbwWywxdiHe8SgHjSyDDPps4EP
|
||||
BW1l+RG2QLoqo2TD8jKiJnfh2LiUpLeH5RTeGXfDyEksNzAUnN/fm280dw==
|
||||
-----END CERTIFICATE-----
|
||||
9
packages/server/bin/bip39
Executable file
9
packages/server/bin/bip39
Executable file
|
|
@ -0,0 +1,9 @@
|
|||
#!/usr/bin/env node
|
||||
|
||||
'use strict'
|
||||
|
||||
const mnemonicHelpers = require('../lib/mnemonic-helpers')
|
||||
|
||||
const seed = process.argv[2]
|
||||
|
||||
console.log(mnemonicHelpers.fromSeed(seed))
|
||||
17
packages/server/bin/hkdf
Executable file
17
packages/server/bin/hkdf
Executable file
|
|
@ -0,0 +1,17 @@
|
|||
#!/usr/bin/env node
|
||||
|
||||
'use strict'
|
||||
|
||||
const hkdf = require('futoin-hkdf')
|
||||
|
||||
const label = process.argv[2]
|
||||
const masterSeedHex = process.argv[3].trim()
|
||||
|
||||
if (process.argv.length !== 4) {
|
||||
console.error('hdkf <label> <masterKey>')
|
||||
console.error('masterKey should be in hex encoding.')
|
||||
process.exit(3)
|
||||
}
|
||||
|
||||
const masterSeed = Buffer.from(masterSeedHex, 'hex')
|
||||
console.log(hkdf(masterSeed, 32, { salt: 'lamassu-server-salt', info: label }).toString('hex'))
|
||||
5
packages/server/bin/lamassu-admin-server
Executable file
5
packages/server/bin/lamassu-admin-server
Executable file
|
|
@ -0,0 +1,5 @@
|
|||
#!/usr/bin/env node
|
||||
|
||||
const adminServer = require('../lib/new-admin/admin-server')
|
||||
|
||||
adminServer.run()
|
||||
26
packages/server/bin/lamassu-admin-server-entrypoint.sh
Normal file
26
packages/server/bin/lamassu-admin-server-entrypoint.sh
Normal file
|
|
@ -0,0 +1,26 @@
|
|||
#!/usr/bin/env bash
|
||||
set -e
|
||||
|
||||
CERT_FILES=(
|
||||
/lamassu-data/certs/{Lamassu_CA,Lamassu_OP,Lamassu_OP_Root_CA}.pem
|
||||
/lamassu-data/certs/Lamassu_OP_Root_CA.srl
|
||||
/lamassu-data/private/{Lamassu_OP,Lamassu_OP_Root_CA}.key
|
||||
)
|
||||
|
||||
if ! (( ${#CERT_FILES[@]} == $(ls "${CERT_FILES[@]}" 2>/dev/null | wc -l) )); then
|
||||
echo "Some certificates are missing. Retrying in 5 seconds"
|
||||
sleep 5
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "Update certs on alpine"
|
||||
cp /lamassu-data/certs/Lamassu_CA.pem /usr/local/share/ca-certificates
|
||||
cp /lamassu-data/certs/Lamassu_OP_Root_CA.pem /usr/local/share/ca-certificates
|
||||
update-ca-certificates
|
||||
|
||||
if [ "${LAMASSU_DEV_MODE}" = "true" ]; then
|
||||
echo "Starting in dev mode"
|
||||
node /lamassu-server/bin/lamassu-admin-server --lamassuDev
|
||||
else
|
||||
node /lamassu-server/bin/lamassu-admin-server
|
||||
fi
|
||||
17
packages/server/bin/lamassu-backup-pg
Executable file
17
packages/server/bin/lamassu-backup-pg
Executable file
|
|
@ -0,0 +1,17 @@
|
|||
#!/usr/bin/env bash
|
||||
set -e
|
||||
|
||||
if [ "$(whoami)" != "root" ]; then
|
||||
echo -e "This script has to be run as \033[1mroot\033[0m user"
|
||||
exit 3
|
||||
fi
|
||||
|
||||
DAYS_TO_KEEP=3
|
||||
DATE=$(date --utc +%F_%H-%M)
|
||||
BACKUP_DIR=/var/backups/postgresql
|
||||
BACKUP_FILE=$BACKUP_DIR/backup-$DATE.sql.gz
|
||||
|
||||
cd ~postgres
|
||||
su postgres -c "pg_dump lamassu" | gzip > $BACKUP_FILE
|
||||
cd
|
||||
find $BACKUP_DIR -maxdepth 1 -mtime +$DAYS_TO_KEEP -exec rm -f '{}' ';'
|
||||
36
packages/server/bin/lamassu-batch-diagnostics
Executable file
36
packages/server/bin/lamassu-batch-diagnostics
Executable file
|
|
@ -0,0 +1,36 @@
|
|||
#!/usr/bin/env node
|
||||
|
||||
require('../lib/environment-helper')
|
||||
|
||||
const db = require('../lib/db')
|
||||
const machineLoader = require('../lib/machine-loader')
|
||||
const operator = require('../lib/operator')
|
||||
|
||||
console.log('Running diagnostics on all paired devices...\n')
|
||||
|
||||
operator.getOperatorId('middleware')
|
||||
.then(operatorId => {
|
||||
if (!operatorId) {
|
||||
throw new Error('Operator ID not found in database')
|
||||
}
|
||||
|
||||
return db.any('SELECT device_id, name FROM devices')
|
||||
.then(devices => ({ operatorId, devices }))
|
||||
})
|
||||
.then(({ operatorId, devices }) => {
|
||||
if (devices.length === 0) {
|
||||
console.log('No paired devices found.')
|
||||
process.exit(0)
|
||||
}
|
||||
|
||||
const deviceIds = devices.map(d => d.device_id)
|
||||
return machineLoader.batchDiagnostics(deviceIds, operatorId)
|
||||
})
|
||||
.then(() => {
|
||||
console.log('\n✓ Diagnostics initiated for all devices. It can take a few minutes for the results to appear on the admin.')
|
||||
process.exit(0)
|
||||
})
|
||||
.catch(err => {
|
||||
console.error('Error:', err.message)
|
||||
process.exit(1)
|
||||
})
|
||||
47
packages/server/bin/lamassu-btc-bumpfee
Executable file
47
packages/server/bin/lamassu-btc-bumpfee
Executable file
|
|
@ -0,0 +1,47 @@
|
|||
#!/usr/bin/env node
|
||||
|
||||
const inquirer = require('inquirer')
|
||||
|
||||
const bitcoind = require('../lib/plugins/wallet/bitcoind/bitcoind')
|
||||
const BN = require('../lib/bn')
|
||||
const mempool = require('../lib/blockexplorers/mempool.space')
|
||||
|
||||
const txId = process.argv[2]
|
||||
if (!txId) {
|
||||
console.error('Please provide a BTC transaction hash as input.')
|
||||
process.exit(1)
|
||||
}
|
||||
|
||||
const bumpTransactionFee = async (txId) => {
|
||||
const txData = await bitcoind.fetch('gettransaction', [txId, true, true])
|
||||
|
||||
const fee = new BN(txData.fee).abs().shiftedBy(8).decimalPlaces(0)
|
||||
const size = txData.decoded.vsize
|
||||
const satPerVb = fee.div(size)
|
||||
|
||||
console.log(`Current fee: ${satPerVb.toFixed(2).toString()} sat/vB`)
|
||||
|
||||
const recommendedFees = await mempool.getSatBEstimateFees()
|
||||
|
||||
console.log('Recommended fees (sat/vB):', recommendedFees)
|
||||
|
||||
const { selectedFee } = await inquirer.prompt([
|
||||
{
|
||||
type: 'list',
|
||||
name: 'selectedFee',
|
||||
message: 'Select a fee higher than the current one:',
|
||||
choices: Object.entries(recommendedFees)
|
||||
.filter(([_, value]) => satPerVb.lt(value))
|
||||
.map(([key, value]) => ({name: `${key}: ${value} sat/vB`, value})),
|
||||
},
|
||||
])
|
||||
|
||||
const { txid } = await bitcoind.fetch('bumpfee', [txId, {fee_rate: selectedFee}])
|
||||
|
||||
console.log(`
|
||||
Fee bumped to ${selectedFee.toFixed(2)} sat/vB
|
||||
Transaction ID: ${txid}
|
||||
`)
|
||||
}
|
||||
|
||||
bumpTransactionFee(txId)
|
||||
43
packages/server/bin/lamassu-clean-parsed-id
Executable file
43
packages/server/bin/lamassu-clean-parsed-id
Executable file
|
|
@ -0,0 +1,43 @@
|
|||
#!/usr/bin/env node
|
||||
|
||||
require('../lib/environment-helper')
|
||||
|
||||
const argv = require('minimist')(process.argv.slice(2))
|
||||
const _ = require('lodash')
|
||||
const db = require('../lib/db')
|
||||
|
||||
const txId = argv.tx
|
||||
const customerId = argv.customer
|
||||
|
||||
if ((!txId && !customerId) || (txId && customerId)) {
|
||||
console.log('Usage: lamassu-clean-parsed-id [--tx <txId> | --customer <customerId>]')
|
||||
console.log('The command can only be run with EITHER --tx OR --customer, NOT BOTH')
|
||||
process.exit(2)
|
||||
}
|
||||
|
||||
if (!_.isNil(txId)) {
|
||||
db.oneOrNone('SELECT * FROM (SELECT id, customer_id FROM cash_in_txs UNION SELECT id, customer_id FROM cash_out_txs) as txs WHERE txs.id = $1', [txId])
|
||||
.then(res => {
|
||||
return db.none('UPDATE customers SET id_card_data = null WHERE id = $1', [res.customer_id])
|
||||
.then(() => {
|
||||
console.log(`ID card data from customer ${res.customer_id} was cleared with success`)
|
||||
process.exit(0)
|
||||
})
|
||||
})
|
||||
.catch(() => {
|
||||
console.log('A transaction with that ID was not found')
|
||||
process.exit(0)
|
||||
})
|
||||
}
|
||||
|
||||
if (!_.isNil(customerId)) {
|
||||
db.none('UPDATE customers SET id_card_data = null WHERE id = $1', [customerId])
|
||||
.then(() => {
|
||||
console.log(`ID card data from customer ${customerId} was cleared with success`)
|
||||
process.exit(0)
|
||||
})
|
||||
.catch(() => {
|
||||
console.log('A customer with that ID was not found')
|
||||
process.exit(0)
|
||||
})
|
||||
}
|
||||
11
packages/server/bin/lamassu-coinatmradar
Executable file
11
packages/server/bin/lamassu-coinatmradar
Executable file
|
|
@ -0,0 +1,11 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
echo
|
||||
echo "Here is the 'External ID' of your paired machine(s), for use under the 'ATM / Teller details' of your CoinATMRadar listing:"
|
||||
echo
|
||||
su - postgres -c "psql \"lamassu\" -Atc \"select regexp_replace(device_id, '$', ' '),regexp_replace(name, '^', ' ') from devices\""
|
||||
echo
|
||||
echo "If speaking with CoinATMRadar directly, it may be helpful to let them know your 'Operator ID':"
|
||||
echo
|
||||
$(npm root -g)/lamassu-server/bin/lamassu-operator
|
||||
echo
|
||||
7
packages/server/bin/lamassu-coins
Executable file
7
packages/server/bin/lamassu-coins
Executable file
|
|
@ -0,0 +1,7 @@
|
|||
#!/usr/bin/env node
|
||||
|
||||
require('../lib/environment-helper')
|
||||
|
||||
const install = require('../lib/blockchain/install')
|
||||
|
||||
install.run()
|
||||
11
packages/server/bin/lamassu-configure-frontcamera
Executable file
11
packages/server/bin/lamassu-configure-frontcamera
Executable file
|
|
@ -0,0 +1,11 @@
|
|||
#!/usr/bin/env node
|
||||
|
||||
'use strict'
|
||||
|
||||
require('../lib/environment-helper')
|
||||
|
||||
const setEnvVariable = require('../tools/set-env-var')
|
||||
|
||||
if (!process.env.FRONT_CAMERA_DIR) {
|
||||
setEnvVariable('FRONT_CAMERA_DIR', '/opt/lamassu-server/frontcamera')
|
||||
}
|
||||
7
packages/server/bin/lamassu-devices
Executable file
7
packages/server/bin/lamassu-devices
Executable file
|
|
@ -0,0 +1,7 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
echo
|
||||
echo "Your list of paired machines and their Device IDs and names:"
|
||||
echo
|
||||
su - postgres -c "psql \"lamassu\" -Atc \"select device_id, name from devices\""
|
||||
echo
|
||||
67
packages/server/bin/lamassu-eth-recovery
Executable file
67
packages/server/bin/lamassu-eth-recovery
Executable file
|
|
@ -0,0 +1,67 @@
|
|||
#!/usr/bin/env node
|
||||
|
||||
require('../lib/environment-helper')
|
||||
const hdkey = require('ethereumjs-wallet/hdkey')
|
||||
const hkdf = require('futoin-hkdf')
|
||||
const db = require('../lib/db')
|
||||
const _ = require('lodash/fp')
|
||||
const mnemonicHelpers = require('../lib/mnemonic-helpers')
|
||||
|
||||
const pify = require('pify')
|
||||
const fs = pify(require('fs'))
|
||||
const os = require('os')
|
||||
|
||||
const MNEMONIC_PATH = process.env.MNEMONIC_PATH
|
||||
|
||||
const defaultPrefixPath = "m/44'/60'/1'/0'"
|
||||
const paymentPrefixPath = "m/44'/60'/0'/0'"
|
||||
|
||||
const address = process.argv[2]
|
||||
|
||||
if (!MNEMONIC_PATH) {
|
||||
console.error(`Unable to fetch mnemonic from your account!`)
|
||||
process.exit(1)
|
||||
}
|
||||
|
||||
if (!address) {
|
||||
console.log('Usage: lamassu-eth-recovery <cash-out address>')
|
||||
process.exit(2)
|
||||
}
|
||||
|
||||
function run (address) {
|
||||
Promise.all([fetchMnemonic(), searchForHdIndex(address)])
|
||||
.then(([mnemonic, hdIndex]) => {
|
||||
try {
|
||||
const prefix = !_.isNil(hdIndex) ? paymentPrefixPath : defaultPrefixPath
|
||||
console.log(`Private key: `, defaultHdNode(mnemonic, prefix).deriveChild(hdIndex).getWallet().getPrivateKeyString())
|
||||
process.exit(0)
|
||||
} catch (err) {
|
||||
console.error(`Error while retrieving private key!`)
|
||||
process.exit(3)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
function searchForHdIndex (address) {
|
||||
const sql = `SELECT hd_index FROM cash_out_txs WHERE to_address = $1`
|
||||
return db.oneOrNone(sql, [address])
|
||||
.then(result => _.get('hd_index', result))
|
||||
}
|
||||
|
||||
function fetchMnemonic () {
|
||||
return fs.readFile(MNEMONIC_PATH, 'utf8')
|
||||
.then(mnemonic => computeSeed(mnemonic))
|
||||
}
|
||||
|
||||
function computeSeed (seed) {
|
||||
const masterSeed = mnemonicHelpers.toEntropyBuffer(seed)
|
||||
return hkdf(masterSeed, 32, { salt: 'lamassu-server-salt', info: 'wallet-seed' })
|
||||
}
|
||||
|
||||
function defaultHdNode (masterSeed, prefix) {
|
||||
if (!masterSeed) throw new Error('No master seed!')
|
||||
const key = hdkey.fromMasterSeed(masterSeed)
|
||||
return key.derivePath(prefix)
|
||||
}
|
||||
|
||||
run(address)
|
||||
298
packages/server/bin/lamassu-eth-sweep-to-new-wallet
Normal file
298
packages/server/bin/lamassu-eth-sweep-to-new-wallet
Normal file
|
|
@ -0,0 +1,298 @@
|
|||
#!/usr/bin/env node
|
||||
|
||||
require('../lib/environment-helper')
|
||||
|
||||
const hdkey = require('ethereumjs-wallet/hdkey')
|
||||
const hkdf = require('futoin-hkdf')
|
||||
const crypto = require('crypto')
|
||||
const path = require('path')
|
||||
const pify = require('pify')
|
||||
const fs = pify(require('fs'))
|
||||
const _ = require('lodash/fp')
|
||||
const { BigNumber } = require('bignumber.js')
|
||||
const coins = require('@lamassu/coins')
|
||||
const Web3 = require('web3')
|
||||
const web3 = new Web3()
|
||||
const Tx = require('ethereumjs-tx')
|
||||
|
||||
const mnemonicHelpers = require('../lib/mnemonic-helpers')
|
||||
const settingsLoader = require('../lib/new-settings-loader')
|
||||
const BN = require('../lib/bn')
|
||||
const ph = require('../lib/plugin-helper')
|
||||
const configManager = require('../lib/new-config-manager')
|
||||
const walletI = require('../lib/wallet')
|
||||
|
||||
const LOCKFILE_PATH = '/var/lock/lamassu-eth-pending-sweep'
|
||||
const defaultPrefixPath = "m/44'/60'/1'/0'"
|
||||
let lastUsedNonces = {}
|
||||
|
||||
const hex = bigNum => '0x' + bigNum.integerValue(BN.ROUND_DOWN).toString(16)
|
||||
const MNEMONIC_PATH = process.env.MNEMONIC_PATH
|
||||
|
||||
function writeNewMnemonic (mnemonic) {
|
||||
return fs.writeFile(`${MNEMONIC_PATH}-new-temp`, mnemonic)
|
||||
.then(() => `${MNEMONIC_PATH}-new-temp`)
|
||||
}
|
||||
|
||||
function renameNewMnemonic () {
|
||||
return fs.rename(`${MNEMONIC_PATH}-new-temp`, `${MNEMONIC_PATH}`)
|
||||
.then(() => MNEMONIC_PATH)
|
||||
}
|
||||
|
||||
function backupMnemonic () {
|
||||
const folderPath = path.dirname(MNEMONIC_PATH)
|
||||
const fileName = path.resolve(folderPath, `mnemonic-${Date.now()}.txt`)
|
||||
return fs.copyFile(MNEMONIC_PATH, fileName)
|
||||
.then(() => fileName)
|
||||
}
|
||||
|
||||
function computeSeed (seed) {
|
||||
const masterSeed = mnemonicHelpers.toEntropyBuffer(seed)
|
||||
return hkdf(masterSeed, 32, { salt: 'lamassu-server-salt', info: 'wallet-seed' })
|
||||
}
|
||||
|
||||
function computeOperatorId (masterSeed) {
|
||||
return hkdf(masterSeed, 16, { salt: 'lamassu-server-salt', info: 'operator-id' }).toString('hex')
|
||||
}
|
||||
|
||||
function generateRandomSeed () {
|
||||
const seed = crypto
|
||||
.randomBytes(32)
|
||||
.toString('hex')
|
||||
|
||||
return Buffer.from(seed, 'hex')
|
||||
}
|
||||
|
||||
function generateNewMnemonic (newSeed) {
|
||||
return mnemonicHelpers.fromSeed(newSeed)
|
||||
}
|
||||
|
||||
function defaultWallet (seed) {
|
||||
return defaultHdNode(seed).deriveChild(0).getWallet()
|
||||
}
|
||||
|
||||
function defaultWalletAcc (account) {
|
||||
return defaultHdNodeAcc(account).deriveChild(0).getWallet()
|
||||
}
|
||||
|
||||
function defaultAddress (seed) {
|
||||
return defaultWallet(seed).getChecksumAddressString()
|
||||
}
|
||||
|
||||
function defaultHdNode (seed) {
|
||||
const key = hdkey.fromMasterSeed(seed)
|
||||
return key.derivePath(defaultPrefixPath)
|
||||
}
|
||||
|
||||
function defaultHdNodeAcc (account) {
|
||||
const key = hdkey.fromMasterSeed(account.seed)
|
||||
return key.derivePath(defaultPrefixPath)
|
||||
}
|
||||
|
||||
function getAllBalance (settings) {
|
||||
return Promise.resolve(settings)
|
||||
.then(settings => walletI.balance(settings, 'ETH'))
|
||||
.then(r => r.balance)
|
||||
}
|
||||
|
||||
function isInfuraRunning (settings) {
|
||||
const isInfuraSelected = settings.config.wallets_ETH_wallet === 'infura'
|
||||
const isInfuraConfigured =
|
||||
!_.isNil(settings.accounts.infura)
|
||||
&& !_.isNil(settings.accounts.infura.apiKey)
|
||||
&& !_.isNil(settings.accounts.infura.apiSecret)
|
||||
&& !_.isNil(settings.accounts.infura.endpoint)
|
||||
|
||||
return isInfuraSelected && isInfuraConfigured
|
||||
}
|
||||
|
||||
function isGethRunning (settings) {
|
||||
return walletI.checkBlockchainStatus(settings, 'ETH')
|
||||
.then(res => res === 'ready')
|
||||
.catch(() => false)
|
||||
}
|
||||
|
||||
function connect (url) {
|
||||
if (!web3.isConnected()) {
|
||||
web3.setProvider(new web3.providers.HttpProvider(url))
|
||||
}
|
||||
}
|
||||
|
||||
function sendCoins (account, tx, settings, operatorId, feeMultiplier, _opts) {
|
||||
const { toAddress, cryptoAtoms, cryptoCode } = tx
|
||||
const opts = { ..._opts, includesFee: _.defaultTo(false, _opts?.includesFee) }
|
||||
return generateTx(toAddress, defaultWalletAcc(account), cryptoAtoms, cryptoCode, opts)
|
||||
.then(pify(web3.eth.sendRawTransaction))
|
||||
.then(txid => {
|
||||
return pify(web3.eth.getTransaction)(txid)
|
||||
.then(tx => {
|
||||
if (!tx) return { txid }
|
||||
|
||||
const fee = new BN(tx.gas).times(new BN(tx.gasPrice)).decimalPlaces(0)
|
||||
|
||||
return { txid, fee }
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
function generateTx (_toAddress, wallet, amount, cryptoCode, opts) {
|
||||
const fromAddress = '0x' + wallet.getAddress().toString('hex')
|
||||
|
||||
const isErc20Token = coins.utils.isErc20Token(cryptoCode)
|
||||
const toAddress = isErc20Token ? coins.utils.getErc20Token(cryptoCode).contractAddress : _toAddress.toLowerCase()
|
||||
|
||||
let contract, contractData
|
||||
if (isErc20Token) {
|
||||
contract = web3.eth.contract(ABI.ERC20).at(toAddress)
|
||||
contractData = isErc20Token && contract.transfer.getData(_toAddress.toLowerCase(), hex(toSend))
|
||||
}
|
||||
|
||||
const txTemplate = {
|
||||
from: fromAddress,
|
||||
to: toAddress,
|
||||
value: amount.toString()
|
||||
}
|
||||
|
||||
if (isErc20Token) txTemplate.data = contractData
|
||||
|
||||
const promises = [
|
||||
pify(web3.eth.estimateGas)(txTemplate),
|
||||
pify(web3.eth.getGasPrice)(),
|
||||
pify(web3.eth.getTransactionCount)(fromAddress)
|
||||
]
|
||||
|
||||
return Promise.all(promises)
|
||||
.then(([gas, gasPrice, txCount]) => [
|
||||
BN(gas),
|
||||
BN(gasPrice),
|
||||
_.max([0, txCount, lastUsedNonces[fromAddress] + 1])
|
||||
])
|
||||
.then(([gas, gasPrice, txCount]) => {
|
||||
lastUsedNonces[fromAddress] = txCount
|
||||
|
||||
const toSend = opts.includesFee
|
||||
? amount.minus(gasPrice.times(gas))
|
||||
: amount
|
||||
|
||||
const rawTx = {
|
||||
chainId: _.defaultTo(1, opts?.chainId),
|
||||
nonce: _.defaultTo(txCount, opts?.nonce),
|
||||
gasPrice: hex(gasPrice),
|
||||
gasLimit: hex(gas),
|
||||
to: toAddress,
|
||||
from: fromAddress,
|
||||
value: isErc20Token ? hex(BN(0)) : hex(toSend)
|
||||
}
|
||||
|
||||
if (isErc20Token) {
|
||||
rawTx.data = contractData
|
||||
}
|
||||
|
||||
const tx = new Tx(rawTx)
|
||||
const privateKey = wallet.getPrivateKey()
|
||||
|
||||
tx.sign(privateKey)
|
||||
|
||||
return '0x' + tx.serialize().toString('hex')
|
||||
})
|
||||
}
|
||||
|
||||
function fetchWallet (settings, cryptoCode) {
|
||||
return fs.readFile(MNEMONIC_PATH, 'utf8')
|
||||
.then(mnemonic => {
|
||||
const computeSeed = masterSeed =>
|
||||
hkdf(masterSeed, 32, { salt: 'lamassu-server-salt', info: 'wallet-seed' })
|
||||
|
||||
const masterSeed = mnemonicHelpers.toEntropyBuffer(mnemonic)
|
||||
const plugin = configManager.getWalletSettings(cryptoCode, settings.config).wallet
|
||||
const wallet = ph.load(ph.WALLET, plugin)
|
||||
const rawAccount = settings.accounts[plugin]
|
||||
const account = _.set('seed', computeSeed(masterSeed), rawAccount)
|
||||
if (_.isFunction(wallet.run)) wallet.run(account)
|
||||
const operatorId = computeOperatorId(masterSeed)
|
||||
return { wallet, account, operatorId }
|
||||
})
|
||||
}
|
||||
|
||||
fs.exists(LOCKFILE_PATH, function(exists) {
|
||||
if (!exists) {
|
||||
console.log('Couldn\'t find the lamassu-eth-pending-sweep lock file, exiting...')
|
||||
process.exit(1)
|
||||
}
|
||||
})
|
||||
|
||||
const seed = generateRandomSeed()
|
||||
const mnemonic = generateNewMnemonic(seed)
|
||||
const mnemonicSeed = computeSeed(mnemonic)
|
||||
const newAddress = defaultAddress(mnemonicSeed)
|
||||
|
||||
settingsLoader.load()
|
||||
.then(settings => Promise.all([isInfuraRunning(settings), isGethRunning(settings), settings]))
|
||||
.then(([infuraIsRunning, gethIsRunning, settings]) => {
|
||||
if (!infuraIsRunning && !gethIsRunning) {
|
||||
console.log('Neither geth nor Infura are running, so the script cannot be executed.')
|
||||
process.exit(2)
|
||||
}
|
||||
|
||||
console.log(`Backing up old mnemonic...`)
|
||||
return Promise.all([backupMnemonic(), infuraIsRunning, settings])
|
||||
})
|
||||
.then(([fileName, infuraIsRunning, settings]) => {
|
||||
console.log(`Successfully backed up the old mnemonic, new location is ${fileName}`)
|
||||
return Promise.all([writeNewMnemonic(mnemonic), infuraIsRunning, settings])
|
||||
})
|
||||
.then(([tempMnemonicFileName, infuraIsRunning, settings]) => {
|
||||
console.log(`New mnemonic stored temporarily in ${tempMnemonicFileName}`)
|
||||
console.log(`Starting funds transfer...`)
|
||||
return Promise.all([infuraIsRunning, settings])
|
||||
})
|
||||
.then(([infuraIsRunning, settings]) => {
|
||||
if (infuraIsRunning) {
|
||||
const endpoint = _.startsWith('https://')(settings.accounts.infura.endpoint)
|
||||
? settings.accounts.infura.endpoint
|
||||
: `https://${settings.accounts.infura.endpoint}`
|
||||
connect(endpoint)
|
||||
} else {
|
||||
connect(`http://localhost:${coins.utils.getCryptoCurrency('ETH').defaultPort}`)
|
||||
}
|
||||
|
||||
return Promise.all([getAllBalance(settings), settings, fetchWallet(settings, 'ETH')])
|
||||
})
|
||||
.then(([balance, settings, { account, operatorId }]) => {
|
||||
const tx = {
|
||||
cryptoCode: 'ETH',
|
||||
toAddress: newAddress,
|
||||
cryptoAtoms: BN(balance.times(0.99999).toFixed(0, BigNumber.ROUND_DOWN))
|
||||
}
|
||||
|
||||
const opts = {
|
||||
chainId: 1,
|
||||
nonce: 0,
|
||||
includesFee: true
|
||||
}
|
||||
|
||||
return sendCoins(account, tx, settings, operatorId, null, opts)
|
||||
})
|
||||
.then(resTx => {
|
||||
console.log('Successfully moved funds from the old wallet to the new one.')
|
||||
console.log('Information about the transaction', resTx)
|
||||
console.log('Moving the current mnemonic to the default file...')
|
||||
return renameNewMnemonic()
|
||||
})
|
||||
.then(() => {
|
||||
console.log('New mnemonic stored successfully! All your funds (minus the transaction fee) should be available in the next few minutes.')
|
||||
return fs.rmdir(LOCKFILE_PATH)
|
||||
})
|
||||
.then(() => {
|
||||
console.log('lamassu-eth-pending-sweep lock file successfully removed')
|
||||
return fs.mkdir(`${LOCKFILE_PATH}-finished`)
|
||||
})
|
||||
.then(() => {
|
||||
console.log('lamassu-eth-pending-sweep-finished lock file successfully created, this will automatically be deleted once the upgrade script finishes running')
|
||||
console.log('Process finished successfully! You may now execute the upgrade script again')
|
||||
process.exit(0)
|
||||
})
|
||||
.catch(err => {
|
||||
console.error(err)
|
||||
process.exit(1)
|
||||
})
|
||||
46
packages/server/bin/lamassu-migrate
Executable file
46
packages/server/bin/lamassu-migrate
Executable file
|
|
@ -0,0 +1,46 @@
|
|||
#!/usr/bin/env node
|
||||
const _ = require('lodash/fp')
|
||||
|
||||
require('../lib/environment-helper')
|
||||
const db = require('../lib/db')
|
||||
const migrate = require('../lib/migrate')
|
||||
|
||||
const createMigration = `CREATE TABLE IF NOT EXISTS migrations (
|
||||
id serial PRIMARY KEY,
|
||||
data json NOT NULL
|
||||
)`
|
||||
|
||||
// no need to log the migration process
|
||||
process.env.SKIP_SERVER_LOGS = true
|
||||
|
||||
function checkPostgresVersion () {
|
||||
return db.one('SHOW server_version;')
|
||||
.then(result => {
|
||||
console.log(result)
|
||||
const versionString = result.server_version
|
||||
const match = versionString.match(/(\d+)\.(\d+)/i)
|
||||
if (!match) {
|
||||
throw new Error(`Could not parse PostgreSQL version: ${versionString}`)
|
||||
}
|
||||
return parseInt(match[1], 10)
|
||||
})
|
||||
}
|
||||
|
||||
checkPostgresVersion()
|
||||
.then(majorVersion => {
|
||||
if (majorVersion < 12) {
|
||||
console.error('PostgreSQL version must be 12 or higher. Current version:', majorVersion)
|
||||
process.exit(1)
|
||||
}
|
||||
|
||||
return db.none(createMigration)
|
||||
.then(() => migrate.run())
|
||||
.then(() => {
|
||||
console.log('DB Migration succeeded.')
|
||||
process.exit(0)
|
||||
})
|
||||
})
|
||||
.catch(err => {
|
||||
console.error('DB Migration failed: %s', err)
|
||||
process.exit(1)
|
||||
})
|
||||
9
packages/server/bin/lamassu-mnemonic
Executable file
9
packages/server/bin/lamassu-mnemonic
Executable file
|
|
@ -0,0 +1,9 @@
|
|||
#!/usr/bin/env node
|
||||
|
||||
const fs = require('fs')
|
||||
require('../lib/environment-helper')
|
||||
|
||||
const MNEMONIC_PATH = process.env.MNEMONIC_PATH
|
||||
|
||||
const mnemonic = fs.readFileSync(MNEMONIC_PATH, 'utf8').trim()
|
||||
console.log(mnemonic)
|
||||
11
packages/server/bin/lamassu-ofac-update
Executable file
11
packages/server/bin/lamassu-ofac-update
Executable file
|
|
@ -0,0 +1,11 @@
|
|||
#!/usr/bin/env node
|
||||
|
||||
require('../lib/environment-helper')
|
||||
|
||||
const ofac = require('../lib/ofac/update')
|
||||
|
||||
console.log('Updating OFAC databases.')
|
||||
|
||||
ofac.update()
|
||||
.then(() => console.log('Success.'))
|
||||
.catch(console.log)
|
||||
14
packages/server/bin/lamassu-operator
Executable file
14
packages/server/bin/lamassu-operator
Executable file
|
|
@ -0,0 +1,14 @@
|
|||
#!/usr/bin/env node
|
||||
|
||||
const fs = require('fs')
|
||||
const hkdf = require('futoin-hkdf')
|
||||
|
||||
require('../lib/environment-helper')
|
||||
const mnemonicHelpers = require('../lib/mnemonic-helpers')
|
||||
|
||||
const MNEMONIC_PATH = process.env.MNEMONIC_PATH
|
||||
|
||||
const mnemonic = fs.readFileSync(MNEMONIC_PATH, 'utf8').trim()
|
||||
const masterSeed = mnemonicHelpers.toEntropyBuffer(mnemonic)
|
||||
|
||||
console.log(hkdf(masterSeed, 16, { salt: 'lamassu-server-salt', info: 'operator-id' }).toString('hex'))
|
||||
51
packages/server/bin/lamassu-register
Executable file
51
packages/server/bin/lamassu-register
Executable file
|
|
@ -0,0 +1,51 @@
|
|||
#!/usr/bin/env node
|
||||
|
||||
require('../lib/environment-helper')
|
||||
const userManagement = require('../lib/new-admin/graphql/modules/userManagement')
|
||||
const authErrors = require('../lib/new-admin/graphql/errors')
|
||||
|
||||
const name = process.argv[2]
|
||||
const role = process.argv[3]
|
||||
const domain = process.env.HOSTNAME
|
||||
|
||||
if (!domain) {
|
||||
console.error('No hostname configured in the environment')
|
||||
process.exit(1)
|
||||
}
|
||||
|
||||
if (!name || !role) {
|
||||
console.log('Usage: lamassu-register <email> <role>')
|
||||
console.log('<role> must be \'user\' or \'superuser\'')
|
||||
process.exit(2)
|
||||
}
|
||||
|
||||
const emailRegex = /^(([^<>()[\]\\.,;:\s@\"]+(\.[^<>()[\]\\.,;:\s@\"]+)*)|(\".+\"))@((\[[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\])|(([a-zA-Z\-0-9]+\.)+[a-zA-Z]{2,}))$/
|
||||
|
||||
if (!emailRegex.test(name)) {
|
||||
console.log('Usage: <email> must be in an email format')
|
||||
process.exit(2)
|
||||
}
|
||||
|
||||
if (role !== 'user' && role !== 'superuser') {
|
||||
console.log('Usage: <role> must be \'user\' or \'superuser\'')
|
||||
process.exit(2)
|
||||
}
|
||||
|
||||
userManagement.createRegisterToken(name, role).then(token => {
|
||||
if (domain === 'localhost' && process.env.NODE_ENV !== 'production') {
|
||||
console.log(`https://${domain}:3001/register?t=${token.token}`)
|
||||
} else {
|
||||
console.log(`https://${domain}/register?t=${token.token}`)
|
||||
}
|
||||
|
||||
process.exit(0)
|
||||
}).catch(err => {
|
||||
|
||||
if (err instanceof authErrors.UserAlreadyExistsError){
|
||||
console.log(`A user with email ${name} already exists!`)
|
||||
process.exit(2)
|
||||
}
|
||||
|
||||
console.log('Error: %s', err)
|
||||
process.exit(3)
|
||||
})
|
||||
90
packages/server/bin/lamassu-send-coins
Executable file
90
packages/server/bin/lamassu-send-coins
Executable file
|
|
@ -0,0 +1,90 @@
|
|||
#!/usr/bin/env node

// lamassu-send-coins: interactively sends funds from the operator's hot
// wallet. Validates the destination address, quotes the fiat value of the
// transfer, and asks for confirmation before broadcasting.
// Usage: lamassu-send-coins <address> <amount> <coin>

require('../lib/environment-helper')

const settingsLoader = require('../lib/new-settings-loader')
const configManager = require('../lib/new-config-manager')
const wallet = require('../lib/wallet')
const { utils: coinUtils } = require('@lamassu/coins')
const BN = require('../lib/bn')
const inquirer = require('inquirer')
const ticker = require('../lib/ticker')

const [toAddress, cryptoValue, cryptoCode] = process.argv.slice(2)

// Convert a human-readable amount into the coin's base units (e.g. BTC ->
// satoshis) using the coin's unitScale. Returns null for unknown coins or
// unparseable values.
function computeCrypto (cryptoCode, value) {
  try {
    const cryptoRec = coinUtils.getCryptoCurrency(cryptoCode)
    const unitScale = cryptoRec.unitScale

    return new BN(value).shiftedBy(unitScale)
  } catch (err) {
    return null
  }
}

// exit 1: missing arguments
if (!toAddress || !cryptoValue || !cryptoCode) {
  console.log('Usage: lamassu-send-coins <address> <amount> <coin>')
  console.log('Example: lamassu-send-coins 3FUv7vKaP11idnsUKyQ2pxdWxCDMyr5HKJ 0.009 BTC')
  process.exit(1)
}

const cryptoAtoms = computeCrypto(cryptoCode, cryptoValue)

// exit 2: unsupported coin code
if (!cryptoAtoms) {
  console.log(`Unsupported coin: ${cryptoCode}.\n`)
  console.log('Usage: lamassu-send-coins <address> <amount> <coin>')
  console.log('Example: lamassu-send-coins 3FUv7vKaP11idnsUKyQ2pxdWxCDMyr5HKJ 0.009 BTC')
  process.exit(2)
}

console.log('Loading ticker...')

settingsLoader.load()
  .then(settings => {
    const fiatCode = configManager.getGlobalLocale(settings.config).fiatCurrency

    // exit 3: address fails strict validation for this coin
    return wallet.isStrictAddress(settings, cryptoCode, toAddress)
      .then(isValid => {
        if (!isValid) {
          console.log(`Invalid ${cryptoCode} address: ${toAddress}.`)
          console.log('Please check your command.\n')
          console.log('Usage: lamassu-send-coins <address> <amount> <coin>')
          console.log('Example: lamassu-send-coins 3FUv7vKaP11idnsUKyQ2pxdWxCDMyr5HKJ 0.009 BTC')
          process.exit(3)
        }
      })
      // Quote the transfer in the operator's fiat currency for the prompt.
      .then(() => ticker.getRates(settings, fiatCode, cryptoCode))
      .then(rates => {
        const fiatAmount = rates.rates.ask.times(cryptoValue).toFixed(2)

        const questions = [
          {
            type: 'confirm',
            name: 'confirm',
            message: `Are you sure you want to send ${cryptoValue} ${cryptoCode} (${fiatAmount} ${fiatCode}) to the address ${toAddress}?`,
            default: false
          }
        ]

        console.log('\nPlease look over this transaction carefully!')

        return inquirer.prompt(questions)
          .then(answers => {
            if (!answers.confirm) {
              console.log('Transaction cancelled.')
              process.exit(0)
            }

            console.log('Sending...')
            return wallet.sendCoins(settings, { toAddress, cryptoAtoms, cryptoCode })
              .then(() => {
                console.log('Success.')
                process.exit(0)
              })
              // NOTE(review): failures only log the message and let the
              // process finish with exit code 0 — confirm that is intended.
              .catch(e => console.log(e.message))
          })
          .catch(e => console.log(e.message))
      })
      .catch(e => console.log(e.message))
  })
|
||||
12
packages/server/bin/lamassu-server
Executable file
12
packages/server/bin/lamassu-server
Executable file
|
|
@ -0,0 +1,12 @@
|
|||
#!/usr/bin/env node

const app = require('../lib/app')

// Report any promise rejection nothing handled, instead of failing silently.
const reportUnhandledRejection = err => {
  console.log('Unhandled rejection')
  console.dir(err)
  console.log(err.stack)
}

process.on('unhandledRejection', reportUnhandledRejection)

// Boot the server; surface startup failures on stdout.
app.run().catch(console.log)
|
||||
25
packages/server/bin/lamassu-server-entrypoint.sh
Normal file
25
packages/server/bin/lamassu-server-entrypoint.sh
Normal file
|
|
@ -0,0 +1,25 @@
|
|||
#!/usr/bin/env bash
# Entrypoint for the lamassu-server Docker container:
#   1. ensure the TLS certificates exist (building them when missing),
#   2. register the CA certs with the OS trust store,
#   3. run database migrations,
#   4. start the server.

CERT_FILES=(
  /lamassu-data/certs/{Lamassu_CA,Lamassu_OP,Lamassu_OP_Root_CA}.pem
  /lamassu-data/certs/Lamassu_OP_Root_CA.srl
  /lamassu-data/private/{Lamassu_OP,Lamassu_OP_Root_CA}.key
)

echo "Checking for Lamassu certificates..."

# Compare how many expected files actually exist with the expected count;
# rebuild the whole set when any are missing.
if ! (( ${#CERT_FILES[@]} == $(ls "${CERT_FILES[@]}" 2>/dev/null | wc -l) )); then
  echo "Some certificates are missing. Building them..."
  bash /lamassu-server/tools/build-docker-certs.sh
fi

# BUG FIX: message previously read "Upcate certs on alpine".
echo "Update certs on alpine"
cp /lamassu-data/certs/Lamassu_CA.pem /usr/local/share/ca-certificates
cp /lamassu-data/certs/Lamassu_OP_Root_CA.pem /usr/local/share/ca-certificates
update-ca-certificates

echo "Executing migrations..."
node /lamassu-server/bin/lamassu-migrate

echo "Starting server..."
node /lamassu-server/bin/lamassu-server
|
||||
|
||||
53
packages/server/bin/lamassu-trx-recovery
Executable file
53
packages/server/bin/lamassu-trx-recovery
Executable file
|
|
@ -0,0 +1,53 @@
|
|||
#!/usr/bin/env node

// lamassu-trx-recovery: recovers the TRON private key controlling a given
// cash-out address by re-deriving it from the operator's mnemonic.
// Usage: lamassu-trx-recovery <cash-out address>
// Exit codes: 0 success, 1 no mnemonic configured, 2 usage, 3 derivation error.

require('../lib/environment-helper')
const TronWeb = require('tronweb')
const db = require('../lib/db')
const _ = require('lodash/fp')

const pify = require('pify')
const fs = pify(require('fs'))

const MNEMONIC_PATH = process.env.MNEMONIC_PATH

// BIP44 derivation prefixes for TRON (coin type 195): account 0 is the
// default wallet, account 1 holds per-transaction payment addresses.
const defaultPrefixPath = "m/44'/195'/0'/0"
const paymentPrefixPath = "m/44'/195'/1'/0"

const address = process.argv[2]

if (!MNEMONIC_PATH) {
  console.error(`Unable to fetch mnemonic from your account!`)
  process.exit(1)
}

if (!address) {
  console.log('Usage: lamassu-trx-recovery <cash-out address>')
  process.exit(2)
}

// Derive and print the private key for `address`.
function run (address) {
  Promise.all([fetchMnemonic(), searchForHdIndex(address)])
    .then(([mnemonic, hdIndex]) => {
      try {
        // Known cash-out address -> payment path at its hd_index; otherwise
        // fall back to the wallet's first default address.
        const prefix = !_.isNil(hdIndex) ? `${paymentPrefixPath}/${hdIndex}` : `${defaultPrefixPath}/0`
        // Normalize the mnemonic (newlines -> spaces) before derivation;
        // slice(2) strips the leading '0x' from the hex-encoded key.
        const privKey = TronWeb.fromMnemonic(mnemonic.replace(/[\r\n]/gm, ' ').trim(), prefix).privateKey
        console.log(`Private key: `, privKey.slice(2))
        process.exit(0)
      } catch (err) {
        console.error(`Error while retrieving private key!`)
        process.exit(3)
      }
    })
}

// Look up the HD index used for this cash-out address; resolves to
// undefined when the address is not a known cash-out address.
function searchForHdIndex (address) {
  const sql = `SELECT hd_index FROM cash_out_txs WHERE to_address = $1`
  return db.oneOrNone(sql, [address])
    .then(result => _.get('hd_index', result))
}

// Read the operator's mnemonic from disk.
function fetchMnemonic () {
  return fs.readFile(MNEMONIC_PATH, 'utf8')
}

run(address)
|
||||
36
packages/server/bin/lamassu-update-cassettes
Executable file
36
packages/server/bin/lamassu-update-cassettes
Executable file
|
|
@ -0,0 +1,36 @@
|
|||
#!/usr/bin/env node

// lamassu-update-cassettes: sets the number of bill cassettes configured for
// a machine.
// Usage: lamassu-update-cassettes <device_id> <number_of_cassettes>
// Exit codes: 0 success, 1 usage, 3 invalid value or DB error.

require('../lib/environment-helper')

const _ = require('lodash')
const db = require('../lib/db')

if (process.argv.length !== 4) {
  console.log('Usage: lamassu-update-cassettes <device_id> <number_of_cassettes>')
  process.exit(1)
}

const deviceId = process.argv[2]
// Parse once, with an explicit radix.
const numberOfCassettes = parseInt(process.argv[3], 10)

if (!_.isFinite(numberOfCassettes)) {
  // BUG FIX: this log line previously referenced an undefined `err`,
  // throwing a ReferenceError instead of reporting the invalid argument.
  console.log('Error: <number_of_cassettes> is not a valid number (%s)', process.argv[3])
  process.exit(3)
}

if (numberOfCassettes > 4 || numberOfCassettes < 2) {
  console.log('Error: <number_of_cassettes> is out of range. Should be a number between 2 and 4')
  process.exit(3)
}

const query = `UPDATE devices SET number_of_cassettes = $1 WHERE device_id = $2`

db.none(query, [numberOfCassettes, deviceId])
  .then(() => {
    console.log('Success! Device %s updated to %s cassettes', deviceId, numberOfCassettes)
    process.exit(0)
  })
  .catch(err => {
    console.log('Error: %s', err)
    process.exit(3)
  })
|
||||
36
packages/server/bin/lamassu-update-recyclers
Normal file
36
packages/server/bin/lamassu-update-recyclers
Normal file
|
|
@ -0,0 +1,36 @@
|
|||
#!/usr/bin/env node

// lamassu-update-recyclers: sets the number of recycler units configured for
// a machine.
// Usage: lamassu-update-recyclers <device_id> <number_of_recyclers>
// Exit codes: 0 success, 1 usage, 3 invalid value or DB error.

require('../lib/environment-helper')

const _ = require('lodash')
const db = require('../lib/db')

if (process.argv.length !== 4) {
  console.log('Usage: lamassu-update-recyclers <device_id> <number_of_recyclers>')
  process.exit(1)
}

const deviceId = process.argv[2]
// Parse once, with an explicit radix.
const numberOfRecyclers = parseInt(process.argv[3], 10)

if (!_.isFinite(numberOfRecyclers)) {
  // BUG FIX: this log line previously referenced an undefined `err`,
  // throwing a ReferenceError instead of reporting the invalid argument.
  console.log('Error: <number_of_recyclers> is not a valid number (%s)', process.argv[3])
  process.exit(3)
}

if (numberOfRecyclers > 6 || numberOfRecyclers < 1) {
  // BUG FIX: the message used to say "between 1 and 3" while the check
  // accepts 1..6; the message now matches the enforced range.
  console.log('Error: <number_of_recyclers> is out of range. Should be a number between 1 and 6')
  process.exit(3)
}

const query = `UPDATE devices SET number_of_recyclers = $1 WHERE device_id = $2`

db.none(query, [numberOfRecyclers, deviceId])
  .then(() => {
    console.log('Success! Device %s updated to %s recyclers', deviceId, numberOfRecyclers)
    process.exit(0)
  })
  .catch(err => {
    console.log('Error: %s', err)
    process.exit(3)
  })
|
||||
30
packages/server/bin/lamassu-update-to-mnemonic
Executable file
30
packages/server/bin/lamassu-update-to-mnemonic
Executable file
|
|
@ -0,0 +1,30 @@
|
|||
#!/usr/bin/env node
|
||||
|
||||
'use strict'
|
||||
|
||||
const fs = require('fs')
|
||||
const path = require('path')
|
||||
const os = require('os')
|
||||
|
||||
require('../lib/environment-helper')
|
||||
const mnemonicHelpers = require('../lib/mnemonic-helpers')
|
||||
const setEnvVariable = require('../tools/set-env-var')
|
||||
|
||||
if (!process.env.MNEMONIC_PATH && process.env.SEED_PATH) {
|
||||
const seed = fs.readFileSync(process.env.SEED_PATH, 'utf8').trim()
|
||||
const mnemonic = mnemonicHelpers.fromSeed(seed)
|
||||
|
||||
if (process.argv[2] === '--prod') {
|
||||
setEnvVariable('MNEMONIC_PATH', path.resolve('/etc', 'lamassu', 'mnemonics', 'mnemonic.txt'))
|
||||
} else {
|
||||
setEnvVariable('MNEMONIC_PATH', path.resolve(os.homedir(), '.lamassu', 'mnemonics', 'mnemonic.txt'))
|
||||
}
|
||||
|
||||
if (!fs.existsSync(path.dirname(process.env.MNEMONIC_PATH))) {
|
||||
fs.mkdirSync(path.dirname(process.env.MNEMONIC_PATH))
|
||||
}
|
||||
|
||||
if (!fs.existsSync(process.env.MNEMONIC_PATH)) {
|
||||
fs.writeFileSync(process.env.MNEMONIC_PATH, mnemonic, 'utf8')
|
||||
}
|
||||
}
|
||||
43
packages/server/bin/lamassu-update-wallet-nodes
Executable file
43
packages/server/bin/lamassu-update-wallet-nodes
Executable file
|
|
@ -0,0 +1,43 @@
|
|||
#!/usr/bin/env node

// lamassu-update-wallet-nodes: updates every installed crypto-node wallet
// binary to the version pinned in lib/blockchain/common, restarting wallets
// that were running beforehand.

require('../lib/environment-helper')
const _ = require('lodash/fp')
const common = require('../lib/blockchain/common')
const { utils: coinUtils } = require('@lamassu/coins')

const cryptos = coinUtils.cryptoCurrencies()

// Coins whose node software this tool knows how to update.
const PLUGINS = {
  BTC: require('../lib/blockchain/bitcoin.js'),
  BCH: require('../lib/blockchain/bitcoincash.js'),
  DASH: require('../lib/blockchain/dash.js'),
  ETH: require('../lib/blockchain/ethereum.js'),
  LTC: require('../lib/blockchain/litecoin.js'),
  XMR: require('../lib/blockchain/monero.js'),
  ZEC: require('../lib/blockchain/zcash.js')
}

// Resolve the update plugin for a crypto record; throws for unknown coins.
function plugin (crypto) {
  const plugin = PLUGINS[crypto.cryptoCode]
  if (!plugin) throw new Error(`No such plugin: ${crypto.cryptoCode}`)
  return plugin
}

// A node counts as installed when supervisor reports any process state for it.
function isWalletNodeInstalled (status) {
  // From http://supervisord.org/subprocess.html#process-states
  return _.includes(status, ['STARTING', 'RUNNING', 'STOPPED', 'BACKOFF', 'STOPPING', 'EXITED', 'FATAL'])
}

function run () {
  _.forEach((crypto) => {
    if (!_.includes(crypto.cryptoCode, _.keys(PLUGINS))) return

    const cryptoPlugin = plugin(crypto)
    // NOTE(review): the supervisor process name comes from `crypto.code`
    // while the lookups above use `crypto.cryptoCode` — confirm both fields
    // exist on the coin record.
    const status = common.es(`sudo supervisorctl status ${crypto.code} | awk '{ print $2 }'`).trim()

    if (!isWalletNodeInstalled(status)) return
    // Second argument: restart afterwards only if the node was running (or
    // starting) before the update.
    cryptoPlugin.updateCore(common.getBinaries(crypto.cryptoCode), _.includes(status, ['RUNNING', 'STARTING']))
  }, cryptos)
}

run()
|
||||
88
packages/server/lib/app.js
Normal file
88
packages/server/lib/app.js
Normal file
|
|
@ -0,0 +1,88 @@
|
|||
const fs = require('fs')
|
||||
const https = require('https')
|
||||
const argv = require('minimist')(process.argv.slice(2))
|
||||
|
||||
require('./environment-helper')
|
||||
const { loadRoutes } = require('./routes')
|
||||
const logger = require('./logger')
|
||||
const poller = require('./poller')
|
||||
const complianceTriggers = require('./compliance-triggers')
|
||||
const ofac = require('./ofac/index')
|
||||
const ofacUpdate = require('./ofac/update')
|
||||
const operator = require('./operator')
|
||||
const machineSettings = require('./machine-settings')
|
||||
|
||||
const KEY_PATH = process.env.KEY_PATH
|
||||
const CERT_PATH = process.env.CERT_PATH
|
||||
const CA_PATH = process.env.CA_PATH
|
||||
|
||||
const version = require('../package.json').version
|
||||
logger.info('Version: %s', version)
|
||||
|
||||
// Boot sequence: repeatedly attempt the startup prerequisites (sanctions DB,
// machine settings) every 10s until they all succeed, then bring up the
// HTTPS server. Resolves once startup has been kicked off.
function run() {
  return new Promise(resolve => {
    let count = 0
    let handler

    // Log the failure and let the 10s interval trigger the next attempt.
    const errorHandler = err => {
      count += 1
      logger.error(err)
      logger.error('[%d] Retrying in 10s...', count)
    }

    const runner = () => {
      Promise.all([
        complianceTriggers.getAllComplianceTriggers().then(loadSanctions),
        machineSettings.reloadAll(),
      ])
        .then(() => {
          // Prerequisites met: stop retrying and start the server.
          // NOTE(review): startServer()'s promise is not returned here, so
          // `resolve` fires before the server is actually listening, and a
          // startServer failure reaches errorHandler after retries stopped.
          clearInterval(handler)
          startServer()
        })
        .then(resolve)
        .catch(errorHandler)
    }

    // Retry every 10 seconds; also attempt immediately.
    handler = setInterval(runner, 10000)
    runner()
  })
}
|
||||
|
||||
// Refresh and load the OFAC sanctions database, but only when at least one
// configured compliance trigger requires sanctions screening.
async function loadSanctions(triggers) {
  if (!complianceTriggers.hasSanctions(triggers)) return

  logger.info('Loading sanctions DB...')
  await ofacUpdate.update()
  logger.info('Sanctions DB updated')
  await ofac.load()
  logger.info('Sanctions DB loaded')
}
|
||||
|
||||
// Build the Express app, start background polling, and serve it over HTTPS.
// Client certificates are requested but not required (rejectUnauthorized is
// false), so both machines and plain clients can connect.
async function startServer() {
  const app = await loadRoutes()

  poller.setup()

  const httpsServerOptions = {
    key: fs.readFileSync(KEY_PATH),
    cert: fs.readFileSync(CERT_PATH),
    ca: fs.readFileSync(CA_PATH),
    requestCert: true,
    rejectUnauthorized: false,
  }

  const server = https.createServer(httpsServerOptions, app)

  // Port is overridable via --port for local/dev setups.
  const port = argv.port || 3000

  // Warm the operator-id lookup; failures are logged but non-fatal.
  await operator.getOperatorId('middleware').catch(logger.error)
  await new Promise(resolve => server.listen({ port }, resolve))
  logger.info(`lamassu-server listening on port ${port}`)
}

module.exports = { run }
|
||||
15
packages/server/lib/auth-tokens.js
Normal file
15
packages/server/lib/auth-tokens.js
Normal file
|
|
@ -0,0 +1,15 @@
|
|||
const crypto = require('crypto')
|
||||
|
||||
const constants = require('./constants')
|
||||
const db = require('./db')
|
||||
|
||||
function createAuthToken(userID, type) {
|
||||
const token = crypto.randomBytes(32).toString('hex')
|
||||
const sql = `INSERT INTO auth_tokens (token, type, user_id) VALUES ($1, $2, $3) ON CONFLICT (user_id, type) DO UPDATE SET token=$1, expire=now() + interval '${constants.AUTH_TOKEN_EXPIRATION_TIME}' RETURNING *`
|
||||
|
||||
return db.one(sql, [token, type, userID])
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
createAuthToken,
|
||||
}
|
||||
174
packages/server/lib/bill-math.js
Normal file
174
packages/server/lib/bill-math.js
Normal file
|
|
@ -0,0 +1,174 @@
|
|||
const _ = require('lodash/fp')
|
||||
const sumService = require('@haensl/subset-sum')
|
||||
|
||||
const logger = require('./logger')
|
||||
const cc = require('./coin-change')
|
||||
|
||||
// Strategies for flattening cassette/recycler units into a dispense-ordered
// list of single bills.
const BILL_LIST_MODES = {
  LAST_UNIT_FIRST: 0,
  FIRST_UNIT_FIRST: 1,
  LOWEST_VALUE_FIRST: 2,
  HIGHEST_VALUE_FIRST: 3,
  UNIT_ROUND_ROBIN: 4,
  VALUE_ROUND_ROBIN: 5,
}

// Expand [{ denomination, count }, ...] into a flat list of single-bill
// values preserving unit order: [{d:10,count:2}] -> [10, 10].
// (Array.from treats a negative/NaN length as 0, matching the old _.times.)
const expandUnits = units =>
  units.flatMap(({ denomination, count }) =>
    Array.from({ length: count }, () => denomination),
  )

// Deal bills one at a time, cycling across units until all are exhausted.
// Works on copies, so the caller's unit objects are never mutated.
const roundRobin = units => {
  const pool = units.filter(u => u.count > 0).map(u => ({ ...u }))
  const bills = []
  let idx = 0
  while (pool.length > 0) {
    idx %= pool.length
    bills.push(pool[idx].denomination)
    pool[idx].count--
    if (pool[idx].count === 0) {
      pool.splice(idx, 1) // next unit slides into this index
    } else {
      idx++
    }
  }
  return bills
}

// Build the ordered list of individual bills for the chosen strategy.
// @param {Array<{denomination: number, count: number}>} units
// @param {number} mode - one of BILL_LIST_MODES
// @returns {number[]} one entry per physical bill
// @throws {Error} on an unknown mode
// Refactored: the six near-identical reduce/round-robin branches are now two
// shared helpers; sorting/reversing copies the input (the old _.reverse
// reversed the caller's array in place).
const buildBillList = (units, mode) => {
  const byValueAsc = (a, b) => a.denomination - b.denomination

  switch (mode) {
    case BILL_LIST_MODES.LAST_UNIT_FIRST:
      return expandUnits([...units].reverse())
    case BILL_LIST_MODES.FIRST_UNIT_FIRST:
      return expandUnits(units)
    case BILL_LIST_MODES.LOWEST_VALUE_FIRST:
      return expandUnits([...units].sort(byValueAsc))
    case BILL_LIST_MODES.HIGHEST_VALUE_FIRST:
      return expandUnits([...units].sort((a, b) => b.denomination - a.denomination))
    case BILL_LIST_MODES.UNIT_ROUND_ROBIN:
      return roundRobin(units)
    case BILL_LIST_MODES.VALUE_ROUND_ROBIN:
      return roundRobin([...units].sort(byValueAsc))
    default:
      throw new Error(`Invalid mode: ${mode}`)
  }
}
|
||||
|
||||
// Legacy change-making via subset-sum search over an explicit bill list.
// Returns [[denomination, count], ...] pairs; returns [] when the loaded
// cash cannot cover `amount`.
const getSolution_old = (units, amount, mode) => {
  const billList = buildBillList(units, mode)

  // Not enough total cash loaded: unsolvable.
  if (_.sum(billList) < amount.toNumber()) {
    return []
  }

  // Take the first subset of bills summing exactly to the target, then
  // tally how many bills of each denomination it used.
  const solver = sumService.subsetSum(billList, amount.toNumber())
  const solution = _.countBy(Math.floor, solver.next().value)
  return Object.entries(solution).map(([denomination, provisioned]) => [
    _.toNumber(denomination),
    provisioned,
  ])
}
|
||||
|
||||
// Solve the change-making problem with the coin-change solver. Duplicate
// denominations across units are aggregated before building the model.
const getSolution = (units, amount) => {
  const target = amount.toNumber()

  // Total available count per denomination (keys become strings, as with
  // Object.entries on a reduce-built object).
  const totals = {}
  for (const { denomination, count } of units) {
    totals[denomination] = (totals[denomination] ?? 0) + count
  }

  return cc.solve(cc.model(Object.entries(totals)), target)
}
|
||||
|
||||
// Distribute a [denomination, count] solution back over the physical units,
// in unit order: each unit provisions as many of its denomination's
// remaining bills as it can hold.
const solutionToOriginalUnits = (solution, units) => {
  // Bills still to hand out, keyed by denomination.
  const billsLeft = Object.fromEntries(solution)

  return units.map(({ count, name, denomination }) => {
    const left = billsLeft[denomination]
    const wanted = left == null || Number.isNaN(left) ? 0 : left
    // Never provision more bills than this unit holds, nor a negative count.
    const provisioned = Math.min(Math.max(wanted, 0), count)
    billsLeft[denomination] -= provisioned
    return { name, denomination, provisioned }
  })
}
|
||||
|
||||
// Decide which bills each cassette dispenses to pay out `amount`.
// Runs both solvers — the newer coin-change solver and the legacy
// subset-sum one — and falls back to the legacy result whenever the new
// solver's answer fails verification or the two disagree on solvability.
function makeChange(outCassettes, amount) {
  const ss_solution = getSolution_old(
    outCassettes,
    amount,
    BILL_LIST_MODES.VALUE_ROUND_ROBIN,
  )
  const cc_solution = getSolution(outCassettes, amount)

  // cc.check validates the coin-change solution against the target amount.
  if (!cc.check(cc_solution, amount.toNumber())) {
    logger.error(new Error('coin-change provided a bad solution'))
    return solutionToOriginalUnits(ss_solution, outCassettes)
  }

  // Both solvers should agree on whether a payout is possible at all.
  if (!!ss_solution !== !!cc_solution) {
    logger.error(
      new Error(
        `subset-sum and coin-change don't agree on solvability -- subset-sum:${!!ss_solution} coin-change:${!!cc_solution}`,
      ),
    )
    return solutionToOriginalUnits(ss_solution, outCassettes)
  }

  return solutionToOriginalUnits(cc_solution, outCassettes)
}

module.exports = { makeChange }
|
||||
59
packages/server/lib/blacklist.js
Normal file
59
packages/server/lib/blacklist.js
Normal file
|
|
@ -0,0 +1,59 @@
|
|||
const _ = require('lodash/fp')
|
||||
|
||||
const { addressDetector } = require('@lamassu/coins')
|
||||
const db = require('./db')
|
||||
const notifierQueries = require('./notifier/queries')
|
||||
|
||||
// All blacklisted addresses joined with their human-readable messages.
const getBlacklist = () =>
  db.any(
    `SELECT blacklist.address AS address, blacklist_messages.content AS blacklistMessage
    FROM blacklist JOIN blacklist_messages
    ON blacklist.blacklist_message_id = blacklist_messages.id`,
  )
||||
|
||||
// Remove an address from the blacklist, clearing any notification raised
// for it along the way.
const deleteFromBlacklist = address => {
  // Fire-and-forget: the notification-cleanup promise is not awaited.
  notifierQueries.clearBlacklistNotification(address)

  const sql = `DELETE FROM blacklist WHERE address = $1`
  return db.none(sql, [address])
}
|
||||
|
||||
// True when the address detector recognizes the string as an address of at
// least one supported coin; parse failures count as invalid.
const isValidAddress = address => {
  try {
    return !_.isEmpty(
      addressDetector.getSupportedCoinsForAddress(address).matches,
    )
  } catch {
    return false
  }
}
|
||||
|
||||
// Insert an address into the blacklist after validating it; rejects with
// Error('Invalid address') for anything the detector cannot parse.
const insertIntoBlacklist = address => {
  if (isValidAddress(address)) {
    return db.none('INSERT INTO blacklist (address) VALUES ($1);', [address])
  }
  return Promise.reject(new Error('Invalid address'))
}
|
||||
|
||||
// Look up an address on the blacklist; resolves to its { address, content }
// row, or null when the address is not blacklisted.
const blocked = address =>
  db.oneOrNone(
    `SELECT address, content FROM blacklist b LEFT OUTER JOIN blacklist_messages bm ON bm.id = b.blacklist_message_id WHERE address = $1`,
    [address],
  )
|
||||
|
||||
// Fetch every blacklist message row.
const getMessages = () => db.any(`SELECT * FROM blacklist_messages`)
|
||||
|
||||
// Update a blacklist message's content; resolves with { id } when the row
// exists, null otherwise.
const editBlacklistMessage = (id, content) =>
  db.oneOrNone(
    `UPDATE blacklist_messages SET content = $1 WHERE id = $2 RETURNING id`,
    [content, id],
  )
|
||||
|
||||
// Public API of the blacklist module.
module.exports = {
  blocked,
  getBlacklist,
  deleteFromBlacklist,
  insertIntoBlacklist,
  getMessages,
  editBlacklistMessage,
}
|
||||
149
packages/server/lib/blockchain/bitcoin.js
Normal file
149
packages/server/lib/blockchain/bitcoin.js
Normal file
|
|
@ -0,0 +1,149 @@
|
|||
const path = require('path')
|
||||
|
||||
const { utils: coinUtils } = require('@lamassu/coins')
|
||||
|
||||
const common = require('./common')
|
||||
const { isDevMode, isRemoteNode } = require('../environment-helper')
|
||||
|
||||
module.exports = { setup, updateCore }
|
||||
|
||||
const coinRec = coinUtils.getCryptoCurrency('BTC')
|
||||
|
||||
const BLOCKCHAIN_DIR = process.env.BLOCKCHAIN_DIR
|
||||
|
||||
const tmpDir = isDevMode() ? path.resolve(BLOCKCHAIN_DIR, 'tmp') : '/tmp'
|
||||
const usrBinDir = isDevMode()
|
||||
? path.resolve(BLOCKCHAIN_DIR, 'bin')
|
||||
: '/usr/local/bin'
|
||||
|
||||
// Write bitcoind's config file into dataDir and, outside dev mode, open the
// node's firewall port and register the daemon with supervisor.
function setup(dataDir) {
  !isDevMode() && common.firewall([coinRec.defaultPort])
  const config = buildConfig()
  common.writeFile(path.resolve(dataDir, coinRec.configFile), config)
  const cmd = `${usrBinDir}/${coinRec.daemon} -datadir=${dataDir}`
  !isDevMode() && common.writeSupervisorConfig(coinRec, cmd)
}
|
||||
|
||||
// Append `key=value` to bitcoin.conf unless the key is already present.
// Logs the exact same messages the previous inline blocks produced.
function ensureConfigOption(key, value, enableMessage) {
  const confPath = `${BLOCKCHAIN_DIR}/bitcoin/bitcoin.conf`
  if (common.es(`grep "${key}=" ${confPath} || true`)) {
    common.logger.info(`${key} already defined, skipping...`)
    return
  }
  common.logger.info(enableMessage)
  common.es(`echo "\n${key}=${value}" >> ${confPath}`)
}

// Replace the installed Bitcoin Core binaries with the version pinned in
// coinRec, verifying the download's SHA-256 first, then bring the node's
// config up to date. Restarts the wallet only if it was running before.
// Refactored: four copy-pasted grep-or-append blocks are now one helper.
function updateCore(coinRec, isCurrentlyRunning) {
  common.logger.info('Updating Bitcoin Core. This may take a minute...')
  !isDevMode() && common.es(`sudo supervisorctl stop bitcoin`)
  common.es(`curl -#o /tmp/bitcoin.tar.gz ${coinRec.url}`)
  // Abort (leaving the wallet stopped) when the tarball hash doesn't match.
  if (
    common.es(`sha256sum /tmp/bitcoin.tar.gz | awk '{print $1}'`).trim() !==
    coinRec.urlHash
  ) {
    common.logger.info(
      'Failed to update Bitcoin Core: Package signature do not match!',
    )
    return
  }
  common.es(`tar -xzf /tmp/bitcoin.tar.gz -C /tmp/`)

  common.logger.info('Updating wallet...')
  common.es(`cp ${tmpDir}/${coinRec.dir}/* ${usrBinDir}/`)
  common.es(`rm -r ${tmpDir}/${coinRec.dir.replace('/bin', '')}`)
  common.es(`rm ${tmpDir}/bitcoin.tar.gz`)

  // Migrate legacy configs from p2sh-segwit to bech32 receiving addresses.
  if (
    common.es(
      `grep "addresstype=p2sh-segwit" ${BLOCKCHAIN_DIR}/bitcoin/bitcoin.conf || true`,
    )
  ) {
    common.logger.info(`Enabling bech32 receiving addresses in config file..`)
    common.es(
      `sed -i 's/addresstype=p2sh-segwit/addresstype=bech32/g' ${BLOCKCHAIN_DIR}/bitcoin/bitcoin.conf`,
    )
  } else {
    common.logger.info(
      `bech32 receiving addresses already defined, skipping...`,
    )
  }

  ensureConfigOption('changetype', 'bech32', `Enabling bech32 change addresses in config file..`)
  ensureConfigOption('listenonion', '0', `Setting 'listenonion=0' in config file...`)
  ensureConfigOption('fallbackfee', '0.00005', `Setting 'fallbackfee=0.00005' in config file...`)
  ensureConfigOption('rpcworkqueue', '2000', `Setting 'rpcworkqueue=2000' in config file...`)

  if (isCurrentlyRunning && !isDevMode()) {
    common.logger.info('Starting wallet...')
    common.es(`sudo supervisorctl start bitcoin`)
  }

  common.logger.info('Bitcoin Core is updated!')
}
|
||||
|
||||
// Render bitcoin.conf with a freshly generated RPC password. The template is
// emitted verbatim into the config file; in dev mode the node runs regtest
// (and on regtest ports), otherwise mainnet ports are used. When the node is
// remote, a `connect=` line points at it.
function buildConfig() {
  return `rpcuser=lamassuserver
rpcpassword=${common.randomPass()}
${isDevMode() ? `regtest=1` : ``}
dbcache=500
server=1
connections=40
keypool=10000
prune=4000
daemon=0
addresstype=bech32
changetype=bech32
walletrbf=1
listenonion=0
fallbackfee=0.00005
rpcworkqueue=2000
${
  isDevMode()
    ? `[regtest]
rpcport=18333
bind=0.0.0.0:18332
${isRemoteNode(coinRec) ? `connect=${process.env.BTC_NODE_HOST}:${process.env.BTC_NODE_PORT}` : ``}`
    : `rpcport=8333
bind=0.0.0.0:8332
${isRemoteNode(coinRec) ? `connect=${process.env.BTC_NODE_HOST}:${process.env.BTC_NODE_PORT}` : ``}`
}
`
}
|
||||
74
packages/server/lib/blockchain/bitcoincash.js
Normal file
74
packages/server/lib/blockchain/bitcoincash.js
Normal file
|
|
@ -0,0 +1,74 @@
|
|||
const path = require('path')
|
||||
|
||||
const { utils: coinUtils } = require('@lamassu/coins')
|
||||
|
||||
const common = require('./common')
|
||||
|
||||
module.exports = { setup, updateCore }
|
||||
|
||||
const coinRec = coinUtils.getCryptoCurrency('BCH')
|
||||
|
||||
// Write bitcoincash.conf into dataDir, open the node's firewall port, and
// register the daemon with supervisor.
function setup(dataDir) {
  common.firewall([coinRec.defaultPort])
  const config = buildConfig()
  common.writeFile(path.resolve(dataDir, coinRec.configFile), config)
  const cmd = `/usr/local/bin/${coinRec.daemon} -datadir=${dataDir} -conf=${dataDir}/bitcoincash.conf`
  common.writeSupervisorConfig(coinRec, cmd)
}
|
||||
|
||||
// Replace the installed Bitcoin Cash binaries with the version pinned in
// coinRec, verifying the download's SHA-256 against the pinned hash first.
function updateCore(coinRec, isCurrentlyRunning) {
  common.logger.info('Updating Bitcoin Cash. This may take a minute...')
  common.es(`sudo supervisorctl stop bitcoincash`)
  common.es(`curl -#Lo /tmp/bitcoincash.tar.gz ${coinRec.url}`)
  // Abort (leaving the wallet stopped) when the tarball hash doesn't match.
  if (
    common.es(`sha256sum /tmp/bitcoincash.tar.gz | awk '{print $1}'`).trim() !==
    coinRec.urlHash
  ) {
    common.logger.info(
      'Failed to update Bitcoin Cash: Package signature do not match!',
    )
    return
  }
  common.es(`tar -xzf /tmp/bitcoincash.tar.gz -C /tmp/`)

  common.logger.info('Updating wallet...')
  // Upstream ships `bitcoind`/`bitcoin-cli`; install them under the
  // bitcoincash-* names the supervisor config expects.
  common.es(`cp /tmp/${coinRec.dir}/bitcoind /usr/local/bin/bitcoincashd`)
  common.es(`cp /tmp/${coinRec.dir}/bitcoin-cli /usr/local/bin/bitcoincash-cli`)
  common.es(`rm -r /tmp/${coinRec.dir.replace('/bin', '')}`)
  common.es(`rm /tmp/bitcoincash.tar.gz`)

  // Ensure listenonion=0 is present in the node config (append once).
  if (
    common.es(
      `grep "listenonion=" /mnt/blockchains/bitcoincash/bitcoincash.conf || true`,
    )
  ) {
    common.logger.info(`listenonion already defined, skipping...`)
  } else {
    common.logger.info(`Setting 'listenonion=0' in config file...`)
    common.es(
      `echo "\nlistenonion=0" >> /mnt/blockchains/bitcoincash/bitcoincash.conf`,
    )
  }

  // Only restart if the wallet was running before the update.
  if (isCurrentlyRunning) {
    common.logger.info('Starting wallet...')
    common.es(`sudo supervisorctl start bitcoincash`)
  }

  common.logger.info('Bitcoin Cash is updated!')
}
|
||||
|
||||
// Render bitcoincash.conf with a freshly generated RPC password. The
// template is emitted verbatim into the config file.
function buildConfig() {
  return `rpcuser=lamassuserver
rpcpassword=${common.randomPass()}
dbcache=500
server=1
maxconnections=40
keypool=10000
prune=4000
daemon=0
bind=0.0.0.0:8335
rpcport=8336
listenonion=0
`
}
|
||||
222
packages/server/lib/blockchain/common.js
Normal file
222
packages/server/lib/blockchain/common.js
Normal file
|
|
@ -0,0 +1,222 @@
|
|||
const crypto = require('crypto')
|
||||
const os = require('os')
|
||||
const path = require('path')
|
||||
const cp = require('child_process')
|
||||
const fs = require('fs')
|
||||
const makeDir = require('make-dir')
|
||||
|
||||
const _ = require('lodash/fp')
|
||||
|
||||
const logger = require('console-log-level')({ level: 'info' })
|
||||
|
||||
const { isDevMode } = require('../environment-helper')
|
||||
|
||||
const BLOCKCHAIN_DIR = process.env.BLOCKCHAIN_DIR
|
||||
|
||||
// Shared helpers for the per-coin blockchain install/update scripts.
module.exports = {
  es,
  writeSupervisorConfig,
  firewall,
  randomPass,
  fetchAndInstall,
  logger,
  isInstalledSoftware,
  writeFile,
  getBinaries,
  isUpdateDependent,
}
||||
|
||||
const BINARIES = {
|
||||
BTC: {
|
||||
defaultUrl:
|
||||
'https://bitcoincore.org/bin/bitcoin-core-0.20.1/bitcoin-0.20.1-x86_64-linux-gnu.tar.gz',
|
||||
defaultUrlHash:
|
||||
'376194f06596ecfa40331167c39bc70c355f960280bd2a645fdbf18f66527397',
|
||||
defaultDir: 'bitcoin-0.20.1/bin',
|
||||
url: 'https://bitcoincore.org/bin/bitcoin-core-29.0/bitcoin-29.0-x86_64-linux-gnu.tar.gz',
|
||||
dir: 'bitcoin-29.0/bin',
|
||||
urlHash: 'a681e4f6ce524c338a105f214613605bac6c33d58c31dc5135bbc02bc458bb6c',
|
||||
},
|
||||
ETH: {
|
||||
url: 'https://gethstore.blob.core.windows.net/builds/geth-linux-amd64-1.15.11-36b2371c.tar.gz',
|
||||
dir: 'geth-linux-amd64-1.15.11-36b2371c',
|
||||
urlHash: 'a14a4285daedf75ea04a7a298e6caa48d566a2786c93fc5e86ec2c5998c92455',
|
||||
},
|
||||
ZEC: {
|
||||
url: 'https://download.z.cash/downloads/zcash-6.2.0-linux64-debian-bullseye.tar.gz',
|
||||
dir: 'zcash-6.2.0/bin',
|
||||
urlHash: '71cf378c27582a4b9f9d57cafc2b5a57a46e9e52a5eda33be112dc9790c64c6f',
|
||||
},
|
||||
DASH: {
|
||||
defaultUrl:
|
||||
'https://github.com/dashpay/dash/releases/download/v18.1.0/dashcore-18.1.0-x86_64-linux-gnu.tar.gz',
|
||||
defaultUrlHash:
|
||||
'd89c2afd78183f3ee815adcccdff02098be0c982633889e7b1e9c9656fbef219',
|
||||
defaultDir: 'dashcore-18.1.0/bin',
|
||||
url: 'https://github.com/dashpay/dash/releases/download/v22.1.2/dashcore-22.1.2-x86_64-linux-gnu.tar.gz',
|
||||
dir: 'dashcore-22.1.2/bin',
|
||||
urlHash: '230e871ef55c64c1550f358089a324a1e47e52a9a9c032366162cd82a19fa1af',
|
||||
},
|
||||
LTC: {
|
||||
defaultUrl:
|
||||
'https://download.litecoin.org/litecoin-0.18.1/linux/litecoin-0.18.1-x86_64-linux-gnu.tar.gz',
|
||||
defaultUrlHash:
|
||||
'ca50936299e2c5a66b954c266dcaaeef9e91b2f5307069b9894048acf3eb5751',
|
||||
defaultDir: 'litecoin-0.18.1/bin',
|
||||
url: 'https://download.litecoin.org/litecoin-0.21.4/linux/litecoin-0.21.4-x86_64-linux-gnu.tar.gz',
|
||||
dir: 'litecoin-0.21.4/bin',
|
||||
urlHash: '857fc41091f2bae65c3bf0fd4d388fca915fc93a03f16dd2578ac3cc92898390',
|
||||
},
|
||||
BCH: {
|
||||
url: 'https://github.com/bitcoin-cash-node/bitcoin-cash-node/releases/download/v28.0.1/bitcoin-cash-node-28.0.1-x86_64-linux-gnu.tar.gz',
|
||||
dir: 'bitcoin-cash-node-28.0.1/bin',
|
||||
files: [
|
||||
['bitcoind', 'bitcoincashd'],
|
||||
['bitcoin-cli', 'bitcoincash-cli'],
|
||||
],
|
||||
urlHash: 'd69ee632147f886ca540cecdff5b1b85512612b4c005e86b09083a63c35b64fa',
|
||||
},
|
||||
XMR: {
|
||||
url: 'https://downloads.getmonero.org/cli/monero-linux-x64-v0.18.4.0.tar.bz2',
|
||||
dir: 'monero-x86_64-linux-gnu-v0.18.4.0',
|
||||
files: [
|
||||
['monerod', 'monerod'],
|
||||
['monero-wallet-rpc', 'monero-wallet-rpc'],
|
||||
],
|
||||
urlHash: '16cb74c899922887827845a41d37c7f3121462792a540843f2fcabcc1603993f',
|
||||
},
|
||||
}
|
||||
|
||||
const coinsUpdateDependent = ['BTC', 'LTC', 'DASH']
|
||||
|
||||
/**
 * Opens the given ports in ufw with a single rule.
 * @param {Array<number|string>} ports - at least one port
 * @throws {Error} when no ports are supplied
 */
function firewall(ports) {
  if (!ports || ports.length === 0) throw new Error('No ports supplied')
  es(`sudo ufw allow ${ports.join(',')}`)
}
|
||||
|
||||
/**
 * Generates a 64-character hex password from 32 cryptographically
 * secure random bytes.
 * @returns {string} lowercase hex string
 */
function randomPass() {
  const bytes = crypto.randomBytes(32)
  return bytes.toString('hex')
}
|
||||
|
||||
// Runs a shell command synchronously and returns its stdout as a string.
// HOME is pinned to the invoking user's home dir so tools that read $HOME
// (crontab, wallet CLIs, ...) behave consistently when run under sudo.
// Throws when the command exits non-zero (child_process.execSync behavior).
function es(cmd) {
  const env = { HOME: os.userInfo().homedir }
  const options = { encoding: 'utf8', env }
  const res = cp.execSync(cmd, options)
  logger.debug(res)
  // encoding:'utf8' already yields a string; toString() is a harmless no-op
  return res.toString()
}
|
||||
|
||||
/**
 * Builds a supervisord [program:...] section for a coin daemon or its
 * companion wallet process.
 * @param {string} cryptoCode - supervisor program name stem (e.g. 'bitcoin')
 * @param {string} command - daemon command line (run under `nice`)
 * @param {boolean} [isWallet=false] - append '-wallet' to the program name
 * @returns {string} supervisord config section (trailing newline included)
 */
function generateSupervisorConfig(cryptoCode, command, isWallet = false) {
  const programName = isWallet ? `${cryptoCode}-wallet` : cryptoCode
  return [
    `[program:${programName}]`,
    `command=nice ${command}`,
    'autostart=true',
    'autorestart=true',
    `stderr_logfile=/var/log/supervisor/${programName}.err.log`,
    `stdout_logfile=/var/log/supervisor/${programName}.out.log`,
    'stderr_logfile_backups=2',
    'stdout_logfile_backups=2',
    'environment=HOME="/root"',
    '',
  ].join('\n')
}
|
||||
|
||||
// Writes supervisord config(s) for a coin: one for the node daemon and,
// when the coin record defines a separate wallet process, one for the
// wallet as well. No-ops when the coin's software is already installed.
function writeSupervisorConfig(coinRec, cmd, walletCmd = '') {
  if (isInstalledSoftware(coinRec)) return

  const blockchain = coinRec.code

  // Coins with a standalone wallet daemon (e.g. Monero's wallet-rpc) get
  // an extra '<code>-wallet' supervisor program.
  if (!_.isNil(coinRec.wallet)) {
    const supervisorConfigWallet = generateSupervisorConfig(
      blockchain,
      walletCmd,
      true,
    )
    writeFile(
      `/etc/supervisor/conf.d/${coinRec.code}-wallet.conf`,
      supervisorConfigWallet,
    )
  }

  const supervisorConfig = generateSupervisorConfig(blockchain, cmd)
  writeFile(`/etc/supervisor/conf.d/${coinRec.code}.conf`, supervisorConfig)
}
|
||||
|
||||
/**
 * Reports whether a coin's node software is already installed.
 *
 * Dev mode: checks for the coin's config file and daemon binary inside
 * BLOCKCHAIN_DIR. Production: checks for the supervisord config(s) written
 * by writeSupervisorConfig — the node config, plus the wallet config when
 * the coin record defines a separate wallet process.
 * @param {object} coinRec - crypto currency record (code, configFile, daemon, wallet)
 * @returns {boolean}
 */
function isInstalledSoftware(coinRec) {
  if (isDevMode()) {
    return (
      fs.existsSync(
        `${BLOCKCHAIN_DIR}/${coinRec.code}/${coinRec.configFile}`,
      ) && fs.existsSync(`${BLOCKCHAIN_DIR}/bin/${coinRec.daemon}`)
    )
  }

  const nodeInstalled = fs.existsSync(
    `/etc/supervisor/conf.d/${coinRec.code}.conf`,
  )
  // BUGFIX: writeSupervisorConfig writes '<code>-wallet.conf', but this
  // check used to look for '<code>.wallet.conf' — wallet coins therefore
  // never registered as installed and were re-installed on every run.
  const walletInstalled = _.isNil(coinRec.wallet)
    ? true
    : fs.existsSync(`/etc/supervisor/conf.d/${coinRec.code}-wallet.conf`)
  return nodeInstalled && walletInstalled
}
|
||||
|
||||
// Downloads, sha256-verifies and installs a coin's binaries into
// /usr/local/bin (or BLOCKCHAIN_DIR/bin in dev mode). Expects the caller
// to have chdir'd into a scratch directory. No-ops when already installed.
function fetchAndInstall(coinRec) {
  // Update-dependent coins are first installed at the pinned older release;
  // updateCrypto() upgrades them to the current one afterwards.
  const requiresUpdate = isUpdateDependent(coinRec.cryptoCode)
  if (isInstalledSoftware(coinRec)) return

  const binaries = BINARIES[coinRec.cryptoCode]
  if (!binaries) throw new Error(`No such coin: ${coinRec.code}`)

  const url = requiresUpdate ? binaries.defaultUrl : binaries.url
  const hash = requiresUpdate ? binaries.defaultUrlHash : binaries.urlHash
  const downloadFile = path.basename(url)
  const binDir = requiresUpdate ? binaries.defaultDir : binaries.dir

  es(`wget -q ${url}`)
  // Verify tarball integrity before unpacking; abort the install on mismatch.
  if (es(`sha256sum ${downloadFile} | awk '{print $1}'`).trim() !== hash) {
    logger.info(
      `Failed to install ${coinRec.code}: Package signature do not match!`,
    )
    return
  }
  es(`tar -xf ${downloadFile}`)

  const usrBinDir = isDevMode()
    ? path.resolve(BLOCKCHAIN_DIR, 'bin')
    : '/usr/local/bin'

  if (isDevMode()) {
    makeDir.sync(usrBinDir)
  }

  // Without an explicit file list, install every binary under its own name.
  if (_.isEmpty(binaries.files)) {
    es(`sudo cp ${binDir}/* ${usrBinDir}`)
    return
  }

  // Otherwise copy and rename each listed [source, target] pair.
  _.forEach(([source, target]) => {
    es(`sudo cp ${binDir}/${source} ${usrBinDir}/${target}`)
  }, binaries.files)
}
|
||||
|
||||
// Writes `content` to `path`, skipping (with a log line) when the file
// already exists.
// NOTE(review): fs.writeFileSync with the default 'w' flag overwrites
// existing files and never raises EEXIST, so the skip branch below looks
// unreachable; if "don't clobber existing files" is the intent, the call
// would need { flag: 'wx' } — confirm before changing.
function writeFile(path, content) {
  try {
    fs.writeFileSync(path, content)
  } catch (err) {
    if (err.code === 'EEXIST') {
      logger.info(`${path} exists, skipping.`)
      return
    }

    throw err
  }
}
|
||||
|
||||
/**
 * Looks up the download descriptor for a coin.
 * @param {string} coinCode - crypto code, e.g. 'BTC'
 * @returns {object} the coin's entry from BINARIES
 * @throws {Error} for unknown coin codes
 */
function getBinaries(coinCode) {
  const entry = BINARIES[coinCode]
  if (entry === undefined) throw new Error(`No such coin: ${coinCode}`)
  return entry
}
|
||||
|
||||
/**
 * True when the coin must first be installed at its pinned default (older)
 * release and then upgraded (see coinsUpdateDependent).
 * @param {string} coinCode
 * @returns {boolean}
 */
function isUpdateDependent(coinCode) {
  return coinsUpdateDependent.includes(coinCode)
}
|
||||
105
packages/server/lib/blockchain/dash.js
Normal file
105
packages/server/lib/blockchain/dash.js
Normal file
|
|
@ -0,0 +1,105 @@
|
|||
const path = require('path')
|
||||
|
||||
const { utils: coinUtils } = require('@lamassu/coins')
|
||||
|
||||
const common = require('./common')
|
||||
|
||||
module.exports = { setup, updateCore }
|
||||
|
||||
const coinRec = coinUtils.getCryptoCurrency('DASH')
|
||||
|
||||
// Installs the Dash node: opens its p2p port, writes dash.conf into the
// data dir, and registers the daemon with supervisord.
function setup(dataDir) {
  common.firewall([coinRec.defaultPort])
  const config = buildConfig()
  common.writeFile(path.resolve(dataDir, coinRec.configFile), config)
  const cmd = `/usr/local/bin/${coinRec.daemon} -datadir=${dataDir}`
  common.writeSupervisorConfig(coinRec, cmd)
}
|
||||
|
||||
/**
 * Upgrades an existing Dash Core installation to the release described by
 * `coinRec` and migrates deprecated dash.conf settings
 * (PrivateSend -> CoinJoin, litemode -> disablegovernance).
 * @param {object} coinRec - download descriptor (url, urlHash, dir)
 * @param {boolean} isCurrentlyRunning - restart the daemon afterwards
 */
function updateCore(coinRec, isCurrentlyRunning) {
  common.logger.info('Updating Dash Core. This may take a minute...')
  common.es(`sudo supervisorctl stop dash`)
  common.es(`curl -#Lo /tmp/dash.tar.gz ${coinRec.url}`)
  // Verify tarball integrity before unpacking; abort the update on mismatch.
  if (
    common.es(`sha256sum /tmp/dash.tar.gz | awk '{print $1}'`).trim() !==
    coinRec.urlHash
  ) {
    common.logger.info(
      'Failed to update Dash Core: Package signature do not match!',
    )
    return
  }
  common.es(`tar -xzf /tmp/dash.tar.gz -C /tmp/`)

  common.logger.info('Updating wallet...')
  common.es(`cp /tmp/${coinRec.dir}/* /usr/local/bin/`)
  common.es(`rm -r /tmp/${coinRec.dir.replace('/bin', '')}`)
  common.es(`rm /tmp/dash.tar.gz`)

  // enableprivatesend was renamed to enablecoinjoin upstream:
  // rename if present, skip if already migrated, otherwise append default.
  if (
    common.es(
      `grep "enableprivatesend=" /mnt/blockchains/dash/dash.conf || true`,
    )
  ) {
    common.logger.info(`Switching from 'PrivateSend' to 'CoinJoin'...`)
    common.es(
      `sed -i 's/enableprivatesend/enablecoinjoin/g' /mnt/blockchains/dash/dash.conf`,
    )
  } else if (
    common.es(`grep "enablecoinjoin=" /mnt/blockchains/dash/dash.conf || true`)
  ) {
    common.logger.info(`enablecoinjoin already defined, skipping...`)
  } else {
    common.logger.info(`Enabling CoinJoin in config file...`)
    common.es(`echo "\nenablecoinjoin=1" >> /mnt/blockchains/dash/dash.conf`)
  }

  // privatesendautostart was renamed to coinjoinautostart: same strategy.
  if (
    common.es(
      `grep "privatesendautostart=" /mnt/blockchains/dash/dash.conf || true`,
    )
  ) {
    common.logger.info(`Switching from 'PrivateSend' to 'CoinJoin'...`)
    common.es(
      `sed -i 's/privatesendautostart/coinjoinautostart/g' /mnt/blockchains/dash/dash.conf`,
    )
  } else if (
    common.es(
      `grep "coinjoinautostart=" /mnt/blockchains/dash/dash.conf || true`,
    )
  ) {
    common.logger.info(`coinjoinautostart already defined, skipping...`)
  } else {
    common.logger.info(`Enabling CoinJoin AutoStart in config file...`)
    common.es(`echo "\ncoinjoinautostart=1" >> /mnt/blockchains/dash/dash.conf`)
  }

  // litemode was renamed to disablegovernance upstream.
  if (common.es(`grep "litemode=" /mnt/blockchains/dash/dash.conf || true`)) {
    common.logger.info(`Switching from 'LiteMode' to 'DisableGovernance'...`)
    common.es(
      `sed -i 's/litemode/disablegovernance/g' /mnt/blockchains/dash/dash.conf`,
    )
  } else {
    // BUGFIX: this branch used to run `common.es('echo "..."')`, printing
    // the message into a throwaway shell instead of the operator log.
    // NOTE(review): unlike the settings above, nothing appends a
    // disablegovernance default when it is absent — confirm that is intended.
    common.logger.info(`disablegovernance already defined, skipping...`)
  }

  if (isCurrentlyRunning) {
    common.logger.info('Starting wallet...')
    common.es(`sudo supervisorctl start dash`)
  }

  common.logger.info('Dash Core is updated!')
}
|
||||
|
||||
/**
 * Renders the dash.conf contents for a freshly installed node.
 * A random RPC password is generated on every call.
 * @returns {string} config file body (trailing newline included)
 */
function buildConfig() {
  const settings = [
    'rpcuser=lamassuserver',
    `rpcpassword=${common.randomPass()}`,
    'dbcache=500',
    'keypool=10000',
    'disablegovernance=1',
    'prune=4000',
    'txindex=0',
    'enablecoinjoin=1',
    'coinjoinautostart=1',
  ]
  return settings.join('\n') + '\n'
}
|
||||
76
packages/server/lib/blockchain/do-volume.js
Normal file
76
packages/server/lib/blockchain/do-volume.js
Normal file
|
|
@ -0,0 +1,76 @@
|
|||
const fs = require('fs')
|
||||
|
||||
const common = require('./common')
|
||||
|
||||
const BLOCKCHAIN_DIR = process.env.BLOCKCHAIN_DIR
|
||||
|
||||
const MOUNT_POINT = BLOCKCHAIN_DIR
|
||||
|
||||
module.exports = { prepareVolume }
|
||||
|
||||
const logger = common.logger
|
||||
|
||||
// True when the blockchain mount point already exists.
// NOTE(review): existsSync only proves the directory exists, not that a
// volume is actually mounted on it — confirm that is acceptable here.
function isMounted() {
  return fs.existsSync(MOUNT_POINT)
}

// True when the device already carries a filesystem: `file -s` prints
// '<path>: data' for raw, unformatted block devices.
function isFormatted(volumePath) {
  const res = common.es(`file --dereference -s ${volumePath}`).trim()
  return res !== `${volumePath}: data`
}

// Formats the device as ext4 unless it already has a filesystem.
function formatVolume(volumePath) {
  if (isFormatted(volumePath)) {
    logger.info('Volume is already formatted.')
    return
  }

  logger.info('Formatting...')
  common.es(`sudo mkfs.ext4 ${volumePath}`)
}

// Mounts the device on MOUNT_POINT and persists the mount via /etc/fstab
// (nofail so the droplet still boots if the volume is detached).
function mountVolume(volumePath) {
  if (isMounted()) {
    logger.info('Volume is already mounted.')
    return
  }

  logger.info('Mounting...')
  common.es(`sudo mkdir -p ${MOUNT_POINT}`)
  common.es(`sudo mount -o discard,defaults ${volumePath} ${MOUNT_POINT}`)
  common.es(
    `echo ${volumePath} ${MOUNT_POINT} ext4 defaults,nofail,discard 0 0 | sudo tee -a /etc/fstab`,
  )
}
|
||||
|
||||
/**
 * Finds the attached block-storage volume under /dev/disk/by-id.
 * @returns {string|null} the single volume's device path, or null when
 * zero or more than one volume is present (both cases are logged).
 */
function locateVolume() {
  const res = common.es('ls /dev/disk/by-id/*')
  // Drop empty entries: ''.split('\n') yields [''], which previously made
  // the "no volumes" branch unreachable and could return '' as a path.
  const lines = res
    .trim()
    .split('\n')
    .map(line => line.trim())
    .filter(line => line.length > 0)

  if (lines.length === 0) {
    logger.error('No available volumes. You might need to attach one.')
    return null
  }

  if (lines.length > 1) {
    logger.error('More than one volume present, cannot prepare.')
    return null
  }

  return lines[0]
}
|
||||
|
||||
// Ensures the block-storage volume is formatted (ext4) and mounted at
// MOUNT_POINT. Returns true on success or when already mounted, false
// when no single volume could be located.
function prepareVolume() {
  if (isMounted()) {
    logger.info('Volume is already mounted.')
    return true
  }

  const volumePath = locateVolume()
  if (!volumePath) return false

  formatVolume(volumePath)
  mountVolume(volumePath)

  return true
}
|
||||
40
packages/server/lib/blockchain/ethereum.js
Normal file
40
packages/server/lib/blockchain/ethereum.js
Normal file
|
|
@ -0,0 +1,40 @@
|
|||
const { utils: coinUtils } = require('@lamassu/coins')
|
||||
|
||||
const common = require('./common')
|
||||
|
||||
module.exports = { setup, updateCore }
|
||||
|
||||
// Upgrades the Geth binary to the release described by `coinRec`,
// verifying the tarball's sha256 before installing.
function updateCore(coinRec, isCurrentlyRunning) {
  common.logger.info(
    'Updating the Geth Ethereum wallet. This may take a minute...',
  )
  common.es(`sudo supervisorctl stop ethereum`)
  common.es(`curl -#o /tmp/ethereum.tar.gz ${coinRec.url}`)
  // Verify tarball integrity before unpacking; abort the update on mismatch.
  if (
    common.es(`sha256sum /tmp/ethereum.tar.gz | awk '{print $1}'`).trim() !==
    coinRec.urlHash
  ) {
    common.logger.info('Failed to update Geth: Package signature do not match!')
    return
  }
  common.es(`tar -xzf /tmp/ethereum.tar.gz -C /tmp/`)

  common.logger.info('Updating wallet...')
  common.es(`cp /tmp/${coinRec.dir}/geth /usr/local/bin/geth`)
  common.es(`rm -r /tmp/${coinRec.dir}`)
  common.es(`rm /tmp/ethereum.tar.gz`)

  if (isCurrentlyRunning) {
    common.logger.info('Starting wallet...')
    common.es(`sudo supervisorctl start ethereum`)
  }

  common.logger.info('Geth is updated!')
}

// Installs the Geth node: opens its p2p port and registers a light-sync
// daemon with supervisord. Geth needs no config file — all flags are on
// the command line.
function setup(dataDir) {
  const coinRec = coinUtils.getCryptoCurrency('ETH')
  common.firewall([coinRec.defaultPort])
  const cmd = `/usr/local/bin/${coinRec.daemon} --datadir "${dataDir}" --syncmode="light" --cache 2048 --maxpeers 40 --http`
  common.writeSupervisorConfig(coinRec, cmd)
}
|
||||
336
packages/server/lib/blockchain/install.js
Normal file
336
packages/server/lib/blockchain/install.js
Normal file
|
|
@ -0,0 +1,336 @@
|
|||
const fs = require('fs')
|
||||
const path = require('path')
|
||||
const process = require('process')
|
||||
const os = require('os')
|
||||
|
||||
const makeDir = require('make-dir')
|
||||
const inquirer = require('inquirer')
|
||||
const _ = require('lodash/fp')
|
||||
|
||||
const { utils: coinUtils } = require('@lamassu/coins')
|
||||
const settingsLoader = require('../new-settings-loader')
|
||||
const wallet = require('../wallet')
|
||||
const {
|
||||
isDevMode,
|
||||
isRemoteNode,
|
||||
isRemoteWallet,
|
||||
} = require('../environment-helper')
|
||||
|
||||
const common = require('./common')
|
||||
const doVolume = require('./do-volume')
|
||||
|
||||
const cryptos = coinUtils.cryptoCurrencies()
|
||||
|
||||
const logger = common.logger
|
||||
|
||||
const PLUGINS = {
|
||||
BTC: require('./bitcoin.js'),
|
||||
BCH: require('./bitcoincash.js'),
|
||||
DASH: require('./dash.js'),
|
||||
LTC: require('./litecoin.js'),
|
||||
XMR: require('./monero.js'),
|
||||
}
|
||||
|
||||
const BLOCKCHAIN_DIR = process.env.BLOCKCHAIN_DIR
|
||||
|
||||
module.exports = {
|
||||
isEnvironmentValid,
|
||||
run,
|
||||
}
|
||||
|
||||
// Path of the marker file that records a completed install on the
// blockchain volume for the given coin.
function installedVolumeFilePath(crypto) {
  return path.resolve(coinUtils.cryptoDir(crypto, BLOCKCHAIN_DIR), '.installed')
}

// True when the '.installed' marker exists on the blockchain volume.
function isInstalledVolume(crypto) {
  return fs.existsSync(installedVolumeFilePath(crypto))
}

// True when the coin's software (binaries / supervisor configs) is installed.
function isInstalledSoftware(crypto) {
  return common.isInstalledSoftware(crypto)
}
|
||||
|
||||
// Installs the selected coin codes: sets each up, registers them with
// supervisord, installs a daily wallet.dat backup cron entry, runs any
// pending core upgrades, and starts the daemons.
function processCryptos(codes) {
  if (_.isEmpty(codes)) {
    logger.info('No cryptos selected. Exiting.')
    process.exit(0)
  }

  // NOTE(review): console-log-level's info() may not support printf-style
  // '%s' interpolation — confirm this prints the coin list as intended.
  logger.info(
    'Thanks! Installing: %s. Will take a while...',
    _.join(', ', codes),
  )

  const selectedCryptos = _.map(code => _.find(['code', code], cryptos), codes)

  if (isDevMode()) {
    _.forEach(setupCrypto, selectedCryptos)
  } else {
    // Production: make sure the block-storage volume is ready first.
    const goodVolume = doVolume.prepareVolume()

    if (!goodVolume) {
      logger.error('There was an error preparing the disk volume. Exiting.')
      process.exit(1)
    }

    _.forEach(setupCrypto, selectedCryptos)
    common.es('sudo supervisorctl reread')
    common.es('sudo supervisorctl update')

    // Idempotently install a daily cron entry that rsyncs every wallet.dat
    // from the blockchain dir into ~/backups (old entry filtered out first).
    const blockchainDir = BLOCKCHAIN_DIR
    const backupDir = path.resolve(os.homedir(), 'backups')
    const rsyncCmd = `( \
(crontab -l 2>/dev/null || echo -n "") | grep -v "@daily rsync ".*"wallet.dat"; \
echo "@daily rsync -r --prune-empty-dirs --include='*/' \
--include='wallet.dat' \
--exclude='*' ${blockchainDir} ${backupDir} > /dev/null" \
) | crontab -`
    common.es(rsyncCmd)

    // Upgrade update-dependent coins, then start each daemon.
    _.forEach(c => {
      updateCrypto(c)
      common.es(`sudo supervisorctl start ${c.code}`)
    }, selectedCryptos)
  }

  logger.info('Installation complete.')
}
|
||||
|
||||
/**
 * Validates that every environment variable needed to install/run the
 * given coin is set, throwing on the first missing one.
 * @param {object} crypto - crypto currency record (uses cryptoCode, display)
 * @returns {boolean} true when the environment is valid
 * @throws {Error} naming the missing variable or the invalid combination
 */
function isEnvironmentValid(crypto) {
  // Throws when the named environment variable is unset or empty.
  const requireEnv = name => {
    if (_.isEmpty(process.env[name]))
      throw new Error(`The environment variable for ${name} is not set!`)
  }

  requireEnv(`${crypto.cryptoCode}_NODE_LOCATION`)
  requireEnv(`${crypto.cryptoCode}_WALLET_LOCATION`)

  // A remote wallet implies a remote node; the converse is allowed.
  if (isRemoteWallet(crypto) && !isRemoteNode(crypto))
    throw new Error(
      `Invalid environment setup for ${crypto.display}: It's not possible to use a remote wallet without using a remote node!`,
    )

  // Remote node with a local wallet: need the node's p2p connection info.
  if (isRemoteNode(crypto) && !isRemoteWallet(crypto)) {
    requireEnv(`${crypto.cryptoCode}_NODE_HOST`)
    requireEnv(`${crypto.cryptoCode}_NODE_PORT`)

    // NOTE(review): preserved from the original — this check only runs for
    // remote-node/local-wallet setups, yet BLOCKCHAIN_DIR is presumably
    // required in every mode; confirm whether it should be hoisted out.
    requireEnv('BLOCKCHAIN_DIR')
  }

  // Fully remote wallet: need the RPC endpoint and its credentials.
  if (isRemoteWallet(crypto)) {
    requireEnv(`${crypto.cryptoCode}_NODE_RPC_HOST`)
    requireEnv(`${crypto.cryptoCode}_NODE_RPC_PORT`)
    requireEnv(`${crypto.cryptoCode}_NODE_USER`)
    requireEnv(`${crypto.cryptoCode}_NODE_PASSWORD`)
  }

  return true
}
|
||||
|
||||
// Installs a single coin: validates the environment, downloads binaries
// into a scratch dir, and writes config + supervisor entries.
// Remote-wallet coins need no local node and are skipped.
function setupCrypto(crypto) {
  logger.info(`Installing ${crypto.display}...`)

  if (!isEnvironmentValid(crypto))
    throw new Error(`Environment error for ${crypto.display}`)

  if (isRemoteWallet(crypto)) {
    logger.info(
      `Environment variable ${crypto.cryptoCode}_WALLET_LOCATION is set as 'remote', so there's no need to install a node in the system. Exiting...`,
    )
    return
  }

  const cryptoDir = coinUtils.cryptoDir(crypto, BLOCKCHAIN_DIR)
  makeDir.sync(cryptoDir)
  const cryptoPlugin = plugin(crypto)
  // Download/unpack inside a clean scratch dir; restore cwd when done.
  const oldDir = process.cwd()
  const tmpDir = isDevMode()
    ? path.resolve(BLOCKCHAIN_DIR, 'tmp', 'blockchain-install')
    : '/tmp/blockchain-install'

  makeDir.sync(tmpDir)
  process.chdir(tmpDir)
  common.es('rm -rf *')
  common.fetchAndInstall(crypto)

  cryptoPlugin.setup(cryptoDir)

  // Marker file recording a completed install on the persistent volume.
  if (!isDevMode()) {
    common.writeFile(installedVolumeFilePath(crypto), '')
  }

  process.chdir(oldDir)
}

// Upgrades a coin that was initially installed at its pinned default
// release; no-op for other coins.
function updateCrypto(crypto) {
  if (!common.isUpdateDependent(crypto.cryptoCode)) return
  const cryptoPlugin = plugin(crypto)
  // TODO: we need to refactor the way we retrieve this status, p.e Monero uses two
  // services with specific names, so each coin should have its implementation.
  // Currently, it's not a breaking change because only BTC is update dependent
  const status = common
    .es(`sudo supervisorctl status ${crypto.code} | awk '{ print $2 }'`)
    .trim()
  const isCurrentlyRunning = _.includes(status, ['RUNNING', 'STARTING'])
  cryptoPlugin.updateCore(
    common.getBinaries(crypto.cryptoCode),
    isCurrentlyRunning,
  )
}

// Resolves the per-coin install plugin; throws for unsupported coins.
function plugin(crypto) {
  const plugin = PLUGINS[crypto.cryptoCode]
  if (!plugin) throw new Error(`No such plugin: ${crypto.cryptoCode}`)
  return plugin
}
|
||||
|
||||
// Resolves to a map of cryptoCode -> blockchain status for every coin whose
// supervisor process is RUNNING. Dev mode short-circuits to an all-'ready'
// array instead (callers only tally the values).
function getBlockchainSyncStatus(cryptoList) {
  return settingsLoader.load().then(settings => {
    if (isDevMode()) return new Array(_.size(cryptoList)).fill('ready')

    // Sequentially query each running process' blockchain status,
    // accumulating into one object via a promise chain.
    const blockchainStatuses = _.reduce(
      (acc, value) => {
        const processStatus = common
          .es(`sudo supervisorctl status ${value.code} | awk '{ print $2 }'`)
          .trim()
        return acc.then(a => {
          if (processStatus === 'RUNNING') {
            return wallet
              .checkBlockchainStatus(settings, value.cryptoCode)
              .then(res => Promise.resolve({ ...a, [value.cryptoCode]: res }))
          }
          return Promise.resolve({ ...a })
        })
      },
      Promise.resolve({}),
      cryptoList,
    )

    return blockchainStatuses
  })
}

// Installed = software present; outside dev mode the on-volume '.installed'
// marker must exist as well.
function isInstalled(crypto) {
  return isDevMode()
    ? isInstalledSoftware(crypto)
    : isInstalledSoftware(crypto) && isInstalledVolume(crypto)
}

// Returns a truthy "disabled reason" string for the install checklist, or
// false when the coin is selectable. Monero is additionally disabled when
// Zcash is installed (disk-space constraint).
function isDisabled(crypto) {
  switch (crypto.cryptoCode) {
    case 'XMR':
      return (
        (isInstalled(crypto) && 'Installed') ||
        (isInstalled(_.find(it => it.code === 'zcash', cryptos)) &&
          'Insufficient resources. Contact support.')
      )
    default:
      return isInstalled(crypto) && 'Installed'
  }
}
|
||||
|
||||
// Interactive installer entry point: prompts for the coins to install,
// validates the selection, then hands off to processCryptos.
function run() {
  // One checkbox choice per installable coin; already-installed coins are
  // pre-checked and disabled with a reason string.
  const choices = _.flow([
    _.filter(c => !c.hideFromInstall),
    _.map(c => {
      return {
        name: c.display,
        value: c.code,
        checked: isInstalled(c),
        disabled: isDisabled(c),
      }
    }),
  ])(cryptos)

  const questions = []

  // Resolves to { isValid, message } for the selected coin codes.
  const validateAnswers = async answers => {
    // Hard limit of two coins per run, to cap concurrent initial syncs.
    if (_.size(answers) > 2)
      return {
        message: `Please insert a maximum of two coins to install.`,
        isValid: false,
      }

    // Zcash and Monero cannot coexist (blockchain disk footprint), whether
    // selected together or one selected while the other is installed.
    if (
      _.isEmpty(_.difference(['monero', 'zcash'], answers)) ||
      (_.includes('monero', answers) &&
        isInstalled(_.find(it => it.code === 'zcash', cryptos))) ||
      (_.includes('zcash', answers) &&
        isInstalled(_.find(it => it.code === 'monero', cryptos)))
    ) {
      return {
        message: `Zcash and Monero installations are temporarily mutually exclusive, given the space needed for their blockchains. Contact support for more information.`,
        isValid: false,
      }
    }

    return getBlockchainSyncStatus(cryptos).then(blockchainStatuses => {
      // Tally statuses into counts, e.g. { ready: 2, syncing: 1 }.
      const result = _.reduce(
        (acc, value) => ({
          ...acc,
          [value]: _.isNil(acc[value]) ? 1 : acc[value] + 1,
        }),
        {},
        _.values(blockchainStatuses),
      )
      // NOTE(review): result.syncing is undefined when nothing is syncing,
      // so these comparisons are NaN-based (false) and only work by
      // accident; also the second check can never fire once the first
      // passes — confirm intent before tightening.
      if (_.size(answers) + result.syncing > 2) {
        return {
          message: `Installing these coins would pass the 2 parallel blockchain synchronization limit. Please try again with fewer coins or try again later.`,
          isValid: false,
        }
      }

      if (result.syncing > 2) {
        return {
          message: `There are currently more than 2 blockchains in their initial synchronization. Please try again later.`,
          isValid: false,
        }
      }

      return { message: null, isValid: true }
    })
  }

  questions.push({
    type: 'checkbox',
    name: 'crypto',
    message:
      'Which cryptocurrencies would you like to install?\nTo prevent server resource overloading, only TWO coins should be syncing simultaneously.\nMore coins can be installed after this process is over.',
    choices,
  })

  inquirer
    .prompt(questions)
    .then(answers => Promise.all([validateAnswers(answers.crypto), answers]))
    .then(([res, answers]) => {
      if (res.isValid) {
        return processCryptos(answers.crypto)
      }
      logger.error(res.message)
    })
}
|
||||
100
packages/server/lib/blockchain/litecoin.js
Normal file
100
packages/server/lib/blockchain/litecoin.js
Normal file
|
|
@ -0,0 +1,100 @@
|
|||
const path = require('path')
|
||||
|
||||
const { utils: coinUtils } = require('@lamassu/coins')
|
||||
|
||||
const common = require('./common')
|
||||
|
||||
module.exports = { setup, updateCore }
|
||||
|
||||
const coinRec = coinUtils.getCryptoCurrency('LTC')
|
||||
|
||||
// Installs the Litecoin node: opens its p2p port, writes litecoin.conf,
// and registers the daemon with supervisord.
function setup(dataDir) {
  common.firewall([coinRec.defaultPort])
  const config = buildConfig()
  common.writeFile(path.resolve(dataDir, coinRec.configFile), config)
  const cmd = `/usr/local/bin/${coinRec.daemon} -datadir=${dataDir}`
  common.writeSupervisorConfig(coinRec, cmd)
}

// Upgrades an existing Litecoin Core installation to the release described
// by `coinRec` and backfills newer litecoin.conf settings (bech32 change
// addresses, block-filter index disabled) when they are absent.
function updateCore(coinRec, isCurrentlyRunning) {
  common.logger.info('Updating Litecoin Core. This may take a minute...')
  common.es(`sudo supervisorctl stop litecoin`)
  common.es(`curl -#o /tmp/litecoin.tar.gz ${coinRec.url}`)
  // Verify tarball integrity before unpacking; abort the update on mismatch.
  if (
    common.es(`sha256sum /tmp/litecoin.tar.gz | awk '{print $1}'`).trim() !==
    coinRec.urlHash
  ) {
    common.logger.info(
      'Failed to update Litecoin Core: Package signature do not match!',
    )
    return
  }
  common.es(`tar -xzf /tmp/litecoin.tar.gz -C /tmp/`)

  common.logger.info('Updating wallet...')
  common.es(`cp /tmp/${coinRec.dir}/* /usr/local/bin/`)
  common.es(`rm -r /tmp/${coinRec.dir.replace('/bin', '')}`)
  common.es(`rm /tmp/litecoin.tar.gz`)

  // Append changetype=bech32 unless some changetype is already configured.
  if (
    common.es(
      `grep "changetype=" /mnt/blockchains/litecoin/litecoin.conf || true`,
    )
  ) {
    common.logger.info(`changetype already defined, skipping...`)
  } else {
    common.logger.info(`Enabling bech32 change addresses in config file..`)
    common.es(
      `echo "\nchangetype=bech32" >> /mnt/blockchains/litecoin/litecoin.conf`,
    )
  }

  // Append blockfilterindex=0 unless already configured.
  if (
    common.es(
      `grep "blockfilterindex=" /mnt/blockchains/litecoin/litecoin.conf || true`,
    )
  ) {
    common.logger.info(`blockfilterindex already defined, skipping...`)
  } else {
    common.logger.info(`Disabling blockfilterindex in config file..`)
    common.es(
      `echo "\nblockfilterindex=0" >> /mnt/blockchains/litecoin/litecoin.conf`,
    )
  }

  // Append peerblockfilters=0 unless already configured.
  if (
    common.es(
      `grep "peerblockfilters=" /mnt/blockchains/litecoin/litecoin.conf || true`,
    )
  ) {
    common.logger.info(`peerblockfilters already defined, skipping...`)
  } else {
    common.logger.info(`Disabling peerblockfilters in config file..`)
    common.es(
      `echo "\npeerblockfilters=0" >> /mnt/blockchains/litecoin/litecoin.conf`,
    )
  }

  if (isCurrentlyRunning) {
    common.logger.info('Starting wallet...')
    common.es(`sudo supervisorctl start litecoin`)
  }

  common.logger.info('Litecoin Core is updated!')
}
|
||||
|
||||
/**
 * Renders the litecoin.conf contents for a freshly installed node.
 * A random RPC password is generated on every call.
 * @returns {string} config file body (trailing newline included)
 */
function buildConfig() {
  const settings = [
    'rpcuser=lamassuserver',
    `rpcpassword=${common.randomPass()}`,
    'dbcache=500',
    'server=1',
    'connections=40',
    'keypool=10000',
    'prune=4000',
    'daemon=0',
    'addresstype=p2sh-segwit',
    'changetype=bech32',
    'blockfilterindex=0',
    'peerblockfilters=0',
  ]
  return settings.join('\n') + '\n'
}
|
||||
61
packages/server/lib/blockchain/monero.js
Normal file
61
packages/server/lib/blockchain/monero.js
Normal file
|
|
@ -0,0 +1,61 @@
|
|||
const path = require('path')
|
||||
|
||||
const { utils } = require('@lamassu/coins')
|
||||
|
||||
const common = require('./common')
|
||||
|
||||
module.exports = { setup, updateCore }
|
||||
|
||||
const coinRec = utils.getCryptoCurrency('XMR')
|
||||
|
||||
// Installs the Monero node plus its wallet-rpc companion: opens the p2p
// port, writes the daemon config (with a shared random RPC login), and
// registers both processes with supervisord.
function setup(dataDir) {
  common.firewall([coinRec.defaultPort])
  // Same user:password pair is used by monerod's RPC and the wallet-rpc.
  const auth = `lamassuserver:${common.randomPass()}`
  const config = buildConfig(auth)
  common.writeFile(path.resolve(dataDir, coinRec.configFile), config)
  const cmd = `/usr/local/bin/${coinRec.daemon} --no-zmq --data-dir ${dataDir} --config-file ${dataDir}/${coinRec.configFile}`
  const walletCmd = `/usr/local/bin/${coinRec.wallet} --rpc-login ${auth} --daemon-host 127.0.0.1 --daemon-port 18081 --trusted-daemon --daemon-login ${auth} --rpc-bind-port 18082 --wallet-dir ${dataDir}/wallets`
  common.writeSupervisorConfig(coinRec, cmd, walletCmd)
}

// Upgrades monerod and monero-wallet-rpc to the release described by
// `coinRec`, verifying the tarball's sha256 before installing.
function updateCore(coinRec, isCurrentlyRunning) {
  common.logger.info('Updating Monero. This may take a minute...')
  common.es(`sudo supervisorctl stop monero monero-wallet`)
  common.es(`curl -#o /tmp/monero.tar.gz ${coinRec.url}`)
  // Verify tarball integrity before unpacking; abort the update on mismatch.
  if (
    common.es(`sha256sum /tmp/monero.tar.gz | awk '{print $1}'`).trim() !==
    coinRec.urlHash
  ) {
    common.logger.info(
      'Failed to update Monero: Package signature do not match!',
    )
    return
  }
  common.es(`tar -xf /tmp/monero.tar.gz -C /tmp/`)

  common.logger.info('Updating wallet...')
  common.es(`cp /tmp/${coinRec.dir}/monerod /usr/local/bin/monerod`)
  common.es(
    `cp /tmp/${coinRec.dir}/monero-wallet-rpc /usr/local/bin/monero-wallet-rpc`,
  )
  common.es(`rm -r /tmp/${coinRec.dir.replace('/bin', '')}`)
  common.es(`rm /tmp/monero.tar.gz`)

  if (isCurrentlyRunning) {
    common.logger.info('Starting wallet...')
    common.es(`sudo supervisorctl start monero monero-wallet`)
  }

  common.logger.info('Monero is updated!')
}
|
||||
|
||||
/**
 * Renders the monerod config file contents.
 * @param {string} auth - 'user:password' RPC login pair
 * @returns {string} config file body (trailing newline included)
 */
function buildConfig(auth) {
  const settings = [
    `rpc-login=${auth}`,
    'stagenet=0',
    'restricted-rpc=1',
    'db-sync-mode=safe',
    'out-peers=20',
    'in-peers=20',
    'prune-blockchain=1',
  ]
  return settings.join('\n') + '\n'
}
|
||||
94
packages/server/lib/blockchain/zcash.js
Normal file
94
packages/server/lib/blockchain/zcash.js
Normal file
|
|
@ -0,0 +1,94 @@
|
|||
const path = require('path')
|
||||
|
||||
const { utils: coinUtils } = require('@lamassu/coins')
|
||||
|
||||
const common = require('./common')
|
||||
|
||||
module.exports = { setup, updateCore }
|
||||
|
||||
const es = common.es
|
||||
const logger = common.logger
|
||||
|
||||
/**
 * Stop zcashd, download and checksum-verify the release tarball, install the
 * binaries, ensure required config keys exist, and optionally restart.
 *
 * @param {Object} coinRec - coin record with `url`, `urlHash` (expected
 *   sha256) and `dir` (extracted bin directory under /tmp).
 * @param {boolean} isCurrentlyRunning - when true, restart zcashd afterwards.
 */
function updateCore(coinRec, isCurrentlyRunning) {
  common.logger.info('Updating your Zcash wallet. This may take a minute...')
  common.es(`sudo supervisorctl stop zcash`)
  common.es(`curl -#Lo /tmp/zcash.tar.gz ${coinRec.url}`)

  // Verify the download against the pinned sha256 before installing.
  if (
    common.es(`sha256sum /tmp/zcash.tar.gz | awk '{print $1}'`).trim() !==
    coinRec.urlHash
  ) {
    // Fix: message grammar ("signature do not match" -> "signature does not match").
    common.logger.info(
      'Failed to update Zcash: Package signature does not match!',
    )
    return
  }
  common.es(`tar -xzf /tmp/zcash.tar.gz -C /tmp/`)

  common.logger.info('Updating wallet...')
  common.es(`cp /tmp/${coinRec.dir}/* /usr/local/bin/`)
  common.es(`rm -r /tmp/${coinRec.dir.replace('/bin', '')}`)
  common.es(`rm /tmp/zcash.tar.gz`)

  // Append walletrequirebackup=false unless already present; grep's exit code
  // is masked with `|| true`, so the truthiness check is on grep's stdout.
  if (
    common.es(
      `grep "walletrequirebackup=" /mnt/blockchains/zcash/zcash.conf || true`,
    )
  ) {
    common.logger.info(`walletrequirebackup already defined, skipping...`)
  } else {
    common.logger.info(`Setting 'walletrequirebackup=false' in config file...`)
    common.es(
      `echo "\nwalletrequirebackup=false" >> /mnt/blockchains/zcash/zcash.conf`,
    )
  }

  // Same pattern for the zcashd-deprecation acknowledgement flag.
  if (
    common.es(
      `grep "i-am-aware-zcashd-will-be-replaced-by-zebrad-and-zallet-in-2025=" /mnt/blockchains/zcash/zcash.conf || true`,
    )
  ) {
    common.logger.info(
      `i-am-aware-zcashd-will-be-replaced-by-zebrad-and-zallet-in-2025 already defined, skipping...`,
    )
  } else {
    common.logger.info(
      `Setting 'i-am-aware-zcashd-will-be-replaced-by-zebrad-and-zallet-in-2025=1' in config file...`,
    )
    common.es(
      `echo "\ni-am-aware-zcashd-will-be-replaced-by-zebrad-and-zallet-in-2025=1" >> /mnt/blockchains/zcash/zcash.conf`,
    )
  }

  if (isCurrentlyRunning) {
    common.logger.info('Starting wallet...')
    common.es(`sudo supervisorctl start zcash`)
  }

  common.logger.info('Zcash is updated!')
}
|
||||
|
||||
/**
 * First-time Zcash node setup: install runtime dependencies, open the
 * firewall, fetch the zk-SNARK proving parameters, write zcash.conf and the
 * supervisor unit for zcashd.
 *
 * @param {string} dataDir - the node's data directory.
 */
function setup(dataDir) {
  // libgomp1 is installed as a runtime dependency of the zcash binaries.
  es('sudo apt-get update')
  es('sudo apt-get install libgomp1 -y')

  const rec = coinUtils.getCryptoCurrency('ZEC')

  common.firewall([rec.defaultPort])

  logger.info('Fetching Zcash proofs, will take a while...')
  es('zcash-fetch-params 2>&1')
  logger.info('Finished fetching proofs.')

  common.writeFile(path.resolve(dataDir, rec.configFile), buildConfig())

  const daemonCmd = `/usr/local/bin/${rec.daemon} -datadir=${dataDir}`
  common.writeSupervisorConfig(rec, daemonCmd)
}
|
||||
|
||||
/**
 * Build the zcash.conf contents. A fresh random RPC password is generated on
 * every call.
 *
 * @returns {string} newline-terminated config file body.
 */
function buildConfig() {
  const configLines = [
    'mainnet=1',
    'addnode=mainnet.z.cash',
    'rpcuser=lamassuserver',
    `rpcpassword=${common.randomPass()}`,
    'dbcache=500',
    'keypool=10000',
    'walletrequirebackup=false',
    '', // trailing empty entry yields the final newline
  ]
  return configLines.join('\n')
}
|
||||
18
packages/server/lib/blockexplorers/mempool.space.js
Normal file
18
packages/server/lib/blockexplorers/mempool.space.js
Normal file
|
|
@ -0,0 +1,18 @@
|
|||
const axios = require('axios')
|
||||
|
||||
const getSatBEstimateFee = () => {
|
||||
return axios
|
||||
.get('https://mempool.space/api/v1/fees/recommended')
|
||||
.then(r => r.data.hourFee)
|
||||
}
|
||||
|
||||
const getSatBEstimateFees = () => {
|
||||
return axios
|
||||
.get('https://mempool.space/api/v1/fees/recommended')
|
||||
.then(r => r.data)
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
getSatBEstimateFees,
|
||||
getSatBEstimateFee,
|
||||
}
|
||||
5
packages/server/lib/bn.js
Normal file
5
packages/server/lib/bn.js
Normal file
|
|
@ -0,0 +1,5 @@
|
|||
const BigNumber = require('bignumber.js')

// Configure banker's rounding (round half to even) globally; this avoids the
// cumulative bias of always rounding halves upward in monetary arithmetic.
BigNumber.config({ ROUNDING_MODE: BigNumber.ROUND_HALF_EVEN })

// The whole app shares this pre-configured BigNumber constructor.
module.exports = BigNumber
|
||||
106
packages/server/lib/cash-in/cash-in-atomic.js
Normal file
106
packages/server/lib/cash-in/cash-in-atomic.js
Normal file
|
|
@ -0,0 +1,106 @@
|
|||
const _ = require('lodash/fp')
|
||||
const pgp = require('pg-promise')()
|
||||
|
||||
const db = require('../db')
|
||||
const E = require('../error')
|
||||
|
||||
const cashInLow = require('./cash-in-low')
|
||||
|
||||
module.exports = { atomic }
|
||||
|
||||
// Apply a machine-reported cash-in tx atomically: inside one serializable DB
// transaction, load the current row + its bills, reject stale versions,
// upsert the merged tx, and insert any bills not yet persisted.
// Resolves with the upsert result extended with `newBills`.
function atomic(machineTx) {
  const TransactionMode = pgp.txMode.TransactionMode
  const isolationLevel = pgp.txMode.isolationLevel
  // Serializable isolation: conflicting concurrent updates will error out
  // rather than silently interleave (see preProcess for why this matters).
  const mode = new TransactionMode({ tiLevel: isolationLevel.serializable })
  function transaction(t) {
    const sql = 'select * from cash_in_txs where id=$1'
    const sql2 = 'select * from bills where cash_in_txs_id=$1'

    return t.oneOrNone(sql, [machineTx.id]).then(row => {
      // The machine bumps tx_version on every post; an equal-or-newer DB row
      // means this update is stale and must be rejected.
      if (row && row.tx_version >= machineTx.txVersion)
        throw new E.StaleTxError({ txId: machineTx.id })

      return t.any(sql2, [machineTx.id]).then(billRows => {
        const dbTx = cashInLow.toObj(row)

        return preProcess(dbTx, machineTx)
          .then(preProcessedTx => cashInLow.upsert(t, dbTx, preProcessedTx))
          .then(r => {
            return insertNewBills(t, billRows, machineTx).then(newBills =>
              _.set('newBills', newBills, r),
            )
          })
      })
    })
  }
  return db.tx({ mode }, transaction)
}
|
||||
|
||||
// Persist bills reported by the machine that are not yet in the DB, and bump
// the device's per-recycler counters by the number of new bills routed to
// each destination unit. Resolves with the newly inserted bills.
function insertNewBills(t, billRows, machineTx) {
  const bills = pullNewBills(billRows, machineTx)
  if (_.isEmpty(bills)) return Promise.resolve([])

  const dbBills = _.map(cashInLow.massage, bills)
  // e.g. { recycler1: 2, recycler3: 1 } — counts of new bills per unit.
  const billsByDestination = _.countBy(_.get(['destination_unit']), dbBills)

  const columns = [
    'id',
    'fiat',
    'fiat_code',
    'crypto_code',
    'cash_in_fee',
    'cash_in_txs_id',
    'device_time',
    'destination_unit',
  ]
  const sql = pgp.helpers.insert(dbBills, columns, 'bills')
  const deviceID = machineTx.deviceId
  const sql2 = `update devices set recycler1 = recycler1 + $2, recycler2 = recycler2 + $3, recycler3 = recycler3 + $4, recycler4 = recycler4 + $5, recycler5 = recycler5 + $6, recycler6 = recycler6 + $7
  where device_id = $1`

  // Counters are updated first, then the bills are inserted; both run inside
  // the caller's transaction `t`, so they commit or roll back together.
  return t
    .none(sql2, [
      deviceID,
      _.defaultTo(0, billsByDestination.recycler1),
      _.defaultTo(0, billsByDestination.recycler2),
      _.defaultTo(0, billsByDestination.recycler3),
      _.defaultTo(0, billsByDestination.recycler4),
      _.defaultTo(0, billsByDestination.recycler5),
      _.defaultTo(0, billsByDestination.recycler6),
    ])
    .then(() => {
      return t.none(sql)
    })
    .then(() => bills)
}
|
||||
|
||||
// Return the machine-reported bills that are not yet persisted, comparing by
// bill id against the existing DB rows (keys camel-cased for comparison).
function pullNewBills(billRows, machineTx) {
  const reportedBills = machineTx.bills
  if (_.isEmpty(reportedBills)) return []

  const persistedBills = _.map(_.mapKeys(_.camelCase), billRows)

  return _.differenceBy(_.get('id'), reportedBills, persistedBills)
}
|
||||
|
||||
// Decide whether this update may mark the tx as sendPending.
//
// Note: The way this works is if we're clear to send, we mark the transaction
// as sendPending. If another process is trying to also mark this as
// sendPending, that means that it saw the tx as sendPending=false. But if
// that's true, then it must be serialized before this (otherwise it would see
// sendPending=true), and therefore we can't be seeing sendPending=false
// (a pre-condition of clearToSend()). Therefore, one of the conflicting
// transactions will error, which is what we want.
//
// Fix: the logic is fully synchronous, so the explicit `new Promise(...)`
// wrapper (promise-construction anti-pattern) is replaced by
// `Promise.resolve` — same resolved values, same interface.
function preProcess(dbTx, machineTx) {
  if (!dbTx) return Promise.resolve(machineTx)

  if (cashInLow.isClearToSend(dbTx, machineTx)) {
    return Promise.resolve(_.set('sendPending', true, machineTx))
  }

  return Promise.resolve(machineTx)
}
|
||||
195
packages/server/lib/cash-in/cash-in-low.js
Normal file
195
packages/server/lib/cash-in/cash-in-low.js
Normal file
|
|
@ -0,0 +1,195 @@
|
|||
const _ = require('lodash/fp')
|
||||
const pgp = require('pg-promise')()
|
||||
|
||||
const BN = require('../bn')
|
||||
const T = require('../time')
|
||||
const logger = require('../logger')
|
||||
const E = require('../error')
|
||||
|
||||
// Age limit past which an unsent tx is no longer clear to send (see isClearToSend).
const PENDING_INTERVAL_MS = 60 * T.minutes

// In-memory-only (or separately persisted) tx fields — stripped before
// writing a cash_in_txs row.
const massageFields = [
  'direction',
  'cryptoNetwork',
  'bills',
  'blacklisted',
  'blacklistMessage',
  'addressReuse',
  'promoCodeApplied',
  'validWalletScore',
  'cashInFeeCrypto',
]
// Partial updates additionally omit cryptoAtoms — presumably so non-final
// updates cannot clobber the amount; confirm against `update()`'s usage.
const massageUpdateFields = _.concat(massageFields, 'cryptoAtoms')

// Convert an in-memory tx to a DB row: drop non-persisted fields, stringify
// BigNumbers, snake_case the keys.
const massage = _.flow(
  _.omit(massageFields),
  convertBigNumFields,
  _.mapKeys(_.snakeCase),
)

// Same as `massage` but for partial (non-final) updates.
const massageUpdates = _.flow(
  _.omit(massageUpdateFields),
  convertBigNumFields,
  _.mapKeys(_.snakeCase),
)

module.exports = { toObj, upsert, insert, update, massage, isClearToSend }
|
||||
|
||||
// Map every BigNumber value in `obj` to its string form; all other values
// (including null/undefined) pass through unchanged.
function convertBigNumFields(obj) {
  const stringifyBigNum = value => {
    if (value && BN.isBigNumber(value)) return value.toString()
    return value
  }
  return _.mapValues(stringifyBigNum, obj)
}
|
||||
|
||||
// Convert a cash_in_txs DB row into an in-memory tx object: camelCase keys,
// numeric money columns re-wrapped as BigNumbers, and direction tagged as
// 'cashIn'. Returns null for a null/undefined row.
function toObj(row) {
  if (!row) return null

  const bigNumColumns = [
    'crypto_atoms',
    'fiat',
    'cash_in_fee',
    'commission_percentage',
    'raw_ticker_price',
  ]

  const tx = {}
  for (const key of _.keys(row)) {
    const objKey = _.camelCase(key)
    tx[objKey] = _.includes(key, bigNumColumns) ? new BN(row[key]) : row[key]
  }

  tx.direction = 'cashIn'

  return tx
}
|
||||
|
||||
// Insert the tx when it doesn't exist yet, otherwise update it with the
// ratchet-checked diff. Resolves with { dbTx, tx } where dbTx is the
// pre-existing row object (or null) and tx is the persisted result.
function upsert(t, dbTx, preProcessedTx) {
  const wrap = tx => ({ dbTx, tx })

  if (!dbTx) return insert(t, preProcessedTx).then(wrap)

  const changes = diff(dbTx, preProcessedTx)
  return update(t, dbTx, changes).then(wrap)
}
|
||||
|
||||
// Insert a new cash_in_txs row for `tx` and resolve with the stored row
// converted back to an in-memory tx object.
function insert(t, tx) {
  const row = massage(tx)
  const sql = `${pgp.helpers.insert(row, null, 'cash_in_txs')} returning *`
  return t.one(sql).then(toObj)
}
|
||||
|
||||
// Apply `changes` to the tx's row and resolve with the updated row as an
// in-memory tx object; resolves with `tx` untouched when there is nothing
// to change.
function update(t, tx, changes) {
  if (_.isEmpty(changes)) return Promise.resolve(tx)

  // Final stages (send/batched) persist the full field set; earlier updates
  // go through massageUpdates, which additionally omits cryptoAtoms.
  const toRow = isFinalTxStage(changes) ? massage : massageUpdates
  const setClause = pgp.helpers.update(toRow(changes), null, 'cash_in_txs')
  const whereClause = pgp.as.format(' where id=$1', [tx.id])

  return t.one(setClause + whereClause + ' returning *').then(toObj)
}
|
||||
|
||||
// Compute the set of fields that changed between the stored tx and the
// machine-reported tx, enforcing the ratchet rules. Bills are handled
// separately and skipped here. Throws E.RatchetError when the machine tries
// to move a ratcheted field backwards.
function diff(oldTx, newTx) {
  if (!oldTx) throw new Error('oldTx must not be null')
  if (!newTx) throw new Error('newTx must not be null')

  const updatedTx = {}

  for (const fieldKey of _.keys(newTx)) {
    if (fieldKey === 'bills') continue

    const oldField = oldTx[fieldKey]
    const newField = newTx[fieldKey]

    // nilEqual treats null and undefined as mutually equal.
    if (_.isEqualWith(nilEqual, oldField, newField)) continue

    if (!ensureRatchet(oldField, newField, fieldKey)) {
      logger.warn(
        'Value from lamassu-machine would violate ratchet [%s]',
        fieldKey,
      )
      logger.warn('Old tx: %j', oldTx)
      logger.warn('New tx: %j', newTx)
      throw new E.RatchetError(
        'Value from lamassu-machine would violate ratchet',
      )
    }

    updatedTx[fieldKey] = newField
  }

  return updatedTx
}
|
||||
|
||||
// Decide whether replacing `oldField` with `newField` is allowed.
// - monotonic fields may only move forward (see isMonotonic);
// - "free" fields may change arbitrarily but never back to nil;
// - everything else is effectively immutable once set (only equal values pass).
// A nil old value always accepts the new one.
function ensureRatchet(oldField, newField, fieldKey) {
  const monotonic = [
    'cryptoAtoms',
    'fiat',
    'send',
    'sendConfirmed',
    'operatorCompleted',
    'timedout',
    'txVersion',
    'batched',
    'discount',
  ]
  const free = [
    'sendPending',
    'error',
    'errorCode',
    'customerId',
    'discountSource',
  ]

  if (_.isNil(oldField)) return true
  if (_.includes(fieldKey, monotonic))
    return isMonotonic(oldField, newField, fieldKey)

  if (_.includes(fieldKey, free)) {
    if (_.isNil(newField)) return false
    return true
  }

  if (_.isNil(newField)) return false
  // BigNumbers compare by value, not identity.
  if (BN.isBigNumber(oldField) && BN.isBigNumber(newField))
    return new BN(oldField).eq(newField)
  // Fallback: compare string forms (covers Dates, numbers vs strings, etc.).
  if (oldField.toString() === newField.toString()) return true

  return false
}
|
||||
|
||||
// True when `newField` does not move the value backwards: booleans may only
// flip false -> true, numeric values may only grow. Throws on types that are
// not covered by the ratchet rules.
function isMonotonic(oldField, newField, fieldKey) {
  if (_.isNil(newField)) return false

  if (_.isBoolean(oldField)) {
    return !oldField || oldField === newField
  }
  if (BN.isBigNumber(oldField)) return oldField.lte(newField)
  if (_.isNumber(oldField)) return oldField <= newField

  throw new Error(`Unexpected value [${fieldKey}]: ${oldField}, ${newField}`)
}
|
||||
|
||||
// Customizer for _.isEqualWith: treat null and undefined as mutually equal;
// returning undefined defers to the default comparison.
function nilEqual(a, b) {
  return _.isNil(a) && _.isNil(b) ? true : undefined
}
|
||||
|
||||
// True when the tx should be sent now: the machine wants it sent (or
// batched), no send is already pending/confirmed, and the tx is younger than
// the pending window.
function isClearToSend(oldTx, newTx) {
  const cutoff = Date.now() - PENDING_INTERVAL_MS

  const wantsSend = newTx.send || newTx.batched
  const notAlreadySending =
    !oldTx || (!oldTx.sendPending && !oldTx.sendConfirmed)

  return wantsSend && notAlreadySending && newTx.created > cutoff
}
|
||||
|
||||
// A change set represents a final tx stage when it marks the tx as sent or
// batched (truthiness of either flag, preserving the original's `||` result).
function isFinalTxStage(txChanges) {
  const { send, batched } = txChanges
  return send || batched
}
|
||||
272
packages/server/lib/cash-in/cash-in-tx.js
Normal file
272
packages/server/lib/cash-in/cash-in-tx.js
Normal file
|
|
@ -0,0 +1,272 @@
|
|||
const _ = require('lodash/fp')
|
||||
const pgp = require('pg-promise')()
|
||||
const pEachSeries = require('p-each-series')
|
||||
|
||||
const blacklist = require('../blacklist')
|
||||
const db = require('../db')
|
||||
const plugins = require('../plugins')
|
||||
const logger = require('../logger')
|
||||
const settingsLoader = require('../new-settings-loader')
|
||||
const configManager = require('../new-config-manager')
|
||||
const notifier = require('../notifier')
|
||||
const constants = require('../constants')
|
||||
|
||||
const cashInAtomic = require('./cash-in-atomic')
|
||||
const cashInLow = require('./cash-in-low')
|
||||
|
||||
// Window during which an unsent cash-in tx may still be retried; also used
// verbatim as a Postgres interval literal below and in monitorPending.
const PENDING_INTERVAL = '60 minutes'
// Max number of pending txs processed per monitorPending sweep.
const MAX_PENDING = 10

// SQL fragment mapping a cash_in_txs row to a human-readable state label.
const TRANSACTION_STATES = `
  case
    when operator_completed and error = 'Operator cancel' then 'Cancelled'
    when error is not null then 'Error'
    when send_confirmed then 'Sent'
    when ((not send_confirmed) and (created <= now() - interval '${PENDING_INTERVAL}')) then 'Expired'
    else 'Pending'
  end`

module.exports = {
  post,
  monitorPending,
  cancel,
  doesTxReuseAddress,
  PENDING_INTERVAL,
  TRANSACTION_STATES,
}
|
||||
|
||||
// Handle a cash-in tx update from a machine: atomically merge it into the
// DB, run compliance checks on the first post (blacklist, address reuse,
// wallet scoring), then post-process (trades / coin send) and persist the
// resulting changes. Resolves with the tx decorated with the in-memory
// fields the machine expects back.
function post(machineTx, pi) {
  logger.silly('Updating cashin tx:', machineTx)
  return cashInAtomic.atomic(machineTx).then(r => {
    const updatedTx = r.tx
    let addressReuse = false

    const promises = [settingsLoader.loadConfig()]

    // Zero-fiat tx means no bills inserted yet, i.e. the machine's first
    // post for this tx — the only point where compliance checks run.
    const isFirstPost = !r.tx.fiat || r.tx.fiat.isZero()
    if (isFirstPost) {
      promises.push(
        checkForBlacklisted(updatedTx),
        doesTxReuseAddress(updatedTx),
        getWalletScore(updatedTx, pi),
      )
    }

    return Promise.all(promises).then(
      // Defaults apply on non-first posts, where only config is loaded.
      ([
        config,
        blacklisted = false,
        isReusedAddress = false,
        walletScore = null,
      ]) => {
        const { rejectAddressReuse } = configManager.getCompliance(config)
        const isBlacklisted = !!blacklisted

        if (isBlacklisted) {
          notifier.notifyIfActive('compliance', 'blacklistNotify', r.tx, false)
        } else if (isReusedAddress && rejectAddressReuse) {
          notifier.notifyIfActive('compliance', 'blacklistNotify', r.tx, true)
          addressReuse = true
        }
        return postProcess(r, pi, isBlacklisted, addressReuse, walletScore)
          .then(changes =>
            _.set(
              'walletScore',
              _.isNil(walletScore) ? null : walletScore.score,
              changes,
            ),
          )
          .then(changes => cashInLow.update(db, updatedTx, changes))
          .then(
            // Re-attach in-memory-only fields stripped by persistence.
            _.flow(
              _.set('bills', machineTx.bills),
              _.set('blacklisted', isBlacklisted),
              _.set('blacklistMessage', blacklisted?.content),
              _.set('addressReuse', addressReuse),
              _.set(
                'validWalletScore',
                _.isNil(walletScore) || walletScore.isValid,
              ),
            ),
          )
      },
    )
  })
}
|
||||
|
||||
// Register a buy trade with the plugins layer for each newly inserted bill.
function registerTrades(pi, r) {
  const { tx, newBills } = r
  _.forEach(bill => pi.buy(bill, tx), newBills)
}
|
||||
|
||||
// Record a cash_in_actions row for the send attempt described by `rec`.
// Resolves with `rec` so it can be chained through.
function logAction(rec, tx) {
  // Without an explicit action, classify by whether the send was confirmed.
  const defaultAction = rec.sendConfirmed ? 'sendCoins' : 'sendCoinsError'

  const row = {
    tx_id: tx.id,
    action: rec.action || defaultAction,
    error: rec.error,
    error_code: rec.errorCode,
    tx_hash: rec.txHash,
  }

  return db
    .none(pgp.helpers.insert(row, null, 'cash_in_actions'))
    .then(_.constant(rec))
}
|
||||
|
||||
// Record a cash_in_actions row for a tx referenced only by id.
function logActionById(action, _rec, txId) {
  const row = _.assign(_rec, { action, tx_id: txId })
  return db.none(pgp.helpers.insert(row, null, 'cash_in_actions'))
}
|
||||
|
||||
// Look up the tx's destination address in the blacklist; resolves with the
// matching entry (truthy) or a falsy value when the address is clean.
function checkForBlacklisted(tx) {
  const { toAddress } = tx
  return blacklist.blocked(toAddress)
}
|
||||
|
||||
// Decide what changes to persist after the atomic merge. Compliance failures
// short-circuit the tx with operatorCompleted + an error; otherwise trades
// are registered and, when clear to send, coins are sent (or batched) and the
// outcome is logged. Resolves with the change set for cashInLow.update.
function postProcess(r, pi, isBlacklisted, addressReuse, walletScore) {
  if (addressReuse) {
    return Promise.resolve({
      operatorCompleted: true,
      error: 'Address Reused',
    })
  }

  if (isBlacklisted) {
    return Promise.resolve({
      operatorCompleted: true,
      error: 'Blacklisted Address',
    })
  }

  if (!_.isNil(walletScore) && !walletScore.isValid) {
    return Promise.resolve({
      walletScore: walletScore.score,
      operatorCompleted: true,
      error: 'Chain analysis score is above defined threshold',
      errorCode: 'scoreThresholdReached',
    })
  }

  registerTrades(pi, r)

  if (!cashInLow.isClearToSend(r.dbTx, r.tx)) return Promise.resolve({})

  // 'now()^' is passed through to SQL unescaped (pg-promise raw-text
  // formatting) so timestamps are set by the database clock.
  return pi
    .sendCoins(r.tx)
    .then(txObj => {
      if (txObj.batched) {
        return {
          batched: true,
          batchTime: 'now()^',
          sendPending: true,
          error: null,
          errorCode: null,
        }
      }

      return {
        txHash: txObj.txid,
        fee: txObj.fee,
        sendConfirmed: true,
        sendTime: 'now()^',
        sendPending: false,
        error: null,
        errorCode: null,
      }
    })
    .catch(err => {
      // Important: We don't know what kind of error this is
      // so not safe to assume that funds weren't sent.

      // Setting sendPending to true ensures that the transaction gets
      // silently terminated and no retries are done

      return {
        sendTime: 'now()^',
        error: err.message,
        errorCode: err.name,
        sendPending: true,
      }
    })
    .then(sendRec => {
      // Notification is fire-and-forget; failures are logged, not propagated.
      pi.notifyOperator(r.tx, sendRec).catch(err =>
        logger.error('Failure sending transaction notification', err),
      )
      return logAction(sendRec, r.tx)
    })
}
|
||||
|
||||
// This feels like it can be simplified,
// but it's the most concise query to express the requirement and its edge cases.
// At most only one authenticated customer can use an address.
// If the current customer is anon, we can still allow one other customer to use the address,
// So we count distinct customers plus the current customer if they are not anonymous.
// To prevent malicious blocking of address, we only check for txs with actual fiat
//
// Resolves true when using `toAddress` for this customer would constitute
// address reuse under the rules above.
// $1 = toAddress, $2 = current customerId, $3 = anonymous customer uuid.
function doesTxReuseAddress({ toAddress, customerId }) {
  const sql = `
  SELECT COUNT(*) > 1 as exists
  FROM (SELECT DISTINCT customer_id
        FROM cash_in_txs
        WHERE to_address = $1
          AND customer_id != $3
          AND fiat > 0
        UNION
        SELECT $2
        WHERE $2 != $3) t;
  `
  return db
    .one(sql, [toAddress, customerId, constants.anonymousCustomer.uuid])
    .then(({ exists }) => exists)
}
|
||||
|
||||
// Rate the tx's destination address when wallet scoring is enabled for this
// tx; resolves with null when scoring is disabled.
function getWalletScore(tx, pi) {
  return pi
    .isWalletScoringEnabled(tx)
    .then(isEnabled =>
      isEnabled ? pi.rateAddress(tx.cryptoCode, tx.toAddress) : null,
    )
}
|
||||
|
||||
// Sweep cash-in txs that are flagged to send but neither confirmed, pending
// nor operator-completed, and re-run `post` on each sequentially (oldest
// first, capped at MAX_PENDING). Errors are logged, never propagated.
function monitorPending(settings) {
  const sql = `select * from cash_in_txs
  where created > now() - interval $1
  and send
  and not send_confirmed
  and not send_pending
  and not operator_completed
  order by created
  limit $2`

  const retryTx = row => {
    const tx = cashInLow.toObj(row)
    return post(tx, plugins(settings, tx.deviceId)).catch(logger.error)
  }

  return db
    .any(sql, [PENDING_INTERVAL, MAX_PENDING])
    .then(rows => pEachSeries(rows, retryTx))
    .catch(logger.error)
}
|
||||
|
||||
// Operator-cancel a cash-in tx: mark it errored/completed and detach it from
// any batch. Throws when the tx id does not match exactly one row; logs an
// operatorCompleted action on success.
function cancel(txId) {
  const cancelFields = {
    error: 'Operator cancel',
    error_code: 'operatorCancel',
    operator_completed: true,
    batch_id: null,
  }

  return Promise.resolve()
    .then(
      () =>
        pgp.helpers.update(cancelFields, null, 'cash_in_txs') +
        pgp.as.format(' where id=$1', [txId]),
    )
    .then(sql => db.result(sql, false))
    .then(res => {
      if (res.rowCount !== 1) throw new Error('No such tx-id')
    })
    .then(() => logActionById('operatorCompleted', {}, txId))
}
|
||||
63
packages/server/lib/cash-out/cash-out-actions.js
Normal file
63
packages/server/lib/cash-out/cash-out-actions.js
Normal file
|
|
@ -0,0 +1,63 @@
|
|||
const _ = require('lodash/fp')
|
||||
const pgp = require('pg-promise')()
|
||||
|
||||
module.exports = { logDispense, logActionById, logAction, logError }
|
||||
|
||||
// Log a dispense attempt: per-cassette/recycler counts plus any error, under
// the action name 'dispense' or 'dispenseError' depending on the outcome.
function logDispense(t, tx) {
  const action = _.isEmpty(tx.error) ? 'dispense' : 'dispenseError'
  const rec = _.merge(mapDispense(tx), {
    error: tx.error,
    error_code: tx.errorCode,
  })
  return logAction(t, action, rec, tx)
}
|
||||
|
||||
// Record a cash_out_actions row for a tx referenced only by id (never a
// redemption).
function logActionById(t, action, _rec, txId) {
  const row = _.assign(_rec, { action, tx_id: txId, redeem: false })
  return t.none(pgp.helpers.insert(row, null, 'cash_out_actions'))
}
|
||||
|
||||
// Record a cash_out_actions row for `tx` inside transaction `t`; resolves
// with the tx so the call chains transparently.
function logAction(t, action, _rec, tx) {
  const row = _.assign(_rec, {
    action,
    tx_id: tx.id,
    redeem: !!tx.redeem,
    device_id: tx.deviceId,
  })

  return t
    .none(pgp.helpers.insert(row, null, 'cash_out_actions'))
    .then(_.constant(tx))
}
|
||||
|
||||
// Record an error action for `tx` using the error's message and name.
function logError(t, action, err, tx) {
  const errorFields = {
    error: err.message,
    error_code: err.name,
  }
  return logAction(t, action, errorFields, tx)
}
|
||||
|
||||
// Flatten per-unit dispense counts into snake_cased columns, e.g.
// bills[0] named 'cassette1' yields provisioned_1 / denomination_1 /
// dispensed_1 / rejected_1. Returns {} when there are no bills.
//
// Fix: the index list was built with `_.times(_.identity(), n)`, which calls
// identity immediately and passes `undefined` as the iteratee, silently
// relying on lodash's implicit default. Pass the function itself.
function mapDispense(tx) {
  const bills = tx.bills

  if (_.isEmpty(bills)) return {}

  const res = {}

  _.forEach(
    idx => {
      const bill = bills[idx]
      // Column suffix: the unit name with any 'cassette' prefix removed.
      const suffix = _.snakeCase(bill.name.replace(/cassette/gi, ''))
      res[`provisioned_${suffix}`] = bill.provisioned
      res[`denomination_${suffix}`] = bill.denomination
      res[`dispensed_${suffix}`] = bill.dispensed
      res[`rejected_${suffix}`] = bill.rejected
    },
    _.times(_.identity, _.size(bills)),
  )

  return res
}
|
||||
220
packages/server/lib/cash-out/cash-out-atomic.js
Normal file
220
packages/server/lib/cash-out/cash-out-atomic.js
Normal file
|
|
@ -0,0 +1,220 @@
|
|||
const _ = require('lodash/fp')
|
||||
const pgp = require('pg-promise')()
|
||||
|
||||
const db = require('../db')
|
||||
const E = require('../error')
|
||||
const logger = require('../logger')
|
||||
|
||||
const helper = require('./cash-out-helper')
|
||||
const cashOutActions = require('./cash-out-actions')
|
||||
const cashOutLow = require('./cash-out-low')
|
||||
|
||||
const toObj = helper.toObj
|
||||
|
||||
module.exports = { atomic }
|
||||
|
||||
// Apply a cash-out tx update atomically: inside one serializable DB
// transaction, lock and load the current row, reject stale versions, then
// pre-process and upsert. `fromClient` distinguishes machine-originated
// updates (which bump txVersion) from server-originated ones (which don't).
function atomic(tx, pi, fromClient) {
  const TransactionMode = pgp.txMode.TransactionMode
  const isolationLevel = pgp.txMode.isolationLevel
  const mode = new TransactionMode({ tiLevel: isolationLevel.serializable })
  function transaction(t) {
    // FOR UPDATE locks the row for the duration of the transaction.
    const sql = 'SELECT * FROM cash_out_txs WHERE id=$1 FOR UPDATE'

    return t
      .oneOrNone(sql, [tx.id])
      .then(toObj)
      .then(oldTx => {
        // Client updates must carry a strictly newer version.
        const isStale = fromClient && oldTx && oldTx.txVersion >= tx.txVersion
        if (isStale) throw new E.StaleTxError({ txId: tx.id })

        // Server doesn't bump version, so we just prevent from version being older.
        const isStaleFromServer =
          !fromClient && oldTx && oldTx.txVersion > tx.txVersion
        if (isStaleFromServer)
          throw new Error('Stale Error: server triggered', tx.id)

        return preProcess(t, oldTx, tx, pi).then(preProcessedTx =>
          cashOutLow.upsert(t, oldTx, preProcessedTx),
        )
      })
  }
  return db.tx({ mode }, transaction)
}
|
||||
|
||||
// Prepare a cash-out tx before upserting. For a brand-new tx, provision a
// deposit address (HD-indexed when the wallet is HD) and log it. For an
// existing tx, ratchet the status and log exactly one of: status change,
// dispense outcome, phone added, or redeem-later. Resolves with the tx to
// upsert (or, on several branches, with logAction's result — which resolves
// back to the tx).
function preProcess(t, oldTx, newTx, pi) {
  if (!oldTx) {
    return pi
      .isHd(newTx)
      .then(isHd => nextHd(t, isHd, newTx))
      .then(newTxHd => {
        // Merge the provisioned address fields onto the tx.
        return pi.newAddress(newTxHd).then(_.merge(newTxHd))
      })
      .then(addressedTx => {
        const rec = {
          to_address: addressedTx.toAddress,
          layer_2_address: addressedTx.layer2Address,
        }

        return cashOutActions.logAction(t, 'provisionAddress', rec, addressedTx)
      })
      .catch(err => {
        // Notify best-effort, log the failure, then rethrow the original error.
        pi.notifyOperator(newTx, {
          isRedemption: false,
          error: 'Error while provisioning address',
        }).catch(err =>
          logger.error('Failure sending transaction notification', err),
        )
        return cashOutActions
          .logError(t, 'provisionAddress', err, newTx)
          .then(() => {
            throw err
          })
      })
  }

  return Promise.resolve(updateStatus(oldTx, newTx)).then(updatedTx => {
    if (updatedTx.status !== oldTx.status) {
      const isZeroConf = pi.isZeroConf(updatedTx)
      updatedTx.justAuthorized = wasJustAuthorized(oldTx, updatedTx, isZeroConf)

      const rec = {
        to_address: updatedTx.toAddress,
        tx_hash: updatedTx.txHash,
      }

      return cashOutActions.logAction(t, updatedTx.status, rec, updatedTx)
    }

    const hasError = !oldTx.error && newTx.error
    const hasDispenseOccurred =
      !oldTx.dispenseConfirmed && dispenseOccurred(newTx.bills)

    if (hasError || hasDispenseOccurred) {
      return cashOutActions
        .logDispense(t, updatedTx)
        .then(it => updateCassettes(t, updatedTx).then(() => it))
        .then(t => {
          // NOTE(review): this `t` is logDispense's result (the tx), shadowing
          // the outer transaction parameter — confusing but harmless here.
          pi.notifyOperator(updatedTx, { isRedemption: true }).catch(err =>
            logger.error('Failure sending transaction notification', err),
          )
          return t
        })
    }

    if (!oldTx.phone && newTx.phone) {
      return cashOutActions.logAction(t, 'addPhone', {}, updatedTx)
    }

    if (!oldTx.redeem && newTx.redeem) {
      return cashOutActions.logAction(t, 'redeemLater', {}, updatedTx)
    }

    return updatedTx
  })
}
|
||||
|
||||
// For HD wallets, assign the next HD index from the hd_indices_seq sequence;
// non-HD txs pass through unchanged.
function nextHd(t, isHd, tx) {
  if (!isHd) return Promise.resolve(tx)

  const sql = "select nextval('hd_indices_seq') as hd_index"
  return t.one(sql).then(row => _.set('hdIndex', row.hd_index, tx))
}
|
||||
|
||||
// Subtract dispensed + rejected bill counts from the device's per-unit
// columns. Bill i's count binds to SQL parameter $${i + 1}; the device id is
// the final parameter. No-op unless a dispense actually occurred.
function updateCassettes(t, tx) {
  if (!dispenseOccurred(tx.bills)) return Promise.resolve()

  // e.g. "cassette1 = cassette1 - $1, cassette2 = cassette2 - $2, ..."
  const billsStmt = _.join(', ')(
    _.map(it => `${tx.bills[it].name} = ${tx.bills[it].name} - $${it + 1}`)(
      _.range(0, _.size(tx.bills)),
    ),
  )
  const returnStmt = _.join(', ')(_.map(bill => `${bill.name}`)(tx.bills))

  const sql = `UPDATE devices SET ${billsStmt} WHERE device_id = $${_.size(tx.bills) + 1} RETURNING ${returnStmt}`

  const values = []

  // NOTE(review): `_.times(_.identity(), n)` passes undefined as the iteratee
  // (lodash falls back to identity); `_.times(_.identity, n)` is the intent.
  _.forEach(
    it => values.push(tx.bills[it].dispensed + tx.bills[it].rejected),
    _.times(_.identity(), _.size(tx.bills)),
  )

  values.push(tx.deviceId)

  return t.one(sql, values)
}
|
||||
|
||||
// True when this status transition crosses the "funds are good" threshold:
// for zero-conf txs that's reaching authorized/instant/confirmed from a
// pre-auth state; otherwise it requires reaching instant/confirmed.
function wasJustAuthorized(oldTx, newTx, isZeroConf) {
  const cameFrom = states => _.includes(oldTx.status, states)
  const arrivedAt = states => _.includes(newTx.status, states)

  if (isZeroConf) {
    return (
      cameFrom(['notSeen', 'published', 'rejected']) &&
      arrivedAt(['authorized', 'instant', 'confirmed'])
    )
  }

  return (
    cameFrom(['notSeen', 'published', 'authorized', 'rejected']) &&
    arrivedAt(['instant', 'confirmed'])
  )
}
|
||||
|
||||
// True for any lifecycle state at or beyond 'published'.
function isPublished(status) {
  const publishedOrLater = [
    'published',
    'rejected',
    'authorized',
    'instant',
    'confirmed',
  ]
  return _.includes(status, publishedOrLater)
}
|
||||
|
||||
// Only the terminal lifecycle state counts as confirmed.
function isConfirmed(status) {
  switch (status) {
    case 'confirmed':
      return true
    default:
      return false
  }
}
|
||||
|
||||
// Merge the incoming tx with a ratcheted status, stamping publishedAt /
// confirmedAt the first time each milestone is reached ('now()^' is
// pg-promise raw-text so the database clock supplies the timestamp).
function updateStatus(oldTx, newTx) {
  const status = ratchetStatus(oldTx.status, newTx.status)

  const stampIf = reached => (reached ? 'now()^' : undefined)
  const publishedAt = stampIf(!oldTx.publishedAt && isPublished(status))
  const confirmedAt = stampIf(!oldTx.confirmedAt && isConfirmed(status))

  return _.merge(newTx, { publishedAt, confirmedAt, status })
}
|
||||
|
||||
// Combine two statuses so the lifecycle can only move forward: whichever of
// the two is further along wins. Identical statuses pass through, and
// 'insufficientFunds' always takes effect immediately.
function ratchetStatus(oldStatus, newStatus) {
  const lifecycle = [
    'notSeen',
    'published',
    'rejected',
    'authorized',
    'instant',
    'confirmed',
  ]

  if (oldStatus === newStatus) return oldStatus
  if (newStatus === 'insufficientFunds') return newStatus

  const oldRank = lifecycle.indexOf(oldStatus)
  const newRank = lifecycle.indexOf(newStatus)
  return lifecycle[Math.max(oldRank, newRank)]
}
|
||||
|
||||
// True when every bill record carries both dispense outcome counts
// ('dispensed' and 'rejected'); an empty/missing bill set means no dispense.
function dispenseOccurred(bills) {
  if (_.isEmpty(bills)) return false

  const hasOutcome = _.overEvery([_.has('dispensed'), _.has('rejected')])
  return _.every(hasOutcome, bills)
}
|
||||
197
packages/server/lib/cash-out/cash-out-helper.js
Normal file
197
packages/server/lib/cash-out/cash-out-helper.js
Normal file
|
|
@ -0,0 +1,197 @@
|
|||
const _ = require('lodash/fp')
|
||||
|
||||
const db = require('../db')
|
||||
const T = require('../time')
|
||||
const BN = require('../bn')
|
||||
|
||||
// FP operations on Postgres result in very big errors.
// E.g.: 1853.013808 * 1000 = 1866149.494
// Age (in the unit used by the SQL below) past which an undispensed tx can
// no longer be redeemed. T.day is milliseconds; the SQL compares against
// epoch-seconds * 1000, hence the / 1000 here.
const REDEEMABLE_AGE = T.day / 1000

// SQL fragment mapping a cash_out_txs row to a human-readable state label.
const CASH_OUT_TRANSACTION_STATES = `
  case
    when error = 'Operator cancel' then 'Cancelled'
    when error is not null then 'Error'
    when dispense then 'Success'
    when (extract(epoch from (now() - greatest(created, confirmed_at))) * 1000) >= ${REDEEMABLE_AGE} then 'Expired'
    else 'Pending'
  end`

// Hardware limits: units per machine.
const MAX_CASSETTES = 4
const MAX_RECYCLERS = 6

// DB column names for per-unit denomination/provisioned counts, cassettes
// first, then recyclers.
const SNAKE_CASE_BILL_FIELDS = [
  'denomination_1',
  'denomination_2',
  'denomination_3',
  'denomination_4',
  'denomination_recycler_1',
  'denomination_recycler_2',
  'denomination_recycler_3',
  'denomination_recycler_4',
  'denomination_recycler_5',
  'denomination_recycler_6',
  'provisioned_1',
  'provisioned_2',
  'provisioned_3',
  'provisioned_4',
  'provisioned_recycler_1',
  'provisioned_recycler_2',
  'provisioned_recycler_3',
  'provisioned_recycler_4',
  'provisioned_recycler_5',
  'provisioned_recycler_6',
]

// Same fields in camelCase, as used on in-memory tx objects.
const BILL_FIELDS = _.map(_.camelCase, SNAKE_CASE_BILL_FIELDS)

module.exports = {
  redeemableTxs,
  toObj,
  toDb,
  REDEEMABLE_AGE,
  CASH_OUT_TRANSACTION_STATES,
}
|
||||
|
||||
const mapValuesWithKey = _.mapValues.convert({ cap: false })
|
||||
|
||||
// Stringify BigNumber-valued fields so Postgres stores them exactly, and
// tag the cryptoAtoms/fiat keys with a trailing '#'.
function convertBigNumFields(obj) {
  const REQUIRED_BIG_NUM_KEYS = [
    'cryptoAtoms',
    'receivedCryptoAtoms',
    'fiat',
    'fixedFee',
  ]
  const OPTIONAL_BIG_NUM_KEYS = ['commissionPercentage', 'rawTickerPrice']

  const convert = (value, key) => {
    if (_.includes(key, REQUIRED_BIG_NUM_KEYS)) {
      // BACKWARDS_COMPATIBILITY 10.1
      // bills before 10.2 don't have fixedFee
      if (key === 'fixedFee' && !value) return new BN(0).toString()
      return value.toString()
    }

    // Only test isNil for these fields since the others should not be empty.
    if (_.includes(key, OPTIONAL_BIG_NUM_KEYS) && !_.isNil(value)) {
      return value.toString()
    }

    return value
  }

  const convertKey = key =>
    _.includes(key, ['cryptoAtoms', 'fiat']) ? `${key}#` : key

  return _.mapKeys(convertKey, mapValuesWithKey(convert, obj))
}
|
||||
|
||||
// Convert a camelCase field name to its snake_case DB column name.
function convertField(key) {
  return _.snakeCase(key)
}

// Flatten tx.bills into per-unit DB columns (provisioned_* /
// denomination_*), zero-filling every bill column not present so the row
// is fully populated. No-op when the tx carries no bills.
function addDbBills(tx) {
  const bills = tx.bills
  if (_.isEmpty(bills)) return tx

  const billsObj = _.flow(
    // Bill names look like 'cassette1' or 'recycler2'; stripping 'cassette'
    // and snake_casing yields column suffixes like '1' or 'recycler_2'.
    _.reduce((acc, value) => {
      const suffix = _.snakeCase(value.name.replace(/cassette/gi, ''))
      return {
        ...acc,
        [`provisioned_${suffix}`]: value.provisioned,
        [`denomination_${suffix}`]: value.denomination,
      }
    }, {}),
    // Fill the columns not covered above with 0.
    it => {
      const missingKeys = _.reduce((acc, value) => {
        return _.assign({ [value]: 0 })(acc)
      }, {})(_.difference(SNAKE_CASE_BILL_FIELDS, _.keys(it)))
      return _.assign(missingKeys, it)
    },
  )(bills)

  return _.assign(tx, billsObj)
}
|
||||
|
||||
// Massage an in-memory cash-out tx into its DB row shape: big numbers
// become strings, bills flatten into columns, purely in-memory fields are
// dropped, and keys are snake_cased.
function toDb(tx) {
  const toRow = _.flow(
    convertBigNumFields,
    addDbBills,
    _.omit(['direction', 'bills', 'promoCodeApplied']),
    _.mapKeys(convertField),
  )
  return toRow(tx)
}
|
||||
|
||||
// Convert a cash_out_txs DB row into an in-memory tx object: camelCase
// keys, BigNumber money fields, direction tag, and the flat per-unit
// columns reassembled into a `bills` array.
function toObj(row) {
  if (!row) return null

  const keys = _.keys(row)
  let newObj = {}

  keys.forEach(key => {
    const objKey = _.camelCase(key)
    // received_crypto_atoms may be null; only wrap when present.
    if (key === 'received_crypto_atoms' && row[key]) {
      newObj[objKey] = new BN(row[key])
      return
    }
    // These columns are always numeric and become BigNumbers.
    if (
      _.includes(key, [
        'crypto_atoms',
        'fiat',
        'commission_percentage',
        'raw_ticker_price',
      ])
    ) {
      newObj[objKey] = new BN(row[key])
      return
    }

    newObj[objKey] = row[key]
  })

  newObj.direction = 'cashOut'

  // No bill columns set at all: cash-out not yet provisioned — return as-is.
  // Partially set bill columns indicate a corrupt row.
  if (_.every(_.isNil, _.at(BILL_FIELDS, newObj))) return newObj
  if (_.some(_.isNil, _.at(BILL_FIELDS, newObj)))
    throw new Error('Missing cassette values')

  // Rebuild one bill descriptor per cassette and recycler slot.
  const billFieldsArr = _.concat(
    _.map(it => ({
      name: `cassette${it + 1}`,
      denomination: newObj[`denomination${it + 1}`],
      provisioned: newObj[`provisioned${it + 1}`],
    }))(_.range(0, MAX_CASSETTES)),
    _.map(it => ({
      name: `recycler${it + 1}`,
      denomination: newObj[`denominationRecycler${it + 1}`],
      provisioned: newObj[`provisionedRecycler${it + 1}`],
    }))(_.range(0, MAX_RECYCLERS)),
  )

  // There can't be bills with denomination === 0.
  // If a bill has denomination === 0, then that cassette is not set and should be filtered out.
  const bills = _.filter(it => it.denomination > 0, billFieldsArr)

  return _.set('bills', bills, _.omit(BILL_FIELDS, newObj))
}

// Fetch this device's cash-out txs that are still redeemable: not yet
// dispensed, marked for redemption, provisioned in at least one unit, and
// younger than REDEEMABLE_AGE (epoch difference in seconds).
function redeemableTxs(deviceId) {
  const sql = `select * from cash_out_txs
    where device_id=$1
    and redeem=$2
    and dispense=$3
    and (
      provisioned_1 is not null or provisioned_2 is not null or provisioned_3 is not null or provisioned_4 is not null or
      provisioned_recycler_1 is not null or provisioned_recycler_2 is not null or
      provisioned_recycler_3 is not null or provisioned_recycler_4 is not null or
      provisioned_recycler_5 is not null or provisioned_recycler_6 is not null
    )
    and extract(epoch from (now() - greatest(created, confirmed_at))) < $4`

  return db.any(sql, [deviceId, true, false, REDEEMABLE_AGE]).then(_.map(toObj))
}
|
||||
98
packages/server/lib/cash-out/cash-out-low.js
Normal file
98
packages/server/lib/cash-out/cash-out-low.js
Normal file
|
|
@ -0,0 +1,98 @@
|
|||
const _ = require('lodash/fp')
const pgp = require('pg-promise')()

const helper = require('./cash-out-helper')
const { anonymousCustomer } = require('../constants')

const toDb = helper.toDb
const toObj = helper.toObj

// The only tx fields that update() will ever write back to the DB;
// everything else is immutable after insert.
const UPDATEABLE_FIELDS = [
  'txHash',
  'txVersion',
  'status',
  'dispense',
  'dispenseConfirmed',
  'notified',
  'redeem',
  'phone',
  'error',
  'swept',
  'publishedAt',
  'confirmedAt',
  'errorCode',
  'receivedCryptoAtoms',
  'walletScore',
  'customerId',
]

module.exports = { upsert, update, insert }
|
||||
|
||||
// Insert tx when it's new, otherwise persist only the changed fields.
// Resolves to [oldTx, newTx] (plus justAuthorized on the update path).
function upsert(t, oldTx, tx) {
  if (!oldTx) {
    return insert(t, tx).then(newTx => [oldTx, newTx])
  }

  const changes = diff(oldTx, tx)
  return update(t, tx, changes).then(newTx => [oldTx, newTx, tx.justAuthorized])
}

// Insert a new cash-out tx row and resolve with the stored tx object.
function insert(t, tx) {
  const row = toDb(tx)
  const sql = `${pgp.helpers.insert(row, null, 'cash_out_txs')} returning *`
  return t.one(sql).then(toObj)
}

// Persist `changes` for tx and resolve with the merged tx; a no-op when
// there is nothing to change.
function update(t, tx, changes) {
  if (_.isEmpty(changes)) return Promise.resolve(tx)

  const sql =
    pgp.helpers.update(toDb(changes), null, 'cash_out_txs') +
    pgp.as.format(' where id=$1', [tx.id])

  return t.none(sql).then(() => _.merge(tx, changes))
}
|
||||
|
||||
// Compute the subset of UPDATEABLE_FIELDS that should be written back to
// the DB when newTx supersedes oldTx.
function diff(oldTx, newTx) {
  let updatedTx = {}

  UPDATEABLE_FIELDS.forEach(fieldKey => {
    // Unchanged (null and undefined considered equal) — nothing to write.
    if (oldTx && _.isEqualWith(nilEqual, oldTx[fieldKey], newTx[fieldKey]))
      return

    // We never null out an existing field
    if (oldTx && _.isNil(newTx[fieldKey]))
      return (updatedTx[fieldKey] = oldTx[fieldKey])

    switch (fieldKey) {
      // Only allow replacing the anonymous placeholder customer.
      // NOTE(review): oldTx is dereferenced without a guard here; upsert
      // only calls diff with a truthy oldTx — confirm no other callers.
      case 'customerId':
        if (oldTx.customerId === anonymousCustomer.uuid) {
          return (updatedTx['customerId'] = newTx['customerId'])
        }
        return
      // prevent dispense changing from 'true' to 'false'
      case 'dispense':
        if (!oldTx.dispense) {
          return (updatedTx[fieldKey] = newTx[fieldKey])
        }
        return
      default:
        return (updatedTx[fieldKey] = newTx[fieldKey])
    }
  })

  return updatedTx
}
|
||||
|
||||
// Customizer for _.isEqualWith: treat null and undefined as equal to each
// other; returning undefined defers everything else to the default check.
function nilEqual(a, b) {
  return _.isNil(a) && _.isNil(b) ? true : undefined
}
|
||||
242
packages/server/lib/cash-out/cash-out-tx.js
Normal file
242
packages/server/lib/cash-out/cash-out-tx.js
Normal file
|
|
@ -0,0 +1,242 @@
|
|||
const _ = require('lodash/fp')
const pgp = require('pg-promise')()
const pEachSeries = require('p-each-series')

const db = require('../db')
const dbErrorCodes = require('../db-error-codes')
const billMath = require('../bill-math')
const T = require('../time')
const logger = require('../logger')
const plugins = require('../plugins')

const httpError = require('../route-helpers').httpError
const helper = require('./cash-out-helper')
const cashOutAtomic = require('./cash-out-atomic')
const cashOutActions = require('./cash-out-actions')
const cashOutLow = require('./cash-out-low')

module.exports = {
  post,
  monitorLiveIncoming,
  monitorStaleIncoming,
  monitorUnnotified,
  cancel,
}

// Age windows for the incoming-tx pollers (units come from lib/time).
const STALE_INCOMING_TX_AGE = T.day
const STALE_LIVE_INCOMING_TX_AGE = 10 * T.minutes
// Confirmation notifications are only sent for txs inside this age band.
const MAX_NOTIFY_AGE = T.day
const MIN_NOTIFY_AGE = 5 * T.minutes
// Non-standard HTTP status telling the machine we're out of bills.
const INSUFFICIENT_FUNDS_CODE = 570

const toObj = helper.toObj
|
||||
|
||||
// Server-initiated variant of post (fromClient = false).
function selfPost(tx, pi) {
  return post(tx, pi, false)
}

// Atomically merge an incoming tx update, run post-processing side effects
// (authorization, bill provisioning), and persist any resulting changes.
function post(tx, pi, fromClient = true) {
  logger.silly('Updating cashout -- tx:', JSON.stringify(tx))
  logger.silly('Updating cashout -- fromClient:', JSON.stringify(fromClient))

  return cashOutAtomic.atomic(tx, pi, fromClient).then(vector => {
    const newTx = vector[1]
    const justAuthorized = vector[2]
    return postProcess(vector, justAuthorized, pi).then(changes =>
      cashOutLow.update(db, newTx, changes),
    )
  })
}
|
||||
|
||||
// Side effects that follow an atomic cash-out update. On authorization:
// start the sell and notify the operator. When dispense/redeem just
// flipped on: compute the bills to dispense, log the provisioning action,
// and return { bills } to be merged into the tx. Otherwise resolves to {}.
function postProcess(txVector, justAuthorized, pi) {
  const [oldTx, newTx] = txVector

  if (justAuthorized) {
    pi.sell(newTx)
    pi.notifyOperator(newTx, { isRedemption: false }).catch(err =>
      logger.error('Failure sending transaction notification', err),
    )
  }

  if ((newTx.dispense && !oldTx.dispense) || (newTx.redeem && !oldTx.redeem)) {
    return pi
      .buildAvailableUnits(newTx.id)
      .then(units => {
        units = _.concat(units.cassettes, units.recyclers)
        logger.silly('Computing bills to dispense:', {
          txId: newTx.id,
          units: units,
          fiat: newTx.fiat,
        })
        const bills = billMath.makeChange(units, newTx.fiat)
        logger.silly('Bills to dispense:', JSON.stringify(bills))

        if (!bills) throw httpError('Out of bills', INSUFFICIENT_FUNDS_CODE)
        return bills
      })
      .then(bills => {
        const rec = {}

        // FIX: previously iterated `_.times(_.identity(), _.size(bills))`,
        // which *invoked* _.identity (passing an undefined iteratee) instead
        // of passing the function itself. Iterate the bills directly.
        _.forEach(bill => {
          const suffix = _.snakeCase(bill.name.replace(/cassette/gi, ''))
          rec[`provisioned_${suffix}`] = bill.provisioned
          rec[`denomination_${suffix}`] = bill.denomination
        }, bills)

        return cashOutActions
          .logAction(db, 'provisionNotes', rec, newTx)
          .then(_.constant({ bills }))
      })
      .catch(err => {
        // FIX: the notify failure handler previously shadowed `err`,
        // logging the notification error in place of the provisioning one.
        pi.notifyOperator(newTx, {
          error: err.message,
          isRedemption: true,
        }).catch(notifyErr =>
          logger.error('Failure sending transaction notification', notifyErr),
        )
        return cashOutActions
          .logError(db, 'provisionNotesError', err, newTx)
          .then(() => {
            throw err
          })
      })
  }

  return Promise.resolve({})
}
|
||||
|
||||
// Fetch open (non-cancelled) cash-out txs whose age in milliseconds falls
// strictly between fromAge and toAge and whose status is in `statuses`.
function fetchOpenTxs(statuses, fromAge, toAge) {
  const sql = `select *
  from cash_out_txs
  where ((extract(epoch from (now() - created))) * 1000)>$1
  and ((extract(epoch from (now() - created))) * 1000)<$2
  and status in ($3^)
  and error is distinct from 'Operator cancel'`

  // $3^ injects raw SQL; pgp.as.text quotes/escapes each status first,
  // so the IN-list is built safely.
  const statusClause = _.map(pgp.as.text, statuses).join(',')

  return db
    .any(sql, [fromAge, toAge, statusClause])
    .then(rows => rows.map(toObj))
}
|
||||
|
||||
// Refresh one tx from the wallet: pull its on-chain status, apply wallet
// scoring when relevant, then persist through the normal post pipeline.
function processTxStatus(tx, settings) {
  const pi = plugins(settings, tx.deviceId)

  return pi
    .getStatus(tx)
    .then(res => {
      const updates = {
        receivedCryptoAtoms: res.receivedCryptoAtoms,
        status: res.status,
      }
      return _.assign(tx, updates)
    })
    .then(updatedTx => getWalletScore(updatedTx, pi))
    .then(scoredTx => selfPost(scoredTx, pi))
}
|
||||
|
||||
// Attach a chain-analysis score to the tx, once, for statuses where the
// tx is visible on-chain. Returns the tx unchanged (synchronously) when
// scoring doesn't apply or a score is already present; callers consume it
// through a .then(), which absorbs both sync and async returns.
function getWalletScore(tx, pi) {
  const statuses = ['published', 'authorized', 'confirmed', 'insufficientFunds']

  if (!_.includes(tx.status, statuses) || !_.isNil(tx.walletScore)) {
    return tx
  }

  // Transaction shows up on the blockchain, we can request the sender address
  return pi.isWalletScoringEnabled(tx).then(isEnabled => {
    if (!isEnabled) return tx
    return pi
      .rateTransaction(tx)
      .then(res =>
        res.isValid
          ? _.assign(tx, { walletScore: res.score })
          : // Score over threshold: record the error and force dispense so
            // the customer isn't left without recourse.
            _.assign(tx, {
              walletScore: res.score,
              error: 'Chain analysis score is above defined threshold',
              errorCode: 'scoreThresholdReached',
              dispense: true,
            }),
      )
      .catch(error =>
        // Scoring service failure: assume the worst score, flag the error,
        // and still allow dispense.
        _.assign(tx, {
          walletScore: 10,
          error: `Failure getting address score: ${error.message}`,
          errorCode: 'walletScoringError',
          dispense: true,
        }),
      )
  })
}
|
||||
|
||||
// Poll fresh incoming txs (< 10 minutes old) that may still settle fast.
function monitorLiveIncoming(settings) {
  const statuses = ['notSeen', 'published', 'insufficientFunds']
  return monitorIncoming(settings, statuses, 0, STALE_LIVE_INCOMING_TX_AGE)
}

// Poll older incoming txs (10 minutes to 1 day old) across all open states.
function monitorStaleIncoming(settings) {
  const statuses = [
    'notSeen',
    'published',
    'authorized',
    'instant',
    'rejected',
    'insufficientFunds',
  ]
  return monitorIncoming(
    settings,
    statuses,
    STALE_LIVE_INCOMING_TX_AGE,
    STALE_INCOMING_TX_AGE,
  )
}

// Fetch open txs in the given age window and process them one at a time.
// DB serialization conflicts are expected here and merely retried on the
// next poll, so they're logged as warnings rather than errors.
function monitorIncoming(settings, statuses, fromAge, toAge) {
  return fetchOpenTxs(statuses, fromAge, toAge)
    .then(txs => pEachSeries(txs, tx => processTxStatus(tx, settings)))
    .catch(err => {
      if (err.code === dbErrorCodes.SERIALIZATION_FAILURE) {
        logger.warn('Harmless DB conflict, the query will be retried.')
        return
      }
      logger.error(err)
    })
}
|
||||
|
||||
// Send confirmation notifications for dispensable txs with a phone number
// that haven't been notified yet: younger than MAX_NOTIFY_AGE and either
// marked for redemption or older than MIN_NOTIFY_AGE.
function monitorUnnotified(settings) {
  const sql = `select *
  from cash_out_txs
  where ((extract(epoch from (now() - created))) * 1000)<$1
  and notified=$2 and dispense=$3
  and phone is not null
  and status in ('instant', 'confirmed')
  and (redeem=$4 or ((extract(epoch from (now() - created))) * 1000)>$5)`

  const notify = tx => plugins(settings, tx.deviceId).notifyConfirmation(tx)
  return db
    .any(sql, [MAX_NOTIFY_AGE, false, false, true, MIN_NOTIFY_AGE])
    .then(rows => _.map(toObj, rows))
    .then(txs => Promise.all(txs.map(notify)))
    .catch(logger.error)
}
|
||||
|
||||
// Operator-initiated cancel: flag the tx with the 'Operator cancel' error,
// mark it dispensed so it can't be redeemed, and log the action.
// Rejects when no row matches txId.
function cancel(txId) {
  const updateRec = {
    error: 'Operator cancel',
    error_code: 'operatorCancel',
    dispense: true,
  }

  // Build the SQL inside the chain so any synchronous pgp failure becomes
  // a rejection rather than a throw.
  return Promise.resolve()
    .then(
      () =>
        pgp.helpers.update(updateRec, null, 'cash_out_txs') +
        pgp.as.format(' where id=$1', [txId]),
    )
    .then(sql => db.result(sql, false))
    .then(res => {
      if (res.rowCount !== 1) throw new Error('No such tx-id')
    })
    .then(() => cashOutActions.logActionById(db, 'operatorCompleted', {}, txId))
}
|
||||
223
packages/server/lib/cashbox-batches.js
Normal file
223
packages/server/lib/cashbox-batches.js
Normal file
|
|
@ -0,0 +1,223 @@
|
|||
const constants = require('./constants')
const db = require('./db')
const _ = require('lodash/fp')
const uuid = require('uuid')
// NOTE(review): './utils' is imported whole under the name `camelize`;
// presumably the module itself is (or exports) the camelizing function —
// confirm against lib/utils.
const camelize = require('./utils')

// Open a new cash-box-empty batch for a device: create the operation row
// and claim every not-yet-batched cashbox bill (from both cash-in txs and
// unit-emptying) for it. Resolves with the new operation row.
function createCashboxBatch(deviceId, cashboxCount) {
  if (_.isEqual(0, cashboxCount))
    throw new Error('Cash box is empty. Cash box batch could not be created.')
  const sql = `INSERT INTO cash_unit_operation (id, device_id, created, operation_type) VALUES ($1, $2, now(), 'cash-box-empty') RETURNING *`
  const sql2 = `
    UPDATE bills SET cashbox_batch_id=$1
    FROM cash_in_txs
    WHERE bills.cash_in_txs_id = cash_in_txs.id AND
    cash_in_txs.device_id = $2 AND
    bills.destination_unit = 'cashbox' AND
    bills.cashbox_batch_id IS NULL
  `
  const sql3 = `
    UPDATE empty_unit_bills SET cashbox_batch_id=$1
    WHERE empty_unit_bills.device_id = $2 AND empty_unit_bills.cashbox_batch_id IS NULL`

  return db.tx(t => {
    const batchId = uuid.v4()
    const q1 = t.one(sql, [batchId, deviceId])
    const q2 = t.none(sql2, [batchId, deviceId])
    const q3 = t.none(sql3, [batchId, deviceId])
    // Only the first result (the new operation row) is surfaced.
    return t.batch([q1, q2, q3]).then(([it]) => it)
  })
}
|
||||
|
||||
// Create a cash-box-empty batch AND update the device's cash-unit counts
// in one transaction (used when the machine reports its post-empty state).
function updateMachineWithBatch(machineContext, oldCashboxCount) {
  const cashUnits = machineContext.cashUnits
  const cashUnitNames = [
    'cashbox',
    'cassette1',
    'cassette2',
    'cassette3',
    'cassette4',
    'recycler1',
    'recycler2',
    'recycler3',
    'recycler4',
    'recycler5',
    'recycler6',
  ]
  // NOTE(review): with lodash/fp, _.has(array, obj) treats the array as a
  // single deep *path* (machineContext.deviceId.cashUnits), not as "has all
  // of these keys" — both checks below therefore likely never validate what
  // was intended. Confirm before changing, since the guard further down
  // only fires when this AND the cassette-range check fail together
  // (NOTE(review): that `&&` also reads like it should be `||`).
  const isValidContext =
    _.has(['deviceId', 'cashUnits'], machineContext) &&
    _.has(cashUnitNames, cashUnits)
  const cassettes = _.filter(it => !_.isNil(it))([
    cashUnits.cassette1,
    cashUnits.cassette2,
    cashUnits.cassette3,
    cashUnits.cassette4,
  ])
  const isCassetteAmountWithinRange = _.inRange(
    constants.CASH_OUT_MINIMUM_AMOUNT_OF_CASSETTES,
    constants.CASH_OUT_MAXIMUM_AMOUNT_OF_CASSETTES + 1,
    _.size(cassettes),
  )
  if (!isValidContext && !isCassetteAmountWithinRange)
    throw new Error('Insufficient info to create a new cashbox batch')
  if (_.isEqual(0, oldCashboxCount))
    throw new Error('Cash box is empty. Cash box batch could not be created.')

  return db.tx(t => {
    const deviceId = machineContext.deviceId
    const batchId = uuid.v4()
    // Record the empty operation.
    const q1 = t.none(
      `INSERT INTO cash_unit_operation (id, device_id, created, operation_type) VALUES ($1, $2, now(), 'cash-box-empty')`,
      [batchId, deviceId],
    )
    // Claim un-batched cashbox bills from cash-in txs.
    const q2 = t.none(
      `UPDATE bills SET cashbox_batch_id=$1 FROM cash_in_txs
      WHERE bills.cash_in_txs_id = cash_in_txs.id AND
      cash_in_txs.device_id = $2 AND
      bills.destination_unit = 'cashbox' AND
      bills.cashbox_batch_id IS NULL`,
      [batchId, deviceId],
    )
    // Claim un-batched bills produced by emptying other units.
    const q3 = t.none(
      `UPDATE empty_unit_bills SET cashbox_batch_id=$1
      WHERE empty_unit_bills.device_id = $2 AND empty_unit_bills.cashbox_batch_id IS NULL`,
      [batchId, deviceId],
    )
    // Store the machine's new unit counts; recycler counts fall back to
    // their existing value when the machine didn't report them.
    const q4 = t.none(
      `
      UPDATE devices SET cassette1=$1, cassette2=$2, cassette3=$3, cassette4=$4,
      recycler1=coalesce($5, recycler1), recycler2=coalesce($6, recycler2), recycler3=coalesce($7, recycler3),
      recycler4=coalesce($8, recycler4), recycler5=coalesce($9, recycler5), recycler6=coalesce($10, recycler6) WHERE device_id=$11
      `,
      [
        cashUnits.cassette1,
        cashUnits.cassette2,
        cashUnits.cassette3,
        cashUnits.cassette4,
        cashUnits.recycler1,
        cashUnits.recycler2,
        cashUnits.recycler3,
        cashUnits.recycler4,
        cashUnits.recycler5,
        cashUnits.recycler6,
        machineContext.deviceId,
      ],
    )

    return t.batch([q1, q2, q3, q4])
  })
}
|
||||
|
||||
// List cash-box-empty operations in [from, until] (ISO strings; defaults
// to all time), each with its bill count and summed fiat. Bills come from
// both cash-in txs and unit-empty operations, unioned before aggregation.
function getBatches(
  from = new Date(0).toISOString(),
  until = new Date().toISOString(),
) {
  const sql = `
    SELECT
      cuo.id,
      cuo.device_id,
      cuo.created,
      cuo.operation_type,
      cuo.bill_count_override,
      cuo.performed_by,
      COUNT(bi.id) AS bill_count,
      COALESCE(SUM(bi.fiat), 0) AS fiat_total
    FROM cash_unit_operation AS cuo
    LEFT JOIN (
      SELECT b.id, b.fiat, b.fiat_code, b.created, b.cashbox_batch_id, cit.device_id AS device_id FROM bills b LEFT OUTER JOIN (SELECT id, device_id FROM cash_in_txs) AS cit ON cit.id = b.cash_in_txs_id UNION
      SELECT id, fiat, fiat_code, created, cashbox_batch_id, device_id FROM empty_unit_bills
    ) AS bi ON cuo.id = bi.cashbox_batch_id
    WHERE cuo.created >= $1 AND cuo.created <= $2 AND cuo.operation_type = 'cash-box-empty'
    GROUP BY cuo.id, cuo.device_id, cuo.created, cuo.operation_type, cuo.bill_count_override, cuo.performed_by
    ORDER BY cuo.created DESC
  `

  return db.any(sql, [from, until]).then(camelize)
}
|
||||
|
||||
// Record which operator performed a given cash-box-empty operation.
// FIX: the WHERE clause previously referenced `cuo.operation_type`, but
// the alias `cuo` is never defined in this UPDATE statement, so the query
// failed with an SQL error on every call.
function editBatchById(id, performedBy) {
  const sql = `UPDATE cash_unit_operation SET performed_by=$1 WHERE id=$2 AND operation_type = 'cash-box-empty'`
  return db.none(sql, [performedBy, id])
}
|
||||
|
||||
// Project cashbox batch rows down to the fields exposed in operator logs.
function logFormatter(data) {
  const pick = batch => ({
    id: batch.id,
    deviceId: batch.deviceId,
    created: batch.created,
    operationType: batch.operationType,
    billCount: batch.billCount,
    fiatTotal: batch.fiatTotal,
  })
  return _.map(pick, data)
}
|
||||
|
||||
// Collect a device's cash-in bills that haven't been assigned to any
// cashbox batch yet, formatted like a (virtual, id-less) batch.
function getMachineUnbatchedBills(deviceId) {
  const sql = `
    SELECT now() AS created, cash_in_txs.device_id, json_agg(b.*) AS bills FROM bills b LEFT OUTER JOIN cash_in_txs
    ON b.cash_in_txs_id = cash_in_txs.id
    WHERE b.cashbox_batch_id IS NULL AND cash_in_txs.device_id = $1
    GROUP BY cash_in_txs.device_id
  `

  return db
    .oneOrNone(sql, [deviceId])
    .then(res => _.mapKeys(it => _.camelCase(it), res))
    .then(logFormatterSingle)
}

// Fetch one cashbox batch with its bills aggregated as JSON.
function getBatchById(id) {
  const sql = `
    SELECT cb.id, cb.device_id, cb.created, cb.operation_type, cb.bill_count_override, cb.performed_by, json_agg(b.*) AS bills
    FROM cash_unit_operation AS cb
    LEFT JOIN bills AS b ON cb.id = b.cashbox_batch_id
    WHERE cb.id = $1
    GROUP BY cb.id
  `

  return db
    .oneOrNone(sql, [id])
    .then(res => _.mapKeys(it => _.camelCase(it), res))
    .then(logFormatterSingle)
}

// Format one batch for the logs: drop malformed bill entries (json_agg
// yields [null] when the LEFT JOIN matches nothing), then summarize counts
// and per-currency fiat totals.
function logFormatterSingle(data) {
  const bills = _.filter(
    it =>
      !(
        _.isNil(it) ||
        _.isNil(it.fiat_code) ||
        _.isNil(it.fiat) ||
        _.isNaN(it.fiat)
      ),
    data.bills,
  )

  return {
    id: data.id,
    deviceId: data.deviceId,
    created: data.created,
    operationType: data.operationType,
    billCount: _.size(bills),
    // Sum of fiat per currency code, e.g. { USD: 120, EUR: 50 }.
    fiatTotals: _.reduce(
      (acc, value) => {
        acc[value.fiat_code] = (acc[value.fiat_code] || 0) + value.fiat
        return acc
      },
      {},
      bills,
    ),
    // Tally of bills keyed by "<denomination> <currency>".
    billsByDenomination: _.countBy(it => `${it.fiat} ${it.fiat_code}`, bills),
  }
}

module.exports = {
  createCashboxBatch,
  updateMachineWithBatch,
  getBatches,
  editBatchById,
  getBatchById,
  getMachineUnbatchedBills,
  logFormatter,
}
|
||||
104
packages/server/lib/coin-change.js
Normal file
104
packages/server/lib/coin-change.js
Normal file
|
|
@ -0,0 +1,104 @@
|
|||
/*
 * Greedy solver of the coin change problem, based on the following CHICKEN
 * implementation: https://git.sr.ht/~siiky/coin-change
 */

/*
 * prepare_denominations([[d0, count], [d1, count], ...])
 * => [{ denom, count, csum }, ...]
 *
 * Drops empty denominations and emits one descriptor per denomination,
 * ordered from largest to smallest, where csum is the cumulative value
 * (denom * count) of this denomination plus every smaller one — the
 * solver uses csum to prune unreachable targets.
 *
 * FIX: the defensive copy was a JSON.parse(JSON.stringify(...)) round-trip;
 * a plain pair copy is faster and avoids JSON's value restrictions while
 * still protecting the caller's array from the in-place sort.
 */
const prepare_denominations = denominations =>
  denominations
    .map(([denom, count]) => [denom, count])
    .filter(([, count]) => count > 0)
    .sort(([d1], [d2]) => d1 - d2)
    .reduce(
      ([csum, denoms], [denom, count]) => {
        csum += denom * count
        return [csum, [{ denom, count, csum }].concat(denoms)]
      },
      [0, []],
    )[1] /* ([csum, denoms]) => denoms */
|
||||
|
||||
/* Largest number of `denom` notes (bounded by the `count` available) that
 * fits into `target`. */
const max_denomination_multiplicity = (denom, count, target) => {
  const fitting = Math.floor(target / denom)
  return Math.min(count, fitting)
}
|
||||
|
||||
/*
 * @returns null if there's no solution set;
 *          false if there's no solution;
 *          solution if there's a solution
 */
const memo_get = (memo, target, denom) => {
  const byDenom = memo[target]
  if (byDenom === undefined) return null
  const cached = byDenom[denom]
  return cached === undefined ? null : cached
}

/* Record the outcome for a (target, denom) pair and return it. */
const memo_set = (memo, target, denom, solution) => {
  if (memo[target] === undefined) memo[target] = {}
  return (memo[target][denom] = solution)
}
|
||||
|
||||
/* A solution is valid when its provisioned notes sum exactly to `target`;
 * the no-solution marker (false) is vacuously valid. */
const check = (solution, target) => {
  if (!solution) return true
  const total = solution.reduce(
    (sum, [denom, provisioned]) => sum + denom * provisioned,
    0,
  )
  return total === target
}

/* Build a solver model: prepared denominations plus a fresh memo table. */
const model = denominations => ({
  denominations: prepare_denominations(denominations),
  memo: {},
})
|
||||
|
||||
/*
 * target :: Int
 * denominations :: [[d0, count], [d1, count], ...]
 *
 * @returns [[d0, provisioned], [d1, provisioned], ... ];
 *          false if there's no solution.
 *
 * Depth-first search over denominations (largest first), trying the most
 * notes of each denomination first and memoizing (target, denom) outcomes
 * in model.memo across calls.
 */
const solve = (model, target) => {
  const { denominations, memo } = model

  const coin_change = (didx, target) => {
    /* Nothing left to make up — empty solution. */
    if (target === 0) return []

    for (; didx < denominations.length; didx++) {
      const { denom, count, csum } = denominations[didx]

      /*
       * There's no solution if the target is greater than the cumulative sum
       * of the denominations, or if the target is not divisible by any of the
       * denominations
       */
      if (target > csum) return memo_set(memo, target, denom, false)

      let solution = memo_get(memo, target, denom)
      if (solution === false) continue /* not here, keep looking */
      if (solution) return solution /* we've previously computed a solution */

      /* solution === null */
      /* Try using as many of this denomination as possible, backing off
       * one note at a time and recursing on the smaller denominations. */
      for (
        let nd = max_denomination_multiplicity(denom, count, target);
        nd >= 0;
        nd--
      ) {
        solution = coin_change(didx + 1, target - denom * nd)
        if (solution)
          return memo_set(memo, target, denom, [[denom, nd]].concat(solution))
      }

      /* Exhausted this denomination for this target — remember the failure. */
      memo_set(memo, target, denom, false)
    }

    return false
  }

  return coin_change(0, target)
}

module.exports = {
  check,
  model,
  solve,
}
|
||||
179
packages/server/lib/coinatmradar/coinatmradar.js
Normal file
179
packages/server/lib/coinatmradar/coinatmradar.js
Normal file
|
|
@ -0,0 +1,179 @@
|
|||
const axios = require('axios')
const _ = require('lodash/fp')

const db = require('../db')
const configManager = require('../new-config-manager')
const complianceTriggers = require('../compliance-triggers')
const logger = require('../logger')
const plugins = require('../plugins')
const { getOperatorId } = require('../operator')
const machineSettings = require('../machine-settings')

// HTTP client limits for the CoinATMRadar upload (ms / bytes).
const TIMEOUT = 10000
const MAX_CONTENT_LENGTH = 2000

const COIN_ATM_RADAR_URL = process.env.COIN_ATM_RADAR_URL

// How long a machine can be down before it's considered offline
const STALE_INTERVAL = '2 minutes'

module.exports = { update }
|
||||
|
||||
// Build per-coin pricing info for one machine. Fees and rates are only
// published when the operator opted in via the coinAtmRadar config;
// otherwise they're reported as null.
function mapCoin(rates, deviceId, settings, cryptoCode) {
  const config = settings.config
  const builtRates = plugins(settings, deviceId).buildRates(rates)[
    cryptoCode
  ] || { cashIn: null, cashOut: null }
  const commissions = configManager.getCommissions(cryptoCode, deviceId, config)
  const coinAtmRadar = configManager.getCoinAtmRadar(config)

  const showCommissions = coinAtmRadar.commissions
  const orNull = value => (showCommissions ? value : null)

  return {
    cryptoCode,
    cashInFee: orNull(commissions.cashIn / 100),
    cashOutFee: orNull(commissions.cashOut / 100),
    cashInFixedFee: orNull(commissions.fixedFee),
    cashOutFixedFee: orNull(commissions.cashOutFixedFee),
    cashInRate: orNull(_.invoke('cashIn.toNumber', builtRates)),
    cashOutRate: orNull(_.invoke('cashOut.toNumber', builtRates)),
  }
}

// Which identification methods this machine's compliance triggers enable.
// Palm-vein and fingerprint are not supported, hence always false.
function mapIdentification(triggers) {
  return {
    isPhone: complianceTriggers.hasPhone(triggers),
    isPalmVein: false,
    isPhoto: complianceTriggers.hasFacephoto(triggers),
    isIdDocScan: complianceTriggers.hasIdScan(triggers),
    isFingerprint: false,
  }
}
||||
|
||||
// Build the CoinATMRadar record for one machine row. Limits are only
// shared when the operator opted in; address/location are intentionally
// left blank.
function mapMachine(rates, settings, machineRow, triggers) {
  const deviceId = machineRow.device_id
  const config = settings.config

  const coinAtmRadar = configManager.getCoinAtmRadar(config)
  const locale = configManager.getLocale(deviceId, config)
  const cashOutConfig = configManager.getCashOut(deviceId, config)
  const cashOutEnabled = cashOutConfig.active ? cashOutConfig.active : false

  const lastOnline = machineRow.last_online.toISOString()
  // Despite the name, the `stale` column is aliased in getMachines as
  // `now() - last_online < STALE_INTERVAL`, i.e. TRUE means recently seen,
  // so stale=true really does map to 'online'.
  const status = machineRow.stale ? 'online' : 'offline'
  const showLimitsAndVerification = coinAtmRadar.limitsAndVerification
  const cashLimit = showLimitsAndVerification
    ? complianceTriggers.getCashLimit(triggers)
    : null
  const cryptoCurrencies = locale.cryptoCurrencies
  const identification = mapIdentification(triggers)
  const coins = _.map(
    _.partial(mapCoin, [rates, deviceId, settings]),
    cryptoCurrencies,
  )
  return {
    machineId: deviceId,
    address: {
      streetAddress: null,
      city: null,
      region: null,
      postalCode: null,
      country: null,
    },
    location: {
      name: null,
      url: null,
      phone: null,
    },
    status,
    lastOnline,
    cashIn: true,
    cashOut: cashOutEnabled,
    manufacturer: 'lamassu',
    cashInTxLimit: cashLimit,
    cashOutTxLimit: cashLimit,
    cashInDailyLimit: cashLimit,
    cashOutDailyLimit: cashLimit,
    fiatCurrency: locale.fiatCurrency,
    identification,
    coins,
  }
}

// Map every paired, displayed device into a CoinATMRadar machine record.
// The `stale` alias is TRUE when the machine was seen within STALE_INTERVAL.
function getMachines(rates, settings) {
  const sql = `select device_id, last_online, now() - last_online < $1 as stale from devices
  where display=TRUE and
  paired=TRUE
  order by created`
  return db
    .any(sql, [STALE_INTERVAL])
    .then(machines =>
      Promise.all(
        machines.map(machine =>
          machineSettings
            .getOrUpdate(machine.device_id)
            .then(({ complianceTriggers }) =>
              mapMachine(rates, settings, machine, complianceTriggers),
            ),
        ),
      ),
    )
}
|
||||
|
||||
function sendRadar(data) {
|
||||
const url = COIN_ATM_RADAR_URL
|
||||
|
||||
if (_.isEmpty(url)) {
|
||||
return Promise.reject(new Error('Missing coinAtmRadar url!'))
|
||||
}
|
||||
|
||||
const config = {
|
||||
url,
|
||||
method: 'post',
|
||||
data,
|
||||
timeout: TIMEOUT,
|
||||
maxContentLength: MAX_CONTENT_LENGTH,
|
||||
}
|
||||
|
||||
return axios.default(config).then(r => logger.info(r.status))
|
||||
}
|
||||
|
||||
/**
 * Build the full CoinATMRadar payload: operator id, an (intentionally
 * blank) operator contact block, a timestamp, and all mapped machines.
 */
function mapRecord(rates, settings) {
  const timestamp = new Date().toISOString()
  const machinesPromise = getMachines(rates, settings)
  const operatorIdPromise = getOperatorId('coinatmradar')

  return Promise.all([machinesPromise, operatorIdPromise]).then(
    ([machines, operatorId]) => ({
      operatorId,
      operator: {
        name: null,
        phone: null,
        email: null,
      },
      timestamp,
      machines,
    }),
  )
}
|
||||
|
||||
/**
 * Push the current machine state to CoinATMRadar, if the integration is
 * enabled. Failures are logged, never thrown.
 */
function update(rates, settings) {
  const { active } = configManager.getCoinAtmRadar(settings.config)
  if (!active) return Promise.resolve()

  return mapRecord(rates, settings)
    .then(sendRadar)
    .catch(err => logger.error(`Failure to update CoinATMRadar`, err))
}
|
||||
265
packages/server/lib/coinatmradar/test/coinatmradar.test.js
Normal file
265
packages/server/lib/coinatmradar/test/coinatmradar.test.js
Normal file
|
|
@ -0,0 +1,265 @@
|
|||
const yup = require('yup')
|
||||
const BN = require('../../../lib/bn')
|
||||
const car = require('../coinatmradar')
|
||||
const db = require('../../db')
|
||||
|
||||
jest.mock('../../db')
|
||||
|
||||
afterEach(() => {
|
||||
// https://stackoverflow.com/questions/58151010/difference-between-resetallmocks-resetmodules-resetmoduleregistry-restoreallm
|
||||
jest.restoreAllMocks()
|
||||
})
|
||||
|
||||
const settings = {
|
||||
config: {
|
||||
wallets_BTC_coin: 'BTC',
|
||||
wallets_BTC_wallet: 'mock-wallet',
|
||||
wallets_BTC_ticker: 'kraken',
|
||||
wallets_BTC_exchange: 'mock-exchange',
|
||||
wallets_BTC_zeroConf: 'all-zero-conf',
|
||||
locale_id: '1983951f-6c73-4308-ae6e-f6f56dfa5e11',
|
||||
locale_country: 'US',
|
||||
locale_fiatCurrency: 'USD',
|
||||
locale_languages: ['en-US'],
|
||||
locale_cryptoCurrencies: ['BTC', 'ETH', 'LTC', 'DASH', 'ZEC', 'BCH'],
|
||||
commissions_minimumTx: 1,
|
||||
commissions_fixedFee: 2,
|
||||
commissions_cashOut: 11,
|
||||
commissions_cashIn: 11,
|
||||
commissions_id: '960bb192-db37-40eb-9b59-2c2c78620de6',
|
||||
wallets_ETH_active: true,
|
||||
wallets_ETH_ticker: 'bitstamp',
|
||||
wallets_ETH_wallet: 'mock-wallet',
|
||||
wallets_ETH_exchange: 'mock-exchange',
|
||||
wallets_ETH_zeroConf: 'mock-zero-conf',
|
||||
wallets_LTC_active: true,
|
||||
wallets_LTC_ticker: 'kraken',
|
||||
wallets_LTC_wallet: 'mock-wallet',
|
||||
wallets_LTC_exchange: 'mock-exchange',
|
||||
wallets_LTC_zeroConf: 'mock-zero-conf',
|
||||
wallets_DASH_active: true,
|
||||
wallets_DASH_ticker: 'binance',
|
||||
wallets_DASH_wallet: 'mock-wallet',
|
||||
wallets_DASH_exchange: 'mock-exchange',
|
||||
wallets_DASH_zeroConf: 'mock-zero-conf',
|
||||
wallets_ZEC_active: true,
|
||||
wallets_ZEC_ticker: 'binance',
|
||||
wallets_ZEC_wallet: 'mock-wallet',
|
||||
wallets_ZEC_exchange: 'mock-exchange',
|
||||
wallets_ZEC_zeroConf: 'mock-zero-conf',
|
||||
wallets_BCH_active: true,
|
||||
wallets_BCH_ticker: 'bitpay',
|
||||
wallets_BCH_wallet: 'mock-wallet',
|
||||
wallets_BCH_exchange: 'mock-exchange',
|
||||
wallets_BCH_zeroConf: 'mock-zero-conf',
|
||||
wallets_BTC_zeroConfLimit: 50,
|
||||
wallets_ETH_zeroConfLimit: 50,
|
||||
wallets_LTC_zeroConfLimit: 50,
|
||||
wallets_BCH_zeroConfLimit: 50,
|
||||
wallets_DASH_zeroConfLimit: 50,
|
||||
wallets_ZEC_zeroConfLimit: 50,
|
||||
cashOut_7e531a2666987aa27b9917ca17df7998f72771c57fdb21c90bc033999edd17e4_zeroConfLimit: 50,
|
||||
cashOut_7e531a2666987aa27b9917ca17df7998f72771c57fdb21c90bc033999edd17e4_bottom: 20,
|
||||
cashOut_7e531a2666987aa27b9917ca17df7998f72771c57fdb21c90bc033999edd17e4_top: 5,
|
||||
cashOut_7e531a2666987aa27b9917ca17df7998f72771c57fdb21c90bc033999edd17e4_active: true,
|
||||
cashOut_f02af604ca9010bd9ae04c427a24da90130da10d355f0a9b235886a89008fc05_zeroConfLimit: 200,
|
||||
cashOut_f02af604ca9010bd9ae04c427a24da90130da10d355f0a9b235886a89008fc05_bottom: 20,
|
||||
cashOut_f02af604ca9010bd9ae04c427a24da90130da10d355f0a9b235886a89008fc05_top: 5,
|
||||
cashOut_f02af604ca9010bd9ae04c427a24da90130da10d355f0a9b235886a89008fc05_active: true,
|
||||
notifications_email_active: false,
|
||||
notifications_sms_active: true,
|
||||
notifications_email_errors: false,
|
||||
notifications_sms_errors: true,
|
||||
coinAtmRadar_active: true,
|
||||
coinAtmRadar_commissions: true,
|
||||
coinAtmRadar_limitsAndVerification: true,
|
||||
triggers: [
|
||||
{
|
||||
requirement: 'suspend',
|
||||
suspensionDays: 1,
|
||||
threshold: 123,
|
||||
id: '9c3b5af8-b1d1-4125-b169-0e913b33894c',
|
||||
direction: 'both',
|
||||
triggerType: 'txAmount',
|
||||
},
|
||||
{
|
||||
requirement: 'sms',
|
||||
threshold: 999,
|
||||
thresholdDays: 1,
|
||||
id: 'b0e1e6a8-be1b-4e43-ac5f-3e4951e86f8b',
|
||||
direction: 'both',
|
||||
triggerType: 'txVelocity',
|
||||
},
|
||||
{
|
||||
requirement: 'sms',
|
||||
threshold: 888,
|
||||
thresholdDays: 1,
|
||||
id: '6ac38fe6-172c-48a4-8a7f-605213cbd600',
|
||||
direction: 'both',
|
||||
triggerType: 'txVolume',
|
||||
},
|
||||
],
|
||||
notifications_sms_transactions: true,
|
||||
notifications_highValueTransaction: 50,
|
||||
},
|
||||
accounts: {},
|
||||
}
|
||||
|
||||
const rates = [
|
||||
{
|
||||
rates: {
|
||||
ask: new BN(19164.3),
|
||||
bid: new BN(19164.2),
|
||||
},
|
||||
timestamp: +new Date(),
|
||||
},
|
||||
{
|
||||
rates: {
|
||||
ask: new BN(594.54),
|
||||
bid: new BN(594.09),
|
||||
},
|
||||
timestamp: +new Date(),
|
||||
},
|
||||
{
|
||||
rates: {
|
||||
ask: new BN(84.38),
|
||||
bid: new BN(84.37),
|
||||
},
|
||||
timestamp: +new Date(),
|
||||
},
|
||||
{
|
||||
rates: {
|
||||
ask: new BN(102.8),
|
||||
bid: new BN(101.64),
|
||||
},
|
||||
timestamp: +new Date(),
|
||||
},
|
||||
{
|
||||
rates: {
|
||||
ask: new BN(74.91),
|
||||
bid: new BN(74.12),
|
||||
},
|
||||
timestamp: +new Date(),
|
||||
},
|
||||
{
|
||||
rates: {
|
||||
ask: new BN(284.4),
|
||||
bid: new BN(284.4),
|
||||
},
|
||||
timestamp: +new Date(),
|
||||
},
|
||||
]
|
||||
|
||||
const dbResponse = [
|
||||
{
|
||||
device_id:
|
||||
'mock7e531a2666987aa27b9917ca17df7998f72771c57fdb21c90bc033999edd17e4',
|
||||
last_online: new Date('2020-11-16T13:11:03.169Z'),
|
||||
stale: false,
|
||||
},
|
||||
{
|
||||
device_id:
|
||||
'9871e58aa2643ff9445cbc299b50397430ada75157d6c29b4c93548fff0f48f7',
|
||||
last_online: new Date('2020-11-16T16:21:35.948Z'),
|
||||
stale: false,
|
||||
},
|
||||
{
|
||||
device_id:
|
||||
'5ae0d02dedeb77b6521bd5eb7c9159bdc025873fa0bcb6f87aaddfbda0c50913',
|
||||
last_online: new Date('2020-11-19T15:07:57.089Z'),
|
||||
stale: false,
|
||||
},
|
||||
{
|
||||
device_id:
|
||||
'f02af604ca9010bd9ae04c427a24da90130da10d355f0a9b235886a89008fc05',
|
||||
last_online: new Date('2020-11-26T20:05:57.792Z'),
|
||||
stale: false,
|
||||
},
|
||||
{
|
||||
device_id:
|
||||
'490ab16ee0c124512dc769be1f3e7ee3894ce1e5b4b8b975e134fb326e551e88',
|
||||
last_online: new Date('2020-12-04T16:48:05.129Z'),
|
||||
stale: false,
|
||||
},
|
||||
]
|
||||
|
||||
function validateData(data) {
|
||||
const schema = yup.object().shape({
|
||||
operatorId: yup.string().required('operatorId not provided'),
|
||||
operator: yup.object().shape({
|
||||
name: yup.string().nullable(),
|
||||
phone: yup.string().nullable(),
|
||||
email: yup.string().email().nullable(),
|
||||
}),
|
||||
timestamp: yup.string().required('timestamp not provided'),
|
||||
machines: yup.array().of(
|
||||
yup.object().shape({
|
||||
machineId: yup.string().required('machineId not provided'),
|
||||
address: yup.object().required('address object not provided').shape({
|
||||
streetAddress: yup.string().nullable(),
|
||||
city: yup.string().nullable(),
|
||||
region: yup.string().nullable(),
|
||||
postalCode: yup.string().nullable(),
|
||||
country: yup.string().nullable(),
|
||||
}),
|
||||
location: yup.object().required('location object not provided').shape({
|
||||
name: yup.string().nullable(),
|
||||
url: yup.string().nullable(),
|
||||
phone: yup.string().nullable(),
|
||||
}),
|
||||
status: yup
|
||||
.string()
|
||||
.required('status not provided')
|
||||
.oneOf(['online', 'offline']),
|
||||
lastOnline: yup
|
||||
.string()
|
||||
.required('date in isostring format not provided'),
|
||||
cashIn: yup.boolean().required('cashIn boolean not defined'),
|
||||
cashOut: yup.boolean().required('cashOut boolean not defined'),
|
||||
manufacturer: yup.string().required('manufacturer not provided'),
|
||||
cashInTxLimit: yup.number().nullable(),
|
||||
cashOutTxLimit: yup.number().nullable(),
|
||||
cashInDailyLimit: yup.number().nullable(),
|
||||
cashOutDailyLimit: yup.number().nullable(),
|
||||
fiatCurrency: yup.string().required('fiatCurrency not provided'),
|
||||
identification: yup.object().shape({
|
||||
isPhone: yup.boolean().required('isPhone boolean not defined'),
|
||||
isPalmVein: yup.boolean().required('isPalmVein boolean not defined'),
|
||||
isPhoto: yup.boolean().required('isPhoto boolean not defined'),
|
||||
isIdDocScan: yup
|
||||
.boolean()
|
||||
.required('isIdDocScan boolean not defined'),
|
||||
isFingerprint: yup
|
||||
.boolean()
|
||||
.required('isFingerprint boolean not defined'),
|
||||
}),
|
||||
coins: yup.array().of(
|
||||
yup.object().shape({
|
||||
cryptoCode: yup.string().required('cryptoCode not provided'),
|
||||
cashInFee: yup.number().nullable(),
|
||||
cashOutFee: yup.number().nullable(),
|
||||
cashInFixedFee: yup.number().nullable(),
|
||||
cashInRate: yup.number().nullable(),
|
||||
cashOutRate: yup.number().nullable(),
|
||||
}),
|
||||
),
|
||||
}),
|
||||
),
|
||||
})
|
||||
return schema.validate(data)
|
||||
}
|
||||
|
||||
test('Verify axios request schema', async () => {
|
||||
const axios = require('axios')
|
||||
|
||||
jest
|
||||
.spyOn(axios, 'default')
|
||||
.mockImplementation(
|
||||
jest.fn(req =>
|
||||
validateData(req.data).then(() => ({ status: 'mock status 200' })),
|
||||
),
|
||||
)
|
||||
|
||||
db.any.mockResolvedValue(dbResponse)
|
||||
await car.update(rates, settings)
|
||||
})
|
||||
59
packages/server/lib/commission-math.js
Normal file
59
packages/server/lib/commission-math.js
Normal file
|
|
@ -0,0 +1,59 @@
|
|||
const BN = require('./bn')
|
||||
const configManager = require('./new-config-manager')
|
||||
const { utils: coinUtils } = require('@lamassu/coins')
|
||||
|
||||
/**
 * Truncate a crypto amount (expressed in atoms) to at most 6 decimal
 * places of the whole-unit representation.
 *
 * @param {BN} cryptoAtoms - amount in the currency's smallest unit
 * @param {string} cryptoCode - e.g. 'BTC'
 * @returns {BN} truncated amount, still in atoms
 */
function truncateCrypto(cryptoAtoms, cryptoCode) {
  const DECIMAL_PLACES = 6
  if (cryptoAtoms.eq(0)) return cryptoAtoms

  const scale = coinUtils.getCryptoCurrency(cryptoCode).unitScale
  // Fix: construct with `new` like every other BN in this module
  // (bignumber-style constructors accept both, but be consistent).
  const scaleFactor = new BN(10).pow(scale)

  // Drop sub-atom fractions, convert atoms -> units, round down to
  // DECIMAL_PLACES, then convert back to atoms.
  return new BN(cryptoAtoms)
    .integerValue(BN.ROUND_DOWN)
    .div(scaleFactor)
    .decimalPlaces(DECIMAL_PLACES)
    .times(scaleFactor)
}
|
||||
|
||||
/**
 * Convert a fiat amount to crypto atoms at the given fiat-per-unit rate,
 * rounding down to a whole number of atoms.
 */
function convertFiatToCryptoAtoms(fiatAmount, rate, cryptoCode) {
  const { unitScale } = coinUtils.getCryptoCurrency(cryptoCode)
  const atomsPerUnit = new BN(10).pow(unitScale)

  const units = new BN(fiatAmount).div(rate)
  return units.times(atomsPerUnit).integerValue(BN.ROUND_DOWN)
}
|
||||
|
||||
/**
 * Compute the crypto amount (in atoms, truncated) for a transaction:
 * the usable fiat (after the cash-in fee) divided by the effective
 * per-atom rate, where the effective rate is the raw ticker price
 * adjusted by the commission/discount multiplier.
 */
function fiatToCrypto(tx, rec, deviceId, config) {
  const usableFiat = rec.fiat - rec.cashInFee

  const commissions = configManager.getCommissions(
    tx.cryptoCode,
    deviceId,
    config,
  )
  const multiplier = getDiscountRate(tx.discount, commissions[tx.direction])
  // Effective fiat-per-unit rate, rounded to 5 decimal places.
  const effectiveRate = new BN(tx.rawTickerPrice)
    .times(multiplier)
    .decimalPlaces(5)

  const { unitScale } = coinUtils.getCryptoCurrency(tx.cryptoCode)
  const atomsPerUnit = new BN(10).pow(unitScale)

  return truncateCrypto(
    new BN(usableFiat).div(effectiveRate.div(atomsPerUnit)),
    tx.cryptoCode,
  )
}
|
||||
|
||||
/**
 * Commission multiplier after applying a percentage discount:
 * 1 + (1 - discount/100) * (commission/100).
 * A nil/zero discount leaves the full commission in effect.
 */
function getDiscountRate(discount, commission) {
  const discountFraction = (discount ? new BN(discount) : new BN(0)).div(100)
  const commissionFraction = new BN(commission).div(100)
  const retainedShare = new BN(1).minus(discountFraction)
  return new BN(1).plus(retainedShare.times(commissionFraction))
}
|
||||
|
||||
module.exports = {
|
||||
fiatToCrypto,
|
||||
getDiscountRate,
|
||||
convertFiatToCryptoAtoms,
|
||||
}
|
||||
70
packages/server/lib/compliance-external.js
Normal file
70
packages/server/lib/compliance-external.js
Normal file
|
|
@ -0,0 +1,70 @@
|
|||
const logger = require('./logger')
|
||||
const ph = require('./plugin-helper')
|
||||
|
||||
// Resolve a compliance plugin module plus its account config.
// The mock plugin uses a hard-coded applicant level instead of a
// configured account.
const getPlugin = (accounts, pluginCode) => {
  const plugin = ph.load(ph.COMPLIANCE, pluginCode)
  const account =
    pluginCode === 'mock-compliance'
      ? { applicantLevel: 'basic' }
      : accounts[pluginCode]
  return { plugin, account }
}
|
||||
|
||||
/**
 * Fetch the applicant status for one external compliance service.
 * Resolves to { service, status }; status is null when the applicant
 * does not exist or the lookup/plugin load fails. Never rejects.
 */
const getStatus = (accounts, service, customerId) => {
  try {
    const { plugin, account } = getPlugin(accounts, service)

    return plugin
      .getApplicantStatus(account, customerId)
      .then(status => ({
        service,
        status,
      }))
      .catch(error => {
        // Fix: `error.response` is absent on network-level failures, so
        // guard the access or the catch handler itself throws. A 404
        // (no applicant yet) is expected and intentionally not logged.
        if (error.response?.status !== 404)
          logger.error(
            `Error getting applicant for service ${service}:`,
            error.message,
          )
        return {
          service,
          status: null,
        }
      })
  } catch (error) {
    logger.error(`Error loading plugin for service ${service}:`, error)
    return Promise.resolve({
      service,
      status: null,
    })
  }
}
|
||||
|
||||
/**
 * Build a map of externalService -> applicant status for a customer,
 * omitting services whose status could not be retrieved.
 */
const getStatusMap = (accounts, externalComplianceTriggers, customerId) => {
  const lookups = externalComplianceTriggers.map(({ externalService }) =>
    getStatus(accounts, externalService, customerId),
  )

  return Promise.all(lookups).then(results => {
    const statusMap = {}
    for (const { service, status } of results) {
      if (status) statusMap[service] = status
    }
    return statusMap
  })
}
|
||||
|
||||
// Create an applicant at the external compliance service, using the
// applicant level configured on the service account.
const createApplicant = (accounts, externalService, customerId) => {
  const { plugin, account } = getPlugin(accounts, externalService)
  return plugin.createApplicant(account, customerId, account.applicantLevel)
}
|
||||
|
||||
// Create a verification link at the external compliance service, using
// the applicant level configured on the service account.
const createLink = (accounts, externalService, customerId) => {
  const { plugin, account } = getPlugin(accounts, externalService)
  return plugin.createLink(account, customerId, account.applicantLevel)
}
|
||||
|
||||
module.exports = {
|
||||
getStatusMap,
|
||||
createApplicant,
|
||||
createLink,
|
||||
}
|
||||
84
packages/server/lib/compliance-triggers.js
Normal file
84
packages/server/lib/compliance-triggers.js
Normal file
|
|
@ -0,0 +1,84 @@
|
|||
const uuid = require('uuid')
|
||||
const {
|
||||
db: { default: db },
|
||||
complianceTriggers,
|
||||
} = require('typesafe-db')
|
||||
|
||||
// Largest `thresholdDays` across all triggers; 0 when the list is empty.
const maxDaysThreshold = triggers =>
  triggers.reduce((max, t) => Math.max(max, t.thresholdDays), 0)
|
||||
|
||||
// Smallest blocking fiat threshold among txVolume/txAmount triggers,
// or Infinity when no blocking fiat trigger exists.
const getCashLimit = triggers => {
  const thresholds = []
  for (const { triggerType, requirementType, threshold } of triggers) {
    const isFiatTrigger = triggerType === 'txVolume' || triggerType === 'txAmount'
    const isBlocking = requirementType === 'block' || requirementType === 'suspend'
    if (isFiatTrigger && isBlocking && threshold) thresholds.push(threshold)
  }
  // Math.min() of an empty list is Infinity; `|| Infinity` kept for
  // exact parity with the original expression.
  return Math.min(...thresholds) || Infinity
}
|
||||
|
||||
// Curried predicate: does any trigger carry the given requirement type?
const hasRequirement = requirementType => triggers =>
  triggers.findIndex(t => t.requirementType === requirementType) !== -1
|
||||
|
||||
// Ready-made predicates over a trigger list, one per requirement type.
const hasPhone = hasRequirement('sms')
const hasFacephoto = hasRequirement('facephoto')
const hasIdScan = hasRequirement('idCardData')
const hasSanctions = hasRequirement('sanctions')
|
||||
|
||||
const AUTH_METHODS = {
|
||||
SMS: 'SMS',
|
||||
EMAIL: 'EMAIL',
|
||||
}
|
||||
|
||||
const getComplianceTriggerSets = () =>
|
||||
complianceTriggers.getComplianceTriggerSets(db)
|
||||
|
||||
const getComplianceTriggerSetById = id =>
|
||||
complianceTriggers.getComplianceTriggerSetById(db, id)
|
||||
|
||||
const getComplianceTriggers = complianceTriggerSetId =>
|
||||
complianceTriggers.getComplianceTriggers(db, complianceTriggerSetId)
|
||||
|
||||
const createComplianceTriggerSet = name =>
|
||||
complianceTriggers.createComplianceTriggerSet(db, uuid.v4(), name)
|
||||
|
||||
const deleteComplianceTriggerSet = id =>
|
||||
complianceTriggers.deleteComplianceTriggerSet(db, id)
|
||||
|
||||
const createComplianceTrigger = (complianceTriggerSetId, trigger) =>
|
||||
complianceTriggers.createComplianceTrigger(
|
||||
db,
|
||||
complianceTriggerSetId,
|
||||
trigger,
|
||||
)
|
||||
|
||||
const deleteComplianceTrigger = id =>
|
||||
complianceTriggers.deleteComplianceTrigger(db, id)
|
||||
|
||||
const deleteComplianceTriggersByCustomInfoRequestId = customInfoRequestId =>
|
||||
complianceTriggers.deleteComplianceTriggersByCustomInfoRequestId(
|
||||
db,
|
||||
customInfoRequestId,
|
||||
)
|
||||
|
||||
const getAllComplianceTriggers = (dbOrTx = db) =>
|
||||
complianceTriggers.getAllComplianceTriggers(dbOrTx)
|
||||
|
||||
module.exports = {
|
||||
getAllComplianceTriggers,
|
||||
hasSanctions,
|
||||
maxDaysThreshold,
|
||||
getCashLimit,
|
||||
hasPhone,
|
||||
hasFacephoto,
|
||||
hasIdScan,
|
||||
AUTH_METHODS,
|
||||
|
||||
getComplianceTriggerSets,
|
||||
getComplianceTriggerSetById,
|
||||
getComplianceTriggers,
|
||||
createComplianceTriggerSet,
|
||||
deleteComplianceTriggerSet,
|
||||
createComplianceTrigger,
|
||||
deleteComplianceTrigger,
|
||||
deleteComplianceTriggersByCustomInfoRequestId,
|
||||
}
|
||||
91
packages/server/lib/compliance.js
Normal file
91
packages/server/lib/compliance.js
Normal file
|
|
@ -0,0 +1,91 @@
|
|||
const _ = require('lodash/fp')
|
||||
const uuid = require('uuid')
|
||||
|
||||
const logger = require('./logger')
|
||||
const db = require('./db')
|
||||
const ofac = require('./ofac/index')
|
||||
|
||||
/**
 * Persist a single OFAC alias match to the audit log.
 */
function logSanctionsMatch(deviceId, customer, sanctionsId, alias) {
  const sql = `insert into sanctions_logs
  (id, device_id, sanctioned_id, sanctioned_alias_id, sanctioned_alias_full_name, customer_id)
  values
  ($1, $2, $3, $4, $5, $6)`

  const params = [
    uuid.v4(),
    deviceId,
    sanctionsId,
    alias.id,
    alias.fullName,
    customer.id,
  ]
  return db.none(sql, params)
}
|
||||
|
||||
/**
 * Persist every alias of every matched sanctions entry.
 */
function logSanctionsMatches(deviceId, customer, results) {
  const inserts = _.flatMap(
    result =>
      _.map(
        alias => logSanctionsMatch(deviceId, customer, result.id, alias),
        result.aliases,
      ),
    results,
  )
  return Promise.all(inserts)
}
|
||||
|
||||
/**
 * Screen a customer against the OFAC sanctions list.
 *
 * Resolves true when the customer must be treated as a potential match
 * — including when the data needed to screen them is missing, which
 * fails safe. Any real matches are logged before resolving.
 *
 * @param {string} deviceId - machine the customer is using
 * @param {object} customer - customer record with optional idCardData
 * @returns {Promise<boolean>} true = matched (or unscreenable)
 */
function matchOfac(deviceId, customer) {
  return Promise.resolve().then(() => {
    // Probably because we haven't asked for ID yet
    if (!_.isPlainObject(customer.idCardData)) {
      return true
    }

    const nameParts = {
      firstName: customer.idCardData.firstName,
      lastName: customer.idCardData.lastName,
    }

    // Either name component missing -> cannot screen; fail safe.
    if (_.some(_.isNil, _.values(nameParts))) {
      logger.error(
        new Error(
          `Insufficient idCardData while matching OFAC for: ${customer.id}`,
        ),
      )
      return true
    }

    const birthDate = customer.idCardData.dateOfBirth

    if (_.isNil(birthDate)) {
      logger.error(
        new Error(`No birth date while matching OFAC for: ${customer.id}`),
      )
      return true
    }

    // Fuzzy-match tuning; fullNameThreshold is the stricter bar applied
    // to whole-name comparisons. NOTE(review): semantics assumed from
    // the ofac module's interface — confirm against ./ofac/index.
    const options = {
      threshold: 0.85,
      fullNameThreshold: 0.95,
      debug: false,
    }

    const results = ofac.match(nameParts, birthDate, options)

    // Log all matches first, then report whether any were found.
    return logSanctionsMatches(deviceId, customer, results).then(
      () => !_.isEmpty(results),
    )
  })
}
|
||||
|
||||
/**
 * True when the customer passes sanctions screening. A manual operator
 * override takes precedence over a live OFAC match.
 */
function validateOfac(deviceId, customer) {
  switch (customer.sanctionsOverride) {
    case 'blocked':
      return Promise.resolve(false)
    case 'verified':
      return Promise.resolve(true)
    default:
      return matchOfac(deviceId, customer).then(didMatch => !didMatch)
  }
}
|
||||
|
||||
/**
 * Compute the patch to apply to a customer record after sanctions
 * validation: `{ sanctions }` when the stored flag is missing or stale,
 * `{}` when nothing changed.
 */
function validationPatch(deviceId, customer) {
  return validateOfac(deviceId, customer).then(sanctions => {
    const unchanged =
      !_.isNil(customer.sanctions) && customer.sanctions === sanctions
    return unchanged ? {} : { sanctions }
  })
}
|
||||
|
||||
module.exports = { validationPatch }
|
||||
32
packages/server/lib/compliance_overrides.js
Normal file
32
packages/server/lib/compliance_overrides.js
Normal file
|
|
@ -0,0 +1,32 @@
|
|||
const db = require('./db')
|
||||
const uuid = require('uuid')
|
||||
|
||||
/**
|
||||
* Create new compliance override
|
||||
*
|
||||
* @name add
|
||||
* @function
|
||||
*
|
||||
* @param {object} complianceOverride Compliance override object
|
||||
*
|
||||
* @returns {object} Newly created compliance override
|
||||
*/
|
||||
function add(complianceOverride) {
  const sql = `insert into compliance_overrides
  (id,
  customer_id,
  compliance_type,
  override_at,
  override_by,
  verification)
  values ($1, $2, $3, now(), $4, $5) returning *`

  // override_at is stamped server-side via now(); everything else comes
  // from the caller-supplied record.
  const params = [
    uuid.v4(),
    complianceOverride.customerId,
    complianceOverride.complianceType,
    complianceOverride.overrideBy,
    complianceOverride.verification,
  ]
  return db.one(sql, params)
}
|
||||
|
||||
module.exports = { add }
|
||||
100
packages/server/lib/constants.js
Normal file
100
packages/server/lib/constants.js
Normal file
|
|
@ -0,0 +1,100 @@
|
|||
const T = require('./time')
|
||||
|
||||
const POSTGRES_USER = process.env.POSTGRES_USER
|
||||
const POSTGRES_PASSWORD = process.env.POSTGRES_PASSWORD
|
||||
const POSTGRES_HOST = process.env.POSTGRES_HOST
|
||||
const POSTGRES_PORT = process.env.POSTGRES_PORT
|
||||
const POSTGRES_DB = process.env.POSTGRES_DB
|
||||
|
||||
const PSQL_URL = `postgres://${POSTGRES_USER}:${POSTGRES_PASSWORD}@${POSTGRES_HOST}:${POSTGRES_PORT}/${POSTGRES_DB}`
|
||||
|
||||
const anonymousCustomer = {
|
||||
uuid: '47ac1184-8102-11e7-9079-8f13a7117867',
|
||||
name: 'anonymous',
|
||||
}
|
||||
|
||||
const defaultMachineGroup = {
|
||||
uuid: '3b3c5f2e-8102-11e7-9079-8f13a7117867',
|
||||
name: 'default',
|
||||
}
|
||||
|
||||
const CASH_UNIT_CAPACITY = {
|
||||
default: {
|
||||
cashbox: 600,
|
||||
cassette: 500,
|
||||
},
|
||||
douro: {
|
||||
cashbox: 600,
|
||||
cassette: 500,
|
||||
},
|
||||
grandola: {
|
||||
cashbox: 2000,
|
||||
recycler: 2800,
|
||||
},
|
||||
aveiro: {
|
||||
cashbox: 1500,
|
||||
recycler: 60,
|
||||
cassette: 500,
|
||||
},
|
||||
tejo: {
|
||||
// TODO: add support for the different cashbox configuration in Tejo
|
||||
cashbox: 1000,
|
||||
cassette: 500,
|
||||
},
|
||||
gaia: {
|
||||
cashbox: 600,
|
||||
},
|
||||
sintra: {
|
||||
cashbox: 1000,
|
||||
cassette: 500,
|
||||
},
|
||||
gmuk1: {
|
||||
cashbox: 2200,
|
||||
cassette: 2000,
|
||||
},
|
||||
}
|
||||
|
||||
const CASH_OUT_MINIMUM_AMOUNT_OF_CASSETTES = 2
|
||||
const CASH_OUT_MAXIMUM_AMOUNT_OF_CASSETTES = 4
|
||||
const CASH_OUT_MAXIMUM_AMOUNT_OF_RECYCLERS = 6
|
||||
const AUTHENTICATOR_ISSUER_ENTITY = 'Lamassu'
|
||||
const AUTH_TOKEN_EXPIRATION_TIME = '30 minutes'
|
||||
const REGISTRATION_TOKEN_EXPIRATION_TIME = '30 minutes'
|
||||
const USER_SESSIONS_TABLE_NAME = 'user_sessions'
|
||||
const USER_SESSIONS_CLEAR_INTERVAL = 1 * T.hour
|
||||
|
||||
const AUTOMATIC = 'automatic'
|
||||
const MANUAL = 'manual'
|
||||
|
||||
const CASH_OUT_DISPENSE_READY = 'cash_out_dispense_ready'
|
||||
const CONFIRMATION_CODE = 'sms_code'
|
||||
const RECEIPT = 'sms_receipt'
|
||||
|
||||
const WALLET_SCORE_THRESHOLD = 9
|
||||
|
||||
const BALANCE_FETCH_SPEED_MULTIPLIER = {
|
||||
NORMAL: 1,
|
||||
SLOW: 3,
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
anonymousCustomer,
|
||||
defaultMachineGroup,
|
||||
CASH_UNIT_CAPACITY,
|
||||
AUTHENTICATOR_ISSUER_ENTITY,
|
||||
AUTH_TOKEN_EXPIRATION_TIME,
|
||||
REGISTRATION_TOKEN_EXPIRATION_TIME,
|
||||
AUTOMATIC,
|
||||
MANUAL,
|
||||
USER_SESSIONS_TABLE_NAME,
|
||||
USER_SESSIONS_CLEAR_INTERVAL,
|
||||
CASH_OUT_DISPENSE_READY,
|
||||
CONFIRMATION_CODE,
|
||||
CASH_OUT_MINIMUM_AMOUNT_OF_CASSETTES,
|
||||
CASH_OUT_MAXIMUM_AMOUNT_OF_CASSETTES,
|
||||
CASH_OUT_MAXIMUM_AMOUNT_OF_RECYCLERS,
|
||||
WALLET_SCORE_THRESHOLD,
|
||||
RECEIPT,
|
||||
PSQL_URL,
|
||||
BALANCE_FETCH_SPEED_MULTIPLIER,
|
||||
}
|
||||
33
packages/server/lib/customer-notes.js
Normal file
33
packages/server/lib/customer-notes.js
Normal file
|
|
@ -0,0 +1,33 @@
|
|||
const uuid = require('uuid')
|
||||
const _ = require('lodash/fp')
|
||||
|
||||
const db = require('./db')
|
||||
|
||||
/**
 * Fetch a customer's note row with column names camelCased.
 *
 * Fix: the previous callback `(_, key) => _.camelize(key)` shadowed the
 * lodash binding with the iterated value and called the non-existent
 * `_.camelize`. lodash/fp's `mapKeys` iteratee receives the key
 * directly, so `_.camelCase` is the correct iteratee.
 */
const getCustomerNotes = customerId => {
  const sql = `SELECT * FROM customer_notes WHERE customer_id=$1`
  return db
    .oneOrNone(sql, [customerId])
    .then(res => _.mapKeys(_.camelCase, res))
}
|
||||
|
||||
// Insert a new note for a customer, authored by the given user.
const createCustomerNote = (customerId, userId, title, content) => {
  const sql = `INSERT INTO customer_notes (id, customer_id, last_edited_by, last_edited_at, title, content) VALUES ($1, $2, $3, now(), $4, $5)`
  const params = [uuid.v4(), customerId, userId, title, content]
  return db.none(sql, params)
}
|
||||
|
||||
// Remove a single note by its id.
const deleteCustomerNote = noteId =>
  db.none(`DELETE FROM customer_notes WHERE id=$1`, [noteId])
|
||||
|
||||
// Overwrite a note's content and stamp the editing user and time.
const updateCustomerNote = (noteId, userId, content) =>
  db.none(
    `UPDATE customer_notes SET last_edited_at=now(), last_edited_by=$1, content=$2 WHERE id=$3`,
    [userId, content, noteId],
  )
|
||||
|
||||
module.exports = {
|
||||
getCustomerNotes,
|
||||
createCustomerNote,
|
||||
deleteCustomerNote,
|
||||
updateCustomerNote,
|
||||
}
|
||||
1060
packages/server/lib/customers.js
Normal file
1060
packages/server/lib/customers.js
Normal file
File diff suppressed because it is too large
Load diff
4
packages/server/lib/db-error-codes.js
Normal file
4
packages/server/lib/db-error-codes.js
Normal file
|
|
@ -0,0 +1,4 @@
|
|||
// Postgres error codes the server checks for explicitly.
const dbErrorCodes = {
  // SQLSTATE 40001: transaction must be retried (serialization conflict).
  SERIALIZATION_FAILURE: '40001',
}
|
||||
module.exports = dbErrorCodes
|
||||
23
packages/server/lib/db-migrate-store.js
Normal file
23
packages/server/lib/db-migrate-store.js
Normal file
|
|
@ -0,0 +1,23 @@
|
|||
const db = require('../lib/db')
|
||||
const logger = require('./logger')
|
||||
|
||||
const upsert =
|
||||
'insert into migrations (id, data) values (1, $1) on conflict (id) do update set data = $1'
|
||||
|
||||
function DbMigrateStore() {}
|
||||
|
||||
/**
 * Persist the migration set.
 *
 * Fix: the migrate-store contract expects `fn(err)` on completion, but
 * errors previously only reached the logger — the callback was never
 * invoked, which could hang the migration run. Errors are now both
 * logged and propagated to `fn`.
 */
DbMigrateStore.prototype.save = function (set, fn) {
  const insertData = JSON.stringify({
    lastRun: set.lastRun,
    migrations: set.migrations,
  })
  db.none(upsert, [insertData])
    .then(() => fn())
    .catch(err => {
      logger.error(err)
      fn(err)
    })
}
|
||||
|
||||
/**
 * Load the persisted migration set, defaulting to an empty object when
 * no state has been saved yet.
 *
 * Fix: a database error previously left the promise chain uncaught and
 * the callback uncalled; it is now reported via `fn(err)`.
 */
DbMigrateStore.prototype.load = function (fn) {
  db.oneOrNone('select data from migrations')
    .then(res => fn(null, res?.data || {}))
    .catch(err => fn(err))
}
|
||||
|
||||
module.exports = DbMigrateStore
|
||||
42
packages/server/lib/db.js
Normal file
42
packages/server/lib/db.js
Normal file
|
|
@ -0,0 +1,42 @@
|
|||
const Pgp = require('pg-promise')
|
||||
const uuid = require('uuid')
|
||||
const _ = require('lodash/fp')
|
||||
|
||||
const { PSQL_URL } = require('./constants')
|
||||
const logger = require('./logger')
|
||||
const eventBus = require('./event-bus')
|
||||
|
||||
const DATABASE_NOT_REACHABLE = 'Database not reachable.'
|
||||
|
||||
const pgp = Pgp({
|
||||
schema: 'public',
|
||||
error: (err, e) => {
|
||||
if (e.cn) logger.error(DATABASE_NOT_REACHABLE)
|
||||
else if (e.query) {
|
||||
logger.error(e.query)
|
||||
e.params && logger.error(e.params)
|
||||
} else logger.error(err)
|
||||
},
|
||||
})
|
||||
|
||||
const db = pgp(PSQL_URL)
|
||||
|
||||
eventBus.subscribe('log', args => {
|
||||
if (process.env.SKIP_SERVER_LOGS) return
|
||||
|
||||
const { level, message, meta } = args
|
||||
|
||||
// prevent loop if database is not reachable
|
||||
if (message === DATABASE_NOT_REACHABLE) return
|
||||
|
||||
const msgToSave = message || _.get('message', meta)
|
||||
|
||||
const sql = `insert into server_logs
|
||||
(id, device_id, message, log_level, meta) values ($1, $2, $3, $4, $5) returning *`
|
||||
|
||||
db.one(sql, [uuid.v4(), '', msgToSave, level, meta])
|
||||
.then(_.mapKeys(_.camelCase))
|
||||
.catch(_.noop)
|
||||
})
|
||||
|
||||
module.exports = db
|
||||
25
packages/server/lib/email.js
Normal file
25
packages/server/lib/email.js
Normal file
|
|
@ -0,0 +1,25 @@
|
|||
const ph = require('./plugin-helper')
|
||||
|
||||
/**
 * Send an operator notification email via the configured third-party
 * email plugin (defaults to mailgun).
 */
function sendMessage(settings, rec) {
  return Promise.resolve().then(() => {
    const pluginCode =
      settings.config.notifications_thirdParty_email || 'mailgun'
    const plugin = ph.load(ph.EMAIL, pluginCode)
    return plugin.sendMessage(settings.accounts[pluginCode], rec)
  })
}
|
||||
|
||||
/**
 * Send a customer-facing email via the configured third-party email
 * plugin (defaults to mailgun). Currently identical to sendMessage;
 * kept separate so the two paths can diverge independently.
 */
function sendCustomerMessage(settings, rec) {
  return Promise.resolve().then(() => {
    const pluginCode =
      settings.config.notifications_thirdParty_email || 'mailgun'
    const plugin = ph.load(ph.EMAIL, pluginCode)
    return plugin.sendMessage(settings.accounts[pluginCode], rec)
  })
}
|
||||
|
||||
module.exports = { sendMessage, sendCustomerMessage }
|
||||
44
packages/server/lib/environment-helper.js
Normal file
44
packages/server/lib/environment-helper.js
Normal file
|
|
@ -0,0 +1,44 @@
|
|||
const path = require('path')
|
||||
|
||||
const isDevMode = () => process.env.NODE_ENV === 'development'
|
||||
const isProdMode = () => process.env.NODE_ENV === 'production'
|
||||
|
||||
require('dotenv').config({ path: path.resolve(__dirname, '../.env') })
|
||||
|
||||
// True when this crypto's node is configured to run on a remote host.
function isRemoteNode(crypto) {
  const location = process.env[`${crypto.cryptoCode}_NODE_LOCATION`]
  return location === 'remote'
}
|
||||
|
||||
// True when this crypto's wallet is configured to run on a remote host.
function isRemoteWallet(crypto) {
  const location = process.env[`${crypto.cryptoCode}_WALLET_LOCATION`]
  return location === 'remote'
}
|
||||
|
||||
const skip2fa = process.env.SKIP_2FA === 'true'
|
||||
|
||||
/**
 * Collect CUSTOM_TEXT_* environment variables as { id, text } entries.
 * CUSTOM_TEXT_FOO_BAR=hello -> { id: 'foo-bar', text: 'hello' }.
 * Variables with empty values are skipped.
 */
function getCustomTextEntries() {
  const PREFIX = 'CUSTOM_TEXT_'
  const entries = []

  for (const [key, text] of Object.entries(process.env)) {
    if (!key.startsWith(PREFIX) || !text) continue
    const id = key.slice(PREFIX.length).toLowerCase().replace(/_/g, '-')
    entries.push({ id, text })
  }

  return entries
}
|
||||
|
||||
module.exports = {
|
||||
isDevMode,
|
||||
isProdMode,
|
||||
isRemoteNode,
|
||||
isRemoteWallet,
|
||||
skip2fa,
|
||||
getCustomTextEntries,
|
||||
}
|
||||
27
packages/server/lib/error.js
Normal file
27
packages/server/lib/error.js
Normal file
|
|
@ -0,0 +1,27 @@
|
|||
const _ = require('lodash/fp')
|
||||
|
||||
/**
 * Factory for named custom Error constructors.
 *
 * The returned constructor subclasses Error, defaults its message to
 * the start-cased name, and carries `code = name` as a static property
 * so callers can match on it.
 *
 * @param {string} name - error name, e.g. 'BadNumberError'
 * @returns {Function} constructor producing Error instances
 */
const E = function (name) {
  // Fix: replaced `var` with `const` (no reassignment; modern idiom).
  const CustomErr = function (msg) {
    this.message = msg || _.startCase(name)
    this.name = name
    // Exclude the constructor frame itself from the stack trace.
    Error.captureStackTrace(this, CustomErr)
  }
  CustomErr.prototype = Object.create(Error.prototype)
  CustomErr.prototype.constructor = CustomErr
  CustomErr.code = name

  return CustomErr
}
|
||||
|
||||
module.exports = E
|
||||
|
||||
// Define a named error class and attach it as a property of E,
// e.g. register('NoDataError') makes E.NoDataError available.
function register(errorName) {
  E[errorName] = E(errorName)
}
|
||||
|
||||
register('BadNumberError')
|
||||
register('NoDataError')
|
||||
register('InsufficientFundsError')
|
||||
register('StaleTxError')
|
||||
register('RatchetError')
|
||||
register('NotImplementedError')
|
||||
32
packages/server/lib/event-bus.js
Normal file
32
packages/server/lib/event-bus.js
Normal file
|
|
@ -0,0 +1,32 @@
|
|||
// Adapted from https://medium.com/@soffritti.pierfrancesco/create-a-simple-event-bus-in-javascript-8aa0370b3969
|
||||
|
||||
const uuid = require('uuid')
|
||||
const _ = require('lodash/fp')
|
||||
|
||||
/* Registry of live subscriptions: eventType -> { subscriptionId: callback } */
const subscriptions = {}

/**
 * Registers `callback` for `eventType`.
 * Returns a handle whose `unsubscribe()` removes the callback and drops the
 * event's bucket entirely once its last subscriber is gone.
 */
function subscribe(eventType, callback) {
  const id = uuid.v1()

  const bucket = subscriptions[eventType] || (subscriptions[eventType] = {})
  bucket[id] = callback

  const unsubscribe = () => {
    delete subscriptions[eventType][id]
    const remaining = Object.keys(subscriptions[eventType]).length
    if (remaining === 0) delete subscriptions[eventType]
  }

  return { unsubscribe }
}

/** Synchronously invokes every callback registered for `eventType` with `arg`. */
function publish(eventType, arg) {
  if (!subscriptions[eventType]) return

  for (const id of Object.keys(subscriptions[eventType]))
    subscriptions[eventType][id](arg)
}
|
||||
|
||||
// Minimal pub/sub surface: no wildcard events; callbacks run synchronously.
module.exports = { subscribe, publish }
|
||||
95
packages/server/lib/exchange.js
Normal file
95
packages/server/lib/exchange.js
Normal file
|
|
@ -0,0 +1,95 @@
|
|||
const _ = require('lodash/fp')
|
||||
const { ALL_CRYPTOS } = require('@lamassu/coins')
|
||||
|
||||
const logger = require('./logger')
|
||||
const configManager = require('./new-config-manager')
|
||||
const ccxt = require('./plugins/exchange/ccxt')
|
||||
const mockExchange = require('./plugins/exchange/mock-exchange')
|
||||
const accounts = require('./new-admin/config/accounts')
|
||||
|
||||
/**
 * Resolves the configured exchange name for a crypto, or null when the
 * wallet is explicitly set to 'no-exchange'.
 */
function lookupExchange(settings, cryptoCode) {
  const walletSettings = configManager.getWalletSettings(
    cryptoCode,
    settings.config,
  )
  return walletSettings.exchange === 'no-exchange'
    ? null
    : walletSettings.exchange
}
|
||||
|
||||
/**
 * Resolves { exchangeName, account } for the crypto's configured exchange.
 * Rejects when no exchange is configured. The mock exchange gets a fixed
 * EUR currency-market account for testing.
 */
async function fetchExchange(settings, cryptoCode) {
  const exchangeName = lookupExchange(settings, cryptoCode)
  if (exchangeName === 'mock-exchange')
    return { exchangeName, account: { currencyMarket: 'EUR' } }
  if (!exchangeName) throw new Error('No exchange set')
  return { exchangeName, account: settings.accounts[exchangeName] }
}
|
||||
|
||||
/** Executes a buy of `cryptoAtoms` via the configured (or mock) exchange. */
function buy(settings, tradeEntry) {
  const { cryptoAtoms, fiatCode, cryptoCode } = tradeEntry
  return fetchExchange(settings, cryptoCode).then(({ exchangeName, account }) =>
    exchangeName === 'mock-exchange'
      ? mockExchange.buy(cryptoAtoms, fiatCode, cryptoCode)
      : ccxt.trade('buy', account, tradeEntry, exchangeName),
  )
}
|
||||
|
||||
/** Executes a sell of `cryptoAtoms` via the configured (or mock) exchange. */
function sell(settings, tradeEntry) {
  const { cryptoAtoms, fiatCode, cryptoCode } = tradeEntry
  return fetchExchange(settings, cryptoCode).then(({ exchangeName, account }) =>
    exchangeName === 'mock-exchange'
      ? mockExchange.sell(cryptoAtoms, fiatCode, cryptoCode)
      : ccxt.trade('sell', account, tradeEntry, exchangeName),
  )
}
|
||||
|
||||
/** True when an exchange (other than 'no-exchange') is configured. */
function active(settings, cryptoCode) {
  return Boolean(lookupExchange(settings, cryptoCode))
}
|
||||
|
||||
// Fetches the tradeable markets of every production exchange plugin.
// Resolves to { [exchangeCode]: markets }; an exchange that errors out
// contributes an empty markets list (plus an error message) instead of
// failing the whole batch.
function getMarkets() {
  // Curried lodash/fp predicate: keep real (non-dev) exchange plugins only.
  const filterExchanges = _.filter(
    it => it.class === 'exchange' && !it.dev && it.code !== 'no-exchange',
  )
  const availableExchanges = _.map(
    it => it.code,
    filterExchanges(accounts.ACCOUNT_LIST),
  )

  // Per-exchange fetch; failures are logged and downgraded to empty results.
  const fetchMarketForExchange = exchange =>
    ccxt
      .getMarkets(exchange, ALL_CRYPTOS)
      .then(markets => ({ exchange, markets }))
      .catch(error => {
        logger.error(`Error fetching markets for ${exchange}:`, error)
        return {
          exchange,
          markets: [],
          error: error.message,
        }
      })

  // [{ exchange, markets }, ...] -> { exchange: markets, ... }
  const transformToObject = _.reduce(
    (acc, { exchange, markets }) => ({
      ...acc,
      [exchange]: markets,
    }),
    {},
  )

  const promises = _.map(fetchMarketForExchange, availableExchanges)
  return Promise.all(promises).then(transformToObject)
}
|
||||
|
||||
// Public surface; lookupExchange stays module-private.
module.exports = {
  fetchExchange,
  buy,
  sell,
  active,
  getMarkets,
}
|
||||
84
packages/server/lib/forex.js
Normal file
84
packages/server/lib/forex.js
Normal file
|
|
@ -0,0 +1,84 @@
|
|||
const _ = require('lodash/fp')
|
||||
const axios = require('axios')
|
||||
const mem = require('mem')
|
||||
|
||||
const BN = require('./bn')
|
||||
const T = require('./time')
|
||||
|
||||
const MAX_ROTATIONS = 5
|
||||
|
||||
// Fetches the raw BitPay rates table (network call); resolves to the
// response payload as-is.
const _getFiatRates = () =>
  axios.get('https://bitpay.com/api/rates').then(response => response.data)

// Memoized wrapper: a single shared cache entry (constant cache key),
// refreshed at most every 6 hours.
const getFiatRates = mem(_getFiatRates, {
  maxAge: 6 * T.hours,
  cacheKey: () => '',
})
|
||||
|
||||
// Rotating queue of FOREX rate providers: getRate() moves a failing provider
// to the back and retries with the next one. Currently only BitPay is
// configured. NOTE: mutated in place by getRate's error path.
const API_QUEUE = [
  {
    api: getBitPayFxRate,
    name: 'bitpay',
    // Property names inside the provider's rate entries:
    fiatCodeProperty: 'code',
    rateProperty: 'rate',
  },
]
|
||||
|
||||
/**
 * Computes the FX rate of `fiatCode` relative to `defaultFiatMarket` from
 * BitPay's rate table. Resolves to { fxRate } (a BN ratio of the two rates).
 */
function getBitPayFxRate(
  fiatCode,
  defaultFiatMarket,
  fiatCodeProperty,
  rateProperty,
) {
  return getFiatRates().then(fxRates => {
    const lookup = code =>
      findCurrencyRates(fxRates, code, fiatCodeProperty, rateProperty)

    const defaultRate = lookup(defaultFiatMarket)
    const fxRate = lookup(fiatCode).div(defaultRate)

    return { fxRate }
  })
}
|
||||
|
||||
/**
 * Picks the entry matching `fiatCode` out of the raw rates array and returns
 * its rate as a BN. Throws for unknown or rate-less currencies.
 */
function findCurrencyRates(fxRates, fiatCode, fiatCodeProperty, rateProperty) {
  const entry = _.find(_.matchesProperty(fiatCodeProperty, fiatCode), fxRates)
  const rate = entry?.[rateProperty]
  if (!rate) throw new Error(`Unsupported currency: ${fiatCode}`)
  return new BN(rate.toString())
}
|
||||
|
||||
/**
 * Fetches the FX rate of `fiatCode` relative to `defaultFiatMarket`,
 * rotating through API_QUEUE on failure (up to MAX_ROTATIONS attempts).
 *
 * @param {number} retries 1-based attempt counter; callers use the default.
 * @returns {Promise<{fxRate: BN}>}
 * @throws when no provider is configured or all rotations fail.
 */
const getRate = (retries = 1, fiatCode, defaultFiatMarket) => {
  const provider = API_QUEUE[0]

  // Guard BEFORE dereferencing: the previous version read `.name`/`.api`
  // off the head first, so an empty queue threw a raw TypeError and the
  // `does not exist` check was unreachable.
  if (!provider || !provider.api)
    throw new Error(`FOREX api ${provider?.name} does not exist.`)

  const { api: activeAPI, fiatCodeProperty, rateProperty } = provider

  return activeAPI(
    fiatCode,
    defaultFiatMarket,
    fiatCodeProperty,
    rateProperty,
  ).catch(err => {
    // Rotate the failing provider to the back of the queue and retry.
    const erroredService = API_QUEUE.shift()
    API_QUEUE.push(erroredService)
    if (retries >= MAX_ROTATIONS)
      throw new Error(
        `FOREX API error from ${erroredService.name} ${err?.message}`,
      )

    // Avoid mutating the parameter (was `++retries`).
    return getRate(retries + 1, fiatCode, defaultFiatMarket)
  })
}
|
||||
|
||||
// getFiatRates is exported mainly so callers can pre-warm/inspect the cache.
module.exports = { getFiatRates, getRate }
|
||||
473
packages/server/lib/graphql/resolvers.js
Normal file
473
packages/server/lib/graphql/resolvers.js
Normal file
|
|
@ -0,0 +1,473 @@
|
|||
const { skip2fa, getCustomTextEntries } = require('../environment-helper')
|
||||
|
||||
const _ = require('lodash/fp')
|
||||
const nmd = require('nano-markdown')
|
||||
|
||||
const plugins = require('../plugins')
|
||||
const configManager = require('../new-config-manager')
|
||||
const {
|
||||
batchGetCustomInfoRequest,
|
||||
getCustomInfoRequests,
|
||||
} = require('../new-admin/services/customInfoRequests')
|
||||
const state = require('../middlewares/state')
|
||||
const { getMachine } = require('../machine-loader')
|
||||
|
||||
const VERSION = require('../../package.json').version
|
||||
|
||||
// Hosts the machine pings to gauge network reachability/latency from
// several regions.
const urlsToPing = [
  `us.archive.ubuntu.com`,
  `uk.archive.ubuntu.com`,
  `za.archive.ubuntu.com`,
  `cn.archive.ubuntu.com`,
]

// Fixed-size file the machine downloads to estimate bandwidth; `size` is
// the expected byte count.
const speedtestFiles = [
  {
    url: 'https://github.com/lamassu/speed-test-assets/raw/main/python-defaults_2.7.18-3.tar.gz',
    size: 44668,
  },
]
|
||||
|
||||
/**
 * Returns a copy of `obj` where every nullish field is filled in from
 * `dflts`. A falsy `obj` (null/undefined) is passed through untouched.
 */
const addDefaults = (dflts, obj) => {
  if (!obj) return obj
  const filled = { ...obj }
  for (const [key, fallback] of Object.entries(dflts)) {
    filled[key] = filled[key] ?? fallback
  }
  return filled
}
|
||||
|
||||
// Curried helper: given a source object `smth`, yields a transform that —
// when `smth.active` — sets `dstField` on its input to the picked
// `srcFields` of `smth` (lodash/fp `_.set` curried with 2 of 3 args);
// otherwise it is the identity transform.
const addSmthInfo = (dstField, srcFields) => smth =>
  smth && smth.active ? _.set(dstField, _.pick(srcFields, smth)) : _.identity

// Attaches operator contact details under `operatorInfo` when active.
const addOperatorInfo = addSmthInfo('operatorInfo', [
  'name',
  'phone',
  'email',
  'website',
  'companyNumber',
])
|
||||
|
||||
// Attaches normalized receipt options under `receiptInfo`, but only when at
// least one delivery channel (paper printing or SMS) is enabled; otherwise
// `ret` is returned unchanged. Missing flags default to false.
const addReceiptInfo = receiptInfo => ret => {
  if (!receiptInfo) return ret

  const fields = [
    'automaticPrint',
    'paper',
    'sms',
    'operatorWebsite',
    'operatorEmail',
    'operatorPhone',
    'companyNumber',
    'machineLocation',
    'customerNameOrPhoneNumber',
    'exchangeRate',
    'addressQRCode',
  ]
  // Every known flag defaults to false unless explicitly set.
  const defaults = _.fromPairs(_.map(field => [field, false], fields))

  receiptInfo = _.flow(
    // `active` doubles as the paper-receipt switch.
    o => _.set('paper', !!o.active, o),
    _.assign(defaults),
    _.pick(fields),
  )(receiptInfo)

  return receiptInfo.paper || receiptInfo.sms
    ? _.set('receiptInfo', receiptInfo, ret)
    : ret
}
|
||||
|
||||
// Transform: rewrites `screenOptions` on its input, keeping only the rates
// screen's `active` flag (when present/active) and injecting the
// environment-driven custom text entries.
const addMachineScreenOpts = smth =>
  _.update(
    'screenOptions',
    _.flow(
      addSmthInfo('rates', ['active'])(smth.rates),
      _.set('customText', getCustomTextEntries()),
    ),
  )
|
||||
|
||||
// Normalizes compliance triggers for the machine: re-adds the legacy
// `requirement` field and inlines full custom-info-request details for
// custom triggers. Resolves to the combined trigger list.
const buildTriggers = allTriggers => {
  // BACKWARDS_COMPATIBILITY 11
  // requirement was renamed to requirementType in v12
  allTriggers = allTriggers.map(t =>
    Object.assign({}, t, { requirement: t.requirementType }),
  )

  const isCustomTrigger = ({ requirementType, customInfoRequestId }) =>
    requirementType === 'custom' &&
    customInfoRequestId &&
    typeof customInfoRequestId === 'string'

  const [customTriggers, normalTriggers] = _.partition(
    isCustomTrigger,
    allTriggers,
  )

  // Pairs by index — relies on the batch lookup preserving input order.
  const addCustomInfoRequestDetails = (customInfoRequest, idx) => {
    const trigger = customTriggers[idx]
    // make sure we aren't attaching the details to the wrong trigger
    return trigger.customInfoRequestId === customInfoRequest.id
      ? [Object.assign({}, trigger, { customInfoRequest })]
      : []
  }

  return batchGetCustomInfoRequest(
    customTriggers.map(({ customInfoRequestId }) => customInfoRequestId),
  ).then(customInfoRequests => {
    // flatMap drops triggers whose details didn't line up (empty arrays).
    const customTriggersWithDetails = customInfoRequests.flatMap(
      addCustomInfoRequestDetails,
    )
    return [...normalTriggers, ...customTriggersWithDetails]
  })
}
|
||||
|
||||
// Builds the `static` half of the machine config (coins, locale, compliance
// triggers, operator/receipt info, screen options, ...). Resolves to null
// when the machine already reports the latest config version.
const staticConfig = ({
  currentConfigVersion,
  deviceId,
  deviceName,
  pq,
  settings,
  machineSettings,
}) => {
  // Strip each poll-query coin down to the fields the machine consumes.
  const massageCoins = _.map(
    _.pick([
      'batchable',
      'cashInCommission',
      'cashInFee',
      'cashOutCommission',
      'cashOutFee',
      'cryptoCode',
      'cryptoCodeDisplay',
      'cryptoNetwork',
      'cryptoUnits',
      'display',
      'minimumTx',
      'isCashInOnly',
    ]),
  )

  const staticConf = _.flow(
    _.pick(['coins', 'timezone', 'screenOptions']),
    _.update('coins', massageCoins),
    _.set('serverVersion', VERSION),
    _.set('configVersion', settings.version),
  )(pq)

  // NOTE: the destructuring below is positional — keep it in sync with this
  // Promise.all array.
  return Promise.all([
    !!configManager.getCompliance(settings.config).enablePaperWalletOnly,
    configManager.getTriggersAutomation(
      getCustomInfoRequests(true),
      settings.config,
    ),
    buildTriggers(machineSettings.complianceTriggers),
    configManager.getWalletSettings('BTC', settings.config).layer2 !==
      'no-layer2',
    configManager.getLocale(deviceId, settings.config),
    configManager.getOperatorInfo(settings.config),
    configManager.getReceipt(settings.config),
    configManager.getAllMachineScreenOpts(settings.config),
    !!configManager.getCashOut(deviceId, settings.config).active,
    getMachine(deviceId, currentConfigVersion),
    configManager.getCustomerAuthenticationMethod(settings.config),
  ]).then(
    ([
      enablePaperWalletOnly,
      triggersAutomation,
      triggers,
      hasLightning,
      localeInfo,
      operatorInfo,
      receiptInfo,
      machineScreenOpts,
      twoWayMode,
      { numberOfCassettes, numberOfRecyclers },
      customerAuthentication,
    ]) =>
      // Skip the payload entirely when the machine is already up to date.
      currentConfigVersion && currentConfigVersion >= staticConf.configVersion
        ? null
        : _.flow(
            _.assign({
              enablePaperWalletOnly,
              triggersAutomation,
              triggers,
              hasLightning,
              localeInfo: {
                country: localeInfo.country,
                languages: localeInfo.languages,
                fiatCode: localeInfo.fiatCurrency,
              },
              machineInfo: {
                deviceId,
                deviceName,
                numberOfCassettes,
                numberOfRecyclers,
              },
              twoWayMode,
              customerAuthentication,
              speedtestFiles,
              urlsToPing,
            }),
            addOperatorInfo(addDefaults({ phone: '' }, operatorInfo)),
            addReceiptInfo(receiptInfo),
            addMachineScreenOpts(machineScreenOpts),
          )(staticConf),
  )
}
|
||||
|
||||
// Stamps the per-coin zeroConfLimit (0 when unconfigured) onto a coin entry.
const setZeroConfLimit = config => coin =>
  _.set(
    'zeroConfLimit',
    configManager.getWalletSettings(coin.cryptoCode, config).zeroConfLimit ?? 0,
    coin,
  )
|
||||
|
||||
// Builds the `dynamic` half of the machine config from the poll-query
// snapshot `pq`: per-coin balances/rates merged by cryptoCode, cash-unit
// counts, and one-shot command flags (reboot/shutdown/...) matched against
// the machine's reported pid. Side effects: records the machine's pid in
// `state.pids` and clears consumed one-shot commands from the state maps.
const dynamicConfig = ({ deviceId, operatorId, pid, pq, settings }) => {
  // { cassettes, virtualCassettes, ... } -> { physical, virtual, ... }
  const massageCassettes = cassettes =>
    cassettes
      ? _.flow(
          cassettes =>
            _.set('physical', _.get('cassettes', cassettes), cassettes),
          cassettes =>
            _.set('virtual', _.get('virtualCassettes', cassettes), cassettes),
          _.unset('cassettes'),
          _.unset('virtualCassettes'),
        )(cassettes)
      : null

  // Same renaming for recyclers.
  const massageRecyclers = recyclers =>
    recyclers
      ? _.flow(
          recyclers =>
            _.set('physical', _.get('recyclers', recyclers), recyclers),
          recyclers =>
            _.set('virtual', _.get('virtualRecyclers', recyclers), recyclers),
          _.unset('recyclers'),
          _.unset('virtualRecyclers'),
        )(recyclers)
      : null

  // Track the machine's current process id (with timestamp) per operator.
  state.pids = _.update(
    operatorId,
    _.set(deviceId, { pid, ts: Date.now() }),
    state.pids,
  )

  const res = _.flow(
    _.pick([
      'areThereAvailablePromoCodes',
      'balances',
      'cassettes',
      'recyclers',
      'coins',
      'rates',
    ]),

    _.update('cassettes', massageCassettes),

    _.update('recyclers', massageRecyclers),

    /* [{ cryptoCode, rates }, ...] => [[cryptoCode, rates], ...] */
    _.update(
      'coins',
      _.map(({ cryptoCode, rates }) => [cryptoCode, rates]),
    ),

    /* [{ cryptoCode: balance }, ...] => [[cryptoCode, { balance }], ...] */
    _.update(
      'balances',
      _.flow(
        _.toPairs,
        _.map(([cryptoCode, balance]) => [cryptoCode, { balance }]),
      ),
    ),

    /* Group the separate objects by cryptoCode */
    /* { balances, coins, rates } => { cryptoCode: { balance, ask, bid, cashIn, cashOut }, ... } */
    ({
      areThereAvailablePromoCodes,
      balances,
      cassettes,
      recyclers,
      coins,
      rates,
    }) => ({
      areThereAvailablePromoCodes,
      cassettes,
      recyclers,
      coins: _.flow(
        _.reduce(
          (ret, [cryptoCode, obj]) => _.update(cryptoCode, _.assign(obj), ret),
          rates,
        ),

        /* { cryptoCode: { balance, ask, bid, cashIn, cashOut }, ... } => [[cryptoCode, { balance, ask, bid, cashIn, cashOut }], ...] */
        _.toPairs,

        /* [[cryptoCode, { balance, ask, bid, cashIn, cashOut }], ...] => [{ cryptoCode, balance, ask, bid, cashIn, cashOut }, ...] */
        _.map(([cryptoCode, obj]) => _.set('cryptoCode', cryptoCode, obj)),

        /* Only send coins which have all information needed by the machine. This prevents the machine going down if there's an issue with the coin node */
        _.filter(coin =>
          ['ask', 'bid', 'balance', 'cashIn', 'cashOut', 'cryptoCode'].every(
            it => it in coin,
          ),
        ),
      )(_.concat(balances, coins)),
    }),

    _.update('coins', _.map(setZeroConfLimit(settings.config))),
    _.set('skip2fa', skip2fa),
    // One-shot command flags: true only when the pending command was issued
    // against the machine's currently reported pid.
    _.set('reboot', !!pid && state.reboots?.[operatorId]?.[deviceId] === pid),
    _.set(
      'shutdown',
      !!pid && state.shutdowns?.[operatorId]?.[deviceId] === pid,
    ),
    _.set(
      'restartServices',
      !!pid && state.restartServicesMap?.[operatorId]?.[deviceId] === pid,
    ),
    _.set(
      'emptyUnit',
      !!pid && state.emptyUnit?.[operatorId]?.[deviceId] === pid,
    ),
    _.set(
      'refillUnit',
      !!pid && state.refillUnit?.[operatorId]?.[deviceId] === pid,
    ),
    _.set(
      'diagnostics',
      !!pid && state.diagnostics?.[operatorId]?.[deviceId] === pid,
    ),
  )(pq)

  // Clean up the state middleware and prevent commands from being issued more than once
  if (!_.isNil(state.emptyUnit?.[operatorId]?.[deviceId])) {
    delete state.emptyUnit?.[operatorId]?.[deviceId]
  }

  if (!_.isNil(state.refillUnit?.[operatorId]?.[deviceId])) {
    delete state.refillUnit?.[operatorId]?.[deviceId]
  }

  if (!_.isNil(state.diagnostics?.[operatorId]?.[deviceId])) {
    delete state.diagnostics?.[operatorId]?.[deviceId]
  }

  return res
}
|
||||
|
||||
// GraphQL Query.configs resolver: runs the poll queries once and derives
// both the static and dynamic halves from the same snapshot.
const configs = (
  parent,
  { currentConfigVersion },
  { deviceId, deviceName, operatorId, pid, settings, machineSettings },
) =>
  plugins(settings, deviceId)
    .pollQueries()
    .then(pq => ({
      static: staticConfig({
        currentConfigVersion,
        deviceId,
        deviceName,
        pq,
        settings,
        machineSettings,
      }),
      dynamic: dynamicConfig({
        deviceId,
        operatorId,
        pid,
        pq,
        settings,
      }),
    }))
|
||||
|
||||
/**
 * GraphQL Query.machineSettings resolver. Returns null when the machine's
 * reported settings version is current; otherwise returns the cached
 * settings with compliance triggers rebuilt for the machine.
 */
const machineSettings = (
  parent,
  { currentSettingsVersion },
  { deviceId, machineSettings },
) => {
  if (!machineSettings)
    throw new Error(`No cached settings found for machine ${deviceId}`)

  const requestedVersion = parseInt(currentSettingsVersion, 10)
  const upToDate =
    !isNaN(requestedVersion) &&
    requestedVersion >= machineSettings.settingsVersion
  if (upToDate) return null

  return {
    ...machineSettings,
    complianceTriggers: buildTriggers(machineSettings.complianceTriggers),
  }
}
|
||||
|
||||
/**
 * Shapes configured T&C for the machine, rendering the body markdown via
 * nano-markdown. Returns null when terms are inactive or have no text.
 */
const massageTerms = terms => {
  if (!(terms.active && terms.text)) return null

  return {
    tcPhoto: Boolean(terms.tcPhoto),
    delay: Boolean(terms.delay),
    title: terms.title,
    text: nmd(terms.text),
    accept: terms.acceptButtonText,
    cancel: terms.cancelButtonText,
  }
}
|
||||
|
||||
/*
|
||||
* The type of the result of `configManager.getTermsConditions()` is more or
|
||||
* less `Maybe (Maybe Hash, Maybe TC)`. Each case has a specific meaning to the
|
||||
* machine:
|
||||
*
|
||||
* Nothing => Nothing
|
||||
* There are no T&C or they've been removed/disabled.
|
||||
*
|
||||
* Just (Nothing, _) => Nothing
|
||||
* Shouldn't happen! Treated as if there were no T&C.
|
||||
*
|
||||
* Just (Just hash, Nothing) => Nothing
|
||||
* May happen (after `massageTerms`) if T&C are disabled.
|
||||
*
|
||||
* Just (Just hash, Just tc) => Just (hash, Nothing, Nothing)
|
||||
* If both the `hash` and the `configVersion` are the same as `currentHash`
|
||||
* and `currentConfigVersion`, respectively, then there's no need to send
|
||||
* `text` nor `details`.
|
||||
*
|
||||
* Just (Just hash, Just tc) => Just (hash, Nothing, Just details)
|
||||
* If `configVersion` differs from `currentConfigVersion` but the `hash` is
|
||||
* the same, then only the details have to be updated.
|
||||
*
|
||||
* Just (Just hash, Just tc) => Just (hash, Just text, Just details)
|
||||
* If the `hash` differs from `currentHash` then everything is resent (to
|
||||
* simplify machine implementation).
|
||||
*/
|
||||
// Query.terms resolver — see the case-by-case mapping in the comment above.
const terms = (parent, { currentConfigVersion, currentHash }, { settings }) => {
  const isNone = x => _.isNil(x) || _.isEmpty(x)

  const configured = configManager.getTermsConditions(settings.config)
  if (isNone(configured)) return null

  const { hash } = configured
  if (!_.isString(hash)) return null

  const massaged = massageTerms(configured)
  if (isNone(massaged)) return null

  const hashChanged = hash !== currentHash
  const versionChanged =
    hashChanged ||
    _.isNil(currentConfigVersion) ||
    currentConfigVersion < settings.version

  return {
    hash,
    details: versionChanged ? _.omit(['text'], massaged) : null,
    text: hashChanged ? massaged.text : null,
  }
}
|
||||
|
||||
// GraphQL resolver map — all machine-facing queries.
module.exports = {
  Query: {
    configs,
    machineSettings,
    terms,
  },
}
|
||||
29
packages/server/lib/graphql/server.js
Normal file
29
packages/server/lib/graphql/server.js
Normal file
|
|
@ -0,0 +1,29 @@
|
|||
const logger = require('../logger')
|
||||
|
||||
const { ApolloServer } = require('@apollo/server')
|
||||
|
||||
// --dev CLI flag: enables stack traces in GraphQL error responses.
const devMode = !!require('minimist')(process.argv.slice(2)).dev
|
||||
|
||||
/**
 * Builds the per-request GraphQL context from values the middleware chain
 * attached to the request/response (see inline references).
 */
const context = ({ req, res }) => {
  const {
    deviceId /* lib/middlewares/populateDeviceId.js */,
    deviceName /* lib/middlewares/authorize.js */,
    settings /* lib/middlewares/populateSettings.js */,
    machineSettings /* lib/middlewares/populateMachineSettings.js */,
  } = req

  return {
    deviceId,
    deviceName,
    operatorId: res.locals.operatorId /* lib/middlewares/operatorId.js */,
    pid: req.query.pid,
    settings,
    machineSettings,
  }
}
|
||||
|
||||
// Apollo server for the machine-facing GraphQL API. Introspection is off in
// all environments; every resolver error is logged before being returned.
const graphQLServer = new ApolloServer({
  typeDefs: require('./types'),
  resolvers: require('./resolvers'),
  introspection: false,
  formatError: error => {
    logger.error(error)
    return error
  },
  // Only leak stack traces to clients when running with --dev.
  includeStacktraceInErrorResponses: devMode,
  logger,
})

module.exports = { graphQLServer, context }
|
||||
250
packages/server/lib/graphql/types.js
Normal file
250
packages/server/lib/graphql/types.js
Normal file
|
|
@ -0,0 +1,250 @@
|
|||
const gql = require('graphql-tag')
|
||||
|
||||
module.exports = gql`
|
||||
type Coin {
|
||||
cryptoCode: String!
|
||||
cryptoCodeDisplay: String!
|
||||
display: String!
|
||||
minimumTx: String!
|
||||
cashInFee: String!
|
||||
cashOutFee: String!
|
||||
cashInCommission: String!
|
||||
cashOutCommission: String!
|
||||
cryptoNetwork: String!
|
||||
cryptoUnits: String!
|
||||
batchable: Boolean!
|
||||
isCashInOnly: Boolean!
|
||||
}
|
||||
|
||||
type LocaleInfo {
|
||||
country: String!
|
||||
fiatCode: String!
|
||||
languages: [String!]!
|
||||
}
|
||||
|
||||
type OperatorInfo {
|
||||
name: String!
|
||||
phone: String!
|
||||
email: String!
|
||||
website: String!
|
||||
companyNumber: String!
|
||||
}
|
||||
|
||||
type MachineInfo {
|
||||
deviceId: String! @deprecated(reason: "unused by the machine")
|
||||
deviceName: String
|
||||
numberOfCassettes: Int
|
||||
numberOfRecyclers: Int
|
||||
}
|
||||
|
||||
type ReceiptInfo {
|
||||
paper: Boolean!
|
||||
automaticPrint: Boolean!
|
||||
sms: Boolean!
|
||||
operatorWebsite: Boolean!
|
||||
operatorEmail: Boolean!
|
||||
operatorPhone: Boolean!
|
||||
companyNumber: Boolean!
|
||||
machineLocation: Boolean!
|
||||
customerNameOrPhoneNumber: Boolean!
|
||||
exchangeRate: Boolean!
|
||||
addressQRCode: Boolean!
|
||||
}
|
||||
|
||||
type MachineScreenOptions {
|
||||
rates: RateScreenOptions!
|
||||
customText: [CustomText!]!
|
||||
}
|
||||
|
||||
type RateScreenOptions {
|
||||
active: Boolean!
|
||||
}
|
||||
|
||||
type SpeedtestFile {
|
||||
url: String!
|
||||
size: Int!
|
||||
}
|
||||
|
||||
enum TriggerAutomationType {
|
||||
Automatic
|
||||
Manual
|
||||
}
|
||||
|
||||
type CustomTriggersAutomation {
|
||||
id: ID!
|
||||
type: TriggerAutomationType!
|
||||
}
|
||||
|
||||
type TriggersAutomation {
|
||||
sanctions: TriggerAutomationType!
|
||||
idCardPhoto: TriggerAutomationType!
|
||||
idCardData: TriggerAutomationType!
|
||||
facephoto: TriggerAutomationType!
|
||||
usSsn: TriggerAutomationType!
|
||||
custom: [CustomTriggersAutomation]!
|
||||
}
|
||||
|
||||
type CustomScreen {
|
||||
text: String!
|
||||
title: String!
|
||||
}
|
||||
|
||||
type CustomInput {
|
||||
type: String!
|
||||
constraintType: String!
|
||||
label1: String
|
||||
label2: String
|
||||
choiceList: [String]
|
||||
}
|
||||
|
||||
type CustomRequest {
|
||||
name: String!
|
||||
input: CustomInput!
|
||||
disablePermissionScreen: Boolean!
|
||||
screen1: CustomScreen!
|
||||
screen2: CustomScreen!
|
||||
}
|
||||
|
||||
type CustomInfoRequest {
|
||||
id: String!
|
||||
enabled: Boolean!
|
||||
customRequest: CustomRequest!
|
||||
}
|
||||
|
||||
type CustomText {
|
||||
id: String!
|
||||
text: String!
|
||||
}
|
||||
|
||||
type Trigger {
|
||||
id: String!
|
||||
direction: String!
|
||||
requirement: String! @deprecated(reason: "use requirementType")
|
||||
requirementType: String!
|
||||
triggerType: String!
|
||||
|
||||
suspensionDays: Float
|
||||
threshold: Int
|
||||
thresholdDays: Int
|
||||
customInfoRequestId: String @deprecated(reason: "use customInfoRequest.id")
|
||||
customInfoRequest: CustomInfoRequest
|
||||
externalService: String
|
||||
}
|
||||
|
||||
type TermsDetails {
|
||||
tcPhoto: Boolean!
|
||||
delay: Boolean!
|
||||
title: String!
|
||||
accept: String!
|
||||
cancel: String!
|
||||
}
|
||||
|
||||
type Terms {
|
||||
hash: String!
|
||||
text: String
|
||||
details: TermsDetails
|
||||
}
|
||||
|
||||
enum CustomerAuthentication {
|
||||
EMAIL
|
||||
SMS
|
||||
}
|
||||
|
||||
type StaticConfig {
|
||||
configVersion: Int!
|
||||
|
||||
coins: [Coin!]!
|
||||
enablePaperWalletOnly: Boolean!
|
||||
hasLightning: Boolean!
|
||||
serverVersion: String!
|
||||
timezone: Int!
|
||||
twoWayMode: Boolean!
|
||||
customerAuthentication: CustomerAuthentication!
|
||||
|
||||
localeInfo: LocaleInfo!
|
||||
operatorInfo: OperatorInfo
|
||||
machineInfo: MachineInfo!
|
||||
receiptInfo: ReceiptInfo
|
||||
screenOptions: MachineScreenOptions
|
||||
|
||||
speedtestFiles: [SpeedtestFile!]!
|
||||
urlsToPing: [String!]!
|
||||
|
||||
triggersAutomation: TriggersAutomation!
|
||||
triggers: [Trigger!]!
|
||||
@deprecated(reason: "moved to machineSettings.complianceTriggers")
|
||||
}
|
||||
|
||||
type MachineSettings {
|
||||
settingsVersion: String!
|
||||
complianceTriggers: [Trigger!]!
|
||||
}
|
||||
|
||||
type DynamicCoinValues {
|
||||
# NOTE: Doesn't seem to be used anywhere outside of lib/plugins.js.
|
||||
# However, it can be used to generate the cache key, if we ever move to an
|
||||
# actual caching mechanism.
|
||||
#timestamp: String!
|
||||
|
||||
cryptoCode: String!
|
||||
balance: String!
|
||||
|
||||
# Raw rates
|
||||
ask: String!
|
||||
bid: String!
|
||||
|
||||
# Rates with commissions applied
|
||||
cashIn: String!
|
||||
cashOut: String!
|
||||
|
||||
zeroConfLimit: Int!
|
||||
}
|
||||
|
||||
type PhysicalCassette {
|
||||
name: String!
|
||||
denomination: Int!
|
||||
count: Int!
|
||||
}
|
||||
|
||||
type PhysicalRecycler {
|
||||
name: String!
|
||||
number: Int!
|
||||
denomination: Int!
|
||||
count: Int!
|
||||
}
|
||||
|
||||
type Cassettes {
|
||||
physical: [PhysicalCassette!]!
|
||||
virtual: [Int!]!
|
||||
}
|
||||
|
||||
type Recyclers {
|
||||
physical: [PhysicalRecycler!]!
|
||||
virtual: [Int!]!
|
||||
}
|
||||
|
||||
type DynamicConfig {
|
||||
areThereAvailablePromoCodes: Boolean!
|
||||
cassettes: Cassettes
|
||||
recyclers: Recyclers
|
||||
coins: [DynamicCoinValues!]!
|
||||
reboot: Boolean!
|
||||
shutdown: Boolean!
|
||||
restartServices: Boolean!
|
||||
emptyUnit: Boolean!
|
||||
refillUnit: Boolean!
|
||||
diagnostics: Boolean!
|
||||
skip2fa: Boolean!
|
||||
}
|
||||
|
||||
type Configs {
|
||||
static: StaticConfig
|
||||
dynamic: DynamicConfig!
|
||||
}
|
||||
|
||||
type Query {
|
||||
configs(currentConfigVersion: Int): Configs!
|
||||
machineSettings(currentSettingsVersion: String): MachineSettings
|
||||
terms(currentHash: String, currentConfigVersion: Int): Terms
|
||||
}
|
||||
`
|
||||
36
packages/server/lib/hardware-credentials.js
Normal file
36
packages/server/lib/hardware-credentials.js
Normal file
|
|
@ -0,0 +1,36 @@
|
|||
const uuid = require('uuid')
|
||||
|
||||
const db = require('./db')
|
||||
|
||||
/** Inserts a new hardware credential row (fresh UUID) for the given user. */
const createHardwareCredential = (userID, credentialData) =>
  db.none(
    `INSERT INTO hardware_credentials (id, user_id, data) VALUES ($1, $2, $3)`,
    [uuid.v4(), userID, credentialData],
  )
|
||||
|
||||
/** Fetches every stored hardware credential row. */
const getHardwareCredentials = () =>
  db.any(`SELECT * FROM hardware_credentials`)
|
||||
|
||||
/** Fetches all hardware credentials registered to a single user. */
const getHardwareCredentialsByUserId = userID =>
  db.any(`SELECT * FROM hardware_credentials WHERE user_id=$1`, [userID])
|
||||
|
||||
// Looks up the user owning the credential whose stored `userHandle` matches.
// NOTE(review): the `$1::jsonb::text` cast appears intended to normalize the
// parameter against the jsonb-extracted text on the left — confirm against
// how callers encode userHandle before changing it.
function getUserByUserHandle(userHandle) {
  const sql = `SELECT users.id, users.username, users.role FROM users INNER JOIN hardware_credentials hc ON users.id=hc.user_id WHERE data->>'userHandle'=$1::jsonb::text`
  return db.oneOrNone(sql, [userHandle])
}
|
||||
|
||||
/** Persists updated credential data and bumps its last_used timestamp. */
const updateHardwareCredential = credential =>
  db.none(
    `UPDATE hardware_credentials SET last_used=now(), data=$1 WHERE id=$2`,
    [credential.data, credential.id],
  )
|
||||
|
||||
// CRUD surface for hardware (security-key) credentials.
module.exports = {
  createHardwareCredential,
  getHardwareCredentials,
  getHardwareCredentialsByUserId,
  getUserByUserHandle,
  updateHardwareCredential,
}
|
||||
62
packages/server/lib/layer2.js
Normal file
62
packages/server/lib/layer2.js
Normal file
|
|
@ -0,0 +1,62 @@
|
|||
const configManager = require('./new-config-manager')
|
||||
const ph = require('./plugin-helper')
|
||||
const _ = require('lodash/fp')
|
||||
|
||||
/**
 * Resolves { layer2, account } for the crypto's configured layer-2 plugin,
 * or undefined when none (or 'no-layer2') is configured.
 */
function fetch(settings, cryptoCode) {
  const plugin = configManager.getWalletSettings(
    cryptoCode,
    settings.config,
  ).layer2

  if (_.isEmpty(plugin) || plugin === 'no-layer2') return Promise.resolve()

  return Promise.resolve({
    layer2: ph.load(ph.LAYER2, plugin),
    account: settings.accounts[plugin],
  })
}
|
||||
|
||||
/** Requests a fresh layer-2 address; resolves undefined when layer 2 is off. */
function newAddress(settings, info) {
  return fetch(settings, info.cryptoCode).then(r =>
    r ? r.layer2.newAddress(r.account, info) : undefined,
  )
}
|
||||
|
||||
/**
 * Polls the layer-2 plugin for the status of a tx's layer-2 address.
 * Transactions without one (or with layer 2 disabled) report 'notSeen'.
 */
function getStatus(settings, tx) {
  const toAddress = tx.layer2Address
  if (!toAddress) return Promise.resolve({ status: 'notSeen' })

  return fetch(settings, tx.cryptoCode).then(r =>
    r
      ? r.layer2.getStatus(r.account, toAddress, tx.cryptoAtoms, tx.cryptoCode)
      : { status: 'notSeen' },
  )
}
|
||||
|
||||
/**
 * Asks the layer-2 plugin which network it is on.
 * Resolves false when no plugin is configured or the plugin does not expose
 * cryptoNetwork.
 */
function cryptoNetwork(settings, cryptoCode) {
  const plugin = configManager.getWalletSettings(
    cryptoCode,
    settings.config,
  ).layer2

  // Guard like fetch() does: previously an empty or 'no-layer2' plugin name
  // was handed straight to ph.load.
  if (_.isEmpty(plugin) || plugin === 'no-layer2') return Promise.resolve(false)

  const layer2 = ph.load(ph.LAYER2, plugin)
  const account = settings.accounts[plugin]

  if (!layer2.cryptoNetwork) return Promise.resolve(false)
  return layer2.cryptoNetwork(account, cryptoCode)
}
|
||||
|
||||
// A layer2 address is colon-delimited (at least two segments),
// e.g. "<onchain-address>:<lightning-part>".
function isLayer2Address(address) {
  return address.includes(':')
}
|
||||
|
||||
// Public layer2 facade used by wallet/transaction code.
module.exports = {
  isLayer2Address,
  newAddress,
  getStatus,
  cryptoNetwork,
}
|
||||
48
packages/server/lib/logger.js
Normal file
48
packages/server/lib/logger.js
Normal file
|
|
@ -0,0 +1,48 @@
|
|||
const winston = require('winston')
|
||||
const Postgres = require('./pg-transport')
|
||||
const { PSQL_URL } = require('./constants')
|
||||
|
||||
// Log verbosity, injected via the environment (see .sample.env).
const LOG_LEVEL = process.env.LOG_LEVEL

// winston 2.x-style logger (`new winston.Logger`): writes to the console and
// to the `server_logs` Postgres table via the custom pg transport.
const logger = new winston.Logger({
  level: LOG_LEVEL,
  transports: [
    new winston.transports.Console({
      timestamp: true,
      colorize: true,
      handleExceptions: true,
      humanReadableUnhandledException: true,
    }),
    new Postgres({
      connectionString: PSQL_URL,
      tableName: 'server_logs',
      handleExceptions: true,
      humanReadableUnhandledException: true,
    }),
  ],
  rewriters: [
    // Rewriter receives (level, msg, meta); only meta is used here.
    (...[, , meta]) => {
      // Axios errors carry large circular structures; keep only the useful bits.
      if (meta.isAxiosError) {
        return {
          message: meta.message,
          status: meta.response?.status,
          data: meta.response?.data,
          url: meta.config?.url,
          method: meta.config?.method,
        }
      }
      // Error properties are non-enumerable; lift message/stack explicitly.
      return meta instanceof Error
        ? { message: meta.message, stack: meta.stack, meta }
        : meta
    },
  ],
  exitOnError: false,
})

// Stream adapter so HTTP request-logging middleware can pipe into this logger.
logger.stream = {
  write: message => {
    logger.info(message.trim())
  },
}

module.exports = logger
|
||||
164
packages/server/lib/logs.js
Normal file
164
packages/server/lib/logs.js
Normal file
|
|
@ -0,0 +1,164 @@
|
|||
const _ = require('lodash/fp')
|
||||
const { format, isValid } = require('date-fns/fp')
|
||||
const { utcToZonedTime } = require('date-fns-tz/fp')
|
||||
|
||||
const db = require('./db')
|
||||
const logger = require('./logger')
|
||||
const pgp = require('pg-promise')()
|
||||
|
||||
const getMachineName = require('./machine-loader').getMachineName
|
||||
|
||||
/**
 * Get the newest log entry recorded for a machine.
 *
 * @param {string} deviceId Machine id to look up
 * @returns {Promise<?{id, timestamp, serial}>} newest entry, or null when
 *   the machine has no logs yet
 */
function getLastSeen(deviceId) {
  const sql = `select id, timestamp, serial from logs
    where device_id=$1
    order by timestamp desc, serial desc limit 1`
  return db.oneOrNone(sql, [deviceId]).then(log => {
    if (!log) return null
    return { timestamp: log.timestamp, serial: log.serial, id: log.id }
  })
}
|
||||
|
||||
/**
 * Persist a batch of machine log lines.
 *
 * @param {string} deviceId Machine id the logs belong to
 * @param {array} logLines Log entries ({id, msg, timestamp, serial}) to save
 * @returns {Promise<null>}
 */
function update(deviceId, logLines) {
  // FIX: pgp.helpers.insert throws on an empty data set; nothing to do anyway.
  if (_.isEmpty(logLines)) return Promise.resolve()

  const cs = new pgp.helpers.ColumnSet(
    ['id', 'device_id', 'log_level', 'timestamp', 'serial', 'message'],
    { table: 'logs' },
  )

  const logs = _.map(log => {
    const formatted = {
      id: log.id,
      deviceId: deviceId,
      message: log.msg,
      // FIX: lodash v4 removed `_.contains` (TypeError at runtime in
      // lodash/fp); `_.includes(value, collection)` is the fp equivalent.
      logLevel: _.includes('error', _.lowerCase(log.msg)) ? 'error' : 'info',
      timestamp: log.timestamp,
      serial: log.serial || 0,
    }
    return _.mapKeys(_.snakeCase, formatted)
  }, logLines)
  const sql = pgp.helpers.insert(logs, cs) + 'on conflict do nothing'

  return db.none(sql)
}
|
||||
|
||||
// Purge machine and server logs older than three days.
function clearOldLogs() {
  const cleanupSql = `delete from logs
    where timestamp < now() - interval '3 days';
    delete from server_logs
    where timestamp < now() - interval '3 days';`
  return db.multi(cleanupSql)
}
|
||||
|
||||
// Fetch up to two days of logs for a machine, ending at `until` (inclusive),
// newest first, along with the machine's display name.
function getUnlimitedMachineLogs(deviceId, until = new Date().toISOString()) {
  // `timestamp` appears both as a column name and as the SQL keyword
  // introducing the interval anchor; Postgres disambiguates by position.
  const sql = `select id, log_level, timestamp, message from logs
    where device_id=$1
    and timestamp <= $2
    and timestamp > (timestamp $2 - interval '2 days')
    order by timestamp desc, serial desc`

  const logsP = db.any(sql, [deviceId, until])
  const nameP = getMachineName(deviceId)
  return Promise.all([logsP, nameP]).then(([rows, machineName]) => ({
    logs: _.map(_.mapKeys(_.camelCase), rows),
    currentMachine: { deviceId, name: machineName },
  }))
}
|
||||
|
||||
// Paginated machine log fetch (newest first), ending at `until` (inclusive),
// along with the machine's display name. `limit` null means no limit.
function getMachineLogs(
  deviceId,
  until = new Date().toISOString(),
  limit = null,
  offset = 0,
) {
  const sql = `select id, log_level, timestamp, message from logs
    where device_id=$1
    and timestamp <= $2
    order by timestamp desc, serial desc
    limit $3
    offset $4`

  const logsP = db.any(sql, [deviceId, until, limit, offset])
  const nameP = getMachineName(deviceId)
  return Promise.all([logsP, nameP]).then(([rows, machineName]) => ({
    logs: _.map(_.mapKeys(_.camelCase), rows),
    currentMachine: { deviceId, name: machineName },
  }))
}
|
||||
|
||||
// Paginated machine log fetch bounded by [from, until], newest first.
// Returns just the camelCased rows (no machine name lookup).
function simpleGetMachineLogs(
  deviceId,
  from = new Date(0).toISOString(),
  until = new Date().toISOString(),
  limit = null,
  offset = 0,
) {
  const sql = `select id, log_level, timestamp, message from logs
    where device_id=$1
    and timestamp >= $2
    and timestamp <= $3
    order by timestamp desc, serial desc
    limit $4
    offset $5`

  return db
    .any(sql, [deviceId, from, until, limit, offset])
    .then(_.map(_.mapKeys(_.camelCase)))
}
|
||||
|
||||
// Convert the given timestamp `fields` of each log entry from UTC into the
// operator's `timezone`, formatted as "yyyy-MM-ddTHH:mm:ss.SSS".
// Nil fields become null; invalid dates are passed through with a warning.
function logDateFormat(timezone, logs, fields) {
  const convert = value => {
    const zoned = utcToZonedTime(timezone, value)
    return `${format('yyyy-MM-dd', zoned)}T${format('HH:mm:ss.SSS', zoned)}`
  }

  return _.map(log => {
    const converted = _.map(field => {
      if (_.isNil(log[field])) return null
      if (!isValid(log[field])) {
        logger.warn(
          `Tried to convert to ${timezone} timezone the value ${log[field]} and failed. Returning original value...`,
        )
        return log[field]
      }
      return convert(log[field])
    }, fields)
    return { ...log, ..._.zipObject(fields, converted) }
  }, logs)
}
|
||||
|
||||
// Machine/server log persistence and retrieval API.
module.exports = {
  getUnlimitedMachineLogs,
  getMachineLogs,
  simpleGetMachineLogs,
  update,
  getLastSeen,
  clearOldLogs,
  logDateFormat,
}
|
||||
78
packages/server/lib/loyalty.js
Normal file
78
packages/server/lib/loyalty.js
Normal file
|
|
@ -0,0 +1,78 @@
|
|||
const db = require('./db')
|
||||
const uuid = require('uuid')
|
||||
const _ = require('lodash/fp')
|
||||
|
||||
// List all promo codes that have not been soft-deleted.
function getAvailablePromoCodes() {
  return db.any(`SELECT * FROM coupons WHERE soft_deleted=false`)
}
|
||||
|
||||
// Look up one active promo code by its code string; resolves null if absent.
function getPromoCode(code) {
  return db.oneOrNone(
    `SELECT * FROM coupons WHERE code=$1 AND soft_deleted=false`,
    [code],
  )
}
|
||||
|
||||
// Create a promo code with the given discount; resolves to the new row.
function createPromoCode(code, discount) {
  const insertSql = `INSERT INTO coupons (id, code, discount) VALUES ($1, $2, $3) RETURNING *`
  const params = [uuid.v4(), code, discount]
  return db.one(insertSql, params)
}
|
||||
|
||||
// Soft-delete a promo code by id (row is retained for history).
function deletePromoCode(id) {
  return db.none(`UPDATE coupons SET soft_deleted=true WHERE id=$1`, [id])
}
|
||||
|
||||
// Count active promo codes. NOTE(review): pg returns COUNT as a string —
// callers appear to rely on that; verify before coercing to Number.
function getNumberOfAvailablePromoCodes() {
  const sql = `SELECT COUNT(id) FROM coupons WHERE soft_deleted=false`
  return db.one(sql).then(({ count }) => count)
}
|
||||
|
||||
// List all active per-customer discounts, camelCasing the relevant columns.
function getAvailableIndividualDiscounts() {
  const toRecord = row => ({
    id: row.id,
    customerId: row.customer_id,
    discount: row.discount,
  })
  return db
    .any(`SELECT * FROM individual_discounts WHERE soft_deleted=false`)
    .then(_.map(toRecord))
}
|
||||
|
||||
// Fetch the customer's active individual discount, or null when none exists.
function getCustomerActiveIndividualDiscount(customerId) {
  const sql = `SELECT * FROM individual_discounts WHERE customer_id=$1 AND soft_deleted=false LIMIT 1`
  return db.oneOrNone(sql, [customerId]).then(row =>
    _.isNil(row)
      ? row
      : { id: row.id, customerId: row.customer_id, discount: row.discount },
  )
}
|
||||
|
||||
// Create a per-customer discount record.
function createIndividualDiscount(customerId, discount) {
  const insertSql = `INSERT INTO individual_discounts (id, customer_id, discount) VALUES ($1, $2, $3)`
  return db.none(insertSql, [uuid.v4(), customerId, discount])
}
|
||||
|
||||
// Soft-delete an individual discount by id.
function deleteIndividualDiscount(id) {
  return db.none(`UPDATE individual_discounts SET soft_deleted=true WHERE id=$1`, [id])
}
|
||||
|
||||
// Loyalty API: promo codes and per-customer discounts.
module.exports = {
  getAvailablePromoCodes,
  getPromoCode,
  createPromoCode,
  deletePromoCode,
  getNumberOfAvailablePromoCodes,
  getAvailableIndividualDiscounts,
  getCustomerActiveIndividualDiscount,
  createIndividualDiscount,
  deleteIndividualDiscount,
}
|
||||
839
packages/server/lib/machine-loader.js
Normal file
839
packages/server/lib/machine-loader.js
Normal file
|
|
@ -0,0 +1,839 @@
|
|||
const fsPromises = require('fs').promises
|
||||
const path = require('path')
|
||||
const _ = require('lodash/fp')
|
||||
const pgp = require('pg-promise')()
|
||||
const uuid = require('uuid')
|
||||
const makeDir = require('make-dir')
|
||||
|
||||
const batching = require('./cashbox-batches')
|
||||
const db = require('./db')
|
||||
const pairing = require('./pairing')
|
||||
const { checkPings, checkStuckScreen } = require('./notifier')
|
||||
const dbm = require('./postgresql_interface')
|
||||
const configManager = require('./new-config-manager')
|
||||
const notifierUtils = require('./notifier/utils')
|
||||
const notifierQueries = require('./notifier/queries')
|
||||
const { GraphQLError } = require('graphql')
|
||||
const { loadConfig } = require('./new-settings-loader')
|
||||
const logger = require('./logger')
|
||||
const {
|
||||
machines: { assignMachinesToGroup },
|
||||
} = require('typesafe-db')
|
||||
const T = require('./time')
|
||||
|
||||
// Machine health statuses surfaced to the admin UI (label + severity type).
const fullyFunctionalStatus = { label: 'Fully functional', type: 'success' }
const unresponsiveStatus = { label: 'Unresponsive', type: 'error' }
const stuckOnBootStatus = { label: 'Stuck booting up', type: 'error' }
const stuckStatus = { label: 'Stuck', type: 'error' }
const bootingUpStatus = { label: 'Booting up', type: 'warning' }
// Root directory for per-operator files; diagnostics photos live under it.
const OPERATOR_DATA_DIR = process.env.OPERATOR_DATA_DIR
|
||||
|
||||
const MACHINE_WITH_CALCULATED_FIELD_SQL = `
|
||||
select d.*, mg.name as machine_group_name, COALESCE(emptybills, 0) + COALESCE(regularbills, 0) as cashbox from devices d
|
||||
left join machine_groups mg on d.machine_group_id = mg.id
|
||||
left join (
|
||||
select count(*) as emptyBills, eub.device_id
|
||||
from empty_unit_bills eub
|
||||
where eub.cashbox_batch_id is null
|
||||
group by eub.device_id
|
||||
) as nebills on nebills.device_id = d.device_id
|
||||
left join (
|
||||
select count(*) as regularBills, cit.device_id from bills b
|
||||
left join cash_in_txs cit on b.cash_in_txs_id = cit.id
|
||||
where b.cashbox_batch_id is null and b.destination_unit = 'cashbox'
|
||||
group by cit.device_id
|
||||
) as nbills on nbills.device_id = d.device_id`
|
||||
|
||||
// Map a raw devices row (snake_case columns) into the camelCase machine
// object used throughout the server.
function toMachineObject(r) {
  // Nullable timestamp column -> Date or null.
  const toDate = ts => (ts ? new Date(ts) : null)

  return {
    deviceId: r.device_id,
    cashUnits: {
      cashbox: r.cashbox,
      cassette1: r.cassette1,
      cassette2: r.cassette2,
      cassette3: r.cassette3,
      cassette4: r.cassette4,
      recycler1: r.recycler1,
      recycler2: r.recycler2,
      recycler3: r.recycler3,
      recycler4: r.recycler4,
      recycler5: r.recycler5,
      recycler6: r.recycler6,
    },
    numberOfCassettes: r.number_of_cassettes,
    numberOfRecyclers: r.number_of_recyclers,
    version: r.version,
    model: r.model,
    diagnostics: {
      timestamp: toDate(r.diagnostics_timestamp),
      scanTimestamp: toDate(r.diagnostics_scan_updated_at),
      frontTimestamp: toDate(r.diagnostics_front_updated_at),
    },
    pairedAt: new Date(r.created),
    lastPing: new Date(r.last_online),
    name: r.name,
    paired: r.paired,
    machineGroup: {
      id: r.machine_group_id,
      name: r.machine_group_name,
    },
    // TODO: we shall start using this JSON field at some point
    // location: r.location,
  }
}
|
||||
|
||||
// Fetch the raw list of { device_id } rows for every known device.
function getMachineIds() {
  return db.any('select device_id from devices')
}
|
||||
|
||||
// Fetch all displayed machines (oldest pairing first) as machine objects.
function getMachines() {
  const sql = `${MACHINE_WITH_CALCULATED_FIELD_SQL} where display=TRUE ORDER BY created`
  return db.any(sql).then(rows => rows.map(toMachineObject))
}
|
||||
|
||||
// Fetch unpaired devices, renaming the device_id column to deviceId.
function getUnpairedMachines() {
  return db.any('SELECT * FROM unpaired_devices').then(
    _.map(row => {
      const withCamel = _.set('deviceId', row.device_id, row)
      return _.unset('device_id', withCamel)
    }),
  )
}
|
||||
|
||||
// Resolve a config: use the supplied one when present, otherwise load it.
function getConfig(defaultConfig) {
  if (defaultConfig) return Promise.resolve(defaultConfig)
  return loadConfig()
}
|
||||
|
||||
// True when a machine state belongs to the boot sequence.
const isBootingState = state => state === 'booting' || state === 'pendingIdle'
|
||||
|
||||
// Heuristic: a machine is "stuck on boot" when every state-change event in a
// recent window (between 30s and 120s older than the newest event) reports a
// boot-sequence state. Larger `age` means an older event.
const isStuckOnBoot = machineEvents => {
  // Consider the machine stuck on boot if it's been booting for at least 30s
  const lowerLimit = 30 * T.seconds

  // Heuristic to ignore older events (possibly from previous boots), obviously
  // fallible
  const higherLimit = 4 * lowerLimit

  // machineEvents is sorted from oldest to newest
  const newest = machineEvents[machineEvents.length - 1]

  // Find the first event that makes a lowerLimit time interval with the
  // newest, ignoring older events
  const firstOverLimit = machineEvents.findLastIndex(ev => {
    const ageDiff = ev.age - newest.age
    return ageDiff >= lowerLimit && ageDiff <= higherLimit
  })
  // No event old enough inside the window: can't conclude the machine is stuck.
  if (firstOverLimit < 0) return false

  // Check all the events are for a booting state
  return machineEvents
    .slice(firstOverLimit)
    .every(ev => isBootingState(ev.note.state))
}
|
||||
|
||||
// A machine is considered booting when its most recent state-change event
// reports a boot-sequence state.
const isBooting = machineEvents => {
  const latest = machineEvents[machineEvents.length - 1]
  return isBootingState(latest?.note?.state)
}
|
||||
|
||||
// Derive the single-element status list for one machine from ping data and
// its recent state-change events. Checks are ordered by severity:
// unresponsive > stuck on boot > stuck > booting > fully functional.
const getMachineStatuses = (pings, events, machine) => {
  // NOTE(review): checkPings() appears to attach an `age` to machines that
  // missed pings — confirm against ./notifier.
  const lastPing = pings[machine.deviceId][0]
  if (lastPing?.age) return [unresponsiveStatus]

  // Keep only this machine's stateChange events, parse the JSON note, and
  // sort by descending age (larger age = older, so oldest first).
  const machineEvents = events
    .filter(
      ev =>
        ev.device_id === machine.deviceId && ev.event_type === 'stateChange',
    )
    .map(ev =>
      Object.assign({}, ev, {
        age: Math.floor(ev.age),
        note: JSON.parse(ev.note),
      }),
    )
    .sort((e1, e2) => e2.age - e1.age)

  if (isStuckOnBoot(machineEvents)) return [stuckOnBootStatus]

  // checkStuckScreen() returns entries with an `age` when the machine has
  // lingered on a screen — presumably; confirm against ./notifier.
  const stuckScreen = checkStuckScreen(machineEvents, machine)[0]
  if (stuckScreen?.age) return [stuckStatus]

  if (isBooting(machineEvents)) return [bootingUpStatus]

  return [fullyFunctionalStatus]
}
|
||||
|
||||
// Build a per-machine decorator that attaches the cash-out flag and the
// computed status list. Mutates and returns the machine (via _.assign).
function addName(pings, events, config) {
  return machine => {
    const cashOut = Boolean(
      configManager.getCashOut(machine.deviceId, config).active,
    )
    return _.assign(machine, {
      cashOut,
      statuses: getMachineStatuses(pings, events, machine),
    })
  }
}
|
||||
|
||||
// Fetch all machines enriched with network heartbeat/performance metrics,
// cash-out flag and computed statuses.
function getMachineNames(config) {
  return Promise.all([
    getMachines(),
    getConfig(config),
    getNetworkHeartbeat(),
    getNetworkPerformance(),
  ])
    .then(([rawMachines, resolvedConfig, heartbeat, performance]) =>
      Promise.all([
        rawMachines,
        checkPings(rawMachines),
        dbm.machineEvents(),
        resolvedConfig,
        heartbeat,
        performance,
      ]),
    )
    .then(([rawMachines, pings, events, resolvedConfig, heartbeat, performance]) => {
      // Merge records that share a deviceId; later sources win per key.
      const mergeByDeviceId = (xs, ys) =>
        _.values(_.merge(_.keyBy('deviceId', xs), _.keyBy('deviceId', ys)))
      const machines = mergeByDeviceId(
        mergeByDeviceId(rawMachines, heartbeat),
        performance,
      )

      return machines.map(addName(pings, events, resolvedConfig))
    })
}
|
||||
|
||||
/**
 * Given the machine id, get the machine's display name.
 *
 * @param {string} machineId machine id
 * @returns {Promise<string|undefined>} machine name, or undefined if unknown
 */
function getMachineName(machineId) {
  return db
    .oneOrNone('SELECT name FROM devices WHERE device_id=$1', [machineId])
    .then(row => (row ? row.name : undefined))
}
|
||||
|
||||
// Like getMachineName, but only matches machines that are currently paired.
const getPairedMachineName = deviceId =>
  db
    .oneOrNone('SELECT name FROM devices WHERE device_id = $1 AND paired = TRUE', [
      deviceId,
    ])
    .then(machine => machine?.name)
|
||||
|
||||
// Fetch a single machine by id, enriched with events, network metrics,
// cash-out flag and statuses. Throws a NOT_FOUND GraphQLError when the
// device does not exist.
function getMachine(machineId, config) {
  const sql = `${MACHINE_WITH_CALCULATED_FIELD_SQL} WHERE d.device_id = $1`

  const queryMachine = db.oneOrNone(sql, [machineId]).then(r => {
    if (r === null)
      throw new GraphQLError("Resource doesn't exist", {
        extensions: { code: 'NOT_FOUND' },
      })
    else return toMachineObject(r)
  })

  return Promise.all([
    queryMachine,
    dbm.machineEvents(),
    // FIX: resolve the config like getMachineNames() does. Callers such as
    // setCassetteBills invoke getMachine without a config; previously the
    // raw `undefined` leaked into addName()/getCashOut().
    getConfig(config),
    getNetworkHeartbeatByDevice(machineId),
    getNetworkPerformanceByDevice(machineId),
  ]).then(([machine, events, resolvedConfig, heartbeat, performance]) => {
    const pings = checkPings([machine])
    const mergedMachine = {
      ...machine,
      responseTime: _.get('responseTime', heartbeat),
      packetLoss: _.get('packetLoss', heartbeat),
      downloadSpeed: _.get('downloadSpeed', performance),
    }

    return addName(pings, events, resolvedConfig)(mergedMachine)
  })
}
|
||||
|
||||
// Rename a machine (admin action).
function renameMachine({ newName, deviceId }) {
  return db.none('UPDATE devices SET name=$1 WHERE device_id=$2', [
    newName,
    deviceId,
  ])
}
|
||||
|
||||
// Overwrite all cassette/recycler counts for a machine and invalidate any
// outstanding fiat-balance notification for it.
function resetCashOutBills(rec) {
  const detail = notifierUtils.buildDetail({ deviceId: rec.deviceId })
  const u = rec.cashUnits
  const sql = `UPDATE devices SET cassette1=$1, cassette2=$2, cassette3=$3, cassette4=$4, recycler1=$5, recycler2=$6, recycler3=$7, recycler4=$8, recycler5=$9, recycler6=$10 WHERE device_id=$11;`
  const params = [
    u.cassette1,
    u.cassette2,
    u.cassette3,
    u.cassette4,
    u.recycler1,
    u.recycler2,
    u.recycler3,
    u.recycler4,
    u.recycler5,
    u.recycler6,
    rec.deviceId,
  ]
  return db
    .none(sql, params)
    .then(() => notifierQueries.invalidateNotification(detail, 'fiatBalance'))
}
|
||||
|
||||
// Update a machine's cash-unit counts. When the cashbox count is unchanged
// (or previously unknown) this is a plain UPDATE; when it changed, delegate
// to the cashbox-batching module so the difference is recorded as a batch.
// Note: recycler columns use coalesce() so a null input keeps the previous
// value, while cassette columns are overwritten unconditionally.
function setCassetteBills(rec) {
  const {
    cashbox,
    cassette1,
    cassette2,
    cassette3,
    cassette4,
    recycler1,
    recycler2,
    recycler3,
    recycler4,
    recycler5,
    recycler6,
  } = rec.cashUnits
  return getMachine(rec.deviceId).then(machine => {
    const oldCashboxCount = machine?.cashUnits?.cashbox
    // Compare as strings: the incoming value and the db-derived count may
    // arrive as different types (string vs number).
    if (
      _.isNil(oldCashboxCount) ||
      cashbox.toString() === oldCashboxCount.toString()
    ) {
      const sql = `
        UPDATE devices SET cassette1=$1, cassette2=$2, cassette3=$3, cassette4=$4,
        recycler1=coalesce($5, recycler1), recycler2=coalesce($6, recycler2), recycler3=coalesce($7, recycler3),
        recycler4=coalesce($8, recycler4), recycler5=coalesce($9, recycler5), recycler6=coalesce($10, recycler6)
        WHERE device_id=$11`
      return db.none(sql, [
        cassette1,
        cassette2,
        cassette3,
        cassette4,
        recycler1,
        recycler2,
        recycler3,
        recycler4,
        recycler5,
        recycler6,
        rec.deviceId,
      ])
    }

    // Cashbox count changed: record the change as a cashbox batch.
    return batching.updateMachineWithBatch({
      ...rec,
      oldCashboxValue: oldCashboxCount,
    })
  })
}
|
||||
|
||||
// Empty a machine's cash units: compute per-unit bill deltas, create one
// cash_unit_operation row per affected unit, create one empty_unit_bills row
// per moved bill, and persist the new unit counts — all in one transaction.
// `newUnits` maps unit names (cashbox/cassetteN/recyclerN) to new counts.
function emptyMachineUnits({ deviceId, newUnits, fiatCode }) {
  return loadConfig()
    .then(config =>
      Promise.all([
        getMachine(deviceId),
        configManager.getCashOut(deviceId, config),
      ]),
    )
    .then(([machine, cashoutSettings]) => {
      // Per-unit record: operation name (e.g. "cash-cassette-1-empty"),
      // bill-count delta vs. the stored count, and the unit's denomination.
      const movedBills = _.reduce(
        (acc, value) => ({
          ...acc,
          [value]: {
            operationName: `cash-${_.replace(/(cassette|recycler)/g, '$1-')(value)}-empty`,
            delta: newUnits[value] - machine.cashUnits[value],
            denomination: value !== 'cashbox' ? cashoutSettings[value] : null,
          },
        }),
        {},
        _.keys(newUnits),
      )

      // Only units that actually moved bills get an operation (cashbox is
      // excluded — it has no denomination-bearing operation of its own).
      const operationNames = _.mapValues(it => it.operationName)(
        _.filter(it => Math.abs(it.delta) > 0)(_.omit(['cashbox'], movedBills)),
      )
      const operationsToCreate = _.map(it => ({
        id: uuid.v4(),
        device_id: deviceId,
        operation_type: it,
      }))(operationNames)

      // One empty_unit_bills row per bill moved out of each unit.
      const billArr = _.reduce(
        (acc, value) => {
          const unit = movedBills[value]
          return _.concat(
            acc,
            _.times(
              () => ({
                id: uuid.v4(),
                fiat: unit.denomination,
                fiat_code: fiatCode,
                device_id: deviceId,
                // TODO: Uncomment this if we decide to keep track of bills across multiple operations. For now, we'll just create the emptying operations for each unit affected, but not relate these events with individual bills and just use the field for the cashbox batch event
                // cash_unit_operation_id: _.find(it => it.operation_type === `cash-${_.replace(/(cassette|recycler)/g, '$1-')(value)}-empty`, operationsToCreate).id
              }),
              Math.abs(unit.delta),
            ),
          )
        },
        [],
        _.keys(_.omit(['cashbox'], movedBills)),
      )

      // This occurs when an empty unit is called when the units are already empty, hence, no bills moved around
      if (_.isEmpty(billArr) && _.isEmpty(operationsToCreate)) {
        return Promise.resolve()
      }

      // Single transaction: operations + bill rows + updated unit counts.
      return db.tx(t => {
        const q1Cols = ['id', 'device_id', 'operation_type']
        const q1 = t.none(
          pgp.helpers.insert(operationsToCreate, q1Cols, 'cash_unit_operation'),
        )
        const q2Cols = ['id', 'fiat', 'fiat_code', 'device_id']
        const q2 = t.none(
          pgp.helpers.insert(billArr, q2Cols, 'empty_unit_bills'),
        )
        // Units absent from newUnits keep their previous counts.
        const q3 = t.none(
          `UPDATE devices SET cassette1=$1, cassette2=$2, cassette3=$3, cassette4=$4, recycler1=$5, recycler2=$6, recycler3=$7, recycler4=$8, recycler5=$9, recycler6=$10 WHERE device_id=$11`,
          [
            _.defaultTo(machine.cashUnits.cassette1, newUnits.cassette1),
            _.defaultTo(machine.cashUnits.cassette2, newUnits.cassette2),
            _.defaultTo(machine.cashUnits.cassette3, newUnits.cassette3),
            _.defaultTo(machine.cashUnits.cassette4, newUnits.cassette4),
            _.defaultTo(machine.cashUnits.recycler1, newUnits.recycler1),
            _.defaultTo(machine.cashUnits.recycler2, newUnits.recycler2),
            _.defaultTo(machine.cashUnits.recycler3, newUnits.recycler3),
            _.defaultTo(machine.cashUnits.recycler4, newUnits.recycler4),
            _.defaultTo(machine.cashUnits.recycler5, newUnits.recycler5),
            _.defaultTo(machine.cashUnits.recycler6, newUnits.recycler6),
            deviceId,
          ],
        )

        return t.batch([q1, q2, q3])
      })
    })
}
|
||||
|
||||
// Refill a machine's recyclers: create one cash_unit_operation row per
// recycler whose count changed (e.g. "cash-recycler-1-refill") and persist
// the new unit counts, in one transaction. Unlike emptying, no per-bill rows
// are created. `newUnits` maps unit names to new counts.
function refillMachineUnits({ deviceId, newUnits }) {
  return getMachine(deviceId).then(machine => {
    const movedBills = _.reduce(
      (acc, value) => ({
        ...acc,
        [value]: {
          operationName: `cash-${_.replace(/(recycler)/g, '$1-')(value)}-refill`,
          delta: newUnits[value] - machine.cashUnits[value],
        },
      }),
      {},
      _.keys(newUnits),
    )

    // Only recyclers get refill operations; cassettes are excluded here.
    const operationNames = _.mapValues(it => it.operationName)(
      _.filter(it => Math.abs(it.delta) > 0)(
        _.omit(
          ['cassette1', 'cassette2', 'cassette3', 'cassette4'],
          movedBills,
        ),
      ),
    )
    const operationsToCreate = _.map(it => ({
      id: uuid.v4(),
      device_id: deviceId,
      operation_type: it,
    }))(operationNames)

    // This occurs when a refill unit is called when the loading boxes are empty, hence, no bills moved around
    if (_.isEmpty(operationsToCreate)) {
      return Promise.resolve()
    }

    return db.tx(t => {
      const q1Cols = ['id', 'device_id', 'operation_type']
      const q1 = t.none(
        pgp.helpers.insert(operationsToCreate, q1Cols, 'cash_unit_operation'),
      )
      // Units absent from newUnits keep their previous counts.
      const q2 = t.none(
        `UPDATE devices SET cassette1=$1, cassette2=$2, cassette3=$3, cassette4=$4, recycler1=$5, recycler2=$6, recycler3=$7, recycler4=$8, recycler5=$9, recycler6=$10 WHERE device_id=$11`,
        [
          _.defaultTo(machine.cashUnits.cassette1, newUnits.cassette1),
          _.defaultTo(machine.cashUnits.cassette2, newUnits.cassette2),
          _.defaultTo(machine.cashUnits.cassette3, newUnits.cassette3),
          _.defaultTo(machine.cashUnits.cassette4, newUnits.cassette4),
          _.defaultTo(machine.cashUnits.recycler1, newUnits.recycler1),
          _.defaultTo(machine.cashUnits.recycler2, newUnits.recycler2),
          _.defaultTo(machine.cashUnits.recycler3, newUnits.recycler3),
          _.defaultTo(machine.cashUnits.recycler4, newUnits.recycler4),
          _.defaultTo(machine.cashUnits.recycler5, newUnits.recycler5),
          _.defaultTo(machine.cashUnits.recycler6, newUnits.recycler6),
          deviceId,
        ],
      )

      return t.batch([q1, q2])
    })
  })
}
|
||||
|
||||
// Unpair a machine; actual work is delegated to the pairing module.
function unpair(rec) {
  const { deviceId } = rec
  return pairing.unpair(deviceId)
}
|
||||
|
||||
// Broadcast a reboot request to the machine via Postgres NOTIFY.
function reboot(rec) {
  const payload = JSON.stringify({
    action: 'reboot',
    value: _.pick(['deviceId', 'operatorId', 'action'], rec),
  })
  return db.none('NOTIFY $1:name, $2', ['machineAction', payload])
}
|
||||
|
||||
// Broadcast a shutdown request to the machine via Postgres NOTIFY.
function shutdown(rec) {
  const payload = JSON.stringify({
    action: 'shutdown',
    value: _.pick(['deviceId', 'operatorId', 'action'], rec),
  })
  return db.none('NOTIFY $1:name, $2', ['machineAction', payload])
}
|
||||
|
||||
// Broadcast a service-restart request to the machine via Postgres NOTIFY.
function restartServices(rec) {
  const payload = JSON.stringify({
    action: 'restartServices',
    value: _.pick(['deviceId', 'operatorId', 'action'], rec),
  })
  return db.none('NOTIFY $1:name, $2', ['machineAction', payload])
}
|
||||
|
||||
// Broadcast an empty-unit request to the machine via Postgres NOTIFY.
function emptyUnit(rec) {
  const payload = JSON.stringify({
    action: 'emptyUnit',
    value: _.pick(['deviceId', 'operatorId', 'action'], rec),
  })
  return db.none('NOTIFY $1:name, $2', ['machineAction', payload])
}
|
||||
|
||||
// Broadcast a refill-unit request to the machine via Postgres NOTIFY.
function refillUnit(rec) {
  const payload = JSON.stringify({
    action: 'refillUnit',
    value: _.pick(['deviceId', 'operatorId', 'action'], rec),
  })
  return db.none('NOTIFY $1:name, $2', ['machineAction', payload])
}
|
||||
|
||||
// Reset a machine's diagnostics: delete any cached photos, clear the
// diagnostics timestamps, then ask the machine to run diagnostics again.
function diagnostics(rec) {
  const directory = `${OPERATOR_DATA_DIR}/diagnostics/${rec.deviceId}/`
  const sql = `UPDATE devices
    SET diagnostics_timestamp = NULL,
        diagnostics_scan_updated_at = NULL,
        diagnostics_front_updated_at = NULL
    WHERE device_id = $1`

  // Delete stale photos; a missing file is not an error.
  const removeFiles = ['scan.jpg', 'front.jpg'].map(fileName =>
    fsPromises.unlink(path.join(directory, fileName)).catch(err => {
      if (err.code !== 'ENOENT') throw err
    }),
  )

  const payload = JSON.stringify({
    action: 'diagnostics',
    value: _.pick(['deviceId', 'operatorId', 'action'], rec),
  })

  return Promise.all(removeFiles)
    .then(() => db.none(sql, [rec.deviceId]))
    .then(() => db.none('NOTIFY $1:name, $2', ['machineAction', payload]))
}
|
||||
|
||||
// Reset diagnostics for a set of machines: wipe the whole diagnostics photo
// directory, clear timestamps for all the devices, then notify each machine.
function batchDiagnostics(deviceIds, operatorId) {
  const diagnosticsDir = `${OPERATOR_DATA_DIR}/diagnostics/`

  // A missing directory is fine — nothing cached yet.
  const removeDir = fsPromises
    .rm(diagnosticsDir, { recursive: true })
    .catch(err => {
      if (err.code !== 'ENOENT') throw err
    })

  const sql = `UPDATE devices
    SET diagnostics_timestamp = NULL,
        diagnostics_scan_updated_at = NULL,
        diagnostics_front_updated_at = NULL
    WHERE device_id = ANY($1)`

  // Send individual notifications for each machine. NOTE: these are kicked
  // off eagerly (matching the original behavior), concurrently with the
  // directory removal above.
  const sendNotifications = deviceIds.map(deviceId =>
    db.none('NOTIFY $1:name, $2', [
      'machineAction',
      JSON.stringify({
        action: 'diagnostics',
        value: { deviceId, operatorId, action: 'diagnostics' },
      }),
    ]),
  )

  return removeDir
    .then(() => db.none(sql, [deviceIds]))
    .then(() => Promise.all(sendNotifications))
}
|
||||
|
||||
// Dispatch an admin machine action to its handler. Throws on unknown actions.
function setMachine(rec, operatorId) {
  rec.operatorId = operatorId

  const handlers = {
    rename: renameMachine,
    resetCashOutBills,
    setCassetteBills,
    unpair,
    reboot,
    shutdown,
    restartServices,
    emptyUnit,
    refillUnit,
    diagnostics,
  }

  // Object.hasOwn guards against prototype keys (e.g. action='toString').
  if (!Object.hasOwn(handlers, rec.action)) {
    throw new Error('No such action: ' + rec.action)
  }
  return handlers[rec.action](rec)
}
|
||||
|
||||
// Upsert the latest download-speed sample for a machine (one row per device).
// No-op (resolves true) when no samples were supplied.
function updateNetworkPerformance(deviceId, data) {
  if (_.isEmpty(data)) return Promise.resolve(true)

  const { speed } = _.head(data)
  const row = {
    device_id: deviceId,
    download_speed: speed,
    created: new Date(),
  }
  const cs = new pgp.helpers.ColumnSet(
    ['device_id', 'download_speed', 'created'],
    { table: 'machine_network_performance' },
  )
  const onConflict =
    ' ON CONFLICT (device_id) DO UPDATE SET ' +
    cs.assignColumns({ from: 'EXCLUDED', skip: ['device_id'] })
  return db.none(pgp.helpers.insert(row, cs) + onConflict)
}
|
||||
|
||||
// Insert an averaged heartbeat sample (response time / packet loss) for a
// machine. No-op (resolves true) when no samples were supplied.
function updateNetworkHeartbeat(deviceId, data) {
  if (_.isEmpty(data)) return Promise.resolve(true)

  const meanOf = field => _.meanBy(e => _.toNumber(e[field]), data)
  const row = {
    id: uuid.v4(),
    device_id: deviceId,
    average_response_time: meanOf('averageResponseTime'),
    average_packet_loss: meanOf('packetLoss'),
  }
  return db.none(pgp.helpers.insert(row, null, 'machine_network_heartbeat'))
}
|
||||
|
||||
/**
 * Fetch the stored download speed for every machine, with snake_case
 * columns converted to camelCase keys.
 */
function getNetworkPerformance() {
  const sql = `SELECT device_id, download_speed FROM machine_network_performance`
  const toCamelKeys = _.mapKeys(_.camelCase)
  return db.manyOrNone(sql).then(rows => rows.map(row => toCamelKeys(row)))
}
|
||||
|
||||
/**
 * Average response time and packet loss per device across all stored
 * heartbeat rows, with snake_case columns converted to camelCase keys.
 */
function getNetworkHeartbeat() {
  const sql = `SELECT AVG(average_response_time) AS response_time, AVG(average_packet_loss) AS packet_loss, device_id
    FROM machine_network_heartbeat
    GROUP BY device_id`
  const toCamelKeys = _.mapKeys(_.camelCase)
  return db.manyOrNone(sql).then(rows => rows.map(row => toCamelKeys(row)))
}
|
||||
|
||||
/**
 * Fetch the stored download speed for one device, camelCased.
 * Returns an empty object when the device has no row (lodash mapKeys on
 * undefined yields {}).
 */
function getNetworkPerformanceByDevice(deviceId) {
  const sql = `SELECT device_id, download_speed FROM machine_network_performance WHERE device_id = $1`
  return db.manyOrNone(sql, [deviceId]).then(rows => {
    const row = _.find(it => it.device_id === deviceId, rows)
    return _.mapKeys(_.camelCase, row)
  })
}
|
||||
|
||||
/**
 * Averaged heartbeat stats (response time, packet loss) for one device,
 * camelCased. Returns an empty object when the device has no rows.
 */
function getNetworkHeartbeatByDevice(deviceId) {
  const sql = `SELECT AVG(average_response_time) AS response_time, AVG(average_packet_loss) AS packet_loss, device_id
    FROM machine_network_heartbeat WHERE device_id = $1
    GROUP BY device_id`
  return db.manyOrNone(sql, [deviceId]).then(rows => {
    const row = _.find(it => it.device_id === deviceId, rows)
    return _.mapKeys(_.camelCase, row)
  })
}
|
||||
|
||||
/**
 * Persist diagnostics photos (scan/front) for a device and stamp the
 * corresponding timestamps on the devices row. The per-image *_updated_at
 * columns are only touched for images that were actually saved ($2/$3 are
 * the booleans returned by updatePhotos).
 * Errors are logged, not propagated (best-effort).
 */
function updateDiagnostics(deviceId, images) {
  const sql = `UPDATE devices
    SET diagnostics_timestamp = NOW(),
    diagnostics_scan_updated_at = CASE WHEN $2 THEN NOW() ELSE diagnostics_scan_updated_at END,
    diagnostics_front_updated_at = CASE WHEN $3 THEN NOW() ELSE diagnostics_front_updated_at END
    WHERE device_id = $1`

  const directory = `${OPERATOR_DATA_DIR}/diagnostics/${deviceId}/`
  const { scan, front } = images

  return updatePhotos(directory, [
    ['scan.jpg', scan],
    ['front.jpg', front],
  ])
    // updatePhotos resolves to [savedScan, savedFront] booleans.
    .then(([scan, front]) => db.none(sql, [deviceId, scan, front]))
    .catch(err => logger.error('while running machine diagnostics: ', err))
}
|
||||
|
||||
/**
 * Save the camera frames of a failed QR scan under the device's
 * failedQRScans directory, one timestamped file per frame.
 */
const updateFailedQRScans = (deviceId, frames) => {
  const timestamp = new Date().toISOString()
  const directory = `${OPERATOR_DATA_DIR}/failedQRScans/${deviceId}/`
  const photoPairs = frames.map((frame, no) => [`${timestamp}-${no}.jpg`, frame])
  return updatePhotos(directory, photoPairs)
}
|
||||
|
||||
/**
 * Decode a base64-encoded image and write it to `dir`/`name`.
 * Rejects (with an Error) when there is no data to save.
 *
 * Fix: the original rejected with `Promise.reject()` — no reason — which
 * surfaces as an undefined rejection value. Rejecting with an Error keeps
 * stack traces and a message; updatePhotos only inspects the settled
 * status, so this stays backward compatible.
 */
function createPhoto(name, data, dir) {
  if (!data) {
    logger.error(`Diagnostics error: No data to save for ${name} photo`)
    return Promise.reject(
      new Error(`Diagnostics error: No data to save for ${name} photo`),
    )
  }

  const decodedImageData = Buffer.from(data, 'base64')
  const filename = path.join(dir, name)
  return fsPromises.writeFile(filename, decodedImageData)
}
|
||||
|
||||
/**
 * Write a batch of photos ([filename, base64data] pairs) into `dir`,
 * creating the directory if needed (failure to create is swallowed via
 * _.attempt; the individual writes will fail instead).
 * Resolves to an array of booleans — one per pair — true when that photo
 * was saved successfully. Never rejects (allSettled).
 */
function updatePhotos(dir, photoPairs) {
  const dirname = path.join(dir)
  _.attempt(() => makeDir.sync(dirname))
  return Promise.allSettled(
    photoPairs.map(([filename, data]) => createPhoto(filename, data, dirname)),
  ).then(savedPhotos => savedPhotos.map(res => res.status === 'fulfilled'))
}
|
||||
|
||||
// Pings queued between batch flushes, keyed by device id — only the most
// recent ping per device is kept. Reassigned (swapped for a fresh Map) by
// batchRecordPendingPings, hence `let`.
let pendingRecordPings = new Map()

// Queue a machine ping for the next batched DB write.
const enqueueRecordPing = ping => {
  pendingRecordPings.set(ping.deviceId, ping)
}
|
||||
|
||||
/**
 * Flush all queued pings to the database in a single task.
 * The pending map is swapped out synchronously first, so pings arriving
 * while the flush runs land in the next batch.
 * Per-ping DB errors are logged and skipped; the batch continues.
 */
const batchRecordPendingPings = () => {
  const pings = pendingRecordPings.values()
  pendingRecordPings = new Map()

  return db.task(async t => {
    for (const {
      deviceId,
      last_online,
      version,
      model,
      restrictionLevel,
    } of pings) {
      // Upsert the per-device ping timestamp.
      await t
        .none(
          `INSERT INTO machine_pings (device_id, device_time)
          VALUES ($1, $2)
          ON CONFLICT (device_id) DO
          UPDATE SET device_time = $2,
          updated = now()`,
          [deviceId, last_online],
        )
        .catch(err => logger.error(err))
      // Refresh device metadata. NOTE: ${deviceId} here is pg-promise
      // Named Parameter syntax inside a plain string, not a JS template —
      // it is substituted by db driver using the { deviceId } object.
      await t
        .none(
          pgp.helpers.update(
            {
              last_online,
              version,
              model,
              restriction_level: restrictionLevel,
            },
            null,
            'devices',
          ) + ' WHERE device_id = ${deviceId}',
          { deviceId },
        )
        .catch(err => logger.error(err))
    }
  })
}
|
||||
|
||||
/**
 * Assign a set of machines to a machine group.
 * Validates inputs up front and resolves with the assigned machine ids.
 */
function assignToGroup(machineIds, groupId) {
  if (!machineIds?.length) throw new Error('Machine ID is required')
  if (!groupId) throw new Error('Group ID is required')

  const resolveWithIds = () => machineIds
  return assignMachinesToGroup(machineIds, groupId).then(resolveWithIds)
}
|
||||
|
||||
// Public surface of the machine loader: lookups, machine actions,
// network-stats persistence, diagnostics, and batched ping recording.
module.exports = {
  getMachineName,
  getPairedMachineName,
  getMachines,
  getUnpairedMachines,
  getMachine,
  getMachineNames,
  setMachine,
  updateNetworkPerformance,
  updateNetworkHeartbeat,
  getNetworkPerformance,
  getNetworkHeartbeat,
  getMachineIds,
  emptyMachineUnits,
  refillMachineUnits,
  updateDiagnostics,
  updateFailedQRScans,
  batchDiagnostics,
  enqueueRecordPing,
  batchRecordPendingPings,
  assignToGroup,
}
|
||||
145
packages/server/lib/machine-settings.js
Normal file
145
packages/server/lib/machine-settings.js
Normal file
|
|
@ -0,0 +1,145 @@
|
|||
const NodeCache = require('node-cache')
|
||||
|
||||
const {
|
||||
db: { default: kdb, inTransaction },
|
||||
machines: { getMachinesGroups },
|
||||
machineGroups: { getMachineGroupsComplianceTriggerSets },
|
||||
complianceTriggers: { getAllComplianceTriggers },
|
||||
} = require('typesafe-db')
|
||||
|
||||
const db = require('./db')
|
||||
const logger = require('./logger')
|
||||
|
||||
// Subscribe to Postgres NOTIFY channels so the in-memory trigger cache is
// rebuilt whenever the relevant tables change. These run at module load;
// reloadAll is a `const` declared below, but the notification callbacks
// only fire after the module has finished evaluating, so the reference is
// safe by the time it is used.
db.connect({ direct: true }).then(sco => {
  sco.client.on('notification', () => reloadAll())
  return sco.none('LISTEN updated_machine_groups')
})

db.connect({ direct: true }).then(sco => {
  sco.client.on('notification', () => reloadAll())
  return sco.none('LISTEN updated_compliance_trigger_sets')
})

db.connect({ direct: true }).then(sco => {
  sco.client.on('notification', () => reloadAll())
  return sco.none('LISTEN updated_compliance_triggers')
})
|
||||
|
||||
// Wrap a (possibly stalling or repeating) pseudo real-time clock so that
// consecutive readings are strictly increasing.
const StrictlyMonotonicPseudoRealTimeClock = opts => {
  let previous = opts?.last ?? 0
  const readClock = opts?.now ?? Date.now
  return () => {
    previous = Math.max(previous + 1, readClock())
    return previous
  }
}
|
||||
|
||||
// Monotonic version stamp attached to each rebuilt cache snapshot.
const timestamp = StrictlyMonotonicPseudoRealTimeClock()

const TTL = 3600 // 1h in seconds
// Single-entry cache holding [version, triggersByDeviceId].
const CACHE = new NodeCache({
  stdTTL: TTL,
  checkperiod: TTL / 3,
})

// The cache stores exactly one value under this fixed key.
const KEY = 0

const get = () => CACHE.get(KEY)
const set = val => CACHE.set(KEY, val)

// On TTL expiry, rebuild the cache; the expired value is passed through as
// the fallback if the reload fails.
CACHE.on('expired', (key, value) => {
  reloadAll(value)
})
|
||||
|
||||
/**
 * Concurrency guard: wraps `func` so that while one invocation is in
 * flight, further calls return the same promise instead of re-invoking.
 * Note `func` is called synchronously on the first (non-coalesced) call,
 * and the arguments of coalesced calls are ignored.
 */
const oneAtATime = func => {
  let running = null

  // Wait for the function to finish, resolve with its result, and clear the
  // running promise. Resolving a reject results in a reject.
  const stop = res => resolve =>
    Promise.resolve(res)
      .then(resolve)
      .finally(() => {
        running = null
      })

  // If the function is running, return the current promise. Otherwise, create
  // a new promise and run the function.
  return (...args) => running || (running = new Promise(stop(func(...args))))
}
|
||||
|
||||
/**
 * Build a deviceId -> complianceTriggers[] map by joining machines to
 * their machine group and each group to its compliance trigger set.
 * Groups with no trigger set map to an empty trigger list.
 */
const getTriggersByMachine = (machines, machineGroups, complianceTriggers) => {
  // Bucket triggers by their trigger-set id (null-prototype to avoid
  // collisions with Object.prototype keys).
  const triggersBySet = Object.create(null)
  for (const trigger of complianceTriggers) {
    ;(triggersBySet[trigger.complianceTriggerSetId] ??= []).push(trigger)
  }

  const triggersByGroup = {}
  for (const { id, complianceTriggerSetId } of machineGroups) {
    triggersByGroup[id] = triggersBySet[complianceTriggerSetId] ?? []
  }

  const byMachine = {}
  for (const { deviceId, machineGroupId } of machines) {
    byMachine[deviceId] = triggersByGroup[machineGroupId]
  }
  return byMachine
}
|
||||
|
||||
/**
 * Rebuild the trigger cache from the database. Guarded by oneAtATime so
 * concurrent reload requests share one in-flight rebuild.
 * The cache stores the *promise* of [version, triggersByDevice]; on DB
 * failure the previous snapshot is kept (with the new version stamp
 * discarded along with the failed load).
 */
const reloadAll = oneAtATime(oldCache => {
  const ts = timestamp()
  oldCache ??= get() // oldCache is given only on `expired`
  oldCache ??= [null, {}] // the cache is empty on the first reload

  const ret = inTransaction(
    async tx => [
      await getMachinesGroups(tx),
      await getMachineGroupsComplianceTriggerSets(tx),
      await getAllComplianceTriggers(tx),
    ],
    kdb,
  )
    .then(([machines, machineGroups, complianceTriggers]) => [
      ts,
      getTriggersByMachine(machines, machineGroups, complianceTriggers),
    ])
    .catch(err => {
      logger.error(err)
      logger.info(
        'Reloading machine settings cache failed; using previous cache',
      )
      return oldCache
    })

  // Cache the promise immediately so concurrent readers await the same load.
  set(ret)
  return ret
})
|
||||
|
||||
/**
 * Resolve the cached [version, triggersByDevice] promise and pick out one
 * device's entry. Returns null when there is no cache or no entry for the
 * device.
 */
const getFromPromise = async (cachePromise, deviceId) => {
  if (!cachePromise) return null
  const [settingsVersion, triggersByDevice] = await cachePromise
  const complianceTriggers = triggersByDevice[deviceId]
  return complianceTriggers ? { settingsVersion, complianceTriggers } : null
}
|
||||
|
||||
/**
 * Fetch a device's compliance triggers, first from the cache and, on a
 * miss, after forcing a reload. Throws when the device is unknown even
 * after reload, or when the machine reports a newer settings version than
 * the cache holds (stale cache).
 */
const getOrUpdate = async (deviceId, machineVersion) => {
  let settings =
    (await getFromPromise(get(), deviceId)) ??
    (await getFromPromise(reloadAll(), deviceId))

  if (!settings)
    throw new Error(`Compliance triggers cache has no entry for ${deviceId}`)

  if (machineVersion && settings.settingsVersion < machineVersion)
    throw new Error(`Compliance triggers cache is older than ${deviceId}`)

  return settings
}
|
||||
|
||||
// Public surface: force a cache rebuild, or read (and lazily refresh) a
// device's compliance triggers.
module.exports = {
  reloadAll,
  getOrUpdate,
}
|
||||
15
packages/server/lib/middlewares/addRWBytes.js
Normal file
15
packages/server/lib/middlewares/addRWBytes.js
Normal file
|
|
@ -0,0 +1,15 @@
|
|||
// Express middleware factory: when the response finishes (or the
// connection closes), snapshot the socket's byte counters onto `res` so
// downstream logging can report request/response sizes.
const addRWBytes = () => (req, res, next) => {
  const capture = () => {
    // Run once: detach from both events before recording.
    res.removeListener('finish', capture)
    res.removeListener('close', capture)
    res.bytesRead = req.connection.bytesRead
    res.bytesWritten = req.connection.bytesWritten
  }

  res.on('finish', capture)
  res.on('close', capture)

  next()
}
|
||||
|
||||
module.exports = addRWBytes
|
||||
21
packages/server/lib/middlewares/authorize.js
Normal file
21
packages/server/lib/middlewares/authorize.js
Normal file
|
|
@ -0,0 +1,21 @@
|
|||
const { getPairedMachineName } = require('../machine-loader')
|
||||
const logger = require('../logger')
|
||||
|
||||
/**
 * Express middleware: allow the request only if the device id maps to a
 * paired machine; otherwise respond 403.
 * NOTE(review): the catch branch calls next() — a lookup *error* (as
 * opposed to a missing device) lets the request through (fail-open).
 * Confirm this is intentional.
 */
const authorize = function (req, res, next) {
  return getPairedMachineName(req.deviceId)
    .then(deviceName => {
      if (deviceName) {
        req.deviceName = deviceName
        return next()
      }

      logger.error(`Device ${req.deviceId} not found`)
      return res.status(403).json({ error: 'Forbidden' })
    })
    .catch(error => {
      logger.error(error)
      return next()
    })
}

module.exports = authorize
|
||||
16
packages/server/lib/middlewares/ca.js
Normal file
16
packages/server/lib/middlewares/ca.js
Normal file
|
|
@ -0,0 +1,16 @@
|
|||
const pairing = require('../pairing')
|
||||
const logger = require('../logger')
|
||||
|
||||
/**
 * HTTP handler: return the CA certificate for a valid one-time pairing
 * token; 403 otherwise.
 */
function ca(req, res) {
  const token = req.query.token

  const sendCa = certificate => res.json({ ca: certificate })
  const deny = error => {
    logger.error(error.message)
    return res.status(403).json({ error: 'forbidden' })
  }

  return pairing.authorizeCaDownload(token).then(sendCa).catch(deny)
}
|
||||
|
||||
module.exports = ca
|
||||
14
packages/server/lib/middlewares/errorHandler.js
Normal file
14
packages/server/lib/middlewares/errorHandler.js
Normal file
|
|
@ -0,0 +1,14 @@
|
|||
const logger = require('../logger')
|
||||
|
||||
/**
 * Express error-handling middleware: map HTTPError codes to the response
 * status (default 500), respond with { error: message }, and log.
 * NOTE(review): next(err) after responding forwards to Express's default
 * error handler; since headers are already sent it will not double-respond,
 * but confirm the pass-through is intended.
 */
function errorHandler(err, req, res, next) {
  const statusCode = err.name === 'HTTPError' ? err.code || 500 : 500

  const json = { error: err.message }

  // statusCode is 500 unless an HTTPError carried its own code.
  if (statusCode >= 400) logger.error(err)

  res.status(statusCode).json(json)
  next(err)
}

module.exports = errorHandler
|
||||
31
packages/server/lib/middlewares/filterOldRequests.js
Normal file
31
packages/server/lib/middlewares/filterOldRequests.js
Normal file
|
|
@ -0,0 +1,31 @@
|
|||
const state = require('./state')
|
||||
const logger = require('../logger')
|
||||
|
||||
// Skew above which we warn the operator (ms).
const CLOCK_SKEW = 60 * 1000
// Requests whose device timestamp is older than this are rejected (ms).
const REQUEST_TTL = 3 * 60 * 1000
// Minimum interval between skew warnings per device (ms).
const THROTTLE_CLOCK_SKEW = 60 * 1000

/**
 * Express middleware: reject requests whose device-reported time is too
 * far in the past (408 'stale'), and log — throttled per device — when
 * the machine's clock is noticeably behind the server's.
 */
function filterOldRequests(req, res, next) {
  const deviceTime = req.deviceTime
  const deviceId = req.deviceId
  const timestamp = Date.now()
  // Positive delta: the device clock is behind the server.
  const delta = timestamp - Date.parse(deviceTime)

  const shouldTrigger =
    !state.canLogClockSkewMap[deviceId] ||
    timestamp - state.canLogClockSkewMap[deviceId] >= THROTTLE_CLOCK_SKEW

  if (delta > CLOCK_SKEW && shouldTrigger) {
    state.canLogClockSkewMap[deviceId] = timestamp
    logger.error(
      'Clock skew with lamassu-machine[%s] too high [%ss], adjust lamassu-machine clock',
      req.deviceName,
      (delta / 1000).toFixed(2),
    )
  }

  if (delta > REQUEST_TTL) return res.status(408).json({ error: 'stale' })
  next()
}

module.exports = filterOldRequests
|
||||
15
packages/server/lib/middlewares/operatorId.js
Normal file
15
packages/server/lib/middlewares/operatorId.js
Normal file
|
|
@ -0,0 +1,15 @@
|
|||
const { getOperatorId } = require('../operator')
|
||||
|
||||
/**
 * Express middleware: resolve the operator id and expose it to later
 * handlers via res.locals; propagate failures to the error handler.
 */
function findOperatorId(req, res, next) {
  const stash = operatorId => {
    res.locals.operatorId = operatorId
    return next()
  }
  const fail = e => {
    console.error('Error while computing operator id\n' + e)
    next(e)
  }

  return getOperatorId('middleware').then(stash).catch(fail)
}
|
||||
|
||||
module.exports = findOperatorId
|
||||
29
packages/server/lib/middlewares/populateDeviceId.js
Normal file
29
packages/server/lib/middlewares/populateDeviceId.js
Normal file
|
|
@ -0,0 +1,29 @@
|
|||
const crypto = require('crypto')
|
||||
|
||||
//const IS_STRESS_TESTING = process.env.LAMASSU_STRESS_TESTING === 'YES'
|
||||
|
||||
/**
 * SHA-256 hex digest of a buffer; null for falsy input.
 * Fix: digest('hex') already returns a hex string — the original's extra
 * .toString('hex') was a no-op (String#toString ignores its argument) and
 * has been removed.
 */
function sha256(buf) {
  if (!buf) return null
  const hash = crypto.createHash('sha256')

  hash.update(buf)
  return hash.digest('hex')
}

/**
 * Express middleware: derive a stable device id from the client's TLS
 * certificate (SHA-256 of the raw DER bytes). Responds 500 when no
 * certificate is presented; otherwise stashes the id and the
 * device-reported `date` header on the request.
 */
const populateDeviceId = function (req, res, next) {
  const peerCert = req.socket.getPeerCertificate
    ? req.socket.getPeerCertificate()
    : null
  let deviceId = peerCert?.raw ? sha256(peerCert.raw) : null

  //if (!deviceId && IS_STRESS_TESTING) deviceId = req.headers.device_id

  if (!deviceId)
    return res.status(500).json({ error: 'Unable to find certificate' })
  req.deviceId = deviceId
  req.deviceTime = req.get('date')

  next()
}
|
||||
|
||||
module.exports = populateDeviceId
|
||||
25
packages/server/lib/middlewares/populateMachineSettings.js
Normal file
25
packages/server/lib/middlewares/populateMachineSettings.js
Normal file
|
|
@ -0,0 +1,25 @@
|
|||
const machineSettings = require('../machine-settings')
|
||||
|
||||
/**
 * Parse the machine-reported settings-version header into an integer.
 * Returns null for missing/empty values and non-numeric strings.
 * Fixes: no parameter reassignment; Number.parseInt/Number.isNaN instead
 * of the coercing globals (same behavior, stricter idiom).
 */
const getMachineVersion = machineVersion => {
  if (!machineVersion) return null
  const parsed = Number.parseInt(machineVersion, 10)
  return Number.isNaN(parsed) ? null : parsed
}
|
||||
|
||||
/**
 * Express middleware: attach the device's cached compliance settings to
 * the request, validating that the cache is at least as new as the
 * settings version the machine reports.
 */
const populateMachineSettings = (req, res, next) => {
  const deviceId = req.deviceId
  const machineVersion = getMachineVersion(req.headers['settings-version'])
  machineSettings
    .getOrUpdate(deviceId, machineVersion)
    .then(settings => {
      // Defensive: getOrUpdate throws rather than resolving empty, but
      // guard anyway.
      if (!settings)
        return next(
          new Error(`No cached settings found for machine ${deviceId}`),
        )
      req.machineSettings = settings
      next()
    })
    .catch(err => next(err))
}

module.exports = populateMachineSettings
|
||||
89
packages/server/lib/middlewares/populateSettings.js
Normal file
89
packages/server/lib/middlewares/populateSettings.js
Normal file
|
|
@ -0,0 +1,89 @@
|
|||
const db = require('../db')
|
||||
const state = require('./state')
|
||||
const newSettingsLoader = require('../new-settings-loader')
|
||||
const logger = require('../logger')
|
||||
|
||||
// Refresh the settings cache whenever the DB broadcasts a 'reload'
// notification (operator changed configuration).
db.connect({ direct: true })
  .then(sco => {
    sco.client.on('notification', () => reloadCache())
    return sco.none('LISTEN reload')
  })
  .catch(console.error)

// Machine action notifications carry a JSON payload of
// { action, value: { deviceId, ... } } and are routed to machineAction.
db.connect({ direct: true })
  .then(sco => {
    sco.client.on('notification', data => {
      const parsedData = JSON.parse(data.payload)
      return machineAction(parsedData.action, parsedData.value)
    })
    return sco.none('LISTEN machineAction')
  })
  .catch(console.error)
|
||||
|
||||
/**
 * Record a pending machine action: marks the device's entry in the
 * matching state map with the pid of the long-poll request (if any), so
 * the next poll response can deliver the action. Unknown types are
 * ignored.
 */
function machineAction(type, value) {
  const deviceId = value.deviceId
  const pid = state.pids?.[deviceId]?.pid

  // type -> [state map name, debug message]
  const dispatch = {
    reboot: ['reboots', `Rebooting machine '${deviceId}'`],
    shutdown: ['shutdowns', `Shutting down machine '${deviceId}'`],
    restartServices: [
      'restartServicesMap',
      `Restarting services of machine '${deviceId}'`,
    ],
    emptyUnit: ['emptyUnit', `Emptying units from machine '${deviceId}'`],
    refillUnit: [
      'refillUnit',
      `Refilling recyclers from machine '${deviceId}'`,
    ],
    diagnostics: [
      'diagnostics',
      `Running diagnostics on machine '${deviceId}'`,
    ],
  }

  if (!Object.hasOwn(dispatch, type)) return

  const [stateMap, message] = dispatch[type]
  logger.debug(message)
  state[stateMap][deviceId] = pid
}
|
||||
|
||||
// Store loaded settings in the shared cache under their own version; when
// no explicit version was requested, they are also the 'latest'.
const updateCache = (versionId, settings) => {
  const { settingsCache } = state
  settingsCache.set(settings.version, settings)
  if (!versionId) settingsCache.set('latest', settings)
  return settings
}

// Load settings (a specific version, or latest when versionId is falsy)
// from the database and refresh the cache.
const reloadCache = async versionId => {
  const settings = await newSettingsLoader.load(versionId)
  return updateCache(versionId, settings)
}

// Cache-first settings lookup; falls back to a DB load on a miss.
const getOrUpdateCached = async versionId =>
  state.settingsCache.get(versionId || 'latest') ||
  (await reloadCache(versionId))
|
||||
|
||||
/**
 * Express middleware: attach the operator configuration to the request,
 * honoring the machine's pinned `config-version` header when present.
 */
const populateSettings = function (req, res, next) {
  const versionId = req.headers['config-version']

  // Priority of configs to retrieve
  // 1. Machine is in the middle of a transaction and has the config-version header set, fetch that config from cache or database, depending on whether it exists in cache
  // 2. The operator settings changed, so we must update the cache
  // 3. There's a cached config, send the cached value
  // 4. There's no cached config, cache and send the latest config
  getOrUpdateCached(versionId)
    .then(settings => {
      req.settings = settings
      next()
    })
    .catch(next)
}

module.exports = populateSettings
|
||||
14
packages/server/lib/middlewares/recordPing.js
Normal file
14
packages/server/lib/middlewares/recordPing.js
Normal file
|
|
@ -0,0 +1,14 @@
|
|||
const { enqueueRecordPing } = require('../machine-loader')
|
||||
|
||||
/**
 * Express middleware: queue a ping record for this device (flushed later
 * in batch by the machine loader) and continue.
 */
const record = (req, res, next) => {
  const { model, version, restrictionLevel } = req.query
  enqueueRecordPing({
    deviceId: req.deviceId,
    last_online: req.deviceTime,
    model,
    version,
    restrictionLevel: restrictionLevel || 0,
  })
  next()
}
|
||||
|
||||
module.exports = record
|
||||
35
packages/server/lib/middlewares/rejectIncompatbleMachines.js
Normal file
35
packages/server/lib/middlewares/rejectIncompatbleMachines.js
Normal file
|
|
@ -0,0 +1,35 @@
|
|||
const semver = require('semver')
|
||||
const version = require('../../package.json').version
|
||||
const logger = require('../logger')
|
||||
|
||||
/**
 * Express middleware: reject machines whose software major version is
 * incompatible with this server. Machines may lag the server by at most
 * one major version, and may never be ahead of it. Requests without a
 * version query parameter pass through.
 */
const rejectIncompatibleMachines = function (req, res, next) {
  const machineVersion = req.query.version
  const deviceId = req.deviceId

  if (!machineVersion) return next()

  const serverMajor = semver.major(version)
  const machineMajor = semver.major(machineVersion)

  // More than one major behind the server: too old.
  if (serverMajor - machineMajor > 1) {
    logger.error(
      `Machine version too old: ${machineVersion} deviceId: ${deviceId}`,
    )
    return res.status(400).json({
      error: 'Machine version too old',
    })
  }

  // Ahead of the server: too new.
  if (serverMajor < machineMajor) {
    logger.error(
      `Machine version too new: ${machineVersion} deviceId: ${deviceId}`,
    )
    return res.status(400).json({
      error: 'Machine version too new',
    })
  }

  next()
}

module.exports = rejectIncompatibleMachines
|
||||
21
packages/server/lib/middlewares/state.js
Normal file
21
packages/server/lib/middlewares/state.js
Normal file
|
|
@ -0,0 +1,21 @@
|
|||
const NodeCache = require('node-cache')
|
||||
const SETTINGS_CACHE_REFRESH = 3600
|
||||
|
||||
module.exports = (function () {
|
||||
return {
|
||||
settingsCache: new NodeCache({
|
||||
stdTTL: SETTINGS_CACHE_REFRESH,
|
||||
checkperiod: SETTINGS_CACHE_REFRESH, // Clear cache every hour
|
||||
}),
|
||||
canLogClockSkewMap: {},
|
||||
canGetLastSeenMap: {},
|
||||
pids: {},
|
||||
reboots: {},
|
||||
shutdowns: {},
|
||||
restartServicesMap: {},
|
||||
emptyUnit: {},
|
||||
refillUnit: {},
|
||||
diagnostics: {},
|
||||
mnemonic: null,
|
||||
}
|
||||
})()
|
||||
24
packages/server/lib/migrate.js
Normal file
24
packages/server/lib/migrate.js
Normal file
|
|
@ -0,0 +1,24 @@
|
|||
const path = require('path')
|
||||
const migrate = require('migrate')
|
||||
|
||||
const DbMigrateStore = require('./db-migrate-store')
|
||||
|
||||
// Migrations live in packages/server/migrations; state is tracked in the
// database via DbMigrateStore. Only numerically-prefixed .js files count.
const migrateDir = path.resolve(__dirname, '..', 'migrations')
const migrateOpts = {
  migrationsDirectory: migrateDir,
  stateStore: new DbMigrateStore(),
  filterFunction: it => it.match(/^\d+.*\.js$/),
}

module.exports = { run }

/**
 * Run all outstanding migrations. Resolves with 0 on success; rejects on
 * load or migration failure. (Promise wrapper adapts migrate's
 * callback API.)
 */
function run() {
  return new Promise((resolve, reject) => {
    migrate.load(migrateOpts, (err, set) => {
      if (err) return reject(err)
      set.up(err => {
        if (err) return reject(err)
        return resolve(0)
      })
    })
  })
}
|
||||
19
packages/server/lib/mnemonic-helpers.js
Normal file
19
packages/server/lib/mnemonic-helpers.js
Normal file
|
|
@ -0,0 +1,19 @@
|
|||
const bip39 = require('bip39')
|
||||
const os = require('os')
|
||||
|
||||
/**
 * Render a seed's BIP-39 mnemonic wrapped at six words per line (each
 * line, including the last, ends with the platform EOL) — the on-disk
 * format of the wallet mnemonic file.
 */
function fromSeed(seed) {
  const words = bip39.entropyToMnemonic(seed).split(' ')

  const lines = []
  for (let i = 0; i < words.length; i += 6) {
    lines.push(words.slice(i, i + 6).join(' '))
  }
  return lines.map(line => line + os.EOL).join('')
}

/**
 * Undo fromSeed's line wrapping and convert the mnemonic back to its raw
 * entropy bytes.
 */
function toEntropyBuffer(mnemonic) {
  const flattened = mnemonic.replace(/\n/g, ' ').trim()
  const hex = bip39.mnemonicToEntropy(flattened)
  return Buffer.from(hex.trim(), 'hex')
}
|
||||
|
||||
module.exports = { toEntropyBuffer, fromSeed }
|
||||
147
packages/server/lib/new-admin/admin-server.js
Normal file
147
packages/server/lib/new-admin/admin-server.js
Normal file
|
|
@ -0,0 +1,147 @@
|
|||
const fs = require('fs')
|
||||
const compression = require('compression')
|
||||
const path = require('path')
|
||||
const express = require('express')
|
||||
const https = require('https')
|
||||
const serveStatic = require('serve-static')
|
||||
const helmet = require('helmet')
|
||||
const nocache = require('nocache')
|
||||
const cookieParser = require('cookie-parser')
|
||||
const { ApolloServer } = require('@apollo/server')
|
||||
const { expressMiddleware } = require('@apollo/server/express4')
|
||||
const {
|
||||
ApolloServerPluginLandingPageDisabled,
|
||||
} = require('@apollo/server/plugin/disabled')
|
||||
const {
|
||||
ApolloServerPluginLandingPageLocalDefault,
|
||||
} = require('@apollo/server/plugin/landingPage/default')
|
||||
|
||||
const { mergeResolvers } = require('@graphql-tools/merge')
|
||||
const { makeExecutableSchema } = require('@graphql-tools/schema')
|
||||
|
||||
require('../environment-helper')
|
||||
const logger = require('../logger')
|
||||
const exchange = require('../exchange')
|
||||
|
||||
const { authDirectiveTransformer } = require('./graphql/directives')
|
||||
const { typeDefs, resolvers } = require('./graphql/schema')
|
||||
const { ResourceNotFoundError } = require('./graphql/errors')
|
||||
const findOperatorId = require('../middlewares/operatorId')
|
||||
const { USER_SESSIONS_CLEAR_INTERVAL } = require('../constants')
|
||||
const {
|
||||
session,
|
||||
cleanUserSessions,
|
||||
buildApolloContext,
|
||||
} = require('./middlewares')
|
||||
|
||||
const devMode = require('minimist')(process.argv.slice(2)).dev
|
||||
|
||||
// Required runtime configuration (see .sample.env).
const HOSTNAME = process.env.HOSTNAME
const KEY_PATH = process.env.KEY_PATH
const CERT_PATH = process.env.CERT_PATH
const CA_PATH = process.env.CA_PATH
const ID_PHOTO_CARD_DIR = process.env.ID_PHOTO_CARD_DIR
const FRONT_CAMERA_DIR = process.env.FRONT_CAMERA_DIR
const OPERATOR_DATA_DIR = process.env.OPERATOR_DATA_DIR

// Hostname is mandatory; refuse to start without it.
if (!HOSTNAME) {
  logger.error('No hostname specified.')
  process.exit(1)
}
|
||||
|
||||
/**
 * Build the admin Express app: security/parsing middlewares, the Apollo
 * GraphQL endpoint (with auth directives and upload support), static
 * photo/data mounts, and an SPA catch-all.
 */
const loadRoutes = async () => {
  const app = express()

  app.use(helmet())
  app.use(compression())
  app.use(nocache())
  app.use(cookieParser())
  app.use(express.json())
  app.use(express.urlencoded({ extended: true })) // support encoded bodies
  app.use(express.static(path.resolve(__dirname, '..', '..', 'public')))
  app.use(cleanUserSessions(USER_SESSIONS_CLEAR_INTERVAL))
  app.use(findOperatorId)
  app.use(session)

  // Dynamic import for graphql-upload since it's not a CommonJS module
  const { default: graphqlUploadExpress } = await import(
    'graphql-upload/graphqlUploadExpress.mjs'
  )
  const { default: GraphQLUpload } = await import(
    'graphql-upload/GraphQLUpload.mjs'
  )

  app.use(graphqlUploadExpress())

  const schema = makeExecutableSchema({
    typeDefs,
    resolvers: mergeResolvers(resolvers, { Upload: GraphQLUpload }),
  })
  // Apply @auth-style schema directives on top of the merged schema.
  const schemaWithDirectives = authDirectiveTransformer(schema)

  const apolloServer = new ApolloServer({
    schema: schemaWithDirectives,
    csrfPrevention: false,
    introspection: false,
    formatError: (formattedError, error) => {
      logger.error(error, JSON.stringify(error?.extensions || {}))

      // Check by constructor name instead of instanceof due to ES module/CommonJS interop issues
      if (error.originalError?.constructor?.name === 'NoResultError') {
        return new ResourceNotFoundError()
      }

      return formattedError
    },
    plugins: [
      // GraphQL playground only in dev mode.
      devMode
        ? ApolloServerPluginLandingPageLocalDefault()
        : ApolloServerPluginLandingPageDisabled(),
    ],
  })

  await apolloServer.start()

  app.use(
    '/graphql',
    express.json(),
    expressMiddleware(apolloServer, {
      context: async ({ req, res }) => buildApolloContext({ req, res }),
    }),
  )

  // Static mounts for customer photos and operator data.
  app.use('/id-card-photo', serveStatic(ID_PHOTO_CARD_DIR, { index: false }))
  app.use(
    '/front-camera-photo',
    serveStatic(FRONT_CAMERA_DIR, { index: false }),
  )
  app.use('/operator-data', serveStatic(OPERATOR_DATA_DIR, { index: false }))

  // Everything not on graphql or api/register is redirected to the front-end
  app.get('*', (req, res) =>
    res.sendFile(path.resolve(__dirname, '..', '..', 'public', 'index.html')),
  )

  return app
}
|
||||
|
||||
// TLS material is read once at module load; missing files abort startup.
const certOptions = {
  key: fs.readFileSync(KEY_PATH),
  cert: fs.readFileSync(CERT_PATH),
  ca: fs.readFileSync(CA_PATH),
}

/**
 * Start the admin HTTPS server (port 8070 in dev mode, 443 otherwise).
 */
async function run() {
  const app = await loadRoutes()
  const serverPort = devMode ? 8070 : 443

  const serverLog = `lamassu-admin-server listening on port ${serverPort}`

  // cache markets on startup
  exchange.getMarkets().catch(console.error)

  const webServer = https.createServer(certOptions, app)
  webServer.listen(serverPort, () => logger.info(serverLog))
}

module.exports = { run }
|
||||
213
packages/server/lib/new-admin/config/accounts.js
Normal file
213
packages/server/lib/new-admin/config/accounts.js
Normal file
|
|
@ -0,0 +1,213 @@
|
|||
const { COINS, ALL_CRYPTOS } = require('@lamassu/coins')
|
||||
const _ = require('lodash/fp')
|
||||
|
||||
const { ALL } = require('../../plugins/common/ccxt')
|
||||
|
||||
const { BTC, BCH, DASH, ETH, LTC, USDT, ZEC, XMR, LN, TRX, USDT_TRON, USDC } =
|
||||
COINS
|
||||
const { bitpay, itbit, bitstamp, kraken, binanceus, cex, binance, bitfinex } =
|
||||
ALL
|
||||
|
||||
const TICKER = 'ticker'
|
||||
const WALLET = 'wallet'
|
||||
const LAYER_2 = 'layer2'
|
||||
const EXCHANGE = 'exchange'
|
||||
const SMS = 'sms'
|
||||
const ID_VERIFIER = 'idVerifier'
|
||||
const EMAIL = 'email'
|
||||
const ZERO_CONF = 'zeroConf'
|
||||
const WALLET_SCORING = 'wallet_scoring'
|
||||
const COMPLIANCE = 'compliance'
|
||||
|
||||
// Registry of every third-party account the admin can configure.
// Each entry: `code` (stable identifier), `display` (UI label), `class`
// (one of the account-class constants above), optional `cryptos` (coins the
// service supports), and optional `dev: true` for mock/dev-only entries,
// which are filtered out of ACCOUNT_LIST unless the server runs in dev mode.
// The same `code` may appear under several classes (e.g. kraken is both a
// ticker and an exchange).
const ALL_ACCOUNTS = [
  // Tickers (price feeds) and exchanges backed by ccxt market data.
  {
    code: 'bitfinex',
    display: 'Bitfinex',
    class: TICKER,
    cryptos: bitfinex.CRYPTO,
  },
  {
    code: 'bitfinex',
    display: 'Bitfinex',
    class: EXCHANGE,
    cryptos: bitfinex.CRYPTO,
  },
  {
    code: 'binance',
    display: 'Binance',
    class: TICKER,
    cryptos: binance.CRYPTO,
  },
  {
    code: 'binanceus',
    display: 'Binance.us',
    class: TICKER,
    cryptos: binanceus.CRYPTO,
  },
  { code: 'cex', display: 'CEX.IO', class: TICKER, cryptos: cex.CRYPTO },
  { code: 'bitpay', display: 'Bitpay', class: TICKER, cryptos: bitpay.CRYPTO },
  { code: 'kraken', display: 'Kraken', class: TICKER, cryptos: kraken.CRYPTO },
  {
    code: 'bitstamp',
    display: 'Bitstamp',
    class: TICKER,
    cryptos: bitstamp.CRYPTO,
  },
  { code: 'itbit', display: 'itBit', class: TICKER, cryptos: itbit.CRYPTO },
  {
    code: 'mock-ticker',
    display: 'Mock (Caution!)',
    class: TICKER,
    cryptos: ALL_CRYPTOS,
    dev: true,
  },
  // Wallets (node daemons and hosted wallet services) and layer-2.
  { code: 'bitcoind', display: 'bitcoind', class: WALLET, cryptos: [BTC] },
  {
    code: 'no-layer2',
    display: 'No Layer 2',
    class: LAYER_2,
    cryptos: ALL_CRYPTOS,
  },
  {
    code: 'infura',
    display: 'Infura/Alchemy',
    class: WALLET,
    cryptos: [ETH, USDT, USDC],
  },
  {
    code: 'trongrid',
    display: 'Trongrid',
    class: WALLET,
    cryptos: [TRX, USDT_TRON],
  },
  {
    code: 'geth',
    display: 'geth (deprecated)',
    class: WALLET,
    cryptos: [ETH, USDT, USDC],
  },
  { code: 'zcashd', display: 'zcashd', class: WALLET, cryptos: [ZEC] },
  { code: 'litecoind', display: 'litecoind', class: WALLET, cryptos: [LTC] },
  { code: 'dashd', display: 'dashd', class: WALLET, cryptos: [DASH] },
  { code: 'monerod', display: 'monerod', class: WALLET, cryptos: [XMR] },
  {
    code: 'bitcoincashd',
    display: 'bitcoincashd',
    class: WALLET,
    cryptos: [BCH],
  },
  {
    code: 'bitgo',
    display: 'BitGo',
    class: WALLET,
    cryptos: [BTC, ZEC, LTC, BCH, DASH],
  },
  { code: 'galoy', display: 'Galoy', class: WALLET, cryptos: [LN] },
  // Exchanges (trading back-ends).
  {
    code: 'bitstamp',
    display: 'Bitstamp',
    class: EXCHANGE,
    cryptos: bitstamp.CRYPTO,
  },
  { code: 'itbit', display: 'itBit', class: EXCHANGE, cryptos: itbit.CRYPTO },
  {
    code: 'kraken',
    display: 'Kraken',
    class: EXCHANGE,
    cryptos: kraken.CRYPTO,
  },
  {
    code: 'binance',
    display: 'Binance',
    class: EXCHANGE,
    cryptos: binance.CRYPTO,
  },
  {
    code: 'binanceus',
    display: 'Binance.us',
    class: EXCHANGE,
    cryptos: binanceus.CRYPTO,
  },
  { code: 'cex', display: 'CEX.IO', class: EXCHANGE, cryptos: cex.CRYPTO },
  {
    code: 'mock-wallet',
    display: 'Mock (Caution!)',
    class: WALLET,
    cryptos: ALL_CRYPTOS,
    dev: true,
  },
  {
    code: 'no-exchange',
    display: 'No exchange',
    class: EXCHANGE,
    cryptos: ALL_CRYPTOS,
  },
  {
    code: 'mock-exchange',
    display: 'Mock exchange',
    class: EXCHANGE,
    cryptos: ALL_CRYPTOS,
    dev: true,
  },
  // Messaging: SMS and email providers (no cryptos field — not coin-specific).
  { code: 'mock-sms', display: 'Mock SMS', class: SMS, dev: true },
  {
    code: 'mock-id-verify',
    display: 'Mock ID verifier',
    class: ID_VERIFIER,
    dev: true,
  },
  { code: 'twilio', display: 'Twilio', class: SMS },
  { code: 'telnyx', display: 'Telnyx', class: SMS },
  { code: 'vonage', display: 'Vonage', class: SMS },
  { code: 'inforu', display: 'InforU', class: SMS },
  { code: 'mailgun', display: 'Mailgun', class: EMAIL },
  { code: 'mock-email', display: 'Mock Email', class: EMAIL, dev: true },
  // Zero-confirmation transaction acceptance providers.
  { code: 'none', display: 'None', class: ZERO_CONF, cryptos: ALL_CRYPTOS },
  {
    code: 'blockcypher',
    display: 'Blockcypher',
    class: ZERO_CONF,
    cryptos: [BTC],
  },
  {
    code: 'mock-zero-conf',
    display: 'Mock 0-conf',
    class: ZERO_CONF,
    cryptos: ALL_CRYPTOS,
    dev: true,
  },
  // Wallet-scoring (blockchain analytics) providers.
  {
    code: 'scorechain',
    display: 'Scorechain',
    class: WALLET_SCORING,
    cryptos: [BTC, ETH, LTC, BCH, DASH, USDT, USDC, USDT_TRON, TRX],
  },
  {
    code: 'elliptic',
    display: 'Elliptic',
    class: WALLET_SCORING,
    cryptos: [BTC, ETH, LTC, BCH, USDT, USDC, USDT_TRON, TRX, ZEC],
  },
  {
    code: 'mock-scoring',
    display: 'Mock scoring',
    class: WALLET_SCORING,
    cryptos: ALL_CRYPTOS,
    dev: true,
  },
  // Compliance (KYC) providers.
  { code: 'sumsub', display: 'Sumsub', class: COMPLIANCE },
  {
    code: 'mock-compliance',
    display: 'Mock Compliance',
    class: COMPLIANCE,
    dev: true,
  },
]
|
||||
|
||||
// Dev mode is switched on with the `--dev` or `--lamassuDev` command-line flag.
const argv = require('minimist')(process.argv.slice(2))
const isDevMode = argv.dev || argv.lamassuDev

// The exported list includes every account in dev mode; otherwise the
// dev-only mock entries (`dev: true`) are dropped.
const ACCOUNT_LIST = isDevMode
  ? ALL_ACCOUNTS
  : ALL_ACCOUNTS.filter(account => !account.dev)

module.exports = { ACCOUNT_LIST }
|
||||
Some files were not shown because too many files have changed in this diff Show more
Loading…
Add table
Add a link
Reference in a new issue