v12.0.0 - initial commit
This commit is contained in:
commit
e2c49ea43c
1145 changed files with 97211 additions and 0 deletions
60
packages/server/lib/new-admin/services/bills.js
Normal file
60
packages/server/lib/new-admin/services/bills.js
Normal file
|
|
@ -0,0 +1,60 @@
|
|||
const _ = require('lodash/fp')
|
||||
const pgp = require('pg-promise')()
|
||||
|
||||
const db = require('../../db')
|
||||
|
||||
// Join the truthy SQL clauses with ' AND ', dropping empty/absent ones.
const AND = (...clauses) => clauses.filter(Boolean).join(' AND ')
|
||||
|
||||
/**
 * Translate a cashbox-batch filter value into a SQL condition on
 * b.cashbox_batch_id.
 *
 * 'none'          → bills not yet assigned to a batch
 * 'any'           → bills assigned to some batch
 * null/undefined  → no condition (empty string)
 * anything else   → match that specific batch id (escaped via pgp.as.text)
 */
const getBatchIDCondition = filter => {
  if (filter === 'none') return 'b.cashbox_batch_id IS NULL'
  if (filter === 'any') return 'b.cashbox_batch_id IS NOT NULL'
  if (_.isNil(filter)) return ''
  return `b.cashbox_batch_id = ${pgp.as.text(filter)}`
}
|
||||
|
||||
/**
 * Fetch bills for the admin UI, optionally filtered by device id and
 * cashbox batch.
 *
 * Combines two sources:
 *   - `bills` destined for the cashbox, joined through `cash_in_txs` to
 *     recover the originating device (only currently paired devices);
 *   - `empty_unit_bills` (recycler bills), which carry their own device_id.
 *
 * Returns one flat array with snake_case columns camelCased.
 * NOTE(review): filter values are escaped with pgp.as.text before being
 * interpolated into the SQL text.
 */
const getBills = filters => {
  // Optional per-device condition; empty string means "no filter" and is
  // dropped by AND().
  const deviceIDCondition = !_.isNil(filters.deviceId)
    ? `device_id = ${pgp.as.text(filters.deviceId)}`
    : ''
  const batchIDCondition = getBatchIDCondition(filters.batch)

  const cashboxBills = `SELECT b.id, b.fiat, b.fiat_code, b.created, b.cashbox_batch_id, cit.device_id AS device_id
  FROM bills b
  LEFT OUTER JOIN (
    SELECT id, device_id
    FROM cash_in_txs
    WHERE ${AND(
      deviceIDCondition,
      'device_id IN (SELECT device_id FROM devices WHERE paired)',
    )}
  ) AS cit
  ON cit.id = b.cash_in_txs_id
  WHERE ${AND(
    batchIDCondition,
    "b.destination_unit = 'cashbox'",
    'cit.device_id IS NOT NULL',
  )}`

  const recyclerBills = `SELECT b.id, b.fiat, b.fiat_code, b.created, b.cashbox_batch_id, b.device_id
  FROM empty_unit_bills b
  WHERE ${AND(
    deviceIDCondition,
    batchIDCondition,
    'b.device_id IN (SELECT device_id FROM devices WHERE paired)',
  )}`

  // Run both queries in parallel and merge the result sets.
  return Promise.all([db.any(cashboxBills), db.any(recyclerBills)]).then(
    ([cashboxBills, recyclerBills]) =>
      [].concat(cashboxBills, recyclerBills).map(_.mapKeys(_.camelCase)),
  )
}
|
||||
|
||||
module.exports = {
|
||||
getBills,
|
||||
}
|
||||
168
packages/server/lib/new-admin/services/customInfoRequests.js
Normal file
168
packages/server/lib/new-admin/services/customInfoRequests.js
Normal file
|
|
@ -0,0 +1,168 @@
|
|||
const db = require('../../db')
|
||||
const uuid = require('uuid')
|
||||
const _ = require('lodash/fp')
|
||||
const pgp = require('pg-promise')()
|
||||
|
||||
const {
|
||||
deleteComplianceTriggersByCustomInfoRequestId,
|
||||
} = require('../../compliance-triggers')
|
||||
|
||||
/**
 * List custom information requests, ordered by the name stored inside the
 * custom_request JSON column.
 * @param {boolean} [onlyEnabled=false] restrict to enabled requests
 * @returns {Promise<Array<{id, enabled, customRequest}>>}
 */
const getCustomInfoRequests = (onlyEnabled = false) => {
  const sql = onlyEnabled
    ? `SELECT * FROM custom_info_requests WHERE enabled = true ORDER BY custom_request->>'name'`
    : `SELECT * FROM custom_info_requests ORDER BY custom_request->>'name'`
  const toApiShape = row => ({
    id: row.id,
    enabled: row.enabled,
    customRequest: row.custom_request,
  })
  return db.any(sql).then(rows => rows.map(toApiShape))
}
|
||||
|
||||
/**
 * Persist a new custom information request under a freshly generated UUID.
 * @returns {Promise<{id: string}>} the generated id
 */
const addCustomInfoRequest = customRequest => {
  const id = uuid.v4()
  return db
    .none('INSERT INTO custom_info_requests (id, custom_request) VALUES ($1, $2)', [
      id,
      customRequest,
    ])
    .then(() => ({ id }))
}
|
||||
|
||||
// TODO: execute in a transaction
/**
 * Soft-delete a custom info request: removes its compliance triggers, then
 * flags the request as disabled (the row itself is kept).
 * @returns {Promise<{id}>}
 */
const removeCustomInfoRequest = id => {
  const disableRequest = () =>
    db.none('UPDATE custom_info_requests SET enabled = false WHERE id = $1', [
      id,
    ])
  return deleteComplianceTriggersByCustomInfoRequestId(id)
    .then(disableRequest)
    .then(() => ({ id }))
}
|
||||
|
||||
/**
 * Replace the custom_request JSON of an existing request.
 * @returns {Promise<{id, customRequest}>} echo of the applied update
 */
const editCustomInfoRequest = (id, customRequest) => {
  const sql = 'UPDATE custom_info_requests SET custom_request = $1 WHERE id=$2'
  const params = [customRequest, id]
  return db.none(sql, params).then(() => ({ id, customRequest }))
}
|
||||
|
||||
/**
 * All custom-info-request answers recorded for one customer.
 * @returns {Promise<Array>} camelCased rows
 */
const getAllCustomInfoRequestsForCustomer = customerId => {
  const toApiShape = row => ({
    customerId: row.customer_id,
    infoRequestId: row.info_request_id,
    customerData: row.customer_data,
    override: row.override,
    overrideAt: row.override_at,
    overrideBy: row.override_by,
  })
  return db
    .any(`SELECT * FROM customers_custom_info_requests WHERE customer_id = $1`, [
      customerId,
    ])
    .then(rows => rows.map(toApiShape))
}
|
||||
|
||||
/**
 * One customer's answer to one specific info request.
 * Rejects (db.one) when no matching row exists.
 */
const getCustomInfoRequestForCustomer = (customerId, infoRequestId) => {
  const sql = `SELECT * FROM customers_custom_info_requests WHERE customer_id = $1 AND info_request_id = $2`
  return db.one(sql, [customerId, infoRequestId]).then(row => ({
    customerId: row.customer_id,
    infoRequestId: row.info_request_id,
    customerData: row.customer_data,
    override: row.override,
    overrideAt: row.override_at,
    overrideBy: row.override_by,
  }))
}
|
||||
|
||||
/**
 * Batch loader: for each customer id, the list of its custom-info-request
 * rows (empty array when none). Result order matches customerIds.
 */
const batchGetAllCustomInfoRequestsForCustomer = customerIds => {
  const sql = `SELECT * FROM customers_custom_info_requests WHERE customer_id IN ($1^)`
  const idList = _.map(pgp.as.text, customerIds).join(',')
  const toApiShape = row => ({
    customerId: row.customer_id,
    infoRequestId: row.info_request_id,
    customerData: row.customer_data,
    override: row.override,
    overrideAt: row.override_at,
    overrideBy: row.override_by,
  })
  return db.any(sql, [idList]).then(rows => {
    const byCustomer = _.groupBy('customer_id', rows)
    return customerIds.map(id => (byCustomer[id] || []).map(toApiShape))
  })
}
|
||||
|
||||
/**
 * Load a single custom info request by id (db.one rejects when absent).
 */
const getCustomInfoRequest = infoRequestId =>
  db
    .one(`SELECT * FROM custom_info_requests WHERE id = $1`, [infoRequestId])
    .then(row => ({
      id: row.id,
      enabled: row.enabled,
      customRequest: row.custom_request,
    }))
|
||||
|
||||
/**
 * Batch-load custom info requests by id; results come back in the same
 * order as infoRequestIds.
 *
 * Fix: the original indexed `map[id][0]` unconditionally, so a requested id
 * with no matching row (e.g. a stale reference) threw an opaque TypeError
 * and failed the entire batch. Missing ids now resolve to null, matching
 * the defensive `|| []` pattern used by
 * batchGetAllCustomInfoRequestsForCustomer.
 */
const batchGetCustomInfoRequest = infoRequestIds => {
  if (infoRequestIds.length === 0) return Promise.resolve([])
  const sql = `SELECT * FROM custom_info_requests WHERE id IN ($1^)`
  return db
    .any(sql, [_.map(pgp.as.text, infoRequestIds).join(',')])
    .then(res => {
      const map = _.groupBy('id', res)
      return infoRequestIds.map(id => {
        // id is the primary key, so the group has at most one element.
        const item = (map[id] || [])[0]
        if (!item) return null // id not found — don't crash the whole batch
        return {
          id: item.id,
          enabled: item.enabled,
          customRequest: {
            // Default for rows saved before this flag existed.
            disablePermissionScreen: false,
            ...item.custom_request,
          },
        }
      })
    })
}
|
||||
|
||||
/**
 * Record a manual override decision on a customer's custom info request,
 * stamping who made it (token) and when.
 * @returns {Promise<boolean>} resolves true on success
 */
const setAuthorizedCustomRequest = (
  customerId,
  infoRequestId,
  override,
  token,
) => {
  const sql = `UPDATE customers_custom_info_requests SET override = $1, override_by = $2, override_at = now() WHERE customer_id = $3 AND info_request_id = $4`
  const params = [override, token, customerId, infoRequestId]
  return db.none(sql, params).then(() => true)
}
|
||||
|
||||
/**
 * Upsert a customer's answer (customer_data) to a custom info request.
 */
const setCustomerData = (customerId, infoRequestId, data) => {
  const sql = `
  INSERT INTO customers_custom_info_requests (customer_id, info_request_id, customer_data)
  VALUES ($1, $2, $3)
  ON CONFLICT (customer_id, info_request_id)
  DO UPDATE SET customer_data = $3`
  const params = [customerId, infoRequestId, data]
  return db.none(sql, params)
}
|
||||
|
||||
/**
 * Upsert customer data recorded by the machine itself. On conflict the
 * override fields are stamped 'automatic' with no overriding user
 * ($4 = 'automatic', $5 = null).
 */
const setCustomerDataViaMachine = (customerId, infoRequestId, data) => {
  const sql = `
  INSERT INTO customers_custom_info_requests (customer_id, info_request_id, customer_data)
  VALUES ($1, $2, $3)
  ON CONFLICT (customer_id, info_request_id)
  DO UPDATE SET customer_data = $3, override = $4, override_by = $5, override_at = now()`
  const params = [customerId, infoRequestId, data, 'automatic', null]
  return db.none(sql, params)
}
|
||||
|
||||
module.exports = {
|
||||
getCustomInfoRequests,
|
||||
addCustomInfoRequest,
|
||||
removeCustomInfoRequest,
|
||||
editCustomInfoRequest,
|
||||
getAllCustomInfoRequestsForCustomer,
|
||||
getCustomInfoRequestForCustomer,
|
||||
batchGetAllCustomInfoRequestsForCustomer,
|
||||
getCustomInfoRequest,
|
||||
batchGetCustomInfoRequest,
|
||||
setAuthorizedCustomRequest,
|
||||
setCustomerData,
|
||||
setCustomerDataViaMachine,
|
||||
}
|
||||
88
packages/server/lib/new-admin/services/funding.js
Normal file
88
packages/server/lib/new-admin/services/funding.js
Normal file
|
|
@ -0,0 +1,88 @@
|
|||
const _ = require('lodash/fp')
|
||||
const BN = require('../../bn')
|
||||
const settingsLoader = require('../../new-settings-loader')
|
||||
const configManager = require('../../new-config-manager')
|
||||
const wallet = require('../../wallet')
|
||||
const ticker = require('../../ticker')
|
||||
const txBatching = require('../../tx-batching')
|
||||
const { utils: coinUtils } = require('@lamassu/coins')
|
||||
|
||||
/**
 * Convert a base-unit balance into whole coins for the given crypto,
 * rounded to 5 decimal places.
 */
function computeCrypto(cryptoCode, _balance) {
  const { unitScale } = coinUtils.getCryptoCurrency(cryptoCode)
  return new BN(_balance).shiftedBy(-unitScale).decimalPlaces(5)
}
|
||||
|
||||
/**
 * Convert a base-unit balance into fiat at the given rate, rounded to
 * 5 decimal places. (Rounds after multiplying — not simply
 * computeCrypto(...) * rate.)
 */
function computeFiat(rate, cryptoCode, _balance) {
  const { unitScale } = coinUtils.getCryptoCurrency(cryptoCode)
  return new BN(_balance).shiftedBy(-unitScale).times(rate).decimalPlaces(5)
}
|
||||
|
||||
/**
 * Assemble the funding overview for one crypto currency: deposit address,
 * confirmed and pending balances expressed in both crypto and fiat.
 *
 * @param settings   loaded settings object (passed through to wallet/ticker)
 * @param fiatCode   display fiat currency
 * @param cryptoCode coin to report on
 * @returns {Promise<Object>} display-ready record (balances are formatted strings)
 */
function getSingleCoinFunding(settings, fiatCode, cryptoCode) {
  const promises = [
    wallet.newFunding(settings, cryptoCode),
    ticker.getRates(settings, fiatCode, cryptoCode),
    txBatching.getOpenBatchCryptoValue(cryptoCode),
  ]

  return Promise.all(promises).then(([fundingRec, ratesRec, batchRec]) => {
    const rates = ratesRec.rates
    // Mid price: average of ask and bid.
    const rate = rates.ask.plus(rates.bid).div(2)
    const fundingConfirmedBalance = fundingRec.fundingConfirmedBalance
    const fiatConfirmedBalance = computeFiat(
      rate,
      cryptoCode,
      fundingConfirmedBalance,
    )
    // Value already committed to the open tx batch is not really "pending".
    const pending = fundingRec.fundingPendingBalance.minus(batchRec)
    const fiatPending = computeFiat(rate, cryptoCode, pending)
    const fundingAddress = fundingRec.fundingAddress
    const fundingAddressUrl = coinUtils.buildUrl(cryptoCode, fundingAddress)

    return {
      cryptoCode,
      fundingAddress,
      fundingAddressUrl,
      confirmedBalance: computeCrypto(
        cryptoCode,
        fundingConfirmedBalance,
      ).toFormat(5),
      pending: computeCrypto(cryptoCode, pending).toFormat(5),
      fiatConfirmedBalance: fiatConfirmedBalance,
      fiatPending: fiatPending,
      fiatCode,
    }
  })
}
|
||||
|
||||
// Promise.allSettled not running on current version of node
// Settle helper: never rejects — wraps the outcome in a
// { status, value | error } record (errors are stringified).
const reflect = promise => {
  const onFulfilled = value => ({ value, status: 'fulfilled' })
  const onRejected = error => ({ error: error.toString(), status: 'rejected' })
  return promise.then(onFulfilled, onRejected)
}
|
||||
|
||||
/**
 * Build the funding overview for every configured crypto currency.
 * Per-coin failures are captured via reflect() and surfaced as an errorMsg
 * field on that coin's entry instead of rejecting the whole request.
 */
function getFunding() {
  return settingsLoader.load().then(settings => {
    const cryptoCodes = configManager.getAllCryptoCurrencies(settings.config)
    const fiatCode = configManager.getGlobalLocale(settings.config).fiatCurrency
    // Keep only coin definitions that are actually configured.
    const pareCoins = c => _.includes(c.cryptoCode, cryptoCodes)
    const cryptoCurrencies = coinUtils.cryptoCurrencies()
    const cryptoDisplays = _.filter(pareCoins, cryptoCurrencies)

    const promises = cryptoDisplays.map(it =>
      getSingleCoinFunding(settings, fiatCode, it.cryptoCode),
    )
    return Promise.all(promises.map(reflect)).then(response => {
      // Fulfilled entries carry .value (coin record); rejected ones only
      // .error, so errorMsg is undefined on success.
      const mapped = response.map(it =>
        _.merge({ errorMsg: it.error }, it.value),
      )
      // lodash merge on two arrays combines them index-wise: funding data
      // layered over the static coin display info.
      return _.toArray(_.merge(mapped, cryptoDisplays))
    })
  })
}
|
||||
|
||||
module.exports = { getFunding }
|
||||
23
packages/server/lib/new-admin/services/login.js
Normal file
23
packages/server/lib/new-admin/services/login.js
Normal file
|
|
@ -0,0 +1,23 @@
|
|||
const db = require('../../db')
|
||||
|
||||
/**
 * Authenticate an admin user and stamp their last access time.
 *
 * Resolves with the matching user row, or false when the credentials do not
 * match exactly one row.
 *
 * NOTE(review): the password appears to be compared in plain text against
 * the users table — confirm whether hashing happens upstream of this call.
 * NOTE(review): the blanket .catch(() => false) also maps infrastructure
 * errors (e.g. lost DB connection) to "invalid credentials".
 */
function validateUser(username, password) {
  return db.tx(t => {
    // t.one rejects unless exactly one row matches, which drives the
    // catch → false path below.
    const q1 = t.one('SELECT * FROM users WHERE username=$1 AND password=$2', [
      username,
      password,
    ])
    const q2 = t.none(
      'UPDATE users SET last_accessed = now() WHERE username=$1',
      [username],
    )

    return t
      .batch([q1, q2])
      .then(([user]) => user)
      .catch(() => false)
  })
}
|
||||
|
||||
module.exports = {
|
||||
validateUser,
|
||||
}
|
||||
56
packages/server/lib/new-admin/services/machineGroups.js
Normal file
56
packages/server/lib/new-admin/services/machineGroups.js
Normal file
|
|
@ -0,0 +1,56 @@
|
|||
const { v4: uuid } = require('uuid')
|
||||
const { machineGroups, PG_ERROR_CODES } = require('typesafe-db')
|
||||
const { defaultMachineGroup } = require('../../constants')
|
||||
const {
|
||||
ResourceAlreadyExistsError,
|
||||
ResourceHasDependenciesError,
|
||||
} = require('../graphql/errors')
|
||||
|
||||
/** List every machine group together with its device count. */
async function getAllMachineGroups() {
  const groups = await machineGroups.getMachineGroupsWithDeviceCount()
  return groups
}
|
||||
|
||||
/**
 * Create a machine group under a fresh UUID.
 * @throws {ResourceAlreadyExistsError} when the name is already taken
 *         (translated from the Postgres unique-violation error)
 */
async function createMachineGroup(name) {
  try {
    const newGroup = await machineGroups.createMachineGroup({ id: uuid(), name })
    // A brand-new group has no devices assigned yet.
    return { ...newGroup, deviceCount: 0 }
  } catch (error) {
    if (error.code !== PG_ERROR_CODES.UNIQUE_VIOLATION) throw error
    throw new ResourceAlreadyExistsError({ name })
  }
}
|
||||
|
||||
/**
 * Delete a machine group.
 * @throws {ResourceHasDependenciesError} for the default group (never
 *         removable) or when devices still reference the group
 *         (translated from the Postgres foreign-key violation).
 */
async function deleteMachineGroup(id) {
  // The default group must never be removed.
  if (id === defaultMachineGroup.uuid) {
    throw new ResourceHasDependenciesError({ id, name: 'default' })
  }

  try {
    return await machineGroups.deleteMachineGroup(id)
  } catch (error) {
    if (error.code !== PG_ERROR_CODES.FOREIGN_KEY_VIOLATION) throw error
    throw new ResourceHasDependenciesError({ id })
  }
}
|
||||
|
||||
// Point a machine group at a compliance trigger set (thin pass-through to
// the typesafe-db layer).
function assignComplianceTriggerSetToMachineGroup(id, complianceTriggerSetId) {
  return machineGroups.setComplianceTriggerSetId(id, complianceTriggerSetId)
}
|
||||
|
||||
module.exports = {
|
||||
getAllMachineGroups,
|
||||
createMachineGroup,
|
||||
deleteMachineGroup,
|
||||
assignComplianceTriggerSetToMachineGroup,
|
||||
}
|
||||
27
packages/server/lib/new-admin/services/machines.js
Normal file
27
packages/server/lib/new-admin/services/machines.js
Normal file
|
|
@ -0,0 +1,27 @@
|
|||
const machineLoader = require('../../machine-loader')
|
||||
const { UserInputError } = require('../graphql/errors')
|
||||
|
||||
/** Look up one machine by device id; resolves undefined when absent. */
function getMachine(machineId) {
  const matchesId = ({ deviceId }) => deviceId === machineId
  return machineLoader.getMachines().then(machines => machines.find(matchesId))
}
|
||||
|
||||
/**
 * Execute an admin action against a machine, then return its refreshed
 * record.
 *
 * @param {Object} args   { deviceId, action, cashUnits, newName }
 * @param {Object} context Express-style GQL context (operator id read from
 *                         res.locals)
 * @throws {UserInputError} when no machine matches deviceId
 */
function machineAction({ deviceId, action, cashUnits, newName }, context) {
  const operatorId = context.res.locals.operatorId
  return getMachine(deviceId)
    .then(machine => {
      // Fail fast with a user-facing error instead of acting on a missing machine.
      if (!machine)
        throw new UserInputError(`machine:${deviceId} not found`, { deviceId })
      return machine
    })
    .then(() =>
      machineLoader.setMachine(
        { deviceId, action, cashUnits, newName },
        operatorId,
      ),
    )
    // Re-read so the caller sees post-action state.
    .then(() => getMachine(deviceId))
}
|
||||
|
||||
module.exports = { machineAction }
|
||||
37
packages/server/lib/new-admin/services/pairing.js
Normal file
37
packages/server/lib/new-admin/services/pairing.js
Normal file
|
|
@ -0,0 +1,37 @@
|
|||
const fs = require('fs')
|
||||
const pify = require('pify')
|
||||
const readFile = pify(fs.readFile)
|
||||
const crypto = require('crypto')
|
||||
const baseX = require('base-x')
|
||||
|
||||
const db = require('../../db')
|
||||
const pairing = require('../../pairing')
|
||||
|
||||
const ALPHA_BASE = '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ $%*+-./:'
|
||||
const bsAlpha = baseX(ALPHA_BASE)
|
||||
|
||||
const CA_PATH = process.env.CA_PATH
|
||||
const HOSTNAME = process.env.HOSTNAME
|
||||
|
||||
const unpair = pairing.unpair
|
||||
|
||||
/**
 * Build a pairing totem for a new machine.
 *
 * The totem is the custom-alphabet (ALPHA_BASE) encoding of:
 *   sha256(CA certificate) || 32 random bytes || hostname
 * Two lookup tokens are stored in pairing_tokens under the same name: the
 * random bytes as hex, and the sha256 of that hex string — presumably so
 * the machine can redeem either form; confirm against the pairing flow.
 *
 * @param {string} name label stored alongside the tokens
 * @returns {Promise<string>} the encoded totem
 */
function totem(name) {
  return readFile(CA_PATH).then(data => {
    // Fingerprint of the CA cert the machine should expect to see.
    const caHash = crypto.createHash('sha256').update(data).digest()
    const token = crypto.randomBytes(32)
    const hexToken = token.toString('hex')
    const caHexToken = crypto
      .createHash('sha256')
      .update(hexToken)
      .digest('hex')
    const buf = Buffer.concat([caHash, token, Buffer.from(HOSTNAME)])
    // Both token forms inserted under the same name ($3).
    const sql =
      'insert into pairing_tokens (token, name) values ($1, $3), ($2, $3)'

    return db
      .none(sql, [hexToken, caHexToken, name])
      .then(() => bsAlpha.encode(buf))
  })
}
|
||||
|
||||
module.exports = { totem, unpair }
|
||||
18
packages/server/lib/new-admin/services/restriction-level.js
Normal file
18
packages/server/lib/new-admin/services/restriction-level.js
Normal file
|
|
@ -0,0 +1,18 @@
|
|||
const mem = require('mem')
|
||||
const { machines } = require('typesafe-db')
|
||||
|
||||
// Cache configuration: 30 minutes
|
||||
const CACHE_DURATION = 30 * 60 * 1000
|
||||
|
||||
// Uncached fetch; async so the memoised wrapper below always sees a Promise.
const _getHighestRestrictionLevel = async () =>
  machines.getHighestRestrictionLevel()
|
||||
|
||||
// Memoised accessor: refetches at most every CACHE_DURATION (30 min).
const getCachedRestrictionLevel = mem(_getHighestRestrictionLevel, {
  maxAge: CACHE_DURATION,
  cacheKey: () => '', // constant key → one cache slot regardless of arguments
})
|
||||
|
||||
module.exports = {
|
||||
getCachedRestrictionLevel,
|
||||
}
|
||||
22
packages/server/lib/new-admin/services/server-logs.js
Normal file
22
packages/server/lib/new-admin/services/server-logs.js
Normal file
|
|
@ -0,0 +1,22 @@
|
|||
const _ = require('lodash/fp')
|
||||
|
||||
const db = require('../../db')
|
||||
|
||||
/**
 * Page through server_logs rows within [from, until], newest first.
 *
 * @param {string}  [from]   inclusive ISO lower bound (default: epoch)
 * @param {string}  [until]  inclusive ISO upper bound (default: now)
 * @param {?number} [limit]  row cap; null passes LIMIT NULL (no limit in Postgres)
 * @param {number}  [offset] rows to skip
 * @returns {Promise<Array>} camelCased log rows
 */
function getServerLogs(
  from = new Date(0).toISOString(),
  until = new Date().toISOString(),
  limit = null,
  offset = 0,
) {
  const sql = `select id, log_level, timestamp, message from server_logs
  where timestamp >= $1 and timestamp <= $2
  order by timestamp desc
  limit $3
  offset $4`
  const camelise = _.map(_.mapKeys(_.camelCase))
  return db.any(sql, [from, until, limit, offset]).then(camelise)
}
|
||||
|
||||
module.exports = { getServerLogs }
|
||||
64
packages/server/lib/new-admin/services/supervisor.js
Normal file
64
packages/server/lib/new-admin/services/supervisor.js
Normal file
|
|
@ -0,0 +1,64 @@
|
|||
const xmlrpc = require('xmlrpc')
|
||||
const logger = require('../../logger')
|
||||
const { promisify } = require('util')
|
||||
|
||||
// TODO new-admin: add the following to supervisor config
|
||||
// [inet_http_server]
|
||||
// port = 127.0.0.1:9001
|
||||
|
||||
/**
 * Query supervisord's XML-RPC API for the state of every managed process.
 *
 * Resolves with [{ name, state, uptime }] where state is collapsed to
 * STOPPED / RUNNING / FATAL. On connection failure the error is logged and
 * an empty array is returned (fix: previously the catch resolved to
 * undefined, forcing every caller to null-check).
 */
function getAllProcessInfo() {
  // Collapse supervisord's fine-grained states into the three we report.
  // From http://supervisord.org/subprocess.html#process-states
  const convertStates = state => {
    switch (state) {
      case 'STOPPED':
      case 'STOPPING':
      case 'EXITED':
        return 'STOPPED'
      case 'STARTING':
      case 'RUNNING':
        return 'RUNNING'
      case 'BACKOFF':
      case 'UNKNOWN':
        return 'FATAL'
      default:
        logger.error(`Supervisord returned an unsupported state: ${state}`)
        return 'FATAL'
    }
  }

  const client = xmlrpc.createClient({
    host: 'localhost',
    port: '9001',
    path: '/RPC2',
  })

  // Adapt the callback API directly instead of patching a
  // promisify.custom property onto the client on every invocation.
  const call = (method, params) =>
    new Promise((resolve, reject) =>
      client.methodCall(method, params, (err, value) => {
        if (err) reject(err)
        else resolve(value)
      }),
    )

  return call('supervisor.getAllProcessInfo', [])
    .then(value => {
      return value.map(process => ({
        name: process.name,
        state: convertStates(process.statename),
        // now/start look like epoch seconds from supervisord — uptime is
        // only meaningful while the process is RUNNING.
        uptime:
          process.statename === 'RUNNING' ? process.now - process.start : 0,
      }))
    })
    .catch(error => {
      if (error.code === 'ECONNREFUSED')
        logger.error('Failed to connect to supervisord HTTP server.')
      else logger.error(error)
      // Keep the API total: callers can always iterate the result.
      return []
    })
}
|
||||
|
||||
module.exports = { getAllProcessInfo }
|
||||
232
packages/server/lib/new-admin/services/transactions.js
Normal file
232
packages/server/lib/new-admin/services/transactions.js
Normal file
|
|
@ -0,0 +1,232 @@
|
|||
const _ = require('lodash/fp')
|
||||
|
||||
const db = require('../../db')
|
||||
const BN = require('../../bn')
|
||||
const { utils: coinUtils } = require('@lamassu/coins')
|
||||
const {
|
||||
transactions: { getTransactionById, getTransactionList },
|
||||
} = require('typesafe-db')
|
||||
|
||||
// Decorate each transaction with derived profit and whole-coin amount
// strings (computed by getProfit / getCryptoAmount).
function addProfits(txs) {
  const decorate = tx => ({
    ...tx,
    profit: getProfit(tx).toString(),
    cryptoAmount: getCryptoAmount(tx).toString(),
  })
  return _.map(decorate, txs)
}
|
||||
|
||||
/**
 * Fetch a page of transactions for the admin UI and CSV exports.
 *
 * All filters are optional; nulls pass straight through to
 * getTransactionList. `simplified` is tri-state:
 *   undefined → plain listing (full rows with profits added)
 *   true      → simplified CSV column set
 *   false     → advanced CSV column set
 */
function batch({
  from = new Date(0).toISOString(),
  until = new Date().toISOString(),
  limit = null,
  offset = 0,
  txClass = null,
  deviceId = null,
  customerId = null,
  cryptoCode = null,
  toAddress = null,
  status = null,
  swept = null,
  excludeTestingCustomers = false,
  simplified,
}) {
  // Only the CSV export paths pass a boolean here.
  const isCsvExport = _.isBoolean(simplified)
  return getTransactionList(
    {
      from,
      until,
      cryptoCode,
      txClass,
      deviceId,
      toAddress,
      customerId,
      swept,
      status,
      excludeTestingCustomers,
    },
    { limit, offset },
  )
    .then(addProfits)
    .then(res =>
      !isCsvExport
        ? res
        : // GQL transactions and transactionsCsv both use this function and
          // if we don't check for the correct simplified value, the Transactions page polling
          // will continuously build a csv in the background
          simplified
          ? simplifiedBatch(res)
          : advancedBatch(res),
    )
}
|
||||
|
||||
// Project transactions onto the full ("advanced") CSV column set.
// Optional fee columns are normalised to null so every exported row has
// the same shape.
function advancedBatch(data) {
  // Column order here is the CSV column order.
  const fields = [
    'txClass',
    'id',
    'deviceId',
    'toAddress',
    'cryptoAtoms',
    'cryptoCode',
    'fiat',
    'fiatCode',
    'fee',
    'status',
    'profit',
    'cryptoAmount',
    'dispense',
    'notified',
    'redeem',
    'phone',
    'email',
    'error',
    'fixedFee',
    'created',
    'confirmedAt',
    'hdIndex',
    'swept',
    'timedout',
    'dispenseConfirmed',
    'provisioned1',
    'provisioned2',
    'provisioned3',
    'provisioned4',
    'provisionedRecycler1',
    'provisionedRecycler2',
    'provisionedRecycler3',
    'provisionedRecycler4',
    'provisionedRecycler5',
    'provisionedRecycler6',
    'denomination1',
    'denomination2',
    'denomination3',
    'denomination4',
    'denominationRecycler1',
    'denominationRecycler2',
    'denominationRecycler3',
    'denominationRecycler4',
    'denominationRecycler5',
    'denominationRecycler6',
    'errorCode',
    'customerId',
    'txVersion',
    'publishedAt',
    'termsAccepted',
    'commissionPercentage',
    'rawTickerPrice',
    'receivedCryptoAtoms',
    'discount',
    'couponCode',
    'txHash',
    'customerPhone',
    'customerEmail',
    'customerIdCardDataNumber',
    'customerIdCardDataExpiration',
    'customerIdCardData',
    'sendTime',
    'customerFrontCameraPath',
    'customerIdCardPhotoPath',
    'expired',
    'machineName',
    'walletScore',
  ]

  // Ensure the optional fee columns exist on every row (null when absent).
  const addAdvancedFields = _.map(it => ({
    ...it,
    fixedFee: it.fixedFee ?? null,
    fee: it.fee ?? null,
  }))

  return _.compose(_.map(_.pick(fields)), addAdvancedFields)(data)
}
|
||||
|
||||
// Trim transactions down to the short column set used by the simplified
// CSV export; column order here is the CSV column order.
function simplifiedBatch(data) {
  const fields = [
    'txClass',
    'id',
    'created',
    'machineName',
    'fee',
    'cryptoCode',
    'cryptoAtoms',
    'fiat',
    'fiatCode',
    'phone',
    'email',
    'toAddress',
    'txHash',
    'dispense',
    'error',
    'status',
    'profit',
    'cryptoAmount',
  ]
  const pickFields = _.pick(fields)
  return _.map(pickFields, data)
}
|
||||
|
||||
// Convert a transaction's base-unit amount (cryptoAtoms) into whole coins.
const getCryptoAmount = it =>
  coinUtils.toUnit(BN(it.cryptoAtoms), it.cryptoCode)
|
||||
|
||||
/**
 * Operator profit for one transaction, in fiat terms, relative to the raw
 * ticker price:
 *   cash-in:  fiat - crypto * tickerPrice
 *   cash-out: crypto * tickerPrice - fiat
 */
const getProfit = tx => {
  const fiat = BN(tx.fiat)
  const crypto = getCryptoAmount(tx)
  const tickerPrice = BN(tx.rawTickerPrice)

  if (tx.txClass === 'cashIn') {
    return fiat.minus(crypto.times(tickerPrice))
  }
  return crypto.times(tickerPrice).minus(fiat)
}
|
||||
|
||||
// Load a single transaction (either class) by id via typesafe-db.
function getTx(txId) {
  return getTransactionById(txId)
}
|
||||
|
||||
// Rows associated with a transaction: inserted bills for cash-in, dispense
// actions for cash-out. Resolves [] (manyOrNone) when there are none.
function getTxAssociatedData(txId, txClass) {
  const sql =
    txClass === 'cashIn'
      ? `select 'bills' as bills, b.* from bills b where cash_in_txs_id = $1`
      : `select 'cash_out_actions' as cash_out_actions, actions.* from cash_out_actions actions where tx_id = $1`
  return db.manyOrNone(sql, [txId])
}
|
||||
|
||||
/**
 * Attach a customer photo (path + timestamp) to a transaction. `data` keys
 * arrive camelCased and are snake_cased to match the column names.
 */
function updateTxCustomerPhoto(customerId, txId, direction, data) {
  const formattedData = _.mapKeys(_.snakeCase, data)
  // Identical statement for both directions — only the table differs.
  const table = direction === 'cashIn' ? 'cash_in_txs' : 'cash_out_txs'
  const sql = `UPDATE ${table} SET tx_customer_photo_at = $1, tx_customer_photo_path = $2 WHERE customer_id=$3 AND id=$4`
  return db.oneOrNone(sql, [
    formattedData.tx_customer_photo_at,
    formattedData.tx_customer_photo_path,
    customerId,
    txId,
  ])
}
|
||||
|
||||
module.exports = {
|
||||
batch,
|
||||
getTx,
|
||||
getTxAssociatedData,
|
||||
updateTxCustomerPhoto,
|
||||
}
|
||||
48
packages/server/lib/new-admin/services/triggers.js
Normal file
48
packages/server/lib/new-admin/services/triggers.js
Normal file
|
|
@ -0,0 +1,48 @@
|
|||
const { PG_ERROR_CODES } = require('typesafe-db')
|
||||
|
||||
const complianceTriggers = require('../../compliance-triggers')
|
||||
const { ResourceAlreadyExistsError } = require('../graphql/errors')
|
||||
|
||||
const getComplianceTriggerSets = () =>
|
||||
complianceTriggers.getComplianceTriggerSets()
|
||||
|
||||
const getComplianceTriggerSetById = id =>
|
||||
complianceTriggers.getComplianceTriggerSetById(id)
|
||||
|
||||
const getComplianceTriggers = complianceTriggerSetId =>
|
||||
complianceTriggers.getComplianceTriggers(complianceTriggerSetId)
|
||||
|
||||
const createComplianceTriggerSet = name =>
|
||||
complianceTriggers.createComplianceTriggerSet(name).catch(error => {
|
||||
if (error.code === PG_ERROR_CODES.UNIQUE_VIOLATION)
|
||||
throw new ResourceAlreadyExistsError({ name })
|
||||
throw error
|
||||
})
|
||||
|
||||
const deleteComplianceTriggerSet = id =>
|
||||
complianceTriggers.deleteComplianceTriggerSet(id)
|
||||
|
||||
const createComplianceTrigger = (complianceTriggerSetId, trigger) =>
|
||||
complianceTriggers.createComplianceTrigger(complianceTriggerSetId, trigger)
|
||||
|
||||
const deleteComplianceTrigger = id =>
|
||||
complianceTriggers.deleteComplianceTrigger(id)
|
||||
|
||||
const getComplianceTriggerSetsByIdsBatch = ids =>
|
||||
getComplianceTriggerSets().then(ctss => {
|
||||
const ctsIdToName = Object.fromEntries(
|
||||
ctss.map(({ id, name }) => [id, name]),
|
||||
)
|
||||
return ids.map(id => ({ id, name: ctsIdToName[id] }))
|
||||
})
|
||||
|
||||
module.exports = {
|
||||
getComplianceTriggerSets,
|
||||
getComplianceTriggerSetById,
|
||||
getComplianceTriggers,
|
||||
createComplianceTriggerSet,
|
||||
deleteComplianceTriggerSet,
|
||||
createComplianceTrigger,
|
||||
deleteComplianceTrigger,
|
||||
getComplianceTriggerSetsByIdsBatch,
|
||||
}
|
||||
Loading…
Add table
Add a link
Reference in a new issue