Merge branch 'master' of github.com:shocknet/Lightning.Pub

This commit is contained in:
boufni95 2024-02-12 21:55:12 +01:00
commit bf56f8ba69
14 changed files with 185 additions and 29 deletions

1
.gitignore vendored
View file

@ -7,5 +7,6 @@ temp/
.env .env
build/ build/
db.sqlite db.sqlite
metrics.sqlite
.key/ .key/
logs logs

View file

@ -8,36 +8,38 @@
### Don't just run a Lightning Node, run a Lightning Pub. ### Don't just run a Lightning Node, run a Lightning Pub.
`Pub` enables your Lightning node with public API's and accounts over `nostr`, allowing LN nodes to act like a webserver without the complexity of networking and SSL configurations. "Pub" is a `nostr` native account system that makes connecting your node to apps and websites super easy.
The Permissionless WebApps that use these API's promote a more decentralized Lightning Network, by removing hurdles for self-custodial home nodes to power connections from Friends, Family and Customers. Using Nostr relays as transport for encrypted RPCs, Pub eliminates the complexity of WebServer and SSL configurations.
By solving the networking and programmability hurdles, Pub enables node-runners and Uncle Jim's to bring their Friends, Family and Customers into Bitcoin's permissionless circular economy. All while keeping the Lightning Network decentralized, and custodial scaling free of fiat shitcoin rails and large banks.
#### Features: #### Features:
- Wrapper for [`LND`](https://github.com/lightningnetwork/lnd/releases) that can serve accounts over LNURL and NOSTR - Wrapper for [`LND`](https://github.com/lightningnetwork/lnd/releases) that can serve accounts over LNURL and NOSTR
- A growing number of [methods](https://github.com/shocknet/Lightning.Pub/blob/master/proto/autogenerated/client.md) - A growing number of [methods](https://github.com/shocknet/Lightning.Pub/blob/master/proto/autogenerated/client.md)
- Accounting SubLayers for Application Pools and Users - Accounting SubLayers for Application Pools and Users
- A fee regime allows applications owners to tax users, or node operators to tax applications. - A fee regime allows applications owners to monetize users, or node operators to host distinctly monetized applications.
![Accounts](https://github.com/shocknet/Lightning.Pub/raw/master/accounting_layers.png) ![Accounts](https://github.com/shocknet/Lightning.Pub/raw/master/accounting_layers.png)
#### Planned #### Planned
- [ ] Management Dashboard is being integrated into [ShockWallet](https://github.com/shocknet/wallet2) - [ ] Management Dashboard is being integrated into [ShockWallet](https://github.com/shocknet/wallet2)
- [ ] Nostr native "offers"
- [ ] Channel Automation - [ ] Channel Automation
- [ ] Bootstrap Peering (Passive "LSP")
- [ ] Subscriptions / Notifications - [ ] Subscriptions / Notifications
- [ ] Submarine Swaps - [ ] Submarine Swaps
- [ ] High-Availability - [ ] High-Availability / Clustering
Dashboard: Dashboard:
<img src="https://shockwallet.b-cdn.net/pub_home_ss.png" alt="Pub Dashboard" width="240"> <img src="https://shockwallet.b-cdn.net/pub_home_ss.png" alt="Pub Dashboard" width="240">
#### ShockWallet and most of Lightning.Pub were developed as part of the [Bolt.Fun hackathon](https://bolt.fun/project/shocknet). If you would like to see continued development, please show your support there and help us win :) #### ShockWallet and Lightning.Pub are free software. If you would like to see continued development, please show your [support](https://github.com/sponsors/shocknet) :)
#### See the original NostrHack presentation: https://lightning.video/f0f64fa1fc3744fb6a3880e2bd8f6a254ceb3caee112d9708271f2d6a09a2f00
> **WARNING:** This repository is under rapid iteration and security is not guaranteed. Use tagged releases for non-development. > **WARNING:** While this software has been used in production for many months, it is still bleeding edge and security or reliability is not guaranteed.
## Manual Installation ## Manual Installation
@ -64,16 +66,17 @@ cd Lightning.Pub && npm i
3) `cp env.example .env` 3) `cp env.example .env`
4) Add values to env file 4) Add values to env file
- You can generate a keypair with `node genkey.js`
5) `npm start` 5) `npm start`
6) Create an Application Pool 6) Create an Application Pool
A default "wallet" pool will be automatically created and keys generated automatically, if you wish to create something other:
``` ```
curl -XPOST -H 'Authorization: Bearer defined_in_constants.ts' -H "Content-type: application/json" -d '{"name":"ExampleApplicationPoolName"}' 'http://localhost:8080/api/admin/app/add' curl -XPOST -H 'Authorization: Bearer defined_in_constants.ts' -H "Content-type: application/json" -d '{"name":"ExampleApplicationPoolName"}' 'http://localhost:8080/api/admin/app/add'
``` ```
7) Connect with [wallet2](https://github.com/shocknet/wallet2) using the npub response in step 6. 7) Connect with [wallet2](https://github.com/shocknet/wallet2) using the npub response in step 6 or the wallet application nprofile logged at startup.

View file

@ -5,6 +5,7 @@ LND_MACAROON_PATH=/root/.lnd/data/chain/bitcoin/mainnet/admin.macaroon
#DB #DB
DATABASE_FILE=db.sqlite DATABASE_FILE=db.sqlite
METRICS_DATABASE_FILE=metrics.sqlite
#LOCAL #LOCAL
ADMIN_TOKEN= ADMIN_TOKEN=
@ -38,3 +39,6 @@ SERVICE_URL=https://test.lightning.pub
MOCK_LND=false MOCK_LND=false
ALLOW_BALANCE_MIGRATION=false ALLOW_BALANCE_MIGRATION=false
MIGRATE_DB=false MIGRATE_DB=false
#METRICS
RECORD_PERFORMANCE=true

12
metricsDatasource.js Normal file
View file

@ -0,0 +1,12 @@
// Standalone TypeORM datasource definition for the metrics sqlite database.
// It imports the compiled entity classes from build/ so the TypeORM CLI can
// generate/run metrics migrations without booting the full service.
// NOTE(review): database name is hard-coded to "metrics.sqlite" here, while
// the service reads METRICS_DATABASE_FILE from the env — confirm these stay in sync.
import { DataSource } from "typeorm"
import { BalanceEvent } from "./build/src/services/storage/entity/BalanceEvent.js"
import { ChannelBalanceEvent } from "./build/src/services/storage/entity/ChannelsBalanceEvent.js"
import { RoutingEvent } from "./build/src/services/storage/entity/RoutingEvent.js"
export default new DataSource({
    type: "sqlite",
    database: "metrics.sqlite",
    entities: [ RoutingEvent, BalanceEvent, ChannelBalanceEvent],
});

View file

@ -11,7 +11,7 @@ const serverOptions = (mainHandler: Main): ServerOptions => {
AppAuthGuard: async (authHeader) => { return { app_id: mainHandler.applicationManager.DecodeAppToken(stripBearer(authHeader)) } }, AppAuthGuard: async (authHeader) => { return { app_id: mainHandler.applicationManager.DecodeAppToken(stripBearer(authHeader)) } },
UserAuthGuard: async (authHeader) => { return mainHandler.appUserManager.DecodeUserToken(stripBearer(authHeader)) }, UserAuthGuard: async (authHeader) => { return mainHandler.appUserManager.DecodeUserToken(stripBearer(authHeader)) },
GuestAuthGuard: async (_) => ({}), GuestAuthGuard: async (_) => ({}),
metricsCallback: metrics => mainHandler.metricsManager.AddMetrics(metrics), metricsCallback: metrics => mainHandler.settings.recordPerformance ? mainHandler.metricsManager.AddMetrics(metrics) : null,
allowCors: true allowCors: true
//throwErrors: true //throwErrors: true
} }

88
src/custom-nip19.ts Normal file
View file

@ -0,0 +1,88 @@
/*
This file contains functions that deal with encoding and decoding nprofiles,
but with the addition of bridge urls in the nprofile.
These functions are basically the same functions from nostr-tools package
but with some tweaks to allow for the bridge inclusion.
*/
import { bytesToHex, concatBytes, hexToBytes } from '@noble/hashes/utils';
import { bech32 } from 'bech32';
// Shared UTF-8 codecs used by the TLV encode/decode helpers below.
export const utf8Decoder = new TextDecoder('utf-8')
export const utf8Encoder = new TextEncoder()
// Same shape as nostr-tools' ProfilePointer, extended with the bridge URL
// list that this module stores under TLV tag 2.
export type CustomProfilePointer = {
    pubkey: string
    relays?: string[]
    bridge?: string[] // one bridge
}
// Map from TLV tag number to every value recorded under that tag.
type TLV = { [t: number]: Uint8Array[] }
/*
Serializes a TLV map into a flat byte stream: each value becomes
[tag, length, ...bytes]. Unlike the upstream nostr-tools implementation,
the entries are NOT reversed before encoding — reversing changed the
produced nprofile string even though it still decoded to the same inputs,
so the original order is kept deliberately.
Throws if a value cannot fit in the single length byte: the previous code
wrote `v.length` into a Uint8Array slot, silently truncating lengths > 255
(mod 256) and producing a corrupt, undecodable TLV stream.
*/
const encodeTLV = (tlv: TLV): Uint8Array => {
    const entries: Uint8Array[] = []
    let total = 0
    Object.entries(tlv).forEach(([t, vs]) => {
        vs.forEach(v => {
            // one length byte per value, so values must be 0..255 bytes long
            if (v.length > 255) throw new Error(`TLV ${t} value too long: ${v.length} > 255 bytes`)
            const entry = new Uint8Array(v.length + 2)
            entry[0] = parseInt(t)
            entry[1] = v.length
            entry.set(v, 2)
            entries.push(entry)
            total += entry.length
        })
    })
    // manual concat keeps this helper free of external utilities
    const out = new Uint8Array(total)
    let offset = 0
    for (const entry of entries) {
        out.set(entry, offset)
        offset += entry.length
    }
    return out
}
/**
 * Bech32-encodes a profile pointer as an "nprofile" string.
 * TLV 0 carries the 32-byte pubkey, TLV 1 the relay URLs, and TLV 2 the
 * bridge URLs — the non-standard extension this module exists for.
 */
export const encodeNprofile = (profile: CustomProfilePointer): string => {
    const relayValues = (profile.relays ?? []).map(r => utf8Encoder.encode(r))
    const bridgeValues = (profile.bridge ?? []).map(b => utf8Encoder.encode(b))
    const payload = encodeTLV({
        0: [hexToBytes(profile.pubkey)],
        1: relayValues,
        2: bridgeValues,
    })
    // 5000 raises bech32's default 90-char limit so long TLV payloads fit
    return bech32.encode("nprofile", bech32.toWords(payload), 5000);
}
/**
 * Parses a TLV byte stream into a map from tag to the list of values seen
 * under that tag. Each record is laid out as [tag, length, ...length bytes].
 *
 * Fix: the original read `rest[1]` without checking that a length byte
 * exists. On input with a single trailing byte that yields `undefined`,
 * making `rest.slice(2 + undefined)` a `slice(NaN)` → `slice(0)`, so the
 * cursor never advanced and the loop spun forever. Truncated input now
 * throws instead.
 */
const parseTLV = (data: Uint8Array): TLV => {
    const result: TLV = {}
    let rest = data
    while (rest.length > 0) {
        // need at least the tag byte and the length byte
        if (rest.length < 2) throw new Error(`not enough data to read TLV header at tag ${rest[0]}`)
        const t = rest[0]
        const l = rest[1]
        const v = rest.slice(2, 2 + l)
        if (v.length < l) throw new Error(`not enough data to read on TLV ${t}`)
        rest = rest.slice(2 + l)
        result[t] = result[t] || []
        result[t].push(v)
    }
    return result
}
/**
 * Decodes an "nprofile" bech32 string back into its pointer parts.
 * TLV 0 is the mandatory 32-byte pubkey; TLV 1 holds relay URLs and TLV 2
 * the bridge URLs added by this module's custom encoding.
 * Throws on a wrong prefix or a missing/malformed TLV 0.
 */
export const decodeNprofile = (nprofile: string): CustomProfilePointer => {
    const decoded = bech32.decode(nprofile, 5000)
    if (decoded.prefix !== "nprofile") {
        throw new Error("Expected nprofile prefix");
    }
    const tlv = parseTLV(new Uint8Array(bech32.fromWords(decoded.words)));
    const pubkeyBytes = tlv[0]?.[0]
    if (!pubkeyBytes) throw new Error('missing TLV 0 for nprofile')
    if (pubkeyBytes.length !== 32) throw new Error('TLV 0 should be 32 bytes')
    const asStrings = (values?: Uint8Array[]) => (values ?? []).map(v => utf8Decoder.decode(v))
    return {
        pubkey: bytesToHex(pubkeyBytes),
        relays: asStrings(tlv[1]),
        bridge: asStrings(tlv[2]),
    }
}

View file

@ -11,7 +11,7 @@ export default (serverMethods: Types.ServerMethods, mainHandler: Main, nostrSett
let nostrUser = await mainHandler.storage.applicationStorage.GetOrCreateNostrAppUser(app, pub || "") let nostrUser = await mainHandler.storage.applicationStorage.GetOrCreateNostrAppUser(app, pub || "")
return { user_id: nostrUser.user.user_id, app_user_id: nostrUser.identifier, app_id: appId || "" } return { user_id: nostrUser.user.user_id, app_user_id: nostrUser.identifier, app_id: appId || "" }
}, },
metricsCallback: metrics => mainHandler.metricsManager.AddMetrics(metrics) metricsCallback: metrics => mainHandler.settings.recordPerformance ? mainHandler.metricsManager.AddMetrics(metrics) : null
}) })
const nostr = new Nostr(nostrSettings, event => { const nostr = new Nostr(nostrSettings, event => {
let j: NostrRequest let j: NostrRequest

View file

@ -30,7 +30,8 @@ export const LoadMainSettingsFromEnv = (test = false): MainSettings => {
userToUserFee: EnvMustBeInteger("TX_FEE_INTERNAL_USER_BPS") / 10000, userToUserFee: EnvMustBeInteger("TX_FEE_INTERNAL_USER_BPS") / 10000,
appToUserFee: EnvMustBeInteger("TX_FEE_INTERNAL_ROOT_BPS") / 10000, appToUserFee: EnvMustBeInteger("TX_FEE_INTERNAL_ROOT_BPS") / 10000,
serviceUrl: EnvMustBeNonEmptyString("SERVICE_URL"), serviceUrl: EnvMustBeNonEmptyString("SERVICE_URL"),
servicePort: EnvMustBeInteger("PORT") servicePort: EnvMustBeInteger("PORT"),
recordPerformance: process.env.RECORD_PERFORMANCE === 'true' || false
} }
} }

View file

@ -14,5 +14,6 @@ export type MainSettings = {
appToUserFee: number appToUserFee: number
serviceUrl: string serviceUrl: string
servicePort: number servicePort: number
recordPerformance: boolean
} }

View file

@ -2,6 +2,7 @@
import { SimplePool, Sub, Event, UnsignedEvent, getEventHash, finishEvent, relayInit } from './tools/index.js' import { SimplePool, Sub, Event, UnsignedEvent, getEventHash, finishEvent, relayInit } from './tools/index.js'
import { encryptData, decryptData, getSharedSecret, decodePayload, encodePayload } from './nip44.js' import { encryptData, decryptData, getSharedSecret, decodePayload, encodePayload } from './nip44.js'
import { getLogger } from '../helpers/logger.js' import { getLogger } from '../helpers/logger.js'
import { encodeNprofile } from '../../custom-nip19.js'
const handledEvents: string[] = [] // TODO: - big memory leak here, add TTL const handledEvents: string[] = [] // TODO: - big memory leak here, add TTL
type AppInfo = { appId: string, publicKey: string, privateKey: string, name: string } type AppInfo = { appId: string, publicKey: string, privateKey: string, name: string }
export type SendData = { type: "content", content: string, pub: string } | { type: "event", event: UnsignedEvent } export type SendData = { type: "content", content: string, pub: string } | { type: "event", event: UnsignedEvent }
@ -88,7 +89,18 @@ export default class Handler {
eventCallback: (event: NostrEvent) => void eventCallback: (event: NostrEvent) => void
constructor(settings: NostrSettings, eventCallback: (event: NostrEvent) => void) { constructor(settings: NostrSettings, eventCallback: (event: NostrEvent) => void) {
this.settings = settings this.settings = settings
console.log(settings) console.log(
{
...settings,
apps: settings.apps.map(app => {
const { privateKey, ...rest } = app;
return {
...rest,
nprofile: encodeNprofile({ pubkey: rest.publicKey, relays: settings.relays })
}
})
}
)
this.eventCallback = eventCallback this.eventCallback = eventCallback
this.settings.apps.forEach(app => { this.settings.apps.forEach(app => {
this.apps[app.publicKey] = app this.apps[app.publicKey] = app

View file

@ -24,21 +24,41 @@ import { LndMetrics1703170330183 } from "./migrations/1703170330183-lnd_metrics.
export type DbSettings = { export type DbSettings = {
databaseFile: string databaseFile: string
migrate: boolean migrate: boolean
metricsDatabaseFile: string
} }
export const LoadDbSettingsFromEnv = (test = false): DbSettings => { export const LoadDbSettingsFromEnv = (test = false): DbSettings => {
return { return {
databaseFile: test ? ":memory:" : EnvMustBeNonEmptyString("DATABASE_FILE"), databaseFile: test ? ":memory:" : EnvMustBeNonEmptyString("DATABASE_FILE"),
migrate: process.env.MIGRATE_DB === 'true' || false, migrate: process.env.MIGRATE_DB === 'true' || false,
metricsDatabaseFile: test ? ":memory:" : EnvMustBeNonEmptyString("METRICS_DATABASE_FILE")
} }
} }
// Opens the separate metrics sqlite database and, when pending migrations
// exist, runs them inside a single transaction. Returns the initialized
// DataSource together with whatever migrations were executed (empty array
// when the schema was already up to date).
export const newMetricsDb = async (settings: DbSettings, metricsMigrations: Function[]): Promise<{ source: DataSource, executedMigrations: Migration[] }> => {
    const source = await new DataSource({
        type: "sqlite",
        database: settings.metricsDatabaseFile,
        entities: [ RoutingEvent, BalanceEvent, ChannelBalanceEvent],
        migrations: metricsMigrations
    }).initialize();
    const log = getLogger({});
    // showMigrations() resolves truthy when there are unapplied migrations
    const pendingMigrations = await source.showMigrations()
    if (pendingMigrations) {
        log("Migrations found, migrating...")
        const executedMigrations = await source.runMigrations({ transaction: 'all' })
        return { source, executedMigrations }
    }
    return { source, executedMigrations: [] }
}
export default async (settings: DbSettings, migrations: Function[]): Promise<{ source: DataSource, executedMigrations: Migration[] }> => { export default async (settings: DbSettings, migrations: Function[]): Promise<{ source: DataSource, executedMigrations: Migration[] }> => {
const source = await new DataSource({ const source = await new DataSource({
type: "sqlite", type: "sqlite",
database: settings.databaseFile, database: settings.databaseFile,
// logging: true, // logging: true,
entities: [User, UserReceivingInvoice, UserReceivingAddress, AddressReceivingTransaction, UserInvoicePayment, UserTransactionPayment, entities: [User, UserReceivingInvoice, UserReceivingAddress, AddressReceivingTransaction, UserInvoicePayment, UserTransactionPayment,
UserBasicAuth, UserEphemeralKey, Product, UserToUserPayment, Application, ApplicationUser, UserToUserPayment, RoutingEvent, BalanceEvent, ChannelBalanceEvent], UserBasicAuth, UserEphemeralKey, Product, UserToUserPayment, Application, ApplicationUser, UserToUserPayment],
//synchronize: true, //synchronize: true,
migrations migrations
}).initialize() }).initialize()

View file

@ -25,7 +25,7 @@ export default class {
constructor(settings: StorageSettings) { constructor(settings: StorageSettings) {
this.settings = settings this.settings = settings
} }
async Connect(migrations: Function[]) { async Connect(migrations: Function[], metricsMigrations: Function []) {
const { source, executedMigrations } = await NewDB(this.settings.dbSettings, migrations) const { source, executedMigrations } = await NewDB(this.settings.dbSettings, migrations)
this.DB = source this.DB = source
this.txQueue = new TransactionsQueue(this.DB) this.txQueue = new TransactionsQueue(this.DB)
@ -33,8 +33,9 @@ export default class {
this.productStorage = new ProductStorage(this.DB, this.txQueue) this.productStorage = new ProductStorage(this.DB, this.txQueue)
this.applicationStorage = new ApplicationStorage(this.DB, this.userStorage, this.txQueue) this.applicationStorage = new ApplicationStorage(this.DB, this.userStorage, this.txQueue)
this.paymentStorage = new PaymentStorage(this.DB, this.userStorage, this.txQueue) this.paymentStorage = new PaymentStorage(this.DB, this.userStorage, this.txQueue)
this.metricsStorage = new MetricsStorage(this.DB, this.txQueue) this.metricsStorage = new MetricsStorage(this.settings)
return executedMigrations const executedMetricsMigrations = await this.metricsStorage.Connect(metricsMigrations)
return { executedMigrations, executedMetricsMigrations };
} }
StartTransaction(exec: TX<void>) { StartTransaction(exec: TX<void>) {

View file

@ -3,12 +3,20 @@ import { RoutingEvent } from "./entity/RoutingEvent.js"
import { BalanceEvent } from "./entity/BalanceEvent.js" import { BalanceEvent } from "./entity/BalanceEvent.js"
import { ChannelBalanceEvent } from "./entity/ChannelsBalanceEvent.js" import { ChannelBalanceEvent } from "./entity/ChannelsBalanceEvent.js"
import TransactionsQueue, { TX } from "./transactionsQueue.js"; import TransactionsQueue, { TX } from "./transactionsQueue.js";
import { StorageSettings } from "./index.js";
import { newMetricsDb } from "./db.js";
export default class { export default class {
DB: DataSource | EntityManager DB: DataSource | EntityManager
settings: StorageSettings
txQueue: TransactionsQueue txQueue: TransactionsQueue
constructor(DB: DataSource | EntityManager, txQueue: TransactionsQueue) { constructor(settings: StorageSettings) {
this.DB = DB this.settings = settings;
this.txQueue = txQueue }
async Connect(metricsMigrations: Function[]) {
const { source, executedMigrations } = await newMetricsDb(this.settings.dbSettings, metricsMigrations)
this.DB = source;
this.txQueue = new TransactionsQueue(this.DB)
return executedMigrations;
} }
async SaveRoutingEvent(event: Partial<RoutingEvent>) { async SaveRoutingEvent(event: Partial<RoutingEvent>) {
const entry = this.DB.getRepository(RoutingEvent).create(event) const entry = this.DB.getRepository(RoutingEvent).create(event)

View file

@ -6,33 +6,38 @@ import { LndMetrics1703170330183 } from './1703170330183-lnd_metrics.js'
const allMigrations = [LndMetrics1703170330183] const allMigrations = [LndMetrics1703170330183]
export const TypeOrmMigrationRunner = async (log: PubLogger, storageManager: Storage, settings: DbSettings, arg: string | undefined): Promise<boolean> => { export const TypeOrmMigrationRunner = async (log: PubLogger, storageManager: Storage, settings: DbSettings, arg: string | undefined): Promise<boolean> => {
if (arg === 'initial_migration') { if (arg === 'initial_migration') {
await connectAndMigrate(log, storageManager, true, settings, [Initial1703170309875]) await connectAndMigrate(log, storageManager, true, settings, [Initial1703170309875], [])
return true return true
} else if (arg === 'lnd_metrics_migration') { } else if (arg === 'lnd_metrics_migration') {
await connectAndMigrate(log, storageManager, true, settings, [LndMetrics1703170330183]) await connectAndMigrate(log, storageManager, true, settings, [], [LndMetrics1703170330183])
return true return true
} else if (arg === 'all_migrations') { } else if (arg === 'all_migrations') {
await connectAndMigrate(log, storageManager, true, settings, allMigrations) await connectAndMigrate(log, storageManager, true, settings, [], allMigrations)
return true return true
} else if (settings.migrate) { } else if (settings.migrate) {
await connectAndMigrate(log, storageManager, false, settings, allMigrations) await connectAndMigrate(log, storageManager, false, settings, [], allMigrations)
return false return false
} }
await connectAndMigrate(log, storageManager, false, settings, []) await connectAndMigrate(log, storageManager, false, settings, [], [])
return false return false
} }
const connectAndMigrate = async (log: PubLogger, storageManager: Storage, manual: boolean, settings: DbSettings, migrations: Function[]) => { const connectAndMigrate = async (log: PubLogger, storageManager: Storage, manual: boolean, settings: DbSettings, migrations: Function[], metricsMigrations: Function[]) => {
if (manual && settings.migrate) { if (manual && settings.migrate) {
throw new Error("auto migration is enabled, no need to run manual migration") throw new Error("auto migration is enabled, no need to run manual migration")
} }
if (migrations.length > 0) { if (migrations.length > 0) {
log("will add", migrations.length, "typeorm migrations...") log("will add", migrations.length, "typeorm migrations...")
} }
const executedMigrations = await storageManager.Connect(migrations) const { executedMigrations, executedMetricsMigrations } = await storageManager.Connect(migrations, metricsMigrations)
if (migrations.length > 0) { if (migrations.length > 0) {
log(executedMigrations.length, "of", migrations.length, "migrations were executed correctly") log(executedMigrations.length, "of", migrations.length, "migrations were executed correctly")
log(executedMigrations) log(executedMigrations)
log("-------------------")
} if (metricsMigrations.length > 0) {
log(executedMetricsMigrations.length, "of", migrations.length, "metrics migrations were executed correctly")
log(executedMetricsMigrations)
} }
} }