Merge pull request #777 from shocknet/admin-chans

admin channels
This commit is contained in:
Justin (shocknet) 2024-12-02 16:32:02 -05:00 committed by GitHub
commit 28b56a256a
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
34 changed files with 18810 additions and 5990 deletions

View file

@ -2,15 +2,17 @@ import { DataSource } from "typeorm"
import { BalanceEvent } from "./build/src/services/storage/entity/BalanceEvent.js"
import { ChannelBalanceEvent } from "./build/src/services/storage/entity/ChannelsBalanceEvent.js"
import { ChannelRouting } from "./build/src/services/storage/entity/ChannelRouting.js"
import { RootOperation } from "./build/src/services/storage/entity/RootOperation.js"
import { LndMetrics1703170330183 } from './build/src/services/storage/migrations/1703170330183-lnd_metrics.js'
import { ChannelRouting1709316653538 } from './build/src/services/storage/migrations/1709316653538-channel_routing.js'
import { HtlcCount1724266887195 } from './build/src/services/storage/migrations/1724266887195-htlc_count.js'
import { BalanceEvents1724860966825 } from './build/src/services/storage/migrations/1724860966825-balance_events.js'
export default new DataSource({
type: "sqlite",
database: "metrics.sqlite",
entities: [BalanceEvent, ChannelBalanceEvent, ChannelRouting],
migrations: [LndMetrics1703170330183, ChannelRouting1709316653538, HtlcCount1724266887195]
entities: [BalanceEvent, ChannelBalanceEvent, ChannelRouting, RootOperation],
migrations: [LndMetrics1703170330183, ChannelRouting1709316653538, HtlcCount1724266887195, BalanceEvents1724860966825]
});
//npx typeorm migration:generate ./src/services/storage/migrations/balance_events -d ./metricsDatasource.js
//npx typeorm migration:generate ./src/services/storage/migrations/root_ops -d ./metricsDatasource.js

11660
package-lock.json generated

File diff suppressed because it is too large Load diff

View file

@ -18,6 +18,11 @@ The nostr server will send back a message response, and inside the body there wi
- input: [AddAppRequest](#AddAppRequest)
- output: [AuthApp](#AuthApp)
- AddPeer
- auth type: __Admin__
- input: [AddPeerRequest](#AddPeerRequest)
- This method has an __empty__ __response__ body
- AddProduct
- auth type: __User__
- input: [AddProductRequest](#AddProductRequest)
@ -48,6 +53,11 @@ The nostr server will send back a message response, and inside the body there wi
- This method has an __empty__ __request__ body
- This method has an __empty__ __response__ body
- CloseChannel
- auth type: __Admin__
- input: [CloseChannelRequest](#CloseChannelRequest)
- output: [CloseChannelResponse](#CloseChannelResponse)
- CreateOneTimeInviteLink
- auth type: __Admin__
- input: [CreateOneTimeInviteLinkRequest](#CreateOneTimeInviteLinkRequest)
@ -181,7 +191,7 @@ The nostr server will send back a message response, and inside the body there wi
- output: [NewInvoiceResponse](#NewInvoiceResponse)
- OpenChannel
- auth type: __Admin__
- input: [OpenChannelRequest](#OpenChannelRequest)
- output: [OpenChannelResponse](#OpenChannelResponse)
@ -210,6 +220,11 @@ The nostr server will send back a message response, and inside the body there wi
- input: [CallbackUrl](#CallbackUrl)
- output: [CallbackUrl](#CallbackUrl)
- UpdateChannelPolicy
- auth type: __Admin__
- input: [UpdateChannelPolicyRequest](#UpdateChannelPolicyRequest)
- This method has an __empty__ __response__ body
- UseInviteLink
- auth type: __GuestWithPub__
- input: [UseInviteLinkRequest](#UseInviteLinkRequest)
@ -282,6 +297,13 @@ The nostr server will send back a message response, and inside the body there wi
- input: [AddAppUserInvoiceRequest](#AddAppUserInvoiceRequest)
- output: [NewInvoiceResponse](#NewInvoiceResponse)
- AddPeer
- auth type: __Admin__
- http method: __post__
- http route: __/api/admin/peer__
- input: [AddPeerRequest](#AddPeerRequest)
- This method has an __empty__ __response__ body
- AddProduct
- auth type: __User__
- http method: __post__
@ -324,6 +346,13 @@ The nostr server will send back a message response, and inside the body there wi
- This method has an __empty__ __request__ body
- This method has an __empty__ __response__ body
- CloseChannel
- auth type: __Admin__
- http method: __post__
- http route: __/api/admin/channel/close__
- input: [CloseChannelRequest](#CloseChannelRequest)
- output: [CloseChannelResponse](#CloseChannelResponse)
- CreateOneTimeInviteLink
- auth type: __Admin__
- http method: __post__
@ -600,9 +629,9 @@ The nostr server will send back a message response, and inside the body there wi
- output: [NewInvoiceResponse](#NewInvoiceResponse)
- OpenChannel
- auth type: __Admin__
- http method: __post__
- http route: __/api/admin/channel/open__
- input: [OpenChannelRequest](#OpenChannelRequest)
- output: [OpenChannelResponse](#OpenChannelResponse)
@ -697,6 +726,13 @@ The nostr server will send back a message response, and inside the body there wi
- input: [CallbackUrl](#CallbackUrl)
- output: [CallbackUrl](#CallbackUrl)
- UpdateChannelPolicy
- auth type: __Admin__
- http method: __post__
- http route: __/api/admin/channel/policy/update__
- input: [UpdateChannelPolicyRequest](#UpdateChannelPolicyRequest)
- This method has an __empty__ __response__ body
- UseInviteLink
- auth type: __GuestWithPub__
- http method: __post__
@ -736,6 +772,11 @@ The nostr server will send back a message response, and inside the body there wi
- __fail_if_exists__: _boolean_
- __identifier__: _string_
### AddPeerRequest
- __host__: _string_
- __port__: _number_
- __pubkey__: _string_
### AddProductRequest
- __name__: _string_
- __price_sats__: _number_
@ -794,9 +835,26 @@ The nostr server will send back a message response, and inside the body there wi
### CallbackUrl
- __url__: _string_
### ChannelPolicy
- __base_fee_msat__: _number_
- __fee_rate_ppm__: _number_
- __max_htlc_msat__: _number_
- __min_htlc_msat__: _number_
- __timelock_delta__: _number_
### CloseChannelRequest
- __force__: _boolean_
- __funding_txid__: _string_
- __output_index__: _number_
- __sat_per_v_byte__: _number_
### CloseChannelResponse
- __closing_txid__: _string_
### ClosedChannel
- __capacity__: _number_
- __channel_id__: _string_
- __close_tx_timestamp__: _number_
- __closed_height__: _number_
### ClosureMigration
@ -926,6 +984,9 @@ The nostr server will send back a message response, and inside the body there wi
### LndGetInfoResponse
- __alias__: _string_
- __synced_to_chain__: _boolean_
- __synced_to_graph__: _boolean_
- __watchdog_barking__: _boolean_
### LndMetrics
- __nodes__: ARRAY of: _[LndNodeMetrics](#LndNodeMetrics)_
@ -946,6 +1007,7 @@ The nostr server will send back a message response, and inside the body there wi
- __online_channels__: _number_
- __open_channels__: ARRAY of: _[OpenChannel](#OpenChannel)_
- __pending_channels__: _number_
- __root_ops__: ARRAY of: _[RootOperation](#RootOperation)_
### LndSeed
- __seed__: ARRAY of: _string_
@ -998,19 +1060,22 @@ The nostr server will send back a message response, and inside the body there wi
- __active__: _boolean_
- __capacity__: _number_
- __channel_id__: _string_
- __channel_point__: _string_
- __label__: _string_
- __lifetime__: _number_
- __local_balance__: _number_
- __policy__: _[ChannelPolicy](#ChannelPolicy)_ *this field is optional
- __remote_balance__: _number_
### OpenChannelRequest
- __closeAddress__: _string_
- __destination__: _string_
- __fundingAmount__: _number_
- __pushAmount__: _number_
- __close_address__: _string_ *this field is optional
- __local_funding_amount__: _number_
- __node_pubkey__: _string_
- __push_sat__: _number_ *this field is optional
- __sat_per_v_byte__: _number_
### OpenChannelResponse
- __channelId__: _string_
- __channel_id__: _string_
### PayAddressRequest
- __address__: _string_
@ -1062,6 +1127,12 @@ The nostr server will send back a message response, and inside the body there wi
### RequestNPubLinkingTokenResponse
- __token__: _string_
### RootOperation
- __amount__: _number_
- __created_at_unix__: _number_
- __op_id__: _string_
- __op_type__: _[OperationType](#OperationType)_
### RoutingEvent
- __event_type__: _string_
- __failure_string__: _string_
@ -1096,6 +1167,10 @@ The nostr server will send back a message response, and inside the body there wi
- __amount__: _number_
- __invoice__: _string_
### UpdateChannelPolicyRequest
- __policy__: _[ChannelPolicy](#ChannelPolicy)_
- __update__: _[UpdateChannelPolicyRequest_update](#UpdateChannelPolicyRequest_update)_
### UsageMetric
- __auth_in_nano__: _number_
- __batch__: _boolean_
@ -1164,6 +1239,10 @@ The nostr server will send back a message response, and inside the body there wi
- __MONTH__
- __WEEK__
### OperationType
- __CHAIN_OP__
- __INVOICE_OP__
### UserOperationType
- __INCOMING_INVOICE__
- __INCOMING_TX__

View file

@ -58,12 +58,14 @@ type Client struct {
AddAppInvoice func(req AddAppInvoiceRequest) (*NewInvoiceResponse, error)
AddAppUser func(req AddAppUserRequest) (*AppUser, error)
AddAppUserInvoice func(req AddAppUserInvoiceRequest) (*NewInvoiceResponse, error)
AddPeer func(req AddPeerRequest) error
AddProduct func(req AddProductRequest) (*Product, error)
AuthApp func(req AuthAppRequest) (*AuthApp, error)
AuthorizeDebit func(req DebitAuthorizationRequest) (*DebitAuthorization, error)
BanDebit func(req DebitOperation) error
BanUser func(req BanUserRequest) (*BanUserResponse, error)
// batching method: BatchUser not implemented
CloseChannel func(req CloseChannelRequest) (*CloseChannelResponse, error)
CreateOneTimeInviteLink func(req CreateOneTimeInviteLinkRequest) (*CreateOneTimeInviteLinkResponse, error)
DecodeInvoice func(req DecodeInvoiceRequest) (*DecodeInvoiceResponse, error)
EditDebit func(req DebitAuthorizationRequest) error
@ -115,6 +117,7 @@ type Client struct {
SetMockAppUserBalance func(req SetMockAppUserBalanceRequest) error
SetMockInvoiceAsPaid func(req SetMockInvoiceAsPaidRequest) error
UpdateCallbackUrl func(req CallbackUrl) (*CallbackUrl, error)
UpdateChannelPolicy func(req UpdateChannelPolicyRequest) error
UseInviteLink func(req UseInviteLinkRequest) error
UserHealth func() error
}
@ -237,6 +240,30 @@ func NewClient(params ClientParams) *Client {
}
return &res, nil
},
// AddPeer POSTs the request to the admin-only /api/admin/peer endpoint.
// Returns nil on success; returns an error if admin-auth retrieval, the
// HTTP call, JSON (un)marshalling, or the server-reported status fails.
AddPeer: func(req AddPeerRequest) error {
	auth, err := params.RetrieveAdminAuth()
	if err != nil {
		return err
	}
	finalRoute := "/api/admin/peer"
	body, err := json.Marshal(req)
	if err != nil {
		return err
	}
	resBody, err := doPostRequest(params.BaseURL+finalRoute, body, auth)
	if err != nil {
		return err
	}
	result := ResultError{}
	err = json.Unmarshal(resBody, &result)
	if err != nil {
		return err
	}
	if result.Status == "ERROR" {
		// fmt.Errorf with a non-constant format string trips `go vet`
		// and misbehaves if the reason contains '%'; pass it as a value.
		return fmt.Errorf("%s", result.Reason)
	}
	return nil
},
AddProduct: func(req AddProductRequest) (*Product, error) {
auth, err := params.RetrieveUserAuth()
if err != nil {
@ -378,6 +405,35 @@ func NewClient(params ClientParams) *Client {
return &res, nil
},
// batching method: BatchUser not implemented
// CloseChannel POSTs the request to the admin-only
// /api/admin/channel/close endpoint and decodes the closing-txid
// response. The body is decoded twice: once into ResultError to detect
// a server-reported failure, then into CloseChannelResponse on success.
CloseChannel: func(req CloseChannelRequest) (*CloseChannelResponse, error) {
	auth, err := params.RetrieveAdminAuth()
	if err != nil {
		return nil, err
	}
	finalRoute := "/api/admin/channel/close"
	body, err := json.Marshal(req)
	if err != nil {
		return nil, err
	}
	resBody, err := doPostRequest(params.BaseURL+finalRoute, body, auth)
	if err != nil {
		return nil, err
	}
	result := ResultError{}
	err = json.Unmarshal(resBody, &result)
	if err != nil {
		return nil, err
	}
	if result.Status == "ERROR" {
		// fmt.Errorf with a non-constant format string trips `go vet`
		// and misbehaves if the reason contains '%'; pass it as a value.
		return nil, fmt.Errorf("%s", result.Reason)
	}
	res := CloseChannelResponse{}
	err = json.Unmarshal(resBody, &res)
	if err != nil {
		return nil, err
	}
	return &res, nil
},
CreateOneTimeInviteLink: func(req CreateOneTimeInviteLinkRequest) (*CreateOneTimeInviteLinkResponse, error) {
auth, err := params.RetrieveAdminAuth()
if err != nil {
@ -1267,11 +1323,11 @@ func NewClient(params ClientParams) *Client {
return &res, nil
},
OpenChannel: func(req OpenChannelRequest) (*OpenChannelResponse, error) {
auth, err := params.RetrieveUserAuth()
auth, err := params.RetrieveAdminAuth()
if err != nil {
return nil, err
}
finalRoute := "/api/user/open/channel"
finalRoute := "/api/admin/channel/open"
body, err := json.Marshal(req)
if err != nil {
return nil, err
@ -1637,6 +1693,30 @@ func NewClient(params ClientParams) *Client {
}
return &res, nil
},
// UpdateChannelPolicy POSTs the request to the admin-only
// /api/admin/channel/policy/update endpoint. The endpoint has an empty
// response body, so only the status envelope is checked.
UpdateChannelPolicy: func(req UpdateChannelPolicyRequest) error {
	auth, err := params.RetrieveAdminAuth()
	if err != nil {
		return err
	}
	finalRoute := "/api/admin/channel/policy/update"
	body, err := json.Marshal(req)
	if err != nil {
		return err
	}
	resBody, err := doPostRequest(params.BaseURL+finalRoute, body, auth)
	if err != nil {
		return err
	}
	result := ResultError{}
	err = json.Unmarshal(resBody, &result)
	if err != nil {
		return err
	}
	if result.Status == "ERROR" {
		// fmt.Errorf with a non-constant format string trips `go vet`
		// and misbehaves if the reason contains '%'; pass it as a value.
		return fmt.Errorf("%s", result.Reason)
	}
	return nil
},
UseInviteLink: func(req UseInviteLinkRequest) error {
auth, err := params.RetrieveGuestWithPubAuth()
if err != nil {

View file

@ -64,6 +64,13 @@ const (
WEEK IntervalType = "WEEK"
)
// OperationType tags the kind of a root node operation
// (see RootOperation.Op_type): on-chain or invoice based.
type OperationType string
const (
CHAIN_OP OperationType = "CHAIN_OP"
INVOICE_OP OperationType = "INVOICE_OP"
)
type UserOperationType string
const (
@ -95,6 +102,11 @@ type AddAppUserRequest struct {
Fail_if_exists bool `json:"fail_if_exists"`
Identifier string `json:"identifier"`
}
// AddPeerRequest identifies a peer node to connect to:
// its public key plus the host/port it listens on.
type AddPeerRequest struct {
Host string `json:"host"`
Port int64 `json:"port"`
Pubkey string `json:"pubkey"`
}
type AddProductRequest struct {
Name string `json:"name"`
Price_sats int64 `json:"price_sats"`
@ -153,10 +165,27 @@ type BannedAppUser struct {
type CallbackUrl struct {
Url string `json:"url"`
}
// ChannelPolicy describes a channel's routing fee/HTLC policy.
// Field names suggest LND semantics and units (msat = millisatoshi,
// ppm = parts per million) — assumption to confirm against the server.
type ChannelPolicy struct {
Base_fee_msat int64 `json:"base_fee_msat"`
Fee_rate_ppm int64 `json:"fee_rate_ppm"`
Max_htlc_msat int64 `json:"max_htlc_msat"`
Min_htlc_msat int64 `json:"min_htlc_msat"`
Timelock_delta int64 `json:"timelock_delta"`
}
// CloseChannelRequest selects the channel to close by its funding
// outpoint (txid + output index). Force and Sat_per_v_byte are passed
// through to the node; exact semantics defined server-side.
type CloseChannelRequest struct {
Force bool `json:"force"`
Funding_txid string `json:"funding_txid"`
Output_index int64 `json:"output_index"`
Sat_per_v_byte int64 `json:"sat_per_v_byte"`
}
// CloseChannelResponse carries the txid of the broadcast closing transaction.
type CloseChannelResponse struct {
Closing_txid string `json:"closing_txid"`
}
type ClosedChannel struct {
Capacity int64 `json:"capacity"`
Channel_id string `json:"channel_id"`
Closed_height int64 `json:"closed_height"`
Capacity int64 `json:"capacity"`
Channel_id string `json:"channel_id"`
Close_tx_timestamp int64 `json:"close_tx_timestamp"`
Closed_height int64 `json:"closed_height"`
}
type ClosureMigration struct {
Closes_at_unix int64 `json:"closes_at_unix"`
@ -284,7 +313,10 @@ type LndGetInfoRequest struct {
Nodeid int64 `json:"nodeId"`
}
type LndGetInfoResponse struct {
Alias string `json:"alias"`
Alias string `json:"alias"`
Synced_to_chain bool `json:"synced_to_chain"`
Synced_to_graph bool `json:"synced_to_graph"`
Watchdog_barking bool `json:"watchdog_barking"`
}
type LndMetrics struct {
Nodes []LndNodeMetrics `json:"nodes"`
@ -305,6 +337,7 @@ type LndNodeMetrics struct {
Online_channels int64 `json:"online_channels"`
Open_channels []OpenChannel `json:"open_channels"`
Pending_channels int64 `json:"pending_channels"`
Root_ops []RootOperation `json:"root_ops"`
}
type LndSeed struct {
Seed []string `json:"seed"`
@ -354,22 +387,25 @@ type NewInvoiceResponse struct {
Invoice string `json:"invoice"`
}
type OpenChannel struct {
Active bool `json:"active"`
Capacity int64 `json:"capacity"`
Channel_id string `json:"channel_id"`
Label string `json:"label"`
Lifetime int64 `json:"lifetime"`
Local_balance int64 `json:"local_balance"`
Remote_balance int64 `json:"remote_balance"`
Active bool `json:"active"`
Capacity int64 `json:"capacity"`
Channel_id string `json:"channel_id"`
Channel_point string `json:"channel_point"`
Label string `json:"label"`
Lifetime int64 `json:"lifetime"`
Local_balance int64 `json:"local_balance"`
Policy *ChannelPolicy `json:"policy"`
Remote_balance int64 `json:"remote_balance"`
}
type OpenChannelRequest struct {
Closeaddress string `json:"closeAddress"`
Destination string `json:"destination"`
Fundingamount int64 `json:"fundingAmount"`
Pushamount int64 `json:"pushAmount"`
Close_address string `json:"close_address"`
Local_funding_amount int64 `json:"local_funding_amount"`
Node_pubkey string `json:"node_pubkey"`
Push_sat int64 `json:"push_sat"`
Sat_per_v_byte int64 `json:"sat_per_v_byte"`
}
type OpenChannelResponse struct {
Channelid string `json:"channelId"`
Channel_id string `json:"channel_id"`
}
type PayAddressRequest struct {
Address string `json:"address"`
@ -421,6 +457,12 @@ type RequestNPubLinkingTokenRequest struct {
type RequestNPubLinkingTokenResponse struct {
Token string `json:"token"`
}
// RootOperation is one root-node operation reported in LND metrics
// (see LndNodeMetrics.Root_ops). Op_type is CHAIN_OP or INVOICE_OP.
type RootOperation struct {
Amount int64 `json:"amount"`
Created_at_unix int64 `json:"created_at_unix"`
Op_id string `json:"op_id"`
Op_type OperationType `json:"op_type"`
}
type RoutingEvent struct {
Event_type string `json:"event_type"`
Failure_string string `json:"failure_string"`
@ -455,6 +497,10 @@ type SetMockInvoiceAsPaidRequest struct {
Amount int64 `json:"amount"`
Invoice string `json:"invoice"`
}
// UpdateChannelPolicyRequest applies Policy to the channels selected by
// Update (all channels, or a single channel point).
type UpdateChannelPolicyRequest struct {
Policy *ChannelPolicy `json:"policy"`
Update *UpdateChannelPolicyRequest_update `json:"update"`
}
type UsageMetric struct {
Auth_in_nano int64 `json:"auth_in_nano"`
Batch bool `json:"batch"`
@ -563,3 +609,15 @@ type NPubLinking_state struct {
Linking_token *string `json:"linking_token"`
Unlinked *Empty `json:"unlinked"`
}
// UpdateChannelPolicyRequest_update_type tags which variant of the
// oneof-style UpdateChannelPolicyRequest_update selector is set.
type UpdateChannelPolicyRequest_update_type string
const (
ALL UpdateChannelPolicyRequest_update_type = "all"
CHANNEL_POINT UpdateChannelPolicyRequest_update_type = "channel_point"
)
// UpdateChannelPolicyRequest_update is a oneof-style selector: Type says
// which pointer is populated — All (every channel) or Channel_point
// (a single channel identified by its funding outpoint string).
type UpdateChannelPolicyRequest_update struct {
Type UpdateChannelPolicyRequest_update_type `json:"type"`
All *Empty `json:"all"`
Channel_point *string `json:"channel_point"`
}

View file

@ -122,6 +122,28 @@ export default (methods: Types.ServerMethods, opts: ServerOptions) => {
opts.metricsCallback([{ ...info, ...stats, ...authContext }])
} catch (ex) { const e = ex as any; logErrorAndReturnResponse(e, e.message || e, res, logger, { ...info, ...stats, ...authCtx }, opts.metricsCallback); if (opts.throwErrors) throw e }
})
if (!opts.allowNotImplementedMethods && !methods.AddPeer) throw new Error('method: AddPeer is not implemented')
// Admin-only endpoint: validate the AddPeer body, dispatch to the handler,
// and reply with an empty OK envelope. Timing stats feed metricsCallback.
app.post('/api/admin/peer', async (req, res) => {
    const info: Types.RequestInfo = { rpcName: 'AddPeer', batch: false, nostr: false, batchSize: 0}
    const stats: Types.RequestStats = { startMs:req.startTimeMs || 0, start:req.startTime || 0n, parse: process.hrtime.bigint(), guard: 0n, validate: 0n, handle: 0n }
    let authCtx: Types.AuthContext = {}
    try {
        if (!methods.AddPeer) throw new Error('method: AddPeer is not implemented')
        const authContext = await opts.AdminAuthGuard(req.headers['authorization'])
        authCtx = authContext
        stats.guard = process.hrtime.bigint()
        const request = req.body
        const error = Types.AddPeerRequestValidate(request)
        stats.validate = process.hrtime.bigint()
        if (error !== null) return logErrorAndReturnResponse(error, 'invalid request body', res, logger, { ...info, ...stats, ...authContext }, opts.metricsCallback)
        // unused `query`/`params` locals removed; this route reads only the body
        await methods.AddPeer({rpcName:'AddPeer', ctx:authContext , req: request})
        stats.handle = process.hrtime.bigint()
        res.json({status: 'OK'})
        opts.metricsCallback([{ ...info, ...stats, ...authContext }])
    } catch (ex) { const e = ex as any; logErrorAndReturnResponse(e, e.message || e, res, logger, { ...info, ...stats, ...authCtx }, opts.metricsCallback); if (opts.throwErrors) throw e }
})
if (!opts.allowNotImplementedMethods && !methods.AddProduct) throw new Error('method: AddProduct is not implemented')
app.post('/api/user/product/add', async (req, res) => {
const info: Types.RequestInfo = { rpcName: 'AddProduct', batch: false, nostr: false, batchSize: 0}
@ -433,18 +455,6 @@ export default (methods: Types.ServerMethods, opts: ServerOptions) => {
callsMetrics.push({ ...opInfo, ...opStats, ...ctx })
}
break
case 'OpenChannel':
if (!methods.OpenChannel) {
throw new Error('method OpenChannel not found' )
} else {
const error = Types.OpenChannelRequestValidate(operation.req)
opStats.validate = process.hrtime.bigint()
if (error !== null) throw error
const res = await methods.OpenChannel({...operation, ctx}); responses.push({ status: 'OK', ...res })
opStats.handle = process.hrtime.bigint()
callsMetrics.push({ ...opInfo, ...opStats, ...ctx })
}
break
case 'PayAddress':
if (!methods.PayAddress) {
throw new Error('method PayAddress not found' )
@ -525,6 +535,28 @@ export default (methods: Types.ServerMethods, opts: ServerOptions) => {
opts.metricsCallback([{ ...info, ...stats, ...ctx }, ...callsMetrics])
} catch (ex) { const e = ex as any; logErrorAndReturnResponse(e, e.message || e, res, logger, { ...info, ...stats, ...authCtx }, opts.metricsCallback); if (opts.throwErrors) throw e }
})
if (!opts.allowNotImplementedMethods && !methods.CloseChannel) throw new Error('method: CloseChannel is not implemented')
// Admin-only endpoint: validate the CloseChannel body, dispatch to the
// handler, and spread its response into the OK envelope.
app.post('/api/admin/channel/close', async (req, res) => {
    const info: Types.RequestInfo = { rpcName: 'CloseChannel', batch: false, nostr: false, batchSize: 0}
    const stats: Types.RequestStats = { startMs:req.startTimeMs || 0, start:req.startTime || 0n, parse: process.hrtime.bigint(), guard: 0n, validate: 0n, handle: 0n }
    let authCtx: Types.AuthContext = {}
    try {
        if (!methods.CloseChannel) throw new Error('method: CloseChannel is not implemented')
        const authContext = await opts.AdminAuthGuard(req.headers['authorization'])
        authCtx = authContext
        stats.guard = process.hrtime.bigint()
        const request = req.body
        const error = Types.CloseChannelRequestValidate(request)
        stats.validate = process.hrtime.bigint()
        if (error !== null) return logErrorAndReturnResponse(error, 'invalid request body', res, logger, { ...info, ...stats, ...authContext }, opts.metricsCallback)
        // unused `query`/`params` locals removed; this route reads only the body
        const response = await methods.CloseChannel({rpcName:'CloseChannel', ctx:authContext , req: request})
        stats.handle = process.hrtime.bigint()
        res.json({status: 'OK', ...response})
        opts.metricsCallback([{ ...info, ...stats, ...authContext }])
    } catch (ex) { const e = ex as any; logErrorAndReturnResponse(e, e.message || e, res, logger, { ...info, ...stats, ...authCtx }, opts.metricsCallback); if (opts.throwErrors) throw e }
})
if (!opts.allowNotImplementedMethods && !methods.CreateOneTimeInviteLink) throw new Error('method: CreateOneTimeInviteLink is not implemented')
app.post('/api/admin/app/invite/create', async (req, res) => {
const info: Types.RequestInfo = { rpcName: 'CreateOneTimeInviteLink', batch: false, nostr: false, batchSize: 0}
@ -1204,13 +1236,13 @@ export default (methods: Types.ServerMethods, opts: ServerOptions) => {
} catch (ex) { const e = ex as any; logErrorAndReturnResponse(e, e.message || e, res, logger, { ...info, ...stats, ...authCtx }, opts.metricsCallback); if (opts.throwErrors) throw e }
})
if (!opts.allowNotImplementedMethods && !methods.OpenChannel) throw new Error('method: OpenChannel is not implemented')
app.post('/api/user/open/channel', async (req, res) => {
app.post('/api/admin/channel/open', async (req, res) => {
const info: Types.RequestInfo = { rpcName: 'OpenChannel', batch: false, nostr: false, batchSize: 0}
const stats: Types.RequestStats = { startMs:req.startTimeMs || 0, start:req.startTime || 0n, parse: process.hrtime.bigint(), guard: 0n, validate: 0n, handle: 0n }
let authCtx: Types.AuthContext = {}
try {
if (!methods.OpenChannel) throw new Error('method: OpenChannel is not implemented')
const authContext = await opts.UserAuthGuard(req.headers['authorization'])
const authContext = await opts.AdminAuthGuard(req.headers['authorization'])
authCtx = authContext
stats.guard = process.hrtime.bigint()
const request = req.body
@ -1511,6 +1543,28 @@ export default (methods: Types.ServerMethods, opts: ServerOptions) => {
opts.metricsCallback([{ ...info, ...stats, ...authContext }])
} catch (ex) { const e = ex as any; logErrorAndReturnResponse(e, e.message || e, res, logger, { ...info, ...stats, ...authCtx }, opts.metricsCallback); if (opts.throwErrors) throw e }
})
if (!opts.allowNotImplementedMethods && !methods.UpdateChannelPolicy) throw new Error('method: UpdateChannelPolicy is not implemented')
// Admin-only endpoint: validate the UpdateChannelPolicy body, dispatch to
// the handler, and reply with an empty OK envelope.
app.post('/api/admin/channel/policy/update', async (req, res) => {
    const info: Types.RequestInfo = { rpcName: 'UpdateChannelPolicy', batch: false, nostr: false, batchSize: 0}
    const stats: Types.RequestStats = { startMs:req.startTimeMs || 0, start:req.startTime || 0n, parse: process.hrtime.bigint(), guard: 0n, validate: 0n, handle: 0n }
    let authCtx: Types.AuthContext = {}
    try {
        if (!methods.UpdateChannelPolicy) throw new Error('method: UpdateChannelPolicy is not implemented')
        const authContext = await opts.AdminAuthGuard(req.headers['authorization'])
        authCtx = authContext
        stats.guard = process.hrtime.bigint()
        const request = req.body
        const error = Types.UpdateChannelPolicyRequestValidate(request)
        stats.validate = process.hrtime.bigint()
        if (error !== null) return logErrorAndReturnResponse(error, 'invalid request body', res, logger, { ...info, ...stats, ...authContext }, opts.metricsCallback)
        // unused `query`/`params` locals removed; this route reads only the body
        await methods.UpdateChannelPolicy({rpcName:'UpdateChannelPolicy', ctx:authContext , req: request})
        stats.handle = process.hrtime.bigint()
        res.json({status: 'OK'})
        opts.metricsCallback([{ ...info, ...stats, ...authContext }])
    } catch (ex) { const e = ex as any; logErrorAndReturnResponse(e, e.message || e, res, logger, { ...info, ...stats, ...authCtx }, opts.metricsCallback); if (opts.throwErrors) throw e }
})
if (!opts.allowNotImplementedMethods && !methods.UseInviteLink) throw new Error('method: UseInviteLink is not implemented')
app.post('/api/guest/invite', async (req, res) => {
const info: Types.RequestInfo = { rpcName: 'UseInviteLink', batch: false, nostr: false, batchSize: 0}

View file

@ -73,6 +73,17 @@ export default (params: ClientParams) => ({
}
return { status: 'ERROR', reason: 'invalid response' }
},
/** Admin-only HTTP client call: POST AddPeer; empty response body on success. */
AddPeer: async (request: Types.AddPeerRequest): Promise<ResultError | ({ status: 'OK' })> => {
    const auth = await params.retrieveAdminAuth()
    if (auth === null) throw new Error('retrieveAdminAuth() returned null')
    // never reassigned — `const`, not `let`
    const finalRoute = '/api/admin/peer'
    const { data } = await axios.post(params.baseUrl + finalRoute, request, { headers: { 'authorization': auth } })
    if (data.status === 'ERROR' && typeof data.reason === 'string') return data
    if (data.status === 'OK') {
        return data
    }
    return { status: 'ERROR', reason: 'invalid response' }
},
AddProduct: async (request: Types.AddProductRequest): Promise<ResultError | ({ status: 'OK' }& Types.Product)> => {
const auth = await params.retrieveUserAuth()
if (auth === null) throw new Error('retrieveUserAuth() returned null')
@ -151,6 +162,20 @@ export default (params: ClientParams) => ({
}
return { status: 'ERROR', reason: 'invalid response' }
},
/** Admin-only HTTP client call: POST CloseChannel; validates the payload when checkResult is enabled. */
CloseChannel: async (request: Types.CloseChannelRequest): Promise<ResultError | ({ status: 'OK' }& Types.CloseChannelResponse)> => {
    const auth = await params.retrieveAdminAuth()
    if (auth === null) throw new Error('retrieveAdminAuth() returned null')
    // never reassigned — `const`, not `let`
    const finalRoute = '/api/admin/channel/close'
    const { data } = await axios.post(params.baseUrl + finalRoute, request, { headers: { 'authorization': auth } })
    if (data.status === 'ERROR' && typeof data.reason === 'string') return data
    if (data.status === 'OK') {
        const result = data
        if(!params.checkResult) return { status: 'OK', ...result }
        const error = Types.CloseChannelResponseValidate(result)
        if (error === null) { return { status: 'OK', ...result } } else return { status: 'ERROR', reason: error.message }
    }
    return { status: 'ERROR', reason: 'invalid response' }
},
CreateOneTimeInviteLink: async (request: Types.CreateOneTimeInviteLinkRequest): Promise<ResultError | ({ status: 'OK' }& Types.CreateOneTimeInviteLinkResponse)> => {
const auth = await params.retrieveAdminAuth()
if (auth === null) throw new Error('retrieveAdminAuth() returned null')
@ -611,9 +636,9 @@ export default (params: ClientParams) => ({
return { status: 'ERROR', reason: 'invalid response' }
},
OpenChannel: async (request: Types.OpenChannelRequest): Promise<ResultError | ({ status: 'OK' }& Types.OpenChannelResponse)> => {
const auth = await params.retrieveUserAuth()
if (auth === null) throw new Error('retrieveUserAuth() returned null')
let finalRoute = '/api/user/open/channel'
const auth = await params.retrieveAdminAuth()
if (auth === null) throw new Error('retrieveAdminAuth() returned null')
let finalRoute = '/api/admin/channel/open'
const { data } = await axios.post(params.baseUrl + finalRoute, request, { headers: { 'authorization': auth } })
if (data.status === 'ERROR' && typeof data.reason === 'string') return data
if (data.status === 'OK') {
@ -785,6 +810,17 @@ export default (params: ClientParams) => ({
}
return { status: 'ERROR', reason: 'invalid response' }
},
/** Admin-only HTTP client call: POST UpdateChannelPolicy; empty response body on success. */
UpdateChannelPolicy: async (request: Types.UpdateChannelPolicyRequest): Promise<ResultError | ({ status: 'OK' })> => {
    const auth = await params.retrieveAdminAuth()
    if (auth === null) throw new Error('retrieveAdminAuth() returned null')
    // never reassigned — `const`, not `let`
    const finalRoute = '/api/admin/channel/policy/update'
    const { data } = await axios.post(params.baseUrl + finalRoute, request, { headers: { 'authorization': auth } })
    if (data.status === 'ERROR' && typeof data.reason === 'string') return data
    if (data.status === 'OK') {
        return data
    }
    return { status: 'ERROR', reason: 'invalid response' }
},
UseInviteLink: async (request: Types.UseInviteLinkRequest): Promise<ResultError | ({ status: 'OK' })> => {
const auth = await params.retrieveGuestWithPubAuth()
if (auth === null) throw new Error('retrieveGuestWithPubAuth() returned null')

View file

@ -27,6 +27,18 @@ export default (params: NostrClientParams, send: (to:string, message: NostrRequ
}
return { status: 'ERROR', reason: 'invalid response' }
},
AddPeer: async (request: Types.AddPeerRequest): Promise<ResultError | ({ status: 'OK' })> => {
    // Admin-authenticated Nostr RPC; the AddPeer response body is empty.
    const auth = await params.retrieveNostrAdminAuth()
    if (auth === null) throw new Error('retrieveNostrAdminAuth() returned null')
    const nostrRequest: NostrRequest = { body: request }
    const data = await send(params.pubDestination, { rpcName: 'AddPeer', authIdentifier: auth, ...nostrRequest })
    if (data.status === 'ERROR' && typeof data.reason === 'string') return data
    return data.status === 'OK' ? data : { status: 'ERROR', reason: 'invalid response' }
},
AddProduct: async (request: Types.AddProductRequest): Promise<ResultError | ({ status: 'OK' }& Types.Product)> => {
const auth = await params.retrieveNostrUserAuth()
if (auth === null) throw new Error('retrieveNostrUserAuth() returned null')
@ -110,6 +122,21 @@ export default (params: NostrClientParams, send: (to:string, message: NostrRequ
}
return { status: 'ERROR', reason: 'invalid response' }
},
CloseChannel: async (request: Types.CloseChannelRequest): Promise<ResultError | ({ status: 'OK' }& Types.CloseChannelResponse)> => {
    // Admin-authenticated Nostr RPC; validates the payload when checkResult is enabled.
    const auth = await params.retrieveNostrAdminAuth()
    if (auth === null) throw new Error('retrieveNostrAdminAuth() returned null')
    const nostrRequest: NostrRequest = { body: request }
    const data = await send(params.pubDestination, { rpcName: 'CloseChannel', authIdentifier: auth, ...nostrRequest })
    if (data.status === 'ERROR' && typeof data.reason === 'string') return data
    if (data.status !== 'OK') return { status: 'ERROR', reason: 'invalid response' }
    const result = data
    if (!params.checkResult) return { status: 'OK', ...result }
    const error = Types.CloseChannelResponseValidate(result)
    return error === null ? { status: 'OK', ...result } : { status: 'ERROR', reason: error.message }
},
CreateOneTimeInviteLink: async (request: Types.CreateOneTimeInviteLinkRequest): Promise<ResultError | ({ status: 'OK' }& Types.CreateOneTimeInviteLinkResponse)> => {
const auth = await params.retrieveNostrAdminAuth()
if (auth === null) throw new Error('retrieveNostrAdminAuth() returned null')
@ -484,8 +511,8 @@ export default (params: NostrClientParams, send: (to:string, message: NostrRequ
return { status: 'ERROR', reason: 'invalid response' }
},
OpenChannel: async (request: Types.OpenChannelRequest): Promise<ResultError | ({ status: 'OK' }& Types.OpenChannelResponse)> => {
const auth = await params.retrieveNostrUserAuth()
if (auth === null) throw new Error('retrieveNostrUserAuth() returned null')
const auth = await params.retrieveNostrAdminAuth()
if (auth === null) throw new Error('retrieveNostrAdminAuth() returned null')
const nostrRequest: NostrRequest = {}
nostrRequest.body = request
const data = await send(params.pubDestination, {rpcName:'OpenChannel',authIdentifier:auth, ...nostrRequest })
@ -567,6 +594,18 @@ export default (params: NostrClientParams, send: (to:string, message: NostrRequ
}
return { status: 'ERROR', reason: 'invalid response' }
},
// Admin RPC: push new routing-policy settings for a channel to the node.
// The server replies with a bare OK (no payload), so there is nothing to
// validate beyond the status itself.
UpdateChannelPolicy: async (request: Types.UpdateChannelPolicyRequest): Promise<ResultError | ({ status: 'OK' })> => {
    const adminAuth = await params.retrieveNostrAdminAuth()
    if (adminAuth === null) throw new Error('retrieveNostrAdminAuth() returned null')
    const nostrRequest: NostrRequest = {}
    nostrRequest.body = request
    const reply = await send(params.pubDestination, { rpcName: 'UpdateChannelPolicy', authIdentifier: adminAuth, ...nostrRequest })
    if (reply.status === 'ERROR' && typeof reply.reason === 'string') return reply
    if (reply.status === 'OK') return reply
    return { status: 'ERROR', reason: 'invalid response' }
},
UseInviteLink: async (request: Types.UseInviteLinkRequest): Promise<ResultError | ({ status: 'OK' })> => {
const auth = await params.retrieveNostrGuestWithPubAuth()
if (auth === null) throw new Error('retrieveNostrGuestWithPubAuth() returned null')

View file

@ -48,6 +48,22 @@ export default (methods: Types.ServerMethods, opts: NostrOptions) => {
opts.metricsCallback([{ ...info, ...stats, ...authContext }])
}catch(ex){ const e = ex as any; logErrorAndReturnResponse(e, e.message || e, res, logger, { ...info, ...stats, ...authCtx }, opts.metricsCallback); if (opts.throwErrors) throw e }
break
// Admin RPC dispatch: connect the underlying node to a new network peer.
case 'AddPeer':
try {
if (!methods.AddPeer) throw new Error('method: AddPeer is not implemented')
// Resolve the admin auth context for the caller (admin-only endpoint).
const authContext = await opts.NostrAdminAuthGuard(req.appId, req.authIdentifier)
stats.guard = process.hrtime.bigint() // timing checkpoint: auth guard done
authCtx = authContext
const request = req.body
// Structurally validate the request body before invoking the handler.
const error = Types.AddPeerRequestValidate(request)
stats.validate = process.hrtime.bigint() // timing checkpoint: validation done
if (error !== null) return logErrorAndReturnResponse(error, 'invalid request body', res, logger, { ...info, ...stats, ...authCtx }, opts.metricsCallback)
await methods.AddPeer({rpcName:'AddPeer', ctx:authContext , req: request})
stats.handle = process.hrtime.bigint() // timing checkpoint: handler done
// AddPeer returns no payload; a bare OK status is the full reply.
res({status: 'OK'})
opts.metricsCallback([{ ...info, ...stats, ...authContext }])
}catch(ex){ const e = ex as any; logErrorAndReturnResponse(e, e.message || e, res, logger, { ...info, ...stats, ...authCtx }, opts.metricsCallback); if (opts.throwErrors) throw e }
break
case 'AddProduct':
try {
if (!methods.AddProduct) throw new Error('method: AddProduct is not implemented')
@ -327,18 +343,6 @@ export default (methods: Types.ServerMethods, opts: NostrOptions) => {
callsMetrics.push({ ...opInfo, ...opStats, ...ctx })
}
break
case 'OpenChannel':
if (!methods.OpenChannel) {
throw new Error('method not defined: OpenChannel')
} else {
const error = Types.OpenChannelRequestValidate(operation.req)
opStats.validate = process.hrtime.bigint()
if (error !== null) throw error
const res = await methods.OpenChannel({...operation, ctx}); responses.push({ status: 'OK', ...res })
opStats.handle = process.hrtime.bigint()
callsMetrics.push({ ...opInfo, ...opStats, ...ctx })
}
break
case 'PayAddress':
if (!methods.PayAddress) {
throw new Error('method not defined: PayAddress')
@ -419,6 +423,22 @@ export default (methods: Types.ServerMethods, opts: NostrOptions) => {
opts.metricsCallback([{ ...info, ...stats, ...ctx }, ...callsMetrics])
}catch(ex){ const e = ex as any; logErrorAndReturnResponse(e, e.message || e, res, logger, { ...info, ...stats, ...authCtx }, opts.metricsCallback); if (opts.throwErrors) throw e }
break
// Admin RPC dispatch: close a channel and return the handler's response payload.
case 'CloseChannel':
try {
if (!methods.CloseChannel) throw new Error('method: CloseChannel is not implemented')
// Resolve the admin auth context for the caller (admin-only endpoint).
const authContext = await opts.NostrAdminAuthGuard(req.appId, req.authIdentifier)
stats.guard = process.hrtime.bigint() // timing checkpoint: auth guard done
authCtx = authContext
const request = req.body
// Structurally validate the request body before invoking the handler.
const error = Types.CloseChannelRequestValidate(request)
stats.validate = process.hrtime.bigint() // timing checkpoint: validation done
if (error !== null) return logErrorAndReturnResponse(error, 'invalid request body', res, logger, { ...info, ...stats, ...authCtx }, opts.metricsCallback)
const response = await methods.CloseChannel({rpcName:'CloseChannel', ctx:authContext , req: request})
stats.handle = process.hrtime.bigint() // timing checkpoint: handler done
// Merge the handler response (e.g. closing tx id) into the OK envelope.
res({status: 'OK', ...response})
opts.metricsCallback([{ ...info, ...stats, ...authContext }])
}catch(ex){ const e = ex as any; logErrorAndReturnResponse(e, e.message || e, res, logger, { ...info, ...stats, ...authCtx }, opts.metricsCallback); if (opts.throwErrors) throw e }
break
case 'CreateOneTimeInviteLink':
try {
if (!methods.CreateOneTimeInviteLink) throw new Error('method: CreateOneTimeInviteLink is not implemented')
@ -799,7 +819,7 @@ export default (methods: Types.ServerMethods, opts: NostrOptions) => {
case 'OpenChannel':
try {
if (!methods.OpenChannel) throw new Error('method: OpenChannel is not implemented')
const authContext = await opts.NostrUserAuthGuard(req.appId, req.authIdentifier)
const authContext = await opts.NostrAdminAuthGuard(req.appId, req.authIdentifier)
stats.guard = process.hrtime.bigint()
authCtx = authContext
const request = req.body
@ -892,6 +912,22 @@ export default (methods: Types.ServerMethods, opts: NostrOptions) => {
opts.metricsCallback([{ ...info, ...stats, ...authContext }])
}catch(ex){ const e = ex as any; logErrorAndReturnResponse(e, e.message || e, res, logger, { ...info, ...stats, ...authCtx }, opts.metricsCallback); if (opts.throwErrors) throw e }
break
// Admin RPC dispatch: update a channel's routing policy settings.
case 'UpdateChannelPolicy':
try {
if (!methods.UpdateChannelPolicy) throw new Error('method: UpdateChannelPolicy is not implemented')
// Resolve the admin auth context for the caller (admin-only endpoint).
const authContext = await opts.NostrAdminAuthGuard(req.appId, req.authIdentifier)
stats.guard = process.hrtime.bigint() // timing checkpoint: auth guard done
authCtx = authContext
const request = req.body
// Structurally validate the request body before invoking the handler.
const error = Types.UpdateChannelPolicyRequestValidate(request)
stats.validate = process.hrtime.bigint() // timing checkpoint: validation done
if (error !== null) return logErrorAndReturnResponse(error, 'invalid request body', res, logger, { ...info, ...stats, ...authCtx }, opts.metricsCallback)
await methods.UpdateChannelPolicy({rpcName:'UpdateChannelPolicy', ctx:authContext , req: request})
stats.handle = process.hrtime.bigint() // timing checkpoint: handler done
// UpdateChannelPolicy returns no payload; a bare OK status is the full reply.
res({status: 'OK'})
opts.metricsCallback([{ ...info, ...stats, ...authContext }])
}catch(ex){ const e = ex as any; logErrorAndReturnResponse(e, e.message || e, res, logger, { ...info, ...stats, ...authCtx }, opts.metricsCallback); if (opts.throwErrors) throw e }
break
case 'UseInviteLink':
try {
if (!methods.UseInviteLink) throw new Error('method: UseInviteLink is not implemented')

View file

@ -7,8 +7,8 @@ export type RequestMetric = AuthContext & RequestInfo & RequestStats & { error?:
export type AdminContext = {
admin_id: string
}
export type AdminMethodInputs = AddApp_Input | AuthApp_Input | BanUser_Input | CreateOneTimeInviteLink_Input | GetInviteLinkState_Input | GetSeed_Input | ListChannels_Input | LndGetInfo_Input
export type AdminMethodOutputs = AddApp_Output | AuthApp_Output | BanUser_Output | CreateOneTimeInviteLink_Output | GetInviteLinkState_Output | GetSeed_Output | ListChannels_Output | LndGetInfo_Output
export type AdminMethodInputs = AddApp_Input | AddPeer_Input | AuthApp_Input | BanUser_Input | CloseChannel_Input | CreateOneTimeInviteLink_Input | GetInviteLinkState_Input | GetSeed_Input | ListChannels_Input | LndGetInfo_Input | OpenChannel_Input | UpdateChannelPolicy_Input
export type AdminMethodOutputs = AddApp_Output | AddPeer_Output | AuthApp_Output | BanUser_Output | CloseChannel_Output | CreateOneTimeInviteLink_Output | GetInviteLinkState_Output | GetSeed_Output | ListChannels_Output | LndGetInfo_Output | OpenChannel_Output | UpdateChannelPolicy_Output
export type AppContext = {
app_id: string
}
@ -34,8 +34,8 @@ export type UserContext = {
app_user_id: string
user_id: string
}
export type UserMethodInputs = AddProduct_Input | AuthorizeDebit_Input | BanDebit_Input | DecodeInvoice_Input | EditDebit_Input | EnrollAdminToken_Input | GetDebitAuthorizations_Input | GetLNURLChannelLink_Input | GetLnurlPayLink_Input | GetLnurlWithdrawLink_Input | GetPaymentState_Input | GetUserInfo_Input | GetUserOperations_Input | NewAddress_Input | NewInvoice_Input | NewProductInvoice_Input | OpenChannel_Input | PayAddress_Input | PayInvoice_Input | ResetDebit_Input | RespondToDebit_Input | UpdateCallbackUrl_Input | UserHealth_Input
export type UserMethodOutputs = AddProduct_Output | AuthorizeDebit_Output | BanDebit_Output | DecodeInvoice_Output | EditDebit_Output | EnrollAdminToken_Output | GetDebitAuthorizations_Output | GetLNURLChannelLink_Output | GetLnurlPayLink_Output | GetLnurlWithdrawLink_Output | GetPaymentState_Output | GetUserInfo_Output | GetUserOperations_Output | NewAddress_Output | NewInvoice_Output | NewProductInvoice_Output | OpenChannel_Output | PayAddress_Output | PayInvoice_Output | ResetDebit_Output | RespondToDebit_Output | UpdateCallbackUrl_Output | UserHealth_Output
export type UserMethodInputs = AddProduct_Input | AuthorizeDebit_Input | BanDebit_Input | DecodeInvoice_Input | EditDebit_Input | EnrollAdminToken_Input | GetDebitAuthorizations_Input | GetLNURLChannelLink_Input | GetLnurlPayLink_Input | GetLnurlWithdrawLink_Input | GetPaymentState_Input | GetUserInfo_Input | GetUserOperations_Input | NewAddress_Input | NewInvoice_Input | NewProductInvoice_Input | PayAddress_Input | PayInvoice_Input | ResetDebit_Input | RespondToDebit_Input | UpdateCallbackUrl_Input | UserHealth_Input
export type UserMethodOutputs = AddProduct_Output | AuthorizeDebit_Output | BanDebit_Output | DecodeInvoice_Output | EditDebit_Output | EnrollAdminToken_Output | GetDebitAuthorizations_Output | GetLNURLChannelLink_Output | GetLnurlPayLink_Output | GetLnurlWithdrawLink_Output | GetPaymentState_Output | GetUserInfo_Output | GetUserOperations_Output | NewAddress_Output | NewInvoice_Output | NewProductInvoice_Output | PayAddress_Output | PayInvoice_Output | ResetDebit_Output | RespondToDebit_Output | UpdateCallbackUrl_Output | UserHealth_Output
export type AuthContext = AdminContext | AppContext | GuestContext | GuestWithPubContext | MetricsContext | UserContext
export type AddApp_Input = {rpcName:'AddApp', req: AddAppRequest}
@ -50,6 +50,9 @@ export type AddAppUser_Output = ResultError | ({ status: 'OK' } & AppUser)
export type AddAppUserInvoice_Input = {rpcName:'AddAppUserInvoice', req: AddAppUserInvoiceRequest}
export type AddAppUserInvoice_Output = ResultError | ({ status: 'OK' } & NewInvoiceResponse)
// AddPeer RPC envelope types: request wrapper and its bare-OK response union.
export type AddPeer_Input = {rpcName:'AddPeer', req: AddPeerRequest}
export type AddPeer_Output = ResultError | { status: 'OK' }
export type AddProduct_Input = {rpcName:'AddProduct', req: AddProductRequest}
export type AddProduct_Output = ResultError | ({ status: 'OK' } & Product)
@ -68,6 +71,9 @@ export type BanUser_Output = ResultError | ({ status: 'OK' } & BanUserResponse)
export type BatchUser_Input = UserMethodInputs
export type BatchUser_Output = UserMethodOutputs
// CloseChannel RPC envelope types: request wrapper and OK-with-payload response union.
export type CloseChannel_Input = {rpcName:'CloseChannel', req: CloseChannelRequest}
export type CloseChannel_Output = ResultError | ({ status: 'OK' } & CloseChannelResponse)
export type CreateOneTimeInviteLink_Input = {rpcName:'CreateOneTimeInviteLink', req: CreateOneTimeInviteLinkRequest}
export type CreateOneTimeInviteLink_Output = ResultError | ({ status: 'OK' } & CreateOneTimeInviteLinkResponse)
@ -243,6 +249,9 @@ export type SetMockInvoiceAsPaid_Output = ResultError | { status: 'OK' }
export type UpdateCallbackUrl_Input = {rpcName:'UpdateCallbackUrl', req: CallbackUrl}
export type UpdateCallbackUrl_Output = ResultError | ({ status: 'OK' } & CallbackUrl)
// UpdateChannelPolicy RPC envelope types: request wrapper and its bare-OK response union.
export type UpdateChannelPolicy_Input = {rpcName:'UpdateChannelPolicy', req: UpdateChannelPolicyRequest}
export type UpdateChannelPolicy_Output = ResultError | { status: 'OK' }
export type UseInviteLink_Input = {rpcName:'UseInviteLink', req: UseInviteLinkRequest}
export type UseInviteLink_Output = ResultError | { status: 'OK' }
@ -254,11 +263,13 @@ export type ServerMethods = {
AddAppInvoice?: (req: AddAppInvoice_Input & {ctx: AppContext }) => Promise<NewInvoiceResponse>
AddAppUser?: (req: AddAppUser_Input & {ctx: AppContext }) => Promise<AppUser>
AddAppUserInvoice?: (req: AddAppUserInvoice_Input & {ctx: AppContext }) => Promise<NewInvoiceResponse>
AddPeer?: (req: AddPeer_Input & {ctx: AdminContext }) => Promise<void>
AddProduct?: (req: AddProduct_Input & {ctx: UserContext }) => Promise<Product>
AuthApp?: (req: AuthApp_Input & {ctx: AdminContext }) => Promise<AuthApp>
AuthorizeDebit?: (req: AuthorizeDebit_Input & {ctx: UserContext }) => Promise<DebitAuthorization>
BanDebit?: (req: BanDebit_Input & {ctx: UserContext }) => Promise<void>
BanUser?: (req: BanUser_Input & {ctx: AdminContext }) => Promise<BanUserResponse>
CloseChannel?: (req: CloseChannel_Input & {ctx: AdminContext }) => Promise<CloseChannelResponse>
CreateOneTimeInviteLink?: (req: CreateOneTimeInviteLink_Input & {ctx: AdminContext }) => Promise<CreateOneTimeInviteLinkResponse>
DecodeInvoice?: (req: DecodeInvoice_Input & {ctx: UserContext }) => Promise<DecodeInvoiceResponse>
EditDebit?: (req: EditDebit_Input & {ctx: UserContext }) => Promise<void>
@ -296,7 +307,7 @@ export type ServerMethods = {
NewAddress?: (req: NewAddress_Input & {ctx: UserContext }) => Promise<NewAddressResponse>
NewInvoice?: (req: NewInvoice_Input & {ctx: UserContext }) => Promise<NewInvoiceResponse>
NewProductInvoice?: (req: NewProductInvoice_Input & {ctx: UserContext }) => Promise<NewInvoiceResponse>
OpenChannel?: (req: OpenChannel_Input & {ctx: UserContext }) => Promise<OpenChannelResponse>
OpenChannel?: (req: OpenChannel_Input & {ctx: AdminContext }) => Promise<OpenChannelResponse>
PayAddress?: (req: PayAddress_Input & {ctx: UserContext }) => Promise<PayAddressResponse>
PayAppUserInvoice?: (req: PayAppUserInvoice_Input & {ctx: AppContext }) => Promise<PayInvoiceResponse>
PayInvoice?: (req: PayInvoice_Input & {ctx: UserContext }) => Promise<PayInvoiceResponse>
@ -310,6 +321,7 @@ export type ServerMethods = {
SetMockAppUserBalance?: (req: SetMockAppUserBalance_Input & {ctx: AppContext }) => Promise<void>
SetMockInvoiceAsPaid?: (req: SetMockInvoiceAsPaid_Input & {ctx: GuestContext }) => Promise<void>
UpdateCallbackUrl?: (req: UpdateCallbackUrl_Input & {ctx: UserContext }) => Promise<CallbackUrl>
UpdateChannelPolicy?: (req: UpdateChannelPolicy_Input & {ctx: AdminContext }) => Promise<void>
UseInviteLink?: (req: UseInviteLink_Input & {ctx: GuestWithPubContext }) => Promise<void>
UserHealth?: (req: UserHealth_Input & {ctx: UserContext }) => Promise<void>
}
@ -332,6 +344,14 @@ export const enumCheckIntervalType = (e?: IntervalType): boolean => {
for (const v in IntervalType) if (e === v) return true
return false
}
/** Distinguishes on-chain operations from Lightning-invoice operations. */
export enum OperationType {
    CHAIN_OP = 'CHAIN_OP',
    INVOICE_OP = 'INVOICE_OP',
}
/** Runtime guard: true when `e` equals one of the declared OperationType members. */
export const enumCheckOperationType = (e?: OperationType): boolean => {
    // String enums have no reverse mapping, so the keys are exactly the values.
    return Object.keys(OperationType).some(v => v === e)
}
export enum UserOperationType {
INCOMING_INVOICE = 'INCOMING_INVOICE',
INCOMING_TX = 'INCOMING_TX',
@ -463,6 +483,34 @@ export const AddAppUserRequestValidate = (o?: AddAppUserRequest, opts: AddAppUse
return null
}
// Parameters for connecting the node to a new network peer.
export type AddPeerRequest = {
    host: string
    port: number
    pubkey: string
}
export const AddPeerRequestOptionalFields: [] = []
// Validation knobs: optional per-field custom checks on top of the base options.
export type AddPeerRequestOptions = OptionsBaseMessage & {
    checkOptionalsAreSet?: []
    host_CustomCheck?: (v: string) => boolean
    port_CustomCheck?: (v: number) => boolean
    pubkey_CustomCheck?: (v: string) => boolean
}
// Structural validator for AddPeerRequest. Returns null when valid, otherwise
// an Error describing the first failing check.
export const AddPeerRequestValidate = (o?: AddPeerRequest, opts: AddPeerRequestOptions = {}, path: string = 'AddPeerRequest::root.'): Error | null => {
    // The two optionals modes are mutually exclusive.
    if (opts.checkOptionalsAreSet && opts.allOptionalsAreSet) {
        return new Error(path + ': only one of checkOptionalsAreSet or allOptionalNonDefault can be set for each message')
    }
    if (typeof o !== 'object' || o === null) {
        return new Error(path + ': object is not an instance of an object or is null')
    }
    if (typeof o.host !== 'string') {
        return new Error(`${path}.host: is not a string`)
    }
    if (opts.host_CustomCheck && !opts.host_CustomCheck(o.host)) {
        return new Error(`${path}.host: custom check failed`)
    }
    if (typeof o.port !== 'number') {
        return new Error(`${path}.port: is not a number`)
    }
    if (opts.port_CustomCheck && !opts.port_CustomCheck(o.port)) {
        return new Error(`${path}.port: custom check failed`)
    }
    if (typeof o.pubkey !== 'string') {
        return new Error(`${path}.pubkey: is not a string`)
    }
    if (opts.pubkey_CustomCheck && !opts.pubkey_CustomCheck(o.pubkey)) {
        return new Error(`${path}.pubkey: custom check failed`)
    }
    return null
}
export type AddProductRequest = {
name: string
price_sats: number
@ -810,9 +858,99 @@ export const CallbackUrlValidate = (o?: CallbackUrl, opts: CallbackUrlOptions =
return null
}
// Routing-policy settings attached to an open channel. Field semantics follow
// LND channel policy naming; units (msat / ppm / blocks) are implied by the names.
export type ChannelPolicy = {
    base_fee_msat: number
    fee_rate_ppm: number
    max_htlc_msat: number
    min_htlc_msat: number
    timelock_delta: number
}
export const ChannelPolicyOptionalFields: [] = []
// Validation knobs: optional per-field custom checks on top of the base options.
export type ChannelPolicyOptions = OptionsBaseMessage & {
    checkOptionalsAreSet?: []
    base_fee_msat_CustomCheck?: (v: number) => boolean
    fee_rate_ppm_CustomCheck?: (v: number) => boolean
    max_htlc_msat_CustomCheck?: (v: number) => boolean
    min_htlc_msat_CustomCheck?: (v: number) => boolean
    timelock_delta_CustomCheck?: (v: number) => boolean
}
// Structural validator for ChannelPolicy. Returns null when valid, otherwise
// an Error describing the first failing check.
export const ChannelPolicyValidate = (o?: ChannelPolicy, opts: ChannelPolicyOptions = {}, path: string = 'ChannelPolicy::root.'): Error | null => {
    // The two optionals modes are mutually exclusive.
    if (opts.checkOptionalsAreSet && opts.allOptionalsAreSet) {
        return new Error(path + ': only one of checkOptionalsAreSet or allOptionalNonDefault can be set for each message')
    }
    if (typeof o !== 'object' || o === null) {
        return new Error(path + ': object is not an instance of an object or is null')
    }
    if (typeof o.base_fee_msat !== 'number') {
        return new Error(`${path}.base_fee_msat: is not a number`)
    }
    if (opts.base_fee_msat_CustomCheck && !opts.base_fee_msat_CustomCheck(o.base_fee_msat)) {
        return new Error(`${path}.base_fee_msat: custom check failed`)
    }
    if (typeof o.fee_rate_ppm !== 'number') {
        return new Error(`${path}.fee_rate_ppm: is not a number`)
    }
    if (opts.fee_rate_ppm_CustomCheck && !opts.fee_rate_ppm_CustomCheck(o.fee_rate_ppm)) {
        return new Error(`${path}.fee_rate_ppm: custom check failed`)
    }
    if (typeof o.max_htlc_msat !== 'number') {
        return new Error(`${path}.max_htlc_msat: is not a number`)
    }
    if (opts.max_htlc_msat_CustomCheck && !opts.max_htlc_msat_CustomCheck(o.max_htlc_msat)) {
        return new Error(`${path}.max_htlc_msat: custom check failed`)
    }
    if (typeof o.min_htlc_msat !== 'number') {
        return new Error(`${path}.min_htlc_msat: is not a number`)
    }
    if (opts.min_htlc_msat_CustomCheck && !opts.min_htlc_msat_CustomCheck(o.min_htlc_msat)) {
        return new Error(`${path}.min_htlc_msat: custom check failed`)
    }
    if (typeof o.timelock_delta !== 'number') {
        return new Error(`${path}.timelock_delta: is not a number`)
    }
    if (opts.timelock_delta_CustomCheck && !opts.timelock_delta_CustomCheck(o.timelock_delta)) {
        return new Error(`${path}.timelock_delta: custom check failed`)
    }
    return null
}
// Parameters for closing a channel identified by its funding outpoint
// (funding_txid + output_index); `force` selects a unilateral close.
export type CloseChannelRequest = {
    force: boolean
    funding_txid: string
    output_index: number
    sat_per_v_byte: number
}
export const CloseChannelRequestOptionalFields: [] = []
// Validation knobs: optional per-field custom checks on top of the base options.
export type CloseChannelRequestOptions = OptionsBaseMessage & {
    checkOptionalsAreSet?: []
    force_CustomCheck?: (v: boolean) => boolean
    funding_txid_CustomCheck?: (v: string) => boolean
    output_index_CustomCheck?: (v: number) => boolean
    sat_per_v_byte_CustomCheck?: (v: number) => boolean
}
// Structural validator for CloseChannelRequest. Returns null when valid,
// otherwise an Error describing the first failing check.
export const CloseChannelRequestValidate = (o?: CloseChannelRequest, opts: CloseChannelRequestOptions = {}, path: string = 'CloseChannelRequest::root.'): Error | null => {
    // The two optionals modes are mutually exclusive.
    if (opts.checkOptionalsAreSet && opts.allOptionalsAreSet) {
        return new Error(path + ': only one of checkOptionalsAreSet or allOptionalNonDefault can be set for each message')
    }
    if (typeof o !== 'object' || o === null) {
        return new Error(path + ': object is not an instance of an object or is null')
    }
    if (typeof o.force !== 'boolean') {
        return new Error(`${path}.force: is not a boolean`)
    }
    if (opts.force_CustomCheck && !opts.force_CustomCheck(o.force)) {
        return new Error(`${path}.force: custom check failed`)
    }
    if (typeof o.funding_txid !== 'string') {
        return new Error(`${path}.funding_txid: is not a string`)
    }
    if (opts.funding_txid_CustomCheck && !opts.funding_txid_CustomCheck(o.funding_txid)) {
        return new Error(`${path}.funding_txid: custom check failed`)
    }
    if (typeof o.output_index !== 'number') {
        return new Error(`${path}.output_index: is not a number`)
    }
    if (opts.output_index_CustomCheck && !opts.output_index_CustomCheck(o.output_index)) {
        return new Error(`${path}.output_index: custom check failed`)
    }
    if (typeof o.sat_per_v_byte !== 'number') {
        return new Error(`${path}.sat_per_v_byte: is not a number`)
    }
    if (opts.sat_per_v_byte_CustomCheck && !opts.sat_per_v_byte_CustomCheck(o.sat_per_v_byte)) {
        return new Error(`${path}.sat_per_v_byte: custom check failed`)
    }
    return null
}
// Response to a CloseChannel RPC: the txid of the closing transaction.
export type CloseChannelResponse = {
    closing_txid: string
}
export const CloseChannelResponseOptionalFields: [] = []
// Validation knobs: optional per-field custom check on top of the base options.
export type CloseChannelResponseOptions = OptionsBaseMessage & {
    checkOptionalsAreSet?: []
    closing_txid_CustomCheck?: (v: string) => boolean
}
// Structural validator for CloseChannelResponse. Returns null when valid,
// otherwise an Error describing the first failing check.
export const CloseChannelResponseValidate = (o?: CloseChannelResponse, opts: CloseChannelResponseOptions = {}, path: string = 'CloseChannelResponse::root.'): Error | null => {
    // The two optionals modes are mutually exclusive.
    if (opts.checkOptionalsAreSet && opts.allOptionalsAreSet) {
        return new Error(path + ': only one of checkOptionalsAreSet or allOptionalNonDefault can be set for each message')
    }
    if (typeof o !== 'object' || o === null) {
        return new Error(path + ': object is not an instance of an object or is null')
    }
    if (typeof o.closing_txid !== 'string') {
        return new Error(`${path}.closing_txid: is not a string`)
    }
    if (opts.closing_txid_CustomCheck && !opts.closing_txid_CustomCheck(o.closing_txid)) {
        return new Error(`${path}.closing_txid: custom check failed`)
    }
    return null
}
// A channel that has been closed on-chain, as reported in node metrics.
export type ClosedChannel = {
capacity: number // total channel capacity — presumably sats; confirm with producer
channel_id: string
close_tx_timestamp: number // NOTE(review): unix time of the closing tx — confirm seconds vs milliseconds
closed_height: number // block height at which the close was confirmed
}
export const ClosedChannelOptionalFields: [] = []
@ -820,6 +958,7 @@ export type ClosedChannelOptions = OptionsBaseMessage & {
checkOptionalsAreSet?: []
capacity_CustomCheck?: (v: number) => boolean
channel_id_CustomCheck?: (v: string) => boolean
close_tx_timestamp_CustomCheck?: (v: number) => boolean
closed_height_CustomCheck?: (v: number) => boolean
}
export const ClosedChannelValidate = (o?: ClosedChannel, opts: ClosedChannelOptions = {}, path: string = 'ClosedChannel::root.'): Error | null => {
@ -832,6 +971,9 @@ export const ClosedChannelValidate = (o?: ClosedChannel, opts: ClosedChannelOpti
if (typeof o.channel_id !== 'string') return new Error(`${path}.channel_id: is not a string`)
if (opts.channel_id_CustomCheck && !opts.channel_id_CustomCheck(o.channel_id)) return new Error(`${path}.channel_id: custom check failed`)
if (typeof o.close_tx_timestamp !== 'number') return new Error(`${path}.close_tx_timestamp: is not a number`)
if (opts.close_tx_timestamp_CustomCheck && !opts.close_tx_timestamp_CustomCheck(o.close_tx_timestamp)) return new Error(`${path}.close_tx_timestamp: custom check failed`)
if (typeof o.closed_height !== 'number') return new Error(`${path}.closed_height: is not a number`)
if (opts.closed_height_CustomCheck && !opts.closed_height_CustomCheck(o.closed_height)) return new Error(`${path}.closed_height: custom check failed`)
@ -1601,11 +1743,17 @@ export const LndGetInfoRequestValidate = (o?: LndGetInfoRequest, opts: LndGetInf
export type LndGetInfoResponse = {
alias: string
synced_to_chain: boolean
synced_to_graph: boolean
watchdog_barking: boolean
}
export const LndGetInfoResponseOptionalFields: [] = []
export type LndGetInfoResponseOptions = OptionsBaseMessage & {
checkOptionalsAreSet?: []
alias_CustomCheck?: (v: string) => boolean
synced_to_chain_CustomCheck?: (v: boolean) => boolean
synced_to_graph_CustomCheck?: (v: boolean) => boolean
watchdog_barking_CustomCheck?: (v: boolean) => boolean
}
export const LndGetInfoResponseValidate = (o?: LndGetInfoResponse, opts: LndGetInfoResponseOptions = {}, path: string = 'LndGetInfoResponse::root.'): Error | null => {
if (opts.checkOptionalsAreSet && opts.allOptionalsAreSet) return new Error(path + ': only one of checkOptionalsAreSet or allOptionalNonDefault can be set for each message')
@ -1614,6 +1762,15 @@ export const LndGetInfoResponseValidate = (o?: LndGetInfoResponse, opts: LndGetI
if (typeof o.alias !== 'string') return new Error(`${path}.alias: is not a string`)
if (opts.alias_CustomCheck && !opts.alias_CustomCheck(o.alias)) return new Error(`${path}.alias: custom check failed`)
if (typeof o.synced_to_chain !== 'boolean') return new Error(`${path}.synced_to_chain: is not a boolean`)
if (opts.synced_to_chain_CustomCheck && !opts.synced_to_chain_CustomCheck(o.synced_to_chain)) return new Error(`${path}.synced_to_chain: custom check failed`)
if (typeof o.synced_to_graph !== 'boolean') return new Error(`${path}.synced_to_graph: is not a boolean`)
if (opts.synced_to_graph_CustomCheck && !opts.synced_to_graph_CustomCheck(o.synced_to_graph)) return new Error(`${path}.synced_to_graph: custom check failed`)
if (typeof o.watchdog_barking !== 'boolean') return new Error(`${path}.watchdog_barking: is not a boolean`)
if (opts.watchdog_barking_CustomCheck && !opts.watchdog_barking_CustomCheck(o.watchdog_barking)) return new Error(`${path}.watchdog_barking: custom check failed`)
return null
}
@ -1676,6 +1833,7 @@ export type LndNodeMetrics = {
online_channels: number
open_channels: OpenChannel[]
pending_channels: number
root_ops: RootOperation[]
}
export const LndNodeMetricsOptionalFields: [] = []
export type LndNodeMetricsOptions = OptionsBaseMessage & {
@ -1696,6 +1854,8 @@ export type LndNodeMetricsOptions = OptionsBaseMessage & {
open_channels_ItemOptions?: OpenChannelOptions
open_channels_CustomCheck?: (v: OpenChannel[]) => boolean
pending_channels_CustomCheck?: (v: number) => boolean
root_ops_ItemOptions?: RootOperationOptions
root_ops_CustomCheck?: (v: RootOperation[]) => boolean
}
export const LndNodeMetricsValidate = (o?: LndNodeMetrics, opts: LndNodeMetricsOptions = {}, path: string = 'LndNodeMetrics::root.'): Error | null => {
if (opts.checkOptionalsAreSet && opts.allOptionalsAreSet) return new Error(path + ': only one of checkOptionalsAreSet or allOptionalNonDefault can be set for each message')
@ -1754,6 +1914,13 @@ export const LndNodeMetricsValidate = (o?: LndNodeMetrics, opts: LndNodeMetricsO
if (typeof o.pending_channels !== 'number') return new Error(`${path}.pending_channels: is not a number`)
if (opts.pending_channels_CustomCheck && !opts.pending_channels_CustomCheck(o.pending_channels)) return new Error(`${path}.pending_channels: custom check failed`)
if (!Array.isArray(o.root_ops)) return new Error(`${path}.root_ops: is not an array`)
for (let index = 0; index < o.root_ops.length; index++) {
const root_opsErr = RootOperationValidate(o.root_ops[index], opts.root_ops_ItemOptions, `${path}.root_ops[${index}]`)
if (root_opsErr !== null) return root_opsErr
}
if (opts.root_ops_CustomCheck && !opts.root_ops_CustomCheck(o.root_ops)) return new Error(`${path}.root_ops: custom check failed`)
return null
}
@ -2038,20 +2205,25 @@ export type OpenChannel = {
active: boolean
capacity: number
channel_id: string
channel_point: string
label: string
lifetime: number
local_balance: number
policy?: ChannelPolicy
remote_balance: number
}
export const OpenChannelOptionalFields: [] = []
export type OpenChannelOptionalField = 'policy'
export const OpenChannelOptionalFields: OpenChannelOptionalField[] = ['policy']
export type OpenChannelOptions = OptionsBaseMessage & {
checkOptionalsAreSet?: []
checkOptionalsAreSet?: OpenChannelOptionalField[]
active_CustomCheck?: (v: boolean) => boolean
capacity_CustomCheck?: (v: number) => boolean
channel_id_CustomCheck?: (v: string) => boolean
channel_point_CustomCheck?: (v: string) => boolean
label_CustomCheck?: (v: string) => boolean
lifetime_CustomCheck?: (v: number) => boolean
local_balance_CustomCheck?: (v: number) => boolean
policy_Options?: ChannelPolicyOptions
remote_balance_CustomCheck?: (v: number) => boolean
}
export const OpenChannelValidate = (o?: OpenChannel, opts: OpenChannelOptions = {}, path: string = 'OpenChannel::root.'): Error | null => {
@ -2067,6 +2239,9 @@ export const OpenChannelValidate = (o?: OpenChannel, opts: OpenChannelOptions =
if (typeof o.channel_id !== 'string') return new Error(`${path}.channel_id: is not a string`)
if (opts.channel_id_CustomCheck && !opts.channel_id_CustomCheck(o.channel_id)) return new Error(`${path}.channel_id: custom check failed`)
if (typeof o.channel_point !== 'string') return new Error(`${path}.channel_point: is not a string`)
if (opts.channel_point_CustomCheck && !opts.channel_point_CustomCheck(o.channel_point)) return new Error(`${path}.channel_point: custom check failed`)
if (typeof o.label !== 'string') return new Error(`${path}.label: is not a string`)
if (opts.label_CustomCheck && !opts.label_CustomCheck(o.label)) return new Error(`${path}.label: custom check failed`)
@ -2076,6 +2251,12 @@ export const OpenChannelValidate = (o?: OpenChannel, opts: OpenChannelOptions =
if (typeof o.local_balance !== 'number') return new Error(`${path}.local_balance: is not a number`)
if (opts.local_balance_CustomCheck && !opts.local_balance_CustomCheck(o.local_balance)) return new Error(`${path}.local_balance: custom check failed`)
if (typeof o.policy === 'object' || opts.allOptionalsAreSet || opts.checkOptionalsAreSet?.includes('policy')) {
const policyErr = ChannelPolicyValidate(o.policy, opts.policy_Options, `${path}.policy`)
if (policyErr !== null) return policyErr
}
if (typeof o.remote_balance !== 'number') return new Error(`${path}.remote_balance: is not a number`)
if (opts.remote_balance_CustomCheck && !opts.remote_balance_CustomCheck(o.remote_balance)) return new Error(`${path}.remote_balance: custom check failed`)
@ -2083,52 +2264,58 @@ export const OpenChannelValidate = (o?: OpenChannel, opts: OpenChannelOptions =
}
// Request to open a channel to `node_pubkey`, funded with
// `local_funding_amount` sats at a fee rate of `sat_per_v_byte`.
// `close_address` and `push_sat` are optional (see OpenChannelRequestOptionalFields).
// NOTE(review): the previous text interleaved the removed camelCase fields
// (closeAddress/destination/fundingAmount/pushAmount) with the new snake_case
// ones — a diff-merge artifact. This is the reconstructed snake_case definition.
export type OpenChannelRequest = {
    close_address?: string
    local_funding_amount: number
    node_pubkey: string
    push_sat?: number
    sat_per_v_byte: number
}
export type OpenChannelRequestOptionalField = 'close_address' | 'push_sat'
export const OpenChannelRequestOptionalFields: OpenChannelRequestOptionalField[] = ['close_address', 'push_sat']
export type OpenChannelRequestOptions = OptionsBaseMessage & {
    checkOptionalsAreSet?: OpenChannelRequestOptionalField[]
    close_address_CustomCheck?: (v?: string) => boolean
    local_funding_amount_CustomCheck?: (v: number) => boolean
    node_pubkey_CustomCheck?: (v: string) => boolean
    push_sat_CustomCheck?: (v?: number) => boolean
    sat_per_v_byte_CustomCheck?: (v: number) => boolean
}
/**
 * Validates an OpenChannelRequest message.
 * Optional fields are only type-checked when present, or when
 * `allOptionalsAreSet` / `checkOptionalsAreSet` forces them to be required.
 * Returns the first validation Error found, or null when the message is valid.
 */
export const OpenChannelRequestValidate = (o?: OpenChannelRequest, opts: OpenChannelRequestOptions = {}, path: string = 'OpenChannelRequest::root.'): Error | null => {
    if (opts.checkOptionalsAreSet && opts.allOptionalsAreSet) return new Error(path + ': only one of checkOptionalsAreSet or allOptionalNonDefault can be set for each message')
    if (typeof o !== 'object' || o === null) return new Error(path + ': object is not an instance of an object or is null')
    if ((o.close_address || opts.allOptionalsAreSet || opts.checkOptionalsAreSet?.includes('close_address')) && typeof o.close_address !== 'string') return new Error(`${path}.close_address: is not a string`)
    if (opts.close_address_CustomCheck && !opts.close_address_CustomCheck(o.close_address)) return new Error(`${path}.close_address: custom check failed`)
    if (typeof o.local_funding_amount !== 'number') return new Error(`${path}.local_funding_amount: is not a number`)
    if (opts.local_funding_amount_CustomCheck && !opts.local_funding_amount_CustomCheck(o.local_funding_amount)) return new Error(`${path}.local_funding_amount: custom check failed`)
    if (typeof o.node_pubkey !== 'string') return new Error(`${path}.node_pubkey: is not a string`)
    if (opts.node_pubkey_CustomCheck && !opts.node_pubkey_CustomCheck(o.node_pubkey)) return new Error(`${path}.node_pubkey: custom check failed`)
    if ((o.push_sat || opts.allOptionalsAreSet || opts.checkOptionalsAreSet?.includes('push_sat')) && typeof o.push_sat !== 'number') return new Error(`${path}.push_sat: is not a number`)
    if (opts.push_sat_CustomCheck && !opts.push_sat_CustomCheck(o.push_sat)) return new Error(`${path}.push_sat: custom check failed`)
    if (typeof o.sat_per_v_byte !== 'number') return new Error(`${path}.sat_per_v_byte: is not a number`)
    if (opts.sat_per_v_byte_CustomCheck && !opts.sat_per_v_byte_CustomCheck(o.sat_per_v_byte)) return new Error(`${path}.sat_per_v_byte: custom check failed`)
    return null
}
// Response to an OpenChannelRequest: the id of the newly opened channel.
// NOTE(review): the previous text interleaved the removed `channelId` field
// with the new `channel_id` one — a diff-merge artifact. This is the
// reconstructed snake_case definition.
export type OpenChannelResponse = {
    channel_id: string
}
export const OpenChannelResponseOptionalFields: [] = []
export type OpenChannelResponseOptions = OptionsBaseMessage & {
    checkOptionalsAreSet?: []
    channel_id_CustomCheck?: (v: string) => boolean
}
/**
 * Validates an OpenChannelResponse message.
 * Returns the first validation Error found, or null when the message is valid.
 */
export const OpenChannelResponseValidate = (o?: OpenChannelResponse, opts: OpenChannelResponseOptions = {}, path: string = 'OpenChannelResponse::root.'): Error | null => {
    if (opts.checkOptionalsAreSet && opts.allOptionalsAreSet) return new Error(path + ': only one of checkOptionalsAreSet or allOptionalNonDefault can be set for each message')
    if (typeof o !== 'object' || o === null) return new Error(path + ': object is not an instance of an object or is null')
    if (typeof o.channel_id !== 'string') return new Error(`${path}.channel_id: is not a string`)
    if (opts.channel_id_CustomCheck && !opts.channel_id_CustomCheck(o.channel_id)) return new Error(`${path}.channel_id: custom check failed`)
    return null
}
@ -2418,6 +2605,39 @@ export const RequestNPubLinkingTokenResponseValidate = (o?: RequestNPubLinkingTo
return null
}
// A single root-level accounting operation: an amount, its creation time
// (unix seconds — TODO confirm units against the writer), a unique op id and
// an OperationType discriminator.
export type RootOperation = {
    amount: number
    created_at_unix: number
    op_id: string
    op_type: OperationType
}
export const RootOperationOptionalFields: [] = []
export type RootOperationOptions = OptionsBaseMessage & {
    checkOptionalsAreSet?: []
    amount_CustomCheck?: (v: number) => boolean
    created_at_unix_CustomCheck?: (v: number) => boolean
    op_id_CustomCheck?: (v: string) => boolean
    op_type_CustomCheck?: (v: OperationType) => boolean
}
/**
 * Validates a RootOperation message field by field, short-circuiting on the
 * first problem. Returns that Error, or null when the message is valid.
 */
export const RootOperationValidate = (o?: RootOperation, opts: RootOperationOptions = {}, path: string = 'RootOperation::root.'): Error | null => {
    if (opts.allOptionalsAreSet && opts.checkOptionalsAreSet) return new Error(path + ': only one of checkOptionalsAreSet or allOptionalNonDefault can be set for each message')
    if (o === null || typeof o !== 'object') return new Error(path + ': object is not an instance of an object or is null')
    const { amount, created_at_unix, op_id, op_type } = o
    if (typeof amount !== 'number') return new Error(`${path}.amount: is not a number`)
    if (opts.amount_CustomCheck && !opts.amount_CustomCheck(amount)) return new Error(`${path}.amount: custom check failed`)
    if (typeof created_at_unix !== 'number') return new Error(`${path}.created_at_unix: is not a number`)
    if (opts.created_at_unix_CustomCheck && !opts.created_at_unix_CustomCheck(created_at_unix)) return new Error(`${path}.created_at_unix: custom check failed`)
    if (typeof op_id !== 'string') return new Error(`${path}.op_id: is not a string`)
    if (opts.op_id_CustomCheck && !opts.op_id_CustomCheck(op_id)) return new Error(`${path}.op_id: custom check failed`)
    if (!enumCheckOperationType(op_type)) return new Error(`${path}.op_type: is not a valid OperationType`)
    if (opts.op_type_CustomCheck && !opts.op_type_CustomCheck(op_type)) return new Error(`${path}.op_type: custom check failed`)
    return null
}
export type RoutingEvent = {
event_type: string
failure_string: string
@ -2606,6 +2826,31 @@ export const SetMockInvoiceAsPaidRequestValidate = (o?: SetMockInvoiceAsPaidRequ
return null
}
// Request to update a channel policy: the new policy plus a target selector
// (all channels, or one identified by channel point).
export type UpdateChannelPolicyRequest = {
    policy: ChannelPolicy
    update: UpdateChannelPolicyRequest_update
}
export const UpdateChannelPolicyRequestOptionalFields: [] = []
export type UpdateChannelPolicyRequestOptions = OptionsBaseMessage & {
    checkOptionalsAreSet?: []
    policy_Options?: ChannelPolicyOptions
    update_Options?: UpdateChannelPolicyRequest_updateOptions
}
/**
 * Validates an UpdateChannelPolicyRequest by delegating to the nested
 * validators for `policy` and then `update`. Returns the first Error
 * reported by either, or null when the message is valid.
 */
export const UpdateChannelPolicyRequestValidate = (o?: UpdateChannelPolicyRequest, opts: UpdateChannelPolicyRequestOptions = {}, path: string = 'UpdateChannelPolicyRequest::root.'): Error | null => {
    if (opts.allOptionalsAreSet && opts.checkOptionalsAreSet) return new Error(path + ': only one of checkOptionalsAreSet or allOptionalNonDefault can be set for each message')
    if (o === null || typeof o !== 'object') return new Error(path + ': object is not an instance of an object or is null')
    // `??` only evaluates the `update` validator when `policy` validated clean,
    // preserving the original first-error-wins ordering.
    return ChannelPolicyValidate(o.policy, opts.policy_Options, `${path}.policy`)
        ?? UpdateChannelPolicyRequest_updateValidate(o.update, opts.update_Options, `${path}.update`)
}
export type UsageMetric = {
auth_in_nano: number
batch: boolean
@ -3073,6 +3318,42 @@ export const NPubLinking_stateValidate = (o?: NPubLinking_state, opts:NPubLinkin
if (unlinkedErr !== null) return unlinkedErr
break
default:
return new Error(path + ': unknown type '+ stringType)
}
return null
}
export enum UpdateChannelPolicyRequest_update_type {
    ALL = 'all',
    CHANNEL_POINT = 'channel_point',
}
/**
 * Returns true when `e` is a valid UpdateChannelPolicyRequest_update_type value.
 * BUG FIX: `for..in` over a compiled string enum yields its KEYS ('ALL',
 * 'CHANNEL_POINT'), not its VALUES ('all', 'channel_point'), so the previous
 * `e === v` comparison rejected every valid value (and accepted the key
 * strings). Compare against the enum values instead.
 */
export const enumCheckUpdateChannelPolicyRequest_update_type = (e?: UpdateChannelPolicyRequest_update_type): boolean => {
    for (const k in UpdateChannelPolicyRequest_update_type) {
        if (e === UpdateChannelPolicyRequest_update_type[k as keyof typeof UpdateChannelPolicyRequest_update_type]) return true
    }
    return false
}
// Discriminated union for the `update` oneof of UpdateChannelPolicyRequest:
// the policy applies either to ALL channels (Empty payload) or to a single
// channel identified by its channel-point string.
// NOTE(review): channel_point format is presumably "txid:output_index" — confirm against the consumer.
export type UpdateChannelPolicyRequest_update =
{type:UpdateChannelPolicyRequest_update_type.ALL, all:Empty}|
{type:UpdateChannelPolicyRequest_update_type.CHANNEL_POINT, channel_point:string}
// Per-variant validation hooks for UpdateChannelPolicyRequest_updateValidate.
export type UpdateChannelPolicyRequest_updateOptions = {
    all_Options?: EmptyOptions
    channel_point_CustomCheck?: (v: string) => boolean
}
export const UpdateChannelPolicyRequest_updateValidate = (o?: UpdateChannelPolicyRequest_update, opts:UpdateChannelPolicyRequest_updateOptions = {}, path: string = 'UpdateChannelPolicyRequest_update::root.'): Error | null => {
if (typeof o !== 'object' || o === null) return new Error(path + ': object is not an instance of an object or is null')
const stringType: string = o.type
switch (o.type) {
case UpdateChannelPolicyRequest_update_type.ALL:
const allErr = EmptyValidate(o.all, opts.all_Options, `${path}.all`)
if (allErr !== null) return allErr
break
case UpdateChannelPolicyRequest_update_type.CHANNEL_POINT:
if (typeof o.channel_point !== 'string') return new Error(`${path}.channel_point: is not a string`)
if (opts.channel_point_CustomCheck && !opts.channel_point_CustomCheck(o.channel_point)) return new Error(`${path}.channel_point: custom check failed`)
break
default:
return new Error(path + ': unknown type '+ stringType)

View file

@ -8,8 +8,8 @@ import type { BlockEpoch } from "./chainnotifier.js";
import type { SpendEvent } from "./chainnotifier.js";
import type { SpendRequest } from "./chainnotifier.js";
import { stackIntercept } from "@protobuf-ts/runtime-rpc";
import type { ConfEvent } from "./chainnotifier";
import type { ConfRequest } from "./chainnotifier";
import type { ConfEvent } from "./chainnotifier.js";
import type { ConfRequest } from "./chainnotifier.js";
import type { ServerStreamingCall } from "@protobuf-ts/runtime-rpc";
import type { RpcOptions } from "@protobuf-ts/runtime-rpc";
/**

View file

@ -4,6 +4,8 @@
import type { RpcTransport } from "@protobuf-ts/runtime-rpc";
import type { ServiceInfo } from "@protobuf-ts/runtime-rpc";
import { Lightning } from "./lightning.js";
import type { LookupHtlcResolutionResponse } from "./lightning.js";
import type { LookupHtlcResolutionRequest } from "./lightning.js";
import type { ListAliasesResponse } from "./lightning.js";
import type { ListAliasesRequest } from "./lightning.js";
import type { CustomMessage } from "./lightning.js";
@ -95,6 +97,8 @@ import type { PendingChannelsResponse } from "./lightning.js";
import type { PendingChannelsRequest } from "./lightning.js";
import type { GetRecoveryInfoResponse } from "./lightning.js";
import type { GetRecoveryInfoRequest } from "./lightning.js";
import type { GetDebugInfoResponse } from "./lightning.js";
import type { GetDebugInfoRequest } from "./lightning.js";
import type { GetInfoResponse } from "./lightning.js";
import type { GetInfoRequest } from "./lightning.js";
import type { PeerEvent } from "./lightning.js";
@ -130,12 +134,6 @@ import type { WalletBalanceResponse } from "./lightning.js";
import type { WalletBalanceRequest } from "./lightning.js";
import type { UnaryCall } from "@protobuf-ts/runtime-rpc";
import type { RpcOptions } from "@protobuf-ts/runtime-rpc";
import type { GetDebugInfoResponse } from "./lightning.js";
import type { GetDebugInfoRequest } from "./lightning.js";
import type { LookupHtlcResolutionResponse } from "./lightning.js";
import type { LookupHtlcResolutionRequest } from "./lightning.js";
//
// Comments in this file will be directly parsed into the API
// Documentation as descriptions of the associated method, message, or field.

394
proto/lnd/signer.client.ts Normal file
View file

@ -0,0 +1,394 @@
// @generated by protobuf-ts 2.8.1
// @generated from protobuf file "signer.proto" (package "signrpc", syntax proto3)
// tslint:disable
import type { RpcTransport } from "@protobuf-ts/runtime-rpc";
import type { ServiceInfo } from "@protobuf-ts/runtime-rpc";
import { Signer } from "./signer.js";
import type { MuSig2CleanupResponse } from "./signer.js";
import type { MuSig2CleanupRequest } from "./signer.js";
import type { MuSig2CombineSigResponse } from "./signer.js";
import type { MuSig2CombineSigRequest } from "./signer.js";
import type { MuSig2SignResponse } from "./signer.js";
import type { MuSig2SignRequest } from "./signer.js";
import type { MuSig2RegisterNoncesResponse } from "./signer.js";
import type { MuSig2RegisterNoncesRequest } from "./signer.js";
import type { MuSig2SessionResponse } from "./signer.js";
import type { MuSig2SessionRequest } from "./signer.js";
import type { MuSig2CombineKeysResponse } from "./signer.js";
import type { MuSig2CombineKeysRequest } from "./signer.js";
import type { SharedKeyResponse } from "./signer.js";
import type { SharedKeyRequest } from "./signer.js";
import type { VerifyMessageResp } from "./signer.js";
import type { VerifyMessageReq } from "./signer.js";
import type { SignMessageResp } from "./signer.js";
import type { SignMessageReq } from "./signer.js";
import type { InputScriptResp } from "./signer.js";
import { stackIntercept } from "@protobuf-ts/runtime-rpc";
import type { SignResp } from "./signer.js";
import type { SignReq } from "./signer.js";
import type { UnaryCall } from "@protobuf-ts/runtime-rpc";
import type { RpcOptions } from "@protobuf-ts/runtime-rpc";
/**
 * Signer is a service that gives access to the signing functionality of the
 * daemon's wallet.
 *
 * @generated from protobuf service signrpc.Signer
 */
// NOTE(review): generated by protobuf-ts 2.8.1 from signer.proto — regenerate
// from the proto definition rather than editing this interface by hand.
export interface ISignerClient {
    /**
     *
     * SignOutputRaw is a method that can be used to generated a signature for a
     * set of inputs/outputs to a transaction. Each request specifies details
     * concerning how the outputs should be signed, which keys they should be
     * signed with, and also any optional tweaks. The return value is a fixed
     * 64-byte signature (the same format as we use on the wire in Lightning).
     *
     * If we are unable to sign using the specified keys, then an error will be
     * returned.
     *
     * @generated from protobuf rpc: SignOutputRaw(signrpc.SignReq) returns (signrpc.SignResp);
     */
    signOutputRaw(input: SignReq, options?: RpcOptions): UnaryCall<SignReq, SignResp>;
    /**
     *
     * ComputeInputScript generates a complete InputIndex for the passed
     * transaction with the signature as defined within the passed SignDescriptor.
     * This method should be capable of generating the proper input script for both
     * regular p2wkh/p2tr outputs and p2wkh outputs nested within a regular p2sh
     * output.
     *
     * Note that when using this method to sign inputs belonging to the wallet,
     * the only items of the SignDescriptor that need to be populated are pkScript
     * in the TxOut field, the value in that same field, and finally the input
     * index.
     *
     * @generated from protobuf rpc: ComputeInputScript(signrpc.SignReq) returns (signrpc.InputScriptResp);
     */
    computeInputScript(input: SignReq, options?: RpcOptions): UnaryCall<SignReq, InputScriptResp>;
    /**
     *
     * SignMessage signs a message with the key specified in the key locator. The
     * returned signature is fixed-size LN wire format encoded.
     *
     * The main difference to SignMessage in the main RPC is that a specific key is
     * used to sign the message instead of the node identity private key.
     *
     * @generated from protobuf rpc: SignMessage(signrpc.SignMessageReq) returns (signrpc.SignMessageResp);
     */
    signMessage(input: SignMessageReq, options?: RpcOptions): UnaryCall<SignMessageReq, SignMessageResp>;
    /**
     *
     * VerifyMessage verifies a signature over a message using the public key
     * provided. The signature must be fixed-size LN wire format encoded.
     *
     * The main difference to VerifyMessage in the main RPC is that the public key
     * used to sign the message does not have to be a node known to the network.
     *
     * @generated from protobuf rpc: VerifyMessage(signrpc.VerifyMessageReq) returns (signrpc.VerifyMessageResp);
     */
    verifyMessage(input: VerifyMessageReq, options?: RpcOptions): UnaryCall<VerifyMessageReq, VerifyMessageResp>;
    /**
     *
     * DeriveSharedKey returns a shared secret key by performing Diffie-Hellman key
     * derivation between the ephemeral public key in the request and the node's
     * key specified in the key_desc parameter. Either a key locator or a raw
     * public key is expected in the key_desc, if neither is supplied, defaults to
     * the node's identity private key:
     * P_shared = privKeyNode * ephemeralPubkey
     * The resulting shared public key is serialized in the compressed format and
     * hashed with sha256, resulting in the final key length of 256bit.
     *
     * @generated from protobuf rpc: DeriveSharedKey(signrpc.SharedKeyRequest) returns (signrpc.SharedKeyResponse);
     */
    deriveSharedKey(input: SharedKeyRequest, options?: RpcOptions): UnaryCall<SharedKeyRequest, SharedKeyResponse>;
    /**
     *
     * MuSig2CombineKeys (experimental!) is a stateless helper RPC that can be used
     * to calculate the combined MuSig2 public key from a list of all participating
     * signers' public keys. This RPC is completely stateless and deterministic and
     * does not create any signing session. It can be used to determine the Taproot
     * public key that should be put in an on-chain output once all public keys are
     * known. A signing session is only needed later when that output should be
     * _spent_ again.
     *
     * NOTE: The MuSig2 BIP is not final yet and therefore this API must be
     * considered to be HIGHLY EXPERIMENTAL and subject to change in upcoming
     * releases. Backward compatibility is not guaranteed!
     *
     * @generated from protobuf rpc: MuSig2CombineKeys(signrpc.MuSig2CombineKeysRequest) returns (signrpc.MuSig2CombineKeysResponse);
     */
    muSig2CombineKeys(input: MuSig2CombineKeysRequest, options?: RpcOptions): UnaryCall<MuSig2CombineKeysRequest, MuSig2CombineKeysResponse>;
    /**
     *
     * MuSig2CreateSession (experimental!) creates a new MuSig2 signing session
     * using the local key identified by the key locator. The complete list of all
     * public keys of all signing parties must be provided, including the public
     * key of the local signing key. If nonces of other parties are already known,
     * they can be submitted as well to reduce the number of RPC calls necessary
     * later on.
     *
     * NOTE: The MuSig2 BIP is not final yet and therefore this API must be
     * considered to be HIGHLY EXPERIMENTAL and subject to change in upcoming
     * releases. Backward compatibility is not guaranteed!
     *
     * @generated from protobuf rpc: MuSig2CreateSession(signrpc.MuSig2SessionRequest) returns (signrpc.MuSig2SessionResponse);
     */
    muSig2CreateSession(input: MuSig2SessionRequest, options?: RpcOptions): UnaryCall<MuSig2SessionRequest, MuSig2SessionResponse>;
    /**
     *
     * MuSig2RegisterNonces (experimental!) registers one or more public nonces of
     * other signing participants for a session identified by its ID. This RPC can
     * be called multiple times until all nonces are registered.
     *
     * NOTE: The MuSig2 BIP is not final yet and therefore this API must be
     * considered to be HIGHLY EXPERIMENTAL and subject to change in upcoming
     * releases. Backward compatibility is not guaranteed!
     *
     * @generated from protobuf rpc: MuSig2RegisterNonces(signrpc.MuSig2RegisterNoncesRequest) returns (signrpc.MuSig2RegisterNoncesResponse);
     */
    muSig2RegisterNonces(input: MuSig2RegisterNoncesRequest, options?: RpcOptions): UnaryCall<MuSig2RegisterNoncesRequest, MuSig2RegisterNoncesResponse>;
    /**
     *
     * MuSig2Sign (experimental!) creates a partial signature using the local
     * signing key that was specified when the session was created. This can only
     * be called when all public nonces of all participants are known and have been
     * registered with the session. If this node isn't responsible for combining
     * all the partial signatures, then the cleanup flag should be set, indicating
     * that the session can be removed from memory once the signature was produced.
     *
     * NOTE: The MuSig2 BIP is not final yet and therefore this API must be
     * considered to be HIGHLY EXPERIMENTAL and subject to change in upcoming
     * releases. Backward compatibility is not guaranteed!
     *
     * @generated from protobuf rpc: MuSig2Sign(signrpc.MuSig2SignRequest) returns (signrpc.MuSig2SignResponse);
     */
    muSig2Sign(input: MuSig2SignRequest, options?: RpcOptions): UnaryCall<MuSig2SignRequest, MuSig2SignResponse>;
    /**
     *
     * MuSig2CombineSig (experimental!) combines the given partial signature(s)
     * with the local one, if it already exists. Once a partial signature of all
     * participants is registered, the final signature will be combined and
     * returned.
     *
     * NOTE: The MuSig2 BIP is not final yet and therefore this API must be
     * considered to be HIGHLY EXPERIMENTAL and subject to change in upcoming
     * releases. Backward compatibility is not guaranteed!
     *
     * @generated from protobuf rpc: MuSig2CombineSig(signrpc.MuSig2CombineSigRequest) returns (signrpc.MuSig2CombineSigResponse);
     */
    muSig2CombineSig(input: MuSig2CombineSigRequest, options?: RpcOptions): UnaryCall<MuSig2CombineSigRequest, MuSig2CombineSigResponse>;
    /**
     *
     * MuSig2Cleanup (experimental!) allows a caller to clean up a session early in
     * cases where it's obvious that the signing session won't succeed and the
     * resources can be released.
     *
     * NOTE: The MuSig2 BIP is not final yet and therefore this API must be
     * considered to be HIGHLY EXPERIMENTAL and subject to change in upcoming
     * releases. Backward compatibility is not guaranteed!
     *
     * @generated from protobuf rpc: MuSig2Cleanup(signrpc.MuSig2CleanupRequest) returns (signrpc.MuSig2CleanupResponse);
     */
    muSig2Cleanup(input: MuSig2CleanupRequest, options?: RpcOptions): UnaryCall<MuSig2CleanupRequest, MuSig2CleanupResponse>;
}
/**
 * Signer is a service that gives access to the signing functionality of the
 * daemon's wallet.
 *
 * Thin gRPC client: every method merges caller options into the transport's
 * defaults and forwards the call through stackIntercept as a unary RPC. The
 * numeric index into `this.methods` matches the rpc's position in signer.proto.
 *
 * @generated from protobuf service signrpc.Signer
 */
export class SignerClient implements ISignerClient, ServiceInfo {
    typeName = Signer.typeName;
    methods = Signer.methods;
    options = Signer.options;
    constructor(private readonly _transport: RpcTransport) {
    }
    /**
     *
     * SignOutputRaw is a method that can be used to generated a signature for a
     * set of inputs/outputs to a transaction. Each request specifies details
     * concerning how the outputs should be signed, which keys they should be
     * signed with, and also any optional tweaks. The return value is a fixed
     * 64-byte signature (the same format as we use on the wire in Lightning).
     *
     * If we are unable to sign using the specified keys, then an error will be
     * returned.
     *
     * @generated from protobuf rpc: SignOutputRaw(signrpc.SignReq) returns (signrpc.SignResp);
     */
    signOutputRaw(input: SignReq, options?: RpcOptions): UnaryCall<SignReq, SignResp> {
        const merged = this._transport.mergeOptions(options);
        return stackIntercept<SignReq, SignResp>("unary", this._transport, this.methods[0], merged, input);
    }
    /**
     *
     * ComputeInputScript generates a complete InputIndex for the passed
     * transaction with the signature as defined within the passed SignDescriptor.
     * This method should be capable of generating the proper input script for both
     * regular p2wkh/p2tr outputs and p2wkh outputs nested within a regular p2sh
     * output.
     *
     * Note that when using this method to sign inputs belonging to the wallet,
     * the only items of the SignDescriptor that need to be populated are pkScript
     * in the TxOut field, the value in that same field, and finally the input
     * index.
     *
     * @generated from protobuf rpc: ComputeInputScript(signrpc.SignReq) returns (signrpc.InputScriptResp);
     */
    computeInputScript(input: SignReq, options?: RpcOptions): UnaryCall<SignReq, InputScriptResp> {
        const merged = this._transport.mergeOptions(options);
        return stackIntercept<SignReq, InputScriptResp>("unary", this._transport, this.methods[1], merged, input);
    }
    /**
     *
     * SignMessage signs a message with the key specified in the key locator. The
     * returned signature is fixed-size LN wire format encoded.
     *
     * The main difference to SignMessage in the main RPC is that a specific key is
     * used to sign the message instead of the node identity private key.
     *
     * @generated from protobuf rpc: SignMessage(signrpc.SignMessageReq) returns (signrpc.SignMessageResp);
     */
    signMessage(input: SignMessageReq, options?: RpcOptions): UnaryCall<SignMessageReq, SignMessageResp> {
        const merged = this._transport.mergeOptions(options);
        return stackIntercept<SignMessageReq, SignMessageResp>("unary", this._transport, this.methods[2], merged, input);
    }
    /**
     *
     * VerifyMessage verifies a signature over a message using the public key
     * provided. The signature must be fixed-size LN wire format encoded.
     *
     * The main difference to VerifyMessage in the main RPC is that the public key
     * used to sign the message does not have to be a node known to the network.
     *
     * @generated from protobuf rpc: VerifyMessage(signrpc.VerifyMessageReq) returns (signrpc.VerifyMessageResp);
     */
    verifyMessage(input: VerifyMessageReq, options?: RpcOptions): UnaryCall<VerifyMessageReq, VerifyMessageResp> {
        const merged = this._transport.mergeOptions(options);
        return stackIntercept<VerifyMessageReq, VerifyMessageResp>("unary", this._transport, this.methods[3], merged, input);
    }
    /**
     *
     * DeriveSharedKey returns a shared secret key by performing Diffie-Hellman key
     * derivation between the ephemeral public key in the request and the node's
     * key specified in the key_desc parameter. Either a key locator or a raw
     * public key is expected in the key_desc, if neither is supplied, defaults to
     * the node's identity private key:
     * P_shared = privKeyNode * ephemeralPubkey
     * The resulting shared public key is serialized in the compressed format and
     * hashed with sha256, resulting in the final key length of 256bit.
     *
     * @generated from protobuf rpc: DeriveSharedKey(signrpc.SharedKeyRequest) returns (signrpc.SharedKeyResponse);
     */
    deriveSharedKey(input: SharedKeyRequest, options?: RpcOptions): UnaryCall<SharedKeyRequest, SharedKeyResponse> {
        const merged = this._transport.mergeOptions(options);
        return stackIntercept<SharedKeyRequest, SharedKeyResponse>("unary", this._transport, this.methods[4], merged, input);
    }
    /**
     *
     * MuSig2CombineKeys (experimental!) is a stateless helper RPC that can be used
     * to calculate the combined MuSig2 public key from a list of all participating
     * signers' public keys. This RPC is completely stateless and deterministic and
     * does not create any signing session. It can be used to determine the Taproot
     * public key that should be put in an on-chain output once all public keys are
     * known. A signing session is only needed later when that output should be
     * _spent_ again.
     *
     * NOTE: The MuSig2 BIP is not final yet and therefore this API must be
     * considered to be HIGHLY EXPERIMENTAL and subject to change in upcoming
     * releases. Backward compatibility is not guaranteed!
     *
     * @generated from protobuf rpc: MuSig2CombineKeys(signrpc.MuSig2CombineKeysRequest) returns (signrpc.MuSig2CombineKeysResponse);
     */
    muSig2CombineKeys(input: MuSig2CombineKeysRequest, options?: RpcOptions): UnaryCall<MuSig2CombineKeysRequest, MuSig2CombineKeysResponse> {
        const merged = this._transport.mergeOptions(options);
        return stackIntercept<MuSig2CombineKeysRequest, MuSig2CombineKeysResponse>("unary", this._transport, this.methods[5], merged, input);
    }
    /**
     *
     * MuSig2CreateSession (experimental!) creates a new MuSig2 signing session
     * using the local key identified by the key locator. The complete list of all
     * public keys of all signing parties must be provided, including the public
     * key of the local signing key. If nonces of other parties are already known,
     * they can be submitted as well to reduce the number of RPC calls necessary
     * later on.
     *
     * NOTE: The MuSig2 BIP is not final yet and therefore this API must be
     * considered to be HIGHLY EXPERIMENTAL and subject to change in upcoming
     * releases. Backward compatibility is not guaranteed!
     *
     * @generated from protobuf rpc: MuSig2CreateSession(signrpc.MuSig2SessionRequest) returns (signrpc.MuSig2SessionResponse);
     */
    muSig2CreateSession(input: MuSig2SessionRequest, options?: RpcOptions): UnaryCall<MuSig2SessionRequest, MuSig2SessionResponse> {
        const merged = this._transport.mergeOptions(options);
        return stackIntercept<MuSig2SessionRequest, MuSig2SessionResponse>("unary", this._transport, this.methods[6], merged, input);
    }
    /**
     *
     * MuSig2RegisterNonces (experimental!) registers one or more public nonces of
     * other signing participants for a session identified by its ID. This RPC can
     * be called multiple times until all nonces are registered.
     *
     * NOTE: The MuSig2 BIP is not final yet and therefore this API must be
     * considered to be HIGHLY EXPERIMENTAL and subject to change in upcoming
     * releases. Backward compatibility is not guaranteed!
     *
     * @generated from protobuf rpc: MuSig2RegisterNonces(signrpc.MuSig2RegisterNoncesRequest) returns (signrpc.MuSig2RegisterNoncesResponse);
     */
    muSig2RegisterNonces(input: MuSig2RegisterNoncesRequest, options?: RpcOptions): UnaryCall<MuSig2RegisterNoncesRequest, MuSig2RegisterNoncesResponse> {
        const merged = this._transport.mergeOptions(options);
        return stackIntercept<MuSig2RegisterNoncesRequest, MuSig2RegisterNoncesResponse>("unary", this._transport, this.methods[7], merged, input);
    }
    /**
     *
     * MuSig2Sign (experimental!) creates a partial signature using the local
     * signing key that was specified when the session was created. This can only
     * be called when all public nonces of all participants are known and have been
     * registered with the session. If this node isn't responsible for combining
     * all the partial signatures, then the cleanup flag should be set, indicating
     * that the session can be removed from memory once the signature was produced.
     *
     * NOTE: The MuSig2 BIP is not final yet and therefore this API must be
     * considered to be HIGHLY EXPERIMENTAL and subject to change in upcoming
     * releases. Backward compatibility is not guaranteed!
     *
     * @generated from protobuf rpc: MuSig2Sign(signrpc.MuSig2SignRequest) returns (signrpc.MuSig2SignResponse);
     */
    muSig2Sign(input: MuSig2SignRequest, options?: RpcOptions): UnaryCall<MuSig2SignRequest, MuSig2SignResponse> {
        const merged = this._transport.mergeOptions(options);
        return stackIntercept<MuSig2SignRequest, MuSig2SignResponse>("unary", this._transport, this.methods[8], merged, input);
    }
    /**
     *
     * MuSig2CombineSig (experimental!) combines the given partial signature(s)
     * with the local one, if it already exists. Once a partial signature of all
     * participants is registered, the final signature will be combined and
     * returned.
     *
     * NOTE: The MuSig2 BIP is not final yet and therefore this API must be
     * considered to be HIGHLY EXPERIMENTAL and subject to change in upcoming
     * releases. Backward compatibility is not guaranteed!
     *
     * @generated from protobuf rpc: MuSig2CombineSig(signrpc.MuSig2CombineSigRequest) returns (signrpc.MuSig2CombineSigResponse);
     */
    muSig2CombineSig(input: MuSig2CombineSigRequest, options?: RpcOptions): UnaryCall<MuSig2CombineSigRequest, MuSig2CombineSigResponse> {
        const merged = this._transport.mergeOptions(options);
        return stackIntercept<MuSig2CombineSigRequest, MuSig2CombineSigResponse>("unary", this._transport, this.methods[9], merged, input);
    }
    /**
     *
     * MuSig2Cleanup (experimental!) allows a caller to clean up a session early in
     * cases where it's obvious that the signing session won't succeed and the
     * resources can be released.
     *
     * NOTE: The MuSig2 BIP is not final yet and therefore this API must be
     * considered to be HIGHLY EXPERIMENTAL and subject to change in upcoming
     * releases. Backward compatibility is not guaranteed!
     *
     * @generated from protobuf rpc: MuSig2Cleanup(signrpc.MuSig2CleanupRequest) returns (signrpc.MuSig2CleanupResponse);
     */
    muSig2Cleanup(input: MuSig2CleanupRequest, options?: RpcOptions): UnaryCall<MuSig2CleanupRequest, MuSig2CleanupResponse> {
        const merged = this._transport.mergeOptions(options);
        return stackIntercept<MuSig2CleanupRequest, MuSig2CleanupResponse>("unary", this._transport, this.methods[10], merged, input);
    }
}

2502
proto/lnd/signer.ts Normal file

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,958 @@
// @generated by protobuf-ts 2.8.1
// @generated from protobuf file "walletkit.proto" (package "walletrpc", syntax proto3)
// tslint:disable
import type { RpcTransport } from "@protobuf-ts/runtime-rpc";
import type { ServiceInfo } from "@protobuf-ts/runtime-rpc";
import { WalletKit } from "./walletkit.js";
import type { FinalizePsbtResponse } from "./walletkit.js";
import type { FinalizePsbtRequest } from "./walletkit.js";
import type { SignPsbtResponse } from "./walletkit.js";
import type { SignPsbtRequest } from "./walletkit.js";
import type { FundPsbtResponse } from "./walletkit.js";
import type { FundPsbtRequest } from "./walletkit.js";
import type { LabelTransactionResponse } from "./walletkit.js";
import type { LabelTransactionRequest } from "./walletkit.js";
import type { ListSweepsResponse } from "./walletkit.js";
import type { ListSweepsRequest } from "./walletkit.js";
import type { BumpForceCloseFeeResponse } from "./walletkit.js";
import type { BumpForceCloseFeeRequest } from "./walletkit.js";
import type { BumpFeeResponse } from "./walletkit.js";
import type { BumpFeeRequest } from "./walletkit.js";
import type { PendingSweepsResponse } from "./walletkit.js";
import type { PendingSweepsRequest } from "./walletkit.js";
import type { EstimateFeeResponse } from "./walletkit.js";
import type { EstimateFeeRequest } from "./walletkit.js";
import type { SendOutputsResponse } from "./walletkit.js";
import type { SendOutputsRequest } from "./walletkit.js";
import type { RemoveTransactionResponse } from "./walletkit.js";
import type { PublishResponse } from "./walletkit.js";
import type { Transaction as Transaction$ } from "./walletkit.js";
import type { ImportTapscriptResponse } from "./walletkit.js";
import type { ImportTapscriptRequest } from "./walletkit.js";
import type { ImportPublicKeyResponse } from "./walletkit.js";
import type { ImportPublicKeyRequest } from "./walletkit.js";
import type { ImportAccountResponse } from "./walletkit.js";
import type { ImportAccountRequest } from "./walletkit.js";
import type { VerifyMessageWithAddrResponse } from "./walletkit.js";
import type { VerifyMessageWithAddrRequest } from "./walletkit.js";
import type { SignMessageWithAddrResponse } from "./walletkit.js";
import type { SignMessageWithAddrRequest } from "./walletkit.js";
import type { ListAddressesResponse } from "./walletkit.js";
import type { ListAddressesRequest } from "./walletkit.js";
import type { RequiredReserveResponse } from "./walletkit.js";
import type { RequiredReserveRequest } from "./walletkit.js";
import type { ListAccountsResponse } from "./walletkit.js";
import type { ListAccountsRequest } from "./walletkit.js";
import type { Transaction } from "./lightning.js";
import type { GetTransactionRequest } from "./walletkit.js";
import type { AddrResponse } from "./walletkit.js";
import type { AddrRequest } from "./walletkit.js";
import type { KeyLocator } from "./signer.js";
import type { KeyDescriptor } from "./signer.js";
import type { KeyReq } from "./walletkit.js";
import type { ListLeasesResponse } from "./walletkit.js";
import type { ListLeasesRequest } from "./walletkit.js";
import type { ReleaseOutputResponse } from "./walletkit.js";
import type { ReleaseOutputRequest } from "./walletkit.js";
import type { LeaseOutputResponse } from "./walletkit.js";
import type { LeaseOutputRequest } from "./walletkit.js";
import { stackIntercept } from "@protobuf-ts/runtime-rpc";
import type { ListUnspentResponse } from "./walletkit.js";
import type { ListUnspentRequest } from "./walletkit.js";
import type { UnaryCall } from "@protobuf-ts/runtime-rpc";
import type { RpcOptions } from "@protobuf-ts/runtime-rpc";
//
// Comments in this file will be directly parsed into the API
// Documentation as descriptions of the associated method, message, or field.
// These descriptions should go right above the definition of the object, and
// can be in either block or // comment format.
//
// An RPC method can be matched to an lncli command by placing a line in the
// beginning of the description in exactly the following format:
// lncli: `methodname`
//
// Failure to specify the exact name of the command will cause documentation
// generation to fail.
//
// More information on how exactly the gRPC documentation is generated from
// this proto file can be found here:
// https://github.com/lightninglabs/lightning-api
/**
* WalletKit is a service that gives access to the core functionalities of the
* daemon's wallet.
*
* @generated from protobuf service walletrpc.WalletKit
*/
export interface IWalletKitClient {
    /**
     *
     * ListUnspent returns a list of all utxos spendable by the wallet with a
     * number of confirmations between the specified minimum and maximum. By
     * default, all utxos are listed. To list only the unconfirmed utxos, set
     * the unconfirmed_only to true.
     *
     * @generated from protobuf rpc: ListUnspent(walletrpc.ListUnspentRequest) returns (walletrpc.ListUnspentResponse);
     */
    listUnspent(input: ListUnspentRequest, options?: RpcOptions): UnaryCall<ListUnspentRequest, ListUnspentResponse>;
    /**
     * lncli: `wallet leaseoutput`
     * LeaseOutput locks an output to the given ID, preventing it from being
     * available for any future coin selection attempts. The absolute time of the
     * lock's expiration is returned. The expiration of the lock can be extended by
     * successive invocations of this RPC. Outputs can be unlocked before their
     * expiration through `ReleaseOutput`.
     *
     * @generated from protobuf rpc: LeaseOutput(walletrpc.LeaseOutputRequest) returns (walletrpc.LeaseOutputResponse);
     */
    leaseOutput(input: LeaseOutputRequest, options?: RpcOptions): UnaryCall<LeaseOutputRequest, LeaseOutputResponse>;
    /**
     * lncli: `wallet releaseoutput`
     * ReleaseOutput unlocks an output, allowing it to be available for coin
     * selection if it remains unspent. The ID should match the one used to
     * originally lock the output.
     *
     * @generated from protobuf rpc: ReleaseOutput(walletrpc.ReleaseOutputRequest) returns (walletrpc.ReleaseOutputResponse);
     */
    releaseOutput(input: ReleaseOutputRequest, options?: RpcOptions): UnaryCall<ReleaseOutputRequest, ReleaseOutputResponse>;
    /**
     * lncli: `wallet listleases`
     * ListLeases lists all currently locked utxos.
     *
     * @generated from protobuf rpc: ListLeases(walletrpc.ListLeasesRequest) returns (walletrpc.ListLeasesResponse);
     */
    listLeases(input: ListLeasesRequest, options?: RpcOptions): UnaryCall<ListLeasesRequest, ListLeasesResponse>;
    /**
     *
     * DeriveNextKey attempts to derive the *next* key within the key family
     * (account in BIP43) specified. This method should return the next external
     * child within this branch.
     *
     * @generated from protobuf rpc: DeriveNextKey(walletrpc.KeyReq) returns (signrpc.KeyDescriptor);
     */
    deriveNextKey(input: KeyReq, options?: RpcOptions): UnaryCall<KeyReq, KeyDescriptor>;
    /**
     *
     * DeriveKey attempts to derive an arbitrary key specified by the passed
     * KeyLocator.
     *
     * @generated from protobuf rpc: DeriveKey(signrpc.KeyLocator) returns (signrpc.KeyDescriptor);
     */
    deriveKey(input: KeyLocator, options?: RpcOptions): UnaryCall<KeyLocator, KeyDescriptor>;
    /**
     *
     * NextAddr returns the next unused address within the wallet.
     *
     * @generated from protobuf rpc: NextAddr(walletrpc.AddrRequest) returns (walletrpc.AddrResponse);
     */
    nextAddr(input: AddrRequest, options?: RpcOptions): UnaryCall<AddrRequest, AddrResponse>;
    /**
     * lncli: `wallet gettx`
     * GetTransaction returns details for a transaction found in the wallet.
     *
     * @generated from protobuf rpc: GetTransaction(walletrpc.GetTransactionRequest) returns (lnrpc.Transaction);
     */
    getTransaction(input: GetTransactionRequest, options?: RpcOptions): UnaryCall<GetTransactionRequest, Transaction>;
    /**
     * lncli: `wallet accounts list`
     * ListAccounts retrieves all accounts belonging to the wallet by default. A
     * name and key scope filter can be provided to filter through all of the
     * wallet accounts and return only those matching.
     *
     * @generated from protobuf rpc: ListAccounts(walletrpc.ListAccountsRequest) returns (walletrpc.ListAccountsResponse);
     */
    listAccounts(input: ListAccountsRequest, options?: RpcOptions): UnaryCall<ListAccountsRequest, ListAccountsResponse>;
    /**
     * lncli: `wallet requiredreserve`
     * RequiredReserve returns the minimum amount of satoshis that should be kept
     * in the wallet in order to fee bump anchor channels if necessary. The value
     * scales with the number of public anchor channels but is capped at a maximum.
     *
     * @generated from protobuf rpc: RequiredReserve(walletrpc.RequiredReserveRequest) returns (walletrpc.RequiredReserveResponse);
     */
    requiredReserve(input: RequiredReserveRequest, options?: RpcOptions): UnaryCall<RequiredReserveRequest, RequiredReserveResponse>;
    /**
     * lncli: `wallet addresses list`
     * ListAddresses retrieves all the addresses along with their balance. An
     * account name filter can be provided to filter through all of the
     * wallet accounts and return the addresses of only those matching.
     *
     * @generated from protobuf rpc: ListAddresses(walletrpc.ListAddressesRequest) returns (walletrpc.ListAddressesResponse);
     */
    listAddresses(input: ListAddressesRequest, options?: RpcOptions): UnaryCall<ListAddressesRequest, ListAddressesResponse>;
    /**
     * lncli: `wallet addresses signmessage`
     * SignMessageWithAddr returns the compact signature (base64 encoded) created
     * with the private key of the provided address. This requires the address
     * to be solely based on a public key lock (no scripts). Obviously the internal
     * lnd wallet has to possess the private key of the address otherwise
     * an error is returned.
     *
     * This method aims to provide full compatibility with the bitcoin-core and
     * btcd implementation. Bitcoin-core's algorithm is not specified in a
     * BIP and only applicable for legacy addresses. This method enhances the
     * signing for additional address types: P2WKH, NP2WKH, P2TR.
     * For P2TR addresses this represents a special case. ECDSA is used to create
     * a compact signature which makes the public key of the signature recoverable.
     *
     * @generated from protobuf rpc: SignMessageWithAddr(walletrpc.SignMessageWithAddrRequest) returns (walletrpc.SignMessageWithAddrResponse);
     */
    signMessageWithAddr(input: SignMessageWithAddrRequest, options?: RpcOptions): UnaryCall<SignMessageWithAddrRequest, SignMessageWithAddrResponse>;
    /**
     * lncli: `wallet addresses verifymessage`
     * VerifyMessageWithAddr returns the validity and the recovered public key of
     * the provided compact signature (base64 encoded). The verification is
     * twofold. First the validity of the signature itself is checked and then
     * it is verified that the recovered public key of the signature equals
     * the public key of the provided address. There is no dependence on the
     * private key of the address therefore also external addresses are allowed
     * to verify signatures.
     * Supported address types are P2PKH, P2WKH, NP2WKH, P2TR.
     *
     * This method is the counterpart of the related signing method
     * (SignMessageWithAddr) and aims to provide full compatibility to
     * bitcoin-core's implementation. Although bitcoin-core/btcd only provide
     * this functionality for legacy addresses this function enhances it to
     * the address types: P2PKH, P2WKH, NP2WKH, P2TR.
     *
     * The verification for P2TR addresses is a special case and requires the
     * ECDSA compact signature to compare the recovered public key to the internal
     * taproot key. The compact ECDSA signature format was used because there
     * are still no known compact signature schemes for schnorr signatures.
     *
     * @generated from protobuf rpc: VerifyMessageWithAddr(walletrpc.VerifyMessageWithAddrRequest) returns (walletrpc.VerifyMessageWithAddrResponse);
     */
    verifyMessageWithAddr(input: VerifyMessageWithAddrRequest, options?: RpcOptions): UnaryCall<VerifyMessageWithAddrRequest, VerifyMessageWithAddrResponse>;
    /**
     * lncli: `wallet accounts import`
     * ImportAccount imports an account backed by an account extended public key.
     * The master key fingerprint denotes the fingerprint of the root key
     * corresponding to the account public key (also known as the key with
     * derivation path m/). This may be required by some hardware wallets for
     * proper identification and signing.
     *
     * The address type can usually be inferred from the key's version, but may be
     * required for certain keys to map them into the proper scope.
     *
     * For BIP-0044 keys, an address type must be specified as we intend to not
     * support importing BIP-0044 keys into the wallet using the legacy
     * pay-to-pubkey-hash (P2PKH) scheme. A nested witness address type will force
     * the standard BIP-0049 derivation scheme, while a witness address type will
     * force the standard BIP-0084 derivation scheme.
     *
     * For BIP-0049 keys, an address type must also be specified to make a
     * distinction between the standard BIP-0049 address schema (nested witness
     * pubkeys everywhere) and our own BIP-0049Plus address schema (nested pubkeys
     * externally, witness pubkeys internally).
     *
     * NOTE: Events (deposits/spends) for keys derived from an account will only be
     * detected by lnd if they happen after the import. Rescans to detect past
     * events will be supported later on.
     *
     * @generated from protobuf rpc: ImportAccount(walletrpc.ImportAccountRequest) returns (walletrpc.ImportAccountResponse);
     */
    importAccount(input: ImportAccountRequest, options?: RpcOptions): UnaryCall<ImportAccountRequest, ImportAccountResponse>;
    /**
     * lncli: `wallet accounts import-pubkey`
     * ImportPublicKey imports a public key as watch-only into the wallet. The
     * public key is converted into a simple address of the given type and that
     * address script is watched on chain. For Taproot keys, this will only watch
     * the BIP-0086 style output script. Use ImportTapscript for more advanced key
     * spend or script spend outputs.
     *
     * NOTE: Events (deposits/spends) for a key will only be detected by lnd if
     * they happen after the import. Rescans to detect past events will be
     * supported later on.
     *
     * @generated from protobuf rpc: ImportPublicKey(walletrpc.ImportPublicKeyRequest) returns (walletrpc.ImportPublicKeyResponse);
     */
    importPublicKey(input: ImportPublicKeyRequest, options?: RpcOptions): UnaryCall<ImportPublicKeyRequest, ImportPublicKeyResponse>;
    /**
     *
     * ImportTapscript imports a Taproot script and internal key and adds the
     * resulting Taproot output key as a watch-only output script into the wallet.
     * For BIP-0086 style Taproot keys (no root hash commitment and no script spend
     * path) use ImportPublicKey.
     *
     * NOTE: Events (deposits/spends) for a key will only be detected by lnd if
     * they happen after the import. Rescans to detect past events will be
     * supported later on.
     *
     * NOTE: Taproot keys imported through this RPC currently _cannot_ be used for
     * funding PSBTs. Only tracking the balance and UTXOs is currently supported.
     *
     * @generated from protobuf rpc: ImportTapscript(walletrpc.ImportTapscriptRequest) returns (walletrpc.ImportTapscriptResponse);
     */
    importTapscript(input: ImportTapscriptRequest, options?: RpcOptions): UnaryCall<ImportTapscriptRequest, ImportTapscriptResponse>;
    /**
     * lncli: `wallet publishtx`
     * PublishTransaction attempts to publish the passed transaction to the
     * network. Once this returns without an error, the wallet will continually
     * attempt to re-broadcast the transaction on start up, until it enters the
     * chain.
     *
     * @generated from protobuf rpc: PublishTransaction(walletrpc.Transaction) returns (walletrpc.PublishResponse);
     */
    publishTransaction(input: Transaction$, options?: RpcOptions): UnaryCall<Transaction$, PublishResponse>;
    /**
     * lncli: `wallet removetx`
     * RemoveTransaction attempts to remove the provided transaction from the
     * internal transaction store of the wallet.
     *
     * @generated from protobuf rpc: RemoveTransaction(walletrpc.GetTransactionRequest) returns (walletrpc.RemoveTransactionResponse);
     */
    removeTransaction(input: GetTransactionRequest, options?: RpcOptions): UnaryCall<GetTransactionRequest, RemoveTransactionResponse>;
    /**
     *
     * SendOutputs is similar to the existing sendmany call in Bitcoind, and
     * allows the caller to create a transaction that sends to several outputs at
     * once. This is ideal when wanting to batch create a set of transactions.
     *
     * @generated from protobuf rpc: SendOutputs(walletrpc.SendOutputsRequest) returns (walletrpc.SendOutputsResponse);
     */
    sendOutputs(input: SendOutputsRequest, options?: RpcOptions): UnaryCall<SendOutputsRequest, SendOutputsResponse>;
    /**
     * lncli: `wallet estimatefeerate`
     * EstimateFee attempts to query the internal fee estimator of the wallet to
     * determine the fee (in sat/kw) to attach to a transaction in order to
     * achieve the confirmation target.
     *
     * @generated from protobuf rpc: EstimateFee(walletrpc.EstimateFeeRequest) returns (walletrpc.EstimateFeeResponse);
     */
    estimateFee(input: EstimateFeeRequest, options?: RpcOptions): UnaryCall<EstimateFeeRequest, EstimateFeeResponse>;
    /**
     * lncli: `wallet pendingsweeps`
     * PendingSweeps returns lists of on-chain outputs that lnd is currently
     * attempting to sweep within its central batching engine. Outputs with similar
     * fee rates are batched together in order to sweep them within a single
     * transaction.
     *
     * NOTE: Some of the fields within PendingSweepsRequest are not guaranteed to
     * remain supported. This is an advanced API that depends on the internals of
     * the UtxoSweeper, so things may change.
     *
     * @generated from protobuf rpc: PendingSweeps(walletrpc.PendingSweepsRequest) returns (walletrpc.PendingSweepsResponse);
     */
    pendingSweeps(input: PendingSweepsRequest, options?: RpcOptions): UnaryCall<PendingSweepsRequest, PendingSweepsResponse>;
    /**
     * lncli: `wallet bumpfee`
     * BumpFee is an endpoint that allows users to interact with lnd's sweeper
     * directly. It takes an outpoint from an unconfirmed transaction and sends it
     * to the sweeper for potential fee bumping. Depending on whether the outpoint
     * has been registered in the sweeper (an existing input, e.g., an anchor
     * output) or not (a new input, e.g., an unconfirmed wallet utxo), this will
     * either be an RBF or CPFP attempt.
     *
     * When receiving an input, lnd's sweeper needs to understand its time
     * sensitivity to make economical fee bumps - internally a fee function is
     * created using the deadline and budget to guide the process. When the
     * deadline is approaching, the fee function will increase the fee rate and
     * perform an RBF.
     *
     * When a force close happens, all the outputs from the force closing
     * transaction will be registered in the sweeper. The sweeper will then handle
     * the creation, publish, and fee bumping of the sweeping transactions.
     * Every time a new block comes in, unless the sweeping transaction is
     * confirmed, an RBF is attempted. To interfere with this automatic process,
     * users can use BumpFee to specify customized fee rate, budget, deadline, and
     * whether the sweep should happen immediately. It's recommended to call
     * `ListSweeps` to understand the shape of the existing sweeping transaction
     * first - depending on the number of inputs in this transaction, the RBF
     * requirements can be quite different.
     *
     * This RPC also serves useful when wanting to perform a Child-Pays-For-Parent
     * (CPFP), where the child transaction pays for its parent's fee. This can be
     * done by specifying an outpoint within the low fee transaction that is under
     * the control of the wallet.
     *
     * @generated from protobuf rpc: BumpFee(walletrpc.BumpFeeRequest) returns (walletrpc.BumpFeeResponse);
     */
    bumpFee(input: BumpFeeRequest, options?: RpcOptions): UnaryCall<BumpFeeRequest, BumpFeeResponse>;
    /**
     * lncli: `wallet bumpforceclosefee`
     * BumpForceCloseFee is an endpoint that allows users to bump the fee of a
     * channel force close. This only works for channels with option_anchors.
     *
     * @generated from protobuf rpc: BumpForceCloseFee(walletrpc.BumpForceCloseFeeRequest) returns (walletrpc.BumpForceCloseFeeResponse);
     */
    bumpForceCloseFee(input: BumpForceCloseFeeRequest, options?: RpcOptions): UnaryCall<BumpForceCloseFeeRequest, BumpForceCloseFeeResponse>;
    /**
     * lncli: `wallet listsweeps`
     * ListSweeps returns a list of the sweep transactions our node has produced.
     * Note that these sweeps may not be confirmed yet, as we record sweeps on
     * broadcast, not confirmation.
     *
     * @generated from protobuf rpc: ListSweeps(walletrpc.ListSweepsRequest) returns (walletrpc.ListSweepsResponse);
     */
    listSweeps(input: ListSweepsRequest, options?: RpcOptions): UnaryCall<ListSweepsRequest, ListSweepsResponse>;
    /**
     * lncli: `wallet labeltx`
     * LabelTransaction adds a label to a transaction. If the transaction already
     * has a label the call will fail unless the overwrite bool is set. This will
     * overwrite the existing transaction label. Labels must not be empty, and
     * cannot exceed 500 characters.
     *
     * @generated from protobuf rpc: LabelTransaction(walletrpc.LabelTransactionRequest) returns (walletrpc.LabelTransactionResponse);
     */
    labelTransaction(input: LabelTransactionRequest, options?: RpcOptions): UnaryCall<LabelTransactionRequest, LabelTransactionResponse>;
    /**
     * lncli: `wallet psbt fund`
     * FundPsbt creates a fully populated PSBT that contains enough inputs to fund
     * the outputs specified in the template. There are three ways a user can
     * specify what we call the template (a list of inputs and outputs to use in
     * the PSBT): Either as a PSBT packet directly with no coin selection (using
     * the legacy "psbt" field), a PSBT with advanced coin selection support (using
     * the new "coin_select" field) or as a raw RPC message (using the "raw"
     * field).
     * The legacy "psbt" and "raw" modes, the following restrictions apply:
     * 1. If there are no inputs specified in the template, coin selection is
     * performed automatically.
     * 2. If the template does contain any inputs, it is assumed that full
     * coin selection happened externally and no additional inputs are added. If
     * the specified inputs aren't enough to fund the outputs with the given fee
     * rate, an error is returned.
     *
     * The new "coin_select" mode does not have these restrictions and allows the
     * user to specify a PSBT with inputs and outputs and still perform coin
     * selection on top of that.
     * For all modes this RPC requires any inputs that are specified to be locked
     * by the user (if they belong to this node in the first place).
     *
     * After either selecting or verifying the inputs, all input UTXOs are locked
     * with an internal app ID.
     *
     * NOTE: If this method returns without an error, it is the caller's
     * responsibility to either spend the locked UTXOs (by finalizing and then
     * publishing the transaction) or to unlock/release the locked UTXOs in case of
     * an error on the caller's side.
     *
     * @generated from protobuf rpc: FundPsbt(walletrpc.FundPsbtRequest) returns (walletrpc.FundPsbtResponse);
     */
    fundPsbt(input: FundPsbtRequest, options?: RpcOptions): UnaryCall<FundPsbtRequest, FundPsbtResponse>;
    /**
     *
     * SignPsbt expects a partial transaction with all inputs and outputs fully
     * declared and tries to sign all unsigned inputs that have all required fields
     * (UTXO information, BIP32 derivation information, witness or sig scripts)
     * set.
     * If no error is returned, the PSBT is ready to be given to the next signer or
     * to be finalized if lnd was the last signer.
     *
     * NOTE: This RPC only signs inputs (and only those it can sign), it does not
     * perform any other tasks (such as coin selection, UTXO locking or
     * input/output/fee value validation, PSBT finalization). Any input that is
     * incomplete will be skipped.
     *
     * @generated from protobuf rpc: SignPsbt(walletrpc.SignPsbtRequest) returns (walletrpc.SignPsbtResponse);
     */
    signPsbt(input: SignPsbtRequest, options?: RpcOptions): UnaryCall<SignPsbtRequest, SignPsbtResponse>;
    /**
     * lncli: `wallet psbt finalize`
     * FinalizePsbt expects a partial transaction with all inputs and outputs fully
     * declared and tries to sign all inputs that belong to the wallet. Lnd must be
     * the last signer of the transaction. That means, if there are any unsigned
     * non-witness inputs or inputs without UTXO information attached or inputs
     * without witness data that do not belong to lnd's wallet, this method will
     * fail. If no error is returned, the PSBT is ready to be extracted and the
     * final TX within to be broadcast.
     *
     * NOTE: This method does NOT publish the transaction once finalized. It is the
     * caller's responsibility to either publish the transaction on success or
     * unlock/release any locked UTXOs in case of an error in this method.
     *
     * @generated from protobuf rpc: FinalizePsbt(walletrpc.FinalizePsbtRequest) returns (walletrpc.FinalizePsbtResponse);
     */
    finalizePsbt(input: FinalizePsbtRequest, options?: RpcOptions): UnaryCall<FinalizePsbtRequest, FinalizePsbtResponse>;
}
//
// Comments in this file will be directly parsed into the API
// Documentation as descriptions of the associated method, message, or field.
// These descriptions should go right above the definition of the object, and
// can be in either block or // comment format.
//
// An RPC method can be matched to an lncli command by placing a line in the
// beginning of the description in exactly the following format:
// lncli: `methodname`
//
// Failure to specify the exact name of the command will cause documentation
// generation to fail.
//
// More information on how exactly the gRPC documentation is generated from
// this proto file can be found here:
// https://github.com/lightninglabs/lightning-api
/**
* WalletKit is a service that gives access to the core functionalities of the
* daemon's wallet.
*
* @generated from protobuf service walletrpc.WalletKit
*/
export class WalletKitClient implements IWalletKitClient, ServiceInfo {
typeName = WalletKit.typeName;
methods = WalletKit.methods;
options = WalletKit.options;
constructor(private readonly _transport: RpcTransport) {
}
/**
*
* ListUnspent returns a list of all utxos spendable by the wallet with a
* number of confirmations between the specified minimum and maximum. By
* default, all utxos are listed. To list only the unconfirmed utxos, set
* the unconfirmed_only to true.
*
* @generated from protobuf rpc: ListUnspent(walletrpc.ListUnspentRequest) returns (walletrpc.ListUnspentResponse);
*/
listUnspent(input: ListUnspentRequest, options?: RpcOptions): UnaryCall<ListUnspentRequest, ListUnspentResponse> {
const method = this.methods[0], opt = this._transport.mergeOptions(options);
return stackIntercept<ListUnspentRequest, ListUnspentResponse>("unary", this._transport, method, opt, input);
}
/**
* lncli: `wallet leaseoutput`
* LeaseOutput locks an output to the given ID, preventing it from being
* available for any future coin selection attempts. The absolute time of the
* lock's expiration is returned. The expiration of the lock can be extended by
* successive invocations of this RPC. Outputs can be unlocked before their
* expiration through `ReleaseOutput`.
*
* @generated from protobuf rpc: LeaseOutput(walletrpc.LeaseOutputRequest) returns (walletrpc.LeaseOutputResponse);
*/
leaseOutput(input: LeaseOutputRequest, options?: RpcOptions): UnaryCall<LeaseOutputRequest, LeaseOutputResponse> {
const method = this.methods[1], opt = this._transport.mergeOptions(options);
return stackIntercept<LeaseOutputRequest, LeaseOutputResponse>("unary", this._transport, method, opt, input);
}
/**
* lncli: `wallet releaseoutput`
* ReleaseOutput unlocks an output, allowing it to be available for coin
* selection if it remains unspent. The ID should match the one used to
* originally lock the output.
*
* @generated from protobuf rpc: ReleaseOutput(walletrpc.ReleaseOutputRequest) returns (walletrpc.ReleaseOutputResponse);
*/
releaseOutput(input: ReleaseOutputRequest, options?: RpcOptions): UnaryCall<ReleaseOutputRequest, ReleaseOutputResponse> {
const method = this.methods[2], opt = this._transport.mergeOptions(options);
return stackIntercept<ReleaseOutputRequest, ReleaseOutputResponse>("unary", this._transport, method, opt, input);
}
/**
* lncli: `wallet listleases`
* ListLeases lists all currently locked utxos.
*
* @generated from protobuf rpc: ListLeases(walletrpc.ListLeasesRequest) returns (walletrpc.ListLeasesResponse);
*/
listLeases(input: ListLeasesRequest, options?: RpcOptions): UnaryCall<ListLeasesRequest, ListLeasesResponse> {
const method = this.methods[3], opt = this._transport.mergeOptions(options);
return stackIntercept<ListLeasesRequest, ListLeasesResponse>("unary", this._transport, method, opt, input);
}
/**
*
* DeriveNextKey attempts to derive the *next* key within the key family
* (account in BIP43) specified. This method should return the next external
* child within this branch.
*
* @generated from protobuf rpc: DeriveNextKey(walletrpc.KeyReq) returns (signrpc.KeyDescriptor);
*/
deriveNextKey(input: KeyReq, options?: RpcOptions): UnaryCall<KeyReq, KeyDescriptor> {
const method = this.methods[4], opt = this._transport.mergeOptions(options);
return stackIntercept<KeyReq, KeyDescriptor>("unary", this._transport, method, opt, input);
}
/**
*
* DeriveKey attempts to derive an arbitrary key specified by the passed
* KeyLocator.
*
* @generated from protobuf rpc: DeriveKey(signrpc.KeyLocator) returns (signrpc.KeyDescriptor);
*/
deriveKey(input: KeyLocator, options?: RpcOptions): UnaryCall<KeyLocator, KeyDescriptor> {
const method = this.methods[5], opt = this._transport.mergeOptions(options);
return stackIntercept<KeyLocator, KeyDescriptor>("unary", this._transport, method, opt, input);
}
/**
*
* NextAddr returns the next unused address within the wallet.
*
* @generated from protobuf rpc: NextAddr(walletrpc.AddrRequest) returns (walletrpc.AddrResponse);
*/
nextAddr(input: AddrRequest, options?: RpcOptions): UnaryCall<AddrRequest, AddrResponse> {
const method = this.methods[6], opt = this._transport.mergeOptions(options);
return stackIntercept<AddrRequest, AddrResponse>("unary", this._transport, method, opt, input);
}
/**
* lncli: `wallet gettx`
* GetTransaction returns details for a transaction found in the wallet.
*
* @generated from protobuf rpc: GetTransaction(walletrpc.GetTransactionRequest) returns (lnrpc.Transaction);
*/
getTransaction(input: GetTransactionRequest, options?: RpcOptions): UnaryCall<GetTransactionRequest, Transaction> {
const method = this.methods[7], opt = this._transport.mergeOptions(options);
return stackIntercept<GetTransactionRequest, Transaction>("unary", this._transport, method, opt, input);
}
/**
* lncli: `wallet accounts list`
* ListAccounts retrieves all accounts belonging to the wallet by default. A
* name and key scope filter can be provided to filter through all of the
* wallet accounts and return only those matching.
*
* @generated from protobuf rpc: ListAccounts(walletrpc.ListAccountsRequest) returns (walletrpc.ListAccountsResponse);
*/
listAccounts(input: ListAccountsRequest, options?: RpcOptions): UnaryCall<ListAccountsRequest, ListAccountsResponse> {
const method = this.methods[8], opt = this._transport.mergeOptions(options);
return stackIntercept<ListAccountsRequest, ListAccountsResponse>("unary", this._transport, method, opt, input);
}
/**
* lncli: `wallet requiredreserve`
* RequiredReserve returns the minimum amount of satoshis that should be kept
* in the wallet in order to fee bump anchor channels if necessary. The value
* scales with the number of public anchor channels but is capped at a maximum.
*
* @generated from protobuf rpc: RequiredReserve(walletrpc.RequiredReserveRequest) returns (walletrpc.RequiredReserveResponse);
*/
requiredReserve(input: RequiredReserveRequest, options?: RpcOptions): UnaryCall<RequiredReserveRequest, RequiredReserveResponse> {
const method = this.methods[9], opt = this._transport.mergeOptions(options);
return stackIntercept<RequiredReserveRequest, RequiredReserveResponse>("unary", this._transport, method, opt, input);
}
/**
* lncli: `wallet addresses list`
* ListAddresses retrieves all the addresses along with their balance. An
* account name filter can be provided to filter through all of the
* wallet accounts and return the addresses of only those matching.
*
* @generated from protobuf rpc: ListAddresses(walletrpc.ListAddressesRequest) returns (walletrpc.ListAddressesResponse);
*/
listAddresses(input: ListAddressesRequest, options?: RpcOptions): UnaryCall<ListAddressesRequest, ListAddressesResponse> {
    // Merge per-call options with transport defaults, then dispatch the
    // unary call through the interceptor chain via method descriptor #10.
    const opt = this._transport.mergeOptions(options);
    return stackIntercept<ListAddressesRequest, ListAddressesResponse>("unary", this._transport, this.methods[10], opt, input);
}
/**
* lncli: `wallet addresses signmessage`
* SignMessageWithAddr returns the compact signature (base64 encoded) created
* with the private key of the provided address. This requires the address
* to be solely based on a public key lock (no scripts). Obviously the internal
* lnd wallet has to possess the private key of the address otherwise
* an error is returned.
*
* This method aims to provide full compatibility with the bitcoin-core and
* btcd implementation. Bitcoin-core's algorithm is not specified in a
* BIP and only applicable for legacy addresses. This method enhances the
* signing for additional address types: P2WKH, NP2WKH, P2TR.
* For P2TR addresses this represents a special case. ECDSA is used to create
* a compact signature which makes the public key of the signature recoverable.
*
* @generated from protobuf rpc: SignMessageWithAddr(walletrpc.SignMessageWithAddrRequest) returns (walletrpc.SignMessageWithAddrResponse);
*/
signMessageWithAddr(input: SignMessageWithAddrRequest, options?: RpcOptions): UnaryCall<SignMessageWithAddrRequest, SignMessageWithAddrResponse> {
    // Merge per-call options with transport defaults, then dispatch the
    // unary call through the interceptor chain via method descriptor #11.
    const opt = this._transport.mergeOptions(options);
    return stackIntercept<SignMessageWithAddrRequest, SignMessageWithAddrResponse>("unary", this._transport, this.methods[11], opt, input);
}
/**
* lncli: `wallet addresses verifymessage`
* VerifyMessageWithAddr returns the validity and the recovered public key of
* the provided compact signature (base64 encoded). The verification is
* twofold. First the validity of the signature itself is checked and then
* it is verified that the recovered public key of the signature equals
* the public key of the provided address. There is no dependence on the
* private key of the address therefore also external addresses are allowed
* to verify signatures.
* Supported address types are P2PKH, P2WKH, NP2WKH, P2TR.
*
* This method is the counterpart of the related signing method
* (SignMessageWithAddr) and aims to provide full compatibility to
* bitcoin-core's implementation. Although bitcoin-core/btcd only provide
* this functionality for legacy addresses this function enhances it to
* the address types: P2PKH, P2WKH, NP2WKH, P2TR.
*
* The verification for P2TR addresses is a special case and requires the
* ECDSA compact signature to compare the recovered public key to the internal
* taproot key. The compact ECDSA signature format was used because there
* are still no known compact signature schemes for schnorr signatures.
*
* @generated from protobuf rpc: VerifyMessageWithAddr(walletrpc.VerifyMessageWithAddrRequest) returns (walletrpc.VerifyMessageWithAddrResponse);
*/
verifyMessageWithAddr(input: VerifyMessageWithAddrRequest, options?: RpcOptions): UnaryCall<VerifyMessageWithAddrRequest, VerifyMessageWithAddrResponse> {
    // Merge per-call options with transport defaults, then dispatch the
    // unary call through the interceptor chain via method descriptor #12.
    const opt = this._transport.mergeOptions(options);
    return stackIntercept<VerifyMessageWithAddrRequest, VerifyMessageWithAddrResponse>("unary", this._transport, this.methods[12], opt, input);
}
/**
* lncli: `wallet accounts import`
* ImportAccount imports an account backed by an account extended public key.
* The master key fingerprint denotes the fingerprint of the root key
* corresponding to the account public key (also known as the key with
* derivation path m/). This may be required by some hardware wallets for
* proper identification and signing.
*
* The address type can usually be inferred from the key's version, but may be
* required for certain keys to map them into the proper scope.
*
* For BIP-0044 keys, an address type must be specified as we intend to not
* support importing BIP-0044 keys into the wallet using the legacy
* pay-to-pubkey-hash (P2PKH) scheme. A nested witness address type will force
* the standard BIP-0049 derivation scheme, while a witness address type will
* force the standard BIP-0084 derivation scheme.
*
* For BIP-0049 keys, an address type must also be specified to make a
* distinction between the standard BIP-0049 address schema (nested witness
* pubkeys everywhere) and our own BIP-0049Plus address schema (nested pubkeys
* externally, witness pubkeys internally).
*
* NOTE: Events (deposits/spends) for keys derived from an account will only be
* detected by lnd if they happen after the import. Rescans to detect past
* events will be supported later on.
*
* @generated from protobuf rpc: ImportAccount(walletrpc.ImportAccountRequest) returns (walletrpc.ImportAccountResponse);
*/
importAccount(input: ImportAccountRequest, options?: RpcOptions): UnaryCall<ImportAccountRequest, ImportAccountResponse> {
    // Merge per-call options with transport defaults, then dispatch the
    // unary call through the interceptor chain via method descriptor #13.
    const opt = this._transport.mergeOptions(options);
    return stackIntercept<ImportAccountRequest, ImportAccountResponse>("unary", this._transport, this.methods[13], opt, input);
}
/**
* lncli: `wallet accounts import-pubkey`
* ImportPublicKey imports a public key as watch-only into the wallet. The
* public key is converted into a simple address of the given type and that
* address script is watched on chain. For Taproot keys, this will only watch
* the BIP-0086 style output script. Use ImportTapscript for more advanced key
* spend or script spend outputs.
*
* NOTE: Events (deposits/spends) for a key will only be detected by lnd if
* they happen after the import. Rescans to detect past events will be
* supported later on.
*
* @generated from protobuf rpc: ImportPublicKey(walletrpc.ImportPublicKeyRequest) returns (walletrpc.ImportPublicKeyResponse);
*/
importPublicKey(input: ImportPublicKeyRequest, options?: RpcOptions): UnaryCall<ImportPublicKeyRequest, ImportPublicKeyResponse> {
    // Merge per-call options with transport defaults, then dispatch the
    // unary call through the interceptor chain via method descriptor #14.
    const opt = this._transport.mergeOptions(options);
    return stackIntercept<ImportPublicKeyRequest, ImportPublicKeyResponse>("unary", this._transport, this.methods[14], opt, input);
}
/**
*
* ImportTapscript imports a Taproot script and internal key and adds the
* resulting Taproot output key as a watch-only output script into the wallet.
* For BIP-0086 style Taproot keys (no root hash commitment and no script spend
* path) use ImportPublicKey.
*
* NOTE: Events (deposits/spends) for a key will only be detected by lnd if
* they happen after the import. Rescans to detect past events will be
* supported later on.
*
* NOTE: Taproot keys imported through this RPC currently _cannot_ be used for
* funding PSBTs. Only tracking the balance and UTXOs is currently supported.
*
* @generated from protobuf rpc: ImportTapscript(walletrpc.ImportTapscriptRequest) returns (walletrpc.ImportTapscriptResponse);
*/
importTapscript(input: ImportTapscriptRequest, options?: RpcOptions): UnaryCall<ImportTapscriptRequest, ImportTapscriptResponse> {
    // Merge per-call options with transport defaults, then dispatch the
    // unary call through the interceptor chain via method descriptor #15.
    const opt = this._transport.mergeOptions(options);
    return stackIntercept<ImportTapscriptRequest, ImportTapscriptResponse>("unary", this._transport, this.methods[15], opt, input);
}
/**
* lncli: `wallet publishtx`
* PublishTransaction attempts to publish the passed transaction to the
* network. Once this returns without an error, the wallet will continually
* attempt to re-broadcast the transaction on start up, until it enters the
* chain.
*
* @generated from protobuf rpc: PublishTransaction(walletrpc.Transaction) returns (walletrpc.PublishResponse);
*/
publishTransaction(input: Transaction$, options?: RpcOptions): UnaryCall<Transaction$, PublishResponse> {
    // Merge per-call options with transport defaults, then dispatch the
    // unary call through the interceptor chain via method descriptor #16.
    const opt = this._transport.mergeOptions(options);
    return stackIntercept<Transaction$, PublishResponse>("unary", this._transport, this.methods[16], opt, input);
}
/**
* lncli: `wallet removetx`
* RemoveTransaction attempts to remove the provided transaction from the
* internal transaction store of the wallet.
*
* @generated from protobuf rpc: RemoveTransaction(walletrpc.GetTransactionRequest) returns (walletrpc.RemoveTransactionResponse);
*/
removeTransaction(input: GetTransactionRequest, options?: RpcOptions): UnaryCall<GetTransactionRequest, RemoveTransactionResponse> {
    // Merge per-call options with transport defaults, then dispatch the
    // unary call through the interceptor chain via method descriptor #17.
    const opt = this._transport.mergeOptions(options);
    return stackIntercept<GetTransactionRequest, RemoveTransactionResponse>("unary", this._transport, this.methods[17], opt, input);
}
/**
*
* SendOutputs is similar to the existing sendmany call in Bitcoind, and
* allows the caller to create a transaction that sends to several outputs at
* once. This is ideal when wanting to batch create a set of transactions.
*
* @generated from protobuf rpc: SendOutputs(walletrpc.SendOutputsRequest) returns (walletrpc.SendOutputsResponse);
*/
sendOutputs(input: SendOutputsRequest, options?: RpcOptions): UnaryCall<SendOutputsRequest, SendOutputsResponse> {
    // Merge per-call options with transport defaults, then dispatch the
    // unary call through the interceptor chain via method descriptor #18.
    const opt = this._transport.mergeOptions(options);
    return stackIntercept<SendOutputsRequest, SendOutputsResponse>("unary", this._transport, this.methods[18], opt, input);
}
/**
* lncli: `wallet estimatefeerate`
* EstimateFee attempts to query the internal fee estimator of the wallet to
* determine the fee (in sat/kw) to attach to a transaction in order to
* achieve the confirmation target.
*
* @generated from protobuf rpc: EstimateFee(walletrpc.EstimateFeeRequest) returns (walletrpc.EstimateFeeResponse);
*/
estimateFee(input: EstimateFeeRequest, options?: RpcOptions): UnaryCall<EstimateFeeRequest, EstimateFeeResponse> {
    // Merge per-call options with transport defaults, then dispatch the
    // unary call through the interceptor chain via method descriptor #19.
    const opt = this._transport.mergeOptions(options);
    return stackIntercept<EstimateFeeRequest, EstimateFeeResponse>("unary", this._transport, this.methods[19], opt, input);
}
/**
* lncli: `wallet pendingsweeps`
* PendingSweeps returns lists of on-chain outputs that lnd is currently
* attempting to sweep within its central batching engine. Outputs with similar
* fee rates are batched together in order to sweep them within a single
* transaction.
*
* NOTE: Some of the fields within PendingSweepsRequest are not guaranteed to
* remain supported. This is an advanced API that depends on the internals of
* the UtxoSweeper, so things may change.
*
* @generated from protobuf rpc: PendingSweeps(walletrpc.PendingSweepsRequest) returns (walletrpc.PendingSweepsResponse);
*/
pendingSweeps(input: PendingSweepsRequest, options?: RpcOptions): UnaryCall<PendingSweepsRequest, PendingSweepsResponse> {
    // Merge per-call options with transport defaults, then dispatch the
    // unary call through the interceptor chain via method descriptor #20.
    const opt = this._transport.mergeOptions(options);
    return stackIntercept<PendingSweepsRequest, PendingSweepsResponse>("unary", this._transport, this.methods[20], opt, input);
}
/**
* lncli: `wallet bumpfee`
* BumpFee is an endpoint that allows users to interact with lnd's sweeper
* directly. It takes an outpoint from an unconfirmed transaction and sends it
* to the sweeper for potential fee bumping. Depending on whether the outpoint
* has been registered in the sweeper (an existing input, e.g., an anchor
* output) or not (a new input, e.g., an unconfirmed wallet utxo), this will
* either be an RBF or CPFP attempt.
*
* When receiving an input, lnd's sweeper needs to understand its time
* sensitivity to make economical fee bumps - internally a fee function is
* created using the deadline and budget to guide the process. When the
* deadline is approaching, the fee function will increase the fee rate and
* perform an RBF.
*
* When a force close happens, all the outputs from the force closing
* transaction will be registered in the sweeper. The sweeper will then handle
* the creation, publish, and fee bumping of the sweeping transactions.
* Every time a new block comes in, unless the sweeping transaction is
* confirmed, an RBF is attempted. To interfere with this automatic process,
* users can use BumpFee to specify customized fee rate, budget, deadline, and
* whether the sweep should happen immediately. It's recommended to call
* `ListSweeps` to understand the shape of the existing sweeping transaction
* first - depending on the number of inputs in this transaction, the RBF
* requirements can be quite different.
*
* This RPC also serves useful when wanting to perform a Child-Pays-For-Parent
* (CPFP), where the child transaction pays for its parent's fee. This can be
* done by specifying an outpoint within the low fee transaction that is under
* the control of the wallet.
*
* @generated from protobuf rpc: BumpFee(walletrpc.BumpFeeRequest) returns (walletrpc.BumpFeeResponse);
*/
bumpFee(input: BumpFeeRequest, options?: RpcOptions): UnaryCall<BumpFeeRequest, BumpFeeResponse> {
    // Merge per-call options with transport defaults, then dispatch the
    // unary call through the interceptor chain via method descriptor #21.
    const opt = this._transport.mergeOptions(options);
    return stackIntercept<BumpFeeRequest, BumpFeeResponse>("unary", this._transport, this.methods[21], opt, input);
}
/**
* lncli: `wallet bumpforceclosefee`
* BumpForceCloseFee is an endpoint that allows users to bump the fee of a
* channel force close. This only works for channels with option_anchors.
*
* @generated from protobuf rpc: BumpForceCloseFee(walletrpc.BumpForceCloseFeeRequest) returns (walletrpc.BumpForceCloseFeeResponse);
*/
bumpForceCloseFee(input: BumpForceCloseFeeRequest, options?: RpcOptions): UnaryCall<BumpForceCloseFeeRequest, BumpForceCloseFeeResponse> {
    // Merge per-call options with transport defaults, then dispatch the
    // unary call through the interceptor chain via method descriptor #22.
    const opt = this._transport.mergeOptions(options);
    return stackIntercept<BumpForceCloseFeeRequest, BumpForceCloseFeeResponse>("unary", this._transport, this.methods[22], opt, input);
}
/**
* lncli: `wallet listsweeps`
* ListSweeps returns a list of the sweep transactions our node has produced.
* Note that these sweeps may not be confirmed yet, as we record sweeps on
* broadcast, not confirmation.
*
* @generated from protobuf rpc: ListSweeps(walletrpc.ListSweepsRequest) returns (walletrpc.ListSweepsResponse);
*/
listSweeps(input: ListSweepsRequest, options?: RpcOptions): UnaryCall<ListSweepsRequest, ListSweepsResponse> {
    // Merge per-call options with transport defaults, then dispatch the
    // unary call through the interceptor chain via method descriptor #23.
    const opt = this._transport.mergeOptions(options);
    return stackIntercept<ListSweepsRequest, ListSweepsResponse>("unary", this._transport, this.methods[23], opt, input);
}
/**
* lncli: `wallet labeltx`
* LabelTransaction adds a label to a transaction. If the transaction already
* has a label the call will fail unless the overwrite bool is set. This will
* overwrite the existing transaction label. Labels must not be empty, and
* cannot exceed 500 characters.
*
* @generated from protobuf rpc: LabelTransaction(walletrpc.LabelTransactionRequest) returns (walletrpc.LabelTransactionResponse);
*/
labelTransaction(input: LabelTransactionRequest, options?: RpcOptions): UnaryCall<LabelTransactionRequest, LabelTransactionResponse> {
    // Merge per-call options with transport defaults, then dispatch the
    // unary call through the interceptor chain via method descriptor #24.
    const opt = this._transport.mergeOptions(options);
    return stackIntercept<LabelTransactionRequest, LabelTransactionResponse>("unary", this._transport, this.methods[24], opt, input);
}
/**
* lncli: `wallet psbt fund`
* FundPsbt creates a fully populated PSBT that contains enough inputs to fund
* the outputs specified in the template. There are three ways a user can
* specify what we call the template (a list of inputs and outputs to use in
* the PSBT): Either as a PSBT packet directly with no coin selection (using
* the legacy "psbt" field), a PSBT with advanced coin selection support (using
* the new "coin_select" field) or as a raw RPC message (using the "raw"
* field).
* For the legacy "psbt" and "raw" modes, the following restrictions apply:
* 1. If there are no inputs specified in the template, coin selection is
* performed automatically.
* 2. If the template does contain any inputs, it is assumed that full
* coin selection happened externally and no additional inputs are added. If
* the specified inputs aren't enough to fund the outputs with the given fee
* rate, an error is returned.
*
* The new "coin_select" mode does not have these restrictions and allows the
* user to specify a PSBT with inputs and outputs and still perform coin
* selection on top of that.
* For all modes this RPC requires any inputs that are specified to be locked
* by the user (if they belong to this node in the first place).
*
* After either selecting or verifying the inputs, all input UTXOs are locked
* with an internal app ID.
*
* NOTE: If this method returns without an error, it is the caller's
* responsibility to either spend the locked UTXOs (by finalizing and then
* publishing the transaction) or to unlock/release the locked UTXOs in case of
* an error on the caller's side.
*
* @generated from protobuf rpc: FundPsbt(walletrpc.FundPsbtRequest) returns (walletrpc.FundPsbtResponse);
*/
fundPsbt(input: FundPsbtRequest, options?: RpcOptions): UnaryCall<FundPsbtRequest, FundPsbtResponse> {
    // Merge per-call options with transport defaults, then dispatch the
    // unary call through the interceptor chain via method descriptor #25.
    const opt = this._transport.mergeOptions(options);
    return stackIntercept<FundPsbtRequest, FundPsbtResponse>("unary", this._transport, this.methods[25], opt, input);
}
/**
*
* SignPsbt expects a partial transaction with all inputs and outputs fully
* declared and tries to sign all unsigned inputs that have all required fields
* (UTXO information, BIP32 derivation information, witness or sig scripts)
* set.
* If no error is returned, the PSBT is ready to be given to the next signer or
* to be finalized if lnd was the last signer.
*
* NOTE: This RPC only signs inputs (and only those it can sign), it does not
* perform any other tasks (such as coin selection, UTXO locking or
* input/output/fee value validation, PSBT finalization). Any input that is
* incomplete will be skipped.
*
* @generated from protobuf rpc: SignPsbt(walletrpc.SignPsbtRequest) returns (walletrpc.SignPsbtResponse);
*/
signPsbt(input: SignPsbtRequest, options?: RpcOptions): UnaryCall<SignPsbtRequest, SignPsbtResponse> {
    // Merge per-call options with transport defaults, then dispatch the
    // unary call through the interceptor chain via method descriptor #26.
    const opt = this._transport.mergeOptions(options);
    return stackIntercept<SignPsbtRequest, SignPsbtResponse>("unary", this._transport, this.methods[26], opt, input);
}
/**
* lncli: `wallet psbt finalize`
* FinalizePsbt expects a partial transaction with all inputs and outputs fully
* declared and tries to sign all inputs that belong to the wallet. Lnd must be
* the last signer of the transaction. That means, if there are any unsigned
* non-witness inputs or inputs without UTXO information attached or inputs
* without witness data that do not belong to lnd's wallet, this method will
* fail. If no error is returned, the PSBT is ready to be extracted and the
* final TX within to be broadcast.
*
* NOTE: This method does NOT publish the transaction once finalized. It is the
* caller's responsibility to either publish the transaction on success or
* unlock/release any locked UTXOs in case of an error in this method.
*
* @generated from protobuf rpc: FinalizePsbt(walletrpc.FinalizePsbtRequest) returns (walletrpc.FinalizePsbtResponse);
*/
finalizePsbt(input: FinalizePsbtRequest, options?: RpcOptions): UnaryCall<FinalizePsbtRequest, FinalizePsbtResponse> {
    // Merge per-call options with transport defaults, then dispatch the
    // unary call through the interceptor chain via method descriptor #27.
    const opt = this._transport.mergeOptions(options);
    return stackIntercept<FinalizePsbtRequest, FinalizePsbtResponse>("unary", this._transport, this.methods[27], opt, input);
}
}

5641
proto/lnd/walletkit.ts Normal file

File diff suppressed because it is too large Load diff

709
proto/others/signer.proto Normal file
View file

@ -0,0 +1,709 @@
syntax = "proto3";
package signrpc;
option go_package = "github.com/lightningnetwork/lnd/lnrpc/signrpc";
// Signer is a service that gives access to the signing functionality of the
// daemon's wallet.
service Signer {
/*
SignOutputRaw is a method that can be used to generate a signature for a
set of inputs/outputs to a transaction. Each request specifies details
concerning how the outputs should be signed, which keys they should be
signed with, and also any optional tweaks. The return value is a fixed
64-byte signature (the same format as we use on the wire in Lightning).
If we are unable to sign using the specified keys, then an error will be
returned.
*/
rpc SignOutputRaw (SignReq) returns (SignResp);
/*
ComputeInputScript generates a complete InputIndex for the passed
transaction with the signature as defined within the passed SignDescriptor.
This method should be capable of generating the proper input script for both
regular p2wkh/p2tr outputs and p2wkh outputs nested within a regular p2sh
output.
Note that when using this method to sign inputs belonging to the wallet,
the only items of the SignDescriptor that need to be populated are pkScript
in the TxOut field, the value in that same field, and finally the input
index.
*/
rpc ComputeInputScript (SignReq) returns (InputScriptResp);
/*
SignMessage signs a message with the key specified in the key locator. The
returned signature is fixed-size LN wire format encoded.
The main difference to SignMessage in the main RPC is that a specific key is
used to sign the message instead of the node identity private key.
*/
rpc SignMessage (SignMessageReq) returns (SignMessageResp);
/*
VerifyMessage verifies a signature over a message using the public key
provided. The signature must be fixed-size LN wire format encoded.
The main difference to VerifyMessage in the main RPC is that the public key
used to sign the message does not have to be a node known to the network.
*/
rpc VerifyMessage (VerifyMessageReq) returns (VerifyMessageResp);
/*
DeriveSharedKey returns a shared secret key by performing Diffie-Hellman key
derivation between the ephemeral public key in the request and the node's
key specified in the key_desc parameter. Either a key locator or a raw
public key is expected in the key_desc, if neither is supplied, defaults to
the node's identity private key:
P_shared = privKeyNode * ephemeralPubkey
The resulting shared public key is serialized in the compressed format and
hashed with sha256, resulting in the final key length of 256bit.
*/
rpc DeriveSharedKey (SharedKeyRequest) returns (SharedKeyResponse);
/*
MuSig2CombineKeys (experimental!) is a stateless helper RPC that can be used
to calculate the combined MuSig2 public key from a list of all participating
signers' public keys. This RPC is completely stateless and deterministic and
does not create any signing session. It can be used to determine the Taproot
public key that should be put in an on-chain output once all public keys are
known. A signing session is only needed later when that output should be
_spent_ again.
NOTE: The MuSig2 BIP is not final yet and therefore this API must be
considered to be HIGHLY EXPERIMENTAL and subject to change in upcoming
releases. Backward compatibility is not guaranteed!
*/
rpc MuSig2CombineKeys (MuSig2CombineKeysRequest)
returns (MuSig2CombineKeysResponse);
/*
MuSig2CreateSession (experimental!) creates a new MuSig2 signing session
using the local key identified by the key locator. The complete list of all
public keys of all signing parties must be provided, including the public
key of the local signing key. If nonces of other parties are already known,
they can be submitted as well to reduce the number of RPC calls necessary
later on.
NOTE: The MuSig2 BIP is not final yet and therefore this API must be
considered to be HIGHLY EXPERIMENTAL and subject to change in upcoming
releases. Backward compatibility is not guaranteed!
*/
rpc MuSig2CreateSession (MuSig2SessionRequest)
returns (MuSig2SessionResponse);
/*
MuSig2RegisterNonces (experimental!) registers one or more public nonces of
other signing participants for a session identified by its ID. This RPC can
be called multiple times until all nonces are registered.
NOTE: The MuSig2 BIP is not final yet and therefore this API must be
considered to be HIGHLY EXPERIMENTAL and subject to change in upcoming
releases. Backward compatibility is not guaranteed!
*/
rpc MuSig2RegisterNonces (MuSig2RegisterNoncesRequest)
returns (MuSig2RegisterNoncesResponse);
/*
MuSig2Sign (experimental!) creates a partial signature using the local
signing key that was specified when the session was created. This can only
be called when all public nonces of all participants are known and have been
registered with the session. If this node isn't responsible for combining
all the partial signatures, then the cleanup flag should be set, indicating
that the session can be removed from memory once the signature was produced.
NOTE: The MuSig2 BIP is not final yet and therefore this API must be
considered to be HIGHLY EXPERIMENTAL and subject to change in upcoming
releases. Backward compatibility is not guaranteed!
*/
rpc MuSig2Sign (MuSig2SignRequest) returns (MuSig2SignResponse);
/*
MuSig2CombineSig (experimental!) combines the given partial signature(s)
with the local one, if it already exists. Once a partial signature of all
participants is registered, the final signature will be combined and
returned.
NOTE: The MuSig2 BIP is not final yet and therefore this API must be
considered to be HIGHLY EXPERIMENTAL and subject to change in upcoming
releases. Backward compatibility is not guaranteed!
*/
rpc MuSig2CombineSig (MuSig2CombineSigRequest)
returns (MuSig2CombineSigResponse);
/*
MuSig2Cleanup (experimental!) allows a caller to clean up a session early in
cases where it's obvious that the signing session won't succeed and the
resources can be released.
NOTE: The MuSig2 BIP is not final yet and therefore this API must be
considered to be HIGHLY EXPERIMENTAL and subject to change in upcoming
releases. Backward compatibility is not guaranteed!
*/
rpc MuSig2Cleanup (MuSig2CleanupRequest) returns (MuSig2CleanupResponse);
}
// KeyLocator identifies a key by its derivation coordinates (family and
// index) within the wallet's key tree.
message KeyLocator {
// The family of key being identified.
int32 key_family = 1;
// The precise index of the key being identified.
int32 key_index = 2;
}
// KeyDescriptor identifies a signing key either by its raw public key bytes
// or by a KeyLocator; exactly one of the two fields must be set.
message KeyDescriptor {
/*
The raw bytes of the public key in the key pair being identified. Either
this or the KeyLocator must be specified.
*/
bytes raw_key_bytes = 1;
/*
The key locator that identifies which private key to use for signing.
Either this or the raw bytes of the target public key must be specified.
*/
KeyLocator key_loc = 2;
}
// TxOut describes the transaction output being spent: its amount and the
// script that encumbers it.
message TxOut {
// The value of the output being spent.
int64 value = 1;
// The script of the output being spent.
bytes pk_script = 2;
}
// SignMethod selects the signing scheme applied to an input: SegWit v0, or
// one of the taproot (SegWit v1) key-spend/script-spend variants.
enum SignMethod {
/*
Specifies that a SegWit v0 (p2wkh, np2wkh, p2wsh) input script should be
signed.
*/
SIGN_METHOD_WITNESS_V0 = 0;
/*
Specifies that a SegWit v1 (p2tr) input should be signed by using the
BIP0086 method (commit to internal key only).
*/
SIGN_METHOD_TAPROOT_KEY_SPEND_BIP0086 = 1;
/*
Specifies that a SegWit v1 (p2tr) input should be signed by using a given
taproot hash to commit to in addition to the internal key.
*/
SIGN_METHOD_TAPROOT_KEY_SPEND = 2;
/*
Specifies that a SegWit v1 (p2tr) input should be spent using the script
path and that a specific leaf script should be signed for.
*/
SIGN_METHOD_TAPROOT_SCRIPT_SPEND = 3;
}
// SignDescriptor bundles everything the signer needs to produce a signature
// for one transaction input: the key to use, optional key tweaks, the script
// and output being spent, the sighash type, the input index, and the signing
// method.
message SignDescriptor {
/*
A descriptor that precisely describes *which* key to use for signing. This
may provide the raw public key directly, or require the Signer to re-derive
the key according to the populated derivation path.
Note that if the key descriptor was obtained through walletrpc.DeriveKey,
then the key locator MUST always be provided, since the derived keys are not
persisted unlike with DeriveNextKey.
*/
KeyDescriptor key_desc = 1;
/*
A scalar value that will be added to the private key corresponding to the
above public key to obtain the private key to be used to sign this input.
This value is typically derived via the following computation:
* derivedKey = privkey + sha256(perCommitmentPoint || pubKey) mod N
*/
bytes single_tweak = 2;
/*
A private key that will be used in combination with its corresponding
private key to derive the private key that is to be used to sign the target
input. Within the Lightning protocol, this value is typically the
commitment secret from a previously revoked commitment transaction. This
value is in combination with two hash values, and the original private key
to derive the private key to be used when signing.
* k = (privKey*sha256(pubKey || tweakPub) +
tweakPriv*sha256(tweakPub || pubKey)) mod N
*/
bytes double_tweak = 3;
/*
The 32 byte input to the taproot tweak derivation that is used to derive
the output key from an internal key: outputKey = internalKey +
tagged_hash("tapTweak", internalKey || tapTweak).
When doing a BIP 86 spend, this field can be an empty byte slice.
When doing a normal key path spend, with the output key committing to an
actual script root, then this field should be: the tapscript root hash.
*/
bytes tap_tweak = 10;
/*
The full script required to properly redeem the output. This field will
only be populated if a p2tr, p2wsh or a p2sh output is being signed. If a
taproot script path spend is being attempted, then this should be the raw
leaf script.
*/
bytes witness_script = 4;
/*
A description of the output being spent. The value and script MUST be
provided.
*/
TxOut output = 5;
/*
The target sighash type that should be used when generating the final
sighash, and signature.
*/
uint32 sighash = 7;
/*
The target input within the transaction that should be signed.
*/
int32 input_index = 8;
/*
The sign method specifies how the input should be signed. Depending on the
method, either the tap_tweak, witness_script or both need to be specified.
Defaults to SegWit v0 signing to be backward compatible with older RPC
clients.
*/
SignMethod sign_method = 9;
}
// SignReq asks the signer to sign a set of inputs of the given raw
// transaction, one SignDescriptor per input to be signed.
message SignReq {
// The raw bytes of the transaction to be signed.
bytes raw_tx_bytes = 1;
// A set of sign descriptors, for each input to be signed.
repeated SignDescriptor sign_descs = 2;
/*
The full list of UTXO information for each of the inputs being spent. This
is required when spending one or more taproot (SegWit v1) outputs.
*/
repeated TxOut prev_outputs = 3;
}
// SignResp carries the signatures produced for a SignReq.
message SignResp {
/*
A set of signatures realized in a fixed 64-byte format ordered in ascending
input order.
*/
repeated bytes raw_sigs = 1;
}
// InputScript carries the fully materialized witness stack (and, for nested
// p2sh witness programs, the sig script) for a single input.
message InputScript {
// The serialized witness stack for the specified input.
repeated bytes witness = 1;
/*
The optional sig script for the specified witness that will only be set if
the input specified is a nested p2sh witness program.
*/
bytes sig_script = 2;
}
// InputScriptResp is the ComputeInputScript response.
message InputScriptResp {
// The set of fully valid input scripts requested.
repeated InputScript input_scripts = 1;
}
// SignMessageReq asks for a signature over an arbitrary message with the key
// identified by key_loc, with optional hashing/format variations.
message SignMessageReq {
/*
The message to be signed. When using REST, this field must be encoded as
base64.
*/
bytes msg = 1;
// The key locator that identifies which key to use for signing.
KeyLocator key_loc = 2;
// Double-SHA256 hash instead of just the default single round.
bool double_hash = 3;
/*
Use the compact (pubkey recoverable) format instead of the raw lnwire
format. This option cannot be used with Schnorr signatures.
*/
bool compact_sig = 4;
/*
Use Schnorr signature. This option cannot be used with compact format.
*/
bool schnorr_sig = 5;
/*
The optional Taproot tweak bytes to apply to the private key before creating
a Schnorr signature. The private key is tweaked as described in BIP-341:
privKey + h_tapTweak(internalKey || tapTweak)
*/
bytes schnorr_sig_tap_tweak = 6;
/*
An optional tag that can be provided when taking a tagged hash of a
message. This option can only be used when schnorr_sig is true.
*/
bytes tag = 7;
}
// SignMessageResp carries the signature produced for a SignMessageReq.
message SignMessageResp {
/*
The signature for the given message in the fixed-size LN wire format.
*/
bytes signature = 1;
}
// VerifyMessageReq asks the daemon to verify a signature over a message
// against a given public key (ECDSA or, if is_schnorr_sig is set, BIP-340
// Schnorr).
message VerifyMessageReq {
// The message over which the signature is to be verified. When using
// REST, this field must be encoded as base64.
bytes msg = 1;
/*
The fixed-size LN wire encoded signature to be verified over the given
message. When using REST, this field must be encoded as base64.
*/
bytes signature = 2;
/*
The public key the signature has to be valid for. When using REST, this
field must be encoded as base64. If the is_schnorr_sig option is true, then
the public key is expected to be in the 32-byte x-only serialization
according to BIP-340.
*/
bytes pubkey = 3;
/*
Specifies if the signature is a Schnorr signature.
*/
bool is_schnorr_sig = 4;
/*
An optional tag that can be provided when taking a tagged hash of a
message. This option can only be used when is_schnorr_sig is true.
*/
bytes tag = 5;
}
// VerifyMessageResp reports the outcome of a VerifyMessage call.
message VerifyMessageResp {
// Whether the signature was valid over the given message.
bool valid = 1;
}
// SharedKeyRequest asks for an ECDH shared secret between the supplied
// ephemeral public key and a local key (key_desc, or the node identity key
// if unset).
message SharedKeyRequest {
// The ephemeral public key to use for the DH key derivation.
bytes ephemeral_pubkey = 1;
/*
Deprecated. The optional key locator of the local key that should be used.
If this parameter is not set then the node's identity private key will be
used.
*/
KeyLocator key_loc = 2 [deprecated = true];
/*
A key descriptor describes the key used for performing ECDH. Either a key
locator or a raw public key is expected, if neither is supplied, defaults to
the node's identity private key.
*/
KeyDescriptor key_desc = 3;
}
message SharedKeyResponse {
// The shared public key, hashed with sha256.
bytes shared_key = 1;
}
// A generic tweak to apply to a public key during MuSig2 key aggregation.
message TweakDesc {
/*
Tweak is the 32-byte value that will modify the public key.
*/
bytes tweak = 1;
/*
Specifies if the target key should be converted to an x-only public key
before tweaking. If true, then the public key will be mapped to an x-only
key before the tweaking operation is applied.
*/
bool is_x_only = 2;
}
// A taproot-specific tweak (BIP-0341 commitment) for a MuSig2 combined key.
message TaprootTweakDesc {
/*
The root hash of the tapscript tree if a script path is committed to. If
the MuSig2 key put on chain doesn't also commit to a script path (BIP-0086
key spend only), then this needs to be empty and the key_spend_only field
below must be set to true. This is required because gRPC cannot
differentiate between a zero-size byte slice and a nil byte slice (both
would be serialized the same way). So the extra boolean is required.
*/
bytes script_root = 1;
/*
Indicates that the above script_root is expected to be empty because this
is a BIP-0086 key spend only commitment where only the internal key is
committed to instead of also including a script root hash.
*/
bool key_spend_only = 2;
}
// The MuSig2 BIP draft version a session or key aggregation should use.
enum MuSig2Version {
/*
The default value on the RPC is zero for enums so we need to represent an
invalid/undefined version by default to make sure clients upgrade their
software to set the version explicitly.
*/
MUSIG2_VERSION_UNDEFINED = 0;
/*
The version of MuSig2 that lnd 0.15.x shipped with, which corresponds to the
version v0.4.0 of the MuSig2 BIP draft.
*/
MUSIG2_VERSION_V040 = 1;
/*
The current version of MuSig2 which corresponds to the version v1.0.0rc2 of
the MuSig2 BIP draft.
*/
MUSIG2_VERSION_V100RC2 = 2;
}
// Request for the MuSig2CombineKeys RPC: aggregate the given signer keys
// (with optional tweaks) without starting a signing session.
message MuSig2CombineKeysRequest {
/*
A list of all public keys (serialized in 32-byte x-only format for v0.4.0
and 33-byte compressed format for v1.0.0rc2!) participating in the signing
session. The list will always be sorted lexicographically internally. This
must include the local key which is described by the above key_loc.
*/
repeated bytes all_signer_pubkeys = 1;
/*
A series of optional generic tweaks to be applied to the aggregated
public key.
*/
repeated TweakDesc tweaks = 2;
/*
An optional taproot specific tweak that must be specified if the MuSig2
combined key will be used as the main taproot key of a taproot output
on-chain.
*/
TaprootTweakDesc taproot_tweak = 3;
/*
The mandatory version of the MuSig2 BIP draft to use. This is necessary to
differentiate between the changes that were made to the BIP while this
experimental RPC was already released. Some of those changes affect how the
combined key and nonces are created.
*/
MuSig2Version version = 4;
}
// Reply to the MuSig2CombineKeys RPC.
message MuSig2CombineKeysResponse {
/*
The combined public key (in the 32-byte x-only format) with all tweaks
applied to it. If a taproot tweak is specified, this corresponds to the
taproot key that can be put into the on-chain output.
*/
bytes combined_key = 1;
/*
The raw combined public key (in the 32-byte x-only format) before any tweaks
are applied to it. If a taproot tweak is specified, this corresponds to the
internal key that needs to be put into the witness if the script spend path
is used.
*/
bytes taproot_internal_key = 2;
/*
The version of the MuSig2 BIP that was used to combine the keys.
*/
// NOTE: field number 3 is intentionally not used in this message.
MuSig2Version version = 4;
}
// Request for the MuSig2CreateSession RPC: start a new signing session for
// the given set of signer keys.
message MuSig2SessionRequest {
/*
The key locator that identifies which key to use for signing.
*/
KeyLocator key_loc = 1;
/*
A list of all public keys (serialized in 32-byte x-only format for v0.4.0
and 33-byte compressed format for v1.0.0rc2!) participating in the signing
session. The list will always be sorted lexicographically internally. This
must include the local key which is described by the above key_loc.
*/
repeated bytes all_signer_pubkeys = 2;
/*
An optional list of all public nonces of other signing participants that
might already be known.
*/
repeated bytes other_signer_public_nonces = 3;
/*
A series of optional generic tweaks to be applied to the aggregated
public key.
*/
repeated TweakDesc tweaks = 4;
/*
An optional taproot specific tweak that must be specified if the MuSig2
combined key will be used as the main taproot key of a taproot output
on-chain.
*/
TaprootTweakDesc taproot_tweak = 5;
/*
The mandatory version of the MuSig2 BIP draft to use. This is necessary to
differentiate between the changes that were made to the BIP while this
experimental RPC was already released. Some of those changes affect how the
combined key and nonces are created.
*/
MuSig2Version version = 6;
/*
A set of pre generated secret local nonces to use in the musig2 session.
This field is optional. This can be useful for protocols that need to send
nonces ahead of time before the set of signer keys are known. This value
MUST be 97 bytes and be the concatenation of two CSPRNG generated 32 byte
values and local public key used for signing as specified in the key_loc
field.
*/
bytes pregenerated_local_nonce = 7;
}
// Reply to the MuSig2CreateSession RPC.
message MuSig2SessionResponse {
/*
The unique ID that represents this signing session. A session can be used
for producing a signature a single time. If the signing fails for any
reason, a new session with the same participants needs to be created.
*/
bytes session_id = 1;
/*
The combined public key (in the 32-byte x-only format) with all tweaks
applied to it. If a taproot tweak is specified, this corresponds to the
taproot key that can be put into the on-chain output.
*/
bytes combined_key = 2;
/*
The raw combined public key (in the 32-byte x-only format) before any tweaks
are applied to it. If a taproot tweak is specified, this corresponds to the
internal key that needs to be put into the witness if the script spend path
is used.
*/
bytes taproot_internal_key = 3;
/*
The two public nonces the local signer uses, combined into a single value
of 66 bytes. Can be split into the two 33-byte points to get the individual
nonces.
*/
bytes local_public_nonces = 4;
/*
Indicates whether all nonces required to start the signing process are known
now.
*/
bool have_all_nonces = 5;
/*
The version of the MuSig2 BIP that was used to create the session.
*/
MuSig2Version version = 6;
}
// Request for the MuSig2RegisterNonces RPC: add other participants' public
// nonces to an existing session.
message MuSig2RegisterNoncesRequest {
/*
The unique ID of the signing session those nonces should be registered with.
*/
bytes session_id = 1;
/*
A list of all public nonces of other signing participants that should be
registered.
*/
// NOTE: field number 2 is intentionally not used in this message.
repeated bytes other_signer_public_nonces = 3;
}
// Reply to the MuSig2RegisterNonces RPC.
message MuSig2RegisterNoncesResponse {
/*
Indicates whether all nonces required to start the signing process are known
now.
*/
bool have_all_nonces = 1;
}
// Request for the MuSig2Sign RPC: create our partial signature for a session.
message MuSig2SignRequest {
/*
The unique ID of the signing session to use for signing.
*/
bytes session_id = 1;
/*
The 32-byte SHA256 digest of the message to sign.
*/
bytes message_digest = 2;
/*
Cleanup indicates that after signing, the session state can be cleaned up,
since another participant is going to be responsible for combining the
partial signatures.
*/
bool cleanup = 3;
}
// Reply to the MuSig2Sign RPC.
message MuSig2SignResponse {
/*
The partial signature created by the local signer.
*/
bytes local_partial_signature = 1;
}
// Request for the MuSig2CombineSig RPC: fold other participants' partial
// signatures into the session.
message MuSig2CombineSigRequest {
/*
The unique ID of the signing session to combine the signatures for.
*/
bytes session_id = 1;
/*
The list of all other participants' partial signatures to add to the current
session.
*/
repeated bytes other_partial_signatures = 2;
}
// Reply to the MuSig2CombineSig RPC.
message MuSig2CombineSigResponse {
/*
Indicates whether all partial signatures required to create a final, full
signature are known yet. If this is true, then the final_signature field is
set, otherwise it is empty.
*/
bool have_all_signatures = 1;
/*
The final, full signature that is valid for the combined public key.
*/
bytes final_signature = 2;
}
// Request for the MuSig2Cleanup RPC: discard a session's state early.
message MuSig2CleanupRequest {
/*
The unique ID of the signing session that should be removed/cleaned up.
*/
bytes session_id = 1;
}
// Reply to the MuSig2Cleanup RPC. Intentionally empty.
message MuSig2CleanupResponse {
}

1568
proto/others/walletkit.proto Normal file

File diff suppressed because it is too large Load diff

View file

@ -144,6 +144,34 @@ service LightningPub {
option (nostr) = true;
}
rpc AddPeer(structs.AddPeerRequest) returns (structs.Empty) {
option (auth_type) = "Admin";
option (http_method) = "post";
option (http_route) = "/api/admin/peer";
option (nostr) = true;
}
rpc UpdateChannelPolicy (structs.UpdateChannelPolicyRequest) returns (structs.Empty) {
option (auth_type) = "Admin";
option (http_method) = "post";
option (http_route) = "/api/admin/channel/policy/update";
option (nostr) = true;
}
rpc OpenChannel(structs.OpenChannelRequest) returns (structs.OpenChannelResponse) {
option (auth_type) = "Admin";
option (http_method) = "post";
option (http_route) = "/api/admin/channel/open";
option (nostr) = true;
}
rpc CloseChannel(structs.CloseChannelRequest) returns (structs.CloseChannelResponse) {
option (auth_type) = "Admin";
option (http_method) = "post";
option (http_route) = "/api/admin/channel/close";
option (nostr) = true;
}
rpc GetUsageMetrics(structs.Empty) returns (structs.UsageMetrics) {
option (auth_type) = "Metrics";
option (http_method) = "post";
@ -419,13 +447,6 @@ service LightningPub {
option (nostr) = true;
}
rpc OpenChannel(structs.OpenChannelRequest) returns (structs.OpenChannelResponse){
option (auth_type) = "User";
option (http_method) = "post";
option (http_route) = "/api/user/open/channel";
option (nostr) = true;
}
rpc GetLnurlWithdrawLink(structs.Empty) returns (structs.LnurlLinkResponse){
option (auth_type) = "User";
option (http_method) = "get";

View file

@ -88,6 +88,22 @@ message RoutingEvent {
bool forward_fail_event = 12;
}
message ChannelPolicy {
int64 base_fee_msat= 1;
int64 fee_rate_ppm = 2;
int64 max_htlc_msat = 3;
int64 min_htlc_msat = 4;
int64 timelock_delta = 5;
}
message UpdateChannelPolicyRequest {
oneof update {
string channel_point = 1;
Empty all = 2;
}
ChannelPolicy policy = 3;
}
message OpenChannel {
string channel_id = 1;
int64 capacity = 2;
@ -96,17 +112,30 @@ message OpenChannel {
int64 local_balance=5;
int64 remote_balance = 6;
string label = 7;
string channel_point = 8;
optional ChannelPolicy policy = 9;
}
message ClosedChannel {
string channel_id = 1;
int64 capacity = 2;
int64 closed_height =4;
int64 close_tx_timestamp = 5;
}
message GraphPoint {
int64 x = 1;
int64 y = 2;
}
enum OperationType {
CHAIN_OP = 0;
INVOICE_OP = 1;
}
message RootOperation {
OperationType op_type = 1;
string op_id = 2;
int64 amount = 3;
int64 created_at_unix = 4;
}
message LndNodeMetrics {
repeated GraphPoint chain_balance = 1;
@ -120,6 +149,7 @@ message LndNodeMetrics {
repeated ClosedChannel closed_channels = 9;
int64 forwarding_events = 11;
int64 forwarding_fees = 12;
repeated RootOperation root_ops = 13;
}
message LndMetrics {
@ -131,6 +161,37 @@ message LndChannels {
repeated OpenChannel open_channels = 1;
}
message OpenChannelRequest{
string node_pubkey = 1;
int64 local_funding_amount = 2;
int64 sat_per_v_byte = 3;
optional int64 push_sat = 5;
optional string close_address = 6;
}
message OpenChannelResponse{
string channel_id = 1;
}
message CloseChannelRequest{
bool force = 2;
int64 sat_per_v_byte = 3;
string funding_txid = 4;
int64 output_index = 5;
}
message CloseChannelResponse{
string closing_txid = 1;
}
message AddPeerRequest {
string pubkey = 1;
string host = 2;
int64 port = 3;
}
message LndGetInfoRequest {
int64 nodeId = 1;
}
@ -142,6 +203,9 @@ message SetMockInvoiceAsPaidRequest {
message LndGetInfoResponse {
string alias = 1;
bool synced_to_chain = 2;
bool synced_to_graph = 3;
bool watchdog_barking = 4;
}
message BanUserRequest {
@ -308,17 +372,6 @@ message PaymentState{
int64 network_fee = 4;
}
message OpenChannelRequest{
string destination = 1;
int64 fundingAmount = 2;
int64 pushAmount = 3;
string closeAddress = 4;
}
message OpenChannelResponse{
string channelId = 1;
}
message LnurlLinkResponse{
string lnurl = 1;
string k1 = 2;

42
src/e2e.ts Normal file
View file

@ -0,0 +1,42 @@
import 'dotenv/config'
import NewServer from '../proto/autogenerated/ts/express_server.js'
import GetServerMethods from './services/serverMethods/index.js'
import serverOptions from './auth.js';
import { LoadNosrtSettingsFromEnv } from './services/nostr/index.js'
import nostrMiddleware from './nostrMiddleware.js'
import { getLogger } from './services/helpers/logger.js';
import { initMainHandler } from './services/main/init.js';
import { LoadMainSettingsFromEnv } from './services/main/settings.js';
import { nip19 } from 'nostr-tools'
//@ts-ignore
const { nprofileEncode } = nip19
// Entry point: boots the main handler, wires the nostr transport into the
// generated server methods, then starts the HTTP server.
const start = async () => {
const log = getLogger({})
const mainSettings = LoadMainSettingsFromEnv()
const keepOn = await initMainHandler(log, mainSettings)
// initMainHandler returns a falsy value when it ran a one-off manual task
// and the process should not continue into serving mode.
if (!keepOn) {
log("manual process ended")
return
}
const { apps, mainHandler, liquidityProviderInfo, wizard, adminManager } = keepOn
const serverMethods = GetServerMethods(mainHandler)
const nostrSettings = LoadNosrtSettingsFromEnv()
log("initializing nostr middleware")
// The middleware exposes the same server methods over nostr and hands back
// a Send function used to push events out.
const { Send } = nostrMiddleware(serverMethods, mainHandler,
{ ...nostrSettings, apps, clients: [liquidityProviderInfo] },
(e, p) => mainHandler.liquidityProvider.onEvent(e, p)
)
log("starting server")
// Attach the nostr sender before starting beacons so beacons can broadcast.
mainHandler.attachNostrSend(Send)
mainHandler.StartBeacons()
// nprofile = this node's nostr pubkey + relay list, shared with the wizard
// UI and the admin manager for enrollment/connection flows.
const appNprofile = nprofileEncode({ pubkey: liquidityProviderInfo.publicKey, relays: nostrSettings.relays })
if (wizard) {
wizard.AddConnectInfo(appNprofile, nostrSettings.relays)
}
adminManager.setAppNprofile(appNprofile)
const Server = NewServer(serverMethods, serverOptions(mainHandler))
Server.Listen(mainSettings.servicePort)
}
start()

View file

@ -8,7 +8,7 @@ import { LightningClient } from '../../../proto/lnd/lightning.client.js'
import { InvoicesClient } from '../../../proto/lnd/invoices.client.js'
import { RouterClient } from '../../../proto/lnd/router.client.js'
import { ChainNotifierClient } from '../../../proto/lnd/chainnotifier.client.js'
import { GetInfoResponse, AddressType, NewAddressResponse, AddInvoiceResponse, Invoice_InvoiceState, PayReq, Payment_PaymentStatus, Payment, PaymentFailureReason, SendCoinsResponse, EstimateFeeResponse, ChannelBalanceResponse, TransactionDetails, ListChannelsResponse, ClosedChannelsResponse, PendingChannelsResponse, ForwardingHistoryResponse, CoinSelectionStrategy } from '../../../proto/lnd/lightning.js'
import { GetInfoResponse, AddressType, NewAddressResponse, AddInvoiceResponse, Invoice_InvoiceState, PayReq, Payment_PaymentStatus, Payment, PaymentFailureReason, SendCoinsResponse, EstimateFeeResponse, ChannelBalanceResponse, TransactionDetails, ListChannelsResponse, ClosedChannelsResponse, PendingChannelsResponse, ForwardingHistoryResponse, CoinSelectionStrategy, OpenStatusUpdate, CloseStatusUpdate, PendingUpdate } from '../../../proto/lnd/lightning.js'
import { OpenChannelReq } from './openChannelReq.js';
import { AddInvoiceReq } from './addInvoiceReq.js';
import { PayInvoiceReq } from './payInvoiceReq.js';
@ -19,6 +19,7 @@ import { HtlcEvent_EventType } from '../../../proto/lnd/router.js';
import { LiquidityProvider, LiquidityRequest } from '../main/liquidityProvider.js';
import { Utils } from '../helpers/utilsWrapper.js';
import { TxPointSettings } from '../storage/stateBundler.js';
import { WalletKitClient } from '../../../proto/lnd/walletkit.client.js';
const DeadLineMetadata = (deadline = 10 * 1000) => ({ deadline: Date.now() + deadline })
const deadLndRetrySeconds = 5
type TxActionOptions = { useProvider: boolean, from: 'user' | 'system' }
@ -27,6 +28,7 @@ export default class {
invoices: InvoicesClient
router: RouterClient
chainNotifier: ChainNotifierClient
walletKit: WalletKitClient
settings: LndSettings
ready = false
latestKnownBlockHeigh = 0
@ -67,6 +69,7 @@ export default class {
this.invoices = new InvoicesClient(transport)
this.router = new RouterClient(transport)
this.chainNotifier = new ChainNotifierClient(transport)
this.walletKit = new WalletKitClient(transport)
this.liquidProvider = liquidProvider
}
@ -380,6 +383,28 @@ export default class {
return res.response
}
async GetChannelInfo(chanId: string) {
// Fetch the routing-graph record for a single channel by its numeric id.
const { response } = await this.lightning.getChanInfo({ chanId, chanPoint: "" }, DeadLineMetadata())
return response
}
/**
 * Updates our routing policy (fees/HTLC limits/timelock) either for a single
 * channel — chanPoint in "fundingTxid:outputIndex" form — or globally for all
 * channels when chanPoint is the empty string.
 *
 * BUGFIX: the scope must be chosen on `chanPoint`, not on `policy`. `policy`
 * is a required parameter and therefore always truthy, so the old ternary
 * always produced a channel-scoped update; with chanPoint === "" it split the
 * empty string and sent a garbage outpoint instead of a global update (the
 * admin handler passes "" for the "all channels" case).
 */
async UpdateChannelPolicy(chanPoint: string, policy: Types.ChannelPolicy) {
const split = chanPoint.split(':')
const res = await this.lightning.updateChannelPolicy({
scope: chanPoint ? { oneofKind: 'chanPoint', chanPoint: { fundingTxid: { oneofKind: 'fundingTxidStr', fundingTxidStr: split[0] }, outputIndex: Number(split[1]) } } : { oneofKind: 'global', global: true },
baseFeeMsat: BigInt(policy.base_fee_msat),
feeRate: 0, // legacy fractional field unused; feeRatePpm below carries the rate
maxHtlcMsat: BigInt(policy.max_htlc_msat),
minHtlcMsat: BigInt(policy.min_htlc_msat),
timeLockDelta: policy.timelock_delta,
inboundFee: undefined,
feeRatePpm: policy.fee_rate_ppm,
// lnd only applies minHtlcMsat when this flag is set; skip for 0 to keep defaults
minHtlcMsatSpecified: policy.min_htlc_msat > 0,
}, DeadLineMetadata())
return res.response
}
async GetChannelBalance() {
const res = await this.lightning.channelBalance({}, DeadLineMetadata())
return res.response
@ -485,21 +510,71 @@ export default class {
}
async GetTx(txid: string) {
// Look up a wallet transaction by its hash via the walletkit sub-server.
const { response } = await this.walletKit.getTransaction({ txid }, DeadLineMetadata())
return response
}
/**
 * Connects to a lightning peer at host:port. perm:true asks lnd to keep
 * reconnecting to this peer persistently.
 * Consistency fix: this was the only unary call in the class made without a
 * request deadline; added DeadLineMetadata() so a hung dial cannot block the
 * caller forever.
 */
async AddPeer(pub: string, host: string, port: number) {
const res = await this.lightning.connectPeer({
addr: {
pubkey: pub,
host: host + ":" + port,
},
perm: true,
timeout: 0n // 0 = use lnd's default connection timeout
}, DeadLineMetadata())
return res.response
}
async ListPeers() {
// Enumerate currently connected peers; latestError includes each peer's
// most recent connection error in the response.
const { response } = await this.lightning.listPeers({ latestError: true }, DeadLineMetadata())
return response
}
async OpenChannel(destination: string, closeAddress: string, fundingAmount: number, pushSats: number) {
async OpenChannel(destination: string, closeAddress: string, fundingAmount: number, pushSats: number, satsPerVByte: number): Promise<OpenStatusUpdate> {
const abortController = new AbortController()
const req = OpenChannelReq(destination, closeAddress, fundingAmount, pushSats)
const req = OpenChannelReq(destination, closeAddress, fundingAmount, pushSats, satsPerVByte)
const stream = this.lightning.openChannel(req, { abort: abortController.signal })
return new Promise((res, rej) => {
stream.responses.onMessage(message => {
console.log("message", message)
switch (message.update.oneofKind) {
case 'chanPending':
res(Buffer.from(message.pendingChanId).toString('base64'))
res(message)
break
}
})
stream.responses.onError(error => {
console.log("error", error)
rej(error)
})
})
}
async CloseChannel(fundingTx: string, outputIndex: number, force: boolean, satPerVByte: number): Promise<PendingUpdate> {
const stream = this.lightning.closeChannel({
deliveryAddress: "",
force: force,
satPerByte: 0n,
satPerVbyte: BigInt(satPerVByte),
noWait: false,
maxFeePerVbyte: 0n,
targetConf: 0,
channelPoint: {
fundingTxid: {
fundingTxidStr: fundingTx,
oneofKind: "fundingTxidStr"
},
outputIndex: outputIndex
},
}, DeadLineMetadata())
return new Promise((res, rej) => {
stream.responses.onMessage(message => {
console.log("message", message)
switch (message.update.oneofKind) {
case 'closePending':
res(message.update.closePending)
break
}
})

View file

@ -1,12 +1,12 @@
import { CommitmentType, OpenChannelRequest } from "../../../proto/lnd/lightning.js";
export const OpenChannelReq = (destination: string, closeAddress: string, fundingAmount: number, pushSats: number): OpenChannelRequest => ({
export const OpenChannelReq = (destination: string, closeAddress: string, fundingAmount: number, pushSats: number, satsPerVByte: number): OpenChannelRequest => ({
nodePubkey: Buffer.from(destination, 'hex'),
closeAddress: closeAddress,
localFundingAmount: BigInt(fundingAmount),
pushSat: BigInt(pushSats),
satPerVbyte: 0n, // TBD
satPerVbyte: BigInt(satsPerVByte), // TBD
private: false,
minConfs: 0, // TBD
baseFee: 1n, // TBD

View file

@ -1,12 +1,16 @@
import fs, { watchFile } from "fs";
import crypto from 'crypto'
import { getLogger } from "../helpers/logger.js";
import { ERROR, getLogger } from "../helpers/logger.js";
import { MainSettings, getDataPath } from "./settings.js";
import Storage from "../storage/index.js";
import * as Types from '../../../proto/autogenerated/ts/types.js'
import LND from "../lnd/lnd.js";
export class AdminManager {
storage: Storage
log = getLogger({ component: "adminManager" })
adminNpub = ""
@ -22,8 +26,8 @@ export class AdminManager {
this.storage = storage
this.dataDir = mainSettings.storageSettings.dataDir
this.adminNpubPath = getDataPath(this.dataDir, 'admin.npub')
this.adminEnrollTokenPath = getDataPath(this.dataDir, '.admin_enroll')
this.adminConnectPath = getDataPath(this.dataDir, '.admin_connect')
this.adminEnrollTokenPath = getDataPath(this.dataDir, 'admin.enroll')
this.adminConnectPath = getDataPath(this.dataDir, 'admin.connect')
this.appNprofilePath = getDataPath(this.dataDir, 'app.nprofile')
this.start()
}
@ -142,18 +146,79 @@ export class AdminManager {
}
}
ListChannels = async (): Promise<Types.LndChannels> => {
const channels = await this.lnd.ListChannels(true)
async LndGetInfo(): Promise<Types.LndGetInfoResponse> {
const info = await this.lnd.GetInfo()
return {
open_channels: channels.channels.map(c => ({
alias: info.alias,
synced_to_chain: info.syncedToChain,
synced_to_graph: info.syncedToGraph,
watchdog_barking: this.lnd.outgoingOpsLocked
}
}
ListChannels = async (): Promise<Types.LndChannels> => {
const { channels } = await this.lnd.ListChannels(true)
const { identityPubkey } = await this.lnd.GetInfo()
const openChannels = await Promise.all(channels.map(async c => {
const info = await this.lnd.GetChannelInfo(c.chanId)
const policies = [{ pub: info.node1Pub, policy: info.node1Policy }, { pub: info.node2Pub, policy: info.node2Policy }]
const myPolicy = policies.find(p => p.pub === identityPubkey)?.policy
const policy: Types.ChannelPolicy | undefined = myPolicy ? {
base_fee_msat: Number(myPolicy.feeBaseMsat),
fee_rate_ppm: Number(myPolicy.feeRateMilliMsat),
timelock_delta: Number(myPolicy.timeLockDelta),
max_htlc_msat: Number(myPolicy.maxHtlcMsat),
min_htlc_msat: Number(myPolicy.minHtlc),
} : undefined
return {
channel_point: c.channelPoint,
active: c.active,
capacity: Number(c.capacity),
local_balance: Number(c.localBalance),
remote_balance: Number(c.remoteBalance),
channel_id: c.chanId,
label: c.peerAlias || c.remotePubkey,
lifetime: Number(c.lifetime)
}))
lifetime: Number(c.lifetime),
policy,
}
}))
return {
open_channels: openChannels
}
}
async UpdateChannelPolicy(req: Types.UpdateChannelPolicyRequest): Promise<void> {
// Apply the policy to one channel when a channel point was targeted,
// otherwise pass "" so the lnd wrapper performs a global update.
const singleChannel = req.update.type === Types.UpdateChannelPolicyRequest_update_type.CHANNEL_POINT
const target = singleChannel ? req.update.channel_point : ""
const { failedUpdates } = await this.lnd.UpdateChannelPolicy(target, req.policy)
// lnd reports per-channel failures instead of erroring the whole call.
if (failedUpdates.length !== 0) {
this.log(ERROR, "failed to update policy", failedUpdates)
throw new Error("failed to update policy")
}
}
// Thin pass-through to the lnd wrapper; the request fields were already
// validated by the server-method layer.
async AddPeer(req: Types.AddPeerRequest) {
await this.lnd.AddPeer(req.pubkey, req.host, req.port)
}
async OpenChannel(req: Types.OpenChannelRequest): Promise<Types.OpenChannelResponse> {
// Open a channel to the given node. When the caller did not supply a
// cooperative-close address, generate a fresh P2WPKH address from our
// own wallet to receive funds on close.
const closeAddr = req.close_address
? req.close_address
: (await this.lnd.NewAddress(Types.AddressType.WITNESS_PUBKEY_HASH, { useProvider: false, from: 'system' })).address
const update = await this.lnd.OpenChannel(req.node_pubkey, closeAddr, req.local_funding_amount, req.push_sat || 0, req.sat_per_v_byte)
// pendingChanId is raw bytes; expose it hex-encoded as the channel id.
const channelId = Buffer.from(update.pendingChanId).toString('hex')
return { channel_id: channelId }
}
async CloseChannel(req: Types.CloseChannelRequest): Promise<Types.CloseChannelResponse> {
// Request a (possibly force) close of the channel identified by its
// funding outpoint; returns the pending closing txid hex-encoded.
const pending = await this.lnd.CloseChannel(req.funding_txid, req.output_index, req.force, req.sat_per_v_byte)
const closingTxid = Buffer.from(pending.txid).toString('hex')
return { closing_txid: closingTxid }
}
}

View file

@ -148,7 +148,10 @@ export default class {
return this.storage.StartTransaction(async tx => {
const { blockHeight } = await this.lnd.GetInfo()
const userAddress = await this.storage.paymentStorage.GetAddressOwner(address, tx)
if (!userAddress) { return }
if (!userAddress) {
await this.metricsManager.AddRootAddressPaid(address, txOutput, amount)
return
}
const internal = used === 'internal'
let log = getLogger({})
if (!userAddress.linkedApplication) {
@ -188,7 +191,10 @@ export default class {
return this.storage.StartTransaction(async tx => {
let log = getLogger({})
const userInvoice = await this.storage.paymentStorage.GetInvoiceOwner(paymentRequest, tx)
if (!userInvoice) { return }
if (!userInvoice) {
await this.metricsManager.AddRootInvoicePaid(paymentRequest, amount)
return
}
const internal = used === 'internal'
if (userInvoice.paid_at_unix > 0 && internal) { log("cannot pay internally, invoice already paid"); return }
if (userInvoice.paid_at_unix > 0 && !internal && userInvoice.paidByLnd) { log("invoice already paid by lnd"); return }

View file

@ -626,8 +626,6 @@ export default class {
return this.GetLnurlPayInfoFromUser(linkedUser.user.user_id, linkedUser.application, { metadata: defaultLnAddressMetadata(this.settings.lnurlMetaText, addressName) })
}
async OpenChannel(userId: string, req: Types.OpenChannelRequest): Promise<Types.OpenChannelResponse> { throw new Error("WIP") }
mapOperations(operations: UserOperationInfo[], type: Types.UserOperationType, inbound: boolean): Types.UserOperations {
if (operations.length === 0) {
return {

View file

@ -9,6 +9,8 @@ import LND from '../lnd/lnd.js'
import HtlcTracker from './htlcTracker.js'
const maxEvents = 100_000
export default class Handler {
storage: Storage
lnd: LND
htlcTracker: HtlcTracker
@ -25,12 +27,11 @@ export default class Handler {
async NewBlockCb(height: number, balanceInfo: BalanceInfo) {
const providers = await this.storage.liquidityStorage.GetTrackedProviders()
let lndTotal = 0
const channels = await this.lnd.GetChannelBalance()
let providerTotal = 0
console.log({ providers })
providers.forEach(p => {
if (p.provider_type === 'lnd') {
lndTotal += p.latest_balance
} else {
if (p.provider_type === 'lnPub') {
providerTotal += p.latest_balance
}
})
@ -39,7 +40,7 @@ export default class Handler {
confirmed_chain_balance: balanceInfo.confirmedBalance,
unconfirmed_chain_balance: balanceInfo.unconfirmedBalance,
total_chain_balance: balanceInfo.totalBalance,
channels_balance: lndTotal,
channels_balance: Number(channels.localBalance?.sat) || 0,
external_balance: providerTotal
}
const channelsEvents: Partial<ChannelBalanceEvent>[] = balanceInfo.channelsBalance.map(c => ({
@ -216,10 +217,17 @@ export default class Handler {
async GetLndMetrics(req: Types.LndMetricsRequest): Promise<Types.LndMetrics> {
const { openChannels, totalActive, totalInactive } = await this.GetChannelsInfo()
const { totalPendingOpen, totalPendingClose } = await this.GetPendingChannelsInfo()
const { channels: closedChannels } = await this.lnd.ListClosedChannels()
const rawRouting = await this.storage.metricsStorage.GetChannelRouting({ from: req.from_unix, to: req.to_unix })
const [chansInfo, pendingChansInfo, closedChansInfo, routing, rootOps] = await Promise.all([
this.GetChannelsInfo(),
this.GetPendingChannelsInfo(),
this.lnd.ListClosedChannels(),
this.storage.metricsStorage.GetChannelRouting({ from: req.from_unix, to: req.to_unix }),
this.storage.metricsStorage.GetRootOperations({ from: req.from_unix, to: req.to_unix })
])
const { openChannels, totalActive, totalInactive } = chansInfo
const { totalPendingOpen, totalPendingClose } = pendingChansInfo
const { channels: closedChannels } = closedChansInfo
const rawRouting = routing
let totalEvents = 0
let totalFees = 0
rawRouting.forEach(r => {
@ -243,7 +251,10 @@ export default class Handler {
externalBalance.push({ x: e.block_height, y: e.external_balance })
}
})
const closed = await Promise.all(closedChannels.map(async c => {
const tx = await this.lnd.GetTx(c.closingTxHash)
return { capacity: Number(c.capacity), channel_id: c.chanId, closed_height: c.closeHeight, close_tx_timestamp: Number(tx.timeStamp) }
}))
return {
nodes: [{
chain_balance: chainBalance,
@ -253,11 +264,28 @@ export default class Handler {
pending_channels: totalPendingOpen,
offline_channels: totalInactive,
online_channels: totalActive,
closed_channels: closedChannels.map(c => ({ capacity: Number(c.capacity), channel_id: c.chanId, closed_height: c.closeHeight })),
open_channels: openChannels.map(c => ({ active: c.active, capacity: Number(c.capacity), channel_id: c.chanId, lifetime: Number(c.lifetime), local_balance: Number(c.localBalance), remote_balance: Number(c.remoteBalance), label: c.peerAlias })),
closed_channels: closed,
open_channels: openChannels.map(c => ({ channel_point: c.channelPoint, active: c.active, capacity: Number(c.capacity), channel_id: c.chanId, lifetime: Number(c.lifetime), local_balance: Number(c.localBalance), remote_balance: Number(c.remoteBalance), label: c.peerAlias })),
forwarding_events: totalEvents,
forwarding_fees: totalFees
forwarding_fees: totalFees,
root_ops: rootOps.map(r => ({ amount: r.operation_amount, created_at_unix: r.created_at.getTime(), op_id: r.operation_identifier, op_type: mapRootOpType(r.operation_type) })),
}],
}
}
async AddRootAddressPaid(address: string, txOutput: { hash: string; index: number }, amount: number) {
// Record an on-chain payment that credited an address with no registered
// owner; the identifier encodes address + outpoint so the op is unique.
const opId = [address, txOutput.hash, txOutput.index].join(":")
await this.storage.metricsStorage.AddRootOperation("chain", opId, amount)
}
// Record a paid invoice that had no registered owner; the payment request
// string itself serves as the operation identifier.
async AddRootInvoicePaid(paymentRequest: string, amount: number) {
await this.storage.metricsStorage.AddRootOperation("invoice", paymentRequest, amount)
}
}
// Translate the storage-layer string tag into the proto enum value.
const mapRootOpType = (opType: string): Types.OperationType => {
if (opType === "chain") return Types.OperationType.CHAIN_OP
if (opType === "invoice") return Types.OperationType.INVOICE_OP
throw new Error("Unknown operation type")
}

View file

@ -16,11 +16,45 @@ export default (mainHandler: Main): Types.ServerMethods => {
ListChannels: async ({ ctx }) => {
return mainHandler.adminManager.ListChannels()
},
// Admin RPC: connect the node to a new peer after basic field validation.
AddPeer: async ({ ctx, req }) => {
const err = Types.AddPeerRequestValidate(req, {
pubkey_CustomCheck: pubkey => pubkey !== '',
host_CustomCheck: host => host !== '',
port_CustomCheck: port => port > 0
})
if (err != null) throw new Error(err.message)
return mainHandler.adminManager.AddPeer(req)
},
// Admin RPC: update routing policy for one channel or all channels.
// Only the channel_point arm of the oneof needs a non-empty check.
UpdateChannelPolicy: async ({ ctx, req }) => {
const err = Types.UpdateChannelPolicyRequestValidate(req, {
update_Options: {
channel_point_CustomCheck: cp => cp !== '',
}
})
if (err != null) throw new Error(err.message)
return mainHandler.adminManager.UpdateChannelPolicy(req)
},
// Admin RPC: open a channel; requires a peer pubkey, a positive funding
// amount, and a positive fee rate in sat/vB.
OpenChannel: async ({ ctx, req }) => {
const err = Types.OpenChannelRequestValidate(req, {
node_pubkey_CustomCheck: pubkey => pubkey !== '',
local_funding_amount_CustomCheck: amt => amt > 0,
sat_per_v_byte_CustomCheck: spv => spv > 0,
})
if (err != null) throw new Error(err.message)
return mainHandler.adminManager.OpenChannel(req)
},
// Admin RPC: close a channel identified by its funding outpoint.
// Renamed the validation lambda's parameter from the misleading `chanId`
// to `txid` — it validates the funding_txid field, not a channel id.
CloseChannel: async ({ ctx, req }) => {
const err = Types.CloseChannelRequestValidate(req, {
funding_txid_CustomCheck: txid => txid !== '',
sat_per_v_byte_CustomCheck: spv => spv > 0
})
if (err != null) throw new Error(err.message)
return mainHandler.adminManager.CloseChannel(req)
},
EncryptionExchange: async () => { },
Health: async () => { await mainHandler.lnd.Health() },
LndGetInfo: async ({ ctx }) => {
const info = await mainHandler.lnd.GetInfo()
return { alias: info.alias }
return await mainHandler.adminManager.LndGetInfo()
},
BanUser: async ({ ctx, req }) => {
const err = Types.BanUserRequestValidate(req, {
@ -54,15 +88,6 @@ export default (mainHandler: Main): Types.ServerMethods => {
if (err != null) throw new Error(err.message)
return mainHandler.paymentManager.GetPaymentState(ctx.user_id, req)
},
// User-scoped channel open: both amounts must be strictly positive and a
// destination pubkey is required before handing off to the payment manager.
OpenChannel: async ({ ctx, req }) => {
    const err = Types.OpenChannelRequestValidate(req, {
        fundingAmount_CustomCheck: amt => amt > 0,
        // NOTE(review): push amount of 0 is rejected by this check — confirm
        // that opening without pushing sats is not a supported case.
        pushAmount_CustomCheck: amt => amt > 0,
        destination_CustomCheck: dest => dest !== ""
    })
    if (err != null) throw new Error(err.message)
    return mainHandler.paymentManager.OpenChannel(ctx.user_id, req)
},
// Thin pass-through to the payment manager; note this handler is not async.
NewAddress: ({ ctx, req }) => mainHandler.paymentManager.NewAddress(ctx, req),
PayAddress: async ({ ctx, req }) => {
const err = Types.PayAddressRequestValidate(req, {

View file

@ -22,6 +22,7 @@ import { LndNodeInfo } from "./entity/LndNodeInfo.js"
import { TrackedProvider } from "./entity/TrackedProvider.js"
import { InviteToken } from "./entity/InviteToken.js"
import { DebitAccess } from "./entity/DebitAccess.js"
import { RootOperation } from "./entity/RootOperation.js"
export type DbSettings = {
@ -41,7 +42,7 @@ export const newMetricsDb = async (settings: DbSettings, metricsMigrations: Func
const source = await new DataSource({
type: "sqlite",
database: settings.metricsDatabaseFile,
entities: [BalanceEvent, ChannelBalanceEvent, ChannelRouting],
entities: [BalanceEvent, ChannelBalanceEvent, ChannelRouting, RootOperation],
migrations: metricsMigrations
}).initialize();
const log = getLogger({});

View file

@ -0,0 +1,22 @@
import { Entity, PrimaryGeneratedColumn, Column, Index, Check, CreateDateColumn, UpdateDateColumn } from "typeorm"

/**
 * Metrics-DB row recording a single root/admin-level operation
 * (e.g. a channel open performed by the admin manager).
 */
@Entity()
export class RootOperation {
    // Auto-increment surrogate key.
    @PrimaryGeneratedColumn()
    serial_id: number

    // Discriminator for the kind of operation performed.
    @Column()
    operation_type: string

    // Amount moved by the operation. NOTE(review): unit is presumably sats — confirm with writers.
    @Column()
    operation_amount: number

    // Opaque identifier tying the row back to the operation (presumably a txid
    // or similar — confirm against AddRootOperation callers).
    @Column()
    operation_identifier: string

    // Row timestamps maintained automatically by TypeORM.
    @CreateDateColumn()
    created_at: Date

    @UpdateDateColumn()
    updated_at: Date
}

View file

@ -5,8 +5,10 @@ import TransactionsQueue, { TX } from "./transactionsQueue.js";
import { StorageSettings } from "./index.js";
import { newMetricsDb } from "./db.js";
import { ChannelRouting } from "./entity/ChannelRouting.js";
import { RootOperation } from "./entity/RootOperation.js";
export default class {
DB: DataSource | EntityManager
settings: StorageSettings
txQueue: TransactionsQueue
@ -98,6 +100,16 @@ export default class {
await repo.update(existing.serial_id, { latest_index_offset: event.latest_index_offset })
}
}
    /**
     * Persists a RootOperation row via the serialized transaction queue.
     * NOTE(review): the entity is created through `entityManager`, but the save
     * runs through the queue's own `db` handle with `dbTx: false`, so a
     * caller-supplied transaction manager does not cover the write — confirm
     * this is intentional.
     */
    async AddRootOperation(opType: string, id: string, amount: number, entityManager = this.DB) {
        const newOp = entityManager.getRepository(RootOperation).create({ operation_type: opType, operation_amount: amount, operation_identifier: id })
        return this.txQueue.PushToQueue<RootOperation>({ exec: async db => db.getRepository(RootOperation).save(newOp), dbTx: false })
    }
async GetRootOperations({ from, to }: { from?: number, to?: number }, entityManager = this.DB) {
const q = getTimeQuery({ from, to })
return entityManager.getRepository(RootOperation).find(q)
}
}
const getTimeQuery = ({ from, to }: { from?: number, to?: number }): FindManyOptions<{ created_at: Date }> => {

View file

@ -0,0 +1,14 @@
import { MigrationInterface, QueryRunner } from "typeorm";

/** Creates the `root_operation` table used to log admin/root-level operations. */
export class RootOps1732566440447 implements MigrationInterface {
    name = 'RootOps1732566440447'

    // Forward migration: the SQL mirrors the RootOperation entity's columns,
    // with sqlite datetime('now') defaults for the timestamp columns.
    public async up(queryRunner: QueryRunner): Promise<void> {
        await queryRunner.query(`CREATE TABLE "root_operation" ("serial_id" integer PRIMARY KEY AUTOINCREMENT NOT NULL, "operation_type" varchar NOT NULL, "operation_amount" integer NOT NULL, "operation_identifier" varchar NOT NULL, "created_at" datetime NOT NULL DEFAULT (datetime('now')), "updated_at" datetime NOT NULL DEFAULT (datetime('now')))`);
    }

    // Rollback: drops the table; any recorded operations are lost.
    public async down(queryRunner: QueryRunner): Promise<void> {
        await queryRunner.query(`DROP TABLE "root_operation"`);
    }
}

View file

@ -16,8 +16,9 @@ import { DebitAccess1726496225078 } from './1726496225078-debit_access.js'
import { DebitAccessFixes1726685229264 } from './1726685229264-debit_access_fixes.js'
import { DebitToPub1727105758354 } from './1727105758354-debit_to_pub.js'
import { UserCbUrl1727112281043 } from './1727112281043-user_cb_url.js'
import { RootOps1732566440447 } from './1732566440447-root_ops.js'
const allMigrations = [Initial1703170309875, LspOrder1718387847693, LiquidityProvider1719335699480, LndNodeInfo1720187506189, TrackedProvider1720814323679, CreateInviteTokenTable1721751414878, PaymentIndex1721760297610, DebitAccess1726496225078, DebitAccessFixes1726685229264, DebitToPub1727105758354, UserCbUrl1727112281043]
// Migrations for the metrics sqlite DB, in execution order. (The rendered diff
// left both the old and new declarations in place — an illegal duplicate
// `const`; only the updated list including RootOps1732566440447 is kept.)
const allMetricsMigrations = [LndMetrics1703170330183, ChannelRouting1709316653538, HtlcCount1724266887195, BalanceEvents1724860966825, RootOps1732566440447]
export const TypeOrmMigrationRunner = async (log: PubLogger, storageManager: Storage, settings: DbSettings, arg: string | undefined): Promise<boolean> => {
if (arg === 'fake_initial_migration') {
runFakeMigration(settings.databaseFile, [Initial1703170309875])

View file

@ -0,0 +1,22 @@
import { defaultInvoiceExpiry } from '../services/storage/paymentStorage.js'
import { runSanityCheck, safelySetUserBalance, TestBase } from './testBase.js'
// Runner flags — presumably `ignore` skips this test and `dev` marks it as
// dev-only; confirm against the test harness that reads these exports.
export const ignore = false
export const dev = false
// E2E flow: fund user1 with 2000 sats, open a channel from the admin node,
// then run the suite's sanity check over the resulting state.
export default async (T: TestBase) => {
    await safelySetUserBalance(T, T.user1, 2000)
    await openAdminChannel(T)
    await runSanityCheck(T)
}
// Opens a 100k-sat channel from the admin node to the third LND instance,
// logging both the remote node info and the open-channel result.
const openAdminChannel = async (T: TestBase) => {
    T.d("starting openAdminChannel")
    const thirdNodeInfo = await T.externalAccessToThirdLnd.GetInfo()
    console.log(thirdNodeInfo)
    const remotePubkey = thirdNodeInfo.identityPubkey
    const openResult = await T.main.adminManager.OpenChannel({
        node_pubkey: remotePubkey, local_funding_amount: 100000, sat_per_v_byte: 1
    })
    console.log(openResult)
    T.d("opened admin channel")
}