From 3763fe9fbe38e235711a6cb1e9b194bc0942e026 Mon Sep 17 00:00:00 2001 From: daywalker90 <8257956+daywalker90@users.noreply.github.com> Date: Mon, 13 Apr 2026 16:41:43 +0200 Subject: [PATCH 1/6] schemas: add schemas for all plugin hooks Added schemas for: peer_connected, recover, commitment_revocation, db_write, invoice_payment, openchannel, openchannel2, openchannel2_changed, openchannel2_sign, rbf_channel, htlc_accepted, rpc_command, custommsg, onion_message_recv and onion_message_recv_secret Changelog-None --- doc/Makefile | 2 +- doc/schemas/hook/commitment_revocation.json | 75 ++++ doc/schemas/hook/custommsg.json | 64 ++++ doc/schemas/hook/db_write.json | 89 +++++ doc/schemas/hook/htlc_accepted.json | 321 ++++++++++++++++++ doc/schemas/hook/invoice_payment.json | 114 +++++++ doc/schemas/hook/onion_message_recv.json | 171 ++++++++++ .../hook/onion_message_recv_secret.json | 185 ++++++++++ doc/schemas/hook/openchannel.json | 226 ++++++++++++ doc/schemas/hook/openchannel2.json | 289 ++++++++++++++++ doc/schemas/hook/openchannel2_changed.json | 100 ++++++ doc/schemas/hook/openchannel2_sign.json | 103 ++++++ doc/schemas/hook/peer_connected.json | 96 ++++++ doc/schemas/hook/rbf_channel.json | 157 +++++++++ doc/schemas/hook/recover.json | 47 +++ doc/schemas/hook/rpc_command.json | 226 ++++++++++++ 16 files changed, 2264 insertions(+), 1 deletion(-) create mode 100644 doc/schemas/hook/commitment_revocation.json create mode 100644 doc/schemas/hook/custommsg.json create mode 100644 doc/schemas/hook/db_write.json create mode 100644 doc/schemas/hook/htlc_accepted.json create mode 100644 doc/schemas/hook/invoice_payment.json create mode 100644 doc/schemas/hook/onion_message_recv.json create mode 100644 doc/schemas/hook/onion_message_recv_secret.json create mode 100644 doc/schemas/hook/openchannel.json create mode 100644 doc/schemas/hook/openchannel2.json create mode 100644 doc/schemas/hook/openchannel2_changed.json create mode 100644 doc/schemas/hook/openchannel2_sign.json 
create mode 100644 doc/schemas/hook/peer_connected.json create mode 100644 doc/schemas/hook/rbf_channel.json create mode 100644 doc/schemas/hook/recover.json create mode 100644 doc/schemas/hook/rpc_command.json diff --git a/doc/Makefile b/doc/Makefile index 5f1b8306234f..fbf4b64d749b 100644 --- a/doc/Makefile +++ b/doc/Makefile @@ -188,7 +188,7 @@ doc/schemas/sql.json: doc/schemas/sql-template.json plugins/sql doc-all: $(MANPAGES) doc/index.rst -SCHEMAS := $(wildcard doc/schemas/*.json) $(wildcard doc/schemas/notification/*.json) +SCHEMAS := $(wildcard doc/schemas/*.json) $(wildcard doc/schemas/notification/*.json) $(wildcard doc/schemas/hook/*.json) # Don't try to build sql.json tables with plugins/sql if we don't have sqlite3 ifeq ($(HAVE_SQLITE3),0) diff --git a/doc/schemas/hook/commitment_revocation.json b/doc/schemas/hook/commitment_revocation.json new file mode 100644 index 000000000000..f655a076a36b --- /dev/null +++ b/doc/schemas/hook/commitment_revocation.json @@ -0,0 +1,75 @@ +{ + "$schema": "../rpc-schema-draft.json", + "added": "pre-v0.10.1", + "type": "object", + "notification": "commitment_revocation", + "title": "Hook fired when a commitment transaction is revoked", + "description": [ + "The **commitment_revocation** hook is called whenever a channel state is updated, and the old state was revoked. State updates in Lightning consist of the following steps:", + "", + "1. Proposal of a new state commitment in the form of a commitment transaction", + "2. Exchange of signatures for the agreed upon commitment transaction", + "3. Verification that the signatures match the commitment transaction", + "4. 
Exchange of revocation secrets that could be used to penalize an eventual misbehaving party", + "", + "The `commitment_revocation` hook is used to inform the plugin about the state transition being completed, and deliver the penalty transaction.", + "The penalty transaction could then be sent to a watchtower that automatically reacts in case one party attempts to settle using a revoked commitment.", + "", + "This is a chained hook: multiple plugins may be registered." + ], + "request": { + "additionalProperties": false, + "required": [ + "commitment_txid", + "penalty_tx", + "channel_id", + "commitnum" + ], + "properties": { + "commitment_txid": { + "type": "txid", + "description": [ + "The txid of the revoked commitment transaction." + ] + }, + "penalty_tx": { + "type": "hex", + "description": [ + "The penalty transaction that can spend the revoked commitment.", + "Can be sent to a watchtower for enforcement." + ] + }, + "channel_id": { + "added": "v0.10.2", + "type": "hash", + "description": [ + "The channel_id for which the revocation occurred." + ] + }, + "commitnum": { + "added": "v0.10.2", + "type": "u64", + "description": [ + "The commitment number identifying the revoked state." + ] + } + } + }, + "response": { + "additionalProperties": false, + "required": [ + "result" + ], + "properties": { + "result": { + "type": "string", + "enum": [ + "continue" + ], + "description": [ + "Plugins should always return \"continue\", otherwise subsequent hook subscribers would not get called." 
+ ] + } + } + } +} diff --git a/doc/schemas/hook/custommsg.json b/doc/schemas/hook/custommsg.json new file mode 100644 index 000000000000..b1077a1515ff --- /dev/null +++ b/doc/schemas/hook/custommsg.json @@ -0,0 +1,64 @@ +{ + "$schema": "../rpc-schema-draft.json", + "added": "pre-v0.10.1", + "type": "object", + "notification": "custommsg", + "title": "Hook for handling custom peer messages", + "description": [ + "The **custommsg** hook is the receiving counterpart to the sendcustommsg RPC method and is called whenever a peer sends a custom message that is not handled internally by Core Lightning.", + "", + "The goal of these two components is to allow the implementation of custom protocols or prototypes on top of a Core Lightning node, without having to change the node's implementation itself.", + "", + "Messages are restricted to odd-numbered types and must not conflict with internally handled message types.", + "These limitations are in place in order to avoid conflicts with the internal state tracking, and avoiding disconnections or channel closures, since odd-numbered messages can be ignored by nodes (see \"it's ok to be odd\" in BOLT #1 for details).", + "", + "Note that if the hook registration specifies \"filters\" then that should be a JSON array of message numbers, and the hook will only be called for those.", + "Otherwise, the hook is called for all messages not handled internally. (added in v25.12)", + "", + "This is a chained hook and MUST return `{\"result\": \"continue\"}`." + ], + "request": { + "required": [ + "peer_id", + "payload" + ], + "additionalProperties": false, + "properties": { + "peer_id": { + "type": "pubkey", + "description": [ + "The `node_id` of the peer that sent the message."
+ ] + }, + "payload": { + "type": "hex", + "description": [ + "The raw message payload as a hex string.", + "", + "The first two bytes encode the message type (big-endian), followed by the message payload.", + "The plugin must implement the parsing of the message, including the type prefix, since Core Lightning does not know how to parse the message." + ] + } + } + }, + "response": { + "required": [ + "result" + ], + "additionalProperties": false, + "properties": { + "result": { + "type": "string", + "enum": [ + "continue" + ], + "description": [ + "Must always be `continue`. Any other value will cause the hook to fail." + ] + } + } + }, + "see_also": [ + "lightning-sendcustommsg(7)" + ] +} diff --git a/doc/schemas/hook/db_write.json b/doc/schemas/hook/db_write.json new file mode 100644 index 000000000000..317faff7984c --- /dev/null +++ b/doc/schemas/hook/db_write.json @@ -0,0 +1,89 @@ +{ + "$schema": "../rpc-schema-draft.json", + "added": "pre-v0.10.1", + "type": "object", + "notification": "db_write", + "title": "Hook fired before database writes are committed", + "description": [ + "The **db_write** hook is called whenever a change is about to be committed to the database, if you are using a SQLITE3 database (the default).", + "This hook will be useless (the \"writes\" field will always be empty) if you are using a PostgreSQL database.", + "", + "This hook is extremely restricted:", + "1. A plugin registering for this hook should not perform anything that may cause a database operation in response (pretty much, anything but logging).", + "2. A plugin registering for this hook should not register for other hooks or commands, as these may become intermingled and break rule #1.", + "3. The hook will be called before your plugin is initialized!", + "", + "This hook is strongly synchronous: `lightningd` will halt almost all processing until all plugins have responded.", + "", + "This hook is intended for creating continuous backups. 
The intent is that your backup plugin maintains three pieces of information (possibly in separate files):", + "1. A snapshot of the database", + "2. A log of database queries that will bring that snapshot up-to-date", + "3. The previous `data_version`", + "", + "`data_version` is an unsigned 32-bit number that will always increment by 1 each time `db_write` is called. Note that this will wrap around on the limit of 32-bit numbers.", + "", + "`writes` is an array of strings, each string being a database query that modifies the database.", + "If the `data_version` above is validated correctly, then you can simply append this to the log of database queries.", + "", + "Your plugin MUST validate the `data_version`. It MUST keep track of the previous `data_version` it got, and:", + "1. If the new `data_version` is exactly one higher than the previous, then this is the ideal case and nothing bad happened and we should save this and continue.", + "2. If the new `data_version` is exactly the same value as the previous, then the previous set of queries was not committed.", + " Your plugin MAY overwrite the previous set of queries with the current set, or it MAY overwrite its entire backup with a new snapshot of the database and the current `writes` array", + " (treating this case as if `data_version` were two or more higher than the previous).", + "3. If the new `data_version` is less than the previous, your plugin MUST halt and catch fire, and have the operator inspect what exactly happened here.", + "4. 
Otherwise, some queries were lost and your plugin SHOULD recover by creating a new snapshot of the database: copy the database file, back up the given `writes` array, then delete", + " (or atomically rename if in a POSIX filesystem) the previous backups of the database and SQL statements, or you MAY fail the hook to abort `lightningd`.", + "", + "The \"rolling up\" of the database could be done periodically as well if the log of SQL statements has grown large.", + "", + "Any response other than `{\"result\": \"continue\"}` will cause `lightningd` to error without committing to the database! This is the expected way to halt and catch fire.", + "", + "`db_write` is a parallel-chained hook, i.e., multiple plugins can register it, and all of them will be invoked simultaneously without regard for order of registration.", + "The hook is considered handled if all registered plugins return `{\"result\": \"continue\"}`. If any plugin returns anything else, `lightningd` will error without committing to the database." + ], + "request": { + "additionalProperties": false, + "required": [ + "data_version", + "writes" + ], + "properties": { + "data_version": { + "type": "u32", + "description": [ + "A monotonically increasing 32-bit unsigned integer representing the database version.", + "Wraps around at the 32-bit limit." + ] + }, + "writes": { + "type": "array", + "description": [ + "Array of SQL statements that modify the database.", + "If using PostgreSQL, this array will always be empty.", + "Each entry is a SQL query string." + ], + "items": { + "type": "string" + } + } + } + }, + "response": { + "additionalProperties": false, + "required": [ + "result" + ], + "properties": { + "result": { + "type": "string", + "enum": [ + "continue" + ], + "description": [ + "Must be \"continue\" for the database commit to proceed.", + "Any other value will abort the commit and cause `lightningd` to error." 
+ ] + } + } + } +} diff --git a/doc/schemas/hook/htlc_accepted.json b/doc/schemas/hook/htlc_accepted.json new file mode 100644 index 000000000000..bbba2a6cc4d2 --- /dev/null +++ b/doc/schemas/hook/htlc_accepted.json @@ -0,0 +1,321 @@ +{ + "$schema": "../rpc-schema-draft.json", + "added": "pre-v0.10.1", + "type": "object", + "notification": "htlc_accepted", + "title": "Hook for handling incoming HTLCs", + "description": [ + "The **htlc_accepted** hook is called whenever an incoming HTLC is accepted.", + "", + "The plugin can inspect the HTLC and decide to continue processing, fail it, or resolve it.", + "", + "lightningd will replay the HTLCs for which it doesn't have a final verdict during startup.", + "This means that, if the plugin response wasn't processed before the HTLC was forwarded, failed, or resolved,", + "then the plugin may see the same HTLC again during startup. It is therefore paramount that the plugin is idempotent if it talks to an external system.", + "", + "This is a chained hook: plugins are called in order until one returns a result other than `continue`.", + "After this the event is considered handled and the remaining plugins are skipped." + ], + "request": { + "required": [ + "onion", + "htlc" + ], + "additionalProperties": false, + "properties": { + "peer_id": { + "added": "v25.12", + "type": "pubkey", + "description": [ + "The `node_id` of the peer that offered this HTLC.", + "This field may be absent if the peer is unknown." + ] + }, + "onion": { + "type": "object", + "additionalProperties": false, + "required": [ + "payload", + "next_onion", + "shared_secret" + ], + "properties": { + "payload": { + "type": "hex", + "description": [ + "The raw unparsed onion payload received from the sender." + ] + }, + "type": { + "type": "string", + "enum": [ + "tlv" + ], + "description": [ + "Indicates that the payload is TLV formatted.", + "Only present if the payload was successfully parsed." 
+ ] + }, + "short_channel_id": { + "type": "short_channel_id", + "description": [ + "Determines the channel that the sender is hinting should be used next.", + "Not present if this node is the final destination." + ] + }, + "next_node_id": { + "type": "pubkey", + "description": [ + "The node_id of the next hop.", + "Only present if specified in the onion payload." + ] + }, + "forward_msat": { + "type": "msat", + "description": [ + "The amount to forward to the next hop." + ] + }, + "outgoing_cltv_value": { + "type": "u32", + "description": [ + "Determines what the CLTV value for the HTLC that we forward to the next hop should be." + ] + }, + "total_msat": { + "type": "msat", + "description": [ + "The total payment amount.", + "Only present for final recipients using modern TLV payloads." + ] + }, + "payment_secret": { + "type": "secret", + "description": [ + "The payment secret (which the payer should have obtained from the invoice) provided by the sender.", + "Only present for final recipients." + ] + }, + "payment_metadata": { + "type": "hex", + "description": [ + "Additional metadata provided in the onion payload.", + "Only present if included by the sender." + ] + }, + "next_onion": { + "type": "hex", + "description": [ + "The fully processed onion that we should be sending to the next hop as part of the outgoing HTLC.", + "Processed in this case means that we took the incoming onion, decrypted it, extracted the payload destined for us, and serialised the resulting onion again." + ] + }, + "shared_secret": { + "type": "secret", + "description": [ + "The shared secret used to decrypt the incoming onion.", + "It is shared with the sender that constructed the onion." 
+ ] + } + } + }, + "htlc": { + "type": "object", + "additionalProperties": false, + "required": [ + "short_channel_id", + "id", + "amount_msat", + "cltv_expiry", + "cltv_expiry_relative", + "payment_hash" + ], + "properties": { + "short_channel_id": { + "added": "v0.12.0", + "type": "short_channel_id", + "description": [ + "The channel this HTLC is coming from." + ] + }, + "id": { + "added": "v0.12.0", + "type": "u64", + "description": [ + "The unique HTLC identifier assigned by the channel peer." + ] + }, + "amount_msat": { + "added": "v0.12.0", + "type": "msat", + "description": [ + "The amount received in this HTLC.", + "This amount minus the `forward_msat` amount is the fee that will stay with us." + ] + }, + "cltv_expiry": { + "type": "u32", + "description": [ + "Determines when the HTLC reverts back to the sender.", + "`cltv_expiry` minus `outgoing_cltv_value` should be equal or larger than our `cltv_delta` setting." + ] + }, + "cltv_expiry_relative": { + "type": "u32", + "description": [ + "Hints how much time we still have to claim the HTLC.", + "It is the `cltv_expiry` minus the current blockheight and is passed along mainly to avoid the plugin having to look up the current blockheight." + ] + }, + "payment_hash": { + "type": "hash", + "description": [ + "The payment hash used to identify the payment." + ] + }, + "extra_tlvs": { + "added": "v25.09", + "type": "hex", + "description": [ + "Optional TLV stream attached to the HTLC." + ] + } + } + }, + "forward_to": { + "type": "hash", + "description": [ + "The `channel_id` we intend to forward the HTLC to.", + "Will not be present if the `short_channel_id` was invalid or we were the final destination." 
+ ] + } + } + }, + "response": { + "required": [ + "result" + ], + "additionalProperties": false, + "properties": { + "result": { + "type": "string", + "enum": [ + "continue", + "fail", + "resolve" + ], + "description": [ + "Determines how the HTLC should be handled.", + "", + "`continue` means that the plugin does not want to do anything special and lightningd should continue processing it normally,", + "i.e., resolve the payment if we're the recipient, or attempt to forward it otherwise. Notice that the usual checks such as sufficient fees and CLTV deltas are still enforced.", + "", + "It can also replace the onion.payload by specifying a payload in the response. Note that this is always a TLV-style payload,", + "so unlike onion.payload there is no length prefix (and it must be at least 4 hex digits long). This will be re-parsed;", + "it's useful for removing onion fields which a plugin doesn't want lightningd to consider.", + "", + "It can also specify forward_to in the response, replacing the destination.", + "This usually only makes sense if it wants to choose an alternate channel to the same next peer, but is useful if the payload is also replaced.", + "", + "Also, it can specify extra_tlvs in the response. This will replace the TLV-stream update_add_htlc_tlvs in the update_add_htlc message for forwarded htlcs.", + "", + "If the node is the final destination, the plugin can also replace the amount of the invoice that belongs to the payment_hash by specifying invoice_msat.", + "", + "", + "`fail` will tell lightningd to fail the HTLC with a given hex-encoded `failure_message` (please refer to BOLT #4 for details: `incorrect_or_unknown_payment_details` is the most common).", + "", + "Instead of `failure_message` the response can contain a hex-encoded `failure_onion` that will be used instead (please refer to the BOLT #4 for details).", + "This can be used, for example, if you're writing a bridge between two Lightning Networks. 
Note that lightningd will apply the obfuscation step to the value", + "returned here with its own shared secret (and key type `ammag`) before returning it to the previous hop.", + "", + "", + "`resolve` instructs lightningd to claim the HTLC by providing the preimage matching the `payment_hash` presented in the call.", + "Notice that the plugin must ensure that the `payment_key` really matches the `payment_hash` since lightningd will not check and the wrong value could result in the channel being closed." + ] + }, + "payload": { + "type": "hex", + "description": [ + "Replacement TLV payload to use instead of the original onion payload." + ] + }, + "forward_to": { + "type": "hash", + "description": [ + "Overrides the forwarding destination." + ] + }, + "extra_tlvs": { + "added": "v25.09", + "type": "hex", + "description": [ + "Replacement TLV stream for forwarded HTLCs." + ] + }, + "invoice_msat": { + "added": "v25.12", + "type": "msat", + "description": [ + "Overrides the invoice amount for final destination checks." + ] + }, + "failure_message": { + "type": "hex", + "description": [ + "Failure message to return if result is `fail`." + ] + }, + "failure_onion": { + "type": "hex", + "description": [ + "Serialized failure onion to return if result is `fail`." + ] + }, + "payment_key": { + "type": "secret", + "description": [ + "Preimage used to resolve the HTLC if result is `resolve`." 
+ ] + } + }, + "if": { + "properties": { + "result": { + "enum": [ + "fail" + ] + } + } + }, + "then": { + "anyOf": [ + { + "required": [ + "failure_message" + ] + }, + { + "required": [ + "failure_onion" + ] + } + ] + }, + "else": { + "if": { + "properties": { + "result": { + "enum": [ + "resolve" + ] + } + } + }, + "then": { + "required": [ + "payment_key" + ] + } + } + } +} diff --git a/doc/schemas/hook/invoice_payment.json b/doc/schemas/hook/invoice_payment.json new file mode 100644 index 000000000000..3b9657cebab7 --- /dev/null +++ b/doc/schemas/hook/invoice_payment.json @@ -0,0 +1,114 @@ +{ + "$schema": "../rpc-schema-draft.json", + "added": "pre-v0.10.1", + "type": "object", + "notification": "invoice_payment", + "title": "Hook fired when a payment for an invoice is received", + "description": [ + "The **invoice_payment** hook is called whenever a valid payment for an unpaid invoice has arrived.", + "", + "The hook is deliberately sparse. Plugins can use `listinvoices` to retrieve additional information.", + "", + "The plugin can:", + "- accept the payment by returning {\"result\": \"continue\"}", + "- reject the payment with a generic error using {\"result\": \"reject\"}", + "- reject the payment with a custom BOLT 4 failure message using the `failure_message` field", + "", + "If `failure_message` is provided, the payment will be failed with that message.", + "If result is \"reject\" and no `failure_message` is provided, the payment fails with `incorrect_or_unknown_payment_details`.", + "`failure_message` must NOT be provided when result is \"continue\".", + "", + "Before version 23.11 the msat field was encoded as a string with an 'msat' suffix." 
+ ], + "request": { + "additionalProperties": false, + "required": [ + "payment" + ], + "properties": { + "payment": { + "type": "object", + "additionalProperties": true, + "required": [ + "label", + "preimage", + "msat" + ], + "properties": { + "label": { + "type": "string", + "description": [ + "Unique label identifying the invoice." + ] + }, + "preimage": { + "type": "secret", + "description": [ + "The payment preimage." + ] + }, + "msat": { + "type": "msat", + "description": [ + "Amount paid in millisatoshis." + ] + } + }, + "description": [ + "Basic payment information.", + "Additional TLV-derived fields may be included when running in developer mode." + ] + } + } + }, + "response": { + "additionalProperties": false, + "required": [ + "result" + ], + "properties": { + "result": { + "type": "string", + "enum": [ + "continue", + "reject" + ], + "description": [ + "Controls whether the payment is accepted or rejected.", + "\"continue\" accepts the payment.", + "\"reject\" fails the payment." + ] + }, + "failure_message": { + "type": "hex", + "description": [ + "Optional BOLT 4 failure message.", + "Used to provide a specific failure reason when rejecting the payment." 
+ ] + } + }, + "if": { + "properties": { + "result": { + "type": "string", + "enum": [ + "reject" + ] + } + }, + "required": [ + "result" + ] + }, + "then": { + "properties": { + "failure_message": { + "type": "hex" + } + } + } + }, + "see_also": [ + "lightning-listinvoices(7)" + ] +} diff --git a/doc/schemas/hook/onion_message_recv.json b/doc/schemas/hook/onion_message_recv.json new file mode 100644 index 000000000000..c5c837370405 --- /dev/null +++ b/doc/schemas/hook/onion_message_recv.json @@ -0,0 +1,171 @@ +{ + "$schema": "../rpc-schema-draft.json", + "added": "pre-v0.10.1", + "type": "object", + "notification": "onion_message_recv", + "title": "Hook for receiving unsolicited onion messages", + "description": [ + "The **onion_message_recv** hook is used for unsolicited onion messages (where the source knows that it is sending to this node).", + "", + "Replies MUST be ignored unless they use the correct path (see onion_message_recv_secret).", + "", + "Returning anything other than {\"result\": \"continue\"} prevents further hook processing." + ], + "request": { + "required": [ + "onion_message" + ], + "additionalProperties": false, + "properties": { + "onion_message": { + "type": "object", + "additionalProperties": false, + "properties": { + "reply_blindedpath": { + "type": "object", + "description": [ + "A blinded return path provided by the sender.", + "", + "This allows replying without revealing the recipient's identity or network position.", + "If present, plugins must use this path if they construct a reply onion message." + ], + "additionalProperties": false, + "properties": { + "first_node_id": { + "type": "pubkey", + "description": [ + "The introduction node of the blinded path.", + "This is the first hop to which the reply should be sent.", + "", + "Only one of `first_node_id` or the pair `first_scid` and `first_scid_dir` is present." 
+ ] + }, + "first_scid": { + "type": "short_channel_id", + "description": [ + "Alternative to `first_node_id`: identifies the introduction point via a channel.", + "", + "Only one of `first_node_id` or the pair `first_scid` and `first_scid_dir` is present." + ] + }, + "first_scid_dir": { + "type": "u32", + "description": [ + "Direction of the `short_channel_id` (0 or 1).", + "", + "Only one of `first_node_id` or the pair `first_scid` and `first_scid_dir` is present." + ] + }, + "first_path_key": { + "added": "v24.11", + "type": "pubkey", + "description": [ + "Initial public key used to derive shared secrets with the first hop.", + "", + "This key allows each hop to derive per-hop encryption keys and blinding factors." + ] + }, + "hops": { + "type": "array", + "description": [ + "Sequence of blinded hops forming the path.", + "", + "Each hop contains a blinded node identifier and encrypted routing instructions." + ], + "items": { + "type": "object", + "required": [ + "blinded_node_id", + "encrypted_recipient_data" + ], + "additionalProperties": false, + "properties": { + "blinded_node_id": { + "type": "pubkey", + "description": [ + "Blinded public key representing the hop.", + "", + "The actual node identity is hidden using a blinding factor." + ] + }, + "encrypted_recipient_data": { + "type": "hex", + "description": [ + "Encrypted TLV payload for this hop.", + "", + "Contains instructions (e.g., next hop) encrypted with a shared secret derived from the path key." + ] + } + } + } + } + } + }, + "invoice_request": { + "type": "hex", + "description": [ + "BOLT #12 `invoice_request` payload." + ] + }, + "invoice": { + "type": "hex", + "description": [ + "BOLT #12 `invoice` payload." + ] + }, + "invoice_error": { + "type": "hex", + "description": [ + "BOLT #12 `invoice_error` payload." 
+ ] + }, + "unknown_fields": { + "type": "array", + "description": [ + "Unknown or unparsed TLV fields from the onion message.", + "", + "Plugins may inspect these for experimental or custom extensions." + ], + "items": { + "type": "object", + "required": [ + "number", + "value" + ], + "additionalProperties": false, + "properties": { + "number": { + "type": "u64", + "description": [ + "TLV type number." + ] + }, + "value": { + "type": "hex", + "description": [ + "Raw TLV value." + ] + } + } + } + } + } + } + } + }, + "response": { + "required": [ + "result" + ], + "additionalProperties": false, + "properties": { + "result": { + "type": "string", + "description": [ + "Return \"continue\" to pass the message to the next plugin.", + "Returning any other value stops further hook processing." + ] + } + } + } +} diff --git a/doc/schemas/hook/onion_message_recv_secret.json b/doc/schemas/hook/onion_message_recv_secret.json new file mode 100644 index 000000000000..55ada6b4a45b --- /dev/null +++ b/doc/schemas/hook/onion_message_recv_secret.json @@ -0,0 +1,185 @@ +{ + "$schema": "../rpc-schema-draft.json", + "added": "pre-v0.10.1", + "type": "object", + "notification": "onion_message_recv_secret", + "title": "Hook for receiving onion messages via blinded paths", + "description": [ + "The **onion_message_recv_secret** hook is used when an onion message is received via a blinded path previously provided by this node.", + "", + "The presence of `pathsecret` allows the plugin to authenticate that the message used the intended return path.", + "", + "Replies MUST only be sent when the `pathsecret` matches expectations.", + "", + "Returning anything other than {\"result\": \"continue\"} prevents further hook processing." 
+ ], + "request": { + "required": [ + "onion_message" + ], + "additionalProperties": false, + "properties": { + "onion_message": { + "type": "object", + "required": [ + "pathsecret" + ], + "additionalProperties": false, + "properties": { + "pathsecret": { + "type": "secret", + "description": [ + "Shared secret identifying the blinded path.", + "", + "Used to verify that the sender used a path previously provided by this node.", + "This prevents probing attacks and unauthorized replies." + ] + }, + "reply_blindedpath": { + "type": "object", + "description": [ + "A blinded return path provided by the sender.", + "", + "This allows replying without revealing the recipient's identity or network position.", + "If present, plugins must use this path if they construct a reply onion message." + ], + "additionalProperties": false, + "properties": { + "first_node_id": { + "type": "pubkey", + "description": [ + "The introduction node of the blinded path.", + "This is the first hop to which the reply should be sent.", + "", + "Only one of `first_node_id` or the pair `first_scid` and `first_scid_dir` is present." + ] + }, + "first_scid": { + "type": "short_channel_id", + "description": [ + "Alternative to `first_node_id`: identifies the introduction point via a channel.", + "", + "Only one of `first_node_id` or the pair `first_scid` and `first_scid_dir` is present." + ] + }, + "first_scid_dir": { + "type": "u32", + "description": [ + "Direction of the `short_channel_id` (0 or 1).", + "", + "Only one of `first_node_id` or the pair `first_scid` and `first_scid_dir` is present." + ] + }, + "first_path_key": { + "added": "v24.11", + "type": "pubkey", + "description": [ + "Initial public key used to derive shared secrets with the first hop.", + "", + "This key allows each hop to derive per-hop encryption keys and blinding factors." 
+ ] + }, + "hops": { + "type": "array", + "description": [ + "Sequence of blinded hops forming the path.", + "", + "Each hop contains a blinded node identifier and encrypted routing instructions." + ], + "items": { + "type": "object", + "required": [ + "blinded_node_id", + "encrypted_recipient_data" + ], + "additionalProperties": false, + "properties": { + "blinded_node_id": { + "type": "pubkey", + "description": [ + "Blinded public key representing the hop.", + "", + "The actual node identity is hidden using a blinding factor." + ] + }, + "encrypted_recipient_data": { + "type": "hex", + "description": [ + "Encrypted TLV payload for this hop.", + "", + "Contains instructions (e.g., next hop) encrypted with a shared secret derived from the path key." + ] + } + } + } + } + } + }, + "invoice_request": { + "type": "hex", + "description": [ + "BOLT #12 `invoice_request` payload." + ] + }, + "invoice": { + "type": "hex", + "description": [ + "BOLT #12 `invoice` payload." + ] + }, + "invoice_error": { + "type": "hex", + "description": [ + "BOLT #12 `invoice_error` payload." + ] + }, + "unknown_fields": { + "type": "array", + "description": [ + "Unknown or unparsed TLV fields from the onion message.", + "", + "Plugins may inspect these for experimental or custom extensions." + ], + "items": { + "type": "object", + "required": [ + "number", + "value" + ], + "additionalProperties": false, + "properties": { + "number": { + "type": "u64", + "description": [ + "TLV type number." + ] + }, + "value": { + "type": "hex", + "description": [ + "Raw TLV value." + ] + } + } + } + } + } + } + } + }, + "response": { + "required": [ + "result" + ], + "additionalProperties": false, + "properties": { + "result": { + "type": "string", + "description": [ + "Return \"continue\" to pass the message to the next plugin.", + "Returning any other value stops further hook processing." 
+ ] + } + } + } +} diff --git a/doc/schemas/hook/openchannel.json b/doc/schemas/hook/openchannel.json new file mode 100644 index 000000000000..260614de7a99 --- /dev/null +++ b/doc/schemas/hook/openchannel.json @@ -0,0 +1,226 @@ +{ + "$schema": "../rpc-schema-draft.json", + "added": "pre-v0.10.1", + "type": "object", + "notification": "openchannel", + "title": "Hook fired when a peer proposes opening a channel using v1 protocol", + "description": [ + "The **openchannel** hook is called whenever a remote peer tries to fund a channel using the v1 protocol, after passing basic sanity checks.", + "", + "The payload mirrors the BOLT #2 `open_channel` message and may include additional fields defined by the protocol.", + "", + "Plugins can reject the channel or modify certain parameters before accepting it.", + "", + "This is a chained hook: the first plugin returning a non-\"continue\" result terminates the chain.", + "Mutation fields (`close_to`, `mindepth`, `reserve`) are only applied from the first plugin that sets them.", + "Additional fields may be present in the request as defined by BOLT #2.", + "Providing invalid values (e.g., invalid `close_to` address) will cause lightningd to exit." + ], + "request": { + "additionalProperties": false, + "required": [ + "openchannel" + ], + "properties": { + "openchannel": { + "type": "object", + "additionalProperties": true, + "required": [ + "id", + "funding_msat", + "push_msat", + "dust_limit_msat", + "max_htlc_value_in_flight_msat", + "channel_reserve_msat", + "htlc_minimum_msat", + "feerate_per_kw", + "to_self_delay", + "max_accepted_htlcs", + "channel_flags", + "channel_type" + ], + "properties": { + "id": { + "type": "pubkey", + "description": [ + "The peer's node_id." + ] + }, + "funding_msat": { + "type": "msat", + "description": [ + "Funding amount proposed by the peer." + ] + }, + "push_msat": { + "type": "msat", + "description": [ + "Amount pushed to us at channel open." 
+ ] + }, + "dust_limit_msat": { + "type": "msat", + "description": [ + "Dust limit for outputs." + ] + }, + "max_htlc_value_in_flight_msat": { + "type": "msat", + "description": [ + "Maximum HTLC value allowed in flight." + ] + }, + "channel_reserve_msat": { + "type": "msat", + "description": [ + "Channel reserve required by the peer." + ] + }, + "htlc_minimum_msat": { + "type": "msat", + "description": [ + "Minimum HTLC value." + ] + }, + "feerate_per_kw": { + "type": "u32", + "description": [ + "Feerate in satoshi per kw." + ] + }, + "to_self_delay": { + "type": "u32", + "description": [ + "The number of blocks before they can take their funds if they unilateral close." + ] + }, + "max_accepted_htlcs": { + "type": "u32", + "description": [ + "Maximum number of HTLC's the remote is allowed to offer at once." + ] + }, + "channel_flags": { + "type": "u8", + "description": [ + "Channel flags as defined in BOLT #7." + ] + }, + "shutdown_scriptpubkey": { + "type": "hex", + "description": [ + "Optional shutdown scriptPubKey proposed by the peer." + ] + }, + "channel_type": { + "added": "v25.09", + "type": "object", + "additionalProperties": false, + "required": [ + "bits", + "names" + ], + "properties": { + "bits": { + "type": "array", + "description": [ + "List of feature bit numbers that define the negotiated channel type.", + "Each value represents a feature bit as defined in BOLT #2." + ], + "items": { + "type": "u32", + "description": [ + "Feature bit number." + ] + } + }, + "names": { + "type": "array", + "description": [ + "Human-readable names corresponding to each feature bit.", + "Names are implementation-defined and may evolve over time." + ], + "items": { + "type": "string", + "description": [ + "Name of the feature bit." 
+ ] + } + } + } + } + } + } + } + }, + "response": { + "additionalProperties": false, + "required": [ + "result" + ], + "properties": { + "result": { + "type": "string", + "enum": [ + "continue", + "reject" + ], + "description": [ + "Whether to accept or reject the channel opening request." + ] + }, + "error_message": { + "type": "string", + "description": [ + "Optional error message sent to the peer when rejecting." + ] + }, + "close_to": { + "type": "string", + "description": [ + "Bitcoin address for mutual close output.", + "Must be valid for the current chain or lightningd will exit with an error." + ] + }, + "mindepth": { + "added": "v0.12.0", + "type": "u32", + "description": [ + "`mindepth` is the number of confirmations to require before making the channel usable.", + "Notice that setting this to 0 (zeroconf) or some other low value might expose you to double-spending issues,", + "so only lower this value from the default if you trust the peer not to double-spend, or you reject incoming payments,", + "including forwards, until the funding is confirmed." + ] + }, + "reserve": { + "added": "v22.11", + "type": "sat", + "description": [ + "`reserve` is an absolute value for the amount (in satoshi) in the channel that the peer must keep on their side.", + "This ensures that they always have something to lose, so only lower this below the 1% of funding amount if you trust the peer.", + "The protocol requires this to be larger than the dust limit, hence it will be adjusted to be the dust limit if the specified value is below." 
+ ] + } + }, + "if": { + "properties": { + "result": { + "type": "string", + "enum": [ + "reject" + ] + } + }, + "required": [ + "result" + ] + }, + "then": { + "properties": { + "error_message": { + "type": "string" + } + } + } + } +} diff --git a/doc/schemas/hook/openchannel2.json b/doc/schemas/hook/openchannel2.json new file mode 100644 index 000000000000..4b551102bc56 --- /dev/null +++ b/doc/schemas/hook/openchannel2.json @@ -0,0 +1,289 @@ +{ + "$schema": "../rpc-schema-draft.json", + "added": "pre-v0.10.1", + "type": "object", + "notification": "openchannel2", + "title": "Hook fired when a peer proposes opening a channel using v2 protocol", + "description": [ + "The **openchannel2** hook is called whenever a remote peer tries to fund a channel using the v2 (dual-funding) protocol, after passing basic sanity checks.", + "", + "The payload mirrors the BOLT #2 `open_channel` message and dual-funding extensions.", + "There may be additional fields present depending on negotiated features.", + "", + "`requested_lease_msat`, `lease_blockheight_start`, and `node_blockheight` are only present if the peer requested a funding lease (`option_will_fund`).", + "", + "The plugin can reject the channel, accept it, or contribute funds via a PSBT when accepting.", + "", + "See `plugins/funder.c` for an example of how to use this hook to contribute funds to a channel open.", + "", + "This is a chained hook: multiple plugins may be invoked.", + "Returning any result other than \"continue\" terminates the chain.", + "Only the first plugin that sets mutation fields (e.g. `close_to`) will have them applied.", + "Invalid `close_to` addresses will cause lightningd to exit.", + "The PSBT must be consistent with the funding transaction and respect feerate constraints." 
+ ], + "request": { + "additionalProperties": false, + "required": [ + "openchannel2" + ], + "properties": { + "openchannel2": { + "type": "object", + "additionalProperties": true, + "required": [ + "id", + "channel_id", + "their_funding_msat", + "dust_limit_msat", + "max_htlc_value_in_flight_msat", + "htlc_minimum_msat", + "funding_feerate_per_kw", + "commitment_feerate_per_kw", + "feerate_our_max", + "feerate_our_min", + "to_self_delay", + "max_accepted_htlcs", + "channel_flags", + "locktime", + "channel_max_msat", + "require_confirmed_inputs", + "channel_type" + ], + "properties": { + "id": { + "type": "pubkey", + "description": [ + "The `node_id` of the peer proposing the channel." + ] + }, + "channel_id": { + "type": "hash", + "description": [ + "Temporary `channel_id` assigned for this channel negotiation." + ] + }, + "their_funding_msat": { + "type": "msat", + "description": [ + "Amount contributed by the remote peer to the channel funding transaction." + ] + }, + "dust_limit_msat": { + "type": "msat", + "description": [ + "Minimum output value below which outputs are considered dust." + ] + }, + "max_htlc_value_in_flight_msat": { + "type": "msat", + "description": [ + "Maximum total value of outstanding HTLCs allowed in the channel at any time." + ] + }, + "htlc_minimum_msat": { + "type": "msat", + "description": [ + "Minimum HTLC value the peer will accept." + ] + }, + "funding_feerate_per_kw": { + "type": "u32", + "description": [ + "Feerate (per kw) used for the funding transaction." + ] + }, + "commitment_feerate_per_kw": { + "type": "u32", + "description": [ + "Feerate (per kw) used for commitment transactions." + ] + }, + "feerate_our_max": { + "type": "u32", + "description": [ + "Maximum feerate we are willing to accept for commitment transactions." + ] + }, + "feerate_our_min": { + "type": "u32", + "description": [ + "Minimum feerate we are willing to accept for commitment transactions." 
+ ] + }, + "to_self_delay": { + "type": "u16", + "description": [ + "The number of blocks before they can take their funds if they unilateral close." + ] + }, + "max_accepted_htlcs": { + "type": "u16", + "description": [ + "Maximum number of HTLC's the remote is allowed to offer at once." + ] + }, + "channel_flags": { + "type": "u8", + "description": [ + "Channel flags as defined in BOLT #7." + ] + }, + "locktime": { + "type": "u32", + "description": [ + "Locktime to be used in the funding transaction." + ] + }, + "shutdown_scriptpubkey": { + "type": "hex", + "description": [ + "Optional shutdown scriptPubKey provided by the peer for cooperative close." + ] + }, + "channel_max_msat": { + "type": "msat", + "description": [ + "Maximum capacity this channel is allowed to reach." + ] + }, + "requested_lease_msat": { + "type": "msat", + "description": [ + "Amount of liquidity the peer is requesting us to lease to them.", + "Only present if `option_will_fund` is negotiated." + ] + }, + "lease_blockheight_start": { + "type": "u32", + "description": [ + "Blockheight at which the lease period begins.", + "Only present if `requested_lease_msat` is present." + ] + }, + "node_blockheight": { + "type": "u32", + "description": [ + "Current blockheight of the node.", + "Used in conjunction with lease parameters.", + "Only present if `requested_lease_msat` is present." + ] + }, + "require_confirmed_inputs": { + "added": "v23.02", + "type": "boolean", + "description": [ + "Indicates whether the peer requires all funding inputs to be confirmed." + ] + }, + "channel_type": { + "added": "v25.09", + "type": "object", + "additionalProperties": false, + "required": [ + "bits", + "names" + ], + "properties": { + "bits": { + "type": "array", + "description": [ + "List of feature bit numbers that define the negotiated channel type.", + "Each value represents a feature bit as defined in BOLT #2." + ], + "items": { + "type": "u32", + "description": [ + "Feature bit number." 
+ ] + } + }, + "names": { + "type": "array", + "description": [ + "Human-readable names corresponding to each feature bit.", + "Names are implementation-defined and may evolve over time." + ], + "items": { + "type": "string", + "description": [ + "Name of the feature bit." + ] + } + } + } + } + } + } + } + }, + "response": { + "additionalProperties": false, + "required": [ + "result" + ], + "properties": { + "result": { + "type": "string", + "enum": [ + "continue", + "reject" + ], + "description": [ + "Indicates whether to accept or reject the channel proposal.", + "Returning \"continue\" allows the channel negotiation to proceed.", + "Returning \"reject\" aborts the channel opening." + ] + }, + "error_message": { + "type": "string", + "description": [ + "Error message sent to the peer when rejecting the channel.", + "Only valid if result is \"reject\"." + ] + }, + "close_to": { + "type": "string", + "description": [ + "Bitcoin address to which funds will be sent on cooperative close.", + "Must be valid for the current chain or lightningd will exit with an error." + ] + }, + "psbt": { + "type": "string", + "description": [ + "Partially Signed Bitcoin Transaction contributing inputs and outputs for the funding transaction.", + "Used when the plugin contributes funds to the channel." + ] + }, + "our_funding_msat": { + "type": "msat", + "description": [ + "Amount we contribute to the channel funding.", + "This amount must NOT be included in any outputs in the provided PSBT.", + "Change outputs must be included separately." 
+ ] + } + }, + "if": { + "properties": { + "result": { + "type": "string", + "enum": [ + "reject" + ] + } + }, + "required": [ + "result" + ] + }, + "then": { + "properties": { + "error_message": { + "type": "string" + } + } + } + } +} diff --git a/doc/schemas/hook/openchannel2_changed.json b/doc/schemas/hook/openchannel2_changed.json new file mode 100644 index 000000000000..0d19729ca046 --- /dev/null +++ b/doc/schemas/hook/openchannel2_changed.json @@ -0,0 +1,100 @@ +{ + "$schema": "../rpc-schema-draft.json", + "added": "pre-v0.10.1", + "type": "object", + "notification": "openchannel2_changed", + "title": "Hook for handling updates to the dual-funding PSBT", + "description": [ + "The **openchannel2_changed** hook is called when the peer sends an updated PSBT during dual-funding channel negotiation.", + "", + "This allows plugins to inspect and modify the PSBT before it is sent back to the peer.", + "", + "The negotiation continues until neither side makes further changes to the PSBT, at which point commitment transactions are exchanged.", + "", + "See `plugins/funder.c` for an example of how to use this hook to continue a v2 channel open." + ], + "request": { + "required": [ + "openchannel2_changed" + ], + "additionalProperties": false, + "properties": { + "openchannel2_changed": { + "type": "object", + "additionalProperties": false, + "required": [ + "channel_id", + "psbt", + "require_confirmed_inputs" + ], + "properties": { + "channel_id": { + "type": "hash", + "description": [ + "The temporary channel_id identifying the channel being negotiated." + ] + }, + "psbt": { + "type": "string", + "description": [ + "The current Partially Signed Bitcoin Transaction (PSBT) representing the funding transaction.", + "This PSBT includes contributions from both peers and may be modified." 
+ ] + }, + "require_confirmed_inputs": { + "added": "v23.02", + "type": "boolean", + "description": [ + "Indicates whether the remote peer requires all inputs in the PSBT to be confirmed.", + "If true, the plugin must avoid adding unconfirmed inputs." + ] + } + } + } + } + }, + "response": { + "required": [ + "result", + "psbt" + ], + "additionalProperties": false, + "properties": { + "result": { + "type": "string", + "enum": [ + "continue" + ], + "description": [ + "Must be set to `continue` to proceed with the channel opening negotiation." + ] + }, + "psbt": { + "type": "string", + "description": [ + "The updated PSBT to send back to the peer.", + "If no modifications are made, this should be identical to the input PSBT." + ] + } + } + }, + "examples": [ + { + "request": { + "id": "example:openchannel2_changed#1", + "method": "openchannel2_changed", + "params": { + "openchannel2_changed": { + "channel_id": "252d1b0a1e57895e841...", + "psbt": "cHNidP8BADMCAAAAAQ+yBipSVZr...", + "require_confirmed_inputs": true + } + } + }, + "response": { + "result": "continue", + "psbt": "cHNidP8BADMCAAAAAQ+yBipSVZr..." 
+ } + } + ] +} diff --git a/doc/schemas/hook/openchannel2_sign.json b/doc/schemas/hook/openchannel2_sign.json new file mode 100644 index 000000000000..3917e8b38335 --- /dev/null +++ b/doc/schemas/hook/openchannel2_sign.json @@ -0,0 +1,103 @@ +{ + "$schema": "../rpc-schema-draft.json", + "added": "pre-v0.10.1", + "type": "object", + "notification": "openchannel2_sign", + "title": "Hook for signing the dual-funding PSBT", + "description": [ + "The **openchannel2_sign** hook is called after commitment transactions have been received during dual-funding channel establishment.", + "", + "The plugin is expected to sign any inputs it owns in the provided PSBT and return the updated PSBT.", + "", + "If no inputs need to be signed, the original PSBT should be returned unchanged.", + "", + "Once both sides have provided signatures, the funding transaction will be broadcast.", + "", + "See `plugins/funder.c` for an example of how to use this hook to sign a funding transaction." + ], + "request": { + "required": [ + "openchannel2_sign" + ], + "additionalProperties": false, + "properties": { + "openchannel2_sign": { + "type": "object", + "additionalProperties": false, + "required": [ + "channel_id", + "psbt" + ], + "properties": { + "channel_id": { + "type": "hash", + "description": [ + "The temporary `channel_id` identifying the channel being negotiated." + ] + }, + "psbt": { + "type": "string", + "description": [ + "The Partially Signed Bitcoin Transaction (PSBT) representing the funding transaction.", + "The plugin should add signatures for any inputs it controls." + ] + } + } + } + } + }, + "response": { + "required": [ + "result", + "psbt" + ], + "additionalProperties": false, + "properties": { + "result": { + "type": "string", + "enum": [ + "continue" + ], + "description": [ + "Must be set to `continue` to proceed with channel opening." 
+ ] + }, + "psbt": { + "type": "string", + "description": [ + "The PSBT including any added signatures.", + "If no inputs were signed, this should be identical to the input PSBT." + ] + } + } + }, + "example_notifications": [ + { + "method": "openchannel2_sign", + "params": { + "openchannel2_sign": { + "channel_id": "252d1b0a1e57895e841...", + "psbt": "cHNidP8BADMCAAAAAQ+yBipSVZr..." + } + } + } + ], + "examples": [ + { + "request": { + "id": "example:openchannel2_sign#1", + "method": "openchannel2_sign", + "params": { + "openchannel2_sign": { + "channel_id": "252d1b0a1e57895e841...", + "psbt": "cHNidP8BADMCAAAAAQ+yBipSVZr..." + } + } + }, + "response": { + "result": "continue", + "psbt": "cHNidP8BADMCAAAAAQ+yBipSVZr..." + } + } + ] +} diff --git a/doc/schemas/hook/peer_connected.json b/doc/schemas/hook/peer_connected.json new file mode 100644 index 000000000000..78fd60c74611 --- /dev/null +++ b/doc/schemas/hook/peer_connected.json @@ -0,0 +1,96 @@ +{ + "$schema": "../rpc-schema-draft.json", + "added": "pre-v0.10.1", + "type": "object", + "notification": "peer_connected", + "title": "Hook fired when a peer connects and completes handshake", + "description": [ + "The **peer_connected** hook is called whenever a peer has connected and successfully completed the cryptographic handshake.", + "", + "This is a chained hook: the first plugin returning \"disconnect\" stops further processing.", + "Plugins can call `listpeers` to retrieve additional information about the peer." + ], + "request": { + "additionalProperties": false, + "required": [ + "peer" + ], + "properties": { + "peer": { + "type": "object", + "additionalProperties": false, + "required": [ + "id", + "direction", + "addr", + "features" + ], + "properties": { + "id": { + "type": "pubkey", + "description": [ + "The node_id of the connected peer." + ] + }, + "direction": { + "type": "string", + "enum": [ + "in", + "out" + ], + "description": [ + "Connection direction: `in` for incoming, `out` for outgoing." 
+ ] + }, + "addr": { + "type": "string", + "description": [ + "The `addr` field shows the address that we are connected to ourselves, not the gossiped list of known addresses.", + "In particular this means that the port for incoming connections is an ephemeral port, that may not be available for reconnections." + ] + }, + "remote_addr": { + "type": "string", + "description": [ + "Our own address as reported by the remote peer. Helps with detecting our own IPv4 changes behind NAT." + ] + }, + "features": { + "type": "hex", + "description": [ + "Feature bits advertised by the peer, encoded as hex." + ] + } + } + } + } + }, + "response": { + "additionalProperties": false, + "required": [ + "result" + ], + "properties": { + "result": { + "type": "string", + "enum": [ + "continue", + "disconnect" + ], + "description": [ + "Whether to allow the connection to proceed or disconnect the peer." + ] + }, + "error_message": { + "type": "string", + "description": [ + "Optional error message sent to the peer before disconnection.", + "Only used if result is \"disconnect\"." 
+ ] + } + } + }, + "see_also": [ + "lightning-listpeers(7)" + ] +} diff --git a/doc/schemas/hook/rbf_channel.json b/doc/schemas/hook/rbf_channel.json new file mode 100644 index 000000000000..6aed12464dd2 --- /dev/null +++ b/doc/schemas/hook/rbf_channel.json @@ -0,0 +1,157 @@ +{ + "$schema": "../rpc-schema-draft.json", + "added": "pre-v0.10.1", + "type": "object", + "notification": "rbf_channel", + "title": "Hook for handling RBF channel funding requests", + "description": [ + "The **rbf_channel** hook is called when a peer proposes replacing the funding transaction of an existing channel using Replace-By-Fee (RBF).", + "", + "The plugin can choose to reject or continue the negotiation.", + "", + "If continuing, the plugin may contribute additional inputs and outputs by returning a PSBT and specifying an `our_funding_msat` amount.", + "", + "The `our_funding_msat` value must not be included in any output in the PSBT. Change outputs should be included and calculated using the provided `funding_feerate_per_kw`." + ], + "request": { + "required": [ + "rbf_channel" + ], + "additionalProperties": false, + "properties": { + "rbf_channel": { + "type": "object", + "additionalProperties": false, + "required": [ + "id", + "channel_id", + "their_last_funding_msat", + "their_funding_msat", + "our_last_funding_msat", + "funding_feerate_per_kw", + "feerate_our_max", + "feerate_our_min", + "channel_max_msat", + "locktime", + "require_confirmed_inputs" + ], + "properties": { + "id": { + "type": "pubkey", + "description": [ + "The `node_id` of the peer proposing the RBF." + ] + }, + "channel_id": { + "type": "hash", + "description": [ + "The `channel_id` of the channel being modified." + ] + }, + "their_last_funding_msat": { + "type": "msat", + "description": [ + "The peer's previous contribution to the funding transaction." + ] + }, + "their_funding_msat": { + "type": "msat", + "description": [ + "The peer's proposed new funding contribution." 
+ ] + }, + "our_last_funding_msat": { + "type": "msat", + "description": [ + "Our previous contribution to the funding transaction." + ] + }, + "funding_feerate_per_kw": { + "type": "u32", + "description": [ + "The feerate to use for the updated funding transaction, in satoshis per kw." + ] + }, + "feerate_our_max": { + "type": "u32", + "description": [ + "The maximum feerate we are willing to accept for the funding transaction." + ] + }, + "feerate_our_min": { + "type": "u32", + "description": [ + "The minimum feerate we are willing to accept for the funding transaction." + ] + }, + "channel_max_msat": { + "type": "msat", + "description": [ + "The maximum total channel capacity allowed for this channel." + ] + }, + "locktime": { + "type": "u32", + "description": [ + "The locktime to use for the funding transaction." + ] + }, + "requested_lease_msat": { + "type": "msat", + "description": [ + "If present, the amount of liquidity the peer is requesting us to lease.", + "This field is optional and only included if the peer requested a lease." + ] + }, + "require_confirmed_inputs": { + "added": "v23.02", + "type": "boolean", + "description": [ + "Indicates whether the remote peer requires all inputs in the PSBT to be confirmed.", + "If true, the plugin must avoid adding unconfirmed inputs." + ] + } + } + } + } + }, + "response": { + "required": [ + "result" + ], + "additionalProperties": false, + "properties": { + "result": { + "type": "string", + "enum": [ + "continue", + "reject" + ], + "description": [ + "Whether to accept or reject the RBF proposal." + ] + }, + "psbt": { + "type": "string", + "description": [ + "A PSBT containing additional inputs and outputs to contribute to the funding transaction.", + "Only valid if `result` is `continue`." + ] + }, + "our_funding_msat": { + "type": "msat", + "description": [ + "The amount we are contributing to the new funding transaction.", + "Must not be included in any output in the PSBT." 
+          ]
+        },
+        "error_message": {
+          "type": "string",
+          "description": [
+            "An error message explaining the rejection.",
+            "Only used if `result` is `reject` and will be sent to the peer."
+          ]
+        }
+      }
+    }
+  }
+}
diff --git a/doc/schemas/hook/recover.json b/doc/schemas/hook/recover.json
new file mode 100644
index 000000000000..f9f246c7bb0f
--- /dev/null
+++ b/doc/schemas/hook/recover.json
@@ -0,0 +1,47 @@
+{
+  "$schema": "../rpc-schema-draft.json",
+  "added": "v23.08",
+  "type": "object",
+  "notification": "recover",
+  "title": "Hook fired when node starts in recovery mode",
+  "description": [
+    "The **recover** hook is called whenever the node is started using the --recover flag.",
+    "It provides the codex32 secret used to derive the HSM secret.",
+    "Plugins can use this to reconnect to peers who keep your peer storage backups with them and recover state or funds.",
+    "",
+    "This hook is informational and does not allow altering execution flow.",
+    "Plugins are expected to perform recovery-related side effects such as reconnecting to peers."
+  ],
+  "request": {
+    "additionalProperties": false,
+    "required": [
+      "codex32"
+    ],
+    "properties": {
+      "codex32": {
+        "type": "string",
+        "description": [
+          "The codex32-encoded secret provided via --recover.",
+          "Used to reconstruct the node's HSM secret."
+        ]
+      }
+    }
+  },
+  "response": {
+    "additionalProperties": false,
+    "required": [
+      "result"
+    ],
+    "properties": {
+      "result": {
+        "type": "string",
+        "enum": [
+          "continue"
+        ],
+        "description": [
+          "Returning \"continue\" resumes normal execution."
+ ] + } + } + } +} diff --git a/doc/schemas/hook/rpc_command.json b/doc/schemas/hook/rpc_command.json new file mode 100644 index 000000000000..4bbf8ec3dffc --- /dev/null +++ b/doc/schemas/hook/rpc_command.json @@ -0,0 +1,226 @@ +{ + "$schema": "../rpc-schema-draft.json", + "added": "pre-v0.10.1", + "type": "object", + "notification": "rpc_command", + "title": "Hook for intercepting and modifying RPC commands", + "description": [ + "The **rpc_command** hook allows a plugin to take over any RPC command.", + "", + "You can optionally specify a `filters` array, containing the command names you want to intercept: without this, all commands will be sent to this hook. (added in v25.12)", + "", + "The plugin receives the full JSON-RPC request and may choose to continue, replace the request, or return a custom result or error.", + "", + "This is a chained hook: only the first plugin that modifies the request or response will take effect. Other plugins will then be ignored and a warning will be logged." + ], + "request": { + "required": [ + "rpc_command" + ], + "additionalProperties": false, + "properties": { + "rpc_command": { + "type": "object", + "description": [ + "The original JSON-RPC request object." + ], + "additionalProperties": true, + "required": [ + "id", + "method", + "params" + ], + "properties": { + "id": { + "oneOf": [ + { + "type": "string" + }, + { + "type": "number" + }, + { + "type": "null" + } + ], + "description": [ + "The JSON-RPC request id." + ] + }, + "method": { + "type": "string", + "description": [ + "The RPC method name." + ] + }, + "params": { + "oneOf": [ + { + "type": "object", + "additionalProperties": true + }, + { + "type": "array", + "items": {} + } + ], + "description": [ + "The parameters passed to the RPC method." 
+ ] + } + } + } + } + }, + "response": { + "additionalProperties": false, + "properties": { + "result": { + "type": "string", + "enum": [ + "continue" + ], + "description": [ + "Indicates that lightningd should continue processing the RPC command normally." + ] + }, + "replace": { + "type": "object", + "description": [ + "Replaces the original JSON-RPC request with a new one." + ], + "additionalProperties": true, + "required": [ + "jsonrpc", + "id", + "method", + "params" + ], + "properties": { + "jsonrpc": { + "type": "string", + "enum": [ + "2.0" + ], + "description": [ + "The JSON-RPC version." + ] + }, + "id": { + "oneOf": [ + { + "type": "string" + }, + { + "type": "number" + }, + { + "type": "null" + } + ], + "description": [ + "The JSON-RPC request id." + ] + }, + "method": { + "type": "string", + "description": [ + "The RPC method name." + ] + }, + "params": { + "oneOf": [ + { + "type": "object", + "additionalProperties": true + }, + { + "type": "array", + "items": {} + } + ], + "description": [ + "The parameters passed to the RPC method." + ] + } + } + }, + "return": { + "type": "object", + "description": [ + "Returns a custom JSON-RPC response to the caller." + ], + "additionalProperties": false, + "properties": { + "result": { + "type": "object", + "description": [ + "Custom result object to return to the caller." + ] + }, + "error": { + "type": "object", + "description": [ + "Custom error object to return to the caller." + ], + "additionalProperties": true, + "properties": { + "code": { + "type": "integer", + "description": [ + "JSON-RPC error code." + ] + }, + "message": { + "type": "string", + "description": [ + "Human-readable error message." 
+ ] + } + }, + "required": [ + "code", + "message" + ] + } + } + } + }, + "oneOf": [ + { + "required": [ + "result" + ] + }, + { + "required": [ + "replace" + ] + }, + { + "required": [ + "return" + ], + "properties": { + "return": { + "required": [ + "result" + ] + } + } + }, + { + "required": [ + "return" + ], + "properties": { + "return": { + "required": [ + "error" + ] + } + } + } + ] + } +} From 04f508d3403e87f374cda7fbb57c507242943153 Mon Sep 17 00:00:00 2001 From: daywalker90 <8257956+daywalker90@users.noreply.github.com> Date: Mon, 13 Apr 2026 16:50:45 +0200 Subject: [PATCH 2/6] crates: add JsonObjectOrArray and JsonScalar types for raw json rpc parameters We use it later for the `rpc_command` hook Changelog-None --- cln-grpc/Cargo.toml | 1 - cln-grpc/proto/primitives.proto | 37 ++++++++ cln-grpc/src/pb.rs | 146 +++++++++++++++++++++++++++++++- cln-rpc/src/primitives.rs | 16 ++++ 4 files changed, 198 insertions(+), 2 deletions(-) diff --git a/cln-grpc/Cargo.toml b/cln-grpc/Cargo.toml index 133106079c77..c43039441244 100644 --- a/cln-grpc/Cargo.toml +++ b/cln-grpc/Cargo.toml @@ -27,7 +27,6 @@ tokio = { version = "1.36.0", features = ["sync"] } futures-core = "0.3.30" tokio-util = "0.7.10" -[dev-dependencies] serde_json = "1.0.72" [build-dependencies] diff --git a/cln-grpc/proto/primitives.proto b/cln-grpc/proto/primitives.proto index fa62fdcfd732..b91d52926633 100644 --- a/cln-grpc/proto/primitives.proto +++ b/cln-grpc/proto/primitives.proto @@ -147,3 +147,40 @@ enum PluginSubcommand { STARTDIR = 3; LIST = 4; } + +message JsonObjectOrArray { + oneof structure { + JsonObject object = 1; + JsonArray array = 2; + } +} + +message JsonObject { + map fields = 1; +} + +message JsonArray { + repeated JsonValue values = 1; +} + +message JsonValue { + oneof kind { + bool bool_value = 1; + int64 int_value = 2; + uint64 uint_value = 3; + double double_value = 4; + string string_value = 5; + JsonArray array = 6; + JsonObject object = 7; + } +} + +message JsonScalar { + 
oneof scalar { + bool bool_value = 1; + int64 int_value = 2; + uint64 uint_value = 3; + double double_value = 4; + string string_value = 5; + } +} diff --git a/cln-grpc/src/pb.rs b/cln-grpc/src/pb.rs index b518703bc37a..a1935cf6b1a4 100644 --- a/cln-grpc/src/pb.rs +++ b/cln-grpc/src/pb.rs @@ -10,7 +10,8 @@ mod convert { use cln_rpc::primitives::{ Amount as JAmount, AmountOrAll as JAmountOrAll, AmountOrAny as JAmountOrAny, - Feerate as JFeerate, Outpoint as JOutpoint, OutputDesc as JOutputDesc, + Feerate as JFeerate, JsonObjectOrArray as JJsonObjectOrArray, JsonScalar as JJsonScalar, + Outpoint as JOutpoint, OutputDesc as JOutputDesc, }; impl From for Amount { @@ -281,6 +282,149 @@ mod convert { } } + impl From for JsonValue { + fn from(v: serde_json::Value) -> Self { + let kind = match v { + serde_json::Value::Null => None, + serde_json::Value::Bool(b) => Some(json_value::Kind::BoolValue(b)), + serde_json::Value::Number(n) => { + if let Some(u) = n.as_u64() { + Some(json_value::Kind::UintValue(u)) + } else if let Some(i) = n.as_i64() { + Some(json_value::Kind::IntValue(i)) + } else if let Some(f) = n.as_f64() { + Some(json_value::Kind::DoubleValue(f)) + } else { + let error = format!("Failed to parse number: `{}`", n); + println!( + "{}", + serde_json::json!({"jsonrpc": "2.0", + "method": "log", + "params": {"level":"warn", "message": error}}) + ); + std::process::exit(1); + } + } + serde_json::Value::String(s) => Some(json_value::Kind::StringValue(s)), + serde_json::Value::Array(arr) => Some(json_value::Kind::Array(JsonArray { + values: arr.into_iter().map(JsonValue::from).collect(), + })), + serde_json::Value::Object(obj) => Some(json_value::Kind::Object(JsonObject { + fields: obj + .into_iter() + .map(|(k, v)| (k, JsonValue::from(v))) + .collect(), + })), + }; + JsonValue { kind } + } + } + + impl From for JsonObjectOrArray { + fn from(v: JJsonObjectOrArray) -> Self { + let structure = match v { + JJsonObjectOrArray::Array(arr) => { + 
Some(json_object_or_array::Structure::Array(JsonArray { + values: arr.into_iter().map(JsonValue::from).collect(), + })) + } + JJsonObjectOrArray::Object(obj) => { + Some(json_object_or_array::Structure::Object(JsonObject { + fields: obj + .into_iter() + .map(|(k, v)| (k, JsonValue::from(v))) + .collect(), + })) + } + }; + JsonObjectOrArray { structure } + } + } + + impl From for serde_json::Value { + fn from(v: JsonValue) -> Self { + match v.kind { + None => serde_json::Value::Null, + Some(json_value::Kind::BoolValue(b)) => serde_json::Value::Bool(b), + Some(json_value::Kind::UintValue(u)) => serde_json::Value::Number(u.into()), + Some(json_value::Kind::IntValue(i)) => serde_json::Value::Number(i.into()), + Some(json_value::Kind::DoubleValue(f)) => match serde_json::Number::from_f64(f) { + Some(num) => serde_json::Value::Number(num), + None => { + let error = format!("Failed to parse number: `{}`", f); + println!( + "{}", + serde_json::json!({"jsonrpc": "2.0", + "method": "log", + "params": {"level":"warn", "message": error}}) + ); + std::process::exit(1); + } + }, + Some(json_value::Kind::StringValue(s)) => serde_json::Value::String(s), + Some(json_value::Kind::Array(arr)) => serde_json::Value::Array( + arr.values + .into_iter() + .map(serde_json::Value::from) + .collect(), + ), + Some(json_value::Kind::Object(obj)) => serde_json::Value::Object( + obj.fields + .into_iter() + .map(|(k, v)| (k, serde_json::Value::from(v))) + .collect(), + ), + } + } + } + + impl From for JJsonObjectOrArray { + fn from(v: JsonObjectOrArray) -> Self { + match v.structure { + Some(json_object_or_array::Structure::Array(arr)) => JJsonObjectOrArray::Array( + arr.values + .into_iter() + .map(serde_json::Value::from) + .collect(), + ), + Some(json_object_or_array::Structure::Object(obj)) => JJsonObjectOrArray::Object( + obj.fields + .into_iter() + .map(|(k, v)| (k, serde_json::Value::from(v))) + .collect(), + ), + None => JJsonObjectOrArray::Array(vec![]), // or handle as error + } + } + } 
+ + impl From for JJsonScalar { + fn from(v: JsonScalar) -> Self { + match v.scalar { + None => JJsonScalar::Null, + Some(json_scalar::Scalar::BoolValue(b)) => JJsonScalar::Bool(b), + Some(json_scalar::Scalar::IntValue(i)) => JJsonScalar::Number(i.into()), + Some(json_scalar::Scalar::DoubleValue(d)) => { + match serde_json::Number::from_f64(d) { + Some(num) => JJsonScalar::Number(num), + None => { + let error = format!("Failed to parse number: `{}`", d); + println!( + "{}", + serde_json::json!({"jsonrpc": "2.0", + "method": "log", + "params": {"level":"warn", "message": error}}) + ); + std::process::exit(1); + } + } + } + Some(json_scalar::Scalar::UintValue(u)) => JJsonScalar::Number(u.into()), + Some(json_scalar::Scalar::StringValue(s)) => JJsonScalar::String(s), + } + } + } + #[cfg(test)] mod test { use super::*; diff --git a/cln-rpc/src/primitives.rs b/cln-rpc/src/primitives.rs index c99c94d7d39c..adaf75393c43 100644 --- a/cln-rpc/src/primitives.rs +++ b/cln-rpc/src/primitives.rs @@ -1173,3 +1173,19 @@ impl Serialize for TlvStream { map.end() } } + +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] +#[serde(untagged)] +pub enum JsonObjectOrArray { + Object(serde_json::Map), + Array(Vec), +} + +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] +#[serde(untagged)] +pub enum JsonScalar { + String(String), + Number(serde_json::Number), + Bool(bool), + Null, +} From b4e43799f841a2e97863438a0a03af5f1484cb66 Mon Sep 17 00:00:00 2001 From: daywalker90 <8257956+daywalker90@users.noreply.github.com> Date: Mon, 13 Apr 2026 16:52:52 +0200 Subject: [PATCH 3/6] msggen: add generation for hook schemas hooks are not for grpc, so we create a new object "rpc-only-enum-map" in .msggen.json and don't generate any grpc related code Changelog-None --- .msggen.json | 1000 +++++++- cln-rpc/Makefile | 2 +- cln-rpc/src/hooks.rs | 978 +++++++ cln-rpc/src/lib.rs | 292 +-- cln-rpc/src/test.rs | 1093 ++++++++ contrib/msggen/msggen/__main__.py | 6 +- 
contrib/msggen/msggen/gen/grpc/convert.py | 7 +- contrib/msggen/msggen/gen/grpc/unconvert.py | 7 +- contrib/msggen/msggen/gen/grpc/util.py | 2 + contrib/msggen/msggen/gen/grpc2py.py | 12 +- contrib/msggen/msggen/gen/rpc/__init__.py | 3 +- contrib/msggen/msggen/gen/rpc/hook.py | 133 + contrib/msggen/msggen/gen/rpc/rust.py | 17 +- contrib/msggen/msggen/model.py | 32 +- contrib/msggen/msggen/patch.py | 8 + contrib/msggen/msggen/schema.json | 2265 +++++++++++++++++ contrib/msggen/msggen/utils/utils.py | 73 +- .../pyln/grpc/primitives_pb2.py | 40 +- contrib/pyln-testing/pyln/testing/fixtures.py | 326 ++- 19 files changed, 5789 insertions(+), 507 deletions(-) create mode 100644 cln-rpc/src/hooks.rs create mode 100644 cln-rpc/src/test.rs create mode 100644 contrib/msggen/msggen/gen/rpc/hook.py diff --git a/.msggen.json b/.msggen.json index 12d622e1812a..33f9655a2c6c 100644 --- a/.msggen.json +++ b/.msggen.json @@ -14921,6 +14921,30 @@ "added": "pre-v0.10.1", "deprecated": "v25.09" }, + "commitment_revocation": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "commitment_revocation.channel_id": { + "added": "v0.10.2", + "deprecated": null + }, + "commitment_revocation.commitment_txid": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "commitment_revocation.commitnum": { + "added": "v0.10.2", + "deprecated": null + }, + "commitment_revocation.penalty_tx": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "commitment_revocation.result": { + "added": "pre-v0.10.1", + "deprecated": null + }, "connect": { "added": "pre-v0.10.1", "deprecated": null @@ -14965,6 +14989,38 @@ "added": "v24.02", "deprecated": null }, + "custommsg_hook": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "custommsg_hook.payload": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "custommsg_hook.peer_id": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "custommsg_hook.result": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "db_write": { + "added": 
"pre-v0.10.1", + "deprecated": null + }, + "db_write.data_version": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "db_write.result": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "db_write.writes[]": { + "added": "pre-v0.10.1", + "deprecated": null + }, "deprecated_oneshot": { "added": "v24.02", "deprecated": null @@ -15021,232 +15077,780 @@ "added": "pre-v0.10.1", "deprecated": null }, - "forward_event.resolved_time": { + "forward_event.resolved_time": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "forward_event.status": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "forward_event.style": { + "added": "v23.11", + "deprecated": null + }, + "htlc_accepted": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "htlc_accepted.extra_tlvs": { + "added": "v25.09", + "deprecated": null + }, + "htlc_accepted.failure_message": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "htlc_accepted.failure_onion": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "htlc_accepted.forward_to": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "htlc_accepted.htlc": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "htlc_accepted.htlc.amount_msat": { + "added": "v0.12.0", + "deprecated": null + }, + "htlc_accepted.htlc.cltv_expiry": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "htlc_accepted.htlc.cltv_expiry_relative": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "htlc_accepted.htlc.extra_tlvs": { + "added": "v25.09", + "deprecated": null + }, + "htlc_accepted.htlc.id": { + "added": "v0.12.0", + "deprecated": null + }, + "htlc_accepted.htlc.payment_hash": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "htlc_accepted.htlc.short_channel_id": { + "added": "v0.12.0", + "deprecated": null + }, + "htlc_accepted.invoice_msat": { + "added": "v25.12", + "deprecated": null + }, + "htlc_accepted.onion": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "htlc_accepted.onion.forward_msat": { + 
"added": "pre-v0.10.1", + "deprecated": null + }, + "htlc_accepted.onion.next_node_id": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "htlc_accepted.onion.next_onion": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "htlc_accepted.onion.outgoing_cltv_value": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "htlc_accepted.onion.payload": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "htlc_accepted.onion.payment_metadata": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "htlc_accepted.onion.payment_secret": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "htlc_accepted.onion.shared_secret": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "htlc_accepted.onion.short_channel_id": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "htlc_accepted.onion.total_msat": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "htlc_accepted.onion.type": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "htlc_accepted.payload": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "htlc_accepted.payment_key": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "htlc_accepted.peer_id": { + "added": "v25.12", + "deprecated": null + }, + "htlc_accepted.result": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "invoice_creation": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "invoice_creation.label": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "invoice_creation.msat": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "invoice_creation.preimage": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "invoice_payment": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "invoice_payment.label": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "invoice_payment.msat": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "invoice_payment.outpoint": { + "added": "v23.11", + "deprecated": null + }, + "invoice_payment.preimage": { + "added": "pre-v0.10.1", + 
"deprecated": null + }, + "invoice_payment_hook": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "invoice_payment_hook.failure_message": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "invoice_payment_hook.payment": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "invoice_payment_hook.payment.label": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "invoice_payment_hook.payment.msat": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "invoice_payment_hook.payment.preimage": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "invoice_payment_hook.result": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "log": { + "added": "v24.02", + "deprecated": null + }, + "log.level": { + "added": "v24.02", + "deprecated": null + }, + "log.log": { + "added": "v24.02", + "deprecated": null + }, + "log.source": { + "added": "v24.02", + "deprecated": null + }, + "log.time": { + "added": "v24.02", + "deprecated": null + }, + "log.timestamp": { + "added": "v24.02", + "deprecated": null + }, + "multifundchannel": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "multifundchannel.channel_ids[]": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "multifundchannel.channel_ids[].channel_id": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "multifundchannel.channel_ids[].channel_type": { + "added": "v24.02", + "deprecated": null + }, + "multifundchannel.channel_ids[].channel_type.bits[]": { + "added": "v24.02", + "deprecated": null + }, + "multifundchannel.channel_ids[].channel_type.names[]": { + "added": "v24.02", + "deprecated": null + }, + "multifundchannel.channel_ids[].close_to": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "multifundchannel.channel_ids[].id": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "multifundchannel.channel_ids[].outnum": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "multifundchannel.commitment_feerate": { + "added": "pre-v0.10.1", + "deprecated": null + }, 
+ "multifundchannel.destinations[]": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "multifundchannel.destinations[].amount": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "multifundchannel.destinations[].announce": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "multifundchannel.destinations[].close_to": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "multifundchannel.destinations[].compact_lease": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "multifundchannel.destinations[].id": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "multifundchannel.destinations[].mindepth": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "multifundchannel.destinations[].push_msat": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "multifundchannel.destinations[].request_amt": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "multifundchannel.destinations[].reserve": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "multifundchannel.failed[]": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "multifundchannel.failed[].error": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "multifundchannel.failed[].error.code": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "multifundchannel.failed[].error.message": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "multifundchannel.failed[].id": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "multifundchannel.failed[].method": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "multifundchannel.feerate": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "multifundchannel.minchannels": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "multifundchannel.minconf": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "multifundchannel.tx": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "multifundchannel.txid": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "multifundchannel.utxos[]": { + 
"added": "pre-v0.10.1", + "deprecated": null + }, + "onion_message_recv": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "onion_message_recv.onion_message": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "onion_message_recv.onion_message.invoice": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "onion_message_recv.onion_message.invoice_error": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "onion_message_recv.onion_message.invoice_request": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "onion_message_recv.onion_message.reply_blindedpath": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "onion_message_recv.onion_message.reply_blindedpath.first_node_id": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "onion_message_recv.onion_message.reply_blindedpath.first_path_key": { + "added": "v24.11", + "deprecated": null + }, + "onion_message_recv.onion_message.reply_blindedpath.first_scid": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "onion_message_recv.onion_message.reply_blindedpath.first_scid_dir": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "onion_message_recv.onion_message.reply_blindedpath.hops[]": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "onion_message_recv.onion_message.reply_blindedpath.hops[].blinded_node_id": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "onion_message_recv.onion_message.reply_blindedpath.hops[].encrypted_recipient_data": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "onion_message_recv.onion_message.unknown_fields[]": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "onion_message_recv.onion_message.unknown_fields[].number": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "onion_message_recv.onion_message.unknown_fields[].value": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "onion_message_recv.result": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "onion_message_recv_secret": { + "added": 
"pre-v0.10.1", + "deprecated": null + }, + "onion_message_recv_secret.onion_message": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "onion_message_recv_secret.onion_message.invoice": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "onion_message_recv_secret.onion_message.invoice_error": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "onion_message_recv_secret.onion_message.invoice_request": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "onion_message_recv_secret.onion_message.pathsecret": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "onion_message_recv_secret.onion_message.reply_blindedpath": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "onion_message_recv_secret.onion_message.reply_blindedpath.first_node_id": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "onion_message_recv_secret.onion_message.reply_blindedpath.first_path_key": { + "added": "v24.11", + "deprecated": null + }, + "onion_message_recv_secret.onion_message.reply_blindedpath.first_scid": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "onion_message_recv_secret.onion_message.reply_blindedpath.first_scid_dir": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "onion_message_recv_secret.onion_message.reply_blindedpath.hops[]": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "onion_message_recv_secret.onion_message.reply_blindedpath.hops[].blinded_node_id": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "onion_message_recv_secret.onion_message.reply_blindedpath.hops[].encrypted_recipient_data": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "onion_message_recv_secret.onion_message.unknown_fields[]": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "onion_message_recv_secret.onion_message.unknown_fields[].number": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "onion_message_recv_secret.onion_message.unknown_fields[].value": { + "added": "pre-v0.10.1", + "deprecated": null + }, + 
"onion_message_recv_secret.result": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "onionmessage_forward_fail": { + "added": "v24.11", + "deprecated": null + }, + "onionmessage_forward_fail.incoming": { + "added": "v24.11", + "deprecated": null + }, + "onionmessage_forward_fail.next_node_id": { + "added": "v24.11", + "deprecated": null + }, + "onionmessage_forward_fail.next_short_channel_id_dir": { + "added": "v24.11", + "deprecated": null + }, + "onionmessage_forward_fail.outgoing": { + "added": "v24.11", + "deprecated": null + }, + "onionmessage_forward_fail.path_key": { + "added": "v24.11", + "deprecated": null + }, + "onionmessage_forward_fail.source": { + "added": "v24.11", + "deprecated": null + }, + "openchannel": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "openchannel.close_to": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "openchannel.error_message": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "openchannel.mindepth": { + "added": "v0.12.0", + "deprecated": null + }, + "openchannel.openchannel": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "openchannel.openchannel.channel_flags": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "openchannel.openchannel.channel_reserve_msat": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "openchannel.openchannel.channel_type": { + "added": "v25.09", + "deprecated": null + }, + "openchannel.openchannel.channel_type.bits[]": { "added": "pre-v0.10.1", "deprecated": null }, - "forward_event.status": { + "openchannel.openchannel.channel_type.names[]": { "added": "pre-v0.10.1", "deprecated": null }, - "forward_event.style": { - "added": "v23.11", + "openchannel.openchannel.dust_limit_msat": { + "added": "pre-v0.10.1", "deprecated": null }, - "invoice_creation": { + "openchannel.openchannel.feerate_per_kw": { "added": "pre-v0.10.1", "deprecated": null }, - "invoice_creation.label": { + "openchannel.openchannel.funding_msat": { "added": "pre-v0.10.1", 
"deprecated": null }, - "invoice_creation.msat": { + "openchannel.openchannel.htlc_minimum_msat": { "added": "pre-v0.10.1", "deprecated": null }, - "invoice_creation.preimage": { + "openchannel.openchannel.id": { "added": "pre-v0.10.1", "deprecated": null }, - "invoice_payment": { + "openchannel.openchannel.max_accepted_htlcs": { "added": "pre-v0.10.1", "deprecated": null }, - "invoice_payment.label": { + "openchannel.openchannel.max_htlc_value_in_flight_msat": { "added": "pre-v0.10.1", "deprecated": null }, - "invoice_payment.msat": { + "openchannel.openchannel.push_msat": { "added": "pre-v0.10.1", "deprecated": null }, - "invoice_payment.outpoint": { - "added": "v23.11", + "openchannel.openchannel.shutdown_scriptpubkey": { + "added": "pre-v0.10.1", "deprecated": null }, - "invoice_payment.preimage": { + "openchannel.openchannel.to_self_delay": { "added": "pre-v0.10.1", "deprecated": null }, - "log": { - "added": "v24.02", + "openchannel.reserve": { + "added": "v22.11", "deprecated": null }, - "log.level": { - "added": "v24.02", + "openchannel.result": { + "added": "pre-v0.10.1", "deprecated": null }, - "log.log": { - "added": "v24.02", + "openchannel2": { + "added": "pre-v0.10.1", "deprecated": null }, - "log.source": { - "added": "v24.02", + "openchannel2.close_to": { + "added": "pre-v0.10.1", "deprecated": null }, - "log.time": { - "added": "v24.02", + "openchannel2.error_message": { + "added": "pre-v0.10.1", "deprecated": null }, - "log.timestamp": { - "added": "v24.02", + "openchannel2.openchannel2": { + "added": "pre-v0.10.1", "deprecated": null }, - "multifundchannel": { + "openchannel2.openchannel2.channel_flags": { "added": "pre-v0.10.1", "deprecated": null }, - "multifundchannel.channel_ids[]": { + "openchannel2.openchannel2.channel_id": { "added": "pre-v0.10.1", "deprecated": null }, - "multifundchannel.channel_ids[].channel_id": { + "openchannel2.openchannel2.channel_max_msat": { "added": "pre-v0.10.1", "deprecated": null }, - 
"multifundchannel.channel_ids[].channel_type": { - "added": "v24.02", + "openchannel2.openchannel2.channel_type": { + "added": "v25.09", "deprecated": null }, - "multifundchannel.channel_ids[].channel_type.bits[]": { - "added": "v24.02", + "openchannel2.openchannel2.channel_type.bits[]": { + "added": "pre-v0.10.1", "deprecated": null }, - "multifundchannel.channel_ids[].channel_type.names[]": { - "added": "v24.02", + "openchannel2.openchannel2.channel_type.names[]": { + "added": "pre-v0.10.1", "deprecated": null }, - "multifundchannel.channel_ids[].close_to": { + "openchannel2.openchannel2.commitment_feerate_per_kw": { "added": "pre-v0.10.1", "deprecated": null }, - "multifundchannel.channel_ids[].id": { + "openchannel2.openchannel2.dust_limit_msat": { "added": "pre-v0.10.1", "deprecated": null }, - "multifundchannel.channel_ids[].outnum": { + "openchannel2.openchannel2.feerate_our_max": { "added": "pre-v0.10.1", "deprecated": null }, - "multifundchannel.commitment_feerate": { + "openchannel2.openchannel2.feerate_our_min": { "added": "pre-v0.10.1", "deprecated": null }, - "multifundchannel.destinations[]": { + "openchannel2.openchannel2.funding_feerate_per_kw": { "added": "pre-v0.10.1", "deprecated": null }, - "multifundchannel.destinations[].amount": { + "openchannel2.openchannel2.htlc_minimum_msat": { "added": "pre-v0.10.1", "deprecated": null }, - "multifundchannel.destinations[].announce": { + "openchannel2.openchannel2.id": { "added": "pre-v0.10.1", "deprecated": null }, - "multifundchannel.destinations[].close_to": { + "openchannel2.openchannel2.lease_blockheight_start": { "added": "pre-v0.10.1", "deprecated": null }, - "multifundchannel.destinations[].compact_lease": { + "openchannel2.openchannel2.locktime": { "added": "pre-v0.10.1", "deprecated": null }, - "multifundchannel.destinations[].id": { + "openchannel2.openchannel2.max_accepted_htlcs": { "added": "pre-v0.10.1", "deprecated": null }, - "multifundchannel.destinations[].mindepth": { + 
"openchannel2.openchannel2.max_htlc_value_in_flight_msat": { "added": "pre-v0.10.1", "deprecated": null }, - "multifundchannel.destinations[].push_msat": { + "openchannel2.openchannel2.node_blockheight": { "added": "pre-v0.10.1", "deprecated": null }, - "multifundchannel.destinations[].request_amt": { + "openchannel2.openchannel2.requested_lease_msat": { "added": "pre-v0.10.1", "deprecated": null }, - "multifundchannel.destinations[].reserve": { - "added": "pre-v0.10.1", + "openchannel2.openchannel2.require_confirmed_inputs": { + "added": "v23.02", "deprecated": null }, - "multifundchannel.failed[]": { + "openchannel2.openchannel2.shutdown_scriptpubkey": { "added": "pre-v0.10.1", "deprecated": null }, - "multifundchannel.failed[].error": { + "openchannel2.openchannel2.their_funding_msat": { "added": "pre-v0.10.1", "deprecated": null }, - "multifundchannel.failed[].error.code": { + "openchannel2.openchannel2.to_self_delay": { "added": "pre-v0.10.1", "deprecated": null }, - "multifundchannel.failed[].error.message": { + "openchannel2.our_funding_msat": { "added": "pre-v0.10.1", "deprecated": null }, - "multifundchannel.failed[].id": { + "openchannel2.psbt": { "added": "pre-v0.10.1", "deprecated": null }, - "multifundchannel.failed[].method": { + "openchannel2.result": { "added": "pre-v0.10.1", "deprecated": null }, - "multifundchannel.feerate": { + "openchannel2_changed": { "added": "pre-v0.10.1", "deprecated": null }, - "multifundchannel.minchannels": { + "openchannel2_changed.openchannel2_changed": { "added": "pre-v0.10.1", "deprecated": null }, - "multifundchannel.minconf": { + "openchannel2_changed.openchannel2_changed.channel_id": { "added": "pre-v0.10.1", "deprecated": null }, - "multifundchannel.tx": { + "openchannel2_changed.openchannel2_changed.psbt": { "added": "pre-v0.10.1", "deprecated": null }, - "multifundchannel.txid": { - "added": "pre-v0.10.1", + "openchannel2_changed.openchannel2_changed.require_confirmed_inputs": { + "added": "v23.02", 
"deprecated": null }, - "multifundchannel.utxos[]": { + "openchannel2_changed.psbt": { "added": "pre-v0.10.1", "deprecated": null }, - "onionmessage_forward_fail": { - "added": "v24.11", + "openchannel2_changed.result": { + "added": "pre-v0.10.1", "deprecated": null }, - "onionmessage_forward_fail.incoming": { - "added": "v24.11", + "openchannel2_sign": { + "added": "pre-v0.10.1", "deprecated": null }, - "onionmessage_forward_fail.next_node_id": { - "added": "v24.11", + "openchannel2_sign.openchannel2_sign": { + "added": "pre-v0.10.1", "deprecated": null }, - "onionmessage_forward_fail.next_short_channel_id_dir": { - "added": "v24.11", + "openchannel2_sign.openchannel2_sign.channel_id": { + "added": "pre-v0.10.1", "deprecated": null }, - "onionmessage_forward_fail.outgoing": { - "added": "v24.11", + "openchannel2_sign.openchannel2_sign.psbt": { + "added": "pre-v0.10.1", "deprecated": null }, - "onionmessage_forward_fail.path_key": { - "added": "v24.11", + "openchannel2_sign.psbt": { + "added": "pre-v0.10.1", "deprecated": null }, - "onionmessage_forward_fail.source": { - "added": "v24.11", + "openchannel2_sign.result": { + "added": "pre-v0.10.1", "deprecated": null }, "openchannel_peer_sigs": { @@ -15357,6 +15961,42 @@ "added": "v25.09", "deprecated": null }, + "peer_connected": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "peer_connected.error_message": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "peer_connected.peer": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "peer_connected.peer.addr": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "peer_connected.peer.direction": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "peer_connected.peer.features": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "peer_connected.peer.id": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "peer_connected.peer.remote_addr": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "peer_connected.result": { + "added": 
"pre-v0.10.1", + "deprecated": null + }, "plugin_started": { "added": "v25.02", "deprecated": null @@ -15389,6 +16029,154 @@ "added": "v25.02", "deprecated": null }, + "rbf_channel": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "rbf_channel.error_message": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "rbf_channel.our_funding_msat": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "rbf_channel.psbt": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "rbf_channel.rbf_channel": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "rbf_channel.rbf_channel.channel_id": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "rbf_channel.rbf_channel.channel_max_msat": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "rbf_channel.rbf_channel.feerate_our_max": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "rbf_channel.rbf_channel.feerate_our_min": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "rbf_channel.rbf_channel.funding_feerate_per_kw": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "rbf_channel.rbf_channel.id": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "rbf_channel.rbf_channel.locktime": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "rbf_channel.rbf_channel.our_last_funding_msat": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "rbf_channel.rbf_channel.requested_lease_msat": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "rbf_channel.rbf_channel.require_confirmed_inputs": { + "added": "v23.02", + "deprecated": null + }, + "rbf_channel.rbf_channel.their_funding_msat": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "rbf_channel.rbf_channel.their_last_funding_msat": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "rbf_channel.result": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "recover_hook": { + "added": "v23.08", + "deprecated": null + }, + "recover_hook.codex32": { + "added": "v23.08", + "deprecated": null + }, + 
"recover_hook.result": { + "added": "v23.08", + "deprecated": null + }, + "rpc_command": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "rpc_command.replace": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "rpc_command.replace.id": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "rpc_command.replace.jsonrpc": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "rpc_command.replace.method": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "rpc_command.replace.params": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "rpc_command.result": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "rpc_command.return": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "rpc_command.return.error": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "rpc_command.return.error.code": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "rpc_command.return.error.message": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "rpc_command.return.result": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "rpc_command.rpc_command": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "rpc_command.rpc_command.id": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "rpc_command.rpc_command.method": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "rpc_command.rpc_command.params": { + "added": "pre-v0.10.1", + "deprecated": null + }, "sendpay_failure": { "added": "pre-v0.10.1", "deprecated": null @@ -15613,5 +16401,63 @@ "added": "v24.02", "deprecated": null } + }, + "rpc-only-enum-map": { + "CommitmentRevocationResult": { + "continue": 0 + }, + "CustommsgHookResult": { + "continue": 0 + }, + "DbWriteResult": { + "continue": 0 + }, + "HtlcAcceptedOnionType": { + "tlv": 0 + }, + "HtlcAcceptedResult": { + "continue": 0, + "fail": 1, + "resolve": 2 + }, + "InvoicePaymentHookResult": { + "continue": 0, + "reject": 1 + }, + "Openchannel2ChangedResult": { + "continue": 0 + }, + "Openchannel2Result": { + 
"continue": 0, + "reject": 1 + }, + "Openchannel2SignResult": { + "continue": 0 + }, + "OpenchannelResult": { + "continue": 0, + "reject": 1 + }, + "PeerConnectedPeerDirection": { + "in": 0, + "out": 1 + }, + "PeerConnectedResult": { + "continue": 0, + "disconnect": 1 + }, + "RbfChannelResult": { + "continue": 0, + "reject": 1 + }, + "RecoverHookResult": { + "continue": 0 + }, + "RpcCommandReplaceJsonrpc": { + "2.0": 0 + }, + "RpcCommandResult": { + "continue": 0 + } } } \ No newline at end of file diff --git a/cln-rpc/Makefile b/cln-rpc/Makefile index 808dd71cca13..342685b6509a 100644 --- a/cln-rpc/Makefile +++ b/cln-rpc/Makefile @@ -2,7 +2,7 @@ cln-rpc-wrongdir: $(MAKE) -C .. cln-rpc-all CLN_RPC_EXAMPLES := target/${RUST_PROFILE}/examples/cln-rpc-getinfo -CLN_RPC_GENALL = cln-rpc/src/model.rs cln-rpc/src/notifications.rs +CLN_RPC_GENALL = cln-rpc/src/model.rs cln-rpc/src/notifications.rs cln-rpc/src/hooks.rs CLN_RPC_SOURCES = $(shell find cln-rpc -name *.rs) ${CLN_RPC_GENALL} DEFAULT_TARGETS += $(CLN_RPC_EXAMPLES) $(CLN_RPC_GENALL) diff --git a/cln-rpc/src/hooks.rs b/cln-rpc/src/hooks.rs new file mode 100644 index 000000000000..61dc899b5e34 --- /dev/null +++ b/cln-rpc/src/hooks.rs @@ -0,0 +1,978 @@ +// This file is autogenerated by `msggen` +// Do not edit it manually, your changes will be overwritten + + + +use serde::{Serialize, Deserialize}; +#[derive(Clone, Debug, Deserialize, Serialize)] +pub enum Hook { + #[serde(rename = "peer_connected")] + PeerConnected(events::PeerConnectedEvent), + #[serde(rename = "recover_hook")] + RecoverHook(events::RecoverHookEvent), + #[serde(rename = "commitment_revocation")] + CommitmentRevocation(events::CommitmentRevocationEvent), + #[serde(rename = "db_write")] + DbWrite(events::DbWriteEvent), + #[serde(rename = "invoice_payment_hook")] + InvoicePaymentHook(events::InvoicePaymentHookEvent), + #[serde(rename = "openchannel")] + Openchannel(events::OpenchannelEvent), + #[serde(rename = "openchannel2")] + 
Openchannel2(events::Openchannel2Event), + #[serde(rename = "openchannel2_changed")] + Openchannel2Changed(events::Openchannel2ChangedEvent), + #[serde(rename = "openchannel2_sign")] + Openchannel2Sign(events::Openchannel2SignEvent), + #[serde(rename = "rbf_channel")] + RbfChannel(events::RbfChannelEvent), + #[serde(rename = "htlc_accepted")] + HtlcAccepted(events::HtlcAcceptedEvent), + #[serde(rename = "rpc_command")] + RpcCommand(events::RpcCommandEvent), + #[serde(rename = "custommsg_hook")] + CustommsgHook(events::CustommsgHookEvent), + #[serde(rename = "onion_message_recv")] + OnionMessageRecv(events::OnionMessageRecvEvent), + #[serde(rename = "onion_message_recv_secret")] + OnionMessageRecvSecret(events::OnionMessageRecvSecretEvent), +} + + +pub mod events{ + use crate::primitives::*; + use serde::{Serialize, Deserialize}; + + /// ['Connection direction: `in` for incoming, `out` for outgoing.'] + #[derive(Copy, Clone, Debug, Deserialize, Serialize, PartialEq, Eq)] + #[allow(non_camel_case_types)] + pub enum PeerConnectedPeerDirection { + #[serde(rename = "in")] + IN = 0, + #[serde(rename = "out")] + OUT = 1, + } + + impl TryFrom for PeerConnectedPeerDirection { + type Error = anyhow::Error; + fn try_from(c: i32) -> Result { + match c { + 0 => Ok(PeerConnectedPeerDirection::IN), + 1 => Ok(PeerConnectedPeerDirection::OUT), + o => Err(anyhow::anyhow!("Unknown variant {} for enum PeerConnectedPeerDirection", o)), + } + } + } + + impl ToString for PeerConnectedPeerDirection { + fn to_string(&self) -> String { + match self { + PeerConnectedPeerDirection::IN => "IN", + PeerConnectedPeerDirection::OUT => "OUT", + }.to_string() + } + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct PeerConnectedPeer { + #[serde(skip_serializing_if = "Option::is_none")] + pub remote_addr: Option, + // Path `peer_connected.peer.direction` + pub direction: PeerConnectedPeerDirection, + pub addr: String, + pub features: String, + pub id: PublicKey, + } + + 
#[derive(Clone, Debug, Deserialize, Serialize)] + pub struct PeerConnectedEvent { + pub peer: PeerConnectedPeer, + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct RecoverHookEvent { + pub codex32: String, + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct CommitmentRevocationEvent { + pub channel_id: Sha256, + pub commitment_txid: String, + pub commitnum: u64, + pub penalty_tx: String, + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct DbWriteEvent { + pub data_version: u32, + pub writes: Vec, + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct InvoicePaymentHookPayment { + pub label: String, + pub msat: Amount, + pub preimage: Secret, + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct InvoicePaymentHookEvent { + pub payment: InvoicePaymentHookPayment, + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct OpenchannelOpenchannelChannelType { + pub bits: Vec, + pub names: Vec, + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct OpenchannelOpenchannel { + #[serde(skip_serializing_if = "Option::is_none")] + pub channel_type: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub shutdown_scriptpubkey: Option, + pub channel_flags: u8, + pub channel_reserve_msat: Amount, + pub dust_limit_msat: Amount, + pub feerate_per_kw: u32, + pub funding_msat: Amount, + pub htlc_minimum_msat: Amount, + pub id: PublicKey, + pub max_accepted_htlcs: u32, + pub max_htlc_value_in_flight_msat: Amount, + pub push_msat: Amount, + pub to_self_delay: u32, + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct OpenchannelEvent { + pub openchannel: OpenchannelOpenchannel, + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct Openchannel2Openchannel2ChannelType { + pub bits: Vec, + pub names: Vec, + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct Openchannel2Openchannel2 { + #[serde(skip_serializing_if = 
"Option::is_none")] + pub channel_type: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub lease_blockheight_start: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub node_blockheight: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub requested_lease_msat: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub shutdown_scriptpubkey: Option, + pub channel_flags: u8, + pub channel_id: Sha256, + pub channel_max_msat: Amount, + pub commitment_feerate_per_kw: u32, + pub dust_limit_msat: Amount, + pub feerate_our_max: u32, + pub feerate_our_min: u32, + pub funding_feerate_per_kw: u32, + pub htlc_minimum_msat: Amount, + pub id: PublicKey, + pub locktime: u32, + pub max_accepted_htlcs: u16, + pub max_htlc_value_in_flight_msat: Amount, + pub require_confirmed_inputs: bool, + pub their_funding_msat: Amount, + pub to_self_delay: u16, + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct Openchannel2Event { + pub openchannel2: Openchannel2Openchannel2, + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct Openchannel2ChangedOpenchannel2Changed { + pub channel_id: Sha256, + pub psbt: String, + pub require_confirmed_inputs: bool, + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct Openchannel2ChangedEvent { + pub openchannel2_changed: Openchannel2ChangedOpenchannel2Changed, + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct Openchannel2SignOpenchannel2Sign { + pub channel_id: Sha256, + pub psbt: String, + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct Openchannel2SignEvent { + pub openchannel2_sign: Openchannel2SignOpenchannel2Sign, + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct RbfChannelRbfChannel { + #[serde(skip_serializing_if = "Option::is_none")] + pub requested_lease_msat: Option, + pub channel_id: Sha256, + pub channel_max_msat: Amount, + pub feerate_our_max: u32, + pub feerate_our_min: u32, + pub 
funding_feerate_per_kw: u32, + pub id: PublicKey, + pub locktime: u32, + pub our_last_funding_msat: Amount, + pub require_confirmed_inputs: bool, + pub their_funding_msat: Amount, + pub their_last_funding_msat: Amount, + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct RbfChannelEvent { + pub rbf_channel: RbfChannelRbfChannel, + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct HtlcAcceptedHtlc { + #[serde(skip_serializing_if = "Option::is_none")] + pub extra_tlvs: Option, + pub amount_msat: Amount, + pub cltv_expiry: u32, + pub cltv_expiry_relative: u32, + pub id: u64, + pub payment_hash: Sha256, + pub short_channel_id: ShortChannelId, + } + + /// ['Indicates that the payload is TLV formatted.', 'Only present if the payload was successfully parsed.'] + #[derive(Copy, Clone, Debug, Deserialize, Serialize, PartialEq, Eq)] + #[allow(non_camel_case_types)] + pub enum HtlcAcceptedOnionType { + #[serde(rename = "tlv")] + TLV = 0, + } + + impl TryFrom for HtlcAcceptedOnionType { + type Error = anyhow::Error; + fn try_from(c: i32) -> Result { + match c { + 0 => Ok(HtlcAcceptedOnionType::TLV), + o => Err(anyhow::anyhow!("Unknown variant {} for enum HtlcAcceptedOnionType", o)), + } + } + } + + impl ToString for HtlcAcceptedOnionType { + fn to_string(&self) -> String { + match self { + HtlcAcceptedOnionType::TLV => "TLV", + }.to_string() + } + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct HtlcAcceptedOnion { + #[serde(skip_serializing_if = "Option::is_none")] + pub forward_msat: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub item_type: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub next_node_id: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub outgoing_cltv_value: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub payment_metadata: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub payment_secret: Option, + #[serde(skip_serializing_if = 
"Option::is_none")] + pub short_channel_id: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub total_msat: Option, + pub next_onion: String, + pub payload: String, + pub shared_secret: Secret, + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct HtlcAcceptedEvent { + #[serde(skip_serializing_if = "Option::is_none")] + pub forward_to: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub peer_id: Option, + pub htlc: HtlcAcceptedHtlc, + pub onion: HtlcAcceptedOnion, + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct RpcCommandRpcCommand { + pub id: JsonScalar, + pub method: String, + pub params: JsonObjectOrArray, + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct RpcCommandEvent { + pub rpc_command: RpcCommandRpcCommand, + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct CustommsgHookEvent { + pub payload: String, + pub peer_id: PublicKey, + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct OnionMessageRecvOnionMessageReplyBlindedpathHops { + pub blinded_node_id: PublicKey, + pub encrypted_recipient_data: String, + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct OnionMessageRecvOnionMessageReplyBlindedpath { + #[serde(skip_serializing_if = "Option::is_none")] + pub first_node_id: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub first_path_key: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub first_scid: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub first_scid_dir: Option, + #[serde(skip_serializing_if = "crate::is_none_or_empty")] + pub hops: Option>, + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct OnionMessageRecvOnionMessageUnknownFields { + pub number: u64, + pub value: String, + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct OnionMessageRecvOnionMessage { + #[serde(skip_serializing_if = "Option::is_none")] + pub invoice: Option, + 
#[serde(skip_serializing_if = "Option::is_none")] + pub invoice_error: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub invoice_request: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub reply_blindedpath: Option, + #[serde(skip_serializing_if = "crate::is_none_or_empty")] + pub unknown_fields: Option>, + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct OnionMessageRecvEvent { + pub onion_message: OnionMessageRecvOnionMessage, + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct OnionMessageRecvSecretOnionMessageReplyBlindedpathHops { + pub blinded_node_id: PublicKey, + pub encrypted_recipient_data: String, + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct OnionMessageRecvSecretOnionMessageReplyBlindedpath { + #[serde(skip_serializing_if = "Option::is_none")] + pub first_node_id: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub first_path_key: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub first_scid: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub first_scid_dir: Option, + #[serde(skip_serializing_if = "crate::is_none_or_empty")] + pub hops: Option>, + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct OnionMessageRecvSecretOnionMessageUnknownFields { + pub number: u64, + pub value: String, + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct OnionMessageRecvSecretOnionMessage { + #[serde(skip_serializing_if = "Option::is_none")] + pub invoice: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub invoice_error: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub invoice_request: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub reply_blindedpath: Option, + #[serde(skip_serializing_if = "crate::is_none_or_empty")] + pub unknown_fields: Option>, + pub pathsecret: Secret, + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct 
OnionMessageRecvSecretEvent { + pub onion_message: OnionMessageRecvSecretOnionMessage, + } + +} +pub mod actions{ + use crate::primitives::*; + use serde::{Serialize, Deserialize}; + + /// ['Whether to allow the connection to proceed or disconnect the peer.'] + #[derive(Copy, Clone, Debug, Deserialize, Serialize, PartialEq, Eq)] + #[allow(non_camel_case_types)] + pub enum PeerConnectedResult { + #[serde(rename = "continue")] + CONTINUE = 0, + #[serde(rename = "disconnect")] + DISCONNECT = 1, + } + + impl TryFrom for PeerConnectedResult { + type Error = anyhow::Error; + fn try_from(c: i32) -> Result { + match c { + 0 => Ok(PeerConnectedResult::CONTINUE), + 1 => Ok(PeerConnectedResult::DISCONNECT), + o => Err(anyhow::anyhow!("Unknown variant {} for enum PeerConnectedResult", o)), + } + } + } + + impl ToString for PeerConnectedResult { + fn to_string(&self) -> String { + match self { + PeerConnectedResult::CONTINUE => "CONTINUE", + PeerConnectedResult::DISCONNECT => "DISCONNECT", + }.to_string() + } + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct PeerConnectedAction { + #[serde(skip_serializing_if = "Option::is_none")] + pub error_message: Option, + // Path `peer_connected.result` + pub result: PeerConnectedResult, + } + + /// ['Returning "continue" resumes normal execution.'] + #[derive(Copy, Clone, Debug, Deserialize, Serialize, PartialEq, Eq)] + #[allow(non_camel_case_types)] + pub enum RecoverHookResult { + #[serde(rename = "continue")] + CONTINUE = 0, + } + + impl TryFrom for RecoverHookResult { + type Error = anyhow::Error; + fn try_from(c: i32) -> Result { + match c { + 0 => Ok(RecoverHookResult::CONTINUE), + o => Err(anyhow::anyhow!("Unknown variant {} for enum RecoverHookResult", o)), + } + } + } + + impl ToString for RecoverHookResult { + fn to_string(&self) -> String { + match self { + RecoverHookResult::CONTINUE => "CONTINUE", + }.to_string() + } + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct RecoverHookAction { + 
// Path `recover_hook.result` + pub result: RecoverHookResult, + } + + /// ['Plugins should always return "continue", otherwise subsequent hook subscribers would not get called.'] + #[derive(Copy, Clone, Debug, Deserialize, Serialize, PartialEq, Eq)] + #[allow(non_camel_case_types)] + pub enum CommitmentRevocationResult { + #[serde(rename = "continue")] + CONTINUE = 0, + } + + impl TryFrom for CommitmentRevocationResult { + type Error = anyhow::Error; + fn try_from(c: i32) -> Result { + match c { + 0 => Ok(CommitmentRevocationResult::CONTINUE), + o => Err(anyhow::anyhow!("Unknown variant {} for enum CommitmentRevocationResult", o)), + } + } + } + + impl ToString for CommitmentRevocationResult { + fn to_string(&self) -> String { + match self { + CommitmentRevocationResult::CONTINUE => "CONTINUE", + }.to_string() + } + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct CommitmentRevocationAction { + // Path `commitment_revocation.result` + pub result: CommitmentRevocationResult, + } + + /// ['Must be "continue" for the database commit to proceed.', 'Any other value will abort the commit and cause `lightningd` to error.'] + #[derive(Copy, Clone, Debug, Deserialize, Serialize, PartialEq, Eq)] + #[allow(non_camel_case_types)] + pub enum DbWriteResult { + #[serde(rename = "continue")] + CONTINUE = 0, + } + + impl TryFrom for DbWriteResult { + type Error = anyhow::Error; + fn try_from(c: i32) -> Result { + match c { + 0 => Ok(DbWriteResult::CONTINUE), + o => Err(anyhow::anyhow!("Unknown variant {} for enum DbWriteResult", o)), + } + } + } + + impl ToString for DbWriteResult { + fn to_string(&self) -> String { + match self { + DbWriteResult::CONTINUE => "CONTINUE", + }.to_string() + } + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct DbWriteAction { + // Path `db_write.result` + pub result: DbWriteResult, + } + + /// ['Controls whether the payment is accepted or rejected.', '"continue" accepts the payment.', '"reject" fails the payment.'] 
+ #[derive(Copy, Clone, Debug, Deserialize, Serialize, PartialEq, Eq)] + #[allow(non_camel_case_types)] + pub enum InvoicePaymentHookResult { + #[serde(rename = "continue")] + CONTINUE = 0, + #[serde(rename = "reject")] + REJECT = 1, + } + + impl TryFrom for InvoicePaymentHookResult { + type Error = anyhow::Error; + fn try_from(c: i32) -> Result { + match c { + 0 => Ok(InvoicePaymentHookResult::CONTINUE), + 1 => Ok(InvoicePaymentHookResult::REJECT), + o => Err(anyhow::anyhow!("Unknown variant {} for enum InvoicePaymentHookResult", o)), + } + } + } + + impl ToString for InvoicePaymentHookResult { + fn to_string(&self) -> String { + match self { + InvoicePaymentHookResult::CONTINUE => "CONTINUE", + InvoicePaymentHookResult::REJECT => "REJECT", + }.to_string() + } + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct InvoicePaymentHookAction { + #[serde(skip_serializing_if = "Option::is_none")] + pub failure_message: Option, + // Path `invoice_payment_hook.result` + pub result: InvoicePaymentHookResult, + } + + /// ['Whether to accept or reject the channel opening request.'] + #[derive(Copy, Clone, Debug, Deserialize, Serialize, PartialEq, Eq)] + #[allow(non_camel_case_types)] + pub enum OpenchannelResult { + #[serde(rename = "continue")] + CONTINUE = 0, + #[serde(rename = "reject")] + REJECT = 1, + } + + impl TryFrom for OpenchannelResult { + type Error = anyhow::Error; + fn try_from(c: i32) -> Result { + match c { + 0 => Ok(OpenchannelResult::CONTINUE), + 1 => Ok(OpenchannelResult::REJECT), + o => Err(anyhow::anyhow!("Unknown variant {} for enum OpenchannelResult", o)), + } + } + } + + impl ToString for OpenchannelResult { + fn to_string(&self) -> String { + match self { + OpenchannelResult::CONTINUE => "CONTINUE", + OpenchannelResult::REJECT => "REJECT", + }.to_string() + } + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct OpenchannelAction { + #[serde(skip_serializing_if = "Option::is_none")] + pub close_to: Option, + 
#[serde(skip_serializing_if = "Option::is_none")] + pub error_message: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub mindepth: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub reserve: Option, + // Path `openchannel.result` + pub result: OpenchannelResult, + } + + /// ['Indicates whether to accept or reject the channel proposal.', 'Returning "continue" allows the channel negotiation to proceed.', 'Returning "reject" aborts the channel opening.'] + #[derive(Copy, Clone, Debug, Deserialize, Serialize, PartialEq, Eq)] + #[allow(non_camel_case_types)] + pub enum Openchannel2Result { + #[serde(rename = "continue")] + CONTINUE = 0, + #[serde(rename = "reject")] + REJECT = 1, + } + + impl TryFrom for Openchannel2Result { + type Error = anyhow::Error; + fn try_from(c: i32) -> Result { + match c { + 0 => Ok(Openchannel2Result::CONTINUE), + 1 => Ok(Openchannel2Result::REJECT), + o => Err(anyhow::anyhow!("Unknown variant {} for enum Openchannel2Result", o)), + } + } + } + + impl ToString for Openchannel2Result { + fn to_string(&self) -> String { + match self { + Openchannel2Result::CONTINUE => "CONTINUE", + Openchannel2Result::REJECT => "REJECT", + }.to_string() + } + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct Openchannel2Action { + #[serde(skip_serializing_if = "Option::is_none")] + pub close_to: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub error_message: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub our_funding_msat: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub psbt: Option, + // Path `openchannel2.result` + pub result: Openchannel2Result, + } + + /// ['Must be set to `continue` to proceed with the channel opening negotiation.'] + #[derive(Copy, Clone, Debug, Deserialize, Serialize, PartialEq, Eq)] + #[allow(non_camel_case_types)] + pub enum Openchannel2ChangedResult { + #[serde(rename = "continue")] + CONTINUE = 0, + } + + impl TryFrom for 
Openchannel2ChangedResult { + type Error = anyhow::Error; + fn try_from(c: i32) -> Result { + match c { + 0 => Ok(Openchannel2ChangedResult::CONTINUE), + o => Err(anyhow::anyhow!("Unknown variant {} for enum Openchannel2ChangedResult", o)), + } + } + } + + impl ToString for Openchannel2ChangedResult { + fn to_string(&self) -> String { + match self { + Openchannel2ChangedResult::CONTINUE => "CONTINUE", + }.to_string() + } + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct Openchannel2ChangedAction { + // Path `openchannel2_changed.result` + pub result: Openchannel2ChangedResult, + pub psbt: String, + } + + /// ['Must be set to `continue` to proceed with channel opening.'] + #[derive(Copy, Clone, Debug, Deserialize, Serialize, PartialEq, Eq)] + #[allow(non_camel_case_types)] + pub enum Openchannel2SignResult { + #[serde(rename = "continue")] + CONTINUE = 0, + } + + impl TryFrom for Openchannel2SignResult { + type Error = anyhow::Error; + fn try_from(c: i32) -> Result { + match c { + 0 => Ok(Openchannel2SignResult::CONTINUE), + o => Err(anyhow::anyhow!("Unknown variant {} for enum Openchannel2SignResult", o)), + } + } + } + + impl ToString for Openchannel2SignResult { + fn to_string(&self) -> String { + match self { + Openchannel2SignResult::CONTINUE => "CONTINUE", + }.to_string() + } + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct Openchannel2SignAction { + // Path `openchannel2_sign.result` + pub result: Openchannel2SignResult, + pub psbt: String, + } + + /// ['Whether to accept or reject the RBF proposal.'] + #[derive(Copy, Clone, Debug, Deserialize, Serialize, PartialEq, Eq)] + #[allow(non_camel_case_types)] + pub enum RbfChannelResult { + #[serde(rename = "continue")] + CONTINUE = 0, + #[serde(rename = "reject")] + REJECT = 1, + } + + impl TryFrom for RbfChannelResult { + type Error = anyhow::Error; + fn try_from(c: i32) -> Result { + match c { + 0 => Ok(RbfChannelResult::CONTINUE), + 1 => Ok(RbfChannelResult::REJECT), + o 
=> Err(anyhow::anyhow!("Unknown variant {} for enum RbfChannelResult", o)), + } + } + } + + impl ToString for RbfChannelResult { + fn to_string(&self) -> String { + match self { + RbfChannelResult::CONTINUE => "CONTINUE", + RbfChannelResult::REJECT => "REJECT", + }.to_string() + } + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct RbfChannelAction { + #[serde(skip_serializing_if = "Option::is_none")] + pub error_message: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub our_funding_msat: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub psbt: Option, + // Path `rbf_channel.result` + pub result: RbfChannelResult, + } + + /// ['Determines how the HTLC should be handled.', '', '`continue` means that the plugin does not want to do anything special and lightningd should continue processing it normally,', "i.e., resolve the payment if we're the recipient, or attempt to forward it otherwise. Notice that the usual checks such as sufficient fees and CLTV deltas are still enforced.", '', 'It can also replace the onion.payload by specifying a payload in the response. Note that this is always a TLV-style payload,', 'so unlike onion.payload there is no length prefix (and it must be at least 4 hex digits long). This will be re-parsed;', "it's useful for removing onion fields which a plugin doesn't want lightningd to consider.", '', 'It can also specify forward_to in the response, replacing the destination.', 'This usually only makes sense if it wants to choose an alternate channel to the same next peer, but is useful if the payload is also replaced.', '', 'Also, it can specify extra_tlvs in the response. 
This will replace the TLV-stream update_add_htlc_tlvs in the update_add_htlc message for forwarded htlcs.', '', 'If the node is the final destination, the plugin can also replace the amount of the invoice that belongs to the payment_hash by specifying invoice_msat.', '', '', '`fail` will tell lightningd to fail the HTLC with a given hex-encoded `failure_message` (please refer to BOLT #4 for details: `incorrect_or_unknown_payment_details` is the most common).', '', 'Instead of `failure_message` the response can contain a hex-encoded `failure_onion` that will be used instead (please refer to the BOLT #4 for details).', "This can be used, for example, if you're writing a bridge between two Lightning Networks. Note that lightningd will apply the obfuscation step to the value", 'returned here with its own shared secret (and key type `ammag`) before returning it to the previous hop.', '', '', '`resolve` instructs lightningd to claim the HTLC by providing the preimage matching the `payment_hash` presented in the call.', 'Notice that the plugin must ensure that the `payment_key` really matches the `payment_hash` since lightningd will not check and the wrong value could result in the channel being closed.'] + #[derive(Copy, Clone, Debug, Deserialize, Serialize, PartialEq, Eq)] + #[allow(non_camel_case_types)] + pub enum HtlcAcceptedResult { + #[serde(rename = "continue")] + CONTINUE = 0, + #[serde(rename = "fail")] + FAIL = 1, + #[serde(rename = "resolve")] + RESOLVE = 2, + } + + impl TryFrom for HtlcAcceptedResult { + type Error = anyhow::Error; + fn try_from(c: i32) -> Result { + match c { + 0 => Ok(HtlcAcceptedResult::CONTINUE), + 1 => Ok(HtlcAcceptedResult::FAIL), + 2 => Ok(HtlcAcceptedResult::RESOLVE), + o => Err(anyhow::anyhow!("Unknown variant {} for enum HtlcAcceptedResult", o)), + } + } + } + + impl ToString for HtlcAcceptedResult { + fn to_string(&self) -> String { + match self { + HtlcAcceptedResult::CONTINUE => "CONTINUE", + HtlcAcceptedResult::FAIL => "FAIL", + 
HtlcAcceptedResult::RESOLVE => "RESOLVE", + }.to_string() + } + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct HtlcAcceptedAction { + #[serde(skip_serializing_if = "Option::is_none")] + pub extra_tlvs: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub failure_message: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub failure_onion: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub forward_to: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub invoice_msat: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub payload: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub payment_key: Option, + // Path `htlc_accepted.result` + pub result: HtlcAcceptedResult, + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct RpcCommandReturnError { + pub code: i64, + pub message: String, + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct RpcCommandReturnResult { + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct RpcCommandReturn { + #[serde(skip_serializing_if = "Option::is_none")] + pub error: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub result: Option, + } + + /// ['The JSON-RPC version.'] + #[derive(Copy, Clone, Debug, Deserialize, Serialize, PartialEq, Eq)] + #[allow(non_camel_case_types)] + pub enum RpcCommandReplaceJsonrpc { + #[serde(rename = "2.0")] + NUM_2_0 = 0, + } + + impl TryFrom for RpcCommandReplaceJsonrpc { + type Error = anyhow::Error; + fn try_from(c: i32) -> Result { + match c { + 0 => Ok(RpcCommandReplaceJsonrpc::NUM_2_0), + o => Err(anyhow::anyhow!("Unknown variant {} for enum RpcCommandReplaceJsonrpc", o)), + } + } + } + + impl ToString for RpcCommandReplaceJsonrpc { + fn to_string(&self) -> String { + match self { + RpcCommandReplaceJsonrpc::NUM_2_0 => "NUM_2_0", + }.to_string() + } + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct RpcCommandReplace { + // Path 
`rpc_command.replace.jsonrpc` + pub jsonrpc: RpcCommandReplaceJsonrpc, + pub id: JsonScalar, + pub method: String, + pub params: JsonObjectOrArray, + } + + /// ['Indicates that lightningd should continue processing the RPC command normally.'] + #[derive(Copy, Clone, Debug, Deserialize, Serialize, PartialEq, Eq)] + #[allow(non_camel_case_types)] + pub enum RpcCommandResult { + #[serde(rename = "continue")] + CONTINUE = 0, + } + + impl TryFrom for RpcCommandResult { + type Error = anyhow::Error; + fn try_from(c: i32) -> Result { + match c { + 0 => Ok(RpcCommandResult::CONTINUE), + o => Err(anyhow::anyhow!("Unknown variant {} for enum RpcCommandResult", o)), + } + } + } + + impl ToString for RpcCommandResult { + fn to_string(&self) -> String { + match self { + RpcCommandResult::CONTINUE => "CONTINUE", + }.to_string() + } + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct RpcCommandAction { + #[serde(rename = "return")] + #[serde(skip_serializing_if = "Option::is_none")] + pub return_: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub replace: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub result: Option, + } + + /// ['Must always be `continue`. 
Any other value will cause the hook to fail.'] + #[derive(Copy, Clone, Debug, Deserialize, Serialize, PartialEq, Eq)] + #[allow(non_camel_case_types)] + pub enum CustommsgHookResult { + #[serde(rename = "continue")] + CONTINUE = 0, + } + + impl TryFrom for CustommsgHookResult { + type Error = anyhow::Error; + fn try_from(c: i32) -> Result { + match c { + 0 => Ok(CustommsgHookResult::CONTINUE), + o => Err(anyhow::anyhow!("Unknown variant {} for enum CustommsgHookResult", o)), + } + } + } + + impl ToString for CustommsgHookResult { + fn to_string(&self) -> String { + match self { + CustommsgHookResult::CONTINUE => "CONTINUE", + }.to_string() + } + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct CustommsgHookAction { + // Path `custommsg_hook.result` + pub result: CustommsgHookResult, + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct OnionMessageRecvAction { + pub result: String, + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct OnionMessageRecvSecretAction { + pub result: String, + } + +} diff --git a/cln-rpc/src/lib.rs b/cln-rpc/src/lib.rs index 31c54622208c..9ca81b46feb7 100644 --- a/cln-rpc/src/lib.rs +++ b/cln-rpc/src/lib.rs @@ -91,11 +91,15 @@ use tokio::net::UnixStream; use tokio_util::codec::{FramedRead, FramedWrite}; pub mod codec; +pub mod hooks; pub mod jsonrpc; pub mod model; pub mod notifications; pub mod primitives; +#[cfg(test)] +mod test; + pub use crate::model::TypedRequest; pub use crate::{ model::{Request, Response}, @@ -328,291 +332,3 @@ where { f.as_ref().map_or(true, |value| value.is_empty()) } - -#[cfg(test)] -mod test { - use self::notifications::{BlockAddedNotification, CustomMsgNotification}; - - use super::*; - use crate::model::*; - use crate::primitives::PublicKey; - use futures_util::StreamExt; - use serde_json::json; - use std::str::FromStr; - use tokio_util::codec::{Framed, FramedRead}; - - #[tokio::test] - async fn call_raw_request() { - // Set up a pair of unix-streams - // The 
frame is a mock rpc-server - let (uds1, uds2) = UnixStream::pair().unwrap(); - let mut cln = ClnRpc::from_stream(uds1).unwrap(); - let mut frame = Framed::new(uds2, JsonCodec::default()); - - // Define the request and response send in the RPC-message - let rpc_request = serde_json::json!({ - "id" : 1, - "jsonrpc" : "2.0", - "params" : {}, - "method" : "some_method" - }); - let rpc_request2 = rpc_request.clone(); - - let rpc_response = serde_json::json!({ - "jsonrpc" : "2.0", - "id" : "1", - "result" : {"field_6" : 6} - }); - - // Spawn the task that performs the RPC-call - // Check that it reads the response correctly - let handle = tokio::task::spawn(async move { cln.call_raw_request(rpc_request2).await }); - - // Verify that our emulated server received a request - // and sendt the response - let read_req = dbg!(frame.next().await.unwrap().unwrap()); - assert_eq!(&rpc_request, &read_req); - frame.send(rpc_response).await.unwrap(); - - // Get the result from `call_raw_request` and verify - let actual_response: Result = handle.await.unwrap(); - let actual_response = actual_response.unwrap(); - assert_eq!(actual_response, json!({"field_6" : 6})); - } - - #[tokio::test] - async fn call_raw() { - let req = serde_json::json!({}); - let (uds1, uds2) = UnixStream::pair().unwrap(); - let mut cln = ClnRpc::from_stream(uds1).unwrap(); - - let mut read = FramedRead::new(uds2, JsonCodec::default()); - tokio::task::spawn(async move { - let _: serde_json::Value = cln.call_raw("getinfo", &req).await.unwrap(); - }); - - let read_req = dbg!(read.next().await.unwrap().unwrap()); - - assert_eq!( - json!({"id": 1, "method": "getinfo", "params": {}, "jsonrpc": "2.0"}), - read_req - ); - } - - #[tokio::test] - async fn test_call_enum_remote_error() { - // Set up the rpc-connection - // The frame represents a Mock rpc-server - let (uds1, uds2) = UnixStream::pair().unwrap(); - let mut cln = ClnRpc::from_stream(uds1).unwrap(); - let mut frame = Framed::new(uds2, JsonCodec::default()); - - 
// Construct the request and response - let req = Request::Ping(requests::PingRequest { - id: PublicKey::from_str( - "0364aeb75519be29d1af7b8cc6232dbda9fdabb79b66e4e1f6a223750954db210b", - ) - .unwrap(), - len: None, - pongbytes: None, - }); - - let mock_resp = json!({ - "id" : 1, - "jsonrpc" : "2.0", - "error" : { - "code" : 666, - "message" : "MOCK_ERROR" - } - }); - - // Spawn the task which calls the rpc - let handle = tokio::task::spawn(async move { cln.call(req).await }); - - // Ensure the mock receives the request and returns a response - let _ = dbg!(frame.next().await.unwrap().unwrap()); - frame.send(mock_resp).await.unwrap(); - - let rpc_response: Result<_, RpcError> = handle.await.unwrap(); - let rpc_error: RpcError = rpc_response.unwrap_err(); - - println!("RPC_ERROR : {:?}", rpc_error); - assert_eq!(rpc_error.code.unwrap(), 666); - assert_eq!(rpc_error.message, "MOCK_ERROR"); - } - - #[tokio::test] - async fn test_call_enum() { - // Set up the rpc-connection - // The frame represents a Mock rpc-server - let (uds1, uds2) = UnixStream::pair().unwrap(); - let mut cln = ClnRpc::from_stream(uds1).unwrap(); - let mut frame = Framed::new(uds2, JsonCodec::default()); - - // We'll use the Ping request here because both the request - // and response have few arguments - let req = Request::Ping(requests::PingRequest { - id: PublicKey::from_str( - "0364aeb75519be29d1af7b8cc6232dbda9fdabb79b66e4e1f6a223750954db210b", - ) - .unwrap(), - len: None, - pongbytes: None, - }); - let mock_resp = json!({ - "id" : 1, - "jsonrpc" : "2.0", - "result" : { "totlen" : 123 } - }); - - // we create a task that sends the response and returns the response - let handle = tokio::task::spawn(async move { cln.call(req).await }); - - // Ensure our mock receives the request and sends the response - let read_req = dbg!(frame.next().await.unwrap().unwrap()); - assert_eq!( - read_req, - json!({"id" : 1, "jsonrpc" : "2.0", "method" : "ping", "params" : {"id" : 
"0364aeb75519be29d1af7b8cc6232dbda9fdabb79b66e4e1f6a223750954db210b"}}) - ); - frame.send(mock_resp).await.unwrap(); - - // Verify that the error response is correct - let rpc_response: Result<_, RpcError> = handle.await.unwrap(); - match rpc_response.unwrap() { - Response::Ping(ping) => { - assert_eq!(ping.totlen, 123); - } - _ => panic!("A Request::Getinfo should return Response::Getinfo"), - } - } - - #[tokio::test] - async fn test_call_typed() { - // Set up the rpc-connection - // The frame represents a Mock rpc-server - let (uds1, uds2) = UnixStream::pair().unwrap(); - let mut cln = ClnRpc::from_stream(uds1).unwrap(); - let mut frame = Framed::new(uds2, JsonCodec::default()); - - // We'll use the Ping request here because both the request - // and response have few arguments - let req = requests::PingRequest { - id: PublicKey::from_str( - "0364aeb75519be29d1af7b8cc6232dbda9fdabb79b66e4e1f6a223750954db210b", - ) - .unwrap(), - len: None, - pongbytes: None, - }; - let mock_resp = json!({ - "id" : 1, - "jsonrpc" : "2.0", - "result" : { "totlen" : 123 } - }); - - // we create a task that sends the response and returns the response - let handle = tokio::task::spawn(async move { cln.call_typed(&req).await }); - - // Ensure our mock receives the request and sends the response - _ = dbg!(frame.next().await.unwrap().unwrap()); - frame.send(mock_resp).await.unwrap(); - - // Verify that the error response is correct - let rpc_response: Result<_, RpcError> = handle.await.unwrap(); - let ping_response = rpc_response.unwrap(); - assert_eq!(ping_response.totlen, 123); - } - - #[tokio::test] - async fn test_call_typed_remote_error() { - // Create a dummy rpc-request - let req = requests::GetinfoRequest {}; - - // Create a dummy error response - let response = json!({ - "id" : 1, - "jsonrpc" : "2.0", - "error" : { - "code" : 666, - "message" : "MOCK_ERROR", - }}); - - let (uds1, uds2) = UnixStream::pair().unwrap(); - let mut cln = ClnRpc::from_stream(uds1).unwrap(); - - // 
Send out the request - let mut frame = Framed::new(uds2, JsonCodec::default()); - - let handle = tokio::task::spawn(async move { cln.call_typed(&req).await }); - - // Dummy-server ensures the request has been received and send the error response - let _ = dbg!(frame.next().await.unwrap().unwrap()); - frame.send(response).await.unwrap(); - - let rpc_response = handle.await.unwrap(); - let rpc_error = rpc_response.expect_err("Must be an RPC-error response"); - - assert_eq!(rpc_error.code.unwrap(), 666); - assert_eq!(rpc_error.message, "MOCK_ERROR"); - } - - #[test] - fn serialize_custom_msg_notification() { - let msg = CustomMsgNotification { - peer_id : PublicKey::from_str("0364aeb75519be29d1af7b8cc6232dbda9fdabb79b66e4e1f6a223750954db210b").unwrap(), - payload : String::from("941746573749") - }; - - let notification = Notification::CustomMsg(msg); - - assert_eq!( - serde_json::to_value(notification).unwrap(), - serde_json::json!( - { - "custommsg" : { - "peer_id" : "0364aeb75519be29d1af7b8cc6232dbda9fdabb79b66e4e1f6a223750954db210b", - "payload" : "941746573749" - } - } - ) - ); - - } - - #[test] - fn serialize_block_added_notification() { - let block_added = BlockAddedNotification { - hash : crate::primitives::Sha256::from_str("000000000000000000000acab8abe0c67a52ed7e5a90a19c64930ff11fa84eca").unwrap(), - height : 830702 - }; - - let notification = Notification::BlockAdded(block_added); - - assert_eq!( - serde_json::to_value(notification).unwrap(), - serde_json::json!({ - "block_added" : { - "hash" : "000000000000000000000acab8abe0c67a52ed7e5a90a19c64930ff11fa84eca", - "height" : 830702 - } - }) - ) - } - - #[test] - fn deserialize_connect_notification() { - let connect_json = serde_json::json!({ - "connect" : { - "address" : { - "address" : "127.0.0.1", - "port" : 38012, - "type" : "ipv4" - }, - "direction" : "in", - "id" : "022d223620a359a47ff7f7ac447c85c46c923da53389221a0054c11c1e3ca31d59" - } - }); - - let _ : Notification = 
serde_json::from_value(connect_json).unwrap(); - } -} diff --git a/cln-rpc/src/test.rs b/cln-rpc/src/test.rs new file mode 100644 index 000000000000..a5d988031832 --- /dev/null +++ b/cln-rpc/src/test.rs @@ -0,0 +1,1093 @@ +/// Verify serde round-trip: serialize to JSON, deserialize back, and +/// check the re-serialized value matches the first serialization. +macro_rules! assert_serde_roundtrip { + ($value:expr, $type:ty) => {{ + let v = serde_json::to_value(&$value).unwrap(); + let rt: $type = serde_json::from_value(v.clone()).unwrap(); + let v2 = serde_json::to_value(&rt).unwrap(); + assert_eq!(v, v2); + }}; +} + +use crate::{ + codec::JsonCodec, + hooks::{actions::*, events::*}, + notifications::{BlockAddedNotification, CustomMsgNotification}, + primitives::{Amount, JsonObjectOrArray, JsonScalar}, + ClnRpc, Notification, RpcError, +}; + +use super::*; +use crate::model::*; +use crate::primitives::PublicKey; +use futures_util::{SinkExt, StreamExt}; +use serde_json::json; +use std::str::FromStr; +use tokio::net::UnixStream; +use tokio_util::codec::{Framed, FramedRead}; + +#[tokio::test] +async fn call_raw_request() { + // Set up a pair of unix-streams + // The frame is a mock rpc-server + let (uds1, uds2) = UnixStream::pair().unwrap(); + let mut cln = ClnRpc::from_stream(uds1).unwrap(); + let mut frame = Framed::new(uds2, JsonCodec::default()); + + // Define the request and response sent in the RPC-message + let rpc_request = serde_json::json!({ + "id" : 1, + "jsonrpc" : "2.0", + "params" : {}, + "method" : "some_method" + }); + let rpc_request2 = rpc_request.clone(); + + let rpc_response = serde_json::json!({ + "jsonrpc" : "2.0", + "id" : "1", + "result" : {"field_6" : 6} + }); + + // Spawn the task that performs the RPC-call + // Check that it reads the response correctly + let handle = tokio::task::spawn(async move { cln.call_raw_request(rpc_request2).await }); + + // Verify that our emulated server received a request + // and sent the response + let read_req
= dbg!(frame.next().await.unwrap().unwrap()); + assert_eq!(&rpc_request, &read_req); + frame.send(rpc_response).await.unwrap(); + + // Get the result from `call_raw_request` and verify + let actual_response: Result = handle.await.unwrap(); + let actual_response = actual_response.unwrap(); + assert_eq!(actual_response, json!({"field_6" : 6})); +} + +#[tokio::test] +async fn call_raw() { + let req = serde_json::json!({}); + let (uds1, uds2) = UnixStream::pair().unwrap(); + let mut cln = ClnRpc::from_stream(uds1).unwrap(); + + let mut read = FramedRead::new(uds2, JsonCodec::default()); + tokio::task::spawn(async move { + let _: serde_json::Value = cln.call_raw("getinfo", &req).await.unwrap(); + }); + + let read_req = dbg!(read.next().await.unwrap().unwrap()); + + assert_eq!( + json!({"id": 1, "method": "getinfo", "params": {}, "jsonrpc": "2.0"}), + read_req + ); +} + +#[tokio::test] +async fn test_call_enum_remote_error() { + // Set up the rpc-connection + // The frame represents a Mock rpc-server + let (uds1, uds2) = UnixStream::pair().unwrap(); + let mut cln = ClnRpc::from_stream(uds1).unwrap(); + let mut frame = Framed::new(uds2, JsonCodec::default()); + + // Construct the request and response + let req = Request::Ping(requests::PingRequest { + id: PublicKey::from_str( + "0364aeb75519be29d1af7b8cc6232dbda9fdabb79b66e4e1f6a223750954db210b", + ) + .unwrap(), + len: None, + pongbytes: None, + }); + + let mock_resp = json!({ + "id" : 1, + "jsonrpc" : "2.0", + "error" : { + "code" : 666, + "message" : "MOCK_ERROR" + } + }); + + // Spawn the task which calls the rpc + let handle = tokio::task::spawn(async move { cln.call(req).await }); + + // Ensure the mock receives the request and returns a response + let _ = dbg!(frame.next().await.unwrap().unwrap()); + frame.send(mock_resp).await.unwrap(); + + let rpc_response: Result<_, RpcError> = handle.await.unwrap(); + let rpc_error: RpcError = rpc_response.unwrap_err(); + + println!("RPC_ERROR : {:?}", rpc_error); + 
assert_eq!(rpc_error.code.unwrap(), 666); + assert_eq!(rpc_error.message, "MOCK_ERROR"); +} + +#[tokio::test] +async fn test_call_enum() { + // Set up the rpc-connection + // The frame represents a Mock rpc-server + let (uds1, uds2) = UnixStream::pair().unwrap(); + let mut cln = ClnRpc::from_stream(uds1).unwrap(); + let mut frame = Framed::new(uds2, JsonCodec::default()); + + // We'll use the Ping request here because both the request + // and response have few arguments + let req = Request::Ping(requests::PingRequest { + id: PublicKey::from_str( + "0364aeb75519be29d1af7b8cc6232dbda9fdabb79b66e4e1f6a223750954db210b", + ) + .unwrap(), + len: None, + pongbytes: None, + }); + let mock_resp = json!({ + "id" : 1, + "jsonrpc" : "2.0", + "result" : { "totlen" : 123 } + }); + + // we create a task that sends the response and returns the response + let handle = tokio::task::spawn(async move { cln.call(req).await }); + + // Ensure our mock receives the request and sends the response + let read_req = dbg!(frame.next().await.unwrap().unwrap()); + assert_eq!( + read_req, + json!({"id" : 1, "jsonrpc" : "2.0", "method" : "ping", "params" : {"id" : "0364aeb75519be29d1af7b8cc6232dbda9fdabb79b66e4e1f6a223750954db210b"}}) + ); + frame.send(mock_resp).await.unwrap(); + + // Verify that the error response is correct + let rpc_response: Result<_, RpcError> = handle.await.unwrap(); + match rpc_response.unwrap() { + Response::Ping(ping) => { + assert_eq!(ping.totlen, 123); + } + _ => panic!("A Request::Getinfo should return Response::Getinfo"), + } +} + +#[tokio::test] +async fn test_call_typed() { + // Set up the rpc-connection + // The frame represents a Mock rpc-server + let (uds1, uds2) = UnixStream::pair().unwrap(); + let mut cln = ClnRpc::from_stream(uds1).unwrap(); + let mut frame = Framed::new(uds2, JsonCodec::default()); + + // We'll use the Ping request here because both the request + // and response have few arguments + let req = requests::PingRequest { + id: 
PublicKey::from_str( + "0364aeb75519be29d1af7b8cc6232dbda9fdabb79b66e4e1f6a223750954db210b", + ) + .unwrap(), + len: None, + pongbytes: None, + }; + let mock_resp = json!({ + "id" : 1, + "jsonrpc" : "2.0", + "result" : { "totlen" : 123 } + }); + + // we create a task that sends the response and returns the response + let handle = tokio::task::spawn(async move { cln.call_typed(&req).await }); + + // Ensure our mock receives the request and sends the response + _ = dbg!(frame.next().await.unwrap().unwrap()); + frame.send(mock_resp).await.unwrap(); + + // Verify that the response is correct + let rpc_response: Result<_, RpcError> = handle.await.unwrap(); + let ping_response = rpc_response.unwrap(); + assert_eq!(ping_response.totlen, 123); +} + +#[tokio::test] +async fn test_call_typed_remote_error() { + // Create a dummy rpc-request + let req = requests::GetinfoRequest {}; + + // Create a dummy error response + let response = json!({ + "id" : 1, + "jsonrpc" : "2.0", + "error" : { + "code" : 666, + "message" : "MOCK_ERROR", + }}); + + let (uds1, uds2) = UnixStream::pair().unwrap(); + let mut cln = ClnRpc::from_stream(uds1).unwrap(); + + // Send out the request + let mut frame = Framed::new(uds2, JsonCodec::default()); + + let handle = tokio::task::spawn(async move { cln.call_typed(&req).await }); + + // Dummy-server ensures the request has been received and sends the error response + let _ = dbg!(frame.next().await.unwrap().unwrap()); + frame.send(response).await.unwrap(); + + let rpc_response = handle.await.unwrap(); + let rpc_error = rpc_response.expect_err("Must be an RPC-error response"); + + assert_eq!(rpc_error.code.unwrap(), 666); + assert_eq!(rpc_error.message, "MOCK_ERROR"); +} + +#[test] +fn serialize_custom_msg_notification() { + let msg = CustomMsgNotification { + peer_id: PublicKey::from_str( + "0364aeb75519be29d1af7b8cc6232dbda9fdabb79b66e4e1f6a223750954db210b", + ) + .unwrap(), + payload: String::from("941746573749"), + }; + + let notification =
Notification::CustomMsg(msg); + + assert_eq!( + serde_json::to_value(notification).unwrap(), + serde_json::json!( + { + "custommsg" : { + "peer_id" : "0364aeb75519be29d1af7b8cc6232dbda9fdabb79b66e4e1f6a223750954db210b", + "payload" : "941746573749" + } + } + ) + ); +} + +#[test] +fn serialize_block_added_notification() { + let block_added = BlockAddedNotification { + hash: crate::primitives::Sha256::from_str( + "000000000000000000000acab8abe0c67a52ed7e5a90a19c64930ff11fa84eca", + ) + .unwrap(), + height: 830702, + }; + + let notification = Notification::BlockAdded(block_added); + + assert_eq!( + serde_json::to_value(notification).unwrap(), + serde_json::json!({ + "block_added" : { + "hash" : "000000000000000000000acab8abe0c67a52ed7e5a90a19c64930ff11fa84eca", + "height" : 830702 + } + }) + ) +} + +#[test] +fn deserialize_connect_notification() { + let connect_json = serde_json::json!({ + "connect" : { + "address" : { + "address" : "127.0.0.1", + "port" : 38012, + "type" : "ipv4" + }, + "direction" : "in", + "id" : "022d223620a359a47ff7f7ac447c85c46c923da53389221a0054c11c1e3ca31d59" + } + }); + + let _: Notification = serde_json::from_value(connect_json).unwrap(); +} + +#[test] +fn test_peer_connected_hook() { + let peer_connected_payload = serde_json::json!({ + "peer": { + "id": "03864ef025fde8fb587d989186ce6a4a186895ee44a926bfc370e2c366597a3f8f", + "direction": "in", + "addr": "34.239.230.56:9735", + "features": "" + + } + }); + let peer_connected: PeerConnectedEvent = + serde_json::from_value(peer_connected_payload).unwrap(); + assert_eq!( + peer_connected.peer.id, + PublicKey::from_str("03864ef025fde8fb587d989186ce6a4a186895ee44a926bfc370e2c366597a3f8f") + .unwrap() + ); + assert_eq!(peer_connected.peer.addr, "34.239.230.56:9735"); + assert_eq!( + peer_connected.peer.direction, + PeerConnectedPeerDirection::IN + ); + assert_eq!(peer_connected.peer.features, ""); + assert_serde_roundtrip!(peer_connected, PeerConnectedEvent); +} + +#[test] +fn test_recover_hook() { 
+ let r = serde_json::json!( + { + "codex32": "cl10leetsllhdmn9m42vcsamx24zrxgs3qrl7ahwvhw4fnzrhve25gvezzyqqjdsjnzedu43ns" + }); + let d: RecoverHookEvent = serde_json::from_value(r).unwrap(); + assert_eq!( + d.codex32, + "cl10leetsllhdmn9m42vcsamx24zrxgs3qrl7ahwvhw4fnzrhve25gvezzyqqjdsjnzedu43ns" + ); + assert_serde_roundtrip!(d, RecoverHookEvent); +} + +#[test] +fn test_commitment_revocation_hook() { + let r = serde_json::json!({ + "commitment_txid": "58eea2cf538cfed79f4d6b809b920b40bb6b35962c4bb4cc81f5550a7728ab05", + "penalty_tx": "02000000000101...ac00000000", + "channel_id": "fb16398de93e8690c665873715ef590c038dfac5dd6c49a9d4b61dccfcedc2fb", + "commitnum": 21 + }); + let d: CommitmentRevocationEvent = serde_json::from_value(r).unwrap(); + assert_eq!( + d.commitment_txid, + "58eea2cf538cfed79f4d6b809b920b40bb6b35962c4bb4cc81f5550a7728ab05" + ); + assert_eq!(d.penalty_tx, "02000000000101...ac00000000"); + assert_eq!( + d.channel_id.to_string(), + "fb16398de93e8690c665873715ef590c038dfac5dd6c49a9d4b61dccfcedc2fb" + ); + assert_eq!(d.commitnum, 21); + assert_serde_roundtrip!(d, CommitmentRevocationEvent); +} + +#[test] +fn test_db_write_hook() { + let r = serde_json::json!({ + "data_version": 42, + "writes": [ + "PRAGMA foreign_keys = ON" + ] + }); + let d: DbWriteEvent = serde_json::from_value(r).unwrap(); + assert_eq!(d.data_version, 42); + assert_eq!(d.writes, vec!["PRAGMA foreign_keys = ON"]); + assert_serde_roundtrip!(d, DbWriteEvent); +} + +#[test] +fn test_invoice_payment_hook() { + let r = serde_json::json!({ + "payment": { + "label": "unique-label-for-invoice", + "preimage": "0000000000000000000000000000000000000000000000000000000000000000", + "msat": 10000 + } + }); + let d: InvoicePaymentHookEvent = serde_json::from_value(r).unwrap(); + assert_eq!(d.payment.label, "unique-label-for-invoice"); + assert_eq!( + hex::encode(d.payment.preimage.to_vec()), + "0000000000000000000000000000000000000000000000000000000000000000" + ); + assert_eq!(d.payment.msat, 
Amount::from_msat(10000)); + assert_serde_roundtrip!(d, InvoicePaymentHookEvent); +} + +#[test] +fn test_openchannel_hook() { + let r = serde_json::json!({ + "openchannel": { + "id": "03864ef025fde8fb587d989186ce6a4a186895ee44a926bfc370e2c366597a3f8f", + "funding_msat": 100000000, + "push_msat": 0, + "dust_limit_msat": 546000, + "max_htlc_value_in_flight_msat": 18446744073709551615u64, + "channel_reserve_msat": 1000000, + "htlc_minimum_msat": 0, + "feerate_per_kw": 7500, + "to_self_delay": 5, + "max_accepted_htlcs": 483, + "channel_flags": 1 + } + }); + let d: OpenchannelEvent = serde_json::from_value(r).unwrap(); + assert_eq!( + d.openchannel.id.to_string(), + "03864ef025fde8fb587d989186ce6a4a186895ee44a926bfc370e2c366597a3f8f" + ); + assert_eq!(d.openchannel.funding_msat, Amount::from_msat(100000000)); + assert_eq!(d.openchannel.push_msat, Amount::from_msat(0)); + assert_eq!(d.openchannel.dust_limit_msat, Amount::from_msat(546000)); + assert_eq!( + d.openchannel.max_htlc_value_in_flight_msat, + Amount::from_msat(18446744073709551615) + ); + assert_eq!( + d.openchannel.channel_reserve_msat, + Amount::from_msat(1000000) + ); + assert_eq!(d.openchannel.htlc_minimum_msat, Amount::from_msat(0)); + assert_eq!(d.openchannel.feerate_per_kw, 7500); + assert_eq!(d.openchannel.to_self_delay, 5); + assert_eq!(d.openchannel.max_accepted_htlcs, 483); + assert_eq!(d.openchannel.channel_flags, 1); + assert_serde_roundtrip!(d, OpenchannelEvent); +} + +#[test] +fn test_openchannel2_hook() { + let r = serde_json::json!({ + "openchannel2": { + "id": "03864ef025fde8fb587d989186ce6a4a186895ee44a926bfc370e2c366597a3f8f", + "channel_id": "252d1b0a1e57895e84137f28cf19ab2c35847e284c112fefdecc7afeaa5c1de7", + "their_funding_msat": 100000000, + "dust_limit_msat": 546000, + "max_htlc_value_in_flight_msat": 18446744073709551615u64, + "htlc_minimum_msat": 0, + "funding_feerate_per_kw": 7500, + "commitment_feerate_per_kw": 7500, + "feerate_our_max": 10000, + "feerate_our_min": 253, + 
"to_self_delay": 5, + "max_accepted_htlcs": 483, + "channel_flags": 1, + "channel_type": {"bits": [12, 22], "names": ["static_remotekey/even", "anchors/even"]}, + "locktime": 2453, + "channel_max_msat": 16777215000u64, + "requested_lease_msat": 100000000, + "lease_blockheight_start": 683990, + "node_blockheight": 683990, + "require_confirmed_inputs": true + } + }); + let d: Openchannel2Event = serde_json::from_value(r).unwrap(); + assert_eq!( + d.openchannel2.id.to_string(), + "03864ef025fde8fb587d989186ce6a4a186895ee44a926bfc370e2c366597a3f8f" + ); + assert_eq!( + d.openchannel2.channel_id.to_string(), + "252d1b0a1e57895e84137f28cf19ab2c35847e284c112fefdecc7afeaa5c1de7" + ); + assert_eq!( + d.openchannel2.their_funding_msat, + Amount::from_msat(100000000) + ); + assert_eq!(d.openchannel2.dust_limit_msat, Amount::from_msat(546000)); + assert_eq!( + d.openchannel2.max_htlc_value_in_flight_msat, + Amount::from_msat(18446744073709551615) + ); + assert_eq!(d.openchannel2.htlc_minimum_msat, Amount::from_msat(0)); + assert_eq!(d.openchannel2.funding_feerate_per_kw, 7500); + assert_eq!(d.openchannel2.commitment_feerate_per_kw, 7500); + assert_eq!(d.openchannel2.feerate_our_max, 10000); + assert_eq!(d.openchannel2.feerate_our_min, 253); + assert_eq!(d.openchannel2.to_self_delay, 5); + assert_eq!(d.openchannel2.max_accepted_htlcs, 483); + assert_eq!(d.openchannel2.channel_flags, 1); + assert_eq!( + d.openchannel2.channel_type.as_ref().unwrap().bits, + vec![12, 22] + ); + assert_eq!( + d.openchannel2.channel_type.as_ref().unwrap().names, + vec!["static_remotekey/even", "anchors/even"] + ); + assert_eq!(d.openchannel2.locktime, 2453); + assert_eq!( + d.openchannel2.channel_max_msat, + Amount::from_msat(16777215000) + ); + assert_eq!( + d.openchannel2.requested_lease_msat.as_ref().unwrap(), + &Amount::from_msat(100000000) + ); + assert_eq!( + *d.openchannel2.lease_blockheight_start.as_ref().unwrap(), + 683990 + ); + 
assert_eq!(*d.openchannel2.node_blockheight.as_ref().unwrap(), 683990); + assert!(d.openchannel2.require_confirmed_inputs); + assert_serde_roundtrip!(d, Openchannel2Event); +} + +#[test] +fn test_openchannel2_changed_hook() { + let r = serde_json::json!({ + "openchannel2_changed": { + "channel_id": "252d1b0a1e57895e84137f28cf19ab2c35847e284c112fefdecc7afeaa5c1de7", + "psbt": "cHNidP8BADMCAAAAAQ+yBipSVZr...", + "require_confirmed_inputs": false + } + }); + let d: Openchannel2ChangedEvent = serde_json::from_value(r).unwrap(); + assert_eq!( + d.openchannel2_changed.channel_id.to_string(), + "252d1b0a1e57895e84137f28cf19ab2c35847e284c112fefdecc7afeaa5c1de7" + ); + assert_eq!( + d.openchannel2_changed.psbt, + "cHNidP8BADMCAAAAAQ+yBipSVZr..." + ); + assert!(!d.openchannel2_changed.require_confirmed_inputs); + assert_serde_roundtrip!(d, Openchannel2ChangedEvent); +} + +#[test] +fn test_openchannel2_sign_hook() { + let r = serde_json::json!({ + "openchannel2_sign": { + "channel_id": "252d1b0a1e57895e84137f28cf19ab2c35847e284c112fefdecc7afeaa5c1de7", + "psbt": "cHNidP8BADMCAAAAAQ+yBipSVZr..." 
+ } + }); + let d: Openchannel2SignEvent = serde_json::from_value(r).unwrap(); + assert_eq!( + d.openchannel2_sign.channel_id.to_string(), + "252d1b0a1e57895e84137f28cf19ab2c35847e284c112fefdecc7afeaa5c1de7" + ); + assert_eq!(d.openchannel2_sign.psbt, "cHNidP8BADMCAAAAAQ+yBipSVZr..."); + assert_serde_roundtrip!(d, Openchannel2SignEvent); +} + +#[test] +fn test_rbf_channel_hook() { + let r = serde_json::json!({ + "rbf_channel": { + "id": "03864ef025fde8fb587d989186ce6a4a186895ee44a926bfc370e2c366597a3f8f", + "channel_id": "252d1b0a1e57895e84137f28cf19ab2c35847e284c112fefdecc7afeaa5c1de7", + "their_last_funding_msat": 100000000, + "their_funding_msat": 100000000, + "our_last_funding_msat": 100000000, + "funding_feerate_per_kw": 7500, + "feerate_our_max": 10000, + "feerate_our_min": 253, + "channel_max_msat": 16777215000u64, + "locktime": 2453, + "requested_lease_msat": 100000000, + "require_confirmed_inputs": true + } + }); + let d: RbfChannelEvent = serde_json::from_value(r).unwrap(); + assert_eq!( + d.rbf_channel.id.to_string(), + "03864ef025fde8fb587d989186ce6a4a186895ee44a926bfc370e2c366597a3f8f" + ); + assert_eq!( + d.rbf_channel.channel_id.to_string(), + "252d1b0a1e57895e84137f28cf19ab2c35847e284c112fefdecc7afeaa5c1de7" + ); + assert_eq!( + d.rbf_channel.their_last_funding_msat, + Amount::from_msat(100000000) + ); + assert_eq!( + d.rbf_channel.their_funding_msat, + Amount::from_msat(100000000) + ); + assert_eq!( + d.rbf_channel.our_last_funding_msat, + Amount::from_msat(100000000) + ); + assert_eq!(d.rbf_channel.funding_feerate_per_kw, 7500); + assert_eq!(d.rbf_channel.feerate_our_max, 10000); + assert_eq!(d.rbf_channel.feerate_our_min, 253); + assert_eq!( + d.rbf_channel.channel_max_msat, + Amount::from_msat(16777215000) + ); + assert_eq!(d.rbf_channel.locktime, 2453); + assert_eq!( + d.rbf_channel.requested_lease_msat.unwrap(), + Amount::from_msat(100000000) + ); + assert!(d.rbf_channel.require_confirmed_inputs); + assert_serde_roundtrip!(d, RbfChannelEvent); 
+} + +#[test] +fn test_htlc_accepted_hook() { + let r = serde_json::json!({ + "peer_id": "02df5ffe895c778e10f7742a6c5b8a0cefbe9465df58b92fadeb883752c8107c8f", + "onion": { + "payload": "", + "short_channel_id": "1x2x3", + "forward_msat": 42, + "outgoing_cltv_value": 500014, + "shared_secret": "0000000000000000000000000000000000000000000000000000000000000000", + "next_onion": "[1365bytes of serialized onion]" + }, + "htlc": { + "short_channel_id": "4x5x6", + "id": 27, + "amount_msat": 43, + "cltv_expiry": 500028, + "cltv_expiry_relative": 10, + "payment_hash": "0000000000000000000000000000000000000000000000000000000000000000", + "extra_tlvs": "fdffff012afe00010001020539" + }, + "forward_to": "0000000000000000000000000000000000000000000000000000000000000000" + }); + let d: HtlcAcceptedEvent = serde_json::from_value(r).unwrap(); + assert_eq!( + d.peer_id.unwrap().to_string(), + "02df5ffe895c778e10f7742a6c5b8a0cefbe9465df58b92fadeb883752c8107c8f" + ); + + assert_eq!(d.onion.payload, ""); + assert_eq!(d.onion.short_channel_id.unwrap().to_string(), "1x2x3"); + assert_eq!(d.onion.forward_msat.unwrap(), Amount::from_msat(42)); + assert_eq!(d.onion.outgoing_cltv_value.unwrap(), 500014); + assert_eq!( + hex::encode(d.onion.shared_secret.to_vec()), + "0000000000000000000000000000000000000000000000000000000000000000" + ); + assert_eq!(d.onion.next_onion, "[1365bytes of serialized onion]"); + + assert_eq!(d.htlc.short_channel_id.to_string(), "4x5x6"); + assert_eq!(d.htlc.id, 27); + assert_eq!(d.htlc.amount_msat, Amount::from_msat(43)); + assert_eq!(d.htlc.cltv_expiry, 500028); + assert_eq!(d.htlc.cltv_expiry_relative, 10); + assert_eq!( + d.htlc.payment_hash.to_string(), + "0000000000000000000000000000000000000000000000000000000000000000" + ); + assert_eq!( + d.htlc.extra_tlvs.as_ref().unwrap(), + "fdffff012afe00010001020539" + ); + + assert_eq!( + d.forward_to.unwrap().to_string(), + "0000000000000000000000000000000000000000000000000000000000000000" + ); + 
assert_serde_roundtrip!(d, HtlcAcceptedEvent); +} + +#[test] +fn test_rpc_command_hook() { + let r = serde_json::json!({ + "rpc_command": { + "id": "3", + "method": "method_name", + "params": { + "param_1": [], + "param_2": {}, + "param_n": "", + } + } + }); + let d: RpcCommandEvent = serde_json::from_value(r).unwrap(); + match &d.rpc_command.id { + JsonScalar::String(s) => assert_eq!(s, "3"), + _ => panic!("should be string"), + } + assert_eq!(d.rpc_command.method, "method_name"); + + let mut params = serde_json::Map::new(); + params.insert("param_1".to_string(), serde_json::Value::Array(vec![])); + params.insert( + "param_2".to_string(), + serde_json::Value::Object(serde_json::Map::new()), + ); + params.insert( + "param_n".to_string(), + serde_json::Value::String("".to_string()), + ); + assert_eq!( + d.rpc_command.params, + JsonObjectOrArray::Object(params.clone()) + ); + assert_serde_roundtrip!(d, RpcCommandEvent); + + let q = serde_json::json!({ + "replace": { + "jsonrpc": "2.0", + "id": "3", + "method": "method_name", + "params": { + "param_1": [], + "param_2": {}, + "param_n": "", + } + } + }); + let e: RpcCommandAction = serde_json::from_value(q).unwrap(); + match &e.replace.as_ref().unwrap().id { + JsonScalar::String(s) => assert_eq!(s, "3"), + _ => panic!("should be string"), + } + assert_eq!(e.replace.as_ref().unwrap().method, "method_name"); + assert_eq!( + e.replace.as_ref().unwrap().params, + JsonObjectOrArray::Object(params) + ); + assert_serde_roundtrip!(e, RpcCommandAction); + + let r = serde_json::json!({ + "rpc_command": { + "id": 3, + "method": "method_name", + "params": { + "param_1": [], + "param_2": {}, + "param_n": "", + } + } + }); + let d: RpcCommandEvent = serde_json::from_value(r).unwrap(); + match &d.rpc_command.id { + JsonScalar::Number(number) => assert_eq!(number.as_u64().unwrap(), 3), + _ => panic!("should be number"), + } + assert_eq!(d.rpc_command.method, "method_name"); + + let mut params = serde_json::Map::new(); + 
params.insert("param_1".to_string(), serde_json::Value::Array(vec![])); + params.insert( + "param_2".to_string(), + serde_json::Value::Object(serde_json::Map::new()), + ); + params.insert( + "param_n".to_string(), + serde_json::Value::String("".to_string()), + ); + assert_eq!( + d.rpc_command.params, + JsonObjectOrArray::Object(params.clone()) + ); + assert_serde_roundtrip!(d, RpcCommandEvent); + + let q = serde_json::json!({ + "replace": { + "jsonrpc": "2.0", + "id": 3, + "method": "method_name", + "params": { + "param_1": [], + "param_2": {}, + "param_n": "", + } + } + }); + let e: RpcCommandAction = serde_json::from_value(q).unwrap(); + match &e.replace.as_ref().unwrap().id { + JsonScalar::Number(number) => assert_eq!(number.as_u64().unwrap(), 3), + _ => panic!("should be number"), + } + assert_eq!(e.replace.as_ref().unwrap().method, "method_name"); + assert_eq!( + e.replace.as_ref().unwrap().params, + JsonObjectOrArray::Object(params) + ); + assert_serde_roundtrip!(e, RpcCommandAction); +} + +#[test] +fn test_custommsg_hook() { + let r = serde_json::json!({ + "peer_id": "02df5ffe895c778e10f7742a6c5b8a0cefbe9465df58b92fadeb883752c8107c8f", + "payload": "1337ffffffff" + }); + let d: CustommsgHookEvent = serde_json::from_value(r).unwrap(); + assert_eq!( + d.peer_id.to_string(), + "02df5ffe895c778e10f7742a6c5b8a0cefbe9465df58b92fadeb883752c8107c8f" + ); + assert_eq!(d.payload, "1337ffffffff"); + assert_serde_roundtrip!(d, CustommsgHookEvent); +} + +#[test] +fn test_onionmessage_recv() { + let r = serde_json::json!({ + "onion_message": { + "reply_blindedpath": { + "first_node_id": "02df5ffe895c778e10f7742a6c5b8a0cefbe9465df58b92fadeb883752c8107c8f", + "first_scid": "100x200x300", + "first_scid_dir": 1, + "first_path_key": "02df5ffe895c778e10f7742a6c5b8a0cefbe9465df58b92fadeb883752c8107c8f", + "hops": [ + { + "blinded_node_id": "02df5ffe895c778e10f7742a6c5b8a0cefbe9465df58b92fadeb883752c8107c8f", + "encrypted_recipient_data": "0a020d0da" + } + ] + }, + 
"invoice_request": "0a020d0db", + "invoice": "0a020d0dc", + "invoice_error": "0a020d0dd", + "unknown_fields": [ + { + "number": 12345, + "value": "0a020d0de" + } + ] + } + }); + let d: OnionMessageRecvEvent = serde_json::from_value(r).unwrap(); + assert_eq!( + d.onion_message + .reply_blindedpath + .as_ref() + .unwrap() + .first_node_id + .as_ref() + .unwrap() + .to_string(), + "02df5ffe895c778e10f7742a6c5b8a0cefbe9465df58b92fadeb883752c8107c8f" + ); + assert_eq!( + d.onion_message + .reply_blindedpath + .as_ref() + .unwrap() + .first_scid + .as_ref() + .unwrap() + .to_string(), + "100x200x300" + ); + assert_eq!( + d.onion_message + .reply_blindedpath + .as_ref() + .unwrap() + .first_scid_dir + .as_ref() + .unwrap(), + &1 + ); + assert_eq!( + d.onion_message + .reply_blindedpath + .as_ref() + .unwrap() + .first_path_key + .as_ref() + .unwrap() + .to_string(), + "02df5ffe895c778e10f7742a6c5b8a0cefbe9465df58b92fadeb883752c8107c8f" + ); + assert_eq!( + d.onion_message + .reply_blindedpath + .as_ref() + .unwrap() + .hops + .as_ref() + .unwrap() + .len(), + 1 + ); + assert_eq!( + d.onion_message + .reply_blindedpath + .as_ref() + .unwrap() + .hops + .as_ref() + .unwrap() + .first() + .unwrap() + .blinded_node_id + .to_string(), + "02df5ffe895c778e10f7742a6c5b8a0cefbe9465df58b92fadeb883752c8107c8f" + ); + assert_eq!( + d.onion_message + .reply_blindedpath + .as_ref() + .unwrap() + .hops + .as_ref() + .unwrap() + .first() + .unwrap() + .encrypted_recipient_data, + "0a020d0da" + ); + assert_eq!( + d.onion_message.invoice_request.as_ref().unwrap(), + "0a020d0db" + ); + assert_eq!(d.onion_message.invoice.as_ref().unwrap(), "0a020d0dc"); + assert_eq!(d.onion_message.invoice_error.as_ref().unwrap(), "0a020d0dd"); + assert_eq!(d.onion_message.unknown_fields.as_ref().unwrap().len(), 1); + assert_eq!( + d.onion_message + .unknown_fields + .as_ref() + .unwrap() + .first() + .unwrap() + .number, + 12345 + ); + assert_eq!( + d.onion_message + .unknown_fields + .as_ref() + .unwrap() 
+ .first() + .unwrap() + .value, + "0a020d0de" + ); + assert_serde_roundtrip!(d, OnionMessageRecvEvent); +} + +#[test] +fn test_onionmessage_recv_secret() { + let r = serde_json::json!({ + "onion_message": { + "pathsecret": "0000000000000000000000000000000000000000000000000000000000000000", + "reply_blindedpath": { + "first_node_id": "02df5ffe895c778e10f7742a6c5b8a0cefbe9465df58b92fadeb883752c8107c8f", + "first_scid": "100x200x300", + "first_scid_dir": 1, + "first_path_key": "02df5ffe895c778e10f7742a6c5b8a0cefbe9465df58b92fadeb883752c8107c8f", + "hops": [ + { + "blinded_node_id": "02df5ffe895c778e10f7742a6c5b8a0cefbe9465df58b92fadeb883752c8107c8f", + "encrypted_recipient_data": "0a020d0da" + } + ] + }, + "invoice_request": "0a020d0db", + "invoice": "0a020d0dc", + "invoice_error": "0a020d0dd", + "unknown_fields": [ + { + "number": 12345, + "value": "0a020d0de" + } + ] + } + }); + let d: OnionMessageRecvSecretEvent = serde_json::from_value(r).unwrap(); + assert_eq!( + hex::encode(d.onion_message.pathsecret.to_vec()), + "0000000000000000000000000000000000000000000000000000000000000000" + ); + assert_eq!( + d.onion_message + .reply_blindedpath + .as_ref() + .unwrap() + .first_node_id + .as_ref() + .unwrap() + .to_string(), + "02df5ffe895c778e10f7742a6c5b8a0cefbe9465df58b92fadeb883752c8107c8f" + ); + assert_eq!( + d.onion_message + .reply_blindedpath + .as_ref() + .unwrap() + .first_scid + .as_ref() + .unwrap() + .to_string(), + "100x200x300" + ); + assert_eq!( + d.onion_message + .reply_blindedpath + .as_ref() + .unwrap() + .first_scid_dir + .as_ref() + .unwrap(), + &1 + ); + assert_eq!( + d.onion_message + .reply_blindedpath + .as_ref() + .unwrap() + .first_path_key + .as_ref() + .unwrap() + .to_string(), + "02df5ffe895c778e10f7742a6c5b8a0cefbe9465df58b92fadeb883752c8107c8f" + ); + assert_eq!( + d.onion_message + .reply_blindedpath + .as_ref() + .unwrap() + .hops + .as_ref() + .unwrap() + .len(), + 1 + ); + assert_eq!( + d.onion_message + .reply_blindedpath + 
.as_ref() + .unwrap() + .hops + .as_ref() + .unwrap() + .first() + .unwrap() + .blinded_node_id + .to_string(), + "02df5ffe895c778e10f7742a6c5b8a0cefbe9465df58b92fadeb883752c8107c8f" + ); + assert_eq!( + d.onion_message + .reply_blindedpath + .as_ref() + .unwrap() + .hops + .as_ref() + .unwrap() + .first() + .unwrap() + .encrypted_recipient_data, + "0a020d0da" + ); + assert_eq!( + d.onion_message.invoice_request.as_ref().unwrap(), + "0a020d0db" + ); + assert_eq!(d.onion_message.invoice.as_ref().unwrap(), "0a020d0dc"); + assert_eq!(d.onion_message.invoice_error.as_ref().unwrap(), "0a020d0dd"); + assert_eq!(d.onion_message.unknown_fields.as_ref().unwrap().len(), 1); + assert_eq!( + d.onion_message + .unknown_fields + .as_ref() + .unwrap() + .first() + .unwrap() + .number, + 12345 + ); + assert_eq!( + d.onion_message + .unknown_fields + .as_ref() + .unwrap() + .first() + .unwrap() + .value, + "0a020d0de" + ); + assert_serde_roundtrip!(d, OnionMessageRecvSecretEvent); +} diff --git a/contrib/msggen/msggen/__main__.py b/contrib/msggen/msggen/__main__.py index 46d0e28e4458..d637d3c02c2c 100644 --- a/contrib/msggen/msggen/__main__.py +++ b/contrib/msggen/msggen/__main__.py @@ -4,7 +4,7 @@ from pathlib import Path from msggen.gen.grpc import GrpcGenerator, GrpcConverterGenerator, GrpcUnconverterGenerator, GrpcServerGenerator from msggen.gen.grpc2py import Grpc2PyGenerator -from msggen.gen.rpc import RustGenerator, NotificationGenerator +from msggen.gen.rpc import RustGenerator, NotificationGenerator, HookGenerator from msggen.gen.generator import GeneratorChain from msggen.utils import load_jsonrpc_service, combine_schemas import logging @@ -54,6 +54,10 @@ def add_handler_gen_rust_jsonrpc(generator_chain: GeneratorChain, meta): dest = open(fname, "w") generator_chain.add_generator(NotificationGenerator(dest, meta)) + fname = Path("cln-rpc") / "src" / "hooks.rs" + dest = open(fname, "w") + generator_chain.add_generator(HookGenerator(dest, meta)) + def load_msggen_meta(): 
meta = json.load(open('.msggen.json', 'r')) diff --git a/contrib/msggen/msggen/gen/grpc/convert.py b/contrib/msggen/msggen/gen/grpc/convert.py index a65c447db4fc..a1ea3f5064c0 100644 --- a/contrib/msggen/msggen/gen/grpc/convert.py +++ b/contrib/msggen/msggen/gen/grpc/convert.py @@ -133,8 +133,13 @@ def generate_composite(self, prefix, field: CompositeField, override=None): "DecodeRoutehintList?": f"c.{name}.map(|drl| drl.into())", "string_map": f"Some(c.{name})", "string_map?": f"c.{name}.unwrap_or(HashMap::new())", + "json_object_or_array": f"Some(c.{name})", + "json_object_or_array?": f"c.{name}.map(|f| f.into())", + "json_scalar": f"Some(c.{name})", + "json_scalar?": f"c.{name}.map(|f| f.into())", }.get( - typ, f"c.{name}" # default to just assignment + typ, + f"c.{name}", # default to just assignment ) if f.deprecated: diff --git a/contrib/msggen/msggen/gen/grpc/unconvert.py b/contrib/msggen/msggen/gen/grpc/unconvert.py index 73984f75a47a..69f4d9d88965 100644 --- a/contrib/msggen/msggen/gen/grpc/unconvert.py +++ b/contrib/msggen/msggen/gen/grpc/unconvert.py @@ -134,8 +134,13 @@ def generate_composite(self, prefix, field: CompositeField, override=None) -> No "TlvStream?": f"c.{name}.map(|s| s.into())", "string_map": f"c.{name}.unwrap()", "string_map?": f"Some(c.{name})", + "json_object_or_array": f"c.{name}.unwrap()", + "json_object_or_array?": f"Some(c.{name})", + "json_scalar": f"c.{name}.unwrap()", + "json_scalar?": f"Some(c.{name})", }.get( - typ, f"c.{name}" # default to just assignment + typ, + f"c.{name}", # default to just assignment ) self.write(f"{name}: {rhs}, // Rule #1 for type {typ}\n", numindent=3) elif isinstance(f, CompositeField): diff --git a/contrib/msggen/msggen/gen/grpc/util.py b/contrib/msggen/msggen/gen/grpc/util.py index 267691d59be6..fcfb1512255a 100644 --- a/contrib/msggen/msggen/gen/grpc/util.py +++ b/contrib/msggen/msggen/gen/grpc/util.py @@ -36,6 +36,8 @@ "bip340sig": "string", "hash": "bytes", "string_map": "map", + 
"json_object_or_array": "JsonObjectOrArray", + "json_scalar": "JsonScalar", } diff --git a/contrib/msggen/msggen/gen/grpc2py.py b/contrib/msggen/msggen/gen/grpc2py.py index 9a1feba9aa4f..3f4e1809eadb 100644 --- a/contrib/msggen/msggen/gen/grpc2py.py +++ b/contrib/msggen/msggen/gen/grpc2py.py @@ -21,8 +21,8 @@ def decamelcase(c): "ListPeers.peers[].channels[].state_changes[]": None, } override_field = { - 'RoutehintList': '"routes": [[decodekeysend_routes2py(i) for i in routehints] for routehints in m.routes]', - 'DecodeRoutehintList': '"routes": [[decodepay_routes2py(i) for i in routehints] for routehints in m.routes]', + "RoutehintList": '"routes": [[decodekeysend_routes2py(i) for i in routehints] for routehints in m.routes]', + "DecodeRoutehintList": '"routes": [[decodepay_routes2py(i) for i in routehints] for routehints in m.routes]', } @@ -59,6 +59,9 @@ def __init__(self, dest: TextIO): "currency": "m.{name}", "number": "m.{name}", "outpoint": "m.{name}", + "string_map": "m.{name}", + "json_object_or_array": "m.{name}", + "json_scalar": "m.{name}", } def generate_responses(self, service): @@ -168,7 +171,10 @@ def {converter_name}(m): for f in field.fields: name = f.normalized() if isinstance(f, PrimitiveField) and f.typename in override_field: - self.write(f' {override_field[f.typename]}, # OverrideField in {f.typename}\n', cleanup=False) + self.write( + f" {override_field[f.typename]}, # OverrideField in {f.typename}\n", + cleanup=False, + ) elif isinstance(f, PrimitiveField): typ = f.typename diff --git a/contrib/msggen/msggen/gen/rpc/__init__.py b/contrib/msggen/msggen/gen/rpc/__init__.py index dee5ba87eede..0eaba3f813b5 100644 --- a/contrib/msggen/msggen/gen/rpc/__init__.py +++ b/contrib/msggen/msggen/gen/rpc/__init__.py @@ -1,4 +1,5 @@ +from msggen.gen.rpc.hook import HookGenerator from msggen.gen.rpc.notification import NotificationGenerator from msggen.gen.rpc.rust import RustGenerator -__all__ = [RustGenerator, NotificationGenerator] +__all__ = 
[RustGenerator, NotificationGenerator, HookGenerator]
diff --git a/contrib/msggen/msggen/gen/rpc/hook.py b/contrib/msggen/msggen/gen/rpc/hook.py
new file mode 100644
index 000000000000..5a084fb617cb
--- /dev/null
+++ b/contrib/msggen/msggen/gen/rpc/hook.py
@@ -0,0 +1,133 @@
+import logging
+from textwrap import dedent, indent
+from typing import Any, Dict, Optional, TextIO, List
+
+from msggen.model import Service, TypeName, CompositeField, EnumField, Hook
+from msggen.gen.generator import IGenerator
+from msggen.gen.rpc.rust import gen_composite
+from msggen.gen.grpc.proto import gather_subfields
+
+
+class HookGenerator(IGenerator):
+    def __init__(self, dest: TextIO, meta: Dict[str, Any]):
+        self.dest = dest
+        self.meta = meta
+        self.logger = logging.getLogger(__name__)
+        self.meta = meta
+
+    def write(self, text: str, numindent: Optional[int] = None) -> None:
+        raw = dedent(text)
+        if numindent is not None:
+            raw = indent(text, " " * numindent)
+
+        self.dest.write(raw)
+
+    def gather_hook_types(self, hooks: List[Hook]):
+        """Gather all types that might need to be defined
+        to represent hooks
+        """
+        types = []
+        for hook in hooks:
+            types.extend([hook.request, hook.response])
+            for field in hook.request.fields:
+                types.extend(gather_subfields(field))
+            for field in hook.response.fields:
+                types.extend(gather_subfields(field))
+        return types
+
+    def enumvar2number(self, typename: TypeName, variant):
+        """Find an existing variant number or generate a new one.
+ + If we don't have a variant number yet we'll just take the + largest one assigned so far and increment it by 1.""" + + typename = str(typename.name) + + m = self.meta["rpc-only-enum-map"] + variant = str(variant) + if typename not in m: + m[typename] = {} + + variants = m[typename] + if variant in variants: + return variants[variant] + + # Now find the maximum and increment once + n = max(variants.values()) if len(variants) else -1 + + m[typename][variant] = n + 1 + return m[typename][variant] + + def generate_enum(self, service: Service): + self.write("#[derive(Clone, Debug, Deserialize, Serialize)]\n") + self.write("pub enum Hook {\n") + for hook in service.hooks: + tn = hook.typename + name = hook.name + self.write(f'#[serde(rename = "{name}")]\n', numindent=1) + (self.write(f"{tn}(events::{tn}Event),\n", numindent=1),) + self.write("}\n") + + def generate_metadata(self, message: CompositeField, typename_override=None): + if message.omit(): + return + + # If override is not specified it is a function that returns itself + # This is equivalent to do not override + if typename_override is None: + typename_override = lambda x: x + + for _, f in enumerate(message.fields): + if isinstance(f, EnumField) and not f.override(): + self.logger.debug(f"Generating enum {f}") + + for i, v in self.enumerate_enum( + typename_override(f.typename), f.variants + ): + self.logger.debug(f"Generating enum variant {v}") + + def enumerate_enum(self, typename, variants): + enumerated_values = [(self.enumvar2number(typename, v), v) for v in variants] + sorted_enumerated_values = sorted(enumerated_values, key=lambda x: x[0]) + for i, v in sorted_enumerated_values: + yield (i, v) + + def generate(self, service: Service) -> None: + hook_fields = self.gather_hook_types(service.hooks) + for message in [f for f in hook_fields if isinstance(f, CompositeField)]: + self.generate_metadata(message, hook_typename_overrides) + + self.write("// This file is autogenerated by `msggen`\n") + 
self.write("// Do not edit it manually, your changes will be overwritten\n\n\n") + self.write("\n") + self.write("use serde::{Serialize, Deserialize};\n") + + self.generate_enum(service) + self.write("\n\n") + + self.write("pub mod events{\n") + self.write("use crate::primitives::*;\n", numindent=1) + self.write("use serde::{Serialize, Deserialize};\n\n", numindent=1) + for hook in service.hooks: + _, req_decl = gen_composite(hook.request, self.meta) + self.write(req_decl, numindent=1) + self.write("}\n") + + self.write("pub mod actions{\n") + self.write("use crate::primitives::*;\n", numindent=1) + self.write("use serde::{Serialize, Deserialize};\n\n", numindent=1) + for hook in service.hooks: + _, resp_decl = gen_composite( + hook.response, self.meta, hook_typename_overrides + ) + self.write(resp_decl, numindent=1) + self.write("}\n") + + +def hook_typename_overrides(typename: str): + # no overrides needed for now + # if isinstance(typename, TypeName): + # return_class = TypeName + # else: + # return_class = str + return typename diff --git a/contrib/msggen/msggen/gen/rpc/rust.py b/contrib/msggen/msggen/gen/rpc/rust.py index b2fa76251666..07b51a47b095 100644 --- a/contrib/msggen/msggen/gen/rpc/rust.py +++ b/contrib/msggen/msggen/gen/rpc/rust.py @@ -47,6 +47,8 @@ "bip340sig": "String", "integer": "i64", "string_map": "HashMap", + "json_object_or_array": "JsonObjectOrArray", + "json_scalar": "JsonScalar", } header = f""" @@ -110,12 +112,17 @@ def gen_enum(e, meta, override): m = meta["grpc-field-map"] m2 = meta["grpc-enum-map"] + m3 = meta["rpc-only-enum-map"] + + count = sum(message_name in d for d in (m, m2, m3)) + assert count <= 1 - assert not (message_name in m and message_name in m2) if message_name in m: m = m[message_name] elif message_name in m2: m = m2[message_name] + elif message_name in m3: + m = m3[message_name] else: m = {} @@ -221,7 +228,7 @@ def gen_primitive(p): def rename_if_necessary(original, name): if str(original) != str(name): - return f" 
#[serde(rename = \"{original}\")]\n" + return f' #[serde(rename = "{original}")]\n' else: return f"" @@ -253,9 +260,9 @@ def gen_array(a, meta, override=None): # Note: flake8 gets confused on these strings in f strings, hence suppression: # contrib/msggen/msggen/gen/rpc/rust.py:250:42: E226 missing whitespace around arithmetic operator if not a.optional: - defi += f" pub {name}: {'Vec<'*a.dims}{itemtype}{'>'*a.dims},\n" # noqa: E226 + defi += f" pub {name}: {'Vec<' * a.dims}{itemtype}{'>' * a.dims},\n" # noqa: E226 else: - defi += f" #[serde(skip_serializing_if = \"crate::is_none_or_empty\")]\n pub {name}: Option<{'Vec<'*a.dims}{itemtype}{'>'*a.dims}>,\n" # noqa: E226 + defi += f' #[serde(skip_serializing_if = "crate::is_none_or_empty")]\n pub {name}: Option<{"Vec<" * a.dims}{itemtype}{">" * a.dims}>,\n' # noqa: E226 return (defi, decl) @@ -282,7 +289,7 @@ def gen_composite(c, meta, override=None) -> Tuple[str, str]: if not c.optional: defi += f" pub {c.normalized()}: {c.typename},\n" else: - defi += f" #[serde(skip_serializing_if = \"Option::is_none\")]\n pub {c.normalized()}: Option<{c.typename}>,\n" + defi += f' #[serde(skip_serializing_if = "Option::is_none")]\n pub {c.normalized()}: Option<{c.typename}>,\n' return defi, r diff --git a/contrib/msggen/msggen/model.py b/contrib/msggen/msggen/model.py index a48336a97145..3925fd7f1c14 100644 --- a/contrib/msggen/msggen/model.py +++ b/contrib/msggen/msggen/model.py @@ -17,7 +17,8 @@ def __init__(self, name): def normalized(self): name = { - "type": "item_type" + "type": "item_type", + "return": "return_", }.get(self.name, self.name) name = name.replace(' ', '_').replace('-', '_').replace('[]', '').replace("/", "_") return name @@ -135,10 +136,11 @@ def override(self, default: Optional[str] = None) -> Optional[str]: class Service: """Top level class that wraps all the RPC methods. 
""" - def __init__(self, name: str, methods=None, notifications=None): + def __init__(self, name: str, methods=None, notifications=None, hooks=None): self.name: str = name self.methods: List[Method] = [] if methods is None else methods self.notifications: List[Notification] = [] if notifications is None else notifications + self.hooks: List[Hook] = [] if hooks is None else hooks # If we require linking with some external files we'll add # them here so the generator can use them. @@ -175,9 +177,24 @@ def gather_subfields(field: Field) -> List[Field]: for field in notification.response.fields: types.extend(gather_subfields(field)) + for hook in self.hooks: + types.extend([hook.request]) + for field in hook.request.fields: + types.extend(gather_subfields(field)) + for field in hook.response.fields: + types.extend(gather_subfields(field)) + return types +class Hook: + def __init__(self, name: str, typename: str, request: Field, response: Field): + self.name = name + self.typename = typename + self.request = request + self.response = response + + class Notification: def __init__(self, name: str, typename: str, request: Field, response: Field): self.name = name @@ -338,6 +355,9 @@ def __eq__(self, other): return self.variant == other.variant def normalized(self): + if self.variant.replace('.', '', 1).isdigit() or self.variant.lstrip('-').replace('.', '', 1).isdigit(): + normalized = self.variant.replace('.', '_') + return f"NUM_{normalized}" return self.variant.replace(' ', '_').replace('-', '_').replace("/", "_").upper() @@ -433,6 +453,8 @@ class PrimitiveField(Field): "bip340sig", "hash", "string_map", + "json_object_or_array", + "json_scalar" ] def __init__(self, typename, path, description, added, deprecated): @@ -521,6 +543,8 @@ def __str__(self): CheckRuneParamsField = ArrayField(itemtype=PrimitiveField("string", None, None, added=None, deprecated=None), dims=1, path=None, description=None, added=None, deprecated=None) ChainMovesExtraTagsField = 
ArrayField(itemtype=PrimitiveField("string", None, None, added=None, deprecated=None), dims=1, path=None, description=None, added=None, deprecated=None) ClnrestRegisterPathParamsField = PrimitiveField("string_map", None, None, added=None, deprecated=None) +JsonIdField = PrimitiveField("json_scalar", None, None, added=None, deprecated=None) +JsonObjectOrArrayField = PrimitiveField("json_object_or_array", None, None, added=None, deprecated=None) # TlvStreams are special, they don't have preset dict-keys, rather # they can specify `u64` keys pointing to hex payloads. So the schema @@ -557,6 +581,10 @@ def __str__(self): 'CheckRune.params': CheckRuneParamsField, "ListChainMoves.chainmoves[].extra_tags": ChainMovesExtraTagsField, "Clnrest-Register-Path.rune_restrictions.params": ClnrestRegisterPathParamsField, + "rpc_command.replace.id": JsonIdField, + "rpc_command.replace.params": JsonObjectOrArrayField, + "rpc_command.rpc_command.id": JsonIdField, + "rpc_command.rpc_command.params": JsonObjectOrArrayField } diff --git a/contrib/msggen/msggen/patch.py b/contrib/msggen/msggen/patch.py index 98c80050ed1c..cac192b2c377 100644 --- a/contrib/msggen/msggen/patch.py +++ b/contrib/msggen/msggen/patch.py @@ -50,6 +50,14 @@ def recurse(f: model.Field, inherited_added: Optional[str] = None, inherited_dep root_deprecated = root_deprecated[0] recurse(n.request, inherited_added=root_added, inherited_deprecated=root_deprecated) recurse(n.response, inherited_added=root_added, inherited_deprecated=root_deprecated) + for h in service.hooks: + root_added = getattr(h.request, 'added', None) or getattr(h, 'added', None) + root_deprecated = getattr(h.request, 'deprecated', None) or getattr(h, 'deprecated', None) + if isinstance(root_deprecated, list): + assert len(root_deprecated) == 2 + root_deprecated = root_deprecated[0] + recurse(h.request, inherited_added=root_added, inherited_deprecated=root_deprecated) + recurse(h.response, inherited_added=root_added, 
inherited_deprecated=root_deprecated) class VersionAnnotationPatch(Patch): diff --git a/contrib/msggen/msggen/schema.json b/contrib/msggen/msggen/schema.json index dc2744813c27..88b241c8e00d 100644 --- a/contrib/msggen/msggen/schema.json +++ b/contrib/msggen/msggen/schema.json @@ -39938,5 +39938,2270 @@ } } } + }, + "hooks": { + "commitment_revocation.json": { + "$schema": "../rpc-schema-draft.json", + "added": "pre-v0.10.1", + "type": "object", + "notification": "commitment_revocation", + "title": "Hook fired when a commitment transaction is revoked", + "description": [ + "The **commitment_revocation** hook is called whenever a channel state is updated, and the old state was revoked. State updates in Lightning consist of the following steps:", + "", + "1. Proposal of a new state commitment in the form of a commitment transaction", + "2. Exchange of signatures for the agreed upon commitment transaction", + "3. Verification that the signatures match the commitment transaction", + "4. Exchange of revocation secrets that could be used to penalize an eventual misbehaving party", + "", + "The `commitment_revocation` hook is used to inform the plugin about the state transition being completed, and deliver the penalty transaction.", + "The penalty transaction could then be sent to a watchtower that automatically reacts in case one party attempts to settle using a revoked commitment.", + "", + "This is a chained hook: multiple plugins may be registered." + ], + "request": { + "additionalProperties": false, + "required": [ + "commitment_txid", + "penalty_tx", + "channel_id", + "commitnum" + ], + "properties": { + "commitment_txid": { + "type": "txid", + "description": [ + "The txid of the revoked commitment transaction." + ] + }, + "penalty_tx": { + "type": "hex", + "description": [ + "The penalty transaction that can spend the revoked commitment.", + "Can be sent to a watchtower for enforcement." 
+ ] + }, + "channel_id": { + "added": "v0.10.2", + "type": "hash", + "description": [ + "The channel_id for which the revocation occurred." + ] + }, + "commitnum": { + "added": "v0.10.2", + "type": "u64", + "description": [ + "The commitment number identifying the revoked state." + ] + } + } + }, + "response": { + "additionalProperties": false, + "required": [ + "result" + ], + "properties": { + "result": { + "type": "string", + "enum": [ + "continue" + ], + "description": [ + "Plugins should always return \"continue\", otherwise subsequent hook subscribers would not get called." + ] + } + } + } + }, + "custommsg.json": { + "$schema": "../rpc-schema-draft.json", + "added": "pre-v0.10.1", + "type": "object", + "notification": "custommsg", + "title": "Hook for handling custom peer messages", + "description": [ + "The **custommsg** hook is the receiving counterpart to the sendcustommsg RPC method and is called whenever a peer sends a custom message that is not handled internally by Core Lightning.", + "", + "The goal of these two components is to allow the implementation of custom protocols or prototypes on top of a Core Lightning node, without having to change the node's implementation itself.", + "", + "Messages are restricted to odd-numbered types and must not conflict with internally handled message types.", + "These limitations are in place in order to avoid conflicts with the internal state tracking, and avoiding disconnections or channel closures, since odd-numbered message can be ignored by nodes (see \"it's ok to be odd\" in BOLT #1 for details).", + "", + "Note that if the hook registration specifies \"filters\" then that should be a JSON array of message numbers, and the hook will only be called for those.", + "Otherwise, the hook is called for all messages not handled internally. (added in v25.12)", + "", + "This is a chained hook and MUST return `{\"result\": \"continue\"}`." 
+ ], + "request": { + "required": [ + "peer_id", + "payload" + ], + "additionalProperties": false, + "properties": { + "peer_id": { + "type": "pubkey", + "description": [ + "The `node_id` of the peer that sent the message." + ] + }, + "payload": { + "type": "hex", + "description": [ + "The raw message payload as a hex string.", + "", + "The first two bytes encode the message type (big-endian), followed by the message payload.", + "The plugin must implement the parsing of the message, including the type prefix, since Core Lightning does not know how to parse the message." + ] + } + } + }, + "response": { + "required": [ + "result" + ], + "additionalProperties": false, + "properties": { + "result": { + "type": "string", + "enum": [ + "continue" + ], + "description": [ + "Must always be `continue`. Any other value will cause the hook to fail." + ] + } + } + }, + "see_also": [ + "lightning-sendcustommsg(7)" + ] + }, + "db_write.json": { + "$schema": "../rpc-schema-draft.json", + "added": "pre-v0.10.1", + "type": "object", + "notification": "db_write", + "title": "Hook fired before database writes are committed", + "description": [ + "The **db_write** hook is called whenever a change is about to be committed to the database, if you are using a SQLITE3 database (the default).", + "This hook will be useless (the \"writes\" field will always be empty) if you are using a PostgreSQL database.", + "", + "This hook is extremely restricted:", + "1. A plugin registering for this hook should not perform anything that may cause a database operation in response (pretty much, anything but logging).", + "2. A plugin registering for this hook should not register for other hooks or commands, as these may become intermingled and break rule #1.", + "3. 
The hook will be called before your plugin is initialized!", + "", + "This hook is strongly synchronous: `lightningd` will halt almost all processing until all plugins have responded.", + "", + "This hook is intended for creating continuous backups. The intent is that your backup plugin maintains three pieces of information (possibly in separate files):", + "1. A snapshot of the database", + "2. A log of database queries that will bring that snapshot up-to-date", + "3. The previous `data_version`", + "", + "`data_version` is an unsigned 32-bit number that will always increment by 1 each time `db_write` is called. Note that this will wrap around on the limit of 32-bit numbers.", + "", + "`writes` is an array of strings, each string being a database query that modifies the database.", + "If the `data_version` above is validated correctly, then you can simply append this to the log of database queries.", + "", + "Your plugin MUST validate the `data_version`. It MUST keep track of the previous `data_version` it got, and:", + "1. If the new `data_version` is exactly one higher than the previous, then this is the ideal case and nothing bad happened and we should save this and continue.", + "2. If the new `data_version` is exactly the same value as the previous, then the previous set of queries was not committed.", + " Your plugin MAY overwrite the previous set of queries with the current set, or it MAY overwrite its entire backup with a new snapshot of the database and the current `writes` array", + " (treating this case as if `data_version` were two or more higher than the previous).", + "3. If the new `data_version` is less than the previous, your plugin MUST halt and catch fire, and have the operator inspect what exactly happened here.", + "4. 
Otherwise, some queries were lost and your plugin SHOULD recover by creating a new snapshot of the database: copy the database file, back up the given `writes` array, then delete", + " (or atomically rename if in a POSIX filesystem) the previous backups of the database and SQL statements, or you MAY fail the hook to abort `lightningd`.", + "", + "The \"rolling up\" of the database could be done periodically as well if the log of SQL statements has grown large.", + "", + "Any response other than `{\"result\": \"continue\"}` will cause `lightningd` to error without committing to the database! This is the expected way to halt and catch fire.", + "", + "`db_write` is a parallel-chained hook, i.e., multiple plugins can register it, and all of them will be invoked simultaneously without regard for order of registration.", + "The hook is considered handled if all registered plugins return `{\"result\": \"continue\"}`. If any plugin returns anything else, `lightningd` will error without committing to the database." + ], + "request": { + "additionalProperties": false, + "required": [ + "data_version", + "writes" + ], + "properties": { + "data_version": { + "type": "u32", + "description": [ + "A monotonically increasing 32-bit unsigned integer representing the database version.", + "Wraps around at the 32-bit limit." + ] + }, + "writes": { + "type": "array", + "description": [ + "Array of SQL statements that modify the database.", + "If using PostgreSQL, this array will always be empty.", + "Each entry is a SQL query string." + ], + "items": { + "type": "string" + } + } + } + }, + "response": { + "additionalProperties": false, + "required": [ + "result" + ], + "properties": { + "result": { + "type": "string", + "enum": [ + "continue" + ], + "description": [ + "Must be \"continue\" for the database commit to proceed.", + "Any other value will abort the commit and cause `lightningd` to error." 
+ ] + } + } + } + }, + "htlc_accepted.json": { + "$schema": "../rpc-schema-draft.json", + "added": "pre-v0.10.1", + "type": "object", + "notification": "htlc_accepted", + "title": "Hook for handling incoming HTLCs", + "description": [ + "The **htlc_accepted** hook is called whenever an incoming HTLC is accepted.", + "", + "The plugin can inspect the HTLC and decide to continue processing, fail it, or resolve it.", + "", + "lightningd will replay the HTLCs for which it doesn't have a final verdict during startup.", + "This means that, if the plugin response wasn't processed before the HTLC was forwarded, failed, or resolved,", + "then the plugin may see the same HTLC again during startup. It is therefore paramount that the plugin is idempotent if it talks to an external system.", + "", + "This is a chained hook: plugins are called in order until one returns a result other than `continue`.", + "After this the event is considered handled and the remaining plugins are skipped." + ], + "request": { + "required": [ + "onion", + "htlc" + ], + "additionalProperties": false, + "properties": { + "peer_id": { + "added": "v25.12", + "type": "pubkey", + "description": [ + "The `node_id` of the peer that offered this HTLC.", + "This field may be absent if the peer is unknown." + ] + }, + "onion": { + "type": "object", + "additionalProperties": false, + "required": [ + "payload", + "next_onion", + "shared_secret" + ], + "properties": { + "payload": { + "type": "hex", + "description": [ + "The raw unparsed onion payload received from the sender." + ] + }, + "type": { + "type": "string", + "enum": [ + "tlv" + ], + "description": [ + "Indicates that the payload is TLV formatted.", + "Only present if the payload was successfully parsed." + ] + }, + "short_channel_id": { + "type": "short_channel_id", + "description": [ + "Determines the channel that the sender is hinting should be used next.", + "Not present if this node is the final destination." 
+ ] + }, + "next_node_id": { + "type": "pubkey", + "description": [ + "The node_id of the next hop.", + "Only present if specified in the onion payload." + ] + }, + "forward_msat": { + "type": "msat", + "description": [ + "The amount to forward to the next hop." + ] + }, + "outgoing_cltv_value": { + "type": "u32", + "description": [ + "Determines what the CLTV value for the HTLC that we forward to the next hop should be." + ] + }, + "total_msat": { + "type": "msat", + "description": [ + "The total payment amount.", + "Only present for final recipients using modern TLV payloads." + ] + }, + "payment_secret": { + "type": "secret", + "description": [ + "The payment secret (which the payer should have obtained from the invoice) provided by the sender.", + "Only present for final recipients." + ] + }, + "payment_metadata": { + "type": "hex", + "description": [ + "Additional metadata provided in the onion payload.", + "Only present if included by the sender." + ] + }, + "next_onion": { + "type": "hex", + "description": [ + "The fully processed onion that we should be sending to the next hop as part of the outgoing HTLC.", + "Processed in this case means that we took the incoming onion, decrypted it, extracted the payload destined for us, and serialised the resulting onion again." + ] + }, + "shared_secret": { + "type": "secret", + "description": [ + "The shared secret used to decrypt the incoming onion.", + "It is shared with the sender that constructed the onion." + ] + } + } + }, + "htlc": { + "type": "object", + "additionalProperties": false, + "required": [ + "short_channel_id", + "id", + "amount_msat", + "cltv_expiry", + "cltv_expiry_relative", + "payment_hash" + ], + "properties": { + "short_channel_id": { + "added": "v0.12.0", + "type": "short_channel_id", + "description": [ + "The channel this HTLC is coming from." + ] + }, + "id": { + "added": "v0.12.0", + "type": "u64", + "description": [ + "The unique HTLC identifier assigned by the channel peer." 
+ ]
+ },
+ "amount_msat": {
+ "added": "v0.12.0",
+ "type": "msat",
+ "description": [
+ "The amount received in this HTLC.",
+ "This amount minus the `forward_msat` amount is the fee that will stay with us."
+ ]
+ },
+ "cltv_expiry": {
+ "type": "u32",
+ "description": [
+ "Determines when the HTLC reverts back to the sender.",
+ "`cltv_expiry` minus `outgoing_cltv_value` should be equal to or larger than our `cltv_delta` setting."
+ ]
+ },
+ "cltv_expiry_relative": {
+ "type": "u32",
+ "description": [
+ "Hints how much time we still have to claim the HTLC.",
+ "It is the `cltv_expiry` minus the current blockheight and is passed along mainly to avoid the plugin having to look up the current blockheight."
+ ]
+ },
+ "payment_hash": {
+ "type": "hash",
+ "description": [
+ "The payment hash used to identify the payment."
+ ]
+ },
+ "extra_tlvs": {
+ "added": "v25.09",
+ "type": "hex",
+ "description": [
+ "Optional TLV stream attached to the HTLC."
+ ]
+ }
+ }
+ },
+ "forward_to": {
+ "type": "hash",
+ "description": [
+ "The `channel_id` we intend to forward the HTLC to.",
+ "Will not be present if the `short_channel_id` was invalid or we were the final destination."
+ ]
+ }
+ }
+ },
+ "response": {
+ "required": [
+ "result"
+ ],
+ "additionalProperties": false,
+ "properties": {
+ "result": {
+ "type": "string",
+ "enum": [
+ "continue",
+ "fail",
+ "resolve"
+ ],
+ "description": [
+ "Determines how the HTLC should be handled.",
+ "",
+ "`continue` means that the plugin does not want to do anything special and lightningd should continue processing it normally,",
+ "i.e., resolve the payment if we're the recipient, or attempt to forward it otherwise. Notice that the usual checks such as sufficient fees and CLTV deltas are still enforced.",
+ "",
+ "It can also replace the onion.payload by specifying a payload in the response. 
Note that this is always a TLV-style payload,", + "so unlike onion.payload there is no length prefix (and it must be at least 4 hex digits long). This will be re-parsed;", + "it's useful for removing onion fields which a plugin doesn't want lightningd to consider.", + "", + "It can also specify forward_to in the response, replacing the destination.", + "This usually only makes sense if it wants to choose an alternate channel to the same next peer, but is useful if the payload is also replaced.", + "", + "Also, it can specify extra_tlvs in the response. This will replace the TLV-stream update_add_htlc_tlvs in the update_add_htlc message for forwarded htlcs.", + "", + "If the node is the final destination, the plugin can also replace the amount of the invoice that belongs to the payment_hash by specifying invoice_msat.", + "", + "", + "`fail` will tell lightningd to fail the HTLC with a given hex-encoded `failure_message` (please refer to BOLT #4 for details: `incorrect_or_unknown_payment_details` is the most common).", + "", + "Instead of `failure_message` the response can contain a hex-encoded `failure_onion` that will be used instead (please refer to the BOLT #4 for details).", + "This can be used, for example, if you're writing a bridge between two Lightning Networks. Note that lightningd will apply the obfuscation step to the value", + "returned here with its own shared secret (and key type `ammag`) before returning it to the previous hop.", + "", + "", + "`resolve` instructs lightningd to claim the HTLC by providing the preimage matching the `payment_hash` presented in the call.", + "Notice that the plugin must ensure that the `payment_key` really matches the `payment_hash` since lightningd will not check and the wrong value could result in the channel being closed." + ] + }, + "payload": { + "type": "hex", + "description": [ + "Replacement TLV payload to use instead of the original onion payload." 
+ ] + }, + "forward_to": { + "type": "hash", + "description": [ + "Overrides the forwarding destination." + ] + }, + "extra_tlvs": { + "added": "v25.09", + "type": "hex", + "description": [ + "Replacement TLV stream for forwarded HTLCs." + ] + }, + "invoice_msat": { + "added": "v25.12", + "type": "msat", + "description": [ + "Overrides the invoice amount for final destination checks." + ] + }, + "failure_message": { + "type": "hex", + "description": [ + "Failure message to return if result is `fail`." + ] + }, + "failure_onion": { + "type": "hex", + "description": [ + "Serialized failure onion to return if result is `fail`." + ] + }, + "payment_key": { + "type": "secret", + "description": [ + "Preimage used to resolve the HTLC if result is `resolve`." + ] + } + }, + "if": { + "properties": { + "result": { + "enum": [ + "fail" + ] + } + } + }, + "then": { + "anyOf": [ + { + "required": [ + "failure_message" + ] + }, + { + "required": [ + "failure_onion" + ] + } + ] + }, + "else": { + "if": { + "properties": { + "result": { + "enum": [ + "resolve" + ] + } + } + }, + "then": { + "required": [ + "payment_key" + ] + } + } + } + }, + "invoice_payment.json": { + "$schema": "../rpc-schema-draft.json", + "added": "pre-v0.10.1", + "type": "object", + "notification": "invoice_payment", + "title": "Hook fired when a payment for an invoice is received", + "description": [ + "The **invoice_payment** hook is called whenever a valid payment for an unpaid invoice has arrived.", + "", + "The hook is deliberately sparse. 
Plugins can use `listinvoices` to retrieve additional information.", + "", + "The plugin can:", + "- accept the payment by returning {\"result\": \"continue\"}", + "- reject the payment with a generic error using {\"result\": \"reject\"}", + "- reject the payment with a custom BOLT 4 failure message using the `failure_message` field", + "", + "If `failure_message` is provided, the payment will be failed with that message.", + "If result is \"reject\" and no `failure_message` is provided, the payment fails with `incorrect_or_unknown_payment_details`.", + "`failure_message` must NOT be provided when result is \"continue\".", + "", + "Before version 23.11 the msat field was encoded as a string with an 'msat' suffix." + ], + "request": { + "additionalProperties": false, + "required": [ + "payment" + ], + "properties": { + "payment": { + "type": "object", + "additionalProperties": true, + "required": [ + "label", + "preimage", + "msat" + ], + "properties": { + "label": { + "type": "string", + "description": [ + "Unique label identifying the invoice." + ] + }, + "preimage": { + "type": "secret", + "description": [ + "The payment preimage." + ] + }, + "msat": { + "type": "msat", + "description": [ + "Amount paid in millisatoshis." + ] + } + }, + "description": [ + "Basic payment information.", + "Additional TLV-derived fields may be included when running in developer mode." + ] + } + } + }, + "response": { + "additionalProperties": false, + "required": [ + "result" + ], + "properties": { + "result": { + "type": "string", + "enum": [ + "continue", + "reject" + ], + "description": [ + "Controls whether the payment is accepted or rejected.", + "\"continue\" accepts the payment.", + "\"reject\" fails the payment." + ] + }, + "failure_message": { + "type": "hex", + "description": [ + "Optional BOLT 4 failure message.", + "Used to provide a specific failure reason when rejecting the payment." 
+ ] + } + }, + "if": { + "properties": { + "result": { + "type": "string", + "enum": [ + "reject" + ] + } + }, + "required": [ + "result" + ] + }, + "then": { + "properties": { + "failure_message": { + "type": "hex" + } + } + } + }, + "see_also": [ + "lightning-listinvoices(7)" + ] + }, + "onion_message_recv.json": { + "$schema": "../rpc-schema-draft.json", + "added": "pre-v0.10.1", + "type": "object", + "notification": "onion_message_recv", + "title": "Hook for receiving unsolicited onion messages", + "description": [ + "The **onion_message_recv** hook is used for unsolicited onion messages (where the source knows that it is sending to this node).", + "", + "Replies MUST be ignored unless they use the correct path (see onion_message_recv_secret).", + "", + "Returning anything other than {\"result\": \"continue\"} prevents further hook processing." + ], + "request": { + "required": [ + "onion_message" + ], + "additionalProperties": false, + "properties": { + "onion_message": { + "type": "object", + "additionalProperties": false, + "properties": { + "reply_blindedpath": { + "type": "object", + "description": [ + "A blinded return path provided by the sender.", + "", + "This allows replying without revealing the recipient's identity or network position.", + "If present, plugins must use this path if they construct a reply onion message." + ], + "additionalProperties": false, + "properties": { + "first_node_id": { + "type": "pubkey", + "description": [ + "The introduction node of the blinded path.", + "This is the first hop to which the reply should be sent.", + "", + "Only one of `first_node_id` or the pair `first_scid` and `first_scid_dir` is present." + ] + }, + "first_scid": { + "type": "short_channel_id", + "description": [ + "Alternative to `first_node_id`: identifies the introduction point via a channel.", + "", + "Only one of `first_node_id` or the pair `first_scid` and `first_scid_dir` is present." 
+ ] + }, + "first_scid_dir": { + "type": "u32", + "description": [ + "Direction of the `short_channel_id` (0 or 1).", + "", + "Only one of `first_node_id` or the pair `first_scid` and `first_scid_dir` is present." + ] + }, + "first_path_key": { + "added": "v24.11", + "type": "pubkey", + "description": [ + "Initial public key used to derive shared secrets with the first hop.", + "", + "This key allows each hop to derive per-hop encryption keys and blinding factors." + ] + }, + "hops": { + "type": "array", + "description": [ + "Sequence of blinded hops forming the path.", + "", + "Each hop contains a blinded node identifier and encrypted routing instructions." + ], + "items": { + "type": "object", + "required": [ + "blinded_node_id", + "encrypted_recipient_data" + ], + "additionalProperties": false, + "properties": { + "blinded_node_id": { + "type": "pubkey", + "description": [ + "Blinded public key representing the hop.", + "", + "The actual node identity is hidden using a blinding factor." + ] + }, + "encrypted_recipient_data": { + "type": "hex", + "description": [ + "Encrypted TLV payload for this hop.", + "", + "Contains instructions (e.g., next hop) encrypted with a shared secret derived from the path key." + ] + } + } + } + } + } + }, + "invoice_request": { + "type": "hex", + "description": [ + "BOLT #12 `invoice_request` payload." + ] + }, + "invoice": { + "type": "hex", + "description": [ + "BOLT #12 `invoice` payload." + ] + }, + "invoice_error": { + "type": "hex", + "description": [ + "BOLT #12 `invoice_error` payload." + ] + }, + "unknown_fields": { + "type": "array", + "description": [ + "Unknown or unparsed TLV fields from the onion message.", + "", + "Plugins may inspect these for experimental or custom extensions." + ], + "items": { + "type": "object", + "required": [ + "number", + "value" + ], + "additionalProperties": false, + "properties": { + "number": { + "type": "u64", + "description": [ + "TLV type number." 
+ ] + }, + "value": { + "type": "hex", + "description": [ + "Raw TLV value." + ] + } + } + } + } + } + } + } + }, + "response": { + "required": [ + "result" + ], + "additionalProperties": false, + "properties": { + "result": { + "type": "string", + "description": [ + "Return \"continue\" to pass the message to the next plugin.", + "Returning any other value stops further hook processing." + ] + } + } + } + }, + "onion_message_recv_secret.json": { + "$schema": "../rpc-schema-draft.json", + "added": "pre-v0.10.1", + "type": "object", + "notification": "onion_message_recv_secret", + "title": "Hook for receiving onion messages via blinded paths", + "description": [ + "The **onion_message_recv_secret** hook is used when an onion message is received via a blinded path previously provided by this node.", + "", + "The presence of `pathsecret` allows the plugin to authenticate that the message used the intended return path.", + "", + "Replies MUST only be sent when the `pathsecret` matches expectations.", + "", + "Returning anything other than {\"result\": \"continue\"} prevents further hook processing." + ], + "request": { + "required": [ + "onion_message" + ], + "additionalProperties": false, + "properties": { + "onion_message": { + "type": "object", + "required": [ + "pathsecret" + ], + "additionalProperties": false, + "properties": { + "pathsecret": { + "type": "secret", + "description": [ + "Shared secret identifying the blinded path.", + "", + "Used to verify that the sender used a path previously provided by this node.", + "This prevents probing attacks and unauthorized replies." + ] + }, + "reply_blindedpath": { + "type": "object", + "description": [ + "A blinded return path provided by the sender.", + "", + "This allows replying without revealing the recipient's identity or network position.", + "If present, plugins must use this path if they construct a reply onion message." 
+ ], + "additionalProperties": false, + "properties": { + "first_node_id": { + "type": "pubkey", + "description": [ + "The introduction node of the blinded path.", + "This is the first hop to which the reply should be sent.", + "", + "Only one of `first_node_id` or the pair `first_scid` and `first_scid_dir` is present." + ] + }, + "first_scid": { + "type": "short_channel_id", + "description": [ + "Alternative to `first_node_id`: identifies the introduction point via a channel.", + "", + "Only one of `first_node_id` or the pair `first_scid` and `first_scid_dir` is present." + ] + }, + "first_scid_dir": { + "type": "u32", + "description": [ + "Direction of the `short_channel_id` (0 or 1).", + "", + "Only one of `first_node_id` or the pair `first_scid` and `first_scid_dir` is present." + ] + }, + "first_path_key": { + "added": "v24.11", + "type": "pubkey", + "description": [ + "Initial public key used to derive shared secrets with the first hop.", + "", + "This key allows each hop to derive per-hop encryption keys and blinding factors." + ] + }, + "hops": { + "type": "array", + "description": [ + "Sequence of blinded hops forming the path.", + "", + "Each hop contains a blinded node identifier and encrypted routing instructions." + ], + "items": { + "type": "object", + "required": [ + "blinded_node_id", + "encrypted_recipient_data" + ], + "additionalProperties": false, + "properties": { + "blinded_node_id": { + "type": "pubkey", + "description": [ + "Blinded public key representing the hop.", + "", + "The actual node identity is hidden using a blinding factor." + ] + }, + "encrypted_recipient_data": { + "type": "hex", + "description": [ + "Encrypted TLV payload for this hop.", + "", + "Contains instructions (e.g., next hop) encrypted with a shared secret derived from the path key." + ] + } + } + } + } + } + }, + "invoice_request": { + "type": "hex", + "description": [ + "BOLT #12 `invoice_request` payload." 
+ ] + }, + "invoice": { + "type": "hex", + "description": [ + "BOLT #12 `invoice` payload." + ] + }, + "invoice_error": { + "type": "hex", + "description": [ + "BOLT #12 `invoice_error` payload." + ] + }, + "unknown_fields": { + "type": "array", + "description": [ + "Unknown or unparsed TLV fields from the onion message.", + "", + "Plugins may inspect these for experimental or custom extensions." + ], + "items": { + "type": "object", + "required": [ + "number", + "value" + ], + "additionalProperties": false, + "properties": { + "number": { + "type": "u64", + "description": [ + "TLV type number." + ] + }, + "value": { + "type": "hex", + "description": [ + "Raw TLV value." + ] + } + } + } + } + } + } + } + }, + "response": { + "required": [ + "result" + ], + "additionalProperties": false, + "properties": { + "result": { + "type": "string", + "description": [ + "Return \"continue\" to pass the message to the next plugin.", + "Returning any other value stops further hook processing." + ] + } + } + } + }, + "openchannel.json": { + "$schema": "../rpc-schema-draft.json", + "added": "pre-v0.10.1", + "type": "object", + "notification": "openchannel", + "title": "Hook fired when a peer proposes opening a channel using v1 protocol", + "description": [ + "The **openchannel** hook is called whenever a remote peer tries to fund a channel using the v1 protocol, after passing basic sanity checks.", + "", + "The payload mirrors the BOLT #2 `open_channel` message and may include additional fields defined by the protocol.", + "", + "Plugins can reject the channel or modify certain parameters before accepting it.", + "", + "This is a chained hook: the first plugin returning a non-\"continue\" result terminates the chain.", + "Mutation fields (`close_to`, `mindepth`, `reserve`) are only applied from the first plugin that sets them.", + "Additional fields may be present in the request as defined by BOLT #2.", + "Providing invalid values (e.g., invalid `close_to` address) will cause 
lightningd to exit."
+ ],
+ "request": {
+ "additionalProperties": false,
+ "required": [
+ "openchannel"
+ ],
+ "properties": {
+ "openchannel": {
+ "type": "object",
+ "additionalProperties": true,
+ "required": [
+ "id",
+ "funding_msat",
+ "push_msat",
+ "dust_limit_msat",
+ "max_htlc_value_in_flight_msat",
+ "channel_reserve_msat",
+ "htlc_minimum_msat",
+ "feerate_per_kw",
+ "to_self_delay",
+ "max_accepted_htlcs",
+ "channel_flags",
+ "channel_type"
+ ],
+ "properties": {
+ "id": {
+ "type": "pubkey",
+ "description": [
+ "The peer's node_id."
+ ]
+ },
+ "funding_msat": {
+ "type": "msat",
+ "description": [
+ "Funding amount proposed by the peer."
+ ]
+ },
+ "push_msat": {
+ "type": "msat",
+ "description": [
+ "Amount pushed to us at channel open."
+ ]
+ },
+ "dust_limit_msat": {
+ "type": "msat",
+ "description": [
+ "Dust limit for outputs."
+ ]
+ },
+ "max_htlc_value_in_flight_msat": {
+ "type": "msat",
+ "description": [
+ "Maximum HTLC value allowed in flight."
+ ]
+ },
+ "channel_reserve_msat": {
+ "type": "msat",
+ "description": [
+ "Channel reserve required by the peer."
+ ]
+ },
+ "htlc_minimum_msat": {
+ "type": "msat",
+ "description": [
+ "Minimum HTLC value."
+ ]
+ },
+ "feerate_per_kw": {
+ "type": "u32",
+ "description": [
+ "Feerate in satoshi per kw."
+ ]
+ },
+ "to_self_delay": {
+ "type": "u32",
+ "description": [
+ "The number of blocks before they can take their funds if they unilaterally close."
+ ]
+ },
+ "max_accepted_htlcs": {
+ "type": "u32",
+ "description": [
+ "Maximum number of HTLCs the remote is allowed to offer at once."
+ ]
+ },
+ "channel_flags": {
+ "type": "u8",
+ "description": [
+ "Channel flags as defined in BOLT #7."
+ ]
+ },
+ "shutdown_scriptpubkey": {
+ "type": "hex",
+ "description": [
+ "Optional shutdown scriptPubKey proposed by the peer."
+ ] + }, + "channel_type": { + "added": "v25.09", + "type": "object", + "additionalProperties": false, + "required": [ + "bits", + "names" + ], + "properties": { + "bits": { + "type": "array", + "description": [ + "List of feature bit numbers that define the negotiated channel type.", + "Each value represents a feature bit as defined in BOLT #2." + ], + "items": { + "type": "u32", + "description": [ + "Feature bit number." + ] + } + }, + "names": { + "type": "array", + "description": [ + "Human-readable names corresponding to each feature bit.", + "Names are implementation-defined and may evolve over time." + ], + "items": { + "type": "string", + "description": [ + "Name of the feature bit." + ] + } + } + } + } + } + } + } + }, + "response": { + "additionalProperties": false, + "required": [ + "result" + ], + "properties": { + "result": { + "type": "string", + "enum": [ + "continue", + "reject" + ], + "description": [ + "Whether to accept or reject the channel opening request." + ] + }, + "error_message": { + "type": "string", + "description": [ + "Optional error message sent to the peer when rejecting." + ] + }, + "close_to": { + "type": "string", + "description": [ + "Bitcoin address for mutual close output.", + "Must be valid for the current chain or lightningd will exit with an error." + ] + }, + "mindepth": { + "added": "v0.12.0", + "type": "u32", + "description": [ + "`mindepth` is the number of confirmations to require before making the channel usable.", + "Notice that setting this to 0 (zeroconf) or some other low value might expose you to double-spending issues,", + "so only lower this value from the default if you trust the peer not to double-spend, or you reject incoming payments,", + "including forwards, until the funding is confirmed." 
+ ]
+ },
+ "reserve": {
+ "added": "v22.11",
+ "type": "sat",
+ "description": [
+ "`reserve` is an absolute value for the amount (in satoshi) in the channel that the peer must keep on their side.",
+ "This ensures that they always have something to lose, so only lower this below 1% of the funding amount if you trust the peer.",
+ "The protocol requires this to be larger than the dust limit, hence it will be adjusted to be the dust limit if the specified value is below."
+ ]
+ }
+ },
+ "if": {
+ "properties": {
+ "result": {
+ "type": "string",
+ "enum": [
+ "reject"
+ ]
+ }
+ },
+ "required": [
+ "result"
+ ]
+ },
+ "then": {
+ "properties": {
+ "error_message": {
+ "type": "string"
+ }
+ }
+ }
+ }
+ },
+ "openchannel2.json": {
+ "$schema": "../rpc-schema-draft.json",
+ "added": "pre-v0.10.1",
+ "type": "object",
+ "notification": "openchannel2",
+ "title": "Hook fired when a peer proposes opening a channel using v2 protocol",
+ "description": [
+ "The **openchannel2** hook is called whenever a remote peer tries to fund a channel using the v2 (dual-funding) protocol, after passing basic sanity checks.",
+ "",
+ "The payload mirrors the BOLT #2 `open_channel` message and dual-funding extensions.",
+ "There may be additional fields present depending on negotiated features.",
+ "",
+ "`requested_lease_msat`, `lease_blockheight_start`, and `node_blockheight` are only present if the peer requested a funding lease (`option_will_fund`).",
+ "",
+ "The plugin can reject the channel, accept it, or contribute funds via a PSBT when accepting.",
+ "",
+ "See `plugins/funder.c` for an example of how to use this hook to contribute funds to a channel open.",
+ "",
+ "This is a chained hook: multiple plugins may be invoked.",
+ "Returning any result other than \"continue\" terminates the chain.",
+ "Only the first plugin that sets mutation fields (e.g. 
`close_to`) will have them applied.", + "Invalid `close_to` addresses will cause lightningd to exit.", + "The PSBT must be consistent with the funding transaction and respect feerate constraints." + ], + "request": { + "additionalProperties": false, + "required": [ + "openchannel2" + ], + "properties": { + "openchannel2": { + "type": "object", + "additionalProperties": true, + "required": [ + "id", + "channel_id", + "their_funding_msat", + "dust_limit_msat", + "max_htlc_value_in_flight_msat", + "htlc_minimum_msat", + "funding_feerate_per_kw", + "commitment_feerate_per_kw", + "feerate_our_max", + "feerate_our_min", + "to_self_delay", + "max_accepted_htlcs", + "channel_flags", + "locktime", + "channel_max_msat", + "require_confirmed_inputs", + "channel_type" + ], + "properties": { + "id": { + "type": "pubkey", + "description": [ + "The `node_id` of the peer proposing the channel." + ] + }, + "channel_id": { + "type": "hash", + "description": [ + "Temporary `channel_id` assigned for this channel negotiation." + ] + }, + "their_funding_msat": { + "type": "msat", + "description": [ + "Amount contributed by the remote peer to the channel funding transaction." + ] + }, + "dust_limit_msat": { + "type": "msat", + "description": [ + "Minimum output value below which outputs are considered dust." + ] + }, + "max_htlc_value_in_flight_msat": { + "type": "msat", + "description": [ + "Maximum total value of outstanding HTLCs allowed in the channel at any time." + ] + }, + "htlc_minimum_msat": { + "type": "msat", + "description": [ + "Minimum HTLC value the peer will accept." + ] + }, + "funding_feerate_per_kw": { + "type": "u32", + "description": [ + "Feerate (per kw) used for the funding transaction." + ] + }, + "commitment_feerate_per_kw": { + "type": "u32", + "description": [ + "Feerate (per kw) used for commitment transactions." + ] + }, + "feerate_our_max": { + "type": "u32", + "description": [ + "Maximum feerate we are willing to accept for commitment transactions." 
+ ]
+ },
+ "feerate_our_min": {
+ "type": "u32",
+ "description": [
+ "Minimum feerate we are willing to accept for commitment transactions."
+ ]
+ },
+ "to_self_delay": {
+ "type": "u16",
+ "description": [
+ "The number of blocks before they can take their funds if they unilaterally close."
+ ]
+ },
+ "max_accepted_htlcs": {
+ "type": "u16",
+ "description": [
+ "Maximum number of HTLCs the remote is allowed to offer at once."
+ ]
+ },
+ "channel_flags": {
+ "type": "u8",
+ "description": [
+ "Channel flags as defined in BOLT #7."
+ ]
+ },
+ "locktime": {
+ "type": "u32",
+ "description": [
+ "Locktime to be used in the funding transaction."
+ ]
+ },
+ "shutdown_scriptpubkey": {
+ "type": "hex",
+ "description": [
+ "Optional shutdown scriptPubKey provided by the peer for cooperative close."
+ ]
+ },
+ "channel_max_msat": {
+ "type": "msat",
+ "description": [
+ "Maximum capacity this channel is allowed to reach."
+ ]
+ },
+ "requested_lease_msat": {
+ "type": "msat",
+ "description": [
+ "Amount of liquidity the peer is requesting us to lease to them.",
+ "Only present if `option_will_fund` is negotiated."
+ ]
+ },
+ "lease_blockheight_start": {
+ "type": "u32",
+ "description": [
+ "Blockheight at which the lease period begins.",
+ "Only present if `requested_lease_msat` is present."
+ ]
+ },
+ "node_blockheight": {
+ "type": "u32",
+ "description": [
+ "Current blockheight of the node.",
+ "Used in conjunction with lease parameters.",
+ "Only present if `requested_lease_msat` is present."
+ ]
+ },
+ "require_confirmed_inputs": {
+ "added": "v23.02",
+ "type": "boolean",
+ "description": [
+ "Indicates whether the peer requires all funding inputs to be confirmed."
+ ] + }, + "channel_type": { + "added": "v25.09", + "type": "object", + "additionalProperties": false, + "required": [ + "bits", + "names" + ], + "properties": { + "bits": { + "type": "array", + "description": [ + "List of feature bit numbers that define the negotiated channel type.", + "Each value represents a feature bit as defined in BOLT #2." + ], + "items": { + "type": "u32", + "description": [ + "Feature bit number." + ] + } + }, + "names": { + "type": "array", + "description": [ + "Human-readable names corresponding to each feature bit.", + "Names are implementation-defined and may evolve over time." + ], + "items": { + "type": "string", + "description": [ + "Name of the feature bit." + ] + } + } + } + } + } + } + } + }, + "response": { + "additionalProperties": false, + "required": [ + "result" + ], + "properties": { + "result": { + "type": "string", + "enum": [ + "continue", + "reject" + ], + "description": [ + "Indicates whether to accept or reject the channel proposal.", + "Returning \"continue\" allows the channel negotiation to proceed.", + "Returning \"reject\" aborts the channel opening." + ] + }, + "error_message": { + "type": "string", + "description": [ + "Error message sent to the peer when rejecting the channel.", + "Only valid if result is \"reject\"." + ] + }, + "close_to": { + "type": "string", + "description": [ + "Bitcoin address to which funds will be sent on cooperative close.", + "Must be valid for the current chain or lightningd will exit with an error." + ] + }, + "psbt": { + "type": "string", + "description": [ + "Partially Signed Bitcoin Transaction contributing inputs and outputs for the funding transaction.", + "Used when the plugin contributes funds to the channel." + ] + }, + "our_funding_msat": { + "type": "msat", + "description": [ + "Amount we contribute to the channel funding.", + "This amount must NOT be included in any outputs in the provided PSBT.", + "Change outputs must be included separately." 
+ ] + } + }, + "if": { + "properties": { + "result": { + "type": "string", + "enum": [ + "reject" + ] + } + }, + "required": [ + "result" + ] + }, + "then": { + "properties": { + "error_message": { + "type": "string" + } + } + } + } + }, + "openchannel2_changed.json": { + "$schema": "../rpc-schema-draft.json", + "added": "pre-v0.10.1", + "type": "object", + "notification": "openchannel2_changed", + "title": "Hook for handling updates to the dual-funding PSBT", + "description": [ + "The **openchannel2_changed** hook is called when the peer sends an updated PSBT during dual-funding channel negotiation.", + "", + "This allows plugins to inspect and modify the PSBT before it is sent back to the peer.", + "", + "The negotiation continues until neither side makes further changes to the PSBT, at which point commitment transactions are exchanged.", + "", + "See `plugins/funder.c` for an example of how to use this hook to continue a v2 channel open." + ], + "request": { + "required": [ + "openchannel2_changed" + ], + "additionalProperties": false, + "properties": { + "openchannel2_changed": { + "type": "object", + "additionalProperties": false, + "required": [ + "channel_id", + "psbt", + "require_confirmed_inputs" + ], + "properties": { + "channel_id": { + "type": "hash", + "description": [ + "The temporary channel_id identifying the channel being negotiated." + ] + }, + "psbt": { + "type": "string", + "description": [ + "The current Partially Signed Bitcoin Transaction (PSBT) representing the funding transaction.", + "This PSBT includes contributions from both peers and may be modified." + ] + }, + "require_confirmed_inputs": { + "added": "v23.02", + "type": "boolean", + "description": [ + "Indicates whether the remote peer requires all inputs in the PSBT to be confirmed.", + "If true, the plugin must avoid adding unconfirmed inputs." 
+ ] + } + } + } + } + }, + "response": { + "required": [ + "result", + "psbt" + ], + "additionalProperties": false, + "properties": { + "result": { + "type": "string", + "enum": [ + "continue" + ], + "description": [ + "Must be set to `continue` to proceed with the channel opening negotiation." + ] + }, + "psbt": { + "type": "string", + "description": [ + "The updated PSBT to send back to the peer.", + "If no modifications are made, this should be identical to the input PSBT." + ] + } + } + }, + "examples": [ + { + "request": { + "id": "example:openchannel2_changed#1", + "method": "openchannel2_changed", + "params": { + "openchannel2_changed": { + "channel_id": "252d1b0a1e57895e841...", + "psbt": "cHNidP8BADMCAAAAAQ+yBipSVZr...", + "require_confirmed_inputs": true + } + } + }, + "response": { + "result": "continue", + "psbt": "cHNidP8BADMCAAAAAQ+yBipSVZr..." + } + } + ] + }, + "openchannel2_sign.json": { + "$schema": "../rpc-schema-draft.json", + "added": "pre-v0.10.1", + "type": "object", + "notification": "openchannel2_sign", + "title": "Hook for signing the dual-funding PSBT", + "description": [ + "The **openchannel2_sign** hook is called after commitment transactions have been received during dual-funding channel establishment.", + "", + "The plugin is expected to sign any inputs it owns in the provided PSBT and return the updated PSBT.", + "", + "If no inputs need to be signed, the original PSBT should be returned unchanged.", + "", + "Once both sides have provided signatures, the funding transaction will be broadcast.", + "", + "See `plugins/funder.c` for an example of how to use this hook to sign a funding transaction." 
+ ], + "request": { + "required": [ + "openchannel2_sign" + ], + "additionalProperties": false, + "properties": { + "openchannel2_sign": { + "type": "object", + "additionalProperties": false, + "required": [ + "channel_id", + "psbt" + ], + "properties": { + "channel_id": { + "type": "hash", + "description": [ + "The temporary `channel_id` identifying the channel being negotiated." + ] + }, + "psbt": { + "type": "string", + "description": [ + "The Partially Signed Bitcoin Transaction (PSBT) representing the funding transaction.", + "The plugin should add signatures for any inputs it controls." + ] + } + } + } + } + }, + "response": { + "required": [ + "result", + "psbt" + ], + "additionalProperties": false, + "properties": { + "result": { + "type": "string", + "enum": [ + "continue" + ], + "description": [ + "Must be set to `continue` to proceed with channel opening." + ] + }, + "psbt": { + "type": "string", + "description": [ + "The PSBT including any added signatures.", + "If no inputs were signed, this should be identical to the input PSBT." + ] + } + } + }, + "example_notifications": [ + { + "method": "openchannel2_sign", + "params": { + "openchannel2_sign": { + "channel_id": "252d1b0a1e57895e841...", + "psbt": "cHNidP8BADMCAAAAAQ+yBipSVZr..." + } + } + } + ], + "examples": [ + { + "request": { + "id": "example:openchannel2_sign#1", + "method": "openchannel2_sign", + "params": { + "openchannel2_sign": { + "channel_id": "252d1b0a1e57895e841...", + "psbt": "cHNidP8BADMCAAAAAQ+yBipSVZr..." + } + } + }, + "response": { + "result": "continue", + "psbt": "cHNidP8BADMCAAAAAQ+yBipSVZr..." 
+ } + } + ] + }, + "peer_connected.json": { + "$schema": "../rpc-schema-draft.json", + "added": "pre-v0.10.1", + "type": "object", + "notification": "peer_connected", + "title": "Hook fired when a peer connects and completes handshake", + "description": [ + "The **peer_connected** hook is called whenever a peer has connected and successfully completed the cryptographic handshake.", + "", + "This is a chained hook: the first plugin returning \"disconnect\" stops further processing.", + "Plugins can call `listpeers` to retrieve additional information about the peer." + ], + "request": { + "additionalProperties": false, + "required": [ + "peer" + ], + "properties": { + "peer": { + "type": "object", + "additionalProperties": false, + "required": [ + "id", + "direction", + "addr", + "features" + ], + "properties": { + "id": { + "type": "pubkey", + "description": [ + "The node_id of the connected peer." + ] + }, + "direction": { + "type": "string", + "enum": [ + "in", + "out" + ], + "description": [ + "Connection direction: `in` for incoming, `out` for outgoing." + ] + }, + "addr": { + "type": "string", + "description": [ + "The `addr` field shows the address that we are connected to ourselves, not the gossiped list of known addresses.", + "In particular this means that the port for incoming connections is an ephemeral port, that may not be available for reconnections." + ] + }, + "remote_addr": { + "type": "string", + "description": [ + "Our own address as reported by the remote peer. Helps with detecting our own IPv4 changes behind NAT." + ] + }, + "features": { + "type": "hex", + "description": [ + "Feature bits advertised by the peer, encoded as hex." + ] + } + } + } + } + }, + "response": { + "additionalProperties": false, + "required": [ + "result" + ], + "properties": { + "result": { + "type": "string", + "enum": [ + "continue", + "disconnect" + ], + "description": [ + "Whether to allow the connection to proceed or disconnect the peer." 
+ ] + }, + "error_message": { + "type": "string", + "description": [ + "Optional error message sent to the peer before disconnection.", + "Only used if result is \"disconnect\"." + ] + } + } + }, + "see_also": [ + "lightning-listpeers(7)" + ] + }, + "rbf_channel.json": { + "$schema": "../rpc-schema-draft.json", + "added": "pre-v0.10.1", + "type": "object", + "notification": "rbf_channel", + "title": "Hook for handling RBF channel funding requests", + "description": [ + "The **rbf_channel** hook is called when a peer proposes replacing the funding transaction of an existing channel using Replace-By-Fee (RBF).", + "", + "The plugin can choose to reject or continue the negotiation.", + "", + "If continuing, the plugin may contribute additional inputs and outputs by returning a PSBT and specifying an `our_funding_msat` amount.", + "", + "The `our_funding_msat` value must not be included in any output in the PSBT. Change outputs should be included and calculated using the provided `funding_feerate_per_kw`." + ], + "request": { + "required": [ + "rbf_channel" + ], + "additionalProperties": false, + "properties": { + "rbf_channel": { + "type": "object", + "additionalProperties": false, + "required": [ + "id", + "channel_id", + "their_last_funding_msat", + "their_funding_msat", + "our_last_funding_msat", + "funding_feerate_per_kw", + "feerate_our_max", + "feerate_our_min", + "channel_max_msat", + "locktime", + "require_confirmed_inputs" + ], + "properties": { + "id": { + "type": "pubkey", + "description": [ + "The `node_id` of the peer proposing the RBF." + ] + }, + "channel_id": { + "type": "hash", + "description": [ + "The `channel_id` of the channel being modified." + ] + }, + "their_last_funding_msat": { + "type": "msat", + "description": [ + "The peer's previous contribution to the funding transaction." + ] + }, + "their_funding_msat": { + "type": "msat", + "description": [ + "The peer's proposed new funding contribution." 
+ ] + }, + "our_last_funding_msat": { + "type": "msat", + "description": [ + "Our previous contribution to the funding transaction." + ] + }, + "funding_feerate_per_kw": { + "type": "u32", + "description": [ + "The feerate to use for the updated funding transaction, in satoshis per kw." + ] + }, + "feerate_our_max": { + "type": "u32", + "description": [ + "The maximum feerate we are willing to accept for the funding transaction." + ] + }, + "feerate_our_min": { + "type": "u32", + "description": [ + "The minimum feerate we are willing to accept for the funding transaction." + ] + }, + "channel_max_msat": { + "type": "msat", + "description": [ + "The maximum total channel capacity allowed for this channel." + ] + }, + "locktime": { + "type": "u32", + "description": [ + "The locktime to use for the funding transaction." + ] + }, + "requested_lease_msat": { + "type": "msat", + "description": [ + "If present, the amount of liquidity the peer is requesting us to lease.", + "This field is optional and only included if the peer requested a lease." + ] + }, + "require_confirmed_inputs": { + "added": "v23.02", + "type": "boolean", + "description": [ + "Indicates whether the remote peer requires all inputs in the PSBT to be confirmed.", + "If true, the plugin must avoid adding unconfirmed inputs." + ] + } + } + } + } + }, + "response": { + "required": [ + "result" + ], + "additionalProperties": false, + "properties": { + "result": { + "type": "string", + "enum": [ + "continue", + "reject" + ], + "description": [ + "Whether to accept or reject the RBF proposal." + ] + }, + "psbt": { + "type": "string", + "description": [ + "A PSBT containing additional inputs and outputs to contribute to the funding transaction.", + "Only valid if `result` is `continue`." + ] + }, + "our_funding_msat": { + "type": "msat", + "description": [ + "The amount we are contributing to the new funding transaction.", + "Must not be included in any output in the PSBT." 
+ ] + }, + "error_message": { + "type": "string", + "description": [ + "An error message explaining the rejection.", + "Only used if `result` is `reject` and will be sent to the peer." + ] + } + } + } + }, + "recover.json": { + "$schema": "../rpc-schema-draft.json", + "added": "v23.08", + "type": "object", + "notification": "recover", + "title": "Hook fired when node starts in recovery mode", + "description": [ + "The **recover** hook is called whenever the node is started using the --recovery flag.", + "It provides the codex32 secret used to derive the HSM secret.", + "Plugins can use this to reconnect to peers who keep your peer storage backups with them and recover state or funds.", + "", + "This hook is informational and does not allow altering execution flow.", + "Plugins are expected to perform recovery-related side effects such as reconnecting to peers." + ], + "request": { + "additionalProperties": false, + "required": [ + "codex32" + ], + "properties": { + "codex32": { + "type": "string", + "description": [ + "The codex32-encoded secret provided via --recover.", + "Used to reconstruct the node's HSM secret." + ] + } + } + }, + "response": { + "additionalProperties": false, + "required": [ + "result" + ], + "properties": { + "result": { + "type": "string", + "enum": [ + "continue" + ], + "description": [ + "Returning \"continue\" resumes normal execution." + ] + } + } + } + }, + "rpc_command.json": { + "$schema": "../rpc-schema-draft.json", + "added": "pre-v0.10.1", + "type": "object", + "notification": "rpc_command", + "title": "Hook for intercepting and modifying RPC commands", + "description": [ + "The **rpc_command** hook allows a plugin to take over any RPC command.", + "", + "You can optionally specify a `filters` array, containing the command names you want to intercept: without this, all commands will be sent to this hook. 
(added in v25.12)", + "", + "The plugin receives the full JSON-RPC request and may choose to continue, replace the request, or return a custom result or error.", + "", + "This is a chained hook: only the first plugin that modifies the request or response will take effect. Other plugins will then be ignored and a warning will be logged." + ], + "request": { + "required": [ + "rpc_command" + ], + "additionalProperties": false, + "properties": { + "rpc_command": { + "type": "object", + "description": [ + "The original JSON-RPC request object." + ], + "additionalProperties": true, + "required": [ + "id", + "method", + "params" + ], + "properties": { + "id": { + "oneOf": [ + { + "type": "string" + }, + { + "type": "number" + }, + { + "type": "null" + } + ], + "description": [ + "The JSON-RPC request id." + ] + }, + "method": { + "type": "string", + "description": [ + "The RPC method name." + ] + }, + "params": { + "oneOf": [ + { + "type": "object", + "additionalProperties": true + }, + { + "type": "array", + "items": {} + } + ], + "description": [ + "The parameters passed to the RPC method." + ] + } + } + } + } + }, + "response": { + "additionalProperties": false, + "properties": { + "result": { + "type": "string", + "enum": [ + "continue" + ], + "description": [ + "Indicates that lightningd should continue processing the RPC command normally." + ] + }, + "replace": { + "type": "object", + "description": [ + "Replaces the original JSON-RPC request with a new one." + ], + "additionalProperties": true, + "required": [ + "jsonrpc", + "id", + "method", + "params" + ], + "properties": { + "jsonrpc": { + "type": "string", + "enum": [ + "2.0" + ], + "description": [ + "The JSON-RPC version." + ] + }, + "id": { + "oneOf": [ + { + "type": "string" + }, + { + "type": "number" + }, + { + "type": "null" + } + ], + "description": [ + "The JSON-RPC request id." + ] + }, + "method": { + "type": "string", + "description": [ + "The RPC method name." 
+ ] + }, + "params": { + "oneOf": [ + { + "type": "object", + "additionalProperties": true + }, + { + "type": "array", + "items": {} + } + ], + "description": [ + "The parameters passed to the RPC method." + ] + } + } + }, + "return": { + "type": "object", + "description": [ + "Returns a custom JSON-RPC response to the caller." + ], + "additionalProperties": false, + "properties": { + "result": { + "type": "object", + "description": [ + "Custom result object to return to the caller." + ] + }, + "error": { + "type": "object", + "description": [ + "Custom error object to return to the caller." + ], + "additionalProperties": true, + "properties": { + "code": { + "type": "integer", + "description": [ + "JSON-RPC error code." + ] + }, + "message": { + "type": "string", + "description": [ + "Human-readable error message." + ] + } + }, + "required": [ + "code", + "message" + ] + } + } + } + }, + "oneOf": [ + { + "required": [ + "result" + ] + }, + { + "required": [ + "replace" + ] + }, + { + "required": [ + "return" + ], + "properties": { + "return": { + "required": [ + "result" + ] + } + } + }, + { + "required": [ + "return" + ], + "properties": { + "return": { + "required": [ + "error" + ] + } + } + } + ] + } + } } } \ No newline at end of file diff --git a/contrib/msggen/msggen/utils/utils.py b/contrib/msggen/msggen/utils/utils.py index eb8b025df9a3..bee62921d70e 100644 --- a/contrib/msggen/msggen/utils/utils.py +++ b/contrib/msggen/msggen/utils/utils.py @@ -4,7 +4,7 @@ from importlib import resources from pathlib import Path -from msggen.model import CompositeField, Method, Notification, Service, TypeName +from msggen.model import CompositeField, Method, Notification, Hook, Service, TypeName grpc_method_names = [ "Getinfo", @@ -195,12 +195,32 @@ }, ] +hook_names = [ + {"name": "peer_connected", "typename": "PeerConnected"}, + {"name": "recover_hook", "schema_name": "recover", "typename": "RecoverHook"}, + {"name": "commitment_revocation", "typename": 
"CommitmentRevocation"}, + {"name": "db_write", "typename": "DbWrite"}, + {"name": "invoice_payment_hook", "schema_name": "invoice_payment", "typename": "InvoicePaymentHook"}, + {"name": "openchannel", "typename": "Openchannel"}, + {"name": "openchannel2", "typename": "Openchannel2"}, + {"name": "openchannel2_changed", "typename": "Openchannel2Changed"}, + {"name": "openchannel2_sign", "typename": "Openchannel2Sign"}, + {"name": "rbf_channel", "typename": "RbfChannel"}, + {"name": "htlc_accepted", "typename": "HtlcAccepted"}, + {"name": "rpc_command", "typename": "RpcCommand"}, + {"name": "custommsg_hook", "schema_name": "custommsg", "typename": "CustommsgHook"}, + {"name": "onion_message_recv", "typename": "OnionMessageRecv"}, + {"name": "onion_message_recv_secret", "typename": "OnionMessageRecvSecret"}, + +] + def combine_schemas(schema_dir: Path, dest: Path): """Enumerate all schema files, and combine it into a single JSON file.""" bundle = OrderedDict() methods = OrderedDict() notifications = OrderedDict() + hooks = OrderedDict() # Parse methods files = sorted(list(schema_dir.iterdir())) @@ -218,8 +238,17 @@ def combine_schemas(schema_dir: Path, dest: Path): continue notifications[f.name] = json.load(f.open()) + # Parse hooks + hooks_dir = schema_dir / "hook" + files = sorted(list(hooks_dir.iterdir())) + for f in files: + if not f.name.endswith("json"): + continue + hooks[f.name] = json.load(f.open()) + bundle["methods"] = methods bundle["notifications"] = notifications + bundle["hooks"] = hooks with dest.open(mode="w") as f: json.dump( @@ -314,6 +343,36 @@ def load_notification(name, typename: TypeName, schema_name=None): return Notification(name, TypeName(typename), request, response) +def load_hook(name, typename: TypeName, schema_name=None): + """Load a hook that can be used by a plug-in""" + typename = str(typename) + + hooks = get_schema_bundle()["hooks"] + if schema_name is None: + schema_name = name + hook_name = f"{schema_name.lower()}.json" + + 
root_added = hooks[hook_name].get("added", None) + root_deprecated = hooks[hook_name].get("deprecated", None) + + request = CompositeField.from_js(hooks[hook_name]["request"], path=name) + response = CompositeField.from_js(hooks[hook_name]["response"], path=name) + + if request.added is None: + request.added = root_added + if request.deprecated is None: + request.deprecated = root_deprecated + if response.added is None: + response.added = root_added + if response.deprecated is None: + response.deprecated = root_deprecated + + request.typename += "Event" + response.typename += "Action" + + return Hook(name, TypeName(typename), request, response) + + def load_jsonrpc_service(): methods = [load_jsonrpc_method(name) for name in grpc_method_names] notifications = [ @@ -324,7 +383,17 @@ def load_jsonrpc_service(): ) for names in grpc_notification_names ] - service = Service(name="Node", methods=methods, notifications=notifications) + hooks = [ + load_hook( + name=names["name"], + typename=names["typename"], + schema_name=names.get("schema_name"), + ) + for names in hook_names + ] + service = Service( + name="Node", methods=methods, notifications=notifications, hooks=hooks + ) service.includes = [ "primitives.proto" ] # Make sure we have the primitives included. 
diff --git a/contrib/pyln-grpc-proto/pyln/grpc/primitives_pb2.py b/contrib/pyln-grpc-proto/pyln/grpc/primitives_pb2.py index 97a670b9d4d5..3666bde88a84 100644 --- a/contrib/pyln-grpc-proto/pyln/grpc/primitives_pb2.py +++ b/contrib/pyln-grpc-proto/pyln/grpc/primitives_pb2.py @@ -24,25 +24,27 @@ -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x10primitives.proto\x12\x03\x63ln\"\x16\n\x06\x41mount\x12\x0c\n\x04msat\x18\x01 \x01(\x04\"D\n\x0b\x41mountOrAll\x12\x1d\n\x06\x61mount\x18\x01 \x01(\x0b\x32\x0b.cln.AmountH\x00\x12\r\n\x03\x61ll\x18\x02 \x01(\x08H\x00\x42\x07\n\x05value\"D\n\x0b\x41mountOrAny\x12\x1d\n\x06\x61mount\x18\x01 \x01(\x0b\x32\x0b.cln.AmountH\x00\x12\r\n\x03\x61ny\x18\x02 \x01(\x08H\x00\x42\x07\n\x05value\"(\n\x08Outpoint\x12\x0c\n\x04txid\x18\x01 \x01(\x0c\x12\x0e\n\x06outnum\x18\x02 \x01(\r\"h\n\x07\x46\x65\x65rate\x12\x0e\n\x04slow\x18\x01 \x01(\x08H\x00\x12\x10\n\x06normal\x18\x02 \x01(\x08H\x00\x12\x10\n\x06urgent\x18\x03 \x01(\x08H\x00\x12\x0f\n\x05perkb\x18\x04 \x01(\rH\x00\x12\x0f\n\x05perkw\x18\x05 \x01(\rH\x00\x42\x07\n\x05style\":\n\nOutputDesc\x12\x0f\n\x07\x61\x64\x64ress\x18\x01 \x01(\t\x12\x1b\n\x06\x61mount\x18\x02 \x01(\x0b\x32\x0b.cln.Amount\"h\n\x08RouteHop\x12\n\n\x02id\x18\x01 \x01(\x0c\x12\x0c\n\x04scid\x18\x02 \x01(\t\x12\x1c\n\x07\x66\x65\x65\x62\x61se\x18\x03 \x01(\x0b\x32\x0b.cln.Amount\x12\x0f\n\x07\x66\x65\x65prop\x18\x04 \x01(\r\x12\x13\n\x0b\x65xpirydelta\x18\x05 \x01(\r\"(\n\tRoutehint\x12\x1b\n\x04hops\x18\x01 \x03(\x0b\x32\r.cln.RouteHop\".\n\rRoutehintList\x12\x1d\n\x05hints\x18\x02 \x03(\x0b\x32\x0e.cln.Routehint\"\x9e\x01\n\x0e\x44\x65\x63odeRouteHop\x12\x0e\n\x06pubkey\x18\x01 \x01(\x0c\x12\x18\n\x10short_channel_id\x18\x02 \x01(\t\x12\"\n\rfee_base_msat\x18\x03 \x01(\x0b\x32\x0b.cln.Amount\x12#\n\x1b\x66\x65\x65_proportional_millionths\x18\x04 \x01(\r\x12\x19\n\x11\x63ltv_expiry_delta\x18\x05 \x01(\r\"4\n\x0f\x44\x65\x63odeRoutehint\x12!\n\x04hops\x18\x01 
\x03(\x0b\x32\x13.cln.DecodeRouteHop\":\n\x13\x44\x65\x63odeRoutehintList\x12#\n\x05hints\x18\x02 \x03(\x0b\x32\x14.cln.DecodeRoutehint\"\'\n\x08TlvEntry\x12\x0c\n\x04type\x18\x01 \x01(\x04\x12\r\n\x05value\x18\x02 \x01(\x0c\"+\n\tTlvStream\x12\x1e\n\x07\x65ntries\x18\x01 \x03(\x0b\x32\r.cln.TlvEntry*$\n\x0b\x43hannelSide\x12\t\n\x05LOCAL\x10\x00\x12\n\n\x06REMOTE\x10\x01*\xe7\x02\n\x0c\x43hannelState\x12\x0c\n\x08Openingd\x10\x00\x12\x1a\n\x16\x43hanneldAwaitingLockin\x10\x01\x12\x12\n\x0e\x43hanneldNormal\x10\x02\x12\x18\n\x14\x43hanneldShuttingDown\x10\x03\x12\x17\n\x13\x43losingdSigexchange\x10\x04\x12\x14\n\x10\x43losingdComplete\x10\x05\x12\x16\n\x12\x41waitingUnilateral\x10\x06\x12\x14\n\x10\x46undingSpendSeen\x10\x07\x12\x0b\n\x07Onchain\x10\x08\x12\x15\n\x11\x44ualopendOpenInit\x10\t\x12\x1b\n\x17\x44ualopendAwaitingLockin\x10\n\x12\x1a\n\x16\x43hanneldAwaitingSplice\x10\x0b\x12\x1a\n\x16\x44ualopendOpenCommitted\x10\x0c\x12\x1d\n\x19\x44ualopendOpenCommittReady\x10\r\x12\n\n\x06\x43losed\x10\x0e*\xd5\x03\n\tHtlcState\x12\x0f\n\x0bSentAddHtlc\x10\x00\x12\x11\n\rSentAddCommit\x10\x01\x12\x15\n\x11RcvdAddRevocation\x10\x02\x12\x14\n\x10RcvdAddAckCommit\x10\x03\x12\x18\n\x14SentAddAckRevocation\x10\x04\x12\x18\n\x14RcvdAddAckRevocation\x10\x05\x12\x12\n\x0eRcvdRemoveHtlc\x10\x06\x12\x14\n\x10RcvdRemoveCommit\x10\x07\x12\x18\n\x14SentRemoveRevocation\x10\x08\x12\x17\n\x13SentRemoveAckCommit\x10\t\x12\x1b\n\x17RcvdRemoveAckRevocation\x10\n\x12\x0f\n\x0bRcvdAddHtlc\x10\x0b\x12\x11\n\rRcvdAddCommit\x10\x0c\x12\x15\n\x11SentAddRevocation\x10\r\x12\x14\n\x10SentAddAckCommit\x10\x0e\x12\x12\n\x0eSentRemoveHtlc\x10\x0f\x12\x14\n\x10SentRemoveCommit\x10\x10\x12\x18\n\x14RcvdRemoveRevocation\x10\x11\x12\x17\n\x13RcvdRemoveAckCommit\x10\x12\x12\x1b\n\x17SentRemoveAckRevocation\x10\x13*\xa2\x01\n\x0f\x43hannelTypeName\x12\x19\n\x15static_remotekey_even\x10\x00\x12\x17\n\x13\x61nchor_outputs_even\x10\x01\x12!\n\x1d\x61nchors_zero_fee_htlc_tx_even\x10\x02\x12\x13\n\x0fscid_
alias_even\x10\x03\x12\x11\n\rzeroconf_even\x10\x04\x12\x10\n\x0c\x61nchors_even\x10\x05*\x89\x01\n\x12\x41utocleanSubsystem\x12\x15\n\x11SUCCEEDEDFORWARDS\x10\x00\x12\x12\n\x0e\x46\x41ILEDFORWARDS\x10\x01\x12\x11\n\rSUCCEEDEDPAYS\x10\x02\x12\x0e\n\nFAILEDPAYS\x10\x03\x12\x10\n\x0cPAIDINVOICES\x10\x04\x12\x13\n\x0f\x45XPIREDINVOICES\x10\x05*K\n\x10PluginSubcommand\x12\t\n\x05START\x10\x00\x12\x08\n\x04STOP\x10\x01\x12\n\n\x06RESCAN\x10\x02\x12\x0c\n\x08STARTDIR\x10\x03\x12\x08\n\x04LIST\x10\x04\x62\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x10primitives.proto\x12\x03\x63ln\"\x16\n\x06\x41mount\x12\x0c\n\x04msat\x18\x01 \x01(\x04\"D\n\x0b\x41mountOrAll\x12\x1d\n\x06\x61mount\x18\x01 \x01(\x0b\x32\x0b.cln.AmountH\x00\x12\r\n\x03\x61ll\x18\x02 \x01(\x08H\x00\x42\x07\n\x05value\"D\n\x0b\x41mountOrAny\x12\x1d\n\x06\x61mount\x18\x01 \x01(\x0b\x32\x0b.cln.AmountH\x00\x12\r\n\x03\x61ny\x18\x02 \x01(\x08H\x00\x42\x07\n\x05value\"(\n\x08Outpoint\x12\x0c\n\x04txid\x18\x01 \x01(\x0c\x12\x0e\n\x06outnum\x18\x02 \x01(\r\"h\n\x07\x46\x65\x65rate\x12\x0e\n\x04slow\x18\x01 \x01(\x08H\x00\x12\x10\n\x06normal\x18\x02 \x01(\x08H\x00\x12\x10\n\x06urgent\x18\x03 \x01(\x08H\x00\x12\x0f\n\x05perkb\x18\x04 \x01(\rH\x00\x12\x0f\n\x05perkw\x18\x05 \x01(\rH\x00\x42\x07\n\x05style\":\n\nOutputDesc\x12\x0f\n\x07\x61\x64\x64ress\x18\x01 \x01(\t\x12\x1b\n\x06\x61mount\x18\x02 \x01(\x0b\x32\x0b.cln.Amount\"h\n\x08RouteHop\x12\n\n\x02id\x18\x01 \x01(\x0c\x12\x0c\n\x04scid\x18\x02 \x01(\t\x12\x1c\n\x07\x66\x65\x65\x62\x61se\x18\x03 \x01(\x0b\x32\x0b.cln.Amount\x12\x0f\n\x07\x66\x65\x65prop\x18\x04 \x01(\r\x12\x13\n\x0b\x65xpirydelta\x18\x05 \x01(\r\"(\n\tRoutehint\x12\x1b\n\x04hops\x18\x01 \x03(\x0b\x32\r.cln.RouteHop\".\n\rRoutehintList\x12\x1d\n\x05hints\x18\x02 \x03(\x0b\x32\x0e.cln.Routehint\"\x9e\x01\n\x0e\x44\x65\x63odeRouteHop\x12\x0e\n\x06pubkey\x18\x01 \x01(\x0c\x12\x18\n\x10short_channel_id\x18\x02 \x01(\t\x12\"\n\rfee_base_msat\x18\x03 
\x01(\x0b\x32\x0b.cln.Amount\x12#\n\x1b\x66\x65\x65_proportional_millionths\x18\x04 \x01(\r\x12\x19\n\x11\x63ltv_expiry_delta\x18\x05 \x01(\r\"4\n\x0f\x44\x65\x63odeRoutehint\x12!\n\x04hops\x18\x01 \x03(\x0b\x32\x13.cln.DecodeRouteHop\":\n\x13\x44\x65\x63odeRoutehintList\x12#\n\x05hints\x18\x02 \x03(\x0b\x32\x14.cln.DecodeRoutehint\"\'\n\x08TlvEntry\x12\x0c\n\x04type\x18\x01 \x01(\x04\x12\r\n\x05value\x18\x02 \x01(\x0c\"+\n\tTlvStream\x12\x1e\n\x07\x65ntries\x18\x01 \x03(\x0b\x32\r.cln.TlvEntry\"d\n\x11JsonObjectOrArray\x12!\n\x06object\x18\x01 \x01(\x0b\x32\x0f.cln.JsonObjectH\x00\x12\x1f\n\x05\x61rray\x18\x02 \x01(\x0b\x32\x0e.cln.JsonArrayH\x00\x42\x0b\n\tstructure\"x\n\nJsonObject\x12+\n\x06\x66ields\x18\x01 \x03(\x0b\x32\x1b.cln.JsonObject.FieldsEntry\x1a=\n\x0b\x46ieldsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x1d\n\x05value\x18\x02 \x01(\x0b\x32\x0e.cln.JsonValue:\x02\x38\x01\"+\n\tJsonArray\x12\x1e\n\x06values\x18\x01 \x03(\x0b\x32\x0e.cln.JsonValue\"\xc8\x01\n\tJsonValue\x12\x14\n\nbool_value\x18\x01 \x01(\x08H\x00\x12\x13\n\tint_value\x18\x02 \x01(\x03H\x00\x12\x14\n\nuint_value\x18\x03 \x01(\x04H\x00\x12\x16\n\x0c\x64ouble_value\x18\x04 \x01(\x01H\x00\x12\x16\n\x0cstring_value\x18\x05 \x01(\tH\x00\x12\x1f\n\x05\x61rray\x18\x06 \x01(\x0b\x32\x0e.cln.JsonArrayH\x00\x12!\n\x06object\x18\x07 \x01(\x0b\x32\x0f.cln.JsonObjectH\x00\x42\x06\n\x04kind\"\x87\x01\n\nJsonScalar\x12\x14\n\nbool_value\x18\x01 \x01(\x08H\x00\x12\x13\n\tint_value\x18\x02 \x01(\x03H\x00\x12\x14\n\nuint_value\x18\x03 \x01(\x04H\x00\x12\x16\n\x0c\x64ouble_value\x18\x04 \x01(\x01H\x00\x12\x16\n\x0cstring_value\x18\x05 
\x01(\tH\x00\x42\x08\n\x06scalar*$\n\x0b\x43hannelSide\x12\t\n\x05LOCAL\x10\x00\x12\n\n\x06REMOTE\x10\x01*\xe7\x02\n\x0c\x43hannelState\x12\x0c\n\x08Openingd\x10\x00\x12\x1a\n\x16\x43hanneldAwaitingLockin\x10\x01\x12\x12\n\x0e\x43hanneldNormal\x10\x02\x12\x18\n\x14\x43hanneldShuttingDown\x10\x03\x12\x17\n\x13\x43losingdSigexchange\x10\x04\x12\x14\n\x10\x43losingdComplete\x10\x05\x12\x16\n\x12\x41waitingUnilateral\x10\x06\x12\x14\n\x10\x46undingSpendSeen\x10\x07\x12\x0b\n\x07Onchain\x10\x08\x12\x15\n\x11\x44ualopendOpenInit\x10\t\x12\x1b\n\x17\x44ualopendAwaitingLockin\x10\n\x12\x1a\n\x16\x43hanneldAwaitingSplice\x10\x0b\x12\x1a\n\x16\x44ualopendOpenCommitted\x10\x0c\x12\x1d\n\x19\x44ualopendOpenCommittReady\x10\r\x12\n\n\x06\x43losed\x10\x0e*\xd5\x03\n\tHtlcState\x12\x0f\n\x0bSentAddHtlc\x10\x00\x12\x11\n\rSentAddCommit\x10\x01\x12\x15\n\x11RcvdAddRevocation\x10\x02\x12\x14\n\x10RcvdAddAckCommit\x10\x03\x12\x18\n\x14SentAddAckRevocation\x10\x04\x12\x18\n\x14RcvdAddAckRevocation\x10\x05\x12\x12\n\x0eRcvdRemoveHtlc\x10\x06\x12\x14\n\x10RcvdRemoveCommit\x10\x07\x12\x18\n\x14SentRemoveRevocation\x10\x08\x12\x17\n\x13SentRemoveAckCommit\x10\t\x12\x1b\n\x17RcvdRemoveAckRevocation\x10\n\x12\x0f\n\x0bRcvdAddHtlc\x10\x0b\x12\x11\n\rRcvdAddCommit\x10\x0c\x12\x15\n\x11SentAddRevocation\x10\r\x12\x14\n\x10SentAddAckCommit\x10\x0e\x12\x12\n\x0eSentRemoveHtlc\x10\x0f\x12\x14\n\x10SentRemoveCommit\x10\x10\x12\x18\n\x14RcvdRemoveRevocation\x10\x11\x12\x17\n\x13RcvdRemoveAckCommit\x10\x12\x12\x1b\n\x17SentRemoveAckRevocation\x10\x13*\xa2\x01\n\x0f\x43hannelTypeName\x12\x19\n\x15static_remotekey_even\x10\x00\x12\x17\n\x13\x61nchor_outputs_even\x10\x01\x12!\n\x1d\x61nchors_zero_fee_htlc_tx_even\x10\x02\x12\x13\n\x0fscid_alias_even\x10\x03\x12\x11\n\rzeroconf_even\x10\x04\x12\x10\n\x0c\x61nchors_even\x10\x05*\x89\x01\n\x12\x41utocleanSubsystem\x12\x15\n\x11SUCCEEDEDFORWARDS\x10\x00\x12\x12\n\x0e\x46\x41ILEDFORWARDS\x10\x01\x12\x11\n\rSUCCEEDEDPAYS\x10\x02\x12\x0e\n\nFAILEDPAYS\x10\x03\
x12\x10\n\x0cPAIDINVOICES\x10\x04\x12\x13\n\x0f\x45XPIREDINVOICES\x10\x05*K\n\x10PluginSubcommand\x12\t\n\x05START\x10\x00\x12\x08\n\x04STOP\x10\x01\x12\n\n\x06RESCAN\x10\x02\x12\x0c\n\x08STARTDIR\x10\x03\x12\x08\n\x04LIST\x10\x04\x62\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'primitives_pb2', _globals) if not _descriptor._USE_C_DESCRIPTORS: DESCRIPTOR._loaded_options = None - _globals['_CHANNELSIDE']._serialized_start=954 - _globals['_CHANNELSIDE']._serialized_end=990 - _globals['_CHANNELSTATE']._serialized_start=993 - _globals['_CHANNELSTATE']._serialized_end=1352 - _globals['_HTLCSTATE']._serialized_start=1355 - _globals['_HTLCSTATE']._serialized_end=1824 - _globals['_CHANNELTYPENAME']._serialized_start=1827 - _globals['_CHANNELTYPENAME']._serialized_end=1989 - _globals['_AUTOCLEANSUBSYSTEM']._serialized_start=1992 - _globals['_AUTOCLEANSUBSYSTEM']._serialized_end=2129 - _globals['_PLUGINSUBCOMMAND']._serialized_start=2131 - _globals['_PLUGINSUBCOMMAND']._serialized_end=2206 + _globals['_JSONOBJECT_FIELDSENTRY']._loaded_options = None + _globals['_JSONOBJECT_FIELDSENTRY']._serialized_options = b'8\001' + _globals['_CHANNELSIDE']._serialized_start=1564 + _globals['_CHANNELSIDE']._serialized_end=1600 + _globals['_CHANNELSTATE']._serialized_start=1603 + _globals['_CHANNELSTATE']._serialized_end=1962 + _globals['_HTLCSTATE']._serialized_start=1965 + _globals['_HTLCSTATE']._serialized_end=2434 + _globals['_CHANNELTYPENAME']._serialized_start=2437 + _globals['_CHANNELTYPENAME']._serialized_end=2599 + _globals['_AUTOCLEANSUBSYSTEM']._serialized_start=2602 + _globals['_AUTOCLEANSUBSYSTEM']._serialized_end=2739 + _globals['_PLUGINSUBCOMMAND']._serialized_start=2741 + _globals['_PLUGINSUBCOMMAND']._serialized_end=2816 _globals['_AMOUNT']._serialized_start=25 _globals['_AMOUNT']._serialized_end=47 _globals['_AMOUNTORALL']._serialized_start=49 @@ -71,4 +73,16 @@ 
_globals['_TLVENTRY']._serialized_end=907 _globals['_TLVSTREAM']._serialized_start=909 _globals['_TLVSTREAM']._serialized_end=952 + _globals['_JSONOBJECTORARRAY']._serialized_start=954 + _globals['_JSONOBJECTORARRAY']._serialized_end=1054 + _globals['_JSONOBJECT']._serialized_start=1056 + _globals['_JSONOBJECT']._serialized_end=1176 + _globals['_JSONOBJECT_FIELDSENTRY']._serialized_start=1115 + _globals['_JSONOBJECT_FIELDSENTRY']._serialized_end=1176 + _globals['_JSONARRAY']._serialized_start=1178 + _globals['_JSONARRAY']._serialized_end=1221 + _globals['_JSONVALUE']._serialized_start=1224 + _globals['_JSONVALUE']._serialized_end=1424 + _globals['_JSONSCALAR']._serialized_start=1427 + _globals['_JSONSCALAR']._serialized_end=1562 # @@protoc_insertion_point(module_scope) diff --git a/contrib/pyln-testing/pyln/testing/fixtures.py b/contrib/pyln-testing/pyln/testing/fixtures.py index e04d6d8e7929..641d4bd7087d 100644 --- a/contrib/pyln-testing/pyln/testing/fixtures.py +++ b/contrib/pyln-testing/pyln/testing/fixtures.py @@ -1,6 +1,16 @@ from concurrent import futures from pyln.testing.db import SqliteDbProvider, PostgresDbProvider -from pyln.testing.utils import NodeFactory, BitcoinD, ElementsD, env, LightningNode, TEST_DEBUG, TEST_NETWORK, SLOW_MACHINE, VALGRIND +from pyln.testing.utils import ( + NodeFactory, + BitcoinD, + ElementsD, + env, + LightningNode, + TEST_DEBUG, + TEST_NETWORK, + SLOW_MACHINE, + VALGRIND, +) from pyln.client import Millisatoshi from typing import Dict from pathlib import Path @@ -28,7 +38,7 @@ def test_base_dir(): d = os.getenv("TEST_DIR", "/tmp") - directory = tempfile.mkdtemp(prefix='ltests-', dir=d) + directory = tempfile.mkdtemp(prefix="ltests-", dir=d) print("Running tests in {}".format(directory)) yield directory @@ -36,13 +46,19 @@ def test_base_dir(): # Now check if any test directory is left because the corresponding test # failed. If there are no such tests we can clean up the root test # directory. 
- contents = [d for d in os.listdir(directory) if os.path.isdir(os.path.join(directory, d)) and d.startswith('test_')] + contents = [ + d + for d in os.listdir(directory) + if os.path.isdir(os.path.join(directory, d)) and d.startswith("test_") + ] if contents == []: shutil.rmtree(directory) else: - print("Leaving base_dir {} intact, it still has test sub-directories with failure details: {}".format( - directory, contents - )) + print( + "Leaving base_dir {} intact, it still has test sub-directories with failure details: {}".format( + directory, contents + ) + ) @pytest.fixture(autouse=True) @@ -66,7 +82,7 @@ def setup_logging(): loggers = [logging.getLogger()] + list(logging.Logger.manager.loggerDict.values()) for logger in loggers: - handlers = getattr(logger, 'handlers', []) + handlers = getattr(logger, "handlers", []) for handler in handlers: logger.removeHandler(handler) @@ -80,7 +96,9 @@ def directory(request, test_base_dir, test_name): """ # Auto set value if it isn't in the dict yet __attempts[test_name] = __attempts.get(test_name, 0) + 1 - directory = os.path.join(test_base_dir, "{}_{}".format(test_name, __attempts[test_name])) + directory = os.path.join( + test_base_dir, "{}_{}".format(test_name, __attempts[test_name]) + ) request.node.has_errors = False if not os.path.exists(directory): @@ -92,9 +110,9 @@ def directory(request, test_base_dir, test_name): # determine whether we succeeded or failed. Outcome can be None if the # failure occurs during the setup phase, hence the use to getattr instead # of accessing it directly. 
- rep_call = getattr(request.node, 'rep_call', None) - outcome = 'passed' if rep_call is None else rep_call.outcome - failed = not outcome or request.node.has_errors or outcome != 'passed' + rep_call = getattr(request.node, "rep_call", None) + outcome = "passed" if rep_call is None else rep_call.outcome + failed = not outcome or request.node.has_errors or outcome != "passed" if not failed: try: @@ -102,13 +120,19 @@ def directory(request, test_base_dir, test_name): except OSError: # Usually, this means that e.g. valgrind is still running. Wait # a little and retry. - files = [os.path.join(dp, f) for dp, dn, fn in os.walk(directory) for f in fn] + files = [ + os.path.join(dp, f) for dp, dn, fn in os.walk(directory) for f in fn + ] print("Directory still contains files: ", files) print("... sleeping then retrying") time.sleep(10) shutil.rmtree(directory) else: - logging.debug("Test execution failed, leaving the test directory {} intact.".format(directory)) + logging.debug( + "Test execution failed, leaving the test directory {} intact.".format( + directory + ) + ) @pytest.fixture @@ -117,8 +141,8 @@ def test_name(request): network_daemons = { - 'regtest': BitcoinD, - 'liquid-regtest': ElementsD, + "regtest": BitcoinD, + "liquid-regtest": ElementsD, } @@ -129,12 +153,12 @@ def node_cls(): @pytest.fixture def bitcoind(request, directory, teardown_checks): - chaind = network_daemons[env('TEST_NETWORK', 'regtest')] + chaind = network_daemons[env("TEST_NETWORK", "regtest")] bitcoind = chaind(bitcoin_dir=directory) # @pytest.mark.parametrize('bitcoind', [False], indirect=True) if you don't # want bitcoind started! 
- if getattr(request, 'param', True): + if getattr(request, "param", True): try: bitcoind.start() except Exception: @@ -145,21 +169,25 @@ def bitcoind(request, directory, teardown_checks): # FIXME: include liquid-regtest in this check after elementsd has been # updated - if info['version'] < 200100 and env('TEST_NETWORK') != 'liquid-regtest': + if info["version"] < 200100 and env("TEST_NETWORK") != "liquid-regtest": bitcoind.rpc.stop() - raise ValueError("bitcoind is too old. At least version 20100 (v0.20.1)" - " is needed, current version is {}".format(info['version'])) - elif info['version'] < 160000: + raise ValueError( + "bitcoind is too old. At least version 20100 (v0.20.1)" + " is needed, current version is {}".format(info["version"]) + ) + elif info["version"] < 160000: bitcoind.rpc.stop() - raise ValueError("elementsd is too old. At least version 160000 (v0.16.0)" - " is needed, current version is {}".format(info['version'])) + raise ValueError( + "elementsd is too old. At least version 160000 (v0.16.0)" + " is needed, current version is {}".format(info["version"]) + ) info = bitcoind.rpc.getblockchaininfo() # Make sure we have some spendable funds - if info['blocks'] < 101: - bitcoind.generate_block(101 - info['blocks']) - elif bitcoind.rpc.getwalletinfo()['balance'] < 1: + if info["blocks"] < 101: + bitcoind.generate_block(101 - info["blocks"]) + elif bitcoind.rpc.getwalletinfo()["balance"] < 1: logging.debug("Insufficient balance, generating 1 block") bitcoind.generate_block(1) @@ -218,6 +246,7 @@ def teardown_checks(request): def _extra_validator(is_request: bool): """JSON Schema validator with additions for our specialized types""" + def is_hex(checker, instance): """Hex string""" if not checker.is_type(instance, "string"): @@ -272,9 +301,14 @@ def is_short_channel_id(checker, instance): # 2. the next 3 bytes: indicating the transaction index within the block # 3. the least significant 2 bytes: indicating the output index that pays to the # channel. 
- return (blocknum >= 0 and blocknum < 2**24 - and txnum >= 0 and txnum < 2**24 - and outnum >= 0 and outnum < 2**16) + return ( + blocknum >= 0 + and blocknum < 2**24 + and txnum >= 0 + and txnum < 2**24 + and outnum >= 0 + and outnum < 2**16 + ) def is_short_channel_id_dir(checker, instance): """Short channel id with direction""" @@ -307,7 +341,16 @@ def is_feerate(checker, instance): return False if instance in ("urgent", "normal", "slow", "minimum"): return True - if instance in ("opening", "mutual_close", "unilateral_close", "delayed_to_us", "htlc_resolution", "penalty", "min_acceptable", "max_acceptable"): + if instance in ( + "opening", + "mutual_close", + "unilateral_close", + "delayed_to_us", + "htlc_resolution", + "penalty", + "min_acceptable", + "max_acceptable", + ): return True if not instance.endswith("perkw") and not instance.endswith("perkb"): return False @@ -409,7 +452,7 @@ def is_sat_or_all(checker, instance): def is_currency(checker, instance): """currency including currency code""" - pattern = re.compile(r'^\d+(\.\d+)?[A-Z][A-Z][A-Z]$') + pattern = re.compile(r"^\d+(\.\d+)?[A-Z][A-Z][A-Z]$") if checker.is_type(instance, "string") and pattern.match(instance): return True return False @@ -425,67 +468,93 @@ def is_string_map(checker, instance): return False return True + def is_json_object_or_array(checker, instance): + """rpc method params can be either an object or an array""" + return checker.is_type(instance, "object") or checker.is_type(instance, "array") + + def is_json_scalar(checker, instance): + """rpc id can be either a string, number, or null""" + return checker.is_type(instance, "string") or checker.is_type(instance, "number") or checker.is_type(instance, "null") + # "msat" for request can be many forms if is_request: is_msat = is_msat_request else: is_msat = is_msat_response - type_checker = jsonschema.Draft7Validator.TYPE_CHECKER.redefine_many({ - "hex": is_hex, - "hash": is_32byte_hex, - "secret": is_32byte_hex, - "u64": is_u64, - 
"u32": is_u32, - "u16": is_u16, - "u8": is_u8, - "pubkey": is_pubkey, - "sat": is_sat, - "sat_or_all": is_sat_or_all, - "msat": is_msat, - "msat_or_all": is_msat_or_all, - "msat_or_any": is_msat_or_any, - "currency": is_currency, - "txid": is_txid, - "signature": is_signature, - "bip340sig": is_bip340sig, - "short_channel_id": is_short_channel_id, - "short_channel_id_dir": is_short_channel_id_dir, - "outpoint": is_outpoint, - "feerate": is_feerate, - "outputdesc": is_outputdesc, - "string_map": is_string_map, - }) - - return jsonschema.validators.extend(jsonschema.Draft7Validator, - type_checker=type_checker) + type_checker = jsonschema.Draft7Validator.TYPE_CHECKER.redefine_many( + { + "hex": is_hex, + "hash": is_32byte_hex, + "secret": is_32byte_hex, + "u64": is_u64, + "u32": is_u32, + "u16": is_u16, + "u8": is_u8, + "pubkey": is_pubkey, + "sat": is_sat, + "sat_or_all": is_sat_or_all, + "msat": is_msat, + "msat_or_all": is_msat_or_all, + "msat_or_any": is_msat_or_any, + "currency": is_currency, + "txid": is_txid, + "signature": is_signature, + "bip340sig": is_bip340sig, + "short_channel_id": is_short_channel_id, + "short_channel_id_dir": is_short_channel_id_dir, + "outpoint": is_outpoint, + "feerate": is_feerate, + "outputdesc": is_outputdesc, + "string_map": is_string_map, + "json_object_or_array": is_json_object_or_array, + "json_scalar": is_json_scalar, + } + ) + + return jsonschema.validators.extend( + jsonschema.Draft7Validator, type_checker=type_checker + ) def _load_schema(filename): """Load the schema from @filename and create a validator for it""" - with open(filename, 'r') as f: + with open(filename, "r") as f: data = json.load(f) - return [_extra_validator(True)(data.get('request', {})), _extra_validator(False)(data.get('response', {}))] + return [ + _extra_validator(True)(data.get("request", {})), + _extra_validator(False)(data.get("response", {})), + ] @pytest.fixture(autouse=True) def jsonschemas(): """Load schema file if it exist: returns 
request/response schemas by pairs""" try: - schemafiles = os.listdir('doc/schemas') + schemafiles = os.listdir("doc/schemas") except FileNotFoundError: schemafiles = [] schemas = {} for fname in schemafiles: - if fname.endswith('.json'): - base = fname.replace('lightning-', '').replace('.json', '') + if fname.endswith(".json"): + base = fname.replace("lightning-", "").replace(".json", "") # Request is 0 and Response is 1 - schemas[base] = _load_schema(os.path.join('doc/schemas', fname)) + schemas[base] = _load_schema(os.path.join("doc/schemas", fname)) return schemas @pytest.fixture -def node_factory(request, directory, test_name, bitcoind, executor, db_provider, teardown_checks, node_cls, jsonschemas): +def node_factory( + request, + directory, + test_name, + bitcoind, + executor, + db_provider, + teardown_checks, + node_cls, + jsonschemas, +): nf = NodeFactory( request, test_name, @@ -514,7 +583,11 @@ def map_node_error(nodes, f, msg): map_node_error(nf.nodes, printValgrindErrors, "reported valgrind errors") map_node_error(nf.nodes, printCrashLog, "had crash.log files") map_node_error(nf.nodes, checkBroken, "had BROKEN or That's weird messages") - map_node_error(nf.nodes, lambda n: not n.allow_warning and n.daemon.is_in_log(r' WARNING:'), "had warning messages") + map_node_error( + nf.nodes, + lambda n: not n.allow_warning and n.daemon.is_in_log(r" WARNING:"), + "had warning messages", + ) map_node_error(nf.nodes, checkReconnect, "had unexpected reconnections") map_node_error(nf.nodes, checkPluginJSON, "had malformed hooks/notifications") @@ -523,13 +596,29 @@ def map_node_error(nodes, f, msg): for n in nf.nodes: dumpGossipStore(n) - map_node_error(nf.nodes, lambda n: n.daemon.is_in_log('Bad reestablish'), "had bad reestablish") - map_node_error(nf.nodes, lambda n: n.daemon.is_in_log('bad hsm request'), "had bad hsm requests") - map_node_error(nf.nodes, lambda n: n.daemon.is_in_log(r'Accessing a null column'), "Accessing a null column") + map_node_error( + 
nf.nodes, lambda n: n.daemon.is_in_log("Bad reestablish"), "had bad reestablish" + ) + map_node_error( + nf.nodes, + lambda n: n.daemon.is_in_log("bad hsm request"), + "had bad hsm requests", + ) + map_node_error( + nf.nodes, + lambda n: n.daemon.is_in_log(r"Accessing a null column"), + "Accessing a null column", + ) map_node_error(nf.nodes, checkMemleak, "had memleak messages") - map_node_error(nf.nodes, lambda n: n.rc != 0 and not n.may_fail, "Node exited with return code {n.rc}") + map_node_error( + nf.nodes, + lambda n: n.rc != 0 and not n.may_fail, + "Node exited with return code {n.rc}", + ) if not ok: - map_node_error(nf.nodes, prinErrlog, "some node failed unexpected, non-empty errlog file") + map_node_error( + nf.nodes, prinErrlog, "some node failed unexpected, non-empty errlog file" + ) for n in nf.nodes: n.daemon.cleanup_files() @@ -539,7 +628,7 @@ def getErrlog(node): for error_file in os.listdir(node.daemon.lightning_dir): if not re.fullmatch(r"errlog", error_file): continue - with open(os.path.join(node.daemon.lightning_dir, error_file), 'r') as f: + with open(os.path.join(node.daemon.lightning_dir, error_file), "r") as f: errors = f.read().strip() if errors: return errors, error_file @@ -549,7 +638,11 @@ def getErrlog(node): def prinErrlog(node): errors, fname = getErrlog(node) if errors: - print("-" * 31, "stderr of node {} captured in {} file".format(node.daemon.prefix, fname), "-" * 32) + print( + "-" * 31, + "stderr of node {} captured in {} file".format(node.daemon.prefix, fname), + "-" * 32, + ) print(errors) print("-" * 80) return 1 if errors else 0 @@ -559,7 +652,7 @@ def getValgrindErrors(node): for error_file in os.listdir(node.daemon.lightning_dir): if not re.fullmatch(r"valgrind-errors.\d+", error_file): continue - with open(os.path.join(node.daemon.lightning_dir, error_file), 'r') as f: + with open(os.path.join(node.daemon.lightning_dir, error_file), "r") as f: errors = f.read().strip() if errors: return errors, error_file @@ -580,8 
+673,8 @@ def getCrashLog(node): if node.may_fail: return None, None try: - crashlog = os.path.join(node.daemon.lightning_dir, 'crash.log') - with open(crashlog, 'r') as f: + crashlog = os.path.join(node.daemon.lightning_dir, "crash.log") + with open(crashlog, "r") as f: return f.readlines(), crashlog except Exception: return None, None @@ -599,15 +692,17 @@ def printCrashLog(node): def checkReconnect(node): if node.may_reconnect: return 0 - if node.daemon.is_in_log('Peer has reconnected'): + if node.daemon.is_in_log("Peer has reconnected"): return 1 return 0 def dumpGossipStore(node): - gs_path = os.path.join(node.daemon.lightning_dir, TEST_NETWORK, 'gossip_store') - gs = subprocess.run(['devtools/dump-gossipstore', '--print-deleted', gs_path], - stdout=subprocess.PIPE) + gs_path = os.path.join(node.daemon.lightning_dir, TEST_NETWORK, "gossip_store") + gs = subprocess.run( + ["devtools/dump-gossipstore", "--print-deleted", gs_path], + stdout=subprocess.PIPE, + ) print("GOSSIP STORE CONTENTS for {}:\n".format(node.daemon.prefix)) print(gs.stdout.decode()) @@ -616,22 +711,24 @@ def checkBadGossip(node): if node.allow_bad_gossip: return 0 # We can get bad gossip order from inside error msgs. - if node.daemon.is_in_log('Bad gossip order:'): + if node.daemon.is_in_log("Bad gossip order:"): # This can happen if a node sees a node_announce after a channel # is deleted, however. - if node.daemon.is_in_log('Deleting channel'): + if node.daemon.is_in_log("Deleting channel"): return 0 return 1 # Other 'Bad' messages shouldn't happen. 
- if node.daemon.is_in_log(r'gossipd.*Bad (?!gossip order from error)'): + if node.daemon.is_in_log(r"gossipd.*Bad (?!gossip order from error)"): return 1 return 0 def checkBroken(node): node.daemon.logs_catchup() - broken_lines = [l for l in node.daemon.logs if '**BROKEN**' in l or "That's weird: " in l] + broken_lines = [ + l for l in node.daemon.logs if "**BROKEN**" in l or "That's weird: " in l + ] if node.broken_log: ex = re.compile(node.broken_log) broken_lines = [l for l in broken_lines if not ex.search(l)] @@ -650,16 +747,18 @@ def checkPluginJSON(node): return 0 try: - notificationfiles = os.listdir('doc/schemas/notification') + notificationfiles = os.listdir("doc/schemas/notification") except FileNotFoundError: notificationfiles = [] notifications = {} for fname in notificationfiles: - if fname.endswith('.json'): - base = fname.replace('.json', '') + if fname.endswith(".json"): + base = fname.replace(".json", "") # Request is 0 and Response is 1 - notifications[base] = _load_schema(os.path.join('doc/schemas/notification', fname)) + notifications[base] = _load_schema( + os.path.join("doc/schemas/notification", fname) + ) # FIXME: add doc/schemas/hook/ hooks = {} @@ -667,24 +766,27 @@ def checkPluginJSON(node): for f in (Path(node.daemon.lightning_dir) / "plugin-io").iterdir(): # e.g. hook_in-peer_connected-124567-358 io = json.loads(f.read_text()) - parts = f.name.split('-') - if parts[0] == 'hook_in': + parts = f.name.split("-") + if parts[0] == "hook_in": schema = hooks.get(parts[1]) - req = io['result'] + req = io["result"] direction = 1 - elif parts[0] == 'hook_out': + elif parts[0] == "hook_out": schema = hooks.get(parts[1]) - req = io['params'] + req = io["params"] direction = 0 else: - assert parts[0] == 'notification_out' + assert parts[0] == "notification_out" schema = notifications.get(parts[1]) # The notification is wrapped in an object of its own name. 
- req = io['params'][parts[1]] + req = io["params"][parts[1]] direction = 1 # Until v26.09, with channel_state_changed.null_scid, that notification will be non-schema compliant. - if f.name.startswith('notification_out-channel_state_changed-') and node.daemon.opts.get('allow-deprecated-apis', True) is True: + if ( + f.name.startswith("notification_out-channel_state_changed-") + and node.daemon.opts.get("allow-deprecated-apis", True) is True + ): continue if schema is not None: @@ -697,33 +799,33 @@ def checkPluginJSON(node): def checkBadReestablish(node): - if node.daemon.is_in_log('Bad reestablish'): + if node.daemon.is_in_log("Bad reestablish"): return 1 return 0 def checkBadHSMRequest(node): - if node.daemon.is_in_log('bad hsm request'): + if node.daemon.is_in_log("bad hsm request"): return 1 return 0 def checkMemleak(node): - if node.daemon.is_in_log('MEMLEAK:'): + if node.daemon.is_in_log("MEMLEAK:"): return 1 return 0 # Mapping from TEST_DB_PROVIDER env variable to class to be used providers = { - 'sqlite3': SqliteDbProvider, - 'postgres': PostgresDbProvider, + "sqlite3": SqliteDbProvider, + "postgres": PostgresDbProvider, } @pytest.fixture def db_provider(test_base_dir): - provider = providers[os.getenv('TEST_DB_PROVIDER', 'sqlite3')](test_base_dir) + provider = providers[os.getenv("TEST_DB_PROVIDER", "sqlite3")](test_base_dir) provider.start() yield provider provider.stop() @@ -740,29 +842,29 @@ def executor(teardown_checks): def chainparams(): """Return the chainparams for the TEST_NETWORK. - - chain_hash is in network byte order, not the RPC return order. - - example_addr doesn't belong to any node in the test (randomly generated) + - chain_hash is in network byte order, not the RPC return order. 
+ - example_addr doesn't belong to any node in the test (randomly generated) """ chainparams = { - 'regtest': { + "regtest": { "bip173_prefix": "bcrt", "elements": False, "name": "regtest", - "p2sh_prefix": '2', + "p2sh_prefix": "2", "example_addr": "bcrt1qeyyk6sl5pr49ycpqyckvmttus5ttj25pd0zpvg", "feeoutput": False, - "chain_hash": '06226e46111a0b59caaf126043eb5bbf28c34f3a5e332a1fc7b2b73cf188910f', + "chain_hash": "06226e46111a0b59caaf126043eb5bbf28c34f3a5e332a1fc7b2b73cf188910f", }, - 'liquid-regtest': { + "liquid-regtest": { "bip173_prefix": "ert", "elements": True, "name": "liquid-regtest", - "p2sh_prefix": 'X', + "p2sh_prefix": "X", "example_addr": "ert1qjsesxflhs3632syhcz7llpfx20p5tr0kpllfve", "feeoutput": True, "chain_hash": "9f87eb580b9e5f11dc211e9fb66abb3699999044f8fe146801162393364286c6", - } + }, } - return chainparams[env('TEST_NETWORK', 'regtest')] + return chainparams[env("TEST_NETWORK", "regtest")] From 08b4578f259b2bd5f10a3df566228c0bb6fc2687 Mon Sep 17 00:00:00 2001 From: daywalker90 <8257956+daywalker90@users.noreply.github.com> Date: Mon, 13 Apr 2026 16:53:35 +0200 Subject: [PATCH 4/6] crates: add convenience typed methods for hook subscriptions We can now make practical use of the generated code from the hook schemas. --- plugins/src/lib.rs | 91 +++++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 90 insertions(+), 1 deletion(-) diff --git a/plugins/src/lib.rs b/plugins/src/lib.rs index e9a3a10d7888..b08da4b510cb 100644 --- a/plugins/src/lib.rs +++ b/plugins/src/lib.rs @@ -2,6 +2,7 @@ use crate::codec::{JsonCodec, JsonRpcCodec}; pub use anyhow::anyhow; use anyhow::{Context, Result}; use futures::sink::SinkExt; +use serde::de::DeserializeOwned; use serde::Serialize; use tokio::io::{AsyncReadExt, AsyncWriteExt}; extern crate log; @@ -195,7 +196,9 @@ where self } - /// Add a subscription to a given `hookname` + /// Add a hook subscription for `hookname` with a raw [`serde_json::Value`] request and response. 
+ /// Prefer [`Builder::hook_typed`] for type-safe hooks, or [`Builder::hook_from_builder`] if you + /// need to configure `before`, `after`, or `filters`. pub fn hook(mut self, hookname: &str, callback: C) -> Self where C: Send + Sync + 'static, @@ -215,11 +218,60 @@ where self } + /// Add a hook subscription using a [`HookBuilder`], which allows configuring `before`, `after`, + /// and `filters` in addition to the callback. Use [`HookBuilder::new`] for raw + /// [`serde_json::Value`] hooks or [`HookBuilder::new_typed`] for type-safe hooks. pub fn hook_from_builder(mut self, hook: HookBuilder) -> Builder { self.hooks.insert(hook.name.clone(), hook.build()); self } + /// Add a hook subscription for `hookname` with typed request and response. The request is + /// deserialized from JSON into `Req` and the response is serialized from `Resp` back to JSON + /// automatically. If deserialization of the request fails, the hook returns an error to CLN. + /// Use [`Builder::hook_from_builder`] with [`HookBuilder::new_typed`] if you additionally need + /// to configure `before`, `after`, or `filters`. + pub fn hook_typed(mut self, hookname: &str, callback: C) -> Self + where + C: Send + Sync + 'static, + C: Fn(Plugin, Req) -> F + 'static, + F: Future> + Send + 'static, + Req: DeserializeOwned + Send + 'static, + Resp: Serialize + Send + 'static, + { + let hookname = hookname.to_string(); + self.hooks.insert( + hookname.clone(), + Hook { + name: hookname.clone(), + callback: Box::new(move |p, r| { + let typed_req = serde_json::from_value(r).unwrap_or_else(|e| { + let error = format!( + "cln-plugin: hook '{hookname}' received a request that doesn't match \ + the expected schema. 
Error: {e}" + ); + println!( + "{}", + serde_json::json!({"jsonrpc": "2.0", + "method": "log", + "params": {"level":"warn", "message":error}}) + ); + std::process::exit(1); + }); + let fut = callback(p, typed_req); + Box::pin(async move { + let typed_resp = fut.await?; + serde_json::to_value(typed_resp).map_err(Error::from) + }) + }), + before: Vec::new(), + after: Vec::new(), + filters: None, + }, + ); + self + } + /// Register a custom RPC method for the RPC passthrough from the /// main daemon pub fn rpcmethod(mut self, name: &str, description: &str, callback: C) -> Builder @@ -498,6 +550,43 @@ where } } + pub fn new_typed(name: &str, callback: C) -> Self + where + C: Send + Sync + 'static, + C: Fn(Plugin, Req) -> F + 'static, + F: Future> + Send + 'static, + Req: DeserializeOwned + Send + 'static, + Resp: Serialize + Send + 'static, + { + let hookname = name.to_string(); + Self { + name: hookname.clone(), + callback: Box::new(move |p, r| { + let typed_req = serde_json::from_value(r).unwrap_or_else(|e| { + let error = format!( + "cln-plugin: hook '{hookname}' received a request that doesn't match \ + the expected schema. 
Error: {e}" + ); + println!( + "{}", + serde_json::json!({"jsonrpc": "2.0", + "method": "log", + "params": {"level":"warn", "message":error}}) + ); + std::process::exit(1); + }); + let fut = callback(p, typed_req); + Box::pin(async move { + let typed_resp = fut.await?; + serde_json::to_value(typed_resp).map_err(Error::from) + }) + }), + before: Vec::new(), + after: Vec::new(), + filters: None, + } + } + pub fn before(mut self, before: Vec) -> Self { self.before = before; self From 9f76538d8c759569202ce74a14574b077dd15ff3 Mon Sep 17 00:00:00 2001 From: daywalker90 <8257956+daywalker90@users.noreply.github.com> Date: Fri, 17 Apr 2026 11:55:58 +0200 Subject: [PATCH 5/6] schemas: neaten notification and hook schemas in preparation for markdown generation also includes formatting fixes for the bkpr-report schema, brackets usually need escaping in mdx Changelog-None --- contrib/msggen/msggen/schema.json | 670 ++++++------------ doc/schemas/bkpr-report.json | 76 +- doc/schemas/hook/commitment_revocation.json | 2 +- doc/schemas/hook/custommsg.json | 2 +- doc/schemas/hook/db_write.json | 2 +- doc/schemas/hook/htlc_accepted.json | 2 +- doc/schemas/hook/invoice_payment.json | 6 +- doc/schemas/hook/onion_message_recv.json | 4 +- .../hook/onion_message_recv_secret.json | 4 +- doc/schemas/hook/openchannel.json | 2 +- doc/schemas/hook/openchannel2.json | 2 +- doc/schemas/hook/openchannel2_changed.json | 2 +- doc/schemas/hook/openchannel2_sign.json | 4 +- doc/schemas/hook/peer_connected.json | 2 +- doc/schemas/hook/rbf_channel.json | 2 +- doc/schemas/hook/recover.json | 2 +- doc/schemas/hook/rpc_command.json | 2 +- .../notification/balance_snapshot.json | 25 +- doc/schemas/notification/block_added.json | 6 +- .../notification/channel_open_failed.json | 3 +- doc/schemas/notification/channel_opened.json | 12 +- .../notification/channel_state_changed.json | 28 +- doc/schemas/notification/coin_movement.json | 62 +- doc/schemas/notification/connect.json | 13 +- 
doc/schemas/notification/custommsg.json | 6 +- .../notification/deprecated_oneshot.json | 8 +- doc/schemas/notification/disconnect.json | 8 +- doc/schemas/notification/forward_event.json | 38 +- .../notification/invoice_creation.json | 14 +- doc/schemas/notification/invoice_payment.json | 14 +- doc/schemas/notification/log.json | 20 +- .../onionmessage_forward_fail.json | 23 +- .../notification/openchannel_peer_sigs.json | 11 +- doc/schemas/notification/plugin_started.json | 15 +- doc/schemas/notification/plugin_stopped.json | 15 +- doc/schemas/notification/sendpay_failure.json | 81 +-- doc/schemas/notification/sendpay_success.json | 50 +- doc/schemas/notification/shutdown.json | 10 +- doc/schemas/notification/warning.json | 17 +- .../notification/xpay_pay_part_end.json | 38 +- .../notification/xpay_pay_part_start.json | 37 +- 41 files changed, 456 insertions(+), 884 deletions(-) diff --git a/contrib/msggen/msggen/schema.json b/contrib/msggen/msggen/schema.json index 88b241c8e00d..156151cec622 100644 --- a/contrib/msggen/msggen/schema.json +++ b/contrib/msggen/msggen/schema.json @@ -5131,47 +5131,47 @@ "type": "string", "description": [ "This format string is used for each income event (note that `lightning-cli` can get confused if the format begins and ends with `{` and `}`, so you may need to add a space). The following tags in braces are replaced:", - "{account}: account name (channel id, or 'wallet')", - "{tag}: event tag. 
This will be one of:", - " * `deposit`: an onchain send to the wallet by `outpoint`.", - " * `htlc_fulfill`: an onchain HTLC fulfill (due to unilaterally closed channel) at `outpoint`.", - " * `invoice`: either incoming (positive credit) or outgoing (positive debit) payment.", - " * `invoice_fee`: the routing fee paid to pay an outgoing invoice", - " * `journal_entry`: an accounting fixup, caused by loss of data (or, a node which predates bookkeeper)", - " * `lease_fee`: a fee paid or received to lease a channel via the experimental liquidity advertisement option", - " * `onchain_fee,`: a miner fee paid to open/close a channel, or make a bitcoin payment. The `txid` will correspond to a `withdrawal` `outpoint` for withdrawl", - " * `pushed`: an amount pushed to or from us on channel open.", - " * `rebalance_fee`: routing fee paid for sending a payment to ourselves.", - " * `routed`: credit gained from routing a payment", - " * `withdrawal`: debit from an onchain spend.", - "{description}: description as provided in the invoice, if present", - "{credit}: credit amount in BTC", - "{debit}: debit amount in BTC", - "{fees}: fee amount in BTC", - "{localtime}: event timestamp in local time as YYYY-MM-DD HH:MM:SS", - "{utctime}: event timestamp in UTC as YYYY-MM-DD HH:MM:SS", - "{outpoint}: outpoint, if present", - "{txid}: txid, if present", - "{payment_id}: payment hash, if present", - "{bkpr-currency}: value of bkpr-currency, if any", - "{currencyrate}: exchange rate for 1 BTC at that event time, if available", - "{creditdebit}: +credit or -debit (or 0) in BTC", - "{currencycredit}: credit amount converted into bkpr-currency", - "{currencydebit}: debit amount converted into bkpr-currency", - "{currencycreditdebit}: +credit or -debit (or 0) in bkpr-currency", + " * `{account}`: account name (channel id, or 'wallet')", + " * `{tag}`: event tag. 
This will be one of:", + " * `deposit`: an onchain send to the wallet by `outpoint`.", + " * `htlc_fulfill`: an onchain HTLC fulfill (due to unilaterally closed channel) at `outpoint`.", + " * `invoice`: either incoming (positive credit) or outgoing (positive debit) payment.", + " * `invoice_fee`: the routing fee paid to pay an outgoing invoice", + " * `journal_entry`: an accounting fixup, caused by loss of data (or, a node which predates bookkeeper)", + " * `lease_fee`: a fee paid or received to lease a channel via the experimental liquidity advertisement option", + " * `onchain_fee,`: a miner fee paid to open/close a channel, or make a bitcoin payment. The `txid` will correspond to a `withdrawal` `outpoint` for withdrawl", + " * `pushed`: an amount pushed to or from us on channel open.", + " * `rebalance_fee`: routing fee paid for sending a payment to ourselves.", + " * `routed`: credit gained from routing a payment", + " * `withdrawal`: debit from an onchain spend.", + " * `{description}`: description as provided in the invoice, if present", + " * `{credit}`: credit amount in BTC", + " * `{debit}`: debit amount in BTC", + " * `{fees}`: fee amount in BTC", + " * `{localtime}`: event timestamp in local time as YYYY-MM-DD HH:MM:SS", + " * `{utctime}`: event timestamp in UTC as YYYY-MM-DD HH:MM:SS", + " * `{outpoint}`: outpoint, if present", + " * `{txid}`: txid, if present", + " * `{payment_id}`: payment hash, if present", + " * `{bkpr-currency}`: value of bkpr-currency, if any", + " * `{currencyrate}`: exchange rate for 1 BTC at that event time, if available", + " * `{creditdebit}`: +credit or -debit (or 0) in BTC", + " * `{currencycredit}`: credit amount converted into bkpr-currency", + " * `{currencydebit}`: debit amount converted into bkpr-currency", + " * `{currencycreditdebit}`: +credit or -debit (or 0) in bkpr-currency", "", - "If a field is unavailable, it expands to an empty string (or 0 for credit, debit, fees and creditdebit).", + " If a field is 
unavailable, it expands to an empty string (or 0 for credit, debit, fees and creditdebit).", "", - "Tags support C-style conditional syntax: {tag[?if-set][:if-not-set]}", - " * if-set: text to use when the tag is present (and non-zero for credit, debit, fees and creditdebit). Default is the tag value itself.", - " * if-not-set: text to use when the tag is absent (or zero for amount fields). Default is empty string (or 0 for amount fields).", - "Either or both parts may be omitted, and each part can itself contain tags. For example:", - " * {outpoint:NONE}: the outpoint value, or 'NONE' if not available", - " * {credit:0.00}: the credit value, or '0.00' if zero", - " * {outpoint?[{outpoint}]:NONE}: '[]' if outpoint is available, or 'NONE' if not", - " * {payment_id:{txid:UNKNOWN}}: the payment_id, or the txid if no payment_id, or 'UNKNOWN' if neither", + " Tags support C-style conditional syntax: `{tag[?if-set][:if-not-set]}`", + " * if-set: text to use when the tag is present (and non-zero for credit, debit, fees and creditdebit). Default is the tag value itself.", + " * if-not-set: text to use when the tag is absent (or zero for amount fields). Default is empty string (or 0 for amount fields).", + " Either or both parts may be omitted, and each part can itself contain tags. For example:", + " * `{outpoint:NONE}`: the outpoint value, or 'NONE' if not available", + " * `{credit:0.00}`: the credit value, or '0.00' if zero", + " * `{outpoint?[{outpoint}]:NONE}`: `[]` if outpoint is available, or 'NONE' if not", + " * `{payment_id:{txid:UNKNOWN}}`: the payment_id, or the txid if no payment_id, or 'UNKNOWN' if neither", "", - "To include a literal {, write {{." + " To include a literal `{`, write `{{`." ] }, "headers": { @@ -37900,11 +37900,8 @@ "The **balance_snapshot** notification informs whenever lightningd emits a balance snapshot for bookkeeping." 
], "added": "v24.11", - "request": { - "added": "v24.11" - }, + "request": {}, "response": { - "added": "v24.11", "required": [ "node_id", "blockheight", @@ -37916,32 +37913,27 @@ "type": "pubkey", "description": [ "The node id the snapshot was taken for." - ], - "added": "v24.11" + ] }, "blockheight": { "type": "u32", "description": [ "The blockheight at which the snapshot was taken." - ], - "added": "v24.11" + ] }, "timestamp": { "type": "u32", "description": [ "The snapshot time as seconds since epoch." - ], - "added": "v24.11" + ] }, "accounts": { "type": "array", "description": [ "The balances for each tracked account at the snapshot time." ], - "added": "v24.11", "items": { "type": "object", - "added": "v24.11", "additionalProperties": false, "required": [ "account_id", @@ -37953,22 +37945,19 @@ "type": "string", "description": [ "The account identifier for the balance." - ], - "added": "v24.11" + ] }, "balance_msat": { "type": "msat", "description": [ "The account balance in millisatoshis." - ], - "added": "v24.11" + ] }, "coin_type": { "type": "string", "description": [ "The BIP173 coin type name for the balance." - ], - "added": "v24.11" + ] } } } @@ -37997,15 +37986,13 @@ "type": "hash", "description": [ "The hash of the block." - ], - "added": "v22.11" + ] }, "height": { "type": "u32", "description": [ "The total block height." - ], - "added": "v22.11" + ] } } } @@ -38030,8 +38017,7 @@ "type": "hash", "description": [ "The channel id of the channel." 
- ], - "added": "pre-v0.10.1" + ] } } } @@ -38059,29 +38045,25 @@ "type": "pubkey", "description": [ "The id of the peer which opened the channel" - ], - "added": "pre-v0.10.1" + ] }, "funding_msat": { "type": "msat", "description": [ "The amount of the funding transaction" - ], - "added": "pre-v0.10.1" + ] }, "funding_txid": { "type": "txid", "description": [ "The transaction id of the funding transaction" - ], - "added": "pre-v0.10.1" + ] }, "channel_ready": { "type": "boolean", "description": [ "true if the channel is ready" - ], - "added": "pre-v0.10.1" + ] } } } @@ -38093,7 +38075,7 @@ "notification": "channel_state_changed", "title": "Notification for channel state change", "description": [ - "The **channel_state_changed** informs whenever the state of the channel has been updated." + "The **channel_state_changed** notification informs whenever the state of the channel has been updated." ], "added": "pre-v0.10.1", "request": {}, @@ -38110,29 +38092,25 @@ "type": "pubkey", "description": [ "The peer id of the channel." - ], - "added": "pre-v0.10.1" + ] }, "channel_id": { "type": "hash", "description": [ "The channel id of the channel." - ], - "added": "pre-v0.10.1" + ] }, "short_channel_id": { "type": "short_channel_id", "description": [ "The short channel id of the channel. If the channel is not yet confirmed, this field will be null." - ], - "added": "pre-v0.10.1" + ] }, "timestamp": { "type": "string", "description": [ "The timestamp of the state change." - ], - "added": "pre-v0.10.1" + ] }, "old_state": { "type": "string", @@ -38155,8 +38133,7 @@ "description": [ "The channel state, in particular \"CHANNELD_NORMAL\" and \"CHANNELD_AWAITING_SPLICE\" mean the channel can be used normally.", "The deprecated value 'unknown' is also present for new channels: after v26.04 this field will be omitted instead." 
- ], - "added": "pre-v0.10.1" + ] }, "new_state": { "type": "string", @@ -38178,10 +38155,9 @@ "CLOSED" ], "description": [ - "The channel state, in particular \"CHANNELD_NORMAL\" and \"CHANNELD_AWAITING_SPLICE\" mean the channel can be used normally", + "The channel state, in particular \"CHANNELD_NORMAL\" and \"CHANNELD_AWAITING_SPLICE\" mean the channel can be used normally.", "Note: *CLOSED* state was only added in v25.12." - ], - "added": "pre-v0.10.1" + ] }, "cause": { "type": "string", @@ -38195,15 +38171,13 @@ ], "description": [ "The cause of the state change." - ], - "added": "pre-v0.10.1" + ] }, "message": { "type": "string", "description": [ "The state change message." - ], - "added": "pre-v0.10.1" + ] } } } @@ -38218,11 +38192,8 @@ "The **coin_movement** notification informs whenever lightningd records a finalized ledger movement." ], "added": "pre-v0.10.1", - "request": { - "added": "pre-v0.10.1" - }, + "request": {}, "response": { - "added": "pre-v0.10.1", "required": [ "version", "coin_type", @@ -38240,22 +38211,19 @@ "type": "u32", "description": [ "The coin movement schema version." - ], - "added": "pre-v0.10.1" + ] }, "coin_type": { "type": "string", "description": [ "The BIP173 coin type name." - ], - "added": "pre-v0.10.1" + ] }, "node_id": { "type": "pubkey", "description": [ "The node id that emitted the notification." - ], - "added": "pre-v0.10.1" + ] }, "type": { "type": "string", @@ -38265,8 +38233,7 @@ ], "description": [ "Whether this is a channel or chain movement." - ], - "added": "pre-v0.10.1" + ] }, "created_index": { "type": "u64", @@ -38279,43 +38246,37 @@ "type": "string", "description": [ "The account identifier for the movement." - ], - "added": "pre-v0.10.1" + ] }, "credit_msat": { "type": "msat", "description": [ "Amount credited to the account." - ], - "added": "pre-v0.10.1" + ] }, "debit_msat": { "type": "msat", "description": [ "Amount debited from the account." 
- ], - "added": "pre-v0.10.1" + ] }, "timestamp": { "type": "u64", "description": [ "The UNIX timestamp when the movement was recorded." - ], - "added": "pre-v0.10.1" + ] }, "tags": { "type": "array", "description": [ "Deprecated legacy combined tag array, emitted only for deprecated output compatibility." ], - "added": "pre-v0.10.1", "deprecated": [ "v25.09", "v26.09" ], "items": { "type": "string", - "added": "pre-v0.10.1", "description": [ "A legacy movement tag." ] @@ -38361,7 +38322,6 @@ "added": "v25.09", "items": { "type": "string", - "added": "v25.09", "description": [ "An additional movement tag." ] @@ -38371,29 +38331,25 @@ "type": "hash", "description": [ "The payment hash associated with the movement, if any." - ], - "added": "pre-v0.10.1" + ] }, "part_id": { "type": "u64", "description": [ "The part id for a multi-part channel payment." - ], - "added": "pre-v0.10.1" + ] }, "group_id": { "type": "u64", "description": [ "The group id for a multi-part channel payment." - ], - "added": "pre-v0.10.1" + ] }, "fees_msat": { "type": "msat", "description": [ "The fees associated with a channel movement." - ], - "added": "pre-v0.10.1" + ] }, "utxo": { "type": "outpoint", @@ -38406,22 +38362,19 @@ "type": "pubkey", "description": [ "The peer associated with a chain movement, if any." - ], - "added": "pre-v0.10.1" + ] }, "originating_account": { "type": "string", "description": [ "The originating account for an external chain movement, if any." - ], - "added": "pre-v0.10.1" + ] }, "txid": { "type": "txid", "description": [ "Deprecated legacy field for the spending transaction id." ], - "added": "pre-v0.10.1", "deprecated": [ "v25.09", "v26.09" @@ -38439,7 +38392,6 @@ "description": [ "Deprecated legacy field for the outpoint transaction id." ], - "added": "pre-v0.10.1", "deprecated": [ "v25.09", "v26.09" @@ -38450,7 +38402,6 @@ "description": [ "Deprecated legacy field for the outpoint output index." 
], - "added": "pre-v0.10.1", "deprecated": [ "v25.09", "v26.09" @@ -38460,22 +38411,19 @@ "type": "msat", "description": [ "The output amount for a chain movement." - ], - "added": "pre-v0.10.1" + ] }, "output_count": { "type": "u32", "description": [ "The number of outputs in the spending transaction, if known." - ], - "added": "pre-v0.10.1" + ] }, "blockheight": { "type": "u32", "description": [ "The blockheight for a chain movement." - ], - "added": "pre-v0.10.1" + ] } }, "allOf": [ @@ -38524,7 +38472,7 @@ "notification": "connect", "title": "Notification for connection with a peer", "description": [ - "The **connect** informs whenever the node is connected to a peer." + "The **connect** notification informs whenever the node is connected to a peer." ], "additionalProperties": false, "added": "pre-v0.10.1", @@ -38540,8 +38488,7 @@ "type": "pubkey", "description": [ "The id of the peer which sent the custom message" - ], - "added": "pre-v0.10.1" + ] }, "direction": { "type": "string", @@ -38551,15 +38498,13 @@ ], "description": [ "Direction of the connection" - ], - "added": "pre-v0.10.1" + ] }, "address": { "type": "object", "description": [ "Address information (mainly useful if **direction** is *out*)" ], - "added": "pre-v0.10.1", "additionalProperties": true, "required": [ "type" @@ -38567,7 +38512,6 @@ "properties": { "type": { "type": "string", - "added": "pre-v0.10.1", "enum": [ "local socket", "ipv4", @@ -38602,7 +38546,6 @@ "type": {}, "socket": { "type": "string", - "added": "pre-v0.10.1", "description": [ "Socket filename" ] @@ -38634,14 +38577,12 @@ "type": {}, "address": { "type": "string", - "added": "pre-v0.10.1", "description": [ "Address in expected format for **type**" ] }, "port": { "type": "u16", - "added": "pre-v0.10.1", "description": [ "Port number" ] @@ -38675,15 +38616,13 @@ "type": "pubkey", "description": [ "The id of the peer which sent the custom message" - ], - "added": "v24.02" + ] }, "payload": { "type": "hex", "description": [ 
"The hex-encoded payload. The first 2 bytes represent the BOLT-8 message type followed by the message content" - ], - "added": "v24.02" + ] } } } @@ -38698,11 +38637,8 @@ "The **deprecated_oneshot** notification informs a plugin that the immediately following command uses different deprecation handling than the global setting." ], "added": "v24.02", - "request": { - "added": "v24.02" - }, + "request": {}, "response": { - "added": "v24.02", "required": [ "deprecated_ok" ], @@ -38711,8 +38647,7 @@ "type": "boolean", "description": [ "Whether deprecated APIs are allowed for the immediately following command." - ], - "added": "v24.02" + ] } } } @@ -38727,11 +38662,8 @@ "The **disconnect** notification informs whenever the node disconnects from a peer." ], "added": "pre-v0.10.1", - "request": { - "added": "pre-v0.10.1" - }, + "request": {}, "response": { - "added": "pre-v0.10.1", "required": [ "id" ], @@ -38740,8 +38672,7 @@ "type": "pubkey", "description": [ "The id of the peer that disconnected." - ], - "added": "pre-v0.10.1" + ] } } } @@ -38756,11 +38687,8 @@ "The **forward_event** notification informs whenever the status of a forwarded HTLC changes." ], "added": "pre-v0.10.1", - "request": { - "added": "pre-v0.10.1" - }, + "request": {}, "response": { - "added": "pre-v0.10.1", "required": [ "payment_hash", "in_channel", @@ -38773,43 +38701,37 @@ "type": "hash", "description": [ "The payment hash for the forwarded HTLC." - ], - "added": "pre-v0.10.1" + ] }, "in_channel": { "type": "short_channel_id", "description": [ "The inbound channel that received the HTLC." - ], - "added": "pre-v0.10.1" + ] }, "out_channel": { "type": "short_channel_id", "description": [ "The outbound channel used for the forward, if one was selected." - ], - "added": "pre-v0.10.1" + ] }, "in_msat": { "type": "msat", "description": [ "The amount of the incoming HTLC." 
- ], - "added": "pre-v0.10.1" + ] }, "out_msat": { "type": "msat", "description": [ "The amount forwarded to the outbound channel, if known." - ], - "added": "pre-v0.10.1" + ] }, "fee_msat": { "type": "msat", "description": [ "The fee earned on the forward, if an outbound amount is known." - ], - "added": "pre-v0.10.1" + ] }, "status": { "type": "string", @@ -38821,22 +38743,19 @@ ], "description": [ "The current forwarding status." - ], - "added": "pre-v0.10.1" + ] }, "failcode": { "type": "u32", "description": [ "The BOLT 4 failcode when the forward failed locally." - ], - "added": "pre-v0.10.1" + ] }, "failreason": { "type": "string", "description": [ "The symbolic failcode name when the forward failed locally." - ], - "added": "pre-v0.10.1" + ] }, "style": { "type": "string", @@ -38853,15 +38772,13 @@ "type": "number", "description": [ "The UNIX timestamp when the HTLC was received." - ], - "added": "pre-v0.10.1" + ] }, "resolved_time": { "type": "number", "description": [ "The UNIX timestamp when the HTLC was resolved, if it has been resolved." - ], - "added": "pre-v0.10.1" + ] } } } @@ -38876,11 +38793,8 @@ "The **invoice_creation** notification informs whenever a new invoice is created." ], "added": "pre-v0.10.1", - "request": { - "added": "pre-v0.10.1" - }, + "request": {}, "response": { - "added": "pre-v0.10.1", "required": [ "preimage", "label" @@ -38890,22 +38804,19 @@ "type": "msat", "description": [ "The invoice amount, if the invoice amount was specified at creation time." - ], - "added": "pre-v0.10.1" + ] }, "preimage": { "type": "secret", "description": [ "The payment preimage for the created invoice." - ], - "added": "pre-v0.10.1" + ] }, "label": { "type": "string", "description": [ "The label of the created invoice." - ], - "added": "pre-v0.10.1" + ] } } } @@ -38920,11 +38831,8 @@ "The **invoice_payment** notification informs whenever an invoice is paid." 
], "added": "pre-v0.10.1", - "request": { - "added": "pre-v0.10.1" - }, + "request": {}, "response": { - "added": "pre-v0.10.1", "required": [ "msat", "preimage", @@ -38935,15 +38843,13 @@ "type": "msat", "description": [ "The amount paid for the invoice." - ], - "added": "pre-v0.10.1" + ] }, "preimage": { "type": "secret", "description": [ "The payment preimage for the invoice." - ], - "added": "pre-v0.10.1" + ] }, "outpoint": { "type": "outpoint", @@ -38956,8 +38862,7 @@ "type": "string", "description": [ "The label of the paid invoice." - ], - "added": "pre-v0.10.1" + ] } } } @@ -38972,11 +38877,8 @@ "The **log** notification informs whenever lightningd emits a log message and exposes the same payload shape as **warning**." ], "added": "v24.02", - "request": { - "added": "v24.02" - }, + "request": {}, "response": { - "added": "v24.02", "required": [ "level", "time", @@ -38997,36 +38899,31 @@ ], "description": [ "The log level of the emitted message." - ], - "added": "v24.02" + ] }, "time": { "type": "string", "description": [ "The event time as seconds since epoch with subsecond precision." - ], - "added": "v24.02" + ] }, "timestamp": { "type": "string", "description": [ "The event time in ISO 8601 format." - ], - "added": "v24.02" + ] }, "source": { "type": "string", "description": [ "The source component that emitted the log entry." - ], - "added": "v24.02" + ] }, "log": { "type": "string", "description": [ "The original log message." - ], - "added": "v24.02" + ] } } } @@ -39041,11 +38938,8 @@ "The **onionmessage_forward_fail** notification informs whenever lightningd cannot forward an onion message." ], "added": "v24.11", - "request": { - "added": "v24.11" - }, + "request": {}, "response": { - "added": "v24.11", "required": [ "source", "incoming", @@ -39056,43 +38950,37 @@ "type": "pubkey", "description": [ "The node that sent the incoming onion message." 
- ], - "added": "v24.11" + ] }, "incoming": { "type": "hex", "description": [ "The incoming onion message payload." - ], - "added": "v24.11" + ] }, "path_key": { "type": "pubkey", "description": [ "The path key used for the attempted forward." - ], - "added": "v24.11" + ] }, "outgoing": { "type": "hex", "description": [ "The outgoing onion message payload if a next hop had already been selected." - ], - "added": "v24.11" + ] }, "next_node_id": { "type": "pubkey", "description": [ "The next node id for the attempted forward when the next hop is a node." - ], - "added": "v24.11" + ] }, "next_short_channel_id_dir": { "type": "short_channel_id_dir", "description": [ "The next short_channel_id_dir for the attempted forward when the next hop is a channel." - ], - "added": "v24.11" + ] } } } @@ -39107,11 +38995,8 @@ "The **openchannel_peer_sigs** notification informs whenever a peer provides signatures for an in-progress channel open." ], "added": "pre-v0.10.1", - "request": { - "added": "pre-v0.10.1" - }, + "request": {}, "response": { - "added": "pre-v0.10.1", "required": [ "channel_id", "signed_psbt" @@ -39121,15 +39006,13 @@ "type": "hash", "description": [ "The channel id for the channel open attempt." - ], - "added": "pre-v0.10.1" + ] }, "signed_psbt": { "type": "string", "description": [ "The signed PSBT from the peer." - ], - "added": "pre-v0.10.1" + ] } } } @@ -39144,11 +39027,8 @@ "The **plugin_started** notification informs whenever a plugin has started and registered its methods." ], "added": "v25.02", - "request": { - "added": "v25.02" - }, + "request": {}, "response": { - "added": "v25.02", "required": [ "plugin_name", "plugin_path", @@ -39159,28 +39039,24 @@ "type": "string", "description": [ "The short name of the plugin." - ], - "added": "v25.02" + ] }, "plugin_path": { "type": "string", "description": [ "The executable path of the plugin." 
- ], - "added": "v25.02" + ] }, "methods": { "type": "array", "description": [ "The RPC methods registered by the plugin." ], - "added": "v25.02", "items": { "type": "string", "description": [ "A method registered by the plugin." - ], - "added": "v25.02" + ] } } } @@ -39196,11 +39072,8 @@ "The **plugin_stopped** notification informs whenever a plugin stops and exposes the same payload shape as **plugin_started**." ], "added": "v25.02", - "request": { - "added": "v25.02" - }, + "request": {}, "response": { - "added": "v25.02", "required": [ "plugin_name", "plugin_path", @@ -39211,28 +39084,24 @@ "type": "string", "description": [ "The short name of the plugin." - ], - "added": "v25.02" + ] }, "plugin_path": { "type": "string", "description": [ "The executable path of the plugin." - ], - "added": "v25.02" + ] }, "methods": { "type": "array", "description": [ "The RPC methods registered by the plugin." ], - "added": "v25.02", "items": { "type": "string", "description": [ "A method registered by the plugin." - ], - "added": "v25.02" + ] } } } @@ -39248,11 +39117,8 @@ "The **sendpay_failure** notification informs whenever a payment attempt completes with failed status." ], "added": "pre-v0.10.1", - "request": { - "added": "pre-v0.10.1" - }, + "request": {}, "response": { - "added": "pre-v0.10.1", "required": [ "code", "message", @@ -39263,15 +39129,13 @@ "type": "integer", "description": [ "The JSON-RPC error code for the payment failure." - ], - "added": "pre-v0.10.1" + ] }, "message": { "type": "string", "description": [ "The human-readable payment failure message." - ], - "added": "pre-v0.10.1" + ] }, "data": { "type": "object", @@ -39279,7 +39143,6 @@ "description": [ "The structured payment failure details." ], - "added": "pre-v0.10.1", "properties": { "created_index": { "type": "u64", @@ -39292,22 +39155,19 @@ "type": "u64", "description": [ "Old synonym for created_index." 
- ], - "added": "pre-v0.10.1" + ] }, "payment_hash": { "type": "hash", "description": [ "The hash of the payment preimage." - ], - "added": "pre-v0.10.1" + ] }, "groupid": { "type": "u64", "description": [ "Grouping key for multiple attempts on the same payment." - ], - "added": "pre-v0.10.1" + ] }, "updated_index": { "type": "u64", @@ -39320,43 +39180,37 @@ "type": "u64", "description": [ "Part number for a multi-part payment." - ], - "added": "pre-v0.10.1" + ] }, "destination": { "type": "pubkey", "description": [ "The final destination of the payment, if known." - ], - "added": "pre-v0.10.1" + ] }, "amount_msat": { "type": "msat", "description": [ "The amount intended for the destination, if known." - ], - "added": "pre-v0.10.1" + ] }, "amount_sent_msat": { "type": "msat", "description": [ "The total amount sent including fees." - ], - "added": "pre-v0.10.1" + ] }, "created_at": { "type": "u64", "description": [ "The UNIX timestamp when this payment was initiated." - ], - "added": "pre-v0.10.1" + ] }, "completed_at": { "type": "u64", "description": [ "The UNIX timestamp when this payment completed." - ], - "added": "pre-v0.10.1" + ] }, "status": { "type": "string", @@ -39367,29 +39221,25 @@ ], "description": [ "The terminal status for a failed sendpay notification." - ], - "added": "pre-v0.10.1" + ] }, "payment_preimage": { "type": "secret", "description": [ "The payment preimage, if the payment later completed through another path." - ], - "added": "pre-v0.10.1" + ] }, "label": { "type": "string", "description": [ "The optional label associated with the payment." - ], - "added": "pre-v0.10.1" + ] }, "bolt11": { "type": "string", "description": [ "The bolt11 invoice string, if present." - ], - "added": "pre-v0.10.1" + ] }, "bolt12": { "type": "string", @@ -39402,71 +39252,61 @@ "type": "string", "description": [ "The description associated with the payment, if present." 
- ], - "added": "pre-v0.10.1" + ] }, "erroronion": { "type": "hex", "description": [ "The raw error onion if one was retained for the payment." - ], - "added": "pre-v0.10.1" + ] }, "onionreply": { "type": "hex", "description": [ "The onion reply for an unparseable onion failure." - ], - "added": "pre-v0.10.1" + ] }, "erring_index": { "type": "u32", "description": [ "The position in the route where the failure occurred." - ], - "added": "pre-v0.10.1" + ] }, "failcode": { "type": "u32", "description": [ "The BOLT 4 failcode." - ], - "added": "pre-v0.10.1" + ] }, "failcodename": { "type": "string", "description": [ "The symbolic name for the failcode, if known." - ], - "added": "pre-v0.10.1" + ] }, "erring_node": { "type": "pubkey", "description": [ "The node that produced the failure, if known." - ], - "added": "pre-v0.10.1" + ] }, "erring_channel": { "type": "short_channel_id", "description": [ "The channel that produced the failure, if known." - ], - "added": "pre-v0.10.1" + ] }, "erring_direction": { "type": "u32", "description": [ "The direction within the erring channel." - ], - "added": "pre-v0.10.1" + ] }, "raw_message": { "type": "hex", "description": [ "The raw failure message payload, if present." - ], - "added": "pre-v0.10.1" + ] } } } @@ -39483,11 +39323,8 @@ "The **sendpay_success** notification informs whenever a payment attempt completes successfully." ], "added": "pre-v0.10.1", - "request": { - "added": "pre-v0.10.1" - }, + "request": {}, "response": { - "added": "pre-v0.10.1", "required": [ "created_index", "id", @@ -39509,22 +39346,19 @@ "type": "u64", "description": [ "Old synonym for created_index." - ], - "added": "pre-v0.10.1" + ] }, "payment_hash": { "type": "hash", "description": [ "The hash of the payment preimage." - ], - "added": "pre-v0.10.1" + ] }, "groupid": { "type": "u64", "description": [ "Grouping key for multiple attempts on the same payment." 
- ], - "added": "pre-v0.10.1" + ] }, "updated_index": { "type": "u64", @@ -39537,43 +39371,37 @@ "type": "u64", "description": [ "Part number for a multi-part payment." - ], - "added": "pre-v0.10.1" + ] }, "destination": { "type": "pubkey", "description": [ "The final destination of the payment, if known." - ], - "added": "pre-v0.10.1" + ] }, "amount_msat": { "type": "msat", "description": [ "The amount delivered to the destination, if known." - ], - "added": "pre-v0.10.1" + ] }, "amount_sent_msat": { "type": "msat", "description": [ "The total amount sent including fees." - ], - "added": "pre-v0.10.1" + ] }, "created_at": { "type": "u64", "description": [ "The UNIX timestamp when this payment was initiated." - ], - "added": "pre-v0.10.1" + ] }, "completed_at": { "type": "u64", "description": [ "The UNIX timestamp when this payment completed." - ], - "added": "pre-v0.10.1" + ] }, "status": { "type": "string", @@ -39582,29 +39410,25 @@ ], "description": [ "The terminal status for a successful sendpay notification." - ], - "added": "pre-v0.10.1" + ] }, "payment_preimage": { "type": "secret", "description": [ "The payment preimage proving success." - ], - "added": "pre-v0.10.1" + ] }, "label": { "type": "string", "description": [ "The optional label associated with the payment." - ], - "added": "pre-v0.10.1" + ] }, "bolt11": { "type": "string", "description": [ "The bolt11 invoice string, if present." - ], - "added": "pre-v0.10.1" + ] }, "bolt12": { "type": "string", @@ -39617,15 +39441,13 @@ "type": "string", "description": [ "The description associated with the payment, if present." - ], - "added": "pre-v0.10.1" + ] }, "erroronion": { "type": "hex", "description": [ "The raw error onion if one was retained for the payment." - ], - "added": "pre-v0.10.1" + ] } } } @@ -39637,14 +39459,14 @@ "notification": "shutdown", "title": "Notification for plugin shutdown", "description": [ - "The **shutdown** notification informs a plugin that lightningd is shutting down." 
+ "The **shutdown** notification informs a plugin that lightningd is shutting down.",
+ "",
+ "It is expected that the plugin terminates its own process when subscribing to this notification.",
+ "If it does not do so, lightningd will kill the plugin's process after a timeout."
 ],
 "added": "pre-v0.10.1",
- "request": {
- "added": "pre-v0.10.1"
- },
+ "request": {},
 "response": {
- "added": "pre-v0.10.1",
 "properties": {}
 }
 },
 {
 "$schema": "../rpc-schema-draft.json",
 "added": "pre-v0.10.1",
 "type": "object",
 "notification": "warning",
 "title": "Notification for warning and error events",
 "description": [
 "The **warning** notification informs whenever lightningd emits a warning or error level event that should be surfaced to plugins."
 ],
 "added": "pre-v0.10.1",
- "request": {
- "added": "pre-v0.10.1"
- },
+ "request": {},
 "response": {
- "added": "pre-v0.10.1",
 "required": [
 "level",
 "time",
 "source",
 "log"
 ],
 "properties": {
 "level": {
 "type": "string",
 "enum": [
 "warn",
 "error"
 ],
 "description": [
 "The log level of the warning notification."
- ],
- "added": "pre-v0.10.1"
+ ]
 },
 "time": {
 "type": "string",
 "description": [
 "The event time as seconds since epoch with subsecond precision."
- ],
- "added": "pre-v0.10.1"
+ ]
 },
 "timestamp": {
 "type": "string",
@@ -39700,15 +39517,13 @@
 "type": "string",
 "description": [
 "The source component that emitted the log entry."
- ],
- "added": "pre-v0.10.1"
+ ]
 },
 "log": {
 "type": "string",
 "description": [
 "The original log message."
- ],
- "added": "pre-v0.10.1"
+ ]
 }
 }
 }
@@ -39723,11 +39538,8 @@
 "The **pay_part_end** notification is emitted by the xpay plugin when a payment part completes."
 ],
 "added": "v25.09",
- "request": {
- "added": "v25.09"
- },
+ "request": {},
 "response": {
- "added": "v25.09",
 "required": [
 "status",
 "duration",
 "payment_hash",
 "groupid",
 "partid"
 ],
 "properties": {
 "status": {
 "type": "string",
 "enum": [
 "success",
 "failure"
 ],
 "description": [
 "Whether the payment part succeeded or failed."
- ],
- "added": "v25.09"
+ ]
 },
 "duration": {
 "type": "number",
 "description": [
 "The time in seconds from send to result."
- ],
- "added": "v25.09"
+ ]
 },
 "payment_hash": {
 "type": "hash",
 "description": [
 "The payment hash for the xpay invocation." 
- ], - "added": "v25.09" + ] }, "groupid": { "type": "u64", "description": [ "The xpay group identifier for the payment attempt." - ], - "added": "v25.09" + ] }, "partid": { "type": "u64", "description": [ "The identifier for this payment part." - ], - "added": "v25.09" + ] }, "failed_msg": { "type": "hex", "description": [ "The decrypted onion error message, if available." - ], - "added": "v25.09" + ] }, "failed_node_id": { "type": "pubkey", "description": [ "The node that generated the failure, if known." - ], - "added": "v25.09" + ] }, "failed_short_channel_id": { "type": "short_channel_id", "description": [ "The short channel id complained about, if known." - ], - "added": "v25.09" + ] }, "failed_direction": { "type": "u32", "description": [ "The direction within the failed short channel id, if known." - ], - "added": "v25.09" + ] }, "error_code": { "type": "u32", "description": [ "The xpay error code, if known." - ], - "added": "v25.09" + ] }, "error_message": { "type": "string", "description": [ "The human-readable xpay error message." - ], - "added": "v25.09" + ] } } } @@ -39830,11 +39631,8 @@ "The **pay_part_start** notification is emitted by the xpay plugin when a payment part begins." ], "added": "v25.09", - "request": { - "added": "v25.09" - }, + "request": {}, "response": { - "added": "v25.09", "required": [ "payment_hash", "groupid", @@ -39848,46 +39646,39 @@ "type": "hash", "description": [ "The payment hash for the xpay invocation." - ], - "added": "v25.09" + ] }, "groupid": { "type": "u64", "description": [ "The xpay group identifier for the payment attempt." - ], - "added": "v25.09" + ] }, "partid": { "type": "u64", "description": [ "The identifier for this payment part." - ], - "added": "v25.09" + ] }, "total_payment_msat": { "type": "msat", "description": [ "The total payment amount for the xpay invocation." - ], - "added": "v25.09" + ] }, "attempt_msat": { "type": "msat", "description": [ "The amount this payment part attempts to deliver." 
- ], - "added": "v25.09" + ] }, "hops": { "type": "array", "description": [ "The route hops for this payment part." ], - "added": "v25.09", "items": { "type": "object", - "added": "v25.09", "additionalProperties": false, "required": [ "next_node", @@ -39901,36 +39692,31 @@ "type": "pubkey", "description": [ "The next node for this hop." - ], - "added": "v25.09" + ] }, "short_channel_id": { "type": "short_channel_id", "description": [ "The short channel id used for this hop." - ], - "added": "v25.09" + ] }, "direction": { "type": "u32", "description": [ "The direction within the short channel id." - ], - "added": "v25.09" + ] }, "channel_in_msat": { "type": "msat", "description": [ "The amount entering the hop." - ], - "added": "v25.09" + ] }, "channel_out_msat": { "type": "msat", "description": [ "The amount leaving the hop." - ], - "added": "v25.09" + ] } } } @@ -39944,7 +39730,7 @@ "$schema": "../rpc-schema-draft.json", "added": "pre-v0.10.1", "type": "object", - "notification": "commitment_revocation", + "hook": "commitment_revocation", "title": "Hook fired when a commitment transaction is revoked", "description": [ "The **commitment_revocation** hook is called whenever a channel state is updated, and the old state was revoked. 
State updates in Lightning consist of the following steps:", @@ -40019,7 +39805,7 @@ "$schema": "../rpc-schema-draft.json", "added": "pre-v0.10.1", "type": "object", - "notification": "custommsg", + "hook": "custommsg", "title": "Hook for handling custom peer messages", "description": [ "The **custommsg** hook is the receiving counterpart to the sendcustommsg RPC method and is called whenever a peer sends a custom message that is not handled internally by Core Lightning.", @@ -40083,7 +39869,7 @@ "$schema": "../rpc-schema-draft.json", "added": "pre-v0.10.1", "type": "object", - "notification": "db_write", + "hook": "db_write", "title": "Hook fired before database writes are committed", "description": [ "The **db_write** hook is called whenever a change is about to be committed to the database, if you are using a SQLITE3 database (the default).", @@ -40172,7 +39958,7 @@ "$schema": "../rpc-schema-draft.json", "added": "pre-v0.10.1", "type": "object", - "notification": "htlc_accepted", + "hook": "htlc_accepted", "title": "Hook for handling incoming HTLCs", "description": [ "The **htlc_accepted** hook is called whenever an incoming HTLC is accepted.", @@ -40493,7 +40279,7 @@ "$schema": "../rpc-schema-draft.json", "added": "pre-v0.10.1", "type": "object", - "notification": "invoice_payment", + "hook": "invoice_payment", "title": "Hook fired when a payment for an invoice is received", "description": [ "The **invoice_payment** hook is called whenever a valid payment for an unpaid invoice has arrived.", @@ -40501,8 +40287,8 @@ "The hook is deliberately sparse. 
Plugins can use `listinvoices` to retrieve additional information.", "", "The plugin can:", - "- accept the payment by returning {\"result\": \"continue\"}", - "- reject the payment with a generic error using {\"result\": \"reject\"}", + "- accept the payment by returning `{\"result\": \"continue\"}`", + "- reject the payment with a generic error using `{\"result\": \"reject\"}`", "- reject the payment with a custom BOLT 4 failure message using the `failure_message` field", "", "If `failure_message` is provided, the payment will be failed with that message.", @@ -40607,14 +40393,14 @@ "$schema": "../rpc-schema-draft.json", "added": "pre-v0.10.1", "type": "object", - "notification": "onion_message_recv", + "hook": "onion_message_recv", "title": "Hook for receiving unsolicited onion messages", "description": [ "The **onion_message_recv** hook is used for unsolicited onion messages (where the source knows that it is sending to this node).", "", "Replies MUST be ignored unless they use the correct path (see onion_message_recv_secret).", "", - "Returning anything other than {\"result\": \"continue\"} prevents further hook processing." + "Returning anything other than `{\"result\": \"continue\"}` prevents further hook processing." ], "request": { "required": [ @@ -40778,7 +40564,7 @@ "$schema": "../rpc-schema-draft.json", "added": "pre-v0.10.1", "type": "object", - "notification": "onion_message_recv_secret", + "hook": "onion_message_recv_secret", "title": "Hook for receiving onion messages via blinded paths", "description": [ "The **onion_message_recv_secret** hook is used when an onion message is received via a blinded path previously provided by this node.", @@ -40787,7 +40573,7 @@ "", "Replies MUST only be sent when the `pathsecret` matches expectations.", "", - "Returning anything other than {\"result\": \"continue\"} prevents further hook processing." + "Returning anything other than `{\"result\": \"continue\"}` prevents further hook processing." 
], "request": { "required": [ @@ -40963,7 +40749,7 @@ "$schema": "../rpc-schema-draft.json", "added": "pre-v0.10.1", "type": "object", - "notification": "openchannel", + "hook": "openchannel", "title": "Hook fired when a peer proposes opening a channel using v1 protocol", "description": [ "The **openchannel** hook is called whenever a remote peer tries to fund a channel using the v1 protocol, after passing basic sanity checks.", @@ -41189,7 +40975,7 @@ "$schema": "../rpc-schema-draft.json", "added": "pre-v0.10.1", "type": "object", - "notification": "openchannel2", + "hook": "openchannel2", "title": "Hook fired when a peer proposes opening a channel using v2 protocol", "description": [ "The **openchannel2** hook is called whenever a remote peer tries to fund a channel using the v2 (dual-funding) protocol, after passing basic sanity checks.", @@ -41478,7 +41264,7 @@ "$schema": "../rpc-schema-draft.json", "added": "pre-v0.10.1", "type": "object", - "notification": "openchannel2_changed", + "hook": "openchannel2_changed", "title": "Hook for handling updates to the dual-funding PSBT", "description": [ "The **openchannel2_changed** hook is called when the peer sends an updated PSBT during dual-funding channel negotiation.", @@ -41578,7 +41364,7 @@ "$schema": "../rpc-schema-draft.json", "added": "pre-v0.10.1", "type": "object", - "notification": "openchannel2_sign", + "hook": "openchannel2_sign", "title": "Hook for signing the dual-funding PSBT", "description": [ "The **openchannel2_sign** hook is called after commitment transactions have been received during dual-funding channel establishment.", @@ -41647,7 +41433,7 @@ } } }, - "example_notifications": [ + "example_hooks": [ { "method": "openchannel2_sign", "params": { @@ -41681,7 +41467,7 @@ "$schema": "../rpc-schema-draft.json", "added": "pre-v0.10.1", "type": "object", - "notification": "peer_connected", + "hook": "peer_connected", "title": "Hook fired when a peer connects and completes handshake", "description": [ 
"The **peer_connected** hook is called whenever a peer has connected and successfully completed the cryptographic handshake.", @@ -41777,7 +41563,7 @@ "$schema": "../rpc-schema-draft.json", "added": "pre-v0.10.1", "type": "object", - "notification": "rbf_channel", + "hook": "rbf_channel", "title": "Hook for handling RBF channel funding requests", "description": [ "The **rbf_channel** hook is called when a peer proposes replacing the funding transaction of an existing channel using Replace-By-Fee (RBF).", @@ -41934,7 +41720,7 @@ "$schema": "../rpc-schema-draft.json", "added": "v23.08", "type": "object", - "notification": "recover", + "hook": "recover", "title": "Hook fired when node starts in recovery mode", "description": [ "The **recover** hook is called whenever the node is started using the --recovery flag.", @@ -41981,7 +41767,7 @@ "$schema": "../rpc-schema-draft.json", "added": "pre-v0.10.1", "type": "object", - "notification": "rpc_command", + "hook": "rpc_command", "title": "Hook for intercepting and modifying RPC commands", "description": [ "The **rpc_command** hook allows a plugin to take over any RPC command.", diff --git a/doc/schemas/bkpr-report.json b/doc/schemas/bkpr-report.json index 430d5019fdf7..3232247f5595 100644 --- a/doc/schemas/bkpr-report.json +++ b/doc/schemas/bkpr-report.json @@ -17,47 +17,47 @@ "type": "string", "description": [ "This format string is used for each income event (note that `lightning-cli` can get confused if the format begins and ends with `{` and `}`, so you may need to add a space). The following tags in braces are replaced:", - "{account}: account name (channel id, or 'wallet')", - "{tag}: event tag. 
This will be one of:", - " * `deposit`: an onchain send to the wallet by `outpoint`.", - " * `htlc_fulfill`: an onchain HTLC fulfill (due to unilaterally closed channel) at `outpoint`.", - " * `invoice`: either incoming (positive credit) or outgoing (positive debit) payment.", - " * `invoice_fee`: the routing fee paid to pay an outgoing invoice", - " * `journal_entry`: an accounting fixup, caused by loss of data (or, a node which predates bookkeeper)", - " * `lease_fee`: a fee paid or received to lease a channel via the experimental liquidity advertisement option", - " * `onchain_fee,`: a miner fee paid to open/close a channel, or make a bitcoin payment. The `txid` will correspond to a `withdrawal` `outpoint` for withdrawl", - " * `pushed`: an amount pushed to or from us on channel open.", - " * `rebalance_fee`: routing fee paid for sending a payment to ourselves.", - " * `routed`: credit gained from routing a payment", - " * `withdrawal`: debit from an onchain spend.", - "{description}: description as provided in the invoice, if present", - "{credit}: credit amount in BTC", - "{debit}: debit amount in BTC", - "{fees}: fee amount in BTC", - "{localtime}: event timestamp in local time as YYYY-MM-DD HH:MM:SS", - "{utctime}: event timestamp in UTC as YYYY-MM-DD HH:MM:SS", - "{outpoint}: outpoint, if present", - "{txid}: txid, if present", - "{payment_id}: payment hash, if present", - "{bkpr-currency}: value of bkpr-currency, if any", - "{currencyrate}: exchange rate for 1 BTC at that event time, if available", - "{creditdebit}: +credit or -debit (or 0) in BTC", - "{currencycredit}: credit amount converted into bkpr-currency", - "{currencydebit}: debit amount converted into bkpr-currency", - "{currencycreditdebit}: +credit or -debit (or 0) in bkpr-currency", + " * `{account}`: account name (channel id, or 'wallet')", + " * `{tag}`: event tag. 
This will be one of:", +        " * `deposit`: an onchain send to the wallet by `outpoint`.", +        " * `htlc_fulfill`: an onchain HTLC fulfill (due to unilaterally closed channel) at `outpoint`.", +        " * `invoice`: either incoming (positive credit) or outgoing (positive debit) payment.", +        " * `invoice_fee`: the routing fee paid to pay an outgoing invoice", +        " * `journal_entry`: an accounting fixup, caused by loss of data (or, a node which predates bookkeeper)", +        " * `lease_fee`: a fee paid or received to lease a channel via the experimental liquidity advertisement option", +        " * `onchain_fee`: a miner fee paid to open/close a channel, or make a bitcoin payment. The `txid` will correspond to a `withdrawal` `outpoint` for withdrawal", +        " * `pushed`: an amount pushed to or from us on channel open.", +        " * `rebalance_fee`: routing fee paid for sending a payment to ourselves.", +        " * `routed`: credit gained from routing a payment", +        " * `withdrawal`: debit from an onchain spend.", +        " * `{description}`: description as provided in the invoice, if present", +        " * `{credit}`: credit amount in BTC", +        " * `{debit}`: debit amount in BTC", +        " * `{fees}`: fee amount in BTC", +        " * `{localtime}`: event timestamp in local time as YYYY-MM-DD HH:MM:SS", +        " * `{utctime}`: event timestamp in UTC as YYYY-MM-DD HH:MM:SS", +        " * `{outpoint}`: outpoint, if present", +        " * `{txid}`: txid, if present", +        " * `{payment_id}`: payment hash, if present", +        " * `{bkpr-currency}`: value of bkpr-currency, if any", +        " * `{currencyrate}`: exchange rate for 1 BTC at that event time, if available", +        " * `{creditdebit}`: +credit or -debit (or 0) in BTC", +        " * `{currencycredit}`: credit amount converted into bkpr-currency", +        " * `{currencydebit}`: debit amount converted into bkpr-currency", +        " * `{currencycreditdebit}`: +credit or -debit (or 0) in bkpr-currency", "", -        "If a field is unavailable, it expands to an empty string (or 0 for credit, debit, fees and creditdebit).", +        " If a field is 
unavailable, it expands to an empty string (or 0 for credit, debit, fees and creditdebit).", "", - "Tags support C-style conditional syntax: {tag[?if-set][:if-not-set]}", - " * if-set: text to use when the tag is present (and non-zero for credit, debit, fees and creditdebit). Default is the tag value itself.", - " * if-not-set: text to use when the tag is absent (or zero for amount fields). Default is empty string (or 0 for amount fields).", - "Either or both parts may be omitted, and each part can itself contain tags. For example:", - " * {outpoint:NONE}: the outpoint value, or 'NONE' if not available", - " * {credit:0.00}: the credit value, or '0.00' if zero", - " * {outpoint?[{outpoint}]:NONE}: '[]' if outpoint is available, or 'NONE' if not", - " * {payment_id:{txid:UNKNOWN}}: the payment_id, or the txid if no payment_id, or 'UNKNOWN' if neither", + " Tags support C-style conditional syntax: `{tag[?if-set][:if-not-set]}`", + " * if-set: text to use when the tag is present (and non-zero for credit, debit, fees and creditdebit). Default is the tag value itself.", + " * if-not-set: text to use when the tag is absent (or zero for amount fields). Default is empty string (or 0 for amount fields).", + " Either or both parts may be omitted, and each part can itself contain tags. For example:", + " * `{outpoint:NONE}`: the outpoint value, or 'NONE' if not available", + " * `{credit:0.00}`: the credit value, or '0.00' if zero", + " * `{outpoint?[{outpoint}]:NONE}`: `[]` if outpoint is available, or 'NONE' if not", + " * `{payment_id:{txid:UNKNOWN}}`: the payment_id, or the txid if no payment_id, or 'UNKNOWN' if neither", "", - "To include a literal {, write {{." + " To include a literal `{`, write `{{`." 
] }, "headers": { diff --git a/doc/schemas/hook/commitment_revocation.json b/doc/schemas/hook/commitment_revocation.json index f655a076a36b..3766ebfefd7b 100644 --- a/doc/schemas/hook/commitment_revocation.json +++ b/doc/schemas/hook/commitment_revocation.json @@ -2,7 +2,7 @@ "$schema": "../rpc-schema-draft.json", "added": "pre-v0.10.1", "type": "object", - "notification": "commitment_revocation", + "hook": "commitment_revocation", "title": "Hook fired when a commitment transaction is revoked", "description": [ "The **commitment_revocation** hook is called whenever a channel state is updated, and the old state was revoked. State updates in Lightning consist of the following steps:", diff --git a/doc/schemas/hook/custommsg.json b/doc/schemas/hook/custommsg.json index b1077a1515ff..88b783da2efb 100644 --- a/doc/schemas/hook/custommsg.json +++ b/doc/schemas/hook/custommsg.json @@ -2,7 +2,7 @@ "$schema": "../rpc-schema-draft.json", "added": "pre-v0.10.1", "type": "object", - "notification": "custommsg", + "hook": "custommsg", "title": "Hook for handling custom peer messages", "description": [ "The **custommsg** hook is the receiving counterpart to the sendcustommsg RPC method and is called whenever a peer sends a custom message that is not handled internally by Core Lightning.", diff --git a/doc/schemas/hook/db_write.json b/doc/schemas/hook/db_write.json index 317faff7984c..d21d85ff3935 100644 --- a/doc/schemas/hook/db_write.json +++ b/doc/schemas/hook/db_write.json @@ -2,7 +2,7 @@ "$schema": "../rpc-schema-draft.json", "added": "pre-v0.10.1", "type": "object", - "notification": "db_write", + "hook": "db_write", "title": "Hook fired before database writes are committed", "description": [ "The **db_write** hook is called whenever a change is about to be committed to the database, if you are using a SQLITE3 database (the default).", diff --git a/doc/schemas/hook/htlc_accepted.json b/doc/schemas/hook/htlc_accepted.json index bbba2a6cc4d2..6a60cf3c1507 100644 --- 
a/doc/schemas/hook/htlc_accepted.json +++ b/doc/schemas/hook/htlc_accepted.json @@ -2,7 +2,7 @@ "$schema": "../rpc-schema-draft.json", "added": "pre-v0.10.1", "type": "object", - "notification": "htlc_accepted", + "hook": "htlc_accepted", "title": "Hook for handling incoming HTLCs", "description": [ "The **htlc_accepted** hook is called whenever an incoming HTLC is accepted.", diff --git a/doc/schemas/hook/invoice_payment.json b/doc/schemas/hook/invoice_payment.json index 3b9657cebab7..fb3bcf4a3519 100644 --- a/doc/schemas/hook/invoice_payment.json +++ b/doc/schemas/hook/invoice_payment.json @@ -2,7 +2,7 @@ "$schema": "../rpc-schema-draft.json", "added": "pre-v0.10.1", "type": "object", - "notification": "invoice_payment", + "hook": "invoice_payment", "title": "Hook fired when a payment for an invoice is received", "description": [ "The **invoice_payment** hook is called whenever a valid payment for an unpaid invoice has arrived.", @@ -10,8 +10,8 @@ "The hook is deliberately sparse. Plugins can use `listinvoices` to retrieve additional information.", "", "The plugin can:", - "- accept the payment by returning {\"result\": \"continue\"}", - "- reject the payment with a generic error using {\"result\": \"reject\"}", + "- accept the payment by returning `{\"result\": \"continue\"}`", + "- reject the payment with a generic error using `{\"result\": \"reject\"}`", "- reject the payment with a custom BOLT 4 failure message using the `failure_message` field", "", "If `failure_message` is provided, the payment will be failed with that message.", diff --git a/doc/schemas/hook/onion_message_recv.json b/doc/schemas/hook/onion_message_recv.json index c5c837370405..81515883bc6c 100644 --- a/doc/schemas/hook/onion_message_recv.json +++ b/doc/schemas/hook/onion_message_recv.json @@ -2,14 +2,14 @@ "$schema": "../rpc-schema-draft.json", "added": "pre-v0.10.1", "type": "object", - "notification": "onion_message_recv", + "hook": "onion_message_recv", "title": "Hook for receiving 
unsolicited onion messages", "description": [ "The **onion_message_recv** hook is used for unsolicited onion messages (where the source knows that it is sending to this node).", "", "Replies MUST be ignored unless they use the correct path (see onion_message_recv_secret).", "", - "Returning anything other than {\"result\": \"continue\"} prevents further hook processing." + "Returning anything other than `{\"result\": \"continue\"}` prevents further hook processing." ], "request": { "required": [ diff --git a/doc/schemas/hook/onion_message_recv_secret.json b/doc/schemas/hook/onion_message_recv_secret.json index 55ada6b4a45b..92e0b4665482 100644 --- a/doc/schemas/hook/onion_message_recv_secret.json +++ b/doc/schemas/hook/onion_message_recv_secret.json @@ -2,7 +2,7 @@ "$schema": "../rpc-schema-draft.json", "added": "pre-v0.10.1", "type": "object", - "notification": "onion_message_recv_secret", + "hook": "onion_message_recv_secret", "title": "Hook for receiving onion messages via blinded paths", "description": [ "The **onion_message_recv_secret** hook is used when an onion message is received via a blinded path previously provided by this node.", @@ -11,7 +11,7 @@ "", "Replies MUST only be sent when the `pathsecret` matches expectations.", "", - "Returning anything other than {\"result\": \"continue\"} prevents further hook processing." + "Returning anything other than `{\"result\": \"continue\"}` prevents further hook processing." 
], "request": { "required": [ diff --git a/doc/schemas/hook/openchannel.json b/doc/schemas/hook/openchannel.json index 260614de7a99..51d368500754 100644 --- a/doc/schemas/hook/openchannel.json +++ b/doc/schemas/hook/openchannel.json @@ -2,7 +2,7 @@ "$schema": "../rpc-schema-draft.json", "added": "pre-v0.10.1", "type": "object", - "notification": "openchannel", + "hook": "openchannel", "title": "Hook fired when a peer proposes opening a channel using v1 protocol", "description": [ "The **openchannel** hook is called whenever a remote peer tries to fund a channel using the v1 protocol, after passing basic sanity checks.", diff --git a/doc/schemas/hook/openchannel2.json b/doc/schemas/hook/openchannel2.json index 4b551102bc56..51a10c6ae432 100644 --- a/doc/schemas/hook/openchannel2.json +++ b/doc/schemas/hook/openchannel2.json @@ -2,7 +2,7 @@ "$schema": "../rpc-schema-draft.json", "added": "pre-v0.10.1", "type": "object", - "notification": "openchannel2", + "hook": "openchannel2", "title": "Hook fired when a peer proposes opening a channel using v2 protocol", "description": [ "The **openchannel2** hook is called whenever a remote peer tries to fund a channel using the v2 (dual-funding) protocol, after passing basic sanity checks.", diff --git a/doc/schemas/hook/openchannel2_changed.json b/doc/schemas/hook/openchannel2_changed.json index 0d19729ca046..327e3b250985 100644 --- a/doc/schemas/hook/openchannel2_changed.json +++ b/doc/schemas/hook/openchannel2_changed.json @@ -2,7 +2,7 @@ "$schema": "../rpc-schema-draft.json", "added": "pre-v0.10.1", "type": "object", - "notification": "openchannel2_changed", + "hook": "openchannel2_changed", "title": "Hook for handling updates to the dual-funding PSBT", "description": [ "The **openchannel2_changed** hook is called when the peer sends an updated PSBT during dual-funding channel negotiation.", diff --git a/doc/schemas/hook/openchannel2_sign.json b/doc/schemas/hook/openchannel2_sign.json index 3917e8b38335..75cfb92d1553 100644 
--- a/doc/schemas/hook/openchannel2_sign.json +++ b/doc/schemas/hook/openchannel2_sign.json @@ -2,7 +2,7 @@ "$schema": "../rpc-schema-draft.json", "added": "pre-v0.10.1", "type": "object", - "notification": "openchannel2_sign", + "hook": "openchannel2_sign", "title": "Hook for signing the dual-funding PSBT", "description": [ "The **openchannel2_sign** hook is called after commitment transactions have been received during dual-funding channel establishment.", @@ -71,7 +71,7 @@ } } }, - "example_notifications": [ + "example_hooks": [ { "method": "openchannel2_sign", "params": { diff --git a/doc/schemas/hook/peer_connected.json b/doc/schemas/hook/peer_connected.json index 78fd60c74611..9ac63327a64c 100644 --- a/doc/schemas/hook/peer_connected.json +++ b/doc/schemas/hook/peer_connected.json @@ -2,7 +2,7 @@ "$schema": "../rpc-schema-draft.json", "added": "pre-v0.10.1", "type": "object", - "notification": "peer_connected", + "hook": "peer_connected", "title": "Hook fired when a peer connects and completes handshake", "description": [ "The **peer_connected** hook is called whenever a peer has connected and successfully completed the cryptographic handshake.", diff --git a/doc/schemas/hook/rbf_channel.json b/doc/schemas/hook/rbf_channel.json index 6aed12464dd2..2a391de88120 100644 --- a/doc/schemas/hook/rbf_channel.json +++ b/doc/schemas/hook/rbf_channel.json @@ -2,7 +2,7 @@ "$schema": "../rpc-schema-draft.json", "added": "pre-v0.10.1", "type": "object", - "notification": "rbf_channel", + "hook": "rbf_channel", "title": "Hook for handling RBF channel funding requests", "description": [ "The **rbf_channel** hook is called when a peer proposes replacing the funding transaction of an existing channel using Replace-By-Fee (RBF).", diff --git a/doc/schemas/hook/recover.json b/doc/schemas/hook/recover.json index f9f246c7bb0f..e1a49bbdb5fc 100644 --- a/doc/schemas/hook/recover.json +++ b/doc/schemas/hook/recover.json @@ -2,7 +2,7 @@ "$schema": "../rpc-schema-draft.json", "added": 
"v23.08", "type": "object", - "notification": "recover", + "hook": "recover", "title": "Hook fired when node starts in recovery mode", "description": [ "The **recover** hook is called whenever the node is started using the --recovery flag.", diff --git a/doc/schemas/hook/rpc_command.json b/doc/schemas/hook/rpc_command.json index 4bbf8ec3dffc..26327f6ba9f8 100644 --- a/doc/schemas/hook/rpc_command.json +++ b/doc/schemas/hook/rpc_command.json @@ -2,7 +2,7 @@ "$schema": "../rpc-schema-draft.json", "added": "pre-v0.10.1", "type": "object", - "notification": "rpc_command", + "hook": "rpc_command", "title": "Hook for intercepting and modifying RPC commands", "description": [ "The **rpc_command** hook allows a plugin to take over any RPC command.", diff --git a/doc/schemas/notification/balance_snapshot.json b/doc/schemas/notification/balance_snapshot.json index b2458ecaf3e8..da52436f3bda 100644 --- a/doc/schemas/notification/balance_snapshot.json +++ b/doc/schemas/notification/balance_snapshot.json @@ -8,11 +8,8 @@ "The **balance_snapshot** notification informs whenever lightningd emits a balance snapshot for bookkeeping." ], "added": "v24.11", - "request": { - "added": "v24.11" - }, + "request": {}, "response": { - "added": "v24.11", "required": [ "node_id", "blockheight", @@ -24,32 +21,27 @@ "type": "pubkey", "description": [ "The node id the snapshot was taken for." - ], - "added": "v24.11" + ] }, "blockheight": { "type": "u32", "description": [ "The blockheight at which the snapshot was taken." - ], - "added": "v24.11" + ] }, "timestamp": { "type": "u32", "description": [ "The snapshot time as seconds since epoch." - ], - "added": "v24.11" + ] }, "accounts": { "type": "array", "description": [ "The balances for each tracked account at the snapshot time." 
], - "added": "v24.11", "items": { "type": "object", - "added": "v24.11", "additionalProperties": false, "required": [ "account_id", @@ -61,22 +53,19 @@ "type": "string", "description": [ "The account identifier for the balance." - ], - "added": "v24.11" + ] }, "balance_msat": { "type": "msat", "description": [ "The account balance in millisatoshis." - ], - "added": "v24.11" + ] }, "coin_type": { "type": "string", "description": [ "The BIP173 coin type name for the balance." - ], - "added": "v24.11" + ] } } } diff --git a/doc/schemas/notification/block_added.json b/doc/schemas/notification/block_added.json index 4bde510860cb..182decb2e3cb 100644 --- a/doc/schemas/notification/block_added.json +++ b/doc/schemas/notification/block_added.json @@ -19,15 +19,13 @@ "type": "hash", "description": [ "The hash of the block." - ], - "added": "v22.11" + ] }, "height": { "type": "u32", "description": [ "The total block height." - ], - "added": "v22.11" + ] } } } diff --git a/doc/schemas/notification/channel_open_failed.json b/doc/schemas/notification/channel_open_failed.json index 91e1a28ba1a6..809f0a873570 100644 --- a/doc/schemas/notification/channel_open_failed.json +++ b/doc/schemas/notification/channel_open_failed.json @@ -18,8 +18,7 @@ "type": "hash", "description": [ "The channel id of the channel." 
- ], - "added": "pre-v0.10.1" + ] } } } diff --git a/doc/schemas/notification/channel_opened.json b/doc/schemas/notification/channel_opened.json index 50ce4e5546ba..a59f7e5f9729 100644 --- a/doc/schemas/notification/channel_opened.json +++ b/doc/schemas/notification/channel_opened.json @@ -21,29 +21,25 @@ "type": "pubkey", "description": [ "The id of the peer which opened the channel" - ], - "added": "pre-v0.10.1" + ] }, "funding_msat": { "type": "msat", "description": [ "The amount of the funding transaction" - ], - "added": "pre-v0.10.1" + ] }, "funding_txid": { "type": "txid", "description": [ "The transaction id of the funding transaction" - ], - "added": "pre-v0.10.1" + ] }, "channel_ready": { "type": "boolean", "description": [ "true if the channel is ready" - ], - "added": "pre-v0.10.1" + ] } } } diff --git a/doc/schemas/notification/channel_state_changed.json b/doc/schemas/notification/channel_state_changed.json index d8eea70c6689..d31d6a324fe3 100644 --- a/doc/schemas/notification/channel_state_changed.json +++ b/doc/schemas/notification/channel_state_changed.json @@ -5,7 +5,7 @@ "notification": "channel_state_changed", "title": "Notification for channel state change", "description": [ - "The **channel_state_changed** informs whenever the state of the channel has been updated." + "The **channel_state_changed** notification informs whenever the state of the channel has been updated." ], "added": "pre-v0.10.1", "request": {}, @@ -22,29 +22,25 @@ "type": "pubkey", "description": [ "The peer id of the channel." - ], - "added": "pre-v0.10.1" + ] }, "channel_id": { "type": "hash", "description": [ "The channel id of the channel." - ], - "added": "pre-v0.10.1" + ] }, "short_channel_id": { "type": "short_channel_id", "description": [ "The short channel id of the channel. If the channel is not yet confirmed, this field will be null." - ], - "added": "pre-v0.10.1" + ] }, "timestamp": { "type": "string", "description": [ "The timestamp of the state change." 
- ], - "added": "pre-v0.10.1" + ] }, "old_state": { "type": "string", @@ -67,8 +63,7 @@ "description": [ "The channel state, in particular \"CHANNELD_NORMAL\" and \"CHANNELD_AWAITING_SPLICE\" mean the channel can be used normally.", "The deprecated value 'unknown' is also present for new channels: after v26.04 this field will be omitted instead." - ], - "added": "pre-v0.10.1" + ] }, "new_state": { "type": "string", @@ -90,10 +85,9 @@ "CLOSED" ], "description": [ - "The channel state, in particular \"CHANNELD_NORMAL\" and \"CHANNELD_AWAITING_SPLICE\" mean the channel can be used normally", + "The channel state, in particular \"CHANNELD_NORMAL\" and \"CHANNELD_AWAITING_SPLICE\" mean the channel can be used normally.", "Note: *CLOSED* state was only added in v25.12." - ], - "added": "pre-v0.10.1" + ] }, "cause": { "type": "string", @@ -107,15 +101,13 @@ ], "description": [ "The cause of the state change." - ], - "added": "pre-v0.10.1" + ] }, "message": { "type": "string", "description": [ "The state change message." - ], - "added": "pre-v0.10.1" + ] } } } diff --git a/doc/schemas/notification/coin_movement.json b/doc/schemas/notification/coin_movement.json index ab526c6e24f6..2fa85f7de6f2 100644 --- a/doc/schemas/notification/coin_movement.json +++ b/doc/schemas/notification/coin_movement.json @@ -8,11 +8,8 @@ "The **coin_movement** notification informs whenever lightningd records a finalized ledger movement." ], "added": "pre-v0.10.1", - "request": { - "added": "pre-v0.10.1" - }, + "request": {}, "response": { - "added": "pre-v0.10.1", "required": [ "version", "coin_type", @@ -30,22 +27,19 @@ "type": "u32", "description": [ "The coin movement schema version." - ], - "added": "pre-v0.10.1" + ] }, "coin_type": { "type": "string", "description": [ "The BIP173 coin type name." - ], - "added": "pre-v0.10.1" + ] }, "node_id": { "type": "pubkey", "description": [ "The node id that emitted the notification." 
- ], - "added": "pre-v0.10.1" + ] }, "type": { "type": "string", @@ -55,8 +49,7 @@ ], "description": [ "Whether this is a channel or chain movement." - ], - "added": "pre-v0.10.1" + ] }, "created_index": { "type": "u64", @@ -69,43 +62,37 @@ "type": "string", "description": [ "The account identifier for the movement." - ], - "added": "pre-v0.10.1" + ] }, "credit_msat": { "type": "msat", "description": [ "Amount credited to the account." - ], - "added": "pre-v0.10.1" + ] }, "debit_msat": { "type": "msat", "description": [ "Amount debited from the account." - ], - "added": "pre-v0.10.1" + ] }, "timestamp": { "type": "u64", "description": [ "The UNIX timestamp when the movement was recorded." - ], - "added": "pre-v0.10.1" + ] }, "tags": { "type": "array", "description": [ "Deprecated legacy combined tag array, emitted only for deprecated output compatibility." ], - "added": "pre-v0.10.1", "deprecated": [ "v25.09", "v26.09" ], "items": { "type": "string", - "added": "pre-v0.10.1", "description": [ "A legacy movement tag." ] @@ -151,7 +138,6 @@ "added": "v25.09", "items": { "type": "string", - "added": "v25.09", "description": [ "An additional movement tag." ] @@ -161,29 +147,25 @@ "type": "hash", "description": [ "The payment hash associated with the movement, if any." - ], - "added": "pre-v0.10.1" + ] }, "part_id": { "type": "u64", "description": [ "The part id for a multi-part channel payment." - ], - "added": "pre-v0.10.1" + ] }, "group_id": { "type": "u64", "description": [ "The group id for a multi-part channel payment." - ], - "added": "pre-v0.10.1" + ] }, "fees_msat": { "type": "msat", "description": [ "The fees associated with a channel movement." - ], - "added": "pre-v0.10.1" + ] }, "utxo": { "type": "outpoint", @@ -196,22 +178,19 @@ "type": "pubkey", "description": [ "The peer associated with a chain movement, if any." 
- ], - "added": "pre-v0.10.1" + ] }, "originating_account": { "type": "string", "description": [ "The originating account for an external chain movement, if any." - ], - "added": "pre-v0.10.1" + ] }, "txid": { "type": "txid", "description": [ "Deprecated legacy field for the spending transaction id." ], - "added": "pre-v0.10.1", "deprecated": [ "v25.09", "v26.09" @@ -229,7 +208,6 @@ "description": [ "Deprecated legacy field for the outpoint transaction id." ], - "added": "pre-v0.10.1", "deprecated": [ "v25.09", "v26.09" @@ -240,7 +218,6 @@ "description": [ "Deprecated legacy field for the outpoint output index." ], - "added": "pre-v0.10.1", "deprecated": [ "v25.09", "v26.09" @@ -250,22 +227,19 @@ "type": "msat", "description": [ "The output amount for a chain movement." - ], - "added": "pre-v0.10.1" + ] }, "output_count": { "type": "u32", "description": [ "The number of outputs in the spending transaction, if known." - ], - "added": "pre-v0.10.1" + ] }, "blockheight": { "type": "u32", "description": [ "The blockheight for a chain movement." - ], - "added": "pre-v0.10.1" + ] } }, "allOf": [ diff --git a/doc/schemas/notification/connect.json b/doc/schemas/notification/connect.json index 1fbe9310f81a..ca1cfd145378 100644 --- a/doc/schemas/notification/connect.json +++ b/doc/schemas/notification/connect.json @@ -4,7 +4,7 @@ "notification": "connect", "title": "Notification for connection with a peer", "description": [ - "The **connect** informs whenever the node is connected to a peer." + "The **connect** notification informs whenever the node is connected to a peer." 
], "additionalProperties": false, "added": "pre-v0.10.1", @@ -20,8 +20,7 @@ "type": "pubkey", "description": [ "The id of the peer which sent the custom message" - ], - "added": "pre-v0.10.1" + ] }, "direction": { "type": "string", @@ -31,15 +30,13 @@ ], "description": [ "Direction of the connection" - ], - "added": "pre-v0.10.1" + ] }, "address": { "type": "object", "description": [ "Address information (mainly useful if **direction** is *out*)" ], - "added": "pre-v0.10.1", "additionalProperties": true, "required": [ "type" @@ -47,7 +44,6 @@ "properties": { "type": { "type": "string", - "added": "pre-v0.10.1", "enum": [ "local socket", "ipv4", @@ -82,7 +78,6 @@ "type": {}, "socket": { "type": "string", - "added": "pre-v0.10.1", "description": [ "Socket filename" ] @@ -114,14 +109,12 @@ "type": {}, "address": { "type": "string", - "added": "pre-v0.10.1", "description": [ "Address in expected format for **type**" ] }, "port": { "type": "u16", - "added": "pre-v0.10.1", "description": [ "Port number" ] diff --git a/doc/schemas/notification/custommsg.json b/doc/schemas/notification/custommsg.json index 61f6447de334..b099b7e73f39 100644 --- a/doc/schemas/notification/custommsg.json +++ b/doc/schemas/notification/custommsg.json @@ -19,15 +19,13 @@ "type": "pubkey", "description": [ "The id of the peer which sent the custom message" - ], - "added": "v24.02" + ] }, "payload": { "type": "hex", "description": [ "The hex-encoded payload. 
The first 2 bytes represent the BOLT-8 message type followed by the message content" - ], - "added": "v24.02" + ] } } } diff --git a/doc/schemas/notification/deprecated_oneshot.json b/doc/schemas/notification/deprecated_oneshot.json index 63c44e2ec621..e0edc00c90fa 100644 --- a/doc/schemas/notification/deprecated_oneshot.json +++ b/doc/schemas/notification/deprecated_oneshot.json @@ -8,11 +8,8 @@ "The **deprecated_oneshot** notification informs a plugin that the immediately following command uses different deprecation handling than the global setting." ], "added": "v24.02", - "request": { - "added": "v24.02" - }, + "request": {}, "response": { - "added": "v24.02", "required": [ "deprecated_ok" ], @@ -21,8 +18,7 @@ "type": "boolean", "description": [ "Whether deprecated APIs are allowed for the immediately following command." - ], - "added": "v24.02" + ] } } } diff --git a/doc/schemas/notification/disconnect.json b/doc/schemas/notification/disconnect.json index 3cf079e49244..63351ef205fd 100644 --- a/doc/schemas/notification/disconnect.json +++ b/doc/schemas/notification/disconnect.json @@ -8,11 +8,8 @@ "The **disconnect** notification informs whenever the node disconnects from a peer." ], "added": "pre-v0.10.1", - "request": { - "added": "pre-v0.10.1" - }, + "request": {}, "response": { - "added": "pre-v0.10.1", "required": [ "id" ], @@ -21,8 +18,7 @@ "type": "pubkey", "description": [ "The id of the peer that disconnected." - ], - "added": "pre-v0.10.1" + ] } } } diff --git a/doc/schemas/notification/forward_event.json b/doc/schemas/notification/forward_event.json index 0b59ef827eba..920941bc3ac0 100644 --- a/doc/schemas/notification/forward_event.json +++ b/doc/schemas/notification/forward_event.json @@ -8,11 +8,8 @@ "The **forward_event** notification informs whenever the status of a forwarded HTLC changes." 
], "added": "pre-v0.10.1", - "request": { - "added": "pre-v0.10.1" - }, + "request": {}, "response": { - "added": "pre-v0.10.1", "required": [ "payment_hash", "in_channel", @@ -25,43 +22,37 @@ "type": "hash", "description": [ "The payment hash for the forwarded HTLC." - ], - "added": "pre-v0.10.1" + ] }, "in_channel": { "type": "short_channel_id", "description": [ "The inbound channel that received the HTLC." - ], - "added": "pre-v0.10.1" + ] }, "out_channel": { "type": "short_channel_id", "description": [ "The outbound channel used for the forward, if one was selected." - ], - "added": "pre-v0.10.1" + ] }, "in_msat": { "type": "msat", "description": [ "The amount of the incoming HTLC." - ], - "added": "pre-v0.10.1" + ] }, "out_msat": { "type": "msat", "description": [ "The amount forwarded to the outbound channel, if known." - ], - "added": "pre-v0.10.1" + ] }, "fee_msat": { "type": "msat", "description": [ "The fee earned on the forward, if an outbound amount is known." - ], - "added": "pre-v0.10.1" + ] }, "status": { "type": "string", @@ -73,22 +64,19 @@ ], "description": [ "The current forwarding status." - ], - "added": "pre-v0.10.1" + ] }, "failcode": { "type": "u32", "description": [ "The BOLT 4 failcode when the forward failed locally." - ], - "added": "pre-v0.10.1" + ] }, "failreason": { "type": "string", "description": [ "The symbolic failcode name when the forward failed locally." - ], - "added": "pre-v0.10.1" + ] }, "style": { "type": "string", @@ -105,15 +93,13 @@ "type": "number", "description": [ "The UNIX timestamp when the HTLC was received." - ], - "added": "pre-v0.10.1" + ] }, "resolved_time": { "type": "number", "description": [ "The UNIX timestamp when the HTLC was resolved, if it has been resolved." 
- ], - "added": "pre-v0.10.1" + ] } } } diff --git a/doc/schemas/notification/invoice_creation.json b/doc/schemas/notification/invoice_creation.json index 735f46fa9898..b39c6d74cb57 100644 --- a/doc/schemas/notification/invoice_creation.json +++ b/doc/schemas/notification/invoice_creation.json @@ -8,11 +8,8 @@ "The **invoice_creation** notification informs whenever a new invoice is created." ], "added": "pre-v0.10.1", - "request": { - "added": "pre-v0.10.1" - }, + "request": {}, "response": { - "added": "pre-v0.10.1", "required": [ "preimage", "label" @@ -22,22 +19,19 @@ "type": "msat", "description": [ "The invoice amount, if the invoice amount was specified at creation time." - ], - "added": "pre-v0.10.1" + ] }, "preimage": { "type": "secret", "description": [ "The payment preimage for the created invoice." - ], - "added": "pre-v0.10.1" + ] }, "label": { "type": "string", "description": [ "The label of the created invoice." - ], - "added": "pre-v0.10.1" + ] } } } diff --git a/doc/schemas/notification/invoice_payment.json b/doc/schemas/notification/invoice_payment.json index 2feb86f5db8a..4f0261ab4e0d 100644 --- a/doc/schemas/notification/invoice_payment.json +++ b/doc/schemas/notification/invoice_payment.json @@ -8,11 +8,8 @@ "The **invoice_payment** notification informs whenever an invoice is paid." ], "added": "pre-v0.10.1", - "request": { - "added": "pre-v0.10.1" - }, + "request": {}, "response": { - "added": "pre-v0.10.1", "required": [ "msat", "preimage", @@ -23,15 +20,13 @@ "type": "msat", "description": [ "The amount paid for the invoice." - ], - "added": "pre-v0.10.1" + ] }, "preimage": { "type": "secret", "description": [ "The payment preimage for the invoice." - ], - "added": "pre-v0.10.1" + ] }, "outpoint": { "type": "outpoint", @@ -44,8 +39,7 @@ "type": "string", "description": [ "The label of the paid invoice." 
- ], - "added": "pre-v0.10.1" + ] } } } diff --git a/doc/schemas/notification/log.json b/doc/schemas/notification/log.json index 34c343be0ee5..1617e3bbf8a9 100644 --- a/doc/schemas/notification/log.json +++ b/doc/schemas/notification/log.json @@ -8,11 +8,8 @@ "The **log** notification informs whenever lightningd emits a log message and exposes the same payload shape as **warning**." ], "added": "v24.02", - "request": { - "added": "v24.02" - }, + "request": {}, "response": { - "added": "v24.02", "required": [ "level", "time", @@ -33,36 +30,31 @@ ], "description": [ "The log level of the emitted message." - ], - "added": "v24.02" + ] }, "time": { "type": "string", "description": [ "The event time as seconds since epoch with subsecond precision." - ], - "added": "v24.02" + ] }, "timestamp": { "type": "string", "description": [ "The event time in ISO 8601 format." - ], - "added": "v24.02" + ] }, "source": { "type": "string", "description": [ "The source component that emitted the log entry." - ], - "added": "v24.02" + ] }, "log": { "type": "string", "description": [ "The original log message." - ], - "added": "v24.02" + ] } } } diff --git a/doc/schemas/notification/onionmessage_forward_fail.json b/doc/schemas/notification/onionmessage_forward_fail.json index d7d01a8e59e9..ba04a465c92f 100644 --- a/doc/schemas/notification/onionmessage_forward_fail.json +++ b/doc/schemas/notification/onionmessage_forward_fail.json @@ -8,11 +8,8 @@ "The **onionmessage_forward_fail** notification informs whenever lightningd cannot forward an onion message." ], "added": "v24.11", - "request": { - "added": "v24.11" - }, + "request": {}, "response": { - "added": "v24.11", "required": [ "source", "incoming", @@ -23,43 +20,37 @@ "type": "pubkey", "description": [ "The node that sent the incoming onion message." - ], - "added": "v24.11" + ] }, "incoming": { "type": "hex", "description": [ "The incoming onion message payload." 
- ], - "added": "v24.11" + ] }, "path_key": { "type": "pubkey", "description": [ "The path key used for the attempted forward." - ], - "added": "v24.11" + ] }, "outgoing": { "type": "hex", "description": [ "The outgoing onion message payload if a next hop had already been selected." - ], - "added": "v24.11" + ] }, "next_node_id": { "type": "pubkey", "description": [ "The next node id for the attempted forward when the next hop is a node." - ], - "added": "v24.11" + ] }, "next_short_channel_id_dir": { "type": "short_channel_id_dir", "description": [ "The next short_channel_id_dir for the attempted forward when the next hop is a channel." - ], - "added": "v24.11" + ] } } } diff --git a/doc/schemas/notification/openchannel_peer_sigs.json b/doc/schemas/notification/openchannel_peer_sigs.json index 0793bda0a180..8f3f3ed7ac32 100644 --- a/doc/schemas/notification/openchannel_peer_sigs.json +++ b/doc/schemas/notification/openchannel_peer_sigs.json @@ -8,11 +8,8 @@ "The **openchannel_peer_sigs** notification informs whenever a peer provides signatures for an in-progress channel open." ], "added": "pre-v0.10.1", - "request": { - "added": "pre-v0.10.1" - }, + "request": {}, "response": { - "added": "pre-v0.10.1", "required": [ "channel_id", "signed_psbt" @@ -22,15 +19,13 @@ "type": "hash", "description": [ "The channel id for the channel open attempt." - ], - "added": "pre-v0.10.1" + ] }, "signed_psbt": { "type": "string", "description": [ "The signed PSBT from the peer." - ], - "added": "pre-v0.10.1" + ] } } } diff --git a/doc/schemas/notification/plugin_started.json b/doc/schemas/notification/plugin_started.json index 63720e5740d1..7cd2c6e9f93e 100644 --- a/doc/schemas/notification/plugin_started.json +++ b/doc/schemas/notification/plugin_started.json @@ -8,11 +8,8 @@ "The **plugin_started** notification informs whenever a plugin has started and registered its methods." 
], "added": "v25.02", - "request": { - "added": "v25.02" - }, + "request": {}, "response": { - "added": "v25.02", "required": [ "plugin_name", "plugin_path", @@ -23,28 +20,24 @@ "type": "string", "description": [ "The short name of the plugin." - ], - "added": "v25.02" + ] }, "plugin_path": { "type": "string", "description": [ "The executable path of the plugin." - ], - "added": "v25.02" + ] }, "methods": { "type": "array", "description": [ "The RPC methods registered by the plugin." ], - "added": "v25.02", "items": { "type": "string", "description": [ "A method registered by the plugin." - ], - "added": "v25.02" + ] } } } diff --git a/doc/schemas/notification/plugin_stopped.json b/doc/schemas/notification/plugin_stopped.json index 90c6cd634ed0..ab6a7e034dd8 100644 --- a/doc/schemas/notification/plugin_stopped.json +++ b/doc/schemas/notification/plugin_stopped.json @@ -8,11 +8,8 @@ "The **plugin_stopped** notification informs whenever a plugin stops and exposes the same payload shape as **plugin_started**." ], "added": "v25.02", - "request": { - "added": "v25.02" - }, + "request": {}, "response": { - "added": "v25.02", "required": [ "plugin_name", "plugin_path", @@ -23,28 +20,24 @@ "type": "string", "description": [ "The short name of the plugin." - ], - "added": "v25.02" + ] }, "plugin_path": { "type": "string", "description": [ "The executable path of the plugin." - ], - "added": "v25.02" + ] }, "methods": { "type": "array", "description": [ "The RPC methods registered by the plugin." ], - "added": "v25.02", "items": { "type": "string", "description": [ "A method registered by the plugin." 
- ], - "added": "v25.02" + ] } } } diff --git a/doc/schemas/notification/sendpay_failure.json b/doc/schemas/notification/sendpay_failure.json index fc7af72e7311..02e47b1c3bdd 100644 --- a/doc/schemas/notification/sendpay_failure.json +++ b/doc/schemas/notification/sendpay_failure.json @@ -8,11 +8,8 @@ "The **sendpay_failure** notification informs whenever a payment attempt completes with failed status." ], "added": "pre-v0.10.1", - "request": { - "added": "pre-v0.10.1" - }, + "request": {}, "response": { - "added": "pre-v0.10.1", "required": [ "code", "message", @@ -23,15 +20,13 @@ "type": "integer", "description": [ "The JSON-RPC error code for the payment failure." - ], - "added": "pre-v0.10.1" + ] }, "message": { "type": "string", "description": [ "The human-readable payment failure message." - ], - "added": "pre-v0.10.1" + ] }, "data": { "type": "object", @@ -39,7 +34,6 @@ "description": [ "The structured payment failure details." ], - "added": "pre-v0.10.1", "properties": { "created_index": { "type": "u64", @@ -52,22 +46,19 @@ "type": "u64", "description": [ "Old synonym for created_index." - ], - "added": "pre-v0.10.1" + ] }, "payment_hash": { "type": "hash", "description": [ "The hash of the payment preimage." - ], - "added": "pre-v0.10.1" + ] }, "groupid": { "type": "u64", "description": [ "Grouping key for multiple attempts on the same payment." - ], - "added": "pre-v0.10.1" + ] }, "updated_index": { "type": "u64", @@ -80,43 +71,37 @@ "type": "u64", "description": [ "Part number for a multi-part payment." - ], - "added": "pre-v0.10.1" + ] }, "destination": { "type": "pubkey", "description": [ "The final destination of the payment, if known." - ], - "added": "pre-v0.10.1" + ] }, "amount_msat": { "type": "msat", "description": [ "The amount intended for the destination, if known." - ], - "added": "pre-v0.10.1" + ] }, "amount_sent_msat": { "type": "msat", "description": [ "The total amount sent including fees." 
- ], - "added": "pre-v0.10.1" + ] }, "created_at": { "type": "u64", "description": [ "The UNIX timestamp when this payment was initiated." - ], - "added": "pre-v0.10.1" + ] }, "completed_at": { "type": "u64", "description": [ "The UNIX timestamp when this payment completed." - ], - "added": "pre-v0.10.1" + ] }, "status": { "type": "string", @@ -127,29 +112,25 @@ ], "description": [ "The terminal status for a failed sendpay notification." - ], - "added": "pre-v0.10.1" + ] }, "payment_preimage": { "type": "secret", "description": [ "The payment preimage, if the payment later completed through another path." - ], - "added": "pre-v0.10.1" + ] }, "label": { "type": "string", "description": [ "The optional label associated with the payment." - ], - "added": "pre-v0.10.1" + ] }, "bolt11": { "type": "string", "description": [ "The bolt11 invoice string, if present." - ], - "added": "pre-v0.10.1" + ] }, "bolt12": { "type": "string", @@ -162,71 +143,61 @@ "type": "string", "description": [ "The description associated with the payment, if present." - ], - "added": "pre-v0.10.1" + ] }, "erroronion": { "type": "hex", "description": [ "The raw error onion if one was retained for the payment." - ], - "added": "pre-v0.10.1" + ] }, "onionreply": { "type": "hex", "description": [ "The onion reply for an unparseable onion failure." - ], - "added": "pre-v0.10.1" + ] }, "erring_index": { "type": "u32", "description": [ "The position in the route where the failure occurred." - ], - "added": "pre-v0.10.1" + ] }, "failcode": { "type": "u32", "description": [ "The BOLT 4 failcode." - ], - "added": "pre-v0.10.1" + ] }, "failcodename": { "type": "string", "description": [ "The symbolic name for the failcode, if known." - ], - "added": "pre-v0.10.1" + ] }, "erring_node": { "type": "pubkey", "description": [ "The node that produced the failure, if known." 
- ], - "added": "pre-v0.10.1" + ] }, "erring_channel": { "type": "short_channel_id", "description": [ "The channel that produced the failure, if known." - ], - "added": "pre-v0.10.1" + ] }, "erring_direction": { "type": "u32", "description": [ "The direction within the erring channel." - ], - "added": "pre-v0.10.1" + ] }, "raw_message": { "type": "hex", "description": [ "The raw failure message payload, if present." - ], - "added": "pre-v0.10.1" + ] } } } diff --git a/doc/schemas/notification/sendpay_success.json b/doc/schemas/notification/sendpay_success.json index 7827725491a1..a434b191efa1 100644 --- a/doc/schemas/notification/sendpay_success.json +++ b/doc/schemas/notification/sendpay_success.json @@ -8,11 +8,8 @@ "The **sendpay_success** notification informs whenever a payment attempt completes successfully." ], "added": "pre-v0.10.1", - "request": { - "added": "pre-v0.10.1" - }, + "request": {}, "response": { - "added": "pre-v0.10.1", "required": [ "created_index", "id", @@ -34,22 +31,19 @@ "type": "u64", "description": [ "Old synonym for created_index." - ], - "added": "pre-v0.10.1" + ] }, "payment_hash": { "type": "hash", "description": [ "The hash of the payment preimage." - ], - "added": "pre-v0.10.1" + ] }, "groupid": { "type": "u64", "description": [ "Grouping key for multiple attempts on the same payment." - ], - "added": "pre-v0.10.1" + ] }, "updated_index": { "type": "u64", @@ -62,43 +56,37 @@ "type": "u64", "description": [ "Part number for a multi-part payment." - ], - "added": "pre-v0.10.1" + ] }, "destination": { "type": "pubkey", "description": [ "The final destination of the payment, if known." - ], - "added": "pre-v0.10.1" + ] }, "amount_msat": { "type": "msat", "description": [ "The amount delivered to the destination, if known." - ], - "added": "pre-v0.10.1" + ] }, "amount_sent_msat": { "type": "msat", "description": [ "The total amount sent including fees." 
- ], - "added": "pre-v0.10.1" + ] }, "created_at": { "type": "u64", "description": [ "The UNIX timestamp when this payment was initiated." - ], - "added": "pre-v0.10.1" + ] }, "completed_at": { "type": "u64", "description": [ "The UNIX timestamp when this payment completed." - ], - "added": "pre-v0.10.1" + ] }, "status": { "type": "string", @@ -107,29 +95,25 @@ ], "description": [ "The terminal status for a successful sendpay notification." - ], - "added": "pre-v0.10.1" + ] }, "payment_preimage": { "type": "secret", "description": [ "The payment preimage proving success." - ], - "added": "pre-v0.10.1" + ] }, "label": { "type": "string", "description": [ "The optional label associated with the payment." - ], - "added": "pre-v0.10.1" + ] }, "bolt11": { "type": "string", "description": [ "The bolt11 invoice string, if present." - ], - "added": "pre-v0.10.1" + ] }, "bolt12": { "type": "string", @@ -142,15 +126,13 @@ "type": "string", "description": [ "The description associated with the payment, if present." - ], - "added": "pre-v0.10.1" + ] }, "erroronion": { "type": "hex", "description": [ "The raw error onion if one was retained for the payment." - ], - "added": "pre-v0.10.1" + ] } } } diff --git a/doc/schemas/notification/shutdown.json b/doc/schemas/notification/shutdown.json index d165ef8147d3..5e9d84a08abc 100644 --- a/doc/schemas/notification/shutdown.json +++ b/doc/schemas/notification/shutdown.json @@ -5,14 +5,14 @@ "notification": "shutdown", "title": "Notification for plugin shutdown", "description": [ - "The **shutdown** notification informs a plugin that lightningd is shutting down." + "The **shutdown** notification informs a plugin that lightningd is shutting down.", + "", + "It is expected that the plugin terminates it's own process when subscribing to this notification.", + "If it does not do so, lightningd will kill the plugin's process after a timeout." 
], "added": "pre-v0.10.1", - "request": { - "added": "pre-v0.10.1" - }, + "request": {}, "response": { - "added": "pre-v0.10.1", "properties": {} } } diff --git a/doc/schemas/notification/warning.json b/doc/schemas/notification/warning.json index 6d1df00f15c7..5d54b0f79743 100644 --- a/doc/schemas/notification/warning.json +++ b/doc/schemas/notification/warning.json @@ -8,11 +8,8 @@ "The **warning** notification informs whenever lightningd emits a warning or error level event that should be surfaced to plugins." ], "added": "pre-v0.10.1", - "request": { - "added": "pre-v0.10.1" - }, + "request": {}, "response": { - "added": "pre-v0.10.1", "required": [ "level", "time", @@ -29,15 +26,13 @@ ], "description": [ "The log level of the warning notification." - ], - "added": "pre-v0.10.1" + ] }, "time": { "type": "string", "description": [ "The event time as seconds since epoch with subsecond precision." - ], - "added": "pre-v0.10.1" + ] }, "timestamp": { "type": "string", @@ -50,15 +45,13 @@ "type": "string", "description": [ "The source component that emitted the log entry." - ], - "added": "pre-v0.10.1" + ] }, "log": { "type": "string", "description": [ "The original log message." - ], - "added": "pre-v0.10.1" + ] } } } diff --git a/doc/schemas/notification/xpay_pay_part_end.json b/doc/schemas/notification/xpay_pay_part_end.json index 814c02b1efce..bbfc6c8e940c 100644 --- a/doc/schemas/notification/xpay_pay_part_end.json +++ b/doc/schemas/notification/xpay_pay_part_end.json @@ -8,11 +8,8 @@ "The **pay_part_end** notification is emitted by the xpay plugin when a payment part completes." ], "added": "v25.09", - "request": { - "added": "v25.09" - }, + "request": {}, "response": { - "added": "v25.09", "required": [ "status", "duration", @@ -29,78 +26,67 @@ ], "description": [ "Whether the payment part succeeded or failed." - ], - "added": "v25.09" + ] }, "duration": { "type": "number", "description": [ "The time in seconds from send to result." 
- ], - "added": "v25.09" + ] }, "payment_hash": { "type": "hash", "description": [ "The payment hash for the xpay invocation." - ], - "added": "v25.09" + ] }, "groupid": { "type": "u64", "description": [ "The xpay group identifier for the payment attempt." - ], - "added": "v25.09" + ] }, "partid": { "type": "u64", "description": [ "The identifier for this payment part." - ], - "added": "v25.09" + ] }, "failed_msg": { "type": "hex", "description": [ "The decrypted onion error message, if available." - ], - "added": "v25.09" + ] }, "failed_node_id": { "type": "pubkey", "description": [ "The node that generated the failure, if known." - ], - "added": "v25.09" + ] }, "failed_short_channel_id": { "type": "short_channel_id", "description": [ "The short channel id complained about, if known." - ], - "added": "v25.09" + ] }, "failed_direction": { "type": "u32", "description": [ "The direction within the failed short channel id, if known." - ], - "added": "v25.09" + ] }, "error_code": { "type": "u32", "description": [ "The xpay error code, if known." - ], - "added": "v25.09" + ] }, "error_message": { "type": "string", "description": [ "The human-readable xpay error message." - ], - "added": "v25.09" + ] } } } diff --git a/doc/schemas/notification/xpay_pay_part_start.json b/doc/schemas/notification/xpay_pay_part_start.json index e3f2a7fd2331..ccc73ce0c33b 100644 --- a/doc/schemas/notification/xpay_pay_part_start.json +++ b/doc/schemas/notification/xpay_pay_part_start.json @@ -8,11 +8,8 @@ "The **pay_part_start** notification is emitted by the xpay plugin when a payment part begins." ], "added": "v25.09", - "request": { - "added": "v25.09" - }, + "request": {}, "response": { - "added": "v25.09", "required": [ "payment_hash", "groupid", @@ -26,46 +23,39 @@ "type": "hash", "description": [ "The payment hash for the xpay invocation." - ], - "added": "v25.09" + ] }, "groupid": { "type": "u64", "description": [ "The xpay group identifier for the payment attempt." 
- ], - "added": "v25.09" + ] }, "partid": { "type": "u64", "description": [ "The identifier for this payment part." - ], - "added": "v25.09" + ] }, "total_payment_msat": { "type": "msat", "description": [ "The total payment amount for the xpay invocation." - ], - "added": "v25.09" + ] }, "attempt_msat": { "type": "msat", "description": [ "The amount this payment part attempts to deliver." - ], - "added": "v25.09" + ] }, "hops": { "type": "array", "description": [ "The route hops for this payment part." ], - "added": "v25.09", "items": { "type": "object", - "added": "v25.09", "additionalProperties": false, "required": [ "next_node", @@ -79,36 +69,31 @@ "type": "pubkey", "description": [ "The next node for this hop." - ], - "added": "v25.09" + ] }, "short_channel_id": { "type": "short_channel_id", "description": [ "The short channel id used for this hop." - ], - "added": "v25.09" + ] }, "direction": { "type": "u32", "description": [ "The direction within the short channel id." - ], - "added": "v25.09" + ] }, "channel_in_msat": { "type": "msat", "description": [ "The amount entering the hop." - ], - "added": "v25.09" + ] }, "channel_out_msat": { "type": "msat", "description": [ "The amount leaving the hop." - ], - "added": "v25.09" + ] } } } From 8940d016aa813e98e3fb0c298eed95a75fb2735f Mon Sep 17 00:00:00 2001 From: daywalker90 <8257956+daywalker90@users.noreply.github.com> Date: Tue, 21 Apr 2026 14:48:49 +0200 Subject: [PATCH 6/6] docs: generate notification and hook documentation from schemas and publish them to the readme.com instance of course! 
Changelog-None --- .github/scripts/sync-rpc-cmds.py | 143 ++-- doc/Makefile | 44 +- .../plugin-development/event-notifications.md | 645 +----------------- .../plugin-development/hooks.md | 524 +------------- doc/index.rst | 39 ++ tools/fromschema.py | 195 ++++-- 6 files changed, 314 insertions(+), 1276 deletions(-) diff --git a/.github/scripts/sync-rpc-cmds.py b/.github/scripts/sync-rpc-cmds.py index 368f37a077dc..eb915aab4805 100644 --- a/.github/scripts/sync-rpc-cmds.py +++ b/.github/scripts/sync-rpc-cmds.py @@ -5,8 +5,11 @@ from enum import Enum # readme url -URL = "https://api.readme.com/v2/branches/stable" +BRANCH = "stable" +URL = f"https://api.readme.com/v2/branches/{BRANCH}" CATEGORY_SLUG = "JSON-RPC" +NOTIFICATIONS_CATEGORY_SLUG = "Notifications" +HOOKS_CATEGORY_SLUG = "Hooks" class Action(Enum): @@ -15,8 +18,8 @@ class Action(Enum): DELETE = 'delete' -def getListOfRPCDocs(headers): - response = requests.get(f"{URL}/categories/reference/{CATEGORY_SLUG}/pages", headers=headers) +def getListOfDocs(headers, category): + response = requests.get(f"{URL}/categories/reference/{category}/pages", headers=headers) if response.status_code == 200: return response.json().get('data', []) else: @@ -47,15 +50,15 @@ def check_renderable(response, action, title): return True -def publishDoc(action, title, body, position, headers): +def publishDoc(action, title, body, position, headers, category): payload = { - "title": title, + "title": get_display_name(title), "type": "basic", "content": { "body": body, }, "category": { - "uri": f"/branches/stable/categories/reference/{CATEGORY_SLUG}" + "uri": f"/branches/{BRANCH}/categories/reference/{category}" }, "hidden": False, "position": position, @@ -99,18 +102,78 @@ def publishDoc(action, title, body, position, headers): print("Invalid action") -def extract_rpc_commands(rst_content): +def extract_all_from_rst(rst_content): manpages_block = re.search( - r"\.\. block_start manpages(.*?)" r"\.\. block_end manpages", + r"\.\. 
block_start manpages(.*?)\.\. block_end manpages", rst_content, re.DOTALL, ) - if manpages_block: - commands = re.findall( - r"\b([a-zA-Z0-9_-]+)" r"\s+<([^>]+)>\n", manpages_block.group(1) - ) - return commands - return [] + + if not manpages_block: + return [], [], [] + + entries = re.findall( + r"^\s*([a-zA-Z0-9_-]+)\s+<([^>]+)>", + manpages_block.group(1), + re.MULTILINE, + ) + + rpc_commands = [] + notifications = [] + hooks = [] + + for name, target in entries: + if name.startswith("notification-"): + notifications.append((name, target)) + elif name.startswith("hook-"): + hooks.append((name, target)) + else: + rpc_commands.append((name, target)) + + return rpc_commands, notifications, hooks + + +def sync_docs(local_items, readme_items, category_slug, headers, label): + local_titles = {name for name, _ in local_items} + readme_titles = {item['slug'] for item in readme_items} + + to_delete = readme_titles - local_titles + to_add = local_titles - readme_titles + + # Deletions + for name in to_delete: + publishDoc(Action.DELETE, name, "", 0, headers, category_slug) + sleep(1) + + # Add / Update + if not local_items: + print(f"⚠️ No {label} found in the Manpages block.") + return + + position = 0 + for name, file in local_items: + file_path = os.path.join("doc", file) + + if not os.path.exists(file_path): + print(f"⚠️ WARNING: File not found: {file_path}, skipping {name}") + continue + + with open(file_path) as f: + body = f.read() + + action = Action.ADD if name in to_add else Action.UPDATE + publishDoc(action, name, body, position, headers, category_slug) + + position += 1 + sleep(1) + + +def get_display_name(name): + if name.startswith("notification-"): + return name[len("notification-"):] + if name.startswith("hook-"): + return name[len("hook-"):] + return name def main(): @@ -136,34 +199,34 @@ def main(): with open(path_to_rst, "r") as file: rst_content = file.read() - commands_from_local = extract_rpc_commands(rst_content) - commands_from_readme = 
getListOfRPCDocs(headers) + commands_from_local, notifications_from_local, hooks_from_local = extract_all_from_rst(rst_content) + commands_from_readme = getListOfDocs(headers, CATEGORY_SLUG) + notifications_from_readme = getListOfDocs(headers, NOTIFICATIONS_CATEGORY_SLUG) + hooks_from_readme = getListOfDocs(headers, HOOKS_CATEGORY_SLUG) + + sync_docs( + commands_from_local, + commands_from_readme, + CATEGORY_SLUG, + headers, + "commands" + ) - # Compare local and server commands list to get the list of command to add or delete - commands_local_title = set(command[0] for command in commands_from_local) - commands_readme_title = set(command['slug'] for command in commands_from_readme) - commands_to_delete = commands_readme_title - commands_local_title - commands_to_add = commands_local_title - commands_readme_title - for name in commands_to_delete: - publishDoc(Action.DELETE, name, "", 0, headers) - sleep(1) + sync_docs( + notifications_from_local, + notifications_from_readme, + NOTIFICATIONS_CATEGORY_SLUG, + headers, + "notifications" + ) - if commands_from_local: - position = 0 - for name, file in commands_from_local: - file_path = "doc/" + file - if not os.path.exists(file_path): - print(f"⚠️ WARNING: File not found: {file_path}, skipping {name}") - continue - - with open(file_path) as f: - body = f.read() - action = Action.ADD if name in commands_to_add else Action.UPDATE - publishDoc(action, name, body, position, headers) - position += 1 - sleep(1) - else: - print("⚠️ No commands found in the Manpages block.") + sync_docs( + hooks_from_local, + hooks_from_readme, + HOOKS_CATEGORY_SLUG, + headers, + "hooks" + ) print("\n✨ Sync complete!") diff --git a/doc/Makefile b/doc/Makefile index fbf4b64d749b..c430de68c693 100644 --- a/doc/Makefile +++ b/doc/Makefile @@ -160,6 +160,23 @@ MARKDOWNPAGES := doc/addgossip.7 \ doc/withdraw.7 \ doc/xpay.7 +NOTIFICATION_SCHEMAS := $(wildcard doc/schemas/notification/*.json) +NOTIFICATION_MARKDOWNPAGES := $(patsubst 
doc/schemas/notification/%.json,doc/notification-%.7,$(NOTIFICATION_SCHEMAS)) + +HOOK_SCHEMAS := $(wildcard doc/schemas/hook/*.json) +HOOK_MARKDOWNPAGES := $(patsubst doc/schemas/hook/%.json,doc/hook-%.7,$(HOOK_SCHEMAS)) + +MARKDOWNPAGES += $(NOTIFICATION_MARKDOWNPAGES) $(HOOK_MARKDOWNPAGES) + +$(foreach s,$(wildcard doc/schemas/*.json), \ + $(eval doc/$(basename $(notdir $(s))).7.md: $(s))) + +$(foreach s,$(wildcard doc/schemas/notification/*.json), \ + $(eval doc/notification-$(basename $(notdir $(s))).7.md: $(s))) + +$(foreach s,$(wildcard doc/schemas/hook/*.json), \ + $(eval doc/hook-$(basename $(notdir $(s))).7.md: $(s))) + ifeq ($(HAVE_SQLITE3),1) MARKDOWNPAGES += doc/listsqlschemas.7 doc/sql.7 endif @@ -188,7 +205,7 @@ doc/schemas/sql.json: doc/schemas/sql-template.json plugins/sql doc-all: $(MANPAGES) doc/index.rst -SCHEMAS := $(wildcard doc/schemas/*.json) $(wildcard doc/schemas/notification/*.json) $(wildcard doc/schemas/hook/*.json) +SCHEMAS := $(wildcard doc/schemas/*.json) $(NOTIFICATION_SCHEMAS) $(HOOK_SCHEMAS) # Don't try to build sql.json tables with plugins/sql if we don't have sqlite3 ifeq ($(HAVE_SQLITE3),0) @@ -210,8 +227,29 @@ check-doc: check-config-docs check-manpages check-fmt-schemas LBRACKET=( RBRACKET=) -$(MARKDOWNPAGES_WITH_EXT): doc/%.7.md: doc/schemas/%.json tools/fromschema.py - @tools/fromschema.py --markdownfile=$@ $< > $@.tmp && mv $@.tmp $@ +doc/sql.7.md: doc/schemas/sql.json + +$(MARKDOWNPAGES_WITH_EXT): doc/%.7.md: tools/fromschema.py + @base="$*"; \ + json="doc/schemas/$$base.json"; \ + if [ ! -f "$$json" ]; then \ + case "$$base" in \ + notification-*) \ + name=$${base#notification-}; \ + json="doc/schemas/notification/$$name.json"; \ + ;; \ + hook-*) \ + name=$${base#hook-}; \ + json="doc/schemas/hook/$$name.json"; \ + ;; \ + esac; \ + fi; \ + if [ ! 
-f "$$json" ]; then \ + echo "No schema found for $@ (looked for $$json)"; \ + exit 1; \ + fi; \ + tools/fromschema.py --markdownfile=$@ $$json > $@.tmp && mv $@.tmp $@ + LOWDOWN := lowdown diff --git a/doc/developers-guide/plugin-development/event-notifications.md b/doc/developers-guide/plugin-development/event-notifications.md index 148719d60bae..ce16cd78f048 100644 --- a/doc/developers-guide/plugin-development/event-notifications.md +++ b/doc/developers-guide/plugin-development/event-notifications.md @@ -10,651 +10,10 @@ Event notifications allow a plugin to subscribe to events in `lightningd`. `ligh > > Notifications are not confirmable by definition, since they do not have a Response object to be returned. As such, the Client would not be aware of any errors (like e.g. "Invalid params","Internal error"). -Plugins subscribe by returning an array of subscriptions as part of the `getmanifest` response. The result for the `getmanifest` call above for example subscribes to the two topics `connect` and `disconnect`. The topics that are currently defined and the corresponding payloads are listed below. +Plugins subscribe by returning an array of subscriptions as part of the `getmanifest` response. The result for the `getmanifest` call above for example subscribes to the two topics `connect` and `disconnect`. > 📘 > > This is a way of specifying that you want to subscribe to all possible event notifications. It is not recommended, but is useful for plugins which want to provide generic infrastructure for others (in future, we may add the ability to dynamically subscribe/unsubscribe). -### `deprecated_oneshot` - -(Added in *v24.02*) - -This is a special notification, which the plugin will only receive it it set `deprecated_oneshot` to `true` in its getmanifest response. It indicates that the immeditately following command wants a different deprecation status than the global `allow-deprecated-apis` setting. 
- -This is possible because of the `deprecations` RPC command, where individual connections can change their deprecation settings. - -```json -{ - "deprecated_oneshot": { - "deprecated_ok": false - } -} -``` - -### `channel_opened` - -A notification for topic `channel_opened` is sent if a peer successfully funded a channel with us. It contains the peer id, the funding amount (in millisatoshis), the funding transaction id, and a boolean indicating if the funding transaction has been included into a block. - -```json -{ - "channel_opened": { - "id": "03864ef025fde8fb587d989186ce6a4a186895ee44a926bfc370e2c366597a3f8f", - "funding_msat": 100000000, - "funding_txid": "4a5e1e4baab89f3a32518a88c31bc87f618f76673e2cc77ab2127b7afdeda33b", - "channel_ready": false - } -} -``` - -### `channel_open_failed` - -A notification to indicate that a channel open attempt has been unsuccessful. -Useful for cleaning up state for a v2 channel open attempt. See `plugins/funder.c` for an example of how to use this. - -```json -{ - "channel_open_failed": { - "channel_id": "a2d0851832f0e30a0cf...", - } -} -``` - -### `channel_state_changed` - -A notification for topic `channel_state_changed` is sent every time a channel changes its state. The notification includes the `peer_id` and `channel_id`, the old and new channel states, the type of `cause` and a `message`. - -```json -{ - "channel_state_changed": { - "peer_id": "03bc9337c7a28bb784d67742ebedd30a93bacdf7e4ca16436ef3798000242b2251", - "channel_id": "a2d0851832f0e30a0cf778a826d72f077ca86b69f72677e0267f23f63a0599b4", - "short_channel_id" : "561820x1020x1", - "timestamp":"2023-01-05T18:27:12.145Z", - "old_state": "CHANNELD_NORMAL", - "new_state": "CHANNELD_SHUTTING_DOWN", - "cause" : "remote", - "message" : "Peer closes channel" - } -} -``` - -A `cause` can have the following values: - -- "unknown" Anything other than the reasons below. Should not happen. -- "local" Unconscious internal reasons, e.g. dev fail of a channel. 
-- "user" The operator or a plugin opened or closed a channel by intention. -- "remote" The remote closed or funded a channel with us by intention. -- "protocol" We need to close a channel because of bad signatures and such. -- "onchain" A channel was closed onchain, while we were offline. - -Most state changes are caused subsequentially for a prior state change, e.g. "_CLOSINGD\_COMPLETE_" is followed by "_FUNDING\_SPEND\_SEEN_". Because of this, the `cause` reflects the last known reason in terms of local or remote user interaction, protocol reasons, etc. More specifically, a `new_state` "_FUNDING\_SPEND_SEEN_" will likely _not_ have "onchain" as a `cause` but some value such as "REMOTE" or "LOCAL" depending on who initiated the closing of a channel. - -Note: If the channel is not closed or being closed yet, the `cause` will reflect which side "remote" or "local" opened the channel. - -Note: If the cause is "onchain" this was very likely a conscious decision of the remote peer, but we have been offline. - -### `connect` - -A notification for topic `connect` is sent every time a new connection to a peer is established. `direction` is either `"in"` or `"out"`. - -```json -{ - "connect" : { - "address" : { - "address" : "127.0.0.1", - "port" : 38012, - "type" : "ipv4" - }, - "direction" : "in", - "id" : "022d223620a359a47ff7f7ac447c85c46c923da53389221a0054c11c1e3ca31d59" - } -} -``` - -### `disconnect` - -A notification for topic `disconnect` is sent every time a connection to a peer was lost. - -```json -{ - "disconnect": { - "id": "02f6725f9c1c40333b67faea92fd211c183050f28df32cac3f9d69685fe9665432" - } -} -``` - -### `custommsg` - -A notification for topic `custommsg` is sent when the node receives a `custommsg`. 
-```json -{ - "custommsg" : { - "peer_id" : "02f6725f9c1c40333b67faea92fd211c183050f28df32cac3f9d69685fe9665432", - "payload" : "1337ffffffff" - } -} -``` - -This `payload` represents a `custommsg` that was send by the peer whose `node_id` matches -`peer_id`. The message has type `0x1337` and contents `ffffffff`. - -To avoid conflicts with internal state-tracking, unexpected disconnections and channel-closures -the messages are currently limited to odd-numbered messages that can be safely ignored by -other nodes (see ["it's ok to be odd" in the specification](https://github.com/lightning/bolts/blob/c74a3bbcf890799d343c62cb05fcbcdc952a1cf3/01-messaging.md#lightning-message-format) -for details). - -The plugin must implement the parsing of the message including the type prefix. - -### `invoice_payment` - -A notification for topic `invoice_payment` is sent every time an invoice is paid. - -```json -{ - "invoice_payment": { - "label": "unique-label-for-invoice", - "preimage": "0000000000000000000000000000000000000000000000000000000000000000", - "msat": 10000, - } -} - -``` - -Before version `23.11` the `msat` field was a string with msat-suffix, e.g: `"10000msat"`. - -Note that there will be a string member "outpoint" ("txid:outnum") if -the payment was onchain (possible with the *invoices-onchain-fallback* -config option). - -### `invoice_creation` - -A notification for topic `invoice_creation` is sent every time an invoice is created. - -```json -{ - "invoice_creation": { - "label": "unique-label-for-invoice", - "preimage": "0000000000000000000000000000000000000000000000000000000000000000", - "msat": 10000 - } -} -``` - -Before version `23.11` the `msat` field was a string with msat-suffix, e.g: `"10000msat"`. - -### `warning` - -A notification for topic `warning` is sent every time a new `BROKEN`/`UNUSUAL` level(in plugins, we use `error`/`warn`) log generated, which means an unusual/borken thing happens, such as channel failed, message resolving failed... 
-
-```json
-{
-  "warning": {
-    "level": "warn",
-    "time": "1559743608.565342521",
-    "source": "lightningd(17652): 0821f80652fb840239df8dc99205792bba2e559a05469915804c08420230e23c7c chan #7854:",
-    "log": "Peer permanent failure in CHANNELD_NORMAL: lightning_channeld: sent ERROR bad reestablish dataloss msg"
-  }
-}
-```
-
-1. `level` is `warn` or `error`: `warn` means something seems bad happened and it's under control, but we'd better check it; `error` means something extremely bad is out of control, and it may lead to a crash;
-2. `time` is the number of seconds since epoch;
-3. `source` means where the event happened, it may have the following forms:
-   `<node_id> chan #<db_id_of_channel>:`, `lightningd(<lightningd_pid>):`,
-   `plugin-<plugin_name>:`, `<daemon_name>(<daemon_pid>):`, `jsonrpc:`,
-   `jcon fd <error_fd_to_jsonrpc>:`, `plugin-manager`;
-4. `log` is the content of the original log entry.
-
-There is also a more general version of this notification called `log`, which has the same payload. This needs to be used with caution, but it is useful for plugins that report logs remotely. For example: using OpenTelemetry.
-
-### `forward_event`
-
-A notification for topic `forward_event` is sent every time the status of a forward payment is set. The json format is the same as the API `listforwards`.
-
-```json
-{
-  "forward_event": {
-    "payment_hash": "f5a6a059a25d1e329d9b094aeeec8c2191ca037d3f5b0662e21ae850debe8ea2",
-    "in_channel": "103x2x1",
-    "out_channel": "103x1x1",
-    "in_msat": 100001001,
-    "out_msat": 100000000,
-    "fee_msat": 1001,
-    "status": "settled",
-    "received_time": 1560696342.368,
-    "resolved_time": 1560696342.556,
-    "preimage": "0000000000000000000000000000000000000000000000000000000000000000"
-  }
-}
-```
-
-or
-
-```json
-{
-  "forward_event": {
-    "payment_hash": "ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff",
-    "in_channel": "103x2x1",
-    "out_channel": "110x1x0",
-    "in_msat": 100001001,
-    "out_msat": 100000000,
-    "fee_msat": 1001,
-    "status": "local_failed",
-    "failcode": 16392,
-    "failreason": "WIRE_PERMANENT_CHANNEL_FAILURE",
-    "received_time": 1560696343.052
-  }
-}
-
-```
-
-- The status includes `offered`, `settled`, `failed` and `local_failed`, and they are all string type in json.
-  - When the forward payment is valid for us, we'll set `offered` and send the forward payment to next hop to resolve;
-  - When the payment forwarded by us gets paid eventually, the forward payment will change the status from `offered` to `settled`;
-  - If payment fails locally (like failing to resolve locally) or the corresponding htlc with next hop fails (like htlc timeout), we will set the status as `local_failed`. `local_failed` may be set before setting `offered` or after setting `offered`. In fact, from the
-  time we receive the htlc from the previous hop, any failure whose cause we can know is treated as `local_failed`. `local_failed` only occurs locally or happens in the htlc between us and next hop;
-    - If `local_failed` is set before `offered`, this means we just received htlc from the previous hop and haven't generated htlc for next hop. 
In this case, the json of `forward_event` sets the fields of `out_msatoshi`, `out_msat`, `fee` and `out_channel` as 0;
-      - Note: In fact, for this case we may not be sure if this incoming htlc represents a pay to us or a payment we need to forward. We just simply treat all incoming htlcs that failed to resolve as `local_failed`.
-    - Only in the `local_failed` case does the json include the `failcode` and `failreason` fields;
-  - `failed` means the payment forwarded by us fails in the latter hops, and the failure isn't related to us, so we don't have access to the fail reason. `failed` must be set after
-  `offered`.
-    - The `failed` case doesn't include the `failcode` and `failreason`
-    fields;
-- `received_time` means when we received the htlc of this payment from the previous peer. It is included in all status cases;
-- `resolved_time` means when the htlc of this payment between us and the next peer was resolved. The resolved result may be success or failure, so only the `settled` and `failed` cases contain `resolved_time`;
-- `preimage` is the 64-hex-char payment preimage revealed when the HTLC was fulfilled. Only present when `status` is `settled`;
-- The `failcode` and `failreason` are defined in [BOLT 4](https://github.com/lightning/bolts/blob/master/04-onion-routing.md#failure-messages).
-
-### `sendpay_success`
-
-A notification for topic `sendpay_success` is sent every time a sendpay succeeds (with `complete` status). The json is the same as the return value of the commands `sendpay`/`waitsendpay` when these commands succeed.
-
-```json
-{
-  "sendpay_success": {
-    "id": 1,
-    "payment_hash": "5c85bf402b87d4860f4a728e2e58a2418bda92cd7aea0ce494f11670cfbfb206",
-    "destination": "035d2b1192dfba134e10e540875d366ebc8bc353d5aa766b80c090b39c3a5d885d",
-    "amount_msat": 100000000,
-    "amount_sent_msat": 100001001,
-    "created_at": 1561390572,
-    "status": "complete",
-    "payment_preimage": "9540d98095fd7f37687ebb7759e733934234d4f934e34433d4998a37de3733ee"
-  }
-}
-```
-
-`sendpay` doesn't wait for the result of sendpay and `waitsendpay` returns the result of sendpay in the specified time or timeout, but `sendpay_success` will always return the result whenever sendpay succeeds if it was subscribed.
-
-### `sendpay_failure`
-
-A notification for topic `sendpay_failure` is sent every time a sendpay completes with `failed` status. The JSON is the same as the return value of the commands `sendpay`/`waitsendpay` when these commands fail.
-
-```json
-{
-  "sendpay_failure": {
-    "code": 204,
-    "message": "failed: WIRE_UNKNOWN_NEXT_PEER (reply from remote)",
-    "data": {
-      "id": 2,
-      "payment_hash": "9036e3bdbd2515f1e653cb9f22f8e4c49b73aa2c36e937c926f43e33b8db8851",
-      "destination": "035d2b1192dfba134e10e540875d366ebc8bc353d5aa766b80c090b39c3a5d885d",
-      "amount_msat": 100000000,
-      "amount_sent_msat": 100001001,
-      "created_at": 1561395134,
-      "status": "failed",
-      "erring_index": 1,
-      "failcode": 16394,
-      "failcodename": "WIRE_UNKNOWN_NEXT_PEER",
-      "erring_node": "022d223620a359a47ff7f7ac447c85c46c923da53389221a0054c11c1e3ca31d59",
-      "erring_channel": "103x2x1",
-      "erring_direction": 0
-    }
-  }
-}
-```
-
-`sendpay` doesn't wait for the result of sendpay and `waitsendpay` returns the result of sendpay in the specified time or timeout, but `sendpay_failure` will always return the result whenever sendpay fails if it was subscribed.
-
-### `coin_movement`
-
-A notification for topic `coin_movement` is sent to record the movement of coins. It is only triggered by finalized ledger updates, i.e. 
only definitively resolved HTLCs or confirmed bitcoin transactions. - -```json -{ - "coin_movement": { - "version":2, - "node_id":"03a7103a2322b811f7369cbb27fb213d30bbc0b012082fed3cad7e4498da2dc56b", - "type":"chain_mvt", - "account_id":"wallet", - "originating_account": "wallet", // (`chain_mvt` only, optional) - "txid":"0159693d8f3876b4def468b208712c630309381e9d106a9836fa0a9571a28722", // (`chain_mvt` only, optional) - "utxo_txid":"0159693d8f3876b4def468b208712c630309381e9d106a9836fa0a9571a28722", // (`chain_mvt` only) - "vout":1, // (`chain_mvt` only) - "payment_hash": "xxx", // (either type, optional on both) - "part_id": 0, // (`channel_mvt` only, optional) - "group_id": 0, // (`channel_mvt` only, optional) - "credit_msat":2000000000, - "debit_msat":0, - "output_msat": 2000000000, // ('chain_mvt' only) - "output_count": 2, // ('chain_mvt' only, typically only channel closes) - "fees_msat": 382, // ('channel_mvt' only) - "primary_tag": "deposit", - "extra_tags": [], - "blockheight":102, // 'chain_mvt' only - "timestamp":1585948198, - "coin_type":"bc" - } -} -``` - -`version` indicates which version of the coin movement data struct this notification adheres to. - -`node_id` specifies the node issuing the coin movement. - -`type` marks the underlying mechanism which moved these coins. There are two 'types' of `coin_movements`: - -- `channel_mvt`s, which occur as a result of htlcs being resolved and, -- `chain_mvt`s, which occur as a result of bitcoin txs being mined. - -`account_id` is the name of this account. The node's wallet is named 'wallet', all channel funds' account are the channel id. - -`originating_account` is the account that this movement originated from. -_Only_ tagged on external events (deposits/withdrawals to an external party). - -`txid` is the transaction id of the bitcoin transaction that triggered this ledger event. `utxo_txid` and `vout` identify the bitcoin output which triggered this notification. (`chain_mvt` only). 
Notifications tagged `journal_entry` do not have a `utxo_txid` as they're not represented in the utxo set.
-
-`payment_hash` is the hash of the preimage used to move this payment. Only present for HTLC mediated moves (both `chain_mvt` and `channel_mvt`). A `chain_mvt` will have a `payment_hash` iff it's recording an htlc that was fulfilled onchain.
-
-`part_id` and `group_id` are identifiers for parts of a multi-part payment. Useful for aggregating payments for an invoice or to indicate why a payment hash appears multiple times. `channel_mvt` only.
-
-`credit` and `debit` are millisatoshi denominated amounts of the fund movement. A
-`credit` is funds deposited into an account; a `debit` is funds withdrawn.
-
-`output_value` is the total value of the on-chain UTXO. Note that for channel opens/closes the total output value will not necessarily correspond to the amount that's credited/debited.
-
-`output_count` is the total outputs to expect for a channel close. Useful for figuring out when every onchain output for a close has been resolved.
-
-`fees` is an HTLC annotation for the amount of fees either paid or earned. For "invoice" tagged events, the fees are the total fees paid to send that payment. The end amount can be found by subtracting the total fees from the `debited` amount. For "routed" tagged events, both the debit/credit contain fees. Technically routed debits are the 'fee generating' event, however we include them on routed credits as well.
-
-`primary_tag` is a movement descriptor. Current primary tags are as follows:
-
-- `deposit`: funds deposited
-- `withdrawal`: funds withdrawn
-- `penalty`: funds paid or gained from a penalty tx.
-- `invoice`: funds paid to or received from an invoice.
-- `routed`: funds routed through this node.
-- `pushed`: funds pushed to peer.
-- `channel_open` : channel is opened, initial channel balance
-- `channel_close`: channel is closed, final channel balance
-- `delayed_to_us`: on-chain output to us, spent back into our wallet
-- `htlc_timeout`: on-chain htlc timeout output
-- `htlc_fulfill`: on-chain htlc fulfill output
-- `htlc_tx`: on-chain htlc tx has happened
-- `to_wallet`: output being spent into our wallet
-- `anchor`: an anchor output
-- `to_them`: output intended to peer's wallet
-- `penalized`: output we've 'lost' due to a penalty (failed cheat attempt)
-- `stolen`: output we've 'lost' due to peer's cheat
-- `to_miner`: output we've burned to miner (OP_RETURN)
-- `lease_fee`: amount paid as lease fee
-- `channel_proposed`: a zero-conf channel
-
-`extra_tags` is zero or more additional tags. Current extra tags are as follows:
-
-- `ignored`: output is being ignored
-- `opener`: tags `channel_open` or `channel_proposed`, we are the channel opener
-- `stealable`: funds can be taken by the other party
-- `leased`: tags `channel_open` or `channel_proposed`, channel contains leased funds
-- `splice`: a channel close due to splice operation.
-
-`blockheight` is the block the txid is included in. For `channel_mvt`s this will be null, as will the blockheight for withdrawals to external parties (we issue these events when we send the tx containing them, before they're included in the chain).
-
-The `timestamp` is seconds since Unix epoch of the node's machine time at the time lightningd broadcasts the notification.
-
-`coin_type` is the BIP173 name for the coin which moved.
-
-### `balance_snapshot`
-
-Emitted after we've caught up to the chain head on first start. Lists all current accounts (`account_id` matches the `account_id` emitted from `coin_movement`). Useful for checkpointing account balances.
- -```json -{ - "balance_snapshot": [ - { - 'node_id': '035d2b1192dfba134e10e540875d366ebc8bc353d5aa766b80c090b39c3a5d885d', - 'blockheight': 101, - 'timestamp': 1639076327, - 'accounts': [ - { - 'account_id': 'wallet', - 'balance': '0msat', - 'coin_type': 'bcrt' - } - ] - }, - { - 'node_id': '035d2b1192dfba134e10e540875d366ebc8bc353d5aa766b80c090b39c3a5d885d', - 'blockheight': 110, - 'timestamp': 1639076343, - 'accounts': [ - { - 'account_id': 'wallet', - 'balance': '995433000msat', - 'coin_type': 'bcrt' - }, { - 'account_id': '5b65c199ee862f49758603a5a29081912c8816a7c0243d1667489d244d3d055f', - 'balance': '500000000msat', - 'coin_type': 'bcrt' - } - ] - } - ] -} -``` - -### `block_added` - -Emitted after each block is received from bitcoind, either during the initial sync or throughout the node's life as new blocks appear. - -```json -{ - "block_added": { - "hash": "000000000000000000034bdb3c01652a0aa8f63d32f949313d55af2509f9d245", - "height": 753304 - } -} -``` - -### `openchannel_peer_sigs` - -When opening a channel with a peer using the collaborative transaction protocol `opt_dual_fund`), this notification is fired when the peer sends us their funding transaction signatures, `tx_signatures`. We update the in-progress PSBT and return it here, with the peer's signatures attached. 
- -```json -{ - "openchannel_peer_sigs": { - "channel_id": "252d1b0a1e5789...", - "signed_psbt": "cHNidP8BAKgCAAAAAQ+y+61AQAAAAD9////AzbkHAAAAAAAFgAUwsyrFxwqW+natS7EG4JYYwJMVGZQwwAAAAAAACIAIKYE2s4YZ+RON6BB5lYQESHR9cA7hDm6/maYtTzSLA0hUMMAAAAAAAAiACBbjNO5FM9nzdj6YnPJMDU902R2c0+9liECwt9TuQiAzWYAAAAAAQDfAgAAAAABARtaSZufCbC+P+/G23XVaQ8mDwZQFW1vlCsCYhLbmVrpAAAAAAD+////AvJs5ykBAAAAFgAUT6ORgb3CgFsbwSOzNLzF7jQS5s+AhB4AAAAAABepFNi369DMyAJmqX2agouvGHcDKsZkhwJHMEQCIHELIyqrqlwRjyzquEPvqiorzL2hrvdu9EBxsqppeIKiAiBykC6De/PDElnqWw49y2vTqauSJIVBgGtSc+vq5BQd+gEhAg0f8WITWvA8o4grxNKfgdrNDncqreMLeRFiteUlne+GZQAAAAEBIICEHgAAAAAAF6kU2Lfr0MzIAmapfZqCi68YdwMqxmSHAQcXFgAUAfrZCrzWZpfiWSFkci3kqV6+4WUBCGsCRzBEAiBF31wbNWECsJ0DrPel2inWla2hYpCgaxeVgPAvFEOT2AIgWiFWN0hvUaK6kEnXhED50wQ2fBqnobsRhoy1iDDKXE0BIQPXRURck2JmXyLg2W6edm8nPzJg3qOcina/oF3SaE3czwz8CWxpZ2h0bmluZwEIexhVcpJl8ugM/AlsaWdodG5pbmcCAgABAAz8CWxpZ2h0bmluZwEIR7FutlQgkSoADPwJbGlnaHRuaW5nAQhYT+HjxFBqeAAM/AlsaWdodG5pbmcBCOpQ5iiTTNQEAA==" - } -} -``` - -### `onionmessage_forward_fail` - -When we receive an onion message from a peer (and it's not ratelimited), and we cannot forward it for some reason. There are three reasons why this can be called: - -1. The onion message cannot be parsed. In this case, `outgoing` and `next_node_id`/`next_short_channel_id_dir` fields are missing. -2. The forward was by short_channel_id, but we don't know that id. In this case, `next_node_id` is missing, but `next_short_channel_id_dir` is present. -3. The next peer wasn't connected. In this case, only `next_short_channel_id_dir` is missing. 
- -Example 1: Failure because next node wasn't connected: - -```json -{ - "onionmessage_forward_fail": { - "source": "0266e4598d1d3c415f572a8488830b60f7e744ed9235eb0b1ba93283b315c03518", - "incoming": "0002d32df088bbe2723c619b0bb88bd0969843320f961744816cbcf30ad92d8f8db567687640ffdf492971729befd7016443514bed786fbcde7be8935f09b624868c912674abd3764099f082da36c8431a847cb486f19f4888a34ee19850b6977b2b0019b8570f9a194f952a451711a42cc9b7b26b1f0f099a43f94c2946a0e3b6425abff66f002b50ab16939d0239496309198870fbb91cf2c9e67b6092a843e827e01d44898c28d4e31d1278daef2e6d7dd4ffb7d170d102a198451c46974d93a1e86e1a752db64cd067089e42ae90be27a86dbee3462850fca616bf9aac4cfe704bcf82a4d90c9cab2f107f93c9d96a400f09fd3520d55262bd99880f82525560b4d605dfe40b87ea7a606f7a12fc86b6be45bbfb4fe10686a040523f5270a40dc125466ff2f470ee82f07cf0f55f826e669e265bba2ef4649aaeec91d3e82e02b64bd93e8d3eb3f84bc4734fe36649859d6a573a7ce32ee49ecf419892d24aa0cdf778b48bb60476c29c398b2faa2ad095b76f508e69fb1fc3b26c3495c38b5e01741557bd4f22c5e752209fd7f3bbac53bed7d43c97f59294df67ec3489ae28deafbc6a24675d0d33ec59ed698932ab39d132aea90a0c8a1577dc149769c28d1e709d37ce945e1a1a3587df288725075a55cc881a821abd8179e48183482d909371fc6132cb6eb588d1e1f99a4da625da4b1bf54365ec4426e52ea0bcc8da5b7768f0dd5cd3d16116987d24f9e99e0f1c16e7564bec502b29ff3d1dde44f438bda52d716965751e173458318dfef1bb441ae55a77cd8a3a018c21770581d65edcb50146e4c06a4a545c1629fbcbf9985ab0de8c2d7b16ce67ac97ee4475d9b890a96481bf53960350194404c84ad8ddf78c956b3d5538741dc21030f7d4407ec55a5ec41e142b3613d14955f0b19232a7e2c403aa76b5289c714697d61e2ba6ad33f9e1c68fdb0b0d3dba91170eeb2647eca097b6aed5a86a8af41c026768a03f8f0225e7f7e025152a2fd9238c54a53e95ae3c8d867b41f014ab799365f5f12c214b91b7df04ad7055930fc652b24ba4bdaa509002dee61a49b80323e5a8b576beefa50149adc9ea55d00799cdf97efb0c60c9c05e812d203034bd0c3c3405d53e22e15b9c543f7655a327ddb9879ba7959215f1562d974447ecd5ea08fbc8665619f5aee259ebf6f2dd3f851bda06861817d7751126c48beba46b63f87aacba344be60ba437b677fd6ca997848c00a79377425f2c70ea4097e29a06028bdf8d
34eeda515682e148832af2ee8efa326997a7f834609363114e1015115c37c8a59b2fd18554e59a6cd049acf95ead7023d2f4654fd938f821324d6cc87161b202dfc5b69803d4b3b0f8dcd5d2eb9058973027966ea48f1b63766074fceb5827d7da4fcd50d554f0a971fdfb7760b65cf413e5653065b1b728c09f1c75aaf635a6f350a04163af02e51031642439486c623f71e78ab9141c09afcc9963808ec063bbc393163e91e21dea1d6543da8c27dcc37bd68d90fb5bff3912caf29f5c9c0398f8a4384a3bc75213cac334e0f078034fbaf0ae541fbe20da77404759c72d036750f30247cd2b9317e71dea7907047457b010acfdc17ae4671c7fcc7800031a4473f924fcc9483deaa912a838c90eae30a96355cc34303f7cd4146fcefb2cc81359adcfa60b5f5a67ee203bb21d1a6a75a7315fe25fd53b729c9e32b17eef2fda0e4e00e357a55bb4c97e82f39cf386e3c40b8a1e6aae62654ad0d050e23030061ea4baf5d3fa3395d146072e30c48ff7f7ad36199774baa8c26f0e17d26c340f294c64cdbd001929a46bd0b58d472d630bba53b848eddcec0a4e3ca098e3350c022011b2ca57719e9ba10a185e2fe0959bb4796f8806642d01700c1a5c617ae68f3ec9447a40b8751bb43b4eb1052fa0f35d", - "path_key": "036fb5acc4ddcd66bebbc00831e856720f92255017ae200bfafaa2f5fb23aa74e2", - "outgoing": 
"02010221795ac7fd20c3905ced2317c9455adcdb4166be10fd0b10b9408373c8db813c05560002da9320b96766a2b754923854bf99aa9cfe96263e24ead7d0f027b4941702fc6fdf2878caab06852f5f20857699a81c421e784a744d4d0d3706d328a8ff6c61a6d07e4f4496c6ace9b34d860c325cd0676fa7251f9fddf572fb454f0738ee3d16cb88045f325e88ba804936789539a7250584a6611a5b51d3d4c41ad4ca9d1988cdb0c32adae4261c78da204ea6123a0c3afa7c9b6891c7132fedf07f7cedfb0faa685f0fc91be657169983403a0e9d62558e0eb28456ca3f26a272cf447c2c417e34346a3a175abac1af534fa7c1d5427e2ad9343266f7edbb48f9bcb0ee4afca808572606a5a23cda7e54936ea0691e49f065b02cac3dfdb84e248ff7c69b8bd31345b295c1f58b572f72b8453bc434dc1744c8abf22d3bbda741be411fbbb7ec51fb66f229deacd56f180f92c12bd62cad53fc695aa41305b2a5751f449c361061e417a6d00a12d27d9fd0a043a40d60fd6e28f37096cb9f9f5000b088cba346a2d6d6d9db1d8f5144a625b5dd7204392f0ff4c5a2e92e8e787d336a6090c94d7f60668807429dcce5af39b8688d8cc48d1618de9b48219d3ddfa0ad4e5712966267a4bd41d8d1135e594b1d5b4de4050e46b1bc10cde4f0a401e0b14edd74b675b56f0177f713c89ec963cc97896c7aa918339104ae7eec6a16f9c08950f2d6f7f54bb8da2700e88825fead9d463b47bb9f45c11bdfeda92b5afb12942d162392ebc8e403b1c89838c772d4f6e737ff5e0eb03887a996b2f5591fec34da4eb2330e0b09b763f1165f098b204b590250f5013edbdc51fe04cd3ac9c412b0f6f7af9f88ddde58ae17e21466f3a71ab55fd557b7d52e8da8d62800313d13447ff92c557f19b52acaab60223cb7fb36ebc4b5279f097c0710fa86385e8ff4544515feb53fa0a6385f47f57e3b690e198ffc77be73ef25ef00676fdbb8750fecbc2bfdc081a0a3a2340dae8e3810f9fe7f6c365ec1903f2d4f48017b6d591e87ef148da5e41f80c02a1d4747b0e69934abf3ed57af48918b2d2facda94ce8759c2d98663e33ac1a5e293479332389d85b25cd69484d25a1e52d93cde5812bc5b69439e03339b595c4ee0035195c1a73d152a1763b7df77a48f36781c719c8c482fce687792c9fc5b2a9b51f679f82c4327b6478922ee47ee6524cb4aa63121222fa4762fa7bb6798444522030aa2c27b75cbf6f20d802e321d9b648d7f556a0d3be3e61d06a02f800c927fc15027a44d0132cf277083de6118ac163fa47d662b3274a00f0b561248e350313fbd446eb495ab9503b749ca0126b5690755ca43372db968ac7a2c41aa7f019184c3208b40e3ebd2b5d38e33cb0435b3ad2d5eec77101
ae26b113c07c3044da335e57378c6cf1b2d3339d49244cb4d0f6982505ec06b85aedc91c241cfc429628fca8756b4c172f4af35e73ee57f650d0e049100d2664d016571e83ce06929ec37dc77bbd3bad59435ff2406084b24165b94704d8df16577b7100a41503a247f60f4f6a58e3fce6e789e19e5b04c64ca30e09207bb8b556db17dda1d00f7c47c391086247b63388275f2018f29d7bfd33dac7f73924f6c2e50e0d26a6f7f3ca19156e092a1f13d9205fec58d33f5e18360ab295c7798475229a95b56af4df9035e676a0bed91faa8df5b2e1131ba7d8b9155b12cb1358ae5d739893503bce95540dac5bb377660ef74bd0da5a2f655db5ecc785143cec2dba84a5208fa711cf680f027259efcfaad64e20daf8cc4ad4396296f9c8fa51e20dd457594d26fbe1f36278e5483401ad158363ea43bdae7595c434a4af3d47f25d61fee9a996bde1a018f544dfe7cb4b986c55f5d5d6783efc9078e423f7855be7415764a8b5a5fe350032f16a3b4f18db2062a5ae6446dffbb346f5429a30d9a13c7736af009ef0b9c64defca7d17bbabac9", - "next_node_id": "035d2b1192dfba134e10e540875d366ebc8bc353d5aa766b80c090b39c3a5d885d" - } -} -``` - -Example 2: failed because we don't know short-channel `1x2x3`: - -```json -{ - "onionmessage_forward_fail": { - "source": "0266e4598d1d3c415f572a8488830b60f7e744ed9235eb0b1ba93283b315c03518", - "incoming": 
"0002d32df088bbe2723c619b0bb88bd0969843320f961744816cbcf30ad92d8f8db567687640ffdf492971729befd7016443514bed786fbcde7be8935f09b624868c912674abd3764099f082da36c8431a847cb486f19f4888a34ee19850b6977b2b0019b8570f9a194f952a451711a42cc9b7b26b1f0f099a43f94c2946a0e3b6425abff66f002b50ab16939d0239496309198870fbb91cf2c9e67b6092a843e827e01d44898c28d4e31d1278daef2e6d7dd4ffb7d170d102a198451c46974d93a1e86e1a752db64cd067089e42ae90be27a86dbee3462850fca616bf9aac4cfe704bcf82a4d90c9cab2f107f93c9d96a400f09fd3520d55262bd99880f82525560b4d605dfe40b87ea7a606f7a12fc86b6be45bbfb4fe10686a040523f5270a40dc125466ff2f470ee82f07cf0f55f826e669e265bba2ef4649aaeec91d3e82e02b64bd93e8d3eb3f84bc4734fe36649859d6a573a7ce32ee49ecf419892d24aa0cdf778b48bb60476c29c398b2faa2ad095b76f508e69fb1fc3b26c3495c38b5e01741557bd4f22c5e752209fd7f3bbac53bed7d43c97f59294df67ec3489ae28deafbc6a24675d0d33ec59ed698932ab39d132aea90a0c8a1577dc149769c28d1e709d37ce945e1a1a3587df288725075a55cc881a821abd8179e48183482d909371fc6132cb6eb588d1e1f99a4da625da4b1bf54365ec4426e52ea0bcc8da5b7768f0dd5cd3d16116987d24f9e99e0f1c16e7564bec502b29ff3d1dde44f438bda52d716965751e173458318dfef1bb441ae55a77cd8a3a018c21770581d65edcb50146e4c06a4a545c1629fbcbf9985ab0de8c2d7b16ce67ac97ee4475d9b890a96481bf53960350194404c84ad8ddf78c956b3d5538741dc21030f7d4407ec55a5ec41e142b3613d14955f0b19232a7e2c403aa76b5289c714697d61e2ba6ad33f9e1c68fdb0b0d3dba91170eeb2647eca097b6aed5a86a8af41c026768a03f8f0225e7f7e025152a2fd9238c54a53e95ae3c8d867b41f014ab799365f5f12c214b91b7df04ad7055930fc652b24ba4bdaa509002dee61a49b80323e5a8b576beefa50149adc9ea55d00799cdf97efb0c60c9c05e812d203034bd0c3c3405d53e22e15b9c543f7655a327ddb9879ba7959215f1562d974447ecd5ea08fbc8665619f5aee259ebf6f2dd3f851bda06861817d7751126c48beba46b63f87aacba344be60ba437b677fd6ca997848c00a79377425f2c70ea4097e29a06028bdf8d34eeda515682e148832af2ee8efa326997a7f834609363114e1015115c37c8a59b2fd18554e59a6cd049acf95ead7023d2f4654fd938f821324d6cc87161b202dfc5b69803d4b3b0f8dcd5d2eb9058973027966ea48f1b63766074fceb5827d7da4fcd50d
554f0a971fdfb7760b65cf413e5653065b1b728c09f1c75aaf635a6f350a04163af02e51031642439486c623f71e78ab9141c09afcc9963808ec063bbc393163e91e21dea1d6543da8c27dcc37bd68d90fb5bff3912caf29f5c9c0398f8a4384a3bc75213cac334e0f078034fbaf0ae541fbe20da77404759c72d036750f30247cd2b9317e71dea7907047457b010acfdc17ae4671c7fcc7800031a4473f924fcc9483deaa912a838c90eae30a96355cc34303f7cd4146fcefb2cc81359adcfa60b5f5a67ee203bb21d1a6a75a7315fe25fd53b729c9e32b17eef2fda0e4e00e357a55bb4c97e82f39cf386e3c40b8a1e6aae62654ad0d050e23030061ea4baf5d3fa3395d146072e30c48ff7f7ad36199774baa8c26f0e17d26c340f294c64cdbd001929a46bd0b58d472d630bba53b848eddcec0a4e3ca098e3350c022011b2ca57719e9ba10a185e2fe0959bb4796f8806642d01700c1a5c617ae68f3ec9447a40b8751bb43b4eb1052fa0f35d", - "path_key": "036fb5acc4ddcd66bebbc00831e856720f92255017ae200bfafaa2f5fb23aa74e2", - "outgoing": "02010221795ac7fd20c3905ced2317c9455adcdb4166be10fd0b10b9408373c8db813c05560002da9320b96766a2b754923854bf99aa9cfe96263e24ead7d0f027b4941702fc6fdf2878caab06852f5f20857699a81c421e784a744d4d0d3706d328a8ff6c61a6d07e4f4496c6ace9b34d860c325cd0676fa7251f9fddf572fb454f0738ee3d16cb88045f325e88ba804936789539a7250584a6611a5b51d3d4c41ad4ca9d1988cdb0c32adae4261c78da204ea6123a0c3afa7c9b6891c7132fedf07f7cedfb0faa685f0fc91be657169983403a0e9d62558e0eb28456ca3f26a272cf447c2c417e34346a3a175abac1af534fa7c1d5427e2ad9343266f7edbb48f9bcb0ee4afca808572606a5a23cda7e54936ea0691e49f065b02cac3dfdb84e248ff7c69b8bd31345b295c1f58b572f72b8453bc434dc1744c8abf22d3bbda741be411fbbb7ec51fb66f229deacd56f180f92c12bd62cad53fc695aa41305b2a5751f449c361061e417a6d00a12d27d9fd0a043a40d60fd6e28f37096cb9f9f5000b088cba346a2d6d6d9db1d8f5144a625b5dd7204392f0ff4c5a2e92e8e787d336a6090c94d7f60668807429dcce5af39b8688d8cc48d1618de9b48219d3ddfa0ad4e5712966267a4bd41d8d1135e594b1d5b4de4050e46b1bc10cde4f0a401e0b14edd74b675b56f0177f713c89ec963cc97896c7aa918339104ae7eec6a16f9c08950f2d6f7f54bb8da2700e88825fead9d463b47bb9f45c11bdfeda92b5afb12942d162392ebc8e403b1c89838c772d4f6e737ff5e0eb03887a996b2f5591fec34da4
eb2330e0b09b763f1165f098b204b590250f5013edbdc51fe04cd3ac9c412b0f6f7af9f88ddde58ae17e21466f3a71ab55fd557b7d52e8da8d62800313d13447ff92c557f19b52acaab60223cb7fb36ebc4b5279f097c0710fa86385e8ff4544515feb53fa0a6385f47f57e3b690e198ffc77be73ef25ef00676fdbb8750fecbc2bfdc081a0a3a2340dae8e3810f9fe7f6c365ec1903f2d4f48017b6d591e87ef148da5e41f80c02a1d4747b0e69934abf3ed57af48918b2d2facda94ce8759c2d98663e33ac1a5e293479332389d85b25cd69484d25a1e52d93cde5812bc5b69439e03339b595c4ee0035195c1a73d152a1763b7df77a48f36781c719c8c482fce687792c9fc5b2a9b51f679f82c4327b6478922ee47ee6524cb4aa63121222fa4762fa7bb6798444522030aa2c27b75cbf6f20d802e321d9b648d7f556a0d3be3e61d06a02f800c927fc15027a44d0132cf277083de6118ac163fa47d662b3274a00f0b561248e350313fbd446eb495ab9503b749ca0126b5690755ca43372db968ac7a2c41aa7f019184c3208b40e3ebd2b5d38e33cb0435b3ad2d5eec77101ae26b113c07c3044da335e57378c6cf1b2d3339d49244cb4d0f6982505ec06b85aedc91c241cfc429628fca8756b4c172f4af35e73ee57f650d0e049100d2664d016571e83ce06929ec37dc77bbd3bad59435ff2406084b24165b94704d8df16577b7100a41503a247f60f4f6a58e3fce6e789e19e5b04c64ca30e09207bb8b556db17dda1d00f7c47c391086247b63388275f2018f29d7bfd33dac7f73924f6c2e50e0d26a6f7f3ca19156e092a1f13d9205fec58d33f5e18360ab295c7798475229a95b56af4df9035e676a0bed91faa8df5b2e1131ba7d8b9155b12cb1358ae5d739893503bce95540dac5bb377660ef74bd0da5a2f655db5ecc785143cec2dba84a5208fa711cf680f027259efcfaad64e20daf8cc4ad4396296f9c8fa51e20dd457594d26fbe1f36278e5483401ad158363ea43bdae7595c434a4af3d47f25d61fee9a996bde1a018f544dfe7cb4b986c55f5d5d6783efc9078e423f7855be7415764a8b5a5fe350032f16a3b4f18db2062a5ae6446dffbb346f5429a30d9a13c7736af009ef0b9c64defca7d17bbabac9", - "next_short_channel_id_dir": "1x2x3/1" - } -} - -``` -### `shutdown` - -Send in two situations: lightningd is (almost completely) shutdown, or the plugin `stop` command has been called for this plugin. In both cases the plugin has 30 seconds to exit itself, otherwise it's killed. 
-
-In the shutdown case, plugins should not interact with lightningd except via (id-less) logging or notifications. New rpc calls will fail with error code -5 and (plugin's) responses will be ignored. Because lightningd can crash or be killed, a plugin cannot rely on the shutdown notification always being sent.
-
-```json
-{
-  "shutdown": {
-  }
-}
-```
-
-### `plugin_started` (v25.02 onward)
-
-Emitted when a plugin has completed startup.
-
-```json
-{
-  "plugin_started": {
-    "plugin_name": "example_plugin",
-    "plugin_path": "/path/to/example_plugin.py",
-    "methods": [
-      "example_method1",
-      "example_method2",
-      "example_method3"
-    ]
-  }
-}
-```
-
-Where:
-
-- `plugin_name`: The short name of the plugin.
-- `plugin_path`: The full file path to the plugin executable.
-- `methods`: An array of RPC method names that the plugin registered.
-
-### `plugin_stopped` (v25.02 onward)
-
-Emitted when a plugin has been stopped or has exited.
-
-```json
-{
-  "plugin_stopped": {
-    "plugin_name": "example_plugin",
-    "plugin_path": "/path/to/example_plugin.py",
-    "methods": [
-      "example_method1",
-      "example_method2",
-      "example_method3"
-    ]
-  }
-}
-```
-
-Where:
-
-- `plugin_name`: The short name of the plugin.
-- `plugin_path`: The full file path to the plugin executable.
-- `methods`: An array of RPC method names that the plugin registered.
-
-
-### `pay_part_start` (v25.09 onward)
-
-Emitted by `xpay` when part of a payment begins. `payment_hash` and
-`groupid` uniquely identify this xpay invocation, and `partid` then identifies
-this particular attempt to pay part of that.
-
-`total_payment_msat` is the total amount (usually the invoice amount),
-which will be the same across all parts, and `attempt_msat` is the
-amount being delivered to the destination by this part.
-
-Each element in `hops` shows the amount going into the node (i.e. with
-fees, `channel_in_msat`) and the amount we're telling it to send
-to the other end (`channel_out_msat`). 
The `channel_out_msat` will
-be equal to the next `channel_in_msat`. The final
-`channel_out_msat` will be equal to the `attempt_msat`.
-
-The example shows a payment from this node via 103x1x0 (direction 1) to 022d223620a359a47ff7f7ac447c85c46c923da53389221a0054c11c1e3ca31d59, then via 103x2x0 (direction 0) to 035d2b1192dfba134e10e540875d366ebc8bc353d5aa766b80c090b39c3a5d885d.
-
-```json
-{
-  "jsonrpc": "2.0",
-  "method": "pay_part_start",
-  "params": {
-    "origin": "cln-xpay",
-    "payment_hash": "651b28004d41cf0dc8e39a0b3d905651a7b012d03d81199fde09314700cb5a62",
-    "groupid": 5793910575598463611,
-    "partid": 1,
-    "total_payment_msat": 5000000,
-    "attempt_msat": 5000000,
-    "hops": [
-      {
-        "next_node": "022d223620a359a47ff7f7ac447c85c46c923da53389221a0054c11c1e3ca31d59",
-        "short_channel_id": "103x1x0",
-        "direction": 1,
-        "channel_in_msat": 5000051,
-        "channel_out_msat": 5000051
-      },
-      {
-        "next_node": "035d2b1192dfba134e10e540875d366ebc8bc353d5aa766b80c090b39c3a5d885d",
-        "short_channel_id": "103x2x0",
-        "direction": 0,
-        "channel_in_msat": 5000051,
-        "channel_out_msat": 5000000
-      }
-    ]
-  }
-}
-```
-
-### `pay_part_end` (v25.09 onward)
-
-Emitted by `xpay` when part of a payment ends. `payment_hash`, `groupid` and `partid`
-will match a previous `pay_part_start`.
-
-`status` will be "success" or "failure". `duration` will be a number of seconds, with 9 decimal places. This is the time from `xpay` telling lightningd to send the onion to when `xpay` processes the response.
-
-If `status` is "failure", there will always be an `error_message`: the other fields below
-will be missing in the unusual case where the error onion is corrupted.
-
-`failed_node_id`: If it's a non-local error, the source of the error.
-`failed_short_channel_id`: if it's not the final node, the channel it's complaining about.
-`failed_direction`: if it's not the final node, the channel direction.
-`failed_msg`: the decrypted onion message, in hex, if it was valid.
-`error_code`: the error code returned (present unless onion was corrupted). -`error_message`: always present: if `failed_node_id` is present it's just the name of the `error_code`, but otherwise it can be a more informative error from our own node. - -```json -{ - "jsonrpc": "2.0", - "method": "pay_part_end", - "params": { - "origin": "cln-xpay", - "status": "success", - "duration": 0.220209189, - "payment_hash": "651b28004d41cf0dc8e39a0b3d905651a7b012d03d81199fde09314700cb5a62", - "groupid": 5793910575598463611, - "partid": 1 - } -} -``` +Lookup the **[Notification APIs](ref:notification-balance_snapshot)** for details on each notification and their payload. diff --git a/doc/developers-guide/plugin-development/hooks.md b/doc/developers-guide/plugin-development/hooks.md index 8f5bc7df759e..626d9a6f2eb7 100644 --- a/doc/developers-guide/plugin-development/hooks.md +++ b/doc/developers-guide/plugin-development/hooks.md @@ -28,526 +28,4 @@ Hooks are considered to be an advanced feature due to the fact that `lightningd` As a convention, for all hooks, returning the object `{ "result" : "continue" }` results in `lightningd` behaving exactly as if no plugin is registered on the hook. -### `peer_connected` - -This hook is called whenever a peer has connected and successfully completed the cryptographic handshake. The parameters have the following structure: -```json -{ - "peer": { - "id": "03864ef025fde8fb587d989186ce6a4a186895ee44a926bfc370e2c366597a3f8f", - "direction": "in", - "addr": "34.239.230.56:9735", - "features": "" - } -} -``` - -The hook is sparse on information, since the plugin can use the JSON-RPC `listpeers` command to get additional details should they be required. `direction` is either `"in"` or `"out"`. The `addr` field shows the address that we are connected to ourselves, not the gossiped list of known addresses. In particular this means that the port for incoming connections is an ephemeral port, that may not be available for reconnections. 
- -The returned result must contain a `result` member which is either the string `disconnect` or `continue`. If `disconnect` and there's a member `error_message`, that member is sent to the peer before disconnection. - -Note that `peer_connected` is a chained hook. The first plugin that decides to `disconnect` with or without an `error_message` will lead to the subsequent plugins not being called anymore. - -### `recover` - -This hook is called whenever the node is started using the --recovery flag. So basically whenever a user wants to recover their node with a codex32 secret, they can use --recover="codex32secret" to use that secret as their HSM Secret. - -The payload consists of the following information: -```json -{ - "codex32": "cl10leetsllhdmn9m42vcsamx24zrxgs3qrl7ahwvhw4fnzrhve25gvezzyqqjdsjnzedu43ns" -} -``` - -This hook is intended to be used for recovering the node and funds by connecting to some known peers who keep your peer storage backups with them. - -### `commitment_revocation` - -This hook is called whenever a channel state is updated, and the old state was revoked. State updates in Lightning consist of the following steps: - -1. Proposal of a new state commitment in the form of a commitment transaction -2. Exchange of signatures for the agreed upon commitment transaction -3. Verification that the signatures match the commitment transaction -4. Exchange of revocation secrets that could be used to penalize an eventual misbehaving party - -The `commitment_revocation` hook is used to inform the plugin about the state transition being completed, and deliver the penalty transaction. The penalty transaction could then be sent to a watchtower that automatically reacts in case one party attempts to settle using a revoked commitment. 
- -The payload consists of the following information: -```json -{ - "commitment_txid": "58eea2cf538cfed79f4d6b809b920b40bb6b35962c4bb4cc81f5550a7728ab05", - "penalty_tx": "02000000000101...ac00000000", - "channel_id": "fb16398de93e8690c665873715ef590c038dfac5dd6c49a9d4b61dccfcedc2fb", - "commitnum": 21 -} -``` - -Notice that the `commitment_txid` could also be extracted from the sole input of the `penalty_tx`, however it is enclosed so plugins don't have to include the logic to parse transactions. - -Not included are the `htlc_success` and `htlc_failure` transactions that may also be spending `commitment_tx` outputs. This is because these transactions are much more dynamic and have a predictable timeout, allowing wallets to ensure a quick checkin when the CLTV of the HTLC is about to expire. - -The `commitment_revocation` hook is a chained hook, i.e., multiple plugins can register it, and they will be called in the order they were registered in. Plugins should always return `{"result": "continue"}`, otherwise subsequent hook subscribers would not get called. - -### `db_write` - -This hook is called whenever a change is about to be committed to the database, if you are using a SQLITE3 database (the default). This hook will be useless (the `"writes"` field will always be empty) if you are using a PostgreSQL database. - -It is currently extremely restricted: - -1. a plugin registering for this hook should not perform anything that may cause a db operation in response (pretty much, anything but logging). -2. a plugin registering for this hook should not register for other hooks or commands, as these may become intermingled and break rule #1. -3. the hook will be called before your plugin is initialized! - -This hook, unlike all the other hooks, is also strongly synchronous: `lightningd` will stop almost all the other processing until this hook responds. 
-```json -{ - "data_version": 42, - "writes": [ - "PRAGMA foreign_keys = ON" - ] -} -``` - -This hook is intended for creating continuous backups. The intent is that your backup plugin maintains three pieces of information (possibly in separate files): - -1. a snapshot of the database -2. a log of database queries that will bring that snapshot up-to-date -3. the previous `data_version` - -`data_version` is an unsigned 32-bit number that will always increment by 1 each time `db_write` is called. Note that this will wrap around on the limit of 32-bit numbers. - -`writes` is an array of strings, each string being a database query that modifies the database. If the `data_version` above is validated correctly, then you can simply append this to the log of database queries. - -Your plugin **MUST** validate the `data_version`. It **MUST** keep track of the previous `data_version` it got, and: - -1. If the new `data_version` is **_exactly_** one higher than the previous, then this is the ideal case and nothing bad happened and we should save this and continue. -2. If the new `data_version` is **_exactly_** the same value as the previous, then the previous set of queries was not committed. Your plugin **MAY** overwrite the previous set of queries with the current set, or it **MAY** overwrite its entire backup with a new snapshot of the database and the current `writes` array (treating this case as if `data_version` were two or more higher than the previous). -3. If the new `data_version` is **_less than_** the previous, your plugin **MUST** halt and catch fire, and have the operator inspect what exactly happened here. -4. Otherwise, some queries were lost and your plugin **SHOULD** recover by creating a new snapshot of the database: copy the database file, back up the given `writes` array, then delete (or atomically `rename` if in a POSIX filesystem) the previous backups of the database and SQL statements, or you **MAY** fail the hook to abort `lightningd`. 
- -The "rolling up" of the database could be done periodically as well if the log of SQL statements has grown large. - -Any response other than `{"result": "continue"}` will cause lightningd to error without committing to the database! This is the expected way to halt and catch fire. - -`db_write` is a parallel-chained hook, i.e., multiple plugins can register it, and all of them will be invoked simultaneously without regard for order of registration. The hook is considered handled if all registered plugins return `{"result": "continue"}`. If any plugin returns anything else, `lightningd` will error without committing to the database. - -### `invoice_payment` - -This hook is called whenever a valid payment for an unpaid invoice has arrived. -```json -{ - "payment": { - "label": "unique-label-for-invoice", - "preimage": "0000000000000000000000000000000000000000000000000000000000000000", - "msat": 10000 - } -} -``` - -Before version `23.11` the `msat` field was a string with msat-suffix, e.g: `"10000msat"`. - -The hook is deliberately sparse, since the plugin can use the JSON-RPC `listinvoices` command to get additional details about this invoice. It can return a `failure_message` field as defined for final nodes in [BOLT 4](https://github.com/lightning/bolts/blob/master/04-onion-routing.md#failure-messages), a `result` field with the string `reject` to fail it with `incorrect_or_unknown_payment_details`, or a `result` field with the string `continue` to accept the payment. 
- -### `openchannel` - -This hook is called whenever a remote peer tries to fund a channel to us using the v1 protocol, and it has passed basic sanity checks: -```json -{ - "openchannel": { - "id": "03864ef025fde8fb587d989186ce6a4a186895ee44a926bfc370e2c366597a3f8f", - "funding_msat": 100000000, - "push_msat": 0, - "dust_limit_msat": 546000, - "max_htlc_value_in_flight_msat": 18446744073709551615, - "channel_reserve_msat": 1000000, - "htlc_minimum_msat": 0, - "feerate_per_kw": 7500, - "to_self_delay": 5, - "max_accepted_htlcs": 483, - "channel_flags": 1 - } -} -``` - -There may be additional fields, including `shutdown_scriptpubkey` and a hex-string. You can see the definitions of these fields in [BOLT 2's description of the open_channel message](https://github.com/lightning/bolts/blob/master/02-peer-protocol.md#the-open_channel-message). - -The returned result must contain a `result` member which is either the string `reject` or `continue`. If `reject` and there's a member `error_message`, that member is sent to the peer before disconnection. - -For a 'continue'd result, you can also include a `close_to` address, which will be used as the output address for a mutual close transaction. - -e.g. -```json -{ - "result": "continue", - "close_to": "bc1qlq8srqnz64wgklmqvurv7qnr4rvtq2u96hhfg2", - "mindepth": 0, - "reserve": "1234sat" -} -``` - -Note that `close_to` must be a valid address for the current chain, an invalid address will cause the node to exit with an error. - -- `mindepth` is the number of confirmations to require before making the channel usable. Notice that setting this to 0 (`zeroconf`) or some other low value might expose you to double-spending issues, so only lower this value from the default if you trust the peer not to double-spend, or you reject incoming payments, including forwards, until the funding is confirmed. - -- `reserve` is an absolute value for the amount in the channel that the peer must keep on their side. 
This ensures that they always have something to lose, so only lower this below the 1% of funding amount if you trust the peer. The protocol requires this to be larger than the dust limit, hence it will be adjusted to be the dust limit if the specified value is below. - -Note that `openchannel` is a chained hook. Therefore `close_to`, `reserve` will only be evaluated for the first plugin that sets it. If more than one plugin tries to set a `close_to` address an error will be logged. - -### `openchannel2` - -This hook is called whenever a remote peer tries to fund a channel to us using the v2 protocol, and it has passed basic sanity checks: -```json -{ - "openchannel2": { - "id": "03864ef025fde8fb587d989186ce6a4a186895ee44a926bfc370e2c366597a3f8f", - "channel_id": "252d1b0a1e57895e84137f28cf19ab2c35847e284c112fefdecc7afeaa5c1de7", - "their_funding_msat": 100000000, - "dust_limit_msat": 546000, - "max_htlc_value_in_flight_msat": 18446744073709551615, - "htlc_minimum_msat": 0, - "funding_feerate_per_kw": 7500, - "commitment_feerate_per_kw": 7500, - "feerate_our_max": 10000, - "feerate_our_min": 253, - "to_self_delay": 5, - "max_accepted_htlcs": 483, - "channel_flags": 1, - "channel_type": {'bits': [12, 22], 'names': ['static_remotekey/even', 'anchors/even']}, - "locktime": 2453, - "channel_max_msat": 16777215000, - "requested_lease_msat": 100000000, - "lease_blockheight_start": 683990, - "node_blockheight": 683990 - } -} -``` - -There may be additional fields, such as `shutdown_scriptpubkey`. You can see the definitions of these fields in [BOLT 2's description of the open_channel message](https://github.com/lightning/bolts/blob/master/02-peer-protocol.md#the-open_channel-message). - -`requested_lease_msat`, `lease_blockheight_start`, and `node_blockheight` are only present if the opening peer has requested a funding lease, per `option_will_fund`. - -The returned result must contain a `result` member which is either the string `reject` or `continue`. 
If `reject` and there's a member `error_message`, that member is sent to the peer before disconnection. - -For a 'continue'd result, you can also include a `close_to` address, which will be used as the output address for a mutual close transaction; you can include a `psbt` and an `our_funding_msat` to contribute funds, inputs and outputs to this channel open. - -Note that, like `openchannel_init` RPC call, the `our_funding_msat` amount must NOT be accounted for in any supplied output. Change, however, should be included and should use the `funding_feerate_per_kw` to calculate. - -See `plugins/funder.c` for an example of how to use this hook to contribute funds to a channel open. - -e.g. -```json -{ - "result": "continue", - "close_to": "bc1qlq8srqnz64wgklmqvurv7qnr4rvtq2u96hhfg2", - "psbt": "cHNidP8BADMCAAAAAQ+yBipSVZrrw28Oed52hTw3N7t0HbIyZhFdcZRH3+61AQAAAAD9////AGYAAAAAAQDfAgAAAAABARtaSZufCbC+P+/G23XVaQ8mDwZQFW1vlCsCYhLbmVrpAAAAAAD+////AvJs5ykBAAAAFgAUT6ORgb3CgFsbwSOzNLzF7jQS5s+AhB4AAAAAABepFNi369DMyAJmqX2agouvGHcDKsZkhwJHMEQCIHELIyqrqlwRjyzquEPvqiorzL2hrvdu9EBxsqppeIKiAiBykC6De/PDElnqWw49y2vTqauSJIVBgGtSc+vq5BQd+gEhAg0f8WITWvA8o4grxNKfgdrNDncqreMLeRFiteUlne+GZQAAAAEBIICEHgAAAAAAF6kU2Lfr0MzIAmapfZqCi68YdwMqxmSHAQQWABQB+tkKvNZml+JZIWRyLeSpXr7hZQz8CWxpZ2h0bmluZwEIexhVcpJl8ugM/AlsaWdodG5pbmcCAgABAA==", - "our_funding_msat": 39999000 -} -``` - -Note that `close_to` must be a valid address for the current chain, an invalid address will cause the node to exit with an error. - -Note that `openchannel` is a chained hook. Therefore `close_to` will only be evaluated for the first plugin that sets it. If more than one plugin tries to set a `close_to` address an error will be logged. - -### `openchannel2_changed` - -This hook is called when we received updates to the funding transaction from the peer. -```json -{ - "openchannel2_changed": { - "channel_id": "252d1b0a1e57895e841...", - "psbt": "cHNidP8BADMCAAAAAQ+yBipSVZr..." 
- } -} -``` - -In return, we expect a `result` indicated to `continue` and an updated `psbt`. If we have no updates to contribute, return the passed in PSBT. Once no changes to the PSBT are made on either side, the transaction construction negotiation will end and commitment transactions will be exchanged. - -#### Expected Return -```json -{ - "result": "continue", - "psbt": "cHNidP8BADMCAAAAAQ+yBipSVZr..." -} -``` - -See `plugins/funder.c` for an example of how to use this hook to continue a v2 channel open. - -### `openchannel2_sign` - -This hook is called after we've gotten the commitment transactions for a channel open. It expects psbt to be returned which contains signatures for our inputs to the funding transaction. -```json -{ - "openchannel2_sign": { - "channel_id": "252d1b0a1e57895e841...", - "psbt": "cHNidP8BADMCAAAAAQ+yBipSVZr..." - } -} -``` - -In return, we expect a `result` indicated to `continue` and an partially signed `psbt`. - -If we have no inputs to sign, return the passed in PSBT. Once we have also received the signatures from the peer, the funding transaction will be broadcast. - -#### Expected Return -```json -{ - "result": "continue", - "psbt": "cHNidP8BADMCAAAAAQ+yBipSVZr..." -} -``` - -See `plugins/funder.c` for an example of how to use this hook to sign a funding transaction. - -### `rbf_channel` - -Similar to `openchannel2`, the `rbf_channel` hook is called when a peer requests an RBF for a channel funding transaction. 
-```json -{ - "rbf_channel": { - "id": "03864ef025fde8fb587d989186ce6a4a186895ee44a926bfc370e2c366597a3f8f", - "channel_id": "252d1b0a1e57895e84137f28cf19ab2c35847e284c112fefdecc7afeaa5c1de7", - "their_last_funding_msat": 100000000, - "their_funding_msat": 100000000, - "our_last_funding_msat": 100000000, - "funding_feerate_per_kw": 7500, - "feerate_our_max": 10000, - "feerate_our_min": 253, - "channel_max_msat": 16777215000, - "locktime": 2453, - "requested_lease_msat": 100000000 - } -} -``` - -The returned result must contain a `result` member which is either the string `reject` or `continue`. If `reject` and there's a member `error_message`, that member is sent to the peer before disconnection. - -For a 'continue'd result, you can include a `psbt` and an `our_funding_msat` to contribute funds, inputs and outputs to this channel open. - -Note that, like the `openchannel_init` RPC call, the `our_funding_msat` amount must NOT be accounted for in any supplied output. Change, however, should be included and should use the `funding_feerate_per_kw` to calculate. - -#### Return -```json -{ - "result": "continue", - "psbt": "cHNidP8BADMCAAAAAQ+yBipSVZrrw28Oed52hTw3N7t0HbIyZhFdcZRH3+61AQAAAAD9////AGYAAAAAAQDfAgAAAAABARtaSZufCbC+P+/G23XVaQ8mDwZQFW1vlCsCYhLbmVrpAAAAAAD+////AvJs5ykBAAAAFgAUT6ORgb3CgFsbwSOzNLzF7jQS5s+AhB4AAAAAABepFNi369DMyAJmqX2agouvGHcDKsZkhwJHMEQCIHELIyqrqlwRjyzquEPvqiorzL2hrvdu9EBxsqppeIKiAiBykC6De/PDElnqWw49y2vTqauSJIVBgGtSc+vq5BQd+gEhAg0f8WITWvA8o4grxNKfgdrNDncqreMLeRFiteUlne+GZQAAAAEBIICEHgAAAAAAF6kU2Lfr0MzIAmapfZqCi68YdwMqxmSHAQQWABQB+tkKvNZml+JZIWRyLeSpXr7hZQz8CWxpZ2h0bmluZwEIexhVcpJl8ugM/AlsaWdodG5pbmcCAgABAA==", - "our_funding_msat": 39999000 -} -``` - -### `htlc_accepted` - -The `htlc_accepted` hook is called whenever an incoming HTLC is accepted, and its result determines how `lightningd` should treat that HTLC. 
- -The payload of the hook call has the following format: -```json -{ - "peer_id": "02df5ffe895c778e10f7742a6c5b8a0cefbe9465df58b92fadeb883752c8107c8f", - "onion": { - "payload": "", - "short_channel_id": "1x2x3", - "forward_msat": 42, - "outgoing_cltv_value": 500014, - "shared_secret": "0000000000000000000000000000000000000000000000000000000000000000", - "next_onion": "[1365bytes of serialized onion]" - }, - "htlc": { - "short_channel_id": "4x5x6", - "id": 27, - "amount_msat": 43, - "cltv_expiry": 500028, - "cltv_expiry_relative": 10, - "payment_hash": "0000000000000000000000000000000000000000000000000000000000000000", - "extra_tlvs": "fdffff012afe00010001020539" - }, - "forward_to": "0000000000000000000000000000000000000000000000000000000000000000" -} -``` - -For detailed information about each field please refer to [BOLT 04 of the specification](https://github.com/lightning/bolts/blob/master/04-onion-routing.md), the following is just a brief summary: - -- `peer_id`: is the id of the peer that offered us this htlc. -- `onion`: - - `payload` contains the unparsed payload that was sent to us from the sender of the payment. - - `short_channel_id` determines the channel that the sender is hinting should be used next. Not present if we're the final destination. - - `forward_amount` is the amount we should be forwarding to the next hop, and should match the incoming funds in case we are the recipient. - - `outgoing_cltv_value` determines what the CLTV value for the HTLC that we forward to the next hop should be. - - `total_msat` specifies the total amount to pay, if present. - - `payment_secret` specifies the payment secret (which the payer should have obtained from the invoice), if present. - - `next_onion` is the fully processed onion that we should be sending to the next hop as part of the outgoing HTLC. Processed in this case means that we took the incoming onion, decrypted it, extracted the payload destined for us, and serialised the resulting onion again. 
- - `shared_secret` is the shared secret we used to decrypt the incoming onion. It is shared with the sender that constructed the onion. -- `htlc`: - - `short_channel_id` is the channel this payment is coming from. - - `id` is the low-level sequential HTLC id integer as sent by the channel peer. - - `amount` is the amount that we received with the HTLC. This amount minus the `forward_amount` is the fee that will stay with us. - - `cltv_expiry` determines when the HTLC reverts back to the sender. `cltv_expiry` minus `outgoing_cltv_expiry` should be equal or larger than our `cltv_delta` setting. - - `cltv_expiry_relative` hints how much time we still have to claim the HTLC. It is the `cltv_expiry` minus the current `blockheight` and is passed along mainly to avoid the plugin having to look up the current blockheight. - - `payment_hash` is the hash whose `payment_preimage` will unlock the funds and allow us to claim the HTLC. - - `extra_tlvs` is an optional TLV-stream attached to the HTLC. -- `forward_to`: if set, the channel_id we intend to forward this to (will not be present if the short_channel_id was invalid or we were the final destination). - -The hook response must have one of the following formats: -```json -{ - "result": "continue" -} -``` - -This means that the plugin does not want to do anything special and `lightningd` should continue processing it normally, i.e., resolve the payment if we're the recipient, or attempt to forward it otherwise. Notice that the usual checks such as sufficient fees and CLTV deltas are still enforced. - -It can also replace the `onion.payload` by specifying a `payload` in the response. Note that this is always a TLV-style payload, so unlike `onion.payload` there is no length prefix (and it must be at least 4 hex digits long). This will be re-parsed; it's useful for removing onion fields which a plugin doesn't want lightningd to consider. - -It can also specify `forward_to` in the response, replacing the destination. 
This usually only makes sense if it wants to choose an alternate channel to the same next peer, but is useful if the `payload` is also replaced. - -Also, it can specify `extra_tlvs` in the response. This will replace the TLV-stream `update_add_htlc_tlvs` in the `update_add_htlc` message for forwarded htlcs. - -If the node is the final destination, the plugin can also replace the amount of the invoice that belongs to the `payment_hash` by specifying `invoice_msat`. -```json -{ - "result": "fail", - "failure_message": "2002" -} -``` - -`fail` will tell `lightningd` to fail the HTLC with a given hex-encoded `failure_message` (please refer to the [spec](https://github.com/lightning/bolts/blob/master/04-onion-routing.md) for details: `incorrect_or_unknown_payment_details` is the most common). -```json -{ - "result": "fail", - "failure_onion": "[serialized error packet]" -} -``` - -Instead of `failure_message` the response can contain a hex-encoded `failure_onion` that will be used instead (please refer to the [spec](https://github.com/lightning/bolts/blob/master/04-onion-routing.md) for details). This can be used, for example, if you're writing a bridge between two Lightning Networks. Note that `lightningd` will apply the obfuscation step to the value returned here with its own shared secret (and key type `ammag`) before returning it to the previous hop. -```json -{ - "result": "resolve", - "payment_key": "0000000000000000000000000000000000000000000000000000000000000000" -} -``` - -`resolve` instructs `lightningd` to claim the HTLC by providing the preimage matching the `payment_hash` presented in the call. Notice that the plugin must ensure that the `payment_key` really matches the `payment_hash` since `lightningd` will not check and the wrong value could result in the channel being closed. - -> 🚧 -> -> `lightningd` will replay the HTLCs for which it doesn't have a final verdict during startup. 
This means that, if the plugin response wasn't processed before the HTLC was forwarded, failed, or resolved, then the plugin may see the same HTLC again during startup. It is therefore paramount that the plugin is idempotent if it talks to an external system. - -The `htlc_accepted` hook is a chained hook, i.e., multiple plugins can register it, and they will be called in the order they were registered in until the first plugin return a result that is not `{"result": "continue"}`, after which the event is considered to be handled. After the event has been handled the remaining plugins will be skipped. - -### `rpc_command` - -The `rpc_command` hook allows a plugin to take over any RPC command. It sends the received JSON-RPC request to the registered plugin. You can optionally specify a "filters" array, containing the command names you want to intercept: without this, all commands will be sent to this hook. -```json -{ - "rpc_command": { - "id": 3, - "method": "method_name", - "params": { - "param_1": [], - "param_2": {}, - "param_n": "", - } - } -} -``` -which can in turn: - -Let `lightningd` execute the command with -```json -{ - "result" : "continue" -} -``` - -Replace the request made to `lightningd`: -```json -{ - "replace": { - "id": 3, - "method": "method_name", - "params": { - "param_1": [], - "param_2": {}, - "param_n": "", - } - } -} -``` - -Return a custom response to the request sender: -```json -{ - "return": { - "result": { - } - } -} -``` - -Return a custom error to the request sender: -```json -{ - "return": { - "error": { - } - } -} -``` - -Note: The `rpc_command` hook is chainable. If two or more plugins try to replace/result/error the same `method`, only the first plugin in the chain will be respected. Others will be ignored and a warning will be logged. 
- -### `custommsg` - -The `custommsg` plugin hook is the receiving counterpart to the [`sendcustommsg`](ref:sendcustommsg) RPC method and allows plugins to handle messages that are not handled internally. The goal of these two components is to allow the implementation of custom protocols or prototypes on top of a Core Lightning node, without having to change the node's implementation itself. Note that if the hook registration specifies "filters" then that should be a JSON array of message numbers, and the hook will only be called for those. Otherwise, the hook is called for all messages not handled internally. - -The payload for a call follows this format: -```json -{ - "peer_id": "02df5ffe895c778e10f7742a6c5b8a0cefbe9465df58b92fadeb883752c8107c8f", - "payload": "1337ffffffff" -} -``` - -This payload would have been sent by the peer with the `node_id` matching `peer_id`, and the message has type `0x1337` and contents `ffffffff`. Notice that the messages are currently limited to odd-numbered types and must not match a type that is handled internally by Core Lightning. These limitations are in place in order to avoid conflicts with the internal state tracking, and avoiding disconnections or channel closures, since odd-numbered message can be ignored by nodes (see ["it's ok to be odd" in the specification](https://github.com/lightning/bolts/blob/c74a3bbcf890799d343c62cb05fcbcdc952a1cf3/01-messaging.md#lightning-message-format) for details). The plugin must implement the parsing of the message, including the type prefix, since Core Lightning does not know how to parse the message. - -Because this is a chained hook, the daemon expects the result to be `{'result': 'continue'}`. It will fail if something else is returned. - -### `onion_message_recv` and `onion_message_recv_secret` - -These two hooks are almost identical, in that they are called when an onion message is received. 
- -`onion_message_recv` is used for unsolicited messages (where the source knows that it is sending to this node), and `onion_message_recv_secret` is used for messages which use a blinded path we supplied. The latter hook will have a `pathsecret` field, the former never will. - -These hooks are separate, because replies MUST be ignored unless they use the correct path (i.e. `onion_message_recv_secret`, with the expected `pathsecret`). This avoids the source trying to probe for responses without using the designated delivery path. - -The payload for a call follows this format: -```json -{ - "onion_message": { - "pathsecret": "0000000000000000000000000000000000000000000000000000000000000000", - "reply_blindedpath": { - "first_node_id": "02df5ffe895c778e10f7742a6c5b8a0cefbe9465df58b92fadeb883752c8107c8f", - "first_scid": "100x200x300", - "first_scid_dir": 1, - "blinding": "02df5ffe895c778e10f7742a6c5b8a0cefbe9465df58b92fadeb883752c8107c8f", - "hops": [ - { - "blinded_node_id": "02df5ffe895c778e10f7742a6c5b8a0cefbe9465df58b92fadeb883752c8107c8f", - "encrypted_recipient_data": "0a020d0d" - } - ] - }, - "invoice_request": "0a020d0d", - "invoice": "0a020d0d", - "invoice_error": "0a020d0d", - "unknown_fields": [ - { - "number": 12345, - "value": "0a020d0d" - } - ] - } -} -``` - -All fields shown here are optional: in particular, only one of "first_node_id" or the pair "first_scid" and "first_scid_dir" is present. - -We suggest just returning `{"result": "continue"}`; any other result will cause the message not to be handed to any other hooks. +Lookup the **[Hook APIs](ref:hook-commitment_revocation)** for details on each hook's payload and how to respond to them. 
diff --git a/doc/index.rst b/doc/index.rst index f4fae7c6ca5a..6ecc0a945bdb 100644 --- a/doc/index.rst +++ b/doc/index.rst @@ -85,6 +85,21 @@ Core Lightning Documentation getroute getroutes help + hook-commitment_revocation + hook-custommsg + hook-db_write + hook-htlc_accepted + hook-invoice_payment + hook-onion_message_recv + hook-onion_message_recv_secret + hook-openchannel + hook-openchannel2 + hook-openchannel2_changed + hook-openchannel2_sign + hook-peer_connected + hook-rbf_channel + hook-recover + hook-rpc_command injectonionmessage injectpaymentonion invoice @@ -123,6 +138,30 @@ Core Lightning Documentation multifundchannel multiwithdraw newaddr + notification-balance_snapshot + notification-block_added + notification-channel_open_failed + notification-channel_opened + notification-channel_state_changed + notification-coin_movement + notification-connect + notification-custommsg + notification-deprecated_oneshot + notification-disconnect + notification-forward_event + notification-invoice_creation + notification-invoice_payment + notification-log + notification-onionmessage_forward_fail + notification-openchannel_peer_sigs + notification-plugin_started + notification-plugin_stopped + notification-sendpay_failure + notification-sendpay_success + notification-shutdown + notification-warning + notification-xpay_pay_part_end + notification-xpay_pay_part_start notifications offer openchannel_abort diff --git a/tools/fromschema.py b/tools/fromschema.py index dc20aaa03b1f..69237a19e5f1 100755 --- a/tools/fromschema.py +++ b/tools/fromschema.py @@ -5,6 +5,7 @@ from argparse import ArgumentParser import json import re +import os # To maintain the sequence of the before return value (body) and after return value (footer) sections in the markdown file BODY_KEY_SEQUENCE = ['reliability', 'restriction_format', 'permitted_sqlite3_functions', 'treatment_of_types', 'tables', 'notes', 'notifications', 'sharing_runes', 'riskfactor_effect_on_routing', 
'recommended_riskfactor_values', 'optimality', 'randomization'] @@ -87,7 +88,7 @@ def output_conditional_params(conditional_sub_array, condition): def output_type(properties, is_optional): """Add types for request and reponse parameters""" - typename = 'one of' if 'oneOf' in properties else esc_underscores(properties['type']) + typename = 'one of' if 'oneOf' in properties else esc_underscores(properties.get('type', 'any')) if typename == 'array': if 'items' in properties and 'type' in properties['items']: typename += ' of {}s'.format(esc_underscores(properties['items']['type'])) @@ -152,6 +153,9 @@ def output_member(propname, properties, is_optional, indent, print_type=True, pr if 'hidden' in properties and properties['hidden']: return + if ('type' not in properties and 'properties' not in properties and 'oneOf' not in properties and 'untyped' not in properties): + return + if prefix is None: prefix = '- ' + fmt_propname(propname) if propname is not None else '-' output(indent + prefix) @@ -174,16 +178,16 @@ def output_member(propname, properties, is_optional, indent, print_type=True, pr if 'deprecated' in properties: output(' **deprecated in {}, removed after {}**'.format(properties['deprecated'][0], properties['deprecated'][1] if len(properties['deprecated']) > 1 else deprecated_to_deleted(properties['deprecated'][0]))) - if 'added' in properties: + if 'added' in properties and properties['added'] != 'pre-v0.10.1': output(' *(added {})*'.format(properties['added'])) if 'oneOf' in properties and isinstance(properties['oneOf'], list): output(':\n') output_members(properties, indent + ' ') - elif not is_untyped and properties['type'] == 'object': + elif not is_untyped and properties.get('type') == 'object': output(':\n') output_members(properties, indent + ' ') - elif not is_untyped and properties['type'] == 'array': + elif not is_untyped and properties.get('type') == 'array': output(':\n') output_array(properties['items'], indent + ' ') else: @@ -192,11 +196,15 @@ 
def output_member(propname, properties, is_optional, indent, print_type=True, pr def output_array(items, indent): """We've already said it's an array of {type}""" + if not isinstance(items, dict) or items == {}: + output(indent + '- (any type)\n') + return + if 'oneOf' in items and isinstance(items['oneOf'], list): output_members(items, indent + ' ') - elif items['type'] == 'object': + elif items.get('type') == 'object': output_members(items, indent) - elif items['type'] == 'array': + elif items.get('type') == 'array': output(indent + '-') output_type(items, False) output(': {}\n'.format(esc_underscores('\n'.join(items['description']))) if 'description' in items and len(items['description']) > 0 else '\n') @@ -205,10 +213,15 @@ def output_array(items, indent): else: if 'type' in items: output_member(None, items, True, indent) + else: + output(indent + '- (any type)\n') def has_members(sub): """Does this sub have any properties to print?""" + if 'properties' not in sub: + return False + for p in list(sub['properties'].keys()): if len(sub['properties'][p]) == 0: continue @@ -248,10 +261,21 @@ def output_members(sub, indent=''): if 'oneOf' in sub: for oneOfItem in sub['oneOf']: - if 'type' in oneOfItem and oneOfItem['type'] == 'array': + if 'properties' not in oneOfItem and 'type' not in oneOfItem: + continue + + if oneOfItem.get('type') == 'array': output_array(oneOfItem, indent) + elif 'properties' in oneOfItem: + output_members(oneOfItem, indent) else: - output_member(None, oneOfItem, False, indent, False if 'enum' in oneOfItem else True) + output_member( + None, + oneOfItem, + False, + indent, + False if 'enum' in oneOfItem else True + ) # If we have multiple ifs, we have to wrap them in allOf. 
if 'allOf' in sub: @@ -323,76 +347,92 @@ def create_shell_command(rpc, example): output('```\n') -def generate_header(schema): +def generate_header(schema, name): """Generate lines for rpc title and synopsis with request parameters""" - output_title(esc_underscores(''.join([schema['rpc'], ' -- ', schema['title']])), '=', 0, 1) - output_title('SYNOPSIS') - # Add command level warning if exists - if 'warning' in schema: - output('**(WARNING: {})**\n\n'.format(esc_underscores(schema['warning']))) - # generate the rpc command details with request parameters - request = schema['request'] - properties = request['properties'] - toplevels = list(request['properties'].keys()) - output('{} '.format(fmt_propname(schema['rpc']))) - i = 0 - while i < len(toplevels): - # Skip hidden properties - if 'hidden' in properties[toplevels[i]] and properties[toplevels[i]]['hidden']: - i += 1 - continue - # Search for the parameter in 'dependentUpon' array - dependent_upon_obj = request['dependentUpon'] if 'dependentUpon' in request else [] - if toplevels[i] in dependent_upon_obj: - # Output parameters with appropriate separator - output('{}*{}* '.format('' if 'required' in request and toplevels[i] in request['required'] else '[', esc_underscores(toplevels[i]))) - output_conditional_params(dependent_upon_obj[toplevels[i]], 'dependentUpon') - toplevels = [key for key in toplevels if key not in dependent_upon_obj[toplevels[i]]] - output('{}'.format('' if 'required' in request and toplevels[i] in request['required'] else '] ')) - else: - # Search for the parameter in any conditional sub-arrays (oneOfMany, pairedWith) - condition, foundinsubarray = search_key_in_conditional_array(request, toplevels[i]) - # If param found in the conditional sub-array - if condition != '' and foundinsubarray is not None: + output_title(esc_underscores(''.join([name, ' -- ', schema['title']])), '=', 0, 1) + if schema.get('rpc'): + output_title('SYNOPSIS') + # Add command level warning if exists + if 'warning' in 
schema: + output('**(WARNING: {})**\n\n'.format(esc_underscores(schema['warning']))) + # generate the rpc command details with request parameters + request = schema.get('request', {}) + properties = request.get('properties', {}) + toplevels = list(properties.keys()) + output('{} '.format(fmt_propname(name))) + i = 0 + while i < len(toplevels): + # Skip hidden properties + if 'hidden' in properties[toplevels[i]] and properties[toplevels[i]]['hidden']: + i += 1 + continue + # Search for the parameter in 'dependentUpon' array + dependent_upon_obj = request['dependentUpon'] if 'dependentUpon' in request else [] + if toplevels[i] in dependent_upon_obj: # Output parameters with appropriate separator - output_conditional_params(foundinsubarray, condition) - # Remove found keys from toplevels array - toplevels = [key for key in toplevels if key not in foundinsubarray] - # Reset the cursor to the previous index - i = i - 1 + output('{}*{}* '.format('' if 'required' in request and toplevels[i] in request['required'] else '[', esc_underscores(toplevels[i]))) + output_conditional_params(dependent_upon_obj[toplevels[i]], 'dependentUpon') + toplevels = [key for key in toplevels if key not in dependent_upon_obj[toplevels[i]]] + output('{}'.format('' if 'required' in request and toplevels[i] in request['required'] else '] ')) else: - # Print the key as it is if it doesn't exist in conditional array - output('{}'.format(fmt_paramname(toplevels[i], False if 'required' in request and toplevels[i] in request['required'] else True))) - i += 1 - # plugin.json is an exception where all parameters cannot be printed deu to their dependency on different subcommands - # So, add ... 
at the end for plugin schema - if schema['rpc'] == 'plugin': - output('...') + # Search for the parameter in any conditional sub-arrays (oneOfMany, pairedWith) + condition, foundinsubarray = search_key_in_conditional_array(request, toplevels[i]) + # If param found in the conditional sub-array + if condition != '' and foundinsubarray is not None: + # Output parameters with appropriate separator + output_conditional_params(foundinsubarray, condition) + # Remove found keys from toplevels array + toplevels = [key for key in toplevels if key not in foundinsubarray] + # Reset the cursor to the previous index + i = i - 1 + else: + # Print the key as it is if it doesn't exist in conditional array + output('{}'.format(fmt_paramname(toplevels[i], False if 'required' in request and toplevels[i] in request['required'] else True))) + i += 1 + # plugin.json is an exception where all parameters cannot be printed deu to their dependency on different subcommands + # So, add ... at the end for plugin schema + if schema.get('rpc') == 'plugin': + output('...') output('\n') def generate_description(schema): """Generate rpc description with request parameter descriptions""" - request = schema['request'] + schema_type = get_schema_type(schema, True) + request = schema.get('request', {}) + properties = request.get('properties', {}) output_title('DESCRIPTION') # Add deprecated and removal information for the command if 'deprecated' in schema: - output('Command **deprecated in {}, removed after {}**.\n\n'.format(schema['deprecated'][0], schema['deprecated'][1] if len(schema['deprecated']) > 1 else deprecated_to_deleted(schema['deprecated'][0]))) + output('{} **deprecated in {}, removed after {}**.\n\n'.format(schema_type, schema['deprecated'][0], schema['deprecated'][1] if len(schema['deprecated']) > 1 else deprecated_to_deleted(schema['deprecated'][0]))) # Version when the command was added - if 'added' in schema: - output('Command *added* in {}.\n\n'.format(schema['added'])) + if 'added' 
in schema and schema['added'] != 'pre-v0.10.1': + output('{} *added* in {}.\n\n'.format(schema_type, schema['added'])) # Command's detailed description outputs(schema['description'], '\n') # Request parameter's detailed description - output('{}'.format('\n\n' if len(request['properties']) > 0 else '\n')) - output_members(request) + output('{}'.format('\n\n' if len(properties) > 0 else '\n')) + + if properties: + if schema.get('hook'): + output_title('HOOK PAYLOAD') + output_members(request) def generate_return_value(schema): """This is not general, but works for us""" - output_title('RETURN VALUE') - - response = schema['response'] + schema_type = get_schema_type(schema) + if schema.get('rpc'): + output_title('RETURN VALUE') + elif schema.get('notification'): + output_title('NOTIFICATION PAYLOAD') + elif schema.get('hook'): + output_title('HOOK RETURN') + + response = schema.get('response') + if response is None: + output(f'This {schema_type} does not have a response.\n\n') + return if 'pre_return_value_notes' in response: outputs(response['pre_return_value_notes'], '\n') @@ -416,20 +456,24 @@ def generate_return_value(schema): # Use pre/post_return_value_notes with empty properties when dynamic generation of the return value section is not required. # But to add a custom return value section instead. Example: `commando` commands. if "pre_return_value_notes" not in response and "post_return_value_notes" not in response: - output('On success, an empty object is returned.\n') + if schema.get('rpc'): + output('On success, an empty object is returned.\n') sub = schema elif len(toplevels) == 1 and props[toplevels[0]]['type'] == 'object': - output('On success, an object containing {} is returned. It is an object containing:\n\n'.format(fmt_propname(toplevels[0]))) + if schema.get('rpc'): + output('On success, an object containing {} is returned. It is an object containing:\n\n'.format(fmt_propname(toplevels[0]))) # Don't have a description field here, it's not used. 
assert 'description' not in toplevels[0] sub = props[toplevels[0]] elif len(toplevels) == 1 and props[toplevels[0]]['type'] == 'array' and props[toplevels[0]]['items']['type'] == 'object': - output('On success, an object containing {} is returned. It is an array of objects, where each object contains:\n\n'.format(fmt_propname(toplevels[0]))) + if schema.get('rpc'): + output('On success, an object containing {} is returned. It is an array of objects, where each object contains:\n\n'.format(fmt_propname(toplevels[0]))) # Don't have a description field here, it's not used. assert 'description' not in toplevels[0] sub = props[toplevels[0]]['items'] else: - output('On success, an object is returned, containing:\n\n') + if schema.get('rpc'): + output('On success, an object is returned, containing:\n\n') sub = response output_members(sub) @@ -463,7 +507,7 @@ def generate_body(schema): output('\n') -def generate_footer(schema): +def generate_footer(schema, name): """Output sections which should be printed after return value""" for key in FOOTER_KEY_SEQUENCE: if key not in schema: @@ -484,7 +528,7 @@ def generate_footer(schema): for i, example in enumerate(schema.get('examples', [])): output('\n{}**Example {}**: {}\n'.format('' if i == 0 else '\n', i + 1, '\n'.join(example.get('description', '')))) output('\nRequest:\n') - create_shell_command(schema['rpc'], example) + create_shell_command(name, example) output('```json\n') output(json.dumps(example['request'], indent=2).strip() + '\n') output('```\n') @@ -498,11 +542,28 @@ def generate_footer(schema): output('\n') +def get_schema_type(schema, capitalize=False): + if 'rpc' in schema: + result = 'command' + elif 'notification' in schema: + result = 'notification' + elif 'hook' in schema: + result = 'hook' + else: + raise ValueError('Unknown schema type') + + return result.capitalize() if capitalize else result + + def main(schemafile, markdownfile): with open(schemafile, 'r') as f: schema = json.load(f) + + name = 
schema.get('rpc') or schema.get('notification') or schema.get('hook') + if name is None: + name = os.path.basename(schemafile).replace('.json', '') # Outputs rpc title and synopsis with request parameters - generate_header(schema) + generate_header(schema, name) # Outputs command description with request parameter descriptions generate_description(schema) # Outputs other remaining sections before return value section @@ -510,7 +571,7 @@ def main(schemafile, markdownfile): # Outputs command response with response parameter descriptions generate_return_value(schema) # Outputs other remaining sections after return value section - generate_footer(schema) + generate_footer(schema, name) if markdownfile is None: return