From df6ad3e8183dafeeee412b55f99d2ec8da921cd4 Mon Sep 17 00:00:00 2001 From: t-bast Date: Mon, 21 Oct 2024 10:37:41 +0200 Subject: [PATCH] Require strict exchange of `shutdown` Whenever one side sends `shutdown`, we restart a signing round from scratch. To be compatible with future taproot channels, we require the receiver to also send `shutdown` before moving on to exchanging `closing_complete` and `closing_sig`. This will give nodes a message to exchange fresh musig2 nonces before producing signatures. On reconnection, we also restart a signing session from scratch and discard pending partial signatures. --- .../fr/acinq/eclair/channel/ChannelData.scala | 37 +++- .../eclair/channel/ChannelExceptions.scala | 1 + .../fr/acinq/eclair/channel/Helpers.scala | 2 - .../fr/acinq/eclair/channel/fsm/Channel.scala | 201 +++++++++++------- .../eclair/channel/fsm/CommonHandlers.scala | 25 ++- .../eclair/channel/fsm/ErrorHandlers.scala | 4 +- .../channel/version4/ChannelCodecs4.scala | 19 +- .../scala/fr/acinq/eclair/TestDatabases.scala | 16 +- .../states/g/NegotiatingStateSpec.scala | 156 ++++++++++++-- .../channel/version4/ChannelCodecs4Spec.scala | 26 ++- 10 files changed, 374 insertions(+), 113 deletions(-) diff --git a/eclair-core/src/main/scala/fr/acinq/eclair/channel/ChannelData.scala b/eclair-core/src/main/scala/fr/acinq/eclair/channel/ChannelData.scala index df1c0c696e..6b519059e7 100644 --- a/eclair-core/src/main/scala/fr/acinq/eclair/channel/ChannelData.scala +++ b/eclair-core/src/main/scala/fr/acinq/eclair/channel/ChannelData.scala @@ -26,7 +26,7 @@ import fr.acinq.eclair.channel.fund.{InteractiveTxBuilder, InteractiveTxSigningS import fr.acinq.eclair.io.Peer import fr.acinq.eclair.transactions.CommitmentSpec import fr.acinq.eclair.transactions.Transactions._ -import fr.acinq.eclair.wire.protocol.{ChannelAnnouncement, ChannelReady, ChannelReestablish, ChannelUpdate, ClosingSigned, CommitSig, FailureReason, FundingCreated, FundingSigned, Init, LiquidityAds, OnionRoutingPacket, OpenChannel, OpenDualFundedChannel, Shutdown, SpliceInit, Stfu, TxInitRbf, TxSignatures, UpdateAddHtlc, UpdateFailHtlc, UpdateFailMalformedHtlc, UpdateFulfillHtlc} +import fr.acinq.eclair.wire.protocol.{ChannelAnnouncement, ChannelReady, ChannelReestablish, ChannelUpdate, ClosingComplete, ClosingSig, ClosingSigned, CommitSig, FailureReason, FundingCreated, FundingSigned, Init, LiquidityAds, OnionRoutingPacket, OpenChannel, OpenDualFundedChannel, Shutdown, SpliceInit, Stfu, TxInitRbf, TxSignatures, UpdateAddHtlc, UpdateFailHtlc, UpdateFailMalformedHtlc, UpdateFulfillHtlc} import fr.acinq.eclair.{Alias, BlockHeight, CltvExpiry, CltvExpiryDelta, Features, InitFeature, MilliSatoshi, MilliSatoshiLong, RealShortChannelId, TimestampMilli, UInt64} import scodec.bits.ByteVector @@ -536,6 +536,38 @@ object SpliceStatus { case object SpliceAborted extends SpliceStatus } +case class ClosingCompleteSent(closingComplete: ClosingComplete, closingFeerate: FeeratePerKw) + +sealed trait OnRemoteShutdown +object OnRemoteShutdown { + /** When receiving the remote shutdown, we sign a new version of our closing transaction. */ + case class SignTransaction(closingFeerate: FeeratePerKw) extends OnRemoteShutdown + /** When receiving the remote shutdown, we don't sign a new version of our closing transaction, but our peer may sign theirs. */ + case object WaitForSigs extends OnRemoteShutdown +} + +sealed trait ClosingNegotiation { + def localShutdown: Shutdown + // When we disconnect, we discard pending signatures. 
+ def disconnect(): ClosingNegotiation.WaitingForRemoteShutdown = this match { + case status: ClosingNegotiation.WaitingForRemoteShutdown => status + case status: ClosingNegotiation.SigningTransactions => status.closingCompleteSent_opt.map(_.closingFeerate) match { + // If we were waiting for their signature, we will send closing_complete again after exchanging shutdown. + case Some(closingFeerate) if status.closingSigReceived_opt.isEmpty => ClosingNegotiation.WaitingForRemoteShutdown(status.localShutdown, OnRemoteShutdown.SignTransaction(closingFeerate)) + case _ => ClosingNegotiation.WaitingForRemoteShutdown(status.localShutdown, OnRemoteShutdown.WaitForSigs) + } + case status: ClosingNegotiation.WaitingForConfirmation => ClosingNegotiation.WaitingForRemoteShutdown(status.localShutdown, OnRemoteShutdown.WaitForSigs) + } +} +object ClosingNegotiation { + /** We've sent a new shutdown message: we wait for their shutdown message before taking any action. */ + case class WaitingForRemoteShutdown(localShutdown: Shutdown, onRemoteShutdown: OnRemoteShutdown) extends ClosingNegotiation + /** We've exchanged shutdown messages: at least one side will send closing_complete to renew their closing transaction. */ + case class SigningTransactions(localShutdown: Shutdown, remoteShutdown: Shutdown, closingCompleteSent_opt: Option[ClosingCompleteSent], closingSigSent_opt: Option[ClosingSig], closingSigReceived_opt: Option[ClosingSig]) extends ClosingNegotiation + /** We've signed a new closing transaction and are waiting for confirmation or to initiate RBF. */ + case class WaitingForConfirmation(localShutdown: Shutdown, remoteShutdown: Shutdown) extends ClosingNegotiation +} + sealed trait ChannelData extends PossiblyHarmful { def channelId: ByteVector32 } @@ -655,12 +687,13 @@ final case class DATA_NEGOTIATING(commitments: Commitments, require(!commitments.params.localParams.paysClosingFees || closingTxProposed.forall(_.nonEmpty), "initiator must have at least one closing signature for every negotiation attempt because it initiates the closing") } final case class DATA_NEGOTIATING_SIMPLE(commitments: Commitments, - localShutdown: Shutdown, remoteShutdown: Shutdown, + status: ClosingNegotiation, // Closing transactions we created, where we pay the fees (unsigned). proposedClosingTxs: List[ClosingTxs], // Closing transactions we published: this contains our local transactions for // which they sent a signature, and their closing transactions that we signed. 
publishedClosingTxs: List[ClosingTx]) extends ChannelDataWithCommitments { + val localScriptPubKey: ByteVector = status.localShutdown.scriptPubKey def findClosingTx(tx: Transaction): Option[ClosingTx] = publishedClosingTxs.find(_.tx.txid == tx.txid).orElse(proposedClosingTxs.flatMap(_.all).find(_.tx.txid == tx.txid)) } final case class DATA_CLOSING(commitments: Commitments, diff --git a/eclair-core/src/main/scala/fr/acinq/eclair/channel/ChannelExceptions.scala b/eclair-core/src/main/scala/fr/acinq/eclair/channel/ChannelExceptions.scala index 916869f391..ad2ef0366b 100644 --- a/eclair-core/src/main/scala/fr/acinq/eclair/channel/ChannelExceptions.scala +++ b/eclair-core/src/main/scala/fr/acinq/eclair/channel/ChannelExceptions.scala @@ -119,6 +119,7 @@ case class InvalidHtlcSignature (override val channelId: Byte case class CannotGenerateClosingTx (override val channelId: ByteVector32) extends ChannelException(channelId, "failed to generate closing transaction: all outputs are trimmed") case class MissingCloseSignature (override val channelId: ByteVector32) extends ChannelException(channelId, "closing_complete is missing a signature for a closing transaction including our output") case class InvalidCloseSignature (override val channelId: ByteVector32, txId: TxId) extends ChannelException(channelId, s"invalid close signature: txId=$txId") +case class UnexpectedClosingComplete (override val channelId: ByteVector32, fees: Satoshi, lockTime: Long) extends ChannelException(channelId, s"unexpected closing_complete with fees=$fees and lockTime=$lockTime: we already sent closing_sig, you must send shutdown first") case class InvalidCloseAmountBelowDust (override val channelId: ByteVector32, txId: TxId) extends ChannelException(channelId, s"invalid closing tx: some outputs are below dust: txId=$txId") case class CommitSigCountMismatch (override val channelId: ByteVector32, expected: Int, actual: Int) extends ChannelException(channelId, s"commit sig count mismatch: expected=$expected actual=$actual") case class HtlcSigCountMismatch (override val channelId: ByteVector32, expected: Int, actual: Int) extends ChannelException(channelId, s"htlc sig count mismatch: expected=$expected actual=$actual") diff --git a/eclair-core/src/main/scala/fr/acinq/eclair/channel/Helpers.scala b/eclair-core/src/main/scala/fr/acinq/eclair/channel/Helpers.scala index 6a4b8ec0d6..12b3b2cd90 100644 --- a/eclair-core/src/main/scala/fr/acinq/eclair/channel/Helpers.scala +++ b/eclair-core/src/main/scala/fr/acinq/eclair/channel/Helpers.scala @@ -712,8 +712,6 @@ object Helpers { /** We are the closer: we sign closing transactions for which we pay the fees. */ def makeSimpleClosingTx(currentBlockHeight: BlockHeight, keyManager: ChannelKeyManager, commitment: FullCommitment, localScriptPubkey: ByteVector, remoteScriptPubkey: ByteVector, feerate: FeeratePerKw): Either[ChannelException, (ClosingTxs, ClosingComplete)] = { - require(isValidFinalScriptPubkey(localScriptPubkey, allowAnySegwit = true, allowOpReturn = true), "invalid localScriptPubkey") - require(isValidFinalScriptPubkey(remoteScriptPubkey, allowAnySegwit = true, allowOpReturn = true), "invalid remoteScriptPubkey") // We must convert the feerate to a fee: we must build dummy transactions to compute their weight. 
val closingFee = { val dummyClosingTxs = Transactions.makeSimpleClosingTxs(commitment.commitInput, commitment.localCommit.spec, SimpleClosingTxFee.PaidByUs(0 sat), currentBlockHeight.toLong, localScriptPubkey, remoteScriptPubkey) diff --git a/eclair-core/src/main/scala/fr/acinq/eclair/channel/fsm/Channel.scala b/eclair-core/src/main/scala/fr/acinq/eclair/channel/fsm/Channel.scala index 977063c6f3..a2ae4242f6 100644 --- a/eclair-core/src/main/scala/fr/acinq/eclair/channel/fsm/Channel.scala +++ b/eclair-core/src/main/scala/fr/acinq/eclair/channel/fsm/Channel.scala @@ -736,15 +736,7 @@ class Channel(val nodeParams: NodeParams, val wallet: OnChainChannelFunder with if (d.commitments.hasNoPendingHtlcsOrFeeUpdate) { // there are no pending signed changes, let's directly negotiate a closing transaction if (Features.canUseFeature(d.commitments.params.localParams.initFeatures, d.commitments.params.remoteParams.initFeatures, Features.SimpleClose)) { - val closingFeerate = d.closingFeerates.map(_.preferred).getOrElse(nodeParams.onChainFeeConf.getClosingFeerate(nodeParams.currentBitcoinCoreFeerates)) - MutualClose.makeSimpleClosingTx(nodeParams.currentBlockHeight, keyManager, d.commitments.latest, localShutdown.scriptPubKey, remoteShutdownScript, closingFeerate) match { - case Left(f) => - log.warning("cannot create local closing txs, waiting for remote closing_complete: {}", f.getMessage) - goto(NEGOTIATING_SIMPLE) using DATA_NEGOTIATING_SIMPLE(d.commitments, localShutdown, remoteShutdown, Nil, Nil) storing() sending sendList - case Right((closingTxs, closingComplete)) => - log.debug("signing local mutual close transactions: {}", closingTxs) - goto(NEGOTIATING_SIMPLE) using DATA_NEGOTIATING_SIMPLE(d.commitments, localShutdown, remoteShutdown, closingTxs :: Nil, Nil) storing() sending sendList :+ closingComplete - } + startSimpleClose(d.commitments, localShutdown, remoteShutdown, d.closingFeerates, sendList) } else if (d.commitments.params.localParams.paysClosingFees) { // we pay the closing fees, so we initiate the negotiation by sending the first closing_signed val (closingTx, closingSigned) = MutualClose.makeFirstClosingTx(keyManager, d.commitments.latest, localShutdown.scriptPubKey, remoteShutdownScript, nodeParams.currentBitcoinCoreFeerates, nodeParams.onChainFeeConf, d.closingFeerates) @@ -1524,15 +1516,7 @@ class Channel(val nodeParams: NodeParams, val wallet: OnChainChannelFunder with context.system.eventStream.publish(ChannelSignatureReceived(self, commitments1)) if (commitments1.hasNoPendingHtlcsOrFeeUpdate) { if (Features.canUseFeature(d.commitments.params.localParams.initFeatures, d.commitments.params.remoteParams.initFeatures, Features.SimpleClose)) { - val closingFeerate = d.closingFeerates.map(_.preferred).getOrElse(nodeParams.onChainFeeConf.getClosingFeerate(nodeParams.currentBitcoinCoreFeerates)) - MutualClose.makeSimpleClosingTx(nodeParams.currentBlockHeight, keyManager, d.commitments.latest, localShutdown.scriptPubKey, remoteShutdown.scriptPubKey, closingFeerate) match { - case Left(f) => - log.warning("cannot create local closing txs, waiting for remote closing_complete: {}", f.getMessage) - goto(NEGOTIATING_SIMPLE) using DATA_NEGOTIATING_SIMPLE(d.commitments, localShutdown, remoteShutdown, Nil, Nil) storing() sending revocation - case Right((closingTxs, closingComplete)) => - log.debug("signing local mutual close transactions: {}", closingTxs) - goto(NEGOTIATING_SIMPLE) using DATA_NEGOTIATING_SIMPLE(d.commitments, localShutdown, remoteShutdown, closingTxs :: Nil, Nil) storing() 
sending revocation :: closingComplete :: Nil - } + startSimpleClose(d.commitments, localShutdown, remoteShutdown, d.closingFeerates, revocation :: Nil) } else if (d.commitments.params.localParams.paysClosingFees) { // we pay the closing fees, so we initiate the negotiation by sending the first closing_signed val (closingTx, closingSigned) = MutualClose.makeFirstClosingTx(keyManager, commitments1.latest, localShutdown.scriptPubKey, remoteShutdown.scriptPubKey, nodeParams.currentBitcoinCoreFeerates, nodeParams.onChainFeeConf, closingFeerates) @@ -1576,15 +1560,7 @@ class Channel(val nodeParams: NodeParams, val wallet: OnChainChannelFunder with if (commitments1.hasNoPendingHtlcsOrFeeUpdate) { log.debug("switching to NEGOTIATING spec:\n{}", commitments1.latest.specs2String) if (Features.canUseFeature(d.commitments.params.localParams.initFeatures, d.commitments.params.remoteParams.initFeatures, Features.SimpleClose)) { - val closingFeerate = d.closingFeerates.map(_.preferred).getOrElse(nodeParams.onChainFeeConf.getClosingFeerate(nodeParams.currentBitcoinCoreFeerates)) - MutualClose.makeSimpleClosingTx(nodeParams.currentBlockHeight, keyManager, d.commitments.latest, localShutdown.scriptPubKey, remoteShutdown.scriptPubKey, closingFeerate) match { - case Left(f) => - log.warning("cannot create local closing txs, waiting for remote closing_complete: {}", f.getMessage) - goto(NEGOTIATING_SIMPLE) using DATA_NEGOTIATING_SIMPLE(d.commitments, localShutdown, remoteShutdown, Nil, Nil) storing() - case Right((closingTxs, closingComplete)) => - log.debug("signing local mutual close transactions: {}", closingTxs) - goto(NEGOTIATING_SIMPLE) using DATA_NEGOTIATING_SIMPLE(d.commitments, localShutdown, remoteShutdown, closingTxs :: Nil, Nil) storing() sending closingComplete - } + startSimpleClose(d.commitments, localShutdown, remoteShutdown, d.closingFeerates, Nil) } else if (d.commitments.params.localParams.paysClosingFees) { // we pay the closing fees, so we initiate the negotiation by sending the first closing_signed val (closingTx, closingSigned) = MutualClose.makeFirstClosingTx(keyManager, commitments1.latest, localShutdown.scriptPubKey, remoteShutdown.scriptPubKey, nodeParams.currentBitcoinCoreFeerates, nodeParams.onChainFeeConf, closingFeerates) @@ -1605,11 +1581,8 @@ class Channel(val nodeParams: NodeParams, val wallet: OnChainChannelFunder with case Event(shutdown: Shutdown, d: DATA_SHUTDOWN) => if (shutdown.scriptPubKey != d.remoteShutdown.scriptPubKey) { log.debug("our peer updated their shutdown script (previous={}, current={})", d.remoteShutdown.scriptPubKey, shutdown.scriptPubKey) - stay() using d.copy(remoteShutdown = shutdown) storing() - } else { - // This is a retransmission of their previous shutdown, we can ignore it. - stay() } + stay() using d.copy(remoteShutdown = shutdown) storing() case Event(r: RevocationTimeout, d: DATA_SHUTDOWN) => handleRevocationTimeout(r, d) @@ -1754,38 +1727,111 @@ class Channel(val nodeParams: NodeParams, val wallet: OnChainChannelFunder with when(NEGOTIATING_SIMPLE)(handleExceptions { case Event(remoteShutdown: Shutdown, d: DATA_NEGOTIATING_SIMPLE) => - // Our peer wants to create a new version of their closing transaction. - // We don't need to update our version of the closing transaction: we simply wait for their closing_complete. - stay() using d.copy(remoteShutdown = remoteShutdown) storing() + d.status match { + case status: ClosingNegotiation.WaitingForRemoteShutdown => + // We have already sent our shutdown. 
Now that we've received theirs, we're ready to sign closing transactions. + // If we don't have a closing feerate, we don't need to create a new version of our closing transaction (which + // can happen after a reconnection for example). + status.onRemoteShutdown match { + case OnRemoteShutdown.SignTransaction(closingFeerate) => + val localScript = status.localShutdown.scriptPubKey + val remoteScript = remoteShutdown.scriptPubKey + MutualClose.makeSimpleClosingTx(nodeParams.currentBlockHeight, keyManager, d.commitments.latest, localScript, remoteScript, closingFeerate) match { + case Left(f) => + log.warning("cannot create local closing txs, waiting for remote closing_complete: {}", f.getMessage) + val status1 = ClosingNegotiation.SigningTransactions(status.localShutdown, remoteShutdown, None, None, None) + stay() using d.copy(status = status1) + case Right((closingTxs, closingComplete)) => + log.debug("signing local mutual close transactions: {}", closingTxs) + val status1 = ClosingNegotiation.SigningTransactions(status.localShutdown, remoteShutdown, Some(ClosingCompleteSent(closingComplete, closingFeerate)), None, None) + stay() using d.copy(status = status1, proposedClosingTxs = d.proposedClosingTxs :+ closingTxs) storing() sending closingComplete + } + case OnRemoteShutdown.WaitForSigs => + val status1 = ClosingNegotiation.SigningTransactions(status.localShutdown, remoteShutdown, None, None, None) + stay() using d.copy(status = status1) + } + case status: ClosingNegotiation.SigningTransactions => + // We were in the middle of signing transactions: we restart a signing round from scratch. + // If we were waiting for their signature, we will send closing_complete again after exchanging shutdown. + val localShutdown = status.localShutdown + val onRemoteShutdown = status.closingCompleteSent_opt.map(_.closingFeerate) match { + case Some(closingFeerate) if status.closingSigReceived_opt.isEmpty => OnRemoteShutdown.SignTransaction(closingFeerate) + case _ => OnRemoteShutdown.WaitForSigs + } + val status1 = ClosingNegotiation.WaitingForRemoteShutdown(localShutdown, onRemoteShutdown) + self ! remoteShutdown + stay() using d.copy(status = status1) sending localShutdown + case status: ClosingNegotiation.WaitingForConfirmation => + // Our peer wants to create a new version of their closing transaction. We don't need to update our version of + // the closing transaction: we re-send our previous shutdown and wait for their closing_complete. + val localShutdown = status.localShutdown + val status1 = ClosingNegotiation.SigningTransactions(localShutdown, remoteShutdown, None, None, None) + stay() using d.copy(status = status1) sending localShutdown + } case Event(closingComplete: ClosingComplete, d: DATA_NEGOTIATING_SIMPLE) => - MutualClose.signSimpleClosingTx(keyManager, d.commitments.latest, d.localShutdown.scriptPubKey, d.remoteShutdown.scriptPubKey, closingComplete) match { - case Left(f) => - // This may happen if scripts were updated concurrently, so we simply ignore failures. - // Bolt 2: - // - If the signature field is not valid for the corresponding closing transaction: - // - MUST ignore `closing_complete`. 
- log.warning("invalid closing_complete: {}", f.getMessage) + d.status match { + case _: ClosingNegotiation.WaitingForRemoteShutdown => + log.info("ignoring remote closing_complete, we've sent shutdown to initiate a new signing round") stay() - case Right((signedClosingTx, closingSig)) => - log.debug("signing remote mutual close transaction: {}", signedClosingTx.tx) - val d1 = d.copy(publishedClosingTxs = d.publishedClosingTxs :+ signedClosingTx) - stay() using d1 storing() calling doPublish(signedClosingTx, localPaysClosingFees = false) sending closingSig + case _: ClosingNegotiation.WaitingForConfirmation => + log.info("ignoring closing_complete, we've already sent closing_sig: peer must send shutdown again before closing_complete") + stay() sending Warning(d.channelId, UnexpectedClosingComplete(d.channelId, closingComplete.fees, closingComplete.lockTime).getMessage) + case status: ClosingNegotiation.SigningTransactions if status.closingSigSent_opt.nonEmpty => + log.info("ignoring closing_complete, we've already sent closing_sig: peer must send shutdown again before closing_complete") + stay() sending Warning(d.channelId, UnexpectedClosingComplete(d.channelId, closingComplete.fees, closingComplete.lockTime).getMessage) + case status: ClosingNegotiation.SigningTransactions => + val localScript = status.localShutdown.scriptPubKey + val remoteScript = status.remoteShutdown.scriptPubKey + MutualClose.signSimpleClosingTx(keyManager, d.commitments.latest, localScript, remoteScript, closingComplete) match { + case Left(f) => + // This may happen if scripts were updated concurrently, so we simply ignore failures. + log.warning("invalid closing_complete: {}", f.getMessage) + stay() + case Right((signedClosingTx, closingSig)) => + log.debug("signing remote mutual close transaction: {}", signedClosingTx.tx) + val status1 = status.closingCompleteSent_opt match { + // We've sent closing_complete: we may be waiting for their closing_sig. + case Some(_) => status.closingSigReceived_opt match { + case Some(_) => ClosingNegotiation.WaitingForConfirmation(status.localShutdown, status.remoteShutdown) + case None => status.copy(closingSigSent_opt = Some(closingSig)) + } + // We haven't sent closing_complete: we're not waiting for their closing_sig'. + case None => ClosingNegotiation.WaitingForConfirmation(status.localShutdown, status.remoteShutdown) + } + val d1 = d.copy(status = status1, publishedClosingTxs = d.publishedClosingTxs :+ signedClosingTx) + stay() using d1 storing() calling doPublish(signedClosingTx, localPaysClosingFees = false) sending closingSig + } } case Event(closingSig: ClosingSig, d: DATA_NEGOTIATING_SIMPLE) => - MutualClose.receiveSimpleClosingSig(keyManager, d.commitments.latest, d.proposedClosingTxs.last, closingSig) match { - case Left(f) => - // This may happen if scripts were updated concurrently, so we simply ignore failures. - // Bolt 2: - // - If the signature field is not valid for the corresponding closing transaction: - // - MUST ignore `closing_sig`. 
- log.warning("invalid closing_sig: {}", f.getMessage) + d.status match { + case _: ClosingNegotiation.WaitingForRemoteShutdown => + log.info("ignoring remote closing_sig, we've sent shutdown to initiate a new signing round") + stay() + case _: ClosingNegotiation.WaitingForConfirmation => + log.info("ignoring closing_sig, we've already fully signed closing transactions") stay() - case Right(signedClosingTx) => - log.debug("received signatures for local mutual close transaction: {}", signedClosingTx.tx) - val d1 = d.copy(publishedClosingTxs = d.publishedClosingTxs :+ signedClosingTx) - stay() using d1 storing() calling doPublish(signedClosingTx, localPaysClosingFees = true) + case status: ClosingNegotiation.SigningTransactions if status.closingSigReceived_opt.nonEmpty => + log.info("ignoring closing_sig, we've already received it") + stay() + case status: ClosingNegotiation.SigningTransactions => + MutualClose.receiveSimpleClosingSig(keyManager, d.commitments.latest, d.proposedClosingTxs.last, closingSig) match { + case Left(f) => + // This may happen if scripts were updated concurrently, so we simply ignore failures. + log.warning("invalid closing_sig: {}", f.getMessage) + stay() + case Right(signedClosingTx) => + log.debug("received signatures for local mutual close transaction: {}", signedClosingTx.tx) + val status1 = status.closingSigSent_opt match { + // We have already signed their transaction: both local and remote closing transactions have been updated. + case Some(_) => ClosingNegotiation.WaitingForConfirmation(status.localShutdown, status.remoteShutdown) + // We haven't sent closing_sig yet: they may send us closing_complete to update their closing transaction. + case None => status.copy(closingSigReceived_opt = Some(closingSig)) + } + val d1 = d.copy(status = status1, publishedClosingTxs = d.publishedClosingTxs :+ signedClosingTx) + stay() using d1 storing() calling doPublish(signedClosingTx, localPaysClosingFees = true) + } } case Event(WatchFundingSpentTriggered(tx), d: DATA_NEGOTIATING_SIMPLE) if d.findClosingTx(tx).nonEmpty => @@ -1806,27 +1852,26 @@ class Channel(val nodeParams: NodeParams, val wallet: OnChainChannelFunder with goto(CLOSED) using d storing() case Event(c: CMD_CLOSE, d: DATA_NEGOTIATING_SIMPLE) => - val localShutdown_opt = c.scriptPubKey match { - case Some(scriptPubKey) if scriptPubKey != d.localShutdown.scriptPubKey => Some(Shutdown(d.channelId, scriptPubKey)) - case _ => None - } - if (localShutdown_opt.nonEmpty || c.feerates.nonEmpty) { - val localScript = localShutdown_opt.map(_.scriptPubKey).getOrElse(d.localShutdown.scriptPubKey) - val feerate = c.feerates.map(_.preferred).getOrElse(nodeParams.onChainFeeConf.getClosingFeerate(nodeParams.currentBitcoinCoreFeerates)) - MutualClose.makeSimpleClosingTx(nodeParams.currentBlockHeight, keyManager, d.commitments.latest, localScript, d.remoteShutdown.scriptPubKey, feerate) match { - case Left(f) => handleCommandError(f, c) - case Right((closingTxs, closingComplete)) => - log.info("new closing transaction created with script={} fees={}", localScript, closingComplete.fees) - log.debug("signing local mutual close transactions: {}", closingTxs) - val d1 = d.copy(localShutdown = localShutdown_opt.getOrElse(d.localShutdown), proposedClosingTxs = d.proposedClosingTxs :+ closingTxs) - stay() using d1 storing() sending localShutdown_opt.toSeq :+ closingComplete - } - } else { - handleCommandError(ClosingAlreadyInProgress(d.channelId), c) + val localShutdown = Shutdown(d.channelId, 
c.scriptPubKey.getOrElse(d.status.localShutdown.scriptPubKey)) + val closingFeerate = c.feerates.map(_.preferred).getOrElse(nodeParams.onChainFeeConf.getClosingFeerate(nodeParams.currentBitcoinCoreFeerates)) + d.status match { + case _: ClosingNegotiation.WaitingForRemoteShutdown => + log.info("we're already waiting for our peer to send their shutdown message, no need to send ours again") + handleCommandError(ClosingAlreadyInProgress(d.channelId), c) + case _: ClosingNegotiation.SigningTransactions => + val status1 = ClosingNegotiation.WaitingForRemoteShutdown(localShutdown, OnRemoteShutdown.SignTransaction(closingFeerate)) + stay() using d.copy(status = status1) storing() sending localShutdown + case _: ClosingNegotiation.WaitingForConfirmation => + val status1 = ClosingNegotiation.WaitingForRemoteShutdown(localShutdown, OnRemoteShutdown.SignTransaction(closingFeerate)) + stay() using d.copy(status = status1) storing() sending localShutdown } case Event(e: Error, d: DATA_NEGOTIATING_SIMPLE) => handleRemoteError(e, d) + case Event(INPUT_DISCONNECTED, d: DATA_NEGOTIATING_SIMPLE) => + val status1 = d.status.disconnect() + goto(OFFLINE) using d.copy(status = status1) + }) when(CLOSING)(handleExceptions { @@ -2480,12 +2525,12 @@ class Channel(val nodeParams: NodeParams, val wallet: OnChainChannelFunder with case Event(_: ChannelReestablish, d: DATA_NEGOTIATING_SIMPLE) => // We retransmit our shutdown: we may have updated our script and they may not have received it. - // We also sign a new round of closing transactions since network fees may have changed while we were offline. - val closingFeerate = nodeParams.onChainFeeConf.getClosingFeerate(nodeParams.currentBitcoinCoreFeerates) - Closing.MutualClose.makeSimpleClosingTx(nodeParams.currentBlockHeight, keyManager, d.commitments.latest, d.localShutdown.scriptPubKey, d.remoteShutdown.scriptPubKey, closingFeerate) match { - case Left(_) => goto(NEGOTIATING_SIMPLE) using d sending d.localShutdown - case Right((closingTxs, closingComplete)) => goto(NEGOTIATING_SIMPLE) using d.copy(proposedClosingTxs = d.proposedClosingTxs :+ closingTxs) sending Seq(d.localShutdown, closingComplete) + val localShutdown = d.status.localShutdown + val status1 = d.status match { + case status: ClosingNegotiation.WaitingForRemoteShutdown => status.copy(localShutdown = localShutdown) + case _ => ClosingNegotiation.WaitingForRemoteShutdown(localShutdown, OnRemoteShutdown.WaitForSigs) } + goto(NEGOTIATING_SIMPLE) using d.copy(status = status1) sending localShutdown // This handler is a workaround for an issue in lnd: starting with versions 0.10 / 0.11, they sometimes fail to send // a channel_reestablish when reconnecting a channel that recently got confirmed, and instead send a channel_ready diff --git a/eclair-core/src/main/scala/fr/acinq/eclair/channel/fsm/CommonHandlers.scala b/eclair-core/src/main/scala/fr/acinq/eclair/channel/fsm/CommonHandlers.scala index a8458f4ec5..d22e0799ea 100644 --- a/eclair-core/src/main/scala/fr/acinq/eclair/channel/fsm/CommonHandlers.scala +++ b/eclair-core/src/main/scala/fr/acinq/eclair/channel/fsm/CommonHandlers.scala @@ -16,13 +16,14 @@ package fr.acinq.eclair.channel.fsm -import akka.actor.{ActorRef, FSM, Status} +import akka.actor.FSM import fr.acinq.bitcoin.scalacompat.{ByteVector32, Script} import fr.acinq.eclair.Features +import fr.acinq.eclair.channel.Helpers.Closing.MutualClose import fr.acinq.eclair.channel._ import fr.acinq.eclair.db.PendingCommandsDb import fr.acinq.eclair.io.Peer -import 
fr.acinq.eclair.wire.protocol.{HtlcSettlementMessage, LightningMessage, UpdateMessage} +import fr.acinq.eclair.wire.protocol.{HtlcSettlementMessage, LightningMessage, Shutdown, UpdateMessage} import scodec.bits.ByteVector import scala.concurrent.duration.DurationInt @@ -106,7 +107,7 @@ trait CommonHandlers { case d: DATA_NORMAL if d.localShutdown.isDefined => d.localShutdown.get.scriptPubKey case d: DATA_SHUTDOWN => d.localShutdown.scriptPubKey case d: DATA_NEGOTIATING => d.localShutdown.scriptPubKey - case d: DATA_NEGOTIATING_SIMPLE => d.localShutdown.scriptPubKey + case d: DATA_NEGOTIATING_SIMPLE => d.localScriptPubKey case d: DATA_CLOSING => d.finalScriptPubKey case d => d.commitments.params.localParams.upfrontShutdownScript_opt match { @@ -131,4 +132,22 @@ trait CommonHandlers { finalScriptPubKey } + def startSimpleClose(commitments: Commitments, localShutdown: Shutdown, remoteShutdown: Shutdown, closingFeerates: Option[ClosingFeerates], toSend: List[LightningMessage]) = { + val localScript = localShutdown.scriptPubKey + val remoteScript = remoteShutdown.scriptPubKey + val closingFeerate = closingFeerates.map(_.preferred).getOrElse(nodeParams.onChainFeeConf.getClosingFeerate(nodeParams.currentBitcoinCoreFeerates)) + MutualClose.makeSimpleClosingTx(nodeParams.currentBlockHeight, keyManager, commitments.latest, localScript, remoteScript, closingFeerate) match { + case Left(f) => + log.warning("cannot create local closing txs, waiting for remote closing_complete: {}", f.getMessage) + val status = ClosingNegotiation.SigningTransactions(localShutdown, remoteShutdown, None, None, None) + val d = DATA_NEGOTIATING_SIMPLE(commitments, status, Nil, Nil) + goto(NEGOTIATING_SIMPLE) using d storing() sending toSend + case Right((closingTxs, closingComplete)) => + log.debug("signing local mutual close transactions: {}", closingTxs) + val status = ClosingNegotiation.SigningTransactions(localShutdown, remoteShutdown, Some(ClosingCompleteSent(closingComplete, closingFeerate)), None, None) + val d = DATA_NEGOTIATING_SIMPLE(commitments, status, closingTxs :: Nil, Nil) + goto(NEGOTIATING_SIMPLE) using d storing() sending toSend :+ closingComplete + } + } + } diff --git a/eclair-core/src/main/scala/fr/acinq/eclair/channel/fsm/ErrorHandlers.scala b/eclair-core/src/main/scala/fr/acinq/eclair/channel/fsm/ErrorHandlers.scala index a2ffe53180..d1bffd374e 100644 --- a/eclair-core/src/main/scala/fr/acinq/eclair/channel/fsm/ErrorHandlers.scala +++ b/eclair-core/src/main/scala/fr/acinq/eclair/channel/fsm/ErrorHandlers.scala @@ -89,7 +89,7 @@ trait ErrorHandlers extends CommonHandlers { handleMutualClose(bestUnpublishedClosingTx, Left(negotiating)) case negotiating: DATA_NEGOTIATING_SIMPLE if negotiating.publishedClosingTxs.nonEmpty => // We have published at least one mutual close transaction, it's better to use it instead of our local commit. 
- val closing = DATA_CLOSING(negotiating.commitments, waitingSince = nodeParams.currentBlockHeight, finalScriptPubKey = negotiating.localShutdown.scriptPubKey, mutualCloseProposed = negotiating.proposedClosingTxs.flatMap(_.all), mutualClosePublished = negotiating.publishedClosingTxs) + val closing = DATA_CLOSING(negotiating.commitments, waitingSince = nodeParams.currentBlockHeight, finalScriptPubKey = negotiating.localScriptPubKey, mutualCloseProposed = negotiating.proposedClosingTxs.flatMap(_.all), mutualClosePublished = negotiating.publishedClosingTxs) goto(CLOSING) using closing storing() case dd: ChannelDataWithCommitments => // We publish our commitment even if we have nothing at stake: it's a nice thing to do because it lets our peer @@ -139,7 +139,7 @@ trait ErrorHandlers extends CommonHandlers { handleMutualClose(bestUnpublishedClosingTx, Left(negotiating)) case negotiating: DATA_NEGOTIATING_SIMPLE if negotiating.publishedClosingTxs.nonEmpty => // We have published at least one mutual close transaction, it's better to use it instead of our local commit. - val closing = DATA_CLOSING(negotiating.commitments, waitingSince = nodeParams.currentBlockHeight, finalScriptPubKey = negotiating.localShutdown.scriptPubKey, mutualCloseProposed = negotiating.proposedClosingTxs.flatMap(_.all), mutualClosePublished = negotiating.publishedClosingTxs) + val closing = DATA_CLOSING(negotiating.commitments, waitingSince = nodeParams.currentBlockHeight, finalScriptPubKey = negotiating.localScriptPubKey, mutualCloseProposed = negotiating.proposedClosingTxs.flatMap(_.all), mutualClosePublished = negotiating.publishedClosingTxs) goto(CLOSING) using closing storing() // NB: we publish the commitment even if we have nothing at stake (in a dataloss situation our peer will send us an error just for that) case hasCommitments: ChannelDataWithCommitments => diff --git a/eclair-core/src/main/scala/fr/acinq/eclair/wire/internal/channel/version4/ChannelCodecs4.scala b/eclair-core/src/main/scala/fr/acinq/eclair/wire/internal/channel/version4/ChannelCodecs4.scala index 8dbd2bb56f..083faf417e 100644 --- a/eclair-core/src/main/scala/fr/acinq/eclair/wire/internal/channel/version4/ChannelCodecs4.scala +++ b/eclair-core/src/main/scala/fr/acinq/eclair/wire/internal/channel/version4/ChannelCodecs4.scala @@ -681,7 +681,7 @@ private[channel] object ChannelCodecs4 { ("remotePushAmount" | millisatoshi) :: ("status" | interactiveTxWaitingForSigsCodec) :: ("remoteChannelData_opt" | optional(bool8, varsizebinarydata))).as[DATA_WAIT_FOR_DUAL_FUNDING_SIGNED] - + val DATA_WAIT_FOR_DUAL_FUNDING_CONFIRMED_02_Codec: Codec[DATA_WAIT_FOR_DUAL_FUNDING_CONFIRMED] = ( ("commitments" | commitmentsCodecWithoutFirstRemoteCommitIndex) :: ("localPushAmount" | millisatoshi) :: @@ -759,10 +759,23 @@ private[channel] object ChannelCodecs4 { ("localOnly_opt" | optional(bool8, closingTxCodec)) :: ("remoteOnly_opt" | optional(bool8, closingTxCodec))).as[ClosingTxs] + private val onRemoteShutdownCodec: Codec[OnRemoteShutdown] = discriminated[OnRemoteShutdown].by(uint8) + .typecase(0x00, provide(OnRemoteShutdown.WaitForSigs)) + .typecase(0x01, feeratePerKw.as[OnRemoteShutdown.SignTransaction]) + + private val waitingForRemoteShutdownCodec: Codec[ClosingNegotiation.WaitingForRemoteShutdown] = ( + ("localShutdown" | lengthDelimited(shutdownCodec)) :: + ("onRemoteShutdown" | onRemoteShutdownCodec) + ).as[ClosingNegotiation.WaitingForRemoteShutdown] + + val closingNegotiationCodec: Codec[ClosingNegotiation] = discriminated[ClosingNegotiation].by(uint8) + 
.\(0x01) { case status: ClosingNegotiation.WaitingForRemoteShutdown => status }(waitingForRemoteShutdownCodec) + .\(0x02) { case status: ClosingNegotiation.SigningTransactions => status.disconnect() }(waitingForRemoteShutdownCodec) + .\(0x03) { case status: ClosingNegotiation.WaitingForConfirmation => status.disconnect() }(waitingForRemoteShutdownCodec) + val DATA_NEGOTIATING_SIMPLE_14_Codec: Codec[DATA_NEGOTIATING_SIMPLE] = ( ("commitments" | commitmentsCodec) :: - ("localShutdown" | lengthDelimited(shutdownCodec)) :: - ("remoteShutdown" | lengthDelimited(shutdownCodec)) :: + ("status" | closingNegotiationCodec) :: ("proposedClosingTxs" | listOfN(uint16, closingTxsCodec)) :: ("publishedClosingTxs" | listOfN(uint16, closingTxCodec))).as[DATA_NEGOTIATING_SIMPLE] diff --git a/eclair-core/src/test/scala/fr/acinq/eclair/TestDatabases.scala b/eclair-core/src/test/scala/fr/acinq/eclair/TestDatabases.scala index f102cd3868..fb91428dca 100644 --- a/eclair-core/src/test/scala/fr/acinq/eclair/TestDatabases.scala +++ b/eclair-core/src/test/scala/fr/acinq/eclair/TestDatabases.scala @@ -79,18 +79,25 @@ object TestDatabases { case d: DATA_WAIT_FOR_REMOTE_PUBLISH_FUTURE_COMMITMENT => d.copy(commitments = freeze2(d.commitments)) case d: DATA_NORMAL => d.copy(commitments = freeze2(d.commitments)) .modify(_.spliceStatus).using { - case s: SpliceStatus.SpliceWaitingForSigs => s - case _ => SpliceStatus.NoSplice - } + case s: SpliceStatus.SpliceWaitingForSigs => s + case _ => SpliceStatus.NoSplice + } case d: DATA_CLOSING => d.copy(commitments = freeze2(d.commitments)) case d: DATA_NEGOTIATING => d.copy(commitments = freeze2(d.commitments)) case d: DATA_NEGOTIATING_SIMPLE => d.copy(commitments = freeze2(d.commitments)) case d: DATA_SHUTDOWN => d.copy(commitments = freeze2(d.commitments)) } + // When negotiating closing transactions with the option_simple_close feature, we discard pending signatures on + // disconnection and will restart a signing round on reconnection. 
+ def freeze4(input: PersistentChannelData): PersistentChannelData = input match { + case d: DATA_NEGOTIATING_SIMPLE => freeze3(d.copy(status = d.status.disconnect())) + case d => freeze3(d) + } + super.addOrUpdateChannel(data) val check = super.getChannel(data.channelId) - val frozen = freeze3(data) + val frozen = freeze4(data) require(check.contains(frozen), s"serialization/deserialization check failed, $check != $frozen") } } @@ -133,6 +140,7 @@ object TestDatabases { } object TestPgDatabases { + import _root_.io.zonky.test.db.postgres.embedded.EmbeddedPostgres /** single instance */ diff --git a/eclair-core/src/test/scala/fr/acinq/eclair/channel/states/g/NegotiatingStateSpec.scala b/eclair-core/src/test/scala/fr/acinq/eclair/channel/states/g/NegotiatingStateSpec.scala index 226767cd4b..174bb245b5 100644 --- a/eclair-core/src/test/scala/fr/acinq/eclair/channel/states/g/NegotiatingStateSpec.scala +++ b/eclair-core/src/test/scala/fr/acinq/eclair/channel/states/g/NegotiatingStateSpec.scala @@ -23,14 +23,14 @@ import fr.acinq.eclair.blockchain.fee.{FeeratePerKw, FeeratesPerKw} import fr.acinq.eclair.channel.Helpers.Closing import fr.acinq.eclair.channel._ import fr.acinq.eclair.channel.fsm.Channel -import fr.acinq.eclair.channel.publish.TxPublisher.{PublishFinalTx, PublishTx} +import fr.acinq.eclair.channel.publish.TxPublisher.{PublishFinalTx, PublishTx, SetChannelId} import fr.acinq.eclair.channel.states.ChannelStateTestsBase.PimpTestFSM import fr.acinq.eclair.channel.states.{ChannelStateTestsBase, ChannelStateTestsTags} import fr.acinq.eclair.testutils.PimpTestProbe._ import fr.acinq.eclair.transactions.Transactions import fr.acinq.eclair.transactions.Transactions.ZeroFeeHtlcTxAnchorOutputsCommitmentFormat import fr.acinq.eclair.wire.protocol.ClosingSignedTlv.FeeRange -import fr.acinq.eclair.wire.protocol.{AnnouncementSignatures, ClosingComplete, ClosingSig, ClosingSigned, ClosingTlv, Error, Shutdown, TlvStream, Warning} +import fr.acinq.eclair.wire.protocol.{AnnouncementSignatures, ChannelReestablish, ClosingComplete, ClosingSig, ClosingSigned, ClosingTlv, Error, Init, Shutdown, TlvStream, Warning} import fr.acinq.eclair.{BlockHeight, CltvExpiry, Features, MilliSatoshiLong, TestConstants, TestKitBaseClass, randomBytes32, randomKey} import org.scalatest.funsuite.FixtureAnyFunSuiteLike import org.scalatest.{Outcome, Tag} @@ -586,25 +586,43 @@ class NegotiatingStateSpec extends TestKitBaseClass with FixtureAnyFunSuiteLike test("recv ClosingComplete (with concurrent shutdown)", Tag(ChannelStateTestsTags.SimpleClose)) { f => import f._ aliceClose(f) - val aliceClosingComplete1 = alice2bob.expectMsgType[ClosingComplete] - bob2alice.expectMsgType[ClosingComplete] // ignored - // Bob updates his closing script before receiving Alice's closing_complete. + val aliceClosingComplete1 = alice2bob.expectMsgType[ClosingComplete] // ignored + bob2alice.expectMsgType[ClosingComplete] + bob2alice.forward(alice) + alice2blockchain.expectMsgType[PublishFinalTx] + alice2blockchain.expectMsgType[WatchTxConfirmed] + val aliceClosingSig1 = alice2bob.expectMsgType[ClosingSig] // ignored + + // Bob updates his closing script before receiving Alice's closing_complete and closing_sig. val bobScript = Script.write(Script.pay2wpkh(randomKey().publicKey)) bob ! 
CMD_CLOSE(TestProbe().ref, Some(bobScript), Some(ClosingFeerates(preferred = FeeratePerKw(2500 sat), min = FeeratePerKw(253 sat), max = FeeratePerKw(2500 sat)))) - val bobShutdown = bob2alice.expectMsgType[Shutdown] - assert(bobShutdown.scriptPubKey == bobScript) - val bobClosingComplete = bob2alice.expectMsgType[ClosingComplete] + assert(bob2alice.expectMsgType[Shutdown].scriptPubKey == bobScript) + bob2alice.forward(alice) + + // Bob receives Alice's previous closing_complete and closing_sig and ignores them. alice2bob.forward(bob, aliceClosingComplete1) - bob2alice.expectNoMessage(100 millis) // Bob ignores Alice's obsolete closing_complete. - // When Alice receives Bob's shutdown, she doesn't change her own closing txs. - bob2alice.forward(alice, bobShutdown) - alice2bob.expectNoMessage(100 millis) - // When she receives Bob's new closing_complete, she signs it: Bob now has closing transactions with his last closing script. - bob2alice.forward(alice, bobClosingComplete) - val aliceClosingSig = alice2bob.expectMsgType[ClosingSig] - alice2bob.forward(bob, aliceClosingSig) - alice2blockchain.expectMsgType[PublishFinalTx] - bob2blockchain.expectMsgType[PublishFinalTx] + alice2bob.forward(bob, aliceClosingSig1) + bob2alice.expectNoMessage(100 millis) + + // Alice re-sends shutdown in response to Bob's shutdown, at which point they sign transactions from scratch. + alice2bob.expectMsgType[Shutdown] + alice2bob.forward(bob) + bob2alice.expectMsgType[ClosingComplete] + bob2alice.forward(alice) + val bobTxId = alice2blockchain.expectMsgType[PublishFinalTx].tx.txid + assert(alice2blockchain.expectMsgType[WatchTxConfirmed].txId == bobTxId) + alice2bob.expectMsgType[ClosingComplete] + alice2bob.forward(bob) + val aliceTxId = bob2blockchain.expectMsgType[PublishFinalTx].tx.txid + assert(bob2blockchain.expectMsgType[WatchTxConfirmed].txId == aliceTxId) + alice2bob.expectMsgType[ClosingSig] + alice2bob.forward(bob) + assert(bob2blockchain.expectMsgType[PublishFinalTx].tx.txid == bobTxId) + assert(bob2blockchain.expectMsgType[WatchTxConfirmed].txId == bobTxId) + bob2alice.expectMsgType[ClosingSig] + bob2alice.forward(alice) + assert(alice2blockchain.expectMsgType[PublishFinalTx].tx.txid == aliceTxId) + assert(alice2blockchain.expectMsgType[WatchTxConfirmed].txId == aliceTxId) } test("recv WatchFundingSpentTriggered (counterparty's mutual close)") { f => @@ -692,6 +710,8 @@ class NegotiatingStateSpec extends TestKitBaseClass with FixtureAnyFunSuiteLike alice ! 
CMD_CLOSE(TestProbe().ref, Some(Script.write(Script.pay2wpkh(randomKey().publicKey))), None) alice2bob.expectMsgType[Shutdown] alice2bob.forward(bob) + bob2alice.expectMsgType[Shutdown] + bob2alice.forward(alice) alice2bob.expectMsgType[ClosingComplete] alice2bob.forward(bob) bob2alice.expectMsgType[ClosingSig] @@ -795,6 +815,106 @@ class NegotiatingStateSpec extends TestKitBaseClass with FixtureAnyFunSuiteLike awaitCond(bob.stateName == CLOSING) } + test("receive INPUT_RECONNECTED", Tag(ChannelStateTestsTags.SimpleClose)) { f => + import f._ + aliceClose(f) + alice2bob.expectMsgType[ClosingComplete] + alice2bob.forward(bob) + val aliceTxId1 = bob2blockchain.expectMsgType[PublishFinalTx].tx.txid + assert(bob2blockchain.expectMsgType[WatchTxConfirmed].txId == aliceTxId1) + bob2alice.expectMsgType[ClosingComplete] + bob2alice.forward(alice) + val bobTxId1 = alice2blockchain.expectMsgType[PublishFinalTx].tx.txid + assert(alice2blockchain.expectMsgType[WatchTxConfirmed].txId == bobTxId1) + alice2bob.expectMsgType[ClosingSig] + alice2bob.forward(bob) + assert(bob2blockchain.expectMsgType[PublishFinalTx].tx.txid == bobTxId1) + assert(bob2blockchain.expectMsgType[WatchTxConfirmed].txId == bobTxId1) + bob2alice.expectMsgType[ClosingSig] // ignored + + // A disconnection happens before Alice received Bob's closing_sig. + // On reconnection, she retries signing her closing transaction. + alice ! INPUT_DISCONNECTED + bob ! INPUT_DISCONNECTED + awaitCond(alice.stateName == OFFLINE) + awaitCond(bob.stateName == OFFLINE) + val aliceInit = Init(TestConstants.Alice.nodeParams.features.initFeatures()) + val bobInit = Init(TestConstants.Bob.nodeParams.features.initFeatures()) + alice ! INPUT_RECONNECTED(alice2bob.ref, aliceInit, bobInit) + bob ! INPUT_RECONNECTED(bob2alice.ref, bobInit, aliceInit) + alice2bob.expectMsgType[ChannelReestablish] + alice2bob.forward(bob) + bob2alice.expectMsgType[ChannelReestablish] + bob2alice.forward(alice) + alice2bob.expectMsgType[Shutdown] + alice2bob.forward(bob) + bob2alice.expectMsgType[Shutdown] + bob2alice.forward(alice) + alice2bob.expectMsgType[ClosingComplete] + alice2bob.forward(bob) + val aliceTxId2 = bob2blockchain.expectMsgType[PublishFinalTx].tx.txid + assert(bob2blockchain.expectMsgType[WatchTxConfirmed].txId == aliceTxId2) + bob2alice.expectMsgType[ClosingSig] + bob2alice.forward(alice) + assert(alice2blockchain.expectMsgType[PublishFinalTx].tx.txid == aliceTxId2) + assert(alice2blockchain.expectMsgType[WatchTxConfirmed].txId == aliceTxId2) + bob2alice.expectNoMessage(100 millis) + } + + test("receive INPUT_RESTORED", Tag(ChannelStateTestsTags.SimpleClose)) { f => + import f._ + aliceClose(f) + alice2bob.expectMsgType[ClosingComplete] + alice2bob.forward(bob) + val aliceTxId1 = bob2blockchain.expectMsgType[PublishFinalTx].tx.txid + assert(bob2blockchain.expectMsgType[WatchTxConfirmed].txId == aliceTxId1) + bob2alice.expectMsgType[ClosingComplete] + bob2alice.forward(alice) + val bobTxId1 = alice2blockchain.expectMsgType[PublishFinalTx].tx.txid + assert(alice2blockchain.expectMsgType[WatchTxConfirmed].txId == bobTxId1) + alice2bob.expectMsgType[ClosingSig] + alice2bob.forward(bob) + assert(bob2blockchain.expectMsgType[PublishFinalTx].tx.txid == bobTxId1) + assert(bob2blockchain.expectMsgType[WatchTxConfirmed].txId == bobTxId1) + bob2alice.expectMsgType[ClosingSig] // ignored + val aliceData = alice.underlyingActor.nodeParams.db.channels.getChannel(channelId(alice)).get + val bobData = bob.underlyingActor.nodeParams.db.channels.getChannel(channelId(bob)).get + 
+ // Alice restarts before receiving Bob's closing_sig. + // On reconnection, she retries signing her closing transaction. + // simulate another node restart + alice.setState(WAIT_FOR_INIT_INTERNAL, Nothing) + alice ! INPUT_RESTORED(aliceData) + alice2blockchain.expectMsgType[SetChannelId] + alice2blockchain.expectMsgType[WatchFundingSpent] + bob.setState(WAIT_FOR_INIT_INTERNAL, Nothing) + bob ! INPUT_RESTORED(bobData) + bob2blockchain.expectMsgType[SetChannelId] + bob2blockchain.expectMsgType[WatchFundingSpent] + awaitCond(alice.stateName == OFFLINE && bob.stateName == OFFLINE) + val aliceInit = Init(TestConstants.Alice.nodeParams.features.initFeatures()) + val bobInit = Init(TestConstants.Bob.nodeParams.features.initFeatures()) + alice ! INPUT_RECONNECTED(alice2bob.ref, aliceInit, bobInit) + bob ! INPUT_RECONNECTED(bob2alice.ref, bobInit, aliceInit) + alice2bob.expectMsgType[ChannelReestablish] + alice2bob.forward(bob) + bob2alice.expectMsgType[ChannelReestablish] + bob2alice.forward(alice) + alice2bob.expectMsgType[Shutdown] + alice2bob.forward(bob) + bob2alice.expectMsgType[Shutdown] + bob2alice.forward(alice) + alice2bob.expectMsgType[ClosingComplete] + alice2bob.forward(bob) + val aliceTxId2 = bob2blockchain.expectMsgType[PublishFinalTx].tx.txid + assert(bob2blockchain.expectMsgType[WatchTxConfirmed].txId == aliceTxId2) + bob2alice.expectMsgType[ClosingSig] + bob2alice.forward(alice) + assert(alice2blockchain.expectMsgType[PublishFinalTx].tx.txid == aliceTxId2) + assert(alice2blockchain.expectMsgType[WatchTxConfirmed].txId == aliceTxId2) + bob2alice.expectNoMessage(100 millis) + } + test("recv Error") { f => import f._ bobClose(f) diff --git a/eclair-core/src/test/scala/fr/acinq/eclair/wire/internal/channel/version4/ChannelCodecs4Spec.scala b/eclair-core/src/test/scala/fr/acinq/eclair/wire/internal/channel/version4/ChannelCodecs4Spec.scala index ca67359422..28a772cf03 100644 --- a/eclair-core/src/test/scala/fr/acinq/eclair/wire/internal/channel/version4/ChannelCodecs4Spec.scala +++ b/eclair-core/src/test/scala/fr/acinq/eclair/wire/internal/channel/version4/ChannelCodecs4Spec.scala @@ -17,7 +17,7 @@ import fr.acinq.eclair.transactions.{CommitmentSpec, Scripts} import fr.acinq.eclair.wire.internal.channel.ChannelCodecsSpec.normal import fr.acinq.eclair.wire.internal.channel.version4.ChannelCodecs4.Codecs._ import fr.acinq.eclair.wire.internal.channel.version4.ChannelCodecs4.channelDataCodec -import fr.acinq.eclair.wire.protocol.{LiquidityAds, TxSignatures} +import fr.acinq.eclair.wire.protocol.{ClosingComplete, ClosingSig, LiquidityAds, Shutdown, TxSignatures} import fr.acinq.eclair.{BlockHeight, CltvExpiryDelta, Features, MilliSatoshi, MilliSatoshiLong, UInt64, randomBytes32, randomKey} import org.scalatest.funsuite.AnyFunSuite import scodec.bits._ @@ -230,4 +230,28 @@ class ChannelCodecs4Spec extends AnyFunSuite { assert(originCodec.decode(trampolineRelayedBin.bits).require.value == trampolineRelayed) } + test("encode/decode closing negotiation status") { + val channelId = randomBytes32() + val localShutdown = Shutdown(channelId, Script.write(Script.pay2wpkh(randomKey().publicKey))) + val remoteShutdown = Shutdown(channelId, Script.write(Script.pay2wpkh(randomKey().publicKey))) + val waitingForRemoteShutdown = ClosingNegotiation.WaitingForRemoteShutdown(localShutdown, OnRemoteShutdown.WaitForSigs) + val closingFeerate = FeeratePerKw(5000 sat) + val waitingForRemoteShutdownWithFeerate = ClosingNegotiation.WaitingForRemoteShutdown(localShutdown, 
OnRemoteShutdown.SignTransaction(closingFeerate)) + val closingCompleteSent = ClosingCompleteSent(ClosingComplete(channelId, 1500 sat, 0), closingFeerate) + val closingSigReceived = ClosingSig(channelId) + val testCases = Map( + waitingForRemoteShutdown -> waitingForRemoteShutdown, + waitingForRemoteShutdownWithFeerate -> waitingForRemoteShutdownWithFeerate, + ClosingNegotiation.WaitingForConfirmation(localShutdown, remoteShutdown) -> waitingForRemoteShutdown, + ClosingNegotiation.SigningTransactions(localShutdown, remoteShutdown, None, None, None) -> waitingForRemoteShutdown, + ClosingNegotiation.SigningTransactions(localShutdown, remoteShutdown, Some(closingCompleteSent), None, None) -> waitingForRemoteShutdownWithFeerate, + ClosingNegotiation.SigningTransactions(localShutdown, remoteShutdown, Some(closingCompleteSent), None, Some(closingSigReceived)) -> waitingForRemoteShutdown, + ) + testCases.foreach { case (status, expected) => + val encoded = closingNegotiationCodec.encode(status).require + val decoded = closingNegotiationCodec.decode(encoded).require.value + assert(decoded == expected) + } + } + }
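
Reviewer note (not part of the patch): below is a minimal, self-contained Scala sketch of the closing-negotiation state machine this change introduces, to make the strict `shutdown` exchange and the restart-on-disconnect behaviour easy to trace. It is a simplification, not eclair code: the real `ClosingNegotiation` states carry `Shutdown` messages, feerates and pending partial signatures, and the FSM also stores data, publishes transactions and sends warnings. `ClosingFlowSketch`, `Msg` and `step` are illustrative names that do not exist in the codebase.

object ClosingFlowSketch {

  // Peer messages, reduced to the ones that drive state transitions.
  sealed trait Msg
  case object ShutdownMsg extends Msg        // shutdown from the peer
  case object ClosingCompleteMsg extends Msg // closing_complete from the peer
  case object ClosingSigMsg extends Msg      // closing_sig from the peer
  case object Disconnected extends Msg       // connection loss

  // States mirroring ClosingNegotiation, without their payloads.
  sealed trait State
  /** We sent shutdown and wait for the peer's shutdown before signing anything. */
  case object WaitingForRemoteShutdown extends State
  /** Shutdown has been exchanged: closing_complete / closing_sig may flow. */
  case object SigningTransactions extends State
  /** Closing transactions are fully signed: wait for confirmation or a new shutdown. */
  case object WaitingForConfirmation extends State

  def step(state: State, msg: Msg): State = (state, msg) match {
    // Receiving the peer's shutdown starts a fresh signing round.
    case (WaitingForRemoteShutdown, ShutdownMsg) => SigningTransactions
    // closing_complete is only accepted while signing (the real FSM signs it and replies with closing_sig).
    case (SigningTransactions, ClosingCompleteMsg) => SigningTransactions
    // closing_sig completes our closing transaction: wait for confirmation.
    case (SigningTransactions, ClosingSigMsg) => WaitingForConfirmation
    // Their new shutdown (e.g. to RBF) restarts a signing round: we re-send ours and sign again.
    case (WaitingForConfirmation, ShutdownMsg) => SigningTransactions
    // Disconnection always discards pending signatures: shutdown must be re-exchanged.
    case (_, Disconnected) => WaitingForRemoteShutdown
    // Anything else is ignored (the real FSM logs a warning or sends one to the peer).
    case (s, _) => s
  }

  def main(args: Array[String]): Unit = {
    val msgs = Seq(ShutdownMsg, ClosingCompleteMsg, Disconnected, ShutdownMsg, ClosingSigMsg)
    println(msgs.foldLeft[State](WaitingForRemoteShutdown)(step)) // WaitingForConfirmation
  }
}

A locally initiated re-close (CMD_CLOSE with a new script or feerate) follows the opposite path in the patch: the node sends a fresh shutdown and moves back to WaitingForRemoteShutdown until the peer answers with theirs.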