diff --git a/cmd/utils/hc.pb.go b/cmd/utils/hc.pb.go new file mode 100644 index 0000000000..88f4fbcb19 --- /dev/null +++ b/cmd/utils/hc.pb.go @@ -0,0 +1,228 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// versions: +// protoc-gen-go v1.35.1 +// protoc v5.28.2 +// source: cmd/utils/hc.proto + +package utils + +import ( + common "github.com/dominant-strategies/go-quai/common" + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + reflect "reflect" + sync "sync" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +type ProtoNode struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Hash *common.ProtoHash `protobuf:"bytes,1,opt,name=hash,proto3,oneof" json:"hash,omitempty"` + Number [][]byte `protobuf:"bytes,2,rep,name=number,proto3" json:"number,omitempty"` + Location *common.ProtoLocation `protobuf:"bytes,3,opt,name=location,proto3,oneof" json:"location,omitempty"` + Entropy []byte `protobuf:"bytes,4,opt,name=entropy,proto3,oneof" json:"entropy,omitempty"` +} + +func (x *ProtoNode) Reset() { + *x = ProtoNode{} + mi := &file_cmd_utils_hc_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *ProtoNode) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ProtoNode) ProtoMessage() {} + +func (x *ProtoNode) ProtoReflect() protoreflect.Message { + mi := &file_cmd_utils_hc_proto_msgTypes[0] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ProtoNode.ProtoReflect.Descriptor instead. +func (*ProtoNode) Descriptor() ([]byte, []int) { + return file_cmd_utils_hc_proto_rawDescGZIP(), []int{0} +} + +func (x *ProtoNode) GetHash() *common.ProtoHash { + if x != nil { + return x.Hash + } + return nil +} + +func (x *ProtoNode) GetNumber() [][]byte { + if x != nil { + return x.Number + } + return nil +} + +func (x *ProtoNode) GetLocation() *common.ProtoLocation { + if x != nil { + return x.Location + } + return nil +} + +func (x *ProtoNode) GetEntropy() []byte { + if x != nil { + return x.Entropy + } + return nil +} + +type ProtoNodeSet struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + NodeSet map[string]*ProtoNode `protobuf:"bytes,1,rep,name=node_set,json=nodeSet,proto3" json:"node_set,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` +} + +func (x *ProtoNodeSet) Reset() { + *x = ProtoNodeSet{} + mi := &file_cmd_utils_hc_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *ProtoNodeSet) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ProtoNodeSet) ProtoMessage() {} + +func (x *ProtoNodeSet) ProtoReflect() protoreflect.Message { + mi := &file_cmd_utils_hc_proto_msgTypes[1] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ProtoNodeSet.ProtoReflect.Descriptor instead. 
+func (*ProtoNodeSet) Descriptor() ([]byte, []int) { + return file_cmd_utils_hc_proto_rawDescGZIP(), []int{1} +} + +func (x *ProtoNodeSet) GetNodeSet() map[string]*ProtoNode { + if x != nil { + return x.NodeSet + } + return nil +} + +var File_cmd_utils_hc_proto protoreflect.FileDescriptor + +var file_cmd_utils_hc_proto_rawDesc = []byte{ + 0x0a, 0x12, 0x63, 0x6d, 0x64, 0x2f, 0x75, 0x74, 0x69, 0x6c, 0x73, 0x2f, 0x68, 0x63, 0x2e, 0x70, + 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x02, 0x68, 0x63, 0x1a, 0x19, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, + 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x5f, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x70, 0x72, + 0x6f, 0x74, 0x6f, 0x22, 0xc8, 0x01, 0x0a, 0x09, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x4e, 0x6f, 0x64, + 0x65, 0x12, 0x2a, 0x0a, 0x04, 0x68, 0x61, 0x73, 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, + 0x11, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x48, 0x61, + 0x73, 0x68, 0x48, 0x00, 0x52, 0x04, 0x68, 0x61, 0x73, 0x68, 0x88, 0x01, 0x01, 0x12, 0x16, 0x0a, + 0x06, 0x6e, 0x75, 0x6d, 0x62, 0x65, 0x72, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0c, 0x52, 0x06, 0x6e, + 0x75, 0x6d, 0x62, 0x65, 0x72, 0x12, 0x36, 0x0a, 0x08, 0x6c, 0x6f, 0x63, 0x61, 0x74, 0x69, 0x6f, + 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, + 0x2e, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x4c, 0x6f, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x48, 0x01, + 0x52, 0x08, 0x6c, 0x6f, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x88, 0x01, 0x01, 0x12, 0x1d, 0x0a, + 0x07, 0x65, 0x6e, 0x74, 0x72, 0x6f, 0x70, 0x79, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0c, 0x48, 0x02, + 0x52, 0x07, 0x65, 0x6e, 0x74, 0x72, 0x6f, 0x70, 0x79, 0x88, 0x01, 0x01, 0x42, 0x07, 0x0a, 0x05, + 0x5f, 0x68, 0x61, 0x73, 0x68, 0x42, 0x0b, 0x0a, 0x09, 0x5f, 0x6c, 0x6f, 0x63, 0x61, 0x74, 0x69, + 0x6f, 0x6e, 0x42, 0x0a, 0x0a, 0x08, 0x5f, 0x65, 0x6e, 0x74, 0x72, 0x6f, 0x70, 0x79, 0x22, 0x93, + 0x01, 0x0a, 0x0c, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x4e, 0x6f, 0x64, 0x65, 0x53, 0x65, 0x74, 0x12, + 0x38, 0x0a, 0x08, 0x6e, 0x6f, 0x64, 0x65, 0x5f, 0x73, 0x65, 0x74, 0x18, 0x01, 0x20, 0x03, 0x28, + 0x0b, 0x32, 0x1d, 0x2e, 0x68, 0x63, 0x2e, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x4e, 0x6f, 0x64, 0x65, + 0x53, 0x65, 0x74, 0x2e, 0x4e, 0x6f, 0x64, 0x65, 0x53, 0x65, 0x74, 0x45, 0x6e, 0x74, 0x72, 0x79, + 0x52, 0x07, 0x6e, 0x6f, 0x64, 0x65, 0x53, 0x65, 0x74, 0x1a, 0x49, 0x0a, 0x0c, 0x4e, 0x6f, 0x64, + 0x65, 0x53, 0x65, 0x74, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, + 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x23, 0x0a, 0x05, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0d, 0x2e, 0x68, 0x63, 0x2e, + 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x4e, 0x6f, 0x64, 0x65, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x3a, 0x02, 0x38, 0x01, 0x42, 0x32, 0x5a, 0x30, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, + 0x6f, 0x6d, 0x2f, 0x64, 0x6f, 0x6d, 0x69, 0x6e, 0x61, 0x6e, 0x74, 0x2d, 0x73, 0x74, 0x72, 0x61, + 0x74, 0x65, 0x67, 0x69, 0x65, 0x73, 0x2f, 0x67, 0x6f, 0x2d, 0x71, 0x75, 0x61, 0x69, 0x2f, 0x63, + 0x6d, 0x64, 0x2f, 0x75, 0x74, 0x69, 0x6c, 0x73, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, +} + +var ( + file_cmd_utils_hc_proto_rawDescOnce sync.Once + file_cmd_utils_hc_proto_rawDescData = file_cmd_utils_hc_proto_rawDesc +) + +func file_cmd_utils_hc_proto_rawDescGZIP() []byte { + file_cmd_utils_hc_proto_rawDescOnce.Do(func() { + file_cmd_utils_hc_proto_rawDescData = protoimpl.X.CompressGZIP(file_cmd_utils_hc_proto_rawDescData) + }) + return 
file_cmd_utils_hc_proto_rawDescData
+}
+
+var file_cmd_utils_hc_proto_msgTypes = make([]protoimpl.MessageInfo, 3)
+var file_cmd_utils_hc_proto_goTypes = []any{
+	(*ProtoNode)(nil),            // 0: hc.ProtoNode
+	(*ProtoNodeSet)(nil),         // 1: hc.ProtoNodeSet
+	nil,                          // 2: hc.ProtoNodeSet.NodeSetEntry
+	(*common.ProtoHash)(nil),     // 3: common.ProtoHash
+	(*common.ProtoLocation)(nil), // 4: common.ProtoLocation
+}
+var file_cmd_utils_hc_proto_depIdxs = []int32{
+	3, // 0: hc.ProtoNode.hash:type_name -> common.ProtoHash
+	4, // 1: hc.ProtoNode.location:type_name -> common.ProtoLocation
+	2, // 2: hc.ProtoNodeSet.node_set:type_name -> hc.ProtoNodeSet.NodeSetEntry
+	0, // 3: hc.ProtoNodeSet.NodeSetEntry.value:type_name -> hc.ProtoNode
+	4, // [4:4] is the sub-list for method output_type
+	4, // [4:4] is the sub-list for method input_type
+	4, // [4:4] is the sub-list for extension type_name
+	4, // [4:4] is the sub-list for extension extendee
+	0, // [0:4] is the sub-list for field type_name
+}
+
+func init() { file_cmd_utils_hc_proto_init() }
+func file_cmd_utils_hc_proto_init() {
+	if File_cmd_utils_hc_proto != nil {
+		return
+	}
+	file_cmd_utils_hc_proto_msgTypes[0].OneofWrappers = []any{}
+	type x struct{}
+	out := protoimpl.TypeBuilder{
+		File: protoimpl.DescBuilder{
+			GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
+			RawDescriptor: file_cmd_utils_hc_proto_rawDesc,
+			NumEnums:      0,
+			NumMessages:   3,
+			NumExtensions: 0,
+			NumServices:   0,
+		},
+		GoTypes:           file_cmd_utils_hc_proto_goTypes,
+		DependencyIndexes: file_cmd_utils_hc_proto_depIdxs,
+		MessageInfos:      file_cmd_utils_hc_proto_msgTypes,
+	}.Build()
+	File_cmd_utils_hc_proto = out.File
+	file_cmd_utils_hc_proto_rawDesc = nil
+	file_cmd_utils_hc_proto_goTypes = nil
+	file_cmd_utils_hc_proto_depIdxs = nil
+}
diff --git a/cmd/utils/hc.proto b/cmd/utils/hc.proto
new file mode 100644
index 0000000000..7569970420
--- /dev/null
+++ b/cmd/utils/hc.proto
@@ -0,0 +1,17 @@
+syntax = "proto3";
+
+package hc;
+option go_package = "github.com/dominant-strategies/go-quai/cmd/utils";
+
+import "common/proto_common.proto";
+
+message ProtoNode {
+  optional common.ProtoHash hash = 1;
+  repeated bytes number = 2;
+  optional common.ProtoLocation location = 3;
+  optional bytes entropy = 4;
+}
+
+message ProtoNodeSet {
+  map<string, ProtoNode> node_set = 1;
+}
diff --git a/cmd/utils/hierarchical_coordinator.go b/cmd/utils/hierarchical_coordinator.go
index a0b43ff5f8..39b58c6f09 100644
--- a/cmd/utils/hierarchical_coordinator.go
+++ b/cmd/utils/hierarchical_coordinator.go
@@ -36,6 +36,7 @@ const (
 
 var (
 	c_currentExpansionNumberKey = []byte("cexp")
+	c_bestNodeKey               = []byte("best")
 )
 
 type Node struct {
@@ -45,10 +46,61 @@ type Node struct {
 	entropy *big.Int
 }
 
+func (ch *Node) ProtoEncode() *ProtoNode {
+	protoNumber := make([][]byte, common.HierarchyDepth)
+	for i, num := range ch.number {
+		protoNumber[i] = num.Bytes()
+	}
+	protoNode := &ProtoNode{
+		Hash:     ch.hash.ProtoEncode(),
+		Number:   protoNumber,
+		Location: ch.location.ProtoEncode(),
+		Entropy:  ch.entropy.Bytes(),
+	}
+	return protoNode
+}
+
+func (ch *Node) ProtoDecode(protoNode *ProtoNode) {
+	hash := &common.Hash{}
+	hash.ProtoDecode(protoNode.GetHash())
+	ch.hash = *hash
+
+	number := make([]*big.Int, common.HierarchyDepth)
+	for i, num := range protoNode.GetNumber() {
+		number[i] = new(big.Int).SetBytes(num)
+	}
+	ch.number = number
+
+	location := &common.Location{}
+	location.ProtoDecode(protoNode.GetLocation())
+	ch.location = *location
+
+	ch.entropy = new(big.Int).SetBytes(protoNode.GetEntropy())
+}
+
 type NodeSet struct {
 	nodes
map[string]Node } +func (ns *NodeSet) ProtoEncode() *ProtoNodeSet { + protoNodeSet := &ProtoNodeSet{} + protoNodeSet.NodeSet = make(map[string]*ProtoNode) + + for loc, node := range ns.nodes { + node := node.ProtoEncode() + protoNodeSet.NodeSet[loc] = node + } + return protoNodeSet +} + +func (ns *NodeSet) ProtoDecode(protoNodeSet *ProtoNodeSet) { + for loc, protoNode := range protoNodeSet.NodeSet { + node := &Node{} + node.ProtoDecode(protoNode) + ns.nodes[loc] = *node + } +} + func (ch *Node) Empty() bool { return ch.hash == common.Hash{} && ch.location.Equal(common.Location{}) && ch.entropy == nil } @@ -137,6 +189,8 @@ func (hc *HierarchicalCoordinator) InitPendingHeaders() { } } hc.Add(new(big.Int).SetUint64(0), nodeSet, hc.pendingHeaders) + + hc.LoadBestNodeSet() } func (hc *HierarchicalCoordinator) Add(entropy *big.Int, node NodeSet, newPendingHeaders *PendingHeaders) { @@ -208,11 +262,14 @@ func (ns *NodeSet) Extendable(wo *types.WorkObject, order int) bool { func (ns *NodeSet) Entropy(numRegions int, numZones int) *big.Int { entropy := new(big.Int) - entropy.Add(entropy, ns.nodes[common.Location{}.Name()].entropy) + primeEntropy := ns.nodes[common.Location{}.Name()].entropy + entropy.Add(entropy, primeEntropy) for i := 0; i < numRegions; i++ { - entropy.Add(entropy, ns.nodes[common.Location{byte(i)}.Name()].entropy) + regionEntropy := ns.nodes[common.Location{byte(i)}.Name()].entropy + entropy.Add(entropy, regionEntropy) for j := 0; j < numZones; j++ { - entropy.Add(entropy, ns.nodes[common.Location{byte(i), byte(j)}.Name()].entropy) + zoneEntropy := ns.nodes[common.Location{byte(i), byte(j)}.Name()].entropy + entropy.Add(entropy, zoneEntropy) } } @@ -430,6 +487,7 @@ func (hc *HierarchicalCoordinator) Stop() { for _, chainEventSub := range hc.chainSubs { chainEventSub.Unsubscribe() } + hc.StoreBestNodeSet() hc.expansionSub.Unsubscribe() hc.db.Close() hc.wg.Wait() @@ -602,44 +660,6 @@ func (hc *HierarchicalCoordinator) ChainEventLoop(chainEvent chan core.ChainEven for { select { case head := <-chainEvent: - // If this is the first block we have after a restart, then we can - // add this block into the node set directly - // Since on startup we initialize the pending headers cache with the - // genesis block, we can check and see if we are in that state - // We can do that by checking the length of the pendding headers order - // cache length is 1 - if len(hc.pendingHeaders.order) == 1 { - // create a nodeset on this block - nodeSet := NodeSet{ - nodes: make(map[string]Node), - } - - //Initialize for prime - backend := hc.GetBackend(common.Location{}) - entropy := backend.TotalLogEntropy(head.Block) - newNode := Node{ - hash: head.Block.ParentHash(common.PRIME_CTX), - number: head.Block.NumberArray(), - location: common.Location{}, - entropy: entropy, - } - nodeSet.nodes[common.Location{}.Name()] = newNode - - regionLocation := common.Location{byte(head.Block.Location().Region())} - backend = hc.GetBackend(regionLocation) - newNode.hash = head.Block.ParentHash(common.REGION_CTX) - newNode.location = regionLocation - newNode.entropy = entropy - nodeSet.nodes[regionLocation.Name()] = newNode - - zoneLocation := head.Block.Location() - backend = hc.GetBackend(zoneLocation) - newNode.hash = head.Block.ParentHash(common.ZONE_CTX) - newNode.location = zoneLocation - newNode.entropy = entropy - nodeSet.nodes[zoneLocation.Name()] = newNode - hc.Add(entropy, nodeSet, hc.pendingHeaders) - } go hc.ReapplicationLoop(head) go hc.ComputeMapPending(head) @@ -1321,3 +1341,50 @@ func (hc 
*HierarchicalCoordinator) GetBackendForLocationAndOrder(location common
 	}
 	return nil
 }
+
+func (hc *HierarchicalCoordinator) StoreBestNodeSet() {
+
+	log.Global.Info("Storing the best node set on stop")
+
+	bestNode, exists := hc.pendingHeaders.collection.Get(hc.bestEntropy.String())
+	if !exists {
+		log.Global.Error("best entropy node set doesn't exist in the pending headers collection")
+		return
+	}
+	protoBestNode := bestNode.ProtoEncode()
+
+	data, err := proto.Marshal(protoBestNode)
+	if err != nil {
+		log.Global.Error("Error marshalling best node, err: ", err)
+		return
+	}
+
+	err = hc.db.Put(c_bestNodeKey, data, nil)
+	if err != nil {
+		log.Global.Error("Error storing the best node key, err: ", err)
+		return
+	}
+}
+
+func (hc *HierarchicalCoordinator) LoadBestNodeSet() {
+	data, err := hc.db.Get(c_bestNodeKey, nil)
+	if err != nil {
+		log.Global.Error("Error loading the best node, err: ", err)
+		return
+	}
+
+	protoNodeSet := &ProtoNodeSet{}
+	err = proto.Unmarshal(data, protoNodeSet)
+	if err != nil {
+		log.Global.Error("Error unmarshalling the proto node set, err: ", err)
+		return
+	}
+
+	nodeSet := NodeSet{}
+	nodeSet.nodes = make(map[string]Node)
+	nodeSet.ProtoDecode(protoNodeSet)
+
+	numRegions, numZones := common.GetHierarchySizeForExpansionNumber(hc.currentExpansionNumber)
+	hc.bestEntropy = nodeSet.Entropy(int(numRegions), int(numZones))
+	hc.Add(hc.bestEntropy, nodeSet, hc.pendingHeaders)
+}
diff --git a/core/headerchain.go b/core/headerchain.go
index da1408817e..aff75c4aa3 100644
--- a/core/headerchain.go
+++ b/core/headerchain.go
@@ -1224,7 +1224,7 @@ func (hc *HeaderChain) ComputeExpansionNumber(parent *types.WorkObject) (uint8,
 	}
 
 	// If the Prime Terminus is genesis the expansion number is the genesis expansion number
-	if hc.IsGenesisHash(primeTerminusHash) && hc.NodeLocation().Equal(common.Location{0, 0}) {
+	if hc.IsGenesisHash(primeTerminusHash) && hc.NodeLocation().Equal(common.Location{0, 0}) || hc.config.StartingExpansionNumber != 0 {
 		return primeTerminus.ExpansionNumber(), nil
 	} else {
 		// check if the prime terminus is the block where the threshold count
diff --git a/params/config.go b/params/config.go
index dbb147558d..e0b527320d 100644
--- a/params/config.go
+++ b/params/config.go
@@ -112,9 +112,9 @@ var (
 	//
 	// This configuration is intentionally not using keyed fields to force anyone
 	// adding flags to the config to also have to set these fields.
- AllProgpowProtocolChanges = &ChainConfig{big.NewInt(1337), "progpow", new(Blake3powConfig), new(ProgpowConfig), common.Location{}, common.Hash{}, false} + AllProgpowProtocolChanges = &ChainConfig{big.NewInt(1337), "progpow", new(Blake3powConfig), new(ProgpowConfig), common.Location{}, common.Hash{}, false, 0} - TestChainConfig = &ChainConfig{big.NewInt(1), "progpow", new(Blake3powConfig), new(ProgpowConfig), common.Location{}, common.Hash{}, false} + TestChainConfig = &ChainConfig{big.NewInt(1), "progpow", new(Blake3powConfig), new(ProgpowConfig), common.Location{}, common.Hash{}, false, 0} TestRules = TestChainConfig.Rules(new(big.Int)) ) @@ -126,12 +126,13 @@ var ( type ChainConfig struct { ChainID *big.Int `json:"chainId"` // chainId identifies the current chain and is used for replay protection // Various consensus engines - ConsensusEngine string - Blake3Pow *Blake3powConfig `json:"blake3pow,omitempty"` - Progpow *ProgpowConfig `json:"progpow,omitempty"` - Location common.Location - DefaultGenesisHash common.Hash - IndexAddressUtxos bool + ConsensusEngine string + Blake3Pow *Blake3powConfig `json:"blake3pow,omitempty"` + Progpow *ProgpowConfig `json:"progpow,omitempty"` + Location common.Location + DefaultGenesisHash common.Hash + IndexAddressUtxos bool + StartingExpansionNumber uint64 } // SetLocation sets the location on the chain config diff --git a/quai/backend.go b/quai/backend.go index ce21140b0e..b46dde4557 100644 --- a/quai/backend.go +++ b/quai/backend.go @@ -181,6 +181,7 @@ func New(stack *node.Node, p2p NetworkingAPI, config *quaiconfig.Config, nodeCtx chainConfig.Location = config.NodeLocation // TODO: See why this is necessary chainConfig.DefaultGenesisHash = config.DefaultGenesisHash chainConfig.IndexAddressUtxos = config.IndexAddressUtxos + chainConfig.StartingExpansionNumber = startingExpansionNumber logger.WithFields(log.Fields{ "Ctx": nodeCtx, "NodeLocation": config.NodeLocation,
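Since the patch adds the Node/NodeSet proto conversions without an accompanying test, here is a minimal, test-style sketch of the round trip that StoreBestNodeSet and LoadBestNodeSet depend on. It is not part of the change: the test name and values are made up, it assumes it sits in package utils next to hierarchical_coordinator.go (the Node and NodeSet fields are unexported), and it exercises only ProtoEncode/ProtoDecode plus proto.Marshal/Unmarshal, not the database or the pending-headers collection. One detail it makes explicit: NodeSet.ProtoDecode fills an existing map, so the nodes map must be allocated before decoding, exactly as LoadBestNodeSet does.

```go
package utils

import (
	"math/big"
	"testing"

	"github.com/dominant-strategies/go-quai/common"
	"google.golang.org/protobuf/proto"
)

// TestNodeSetProtoRoundTrip is a hypothetical sketch: encode a NodeSet the way
// StoreBestNodeSet does, then decode it the way LoadBestNodeSet does.
func TestNodeSetProtoRoundTrip(t *testing.T) {
	number := make([]*big.Int, common.HierarchyDepth)
	for i := range number {
		number[i] = big.NewInt(int64(i + 1))
	}
	zone := common.Location{0, 0}
	original := NodeSet{nodes: map[string]Node{
		zone.Name(): {
			hash:     common.Hash{}, // zero hash is enough for the round trip
			number:   number,
			location: zone,
			entropy:  big.NewInt(1000),
		},
	}}

	// Encode and serialize (the StoreBestNodeSet path).
	data, err := proto.Marshal(original.ProtoEncode())
	if err != nil {
		t.Fatalf("marshal: %v", err)
	}

	// Deserialize and decode (the LoadBestNodeSet path). The nodes map is
	// allocated up front because ProtoDecode only writes into an existing map.
	protoSet := &ProtoNodeSet{}
	if err := proto.Unmarshal(data, protoSet); err != nil {
		t.Fatalf("unmarshal: %v", err)
	}
	decoded := NodeSet{nodes: make(map[string]Node)}
	decoded.ProtoDecode(protoSet)

	got, ok := decoded.nodes[zone.Name()]
	if !ok {
		t.Fatalf("zone %s missing after round trip", zone.Name())
	}
	if got.entropy.Cmp(big.NewInt(1000)) != 0 {
		t.Fatalf("entropy mismatch: got %v", got.entropy)
	}
	if !got.location.Equal(zone) {
		t.Fatalf("location mismatch: got %v", got.location)
	}
}
```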
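The params/config.go hunk extends the positional ChainConfig literals with one more trailing value. For anyone constructing a config by hand, a keyed literal makes the new field self-describing; the sketch below is illustrative only (the chain id and expansion number are made up), with field names taken from the ChainConfig struct in this diff.

```go
package main

import (
	"math/big"

	"github.com/dominant-strategies/go-quai/common"
	"github.com/dominant-strategies/go-quai/params"
)

func main() {
	// Keyed form of the updated ChainConfig. The unkeyed literals in
	// params/config.go (AllProgpowProtocolChanges, TestChainConfig) each gained a
	// trailing 0 because StartingExpansionNumber is now the last field.
	cfg := &params.ChainConfig{
		ChainID:                 big.NewInt(9000), // hypothetical chain id
		ConsensusEngine:         "progpow",
		Blake3Pow:               new(params.Blake3powConfig),
		Progpow:                 new(params.ProgpowConfig),
		Location:                common.Location{},
		DefaultGenesisHash:      common.Hash{},
		IndexAddressUtxos:       false,
		StartingExpansionNumber: 2, // non-zero: ComputeExpansionNumber returns the prime terminus expansion number
	}
	_ = cfg
}
```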