diff --git a/spec/epochs_spec.lua b/spec/epochs_spec.lua index 8df8d1f8..10ced980 100644 --- a/spec/epochs_spec.lua +++ b/spec/epochs_spec.lua @@ -51,8 +51,8 @@ describe("epochs", function() } end) - describe("computePrescribedObserversForEpoch", function() - it("should return all eligible gateways if fewer than the maximum in network", function() + describe("getPrescribedObserversWithWeightsForEpoch", function() + it("should return the prescribed observers with weights for the epoch", function() _G.GatewayRegistry["test-this-is-valid-arweave-wallet-address-1"] = { operatorStake = gar.getSettings().operators.minStake, totalDelegatedStake = 0, @@ -71,26 +71,63 @@ describe("epochs", function() settings = testSettings, status = "joined", observerAddress = "observerAddress", + weights = { + normalizedCompositeWeight = 1, + stakeWeight = 1, + tenureWeight = 1, + gatewayRewardRatioWeight = 1, + observerRewardRatioWeight = 1, + compositeWeight = 1, + }, } + _G.Epochs[0].prescribedObservers = { + ["observerAddress"] = "test-this-is-valid-arweave-wallet-address-1", + } + local epochIndex = 0 local expectation = { { - gatewayAddress = "test-this-is-valid-arweave-wallet-address-1", observerAddress = "observerAddress", - stake = gar.getSettings().operators.minStake, - startTimestamp = startTimestamp, + gatewayAddress = "test-this-is-valid-arweave-wallet-address-1", + normalizedCompositeWeight = 1, stakeWeight = 1, - tenureWeight = 1 / gar.getSettings().observers.tenureWeightPeriod, + tenureWeight = 1, gatewayRewardRatioWeight = 1, observerRewardRatioWeight = 1, - compositeWeight = 1 / gar.getSettings().observers.tenureWeightPeriod, - normalizedCompositeWeight = 1, + compositeWeight = 1, }, } - local status, result = pcall(epochs.computePrescribedObserversForEpoch, 0, hashchain) - assert.is_true(status) - assert.are.equal(1, #result) + local result = epochs.getPrescribedObserversWithWeightsForEpoch(epochIndex) assert.are.same(expectation, result) end) + end) + + describe("computePrescribedObserversForEpoch", function() + it("should return all eligible gateways if fewer than the maximum in network", function() + _G.GatewayRegistry["test-this-is-valid-arweave-wallet-address-1"] = { + operatorStake = gar.getSettings().operators.minStake, + totalDelegatedStake = 0, + vaults = {}, + delegates = {}, + startTimestamp = startTimestamp, + stats = { + prescribedEpochCount = 0, + observedEpochCount = 0, + totalEpochCount = 0, + passedEpochCount = 0, + failedEpochCount = 0, + failedConsecutiveEpochs = 0, + passedConsecutiveEpochs = 0, + }, + settings = testSettings, + status = "joined", + observerAddress = "observerAddress", + } + local expectation = { + ["observerAddress"] = "test-this-is-valid-arweave-wallet-address-1", + } + local prescribedObserverMap = epochs.computePrescribedObserversForEpoch(0, hashchain) + assert.are.same(expectation, prescribedObserverMap) + end) it("should return the maximum number of gateways if more are enrolled in network", function() local testHashchain = "c29tZSBzYW1wbGUgaGFzaA==" -- base64 of "some sample hash" @@ -118,52 +155,25 @@ describe("epochs", function() }, settings = testSettings, status = "joined", - observerAddress = "observerAddress", + observerAddress = "observer-address-" .. i, } -- note - ordering of keys is not guaranteed when insert into maps _G.GatewayRegistry["observer" .. 
i] = gateway end local expectation = { - { - gatewayAddress = "observer1", - observerAddress = "observerAddress", - stake = gar.getSettings().operators.minStake, - startTimestamp = startTimestamp, - stakeWeight = 1, - tenureWeight = 1 / gar.getSettings().observers.tenureWeightPeriod, - gatewayRewardRatioWeight = 1, - observerRewardRatioWeight = 1, - compositeWeight = 1 / gar.getSettings().observers.tenureWeightPeriod, - normalizedCompositeWeight = 1 / 3, - }, - { - gatewayAddress = "observer3", - observerAddress = "observerAddress", - stake = gar.getSettings().operators.minStake, - startTimestamp = startTimestamp, - stakeWeight = 1, - tenureWeight = 1 / gar.getSettings().observers.tenureWeightPeriod, - gatewayRewardRatioWeight = 1, - observerRewardRatioWeight = 1, - compositeWeight = 1 / gar.getSettings().observers.tenureWeightPeriod, - normalizedCompositeWeight = 1 / 3, - }, + ["observer-address-1"] = "observer1", + ["observer-address-3"] = "observer3", } - local status, result = pcall(epochs.computePrescribedObserversForEpoch, 0, testHashchain) - assert.is_true(status) - assert.are.equal(2, #result) - table.sort(result, function(a, b) - return a.gatewayAddress < b.gatewayAddress - end) - assert.are.same(expectation, result) + local prescribedObserverMap = epochs.computePrescribedObserversForEpoch(0, testHashchain) + assert.are.same(expectation, prescribedObserverMap) end) end) describe("computePrescribedNamesForEpoch", function() -- NOTE: Record names in the tests below use spelled out numbers because without that -- there's insufficient base64url information encoded in the final encoded block to - -- disambiguate the decoded vallues. + -- disambiguate the decoded values. it("should return all eligible names if fewer than the maximum in name registry", function() _G.NameRegistry.records = { ["arns-name-one"] = { @@ -252,7 +262,7 @@ describe("epochs", function() assert.match("Observations for the current epoch cannot be submitted before", error) end) it("should throw an error if the caller is not prescribed", function() - local observer = "test-this-is-valid-arweave-wallet-address-2" + local observer = "test-this-is-valid-arweave-observer-address-2" local reportTxId = "test-this-very-valid-observations-report-tx" local settings = epochs.getSettings() local timestamp = settings.epochZeroStartTimestamp + settings.distributionDelayMs + 1 @@ -260,18 +270,7 @@ describe("epochs", function() "test-this-is-valid-arweave-wallet-address-1", } _G.Epochs[0].prescribedObservers = { - { - gatewayAddress = "test-this-is-valid-arweave-wallet-address-1", - observerAddress = "test-this-is-valid-arweave-wallet-address-1", - stake = gar.getSettings().operators.minStake, - startTimestamp = startTimestamp, - stakeWeight = 1, - tenureWeight = 1 / gar.getSettings().observers.tenureWeightPeriod, - gatewayRewardRatioWeight = 1, - observerRewardRatioWeight = 1, - compositeWeight = 1 / gar.getSettings().observers.tenureWeightPeriod, - normalizedCompositeWeight = 1, - }, + ["test-this-is-valid-arweave-observer-address-1"] = "test-this-is-valid-arweave-gateway-address-1", } local status, error = pcall(epochs.saveObservations, observer, reportTxId, failedGateways, timestamp) assert.is_false(status) @@ -280,7 +279,7 @@ describe("epochs", function() it( "should save observation when the timestamp is after the distribution delay and only mark gateways around during the full epoch as failed", function() - local observer = "test-this-is-valid-arweave-wallet-address-2" + local observer = 
"test-this-is-valid-arweave-observer-address-2" local reportTxId = "test-this-very-valid-observations-report-tx" local settings = epochs.getSettings() local timestamp = settings.epochZeroStartTimestamp + settings.distributionDelayMs + 1 @@ -302,7 +301,7 @@ describe("epochs", function() }, settings = testSettings, status = "joined", - observerAddress = "test-this-is-valid-arweave-wallet-address-1", + observerAddress = "test-this-is-valid-arweave-observer-address-1", }, ["test-this-is-valid-arweave-wallet-address-2"] = { operatorStake = gar.getSettings().operators.minStake, @@ -321,7 +320,7 @@ describe("epochs", function() }, settings = testSettings, status = "joined", - observerAddress = "test-this-is-valid-arweave-wallet-address-2", + observerAddress = "test-this-is-valid-arweave-observer-address-2", }, ["test-this-is-valid-arweave-wallet-address-3"] = { operatorStake = gar.getSettings().operators.minStake, @@ -340,7 +339,7 @@ describe("epochs", function() }, settings = testSettings, status = "joined", - observerAddress = "test-this-is-valid-arweave-wallet-address-3", + observerAddress = "test-this-is-valid-arweave-observer-address-3", }, ["test-this-is-valid-arweave-wallet-address-4"] = { operatorStake = gar.getSettings().operators.minStake, @@ -360,22 +359,11 @@ describe("epochs", function() }, settings = testSettings, status = "leaving", -- leaving, so it is not eligible to receive stats from this epoch - observerAddress = "test-this-is-valid-arweave-wallet-address-4", + observerAddress = "test-this-is-valid-arweave-observer-address-4", }, } _G.Epochs[0].prescribedObservers = { - { - gatewayAddress = "test-this-is-valid-arweave-wallet-address-2", - observerAddress = "test-this-is-valid-arweave-wallet-address-2", - stake = gar.getSettings().operators.minStake, - startTimestamp = startTimestamp, - stakeWeight = 1, - tenureWeight = 1 / gar.getSettings().observers.tenureWeightPeriod, - gatewayRewardRatioWeight = 1, - observerRewardRatioWeight = 1, - compositeWeight = 1 / gar.getSettings().observers.tenureWeightPeriod, - normalizedCompositeWeight = 1, - }, + ["test-this-is-valid-arweave-observer-address-2"] = "test-this-is-valid-arweave-wallet-address-2", } local failedGateways = { "test-this-is-valid-arweave-wallet-address-1", @@ -513,18 +501,7 @@ describe("epochs", function() reports = {}, }, prescribedObservers = { - { - compositeWeight = 4.0, - gatewayAddress = "test-this-is-valid-arweave-wallet-address-1", - gatewayRewardRatioWeight = 1.0, - normalizedCompositeWeight = 1.0, - observerAddress = "test-this-is-valid-arweave-wallet-address-1", - observerRewardRatioWeight = 1.0, - stake = gar.getSettings().operators.minStake, - stakeWeight = 1.0, - startTimestamp = 0, - tenureWeight = 4, - }, + ["test-this-is-valid-arweave-wallet-address-1"] = "test-this-is-valid-arweave-wallet-address-1", }, prescribedNames = {}, distributions = { @@ -634,21 +611,11 @@ describe("epochs", function() }, prescribedNames = {}, prescribedObservers = { - { - observerAddress = "test-this-very-valid-observer-wallet-addr-1", - }, - { - observerAddress = "test-this-very-valid-observer-wallet-addr-2", - }, - { - observerAddress = "test-this-very-valid-observer-wallet-addr-3", - }, - { - observerAddress = "test-this-very-valid-observer-wallet-addr-4", - }, - { - observerAddress = "test-this-very-valid-observer-wallet-addr-5", - }, + ["test-this-very-valid-observer-wallet-addr-1"] = "test-this-very-valid-arweave-wallet-addr-1", + ["test-this-very-valid-observer-wallet-addr-2"] = 
"test-this-very-valid-arweave-wallet-addr-2", + ["test-this-very-valid-observer-wallet-addr-3"] = "test-this-very-valid-arweave-wallet-addr-3", + ["test-this-very-valid-observer-wallet-addr-4"] = "test-this-very-valid-arweave-wallet-addr-4", + ["test-this-very-valid-observer-wallet-addr-5"] = "test-this-very-valid-arweave-wallet-addr-5", }, } diff --git a/src/epochs.lua b/src/epochs.lua index cb521c79..d2c92a06 100644 --- a/src/epochs.lua +++ b/src/epochs.lua @@ -64,7 +64,6 @@ EpochSettings = EpochSettings or { pruneEpochsCount = 14, -- prune epochs older than 14 days prescribedNameCount = 2, - rewardPercentage = 0.0005, -- 0.05% maxObservers = 50, epochZeroStartTimestamp = 1719900000000, -- July 9th, 00:00:00 UTC durationMs = constants.defaultEpochDurationMs, -- 24 hours @@ -108,11 +107,41 @@ end --- Gets the prescribed observers for an epoch --- @param epochIndex number The epoch index ---- @return WeightedGateway[] # The prescribed observers for the epoch +--- @return table # The prescribed observers for the epoch function epochs.getPrescribedObserversForEpoch(epochIndex) return epochs.getEpoch(epochIndex).prescribedObservers or {} end +--- Get prescribed observers with weights for epoch +--- @param epochIndex number The epoch index +--- @return WeightedGateway[] # The prescribed observers with weights for the epoch +function epochs.getPrescribedObserversWithWeightsForEpoch(epochIndex) + local prescribedObservers = epochs.getPrescribedObserversForEpoch(epochIndex) + -- Iterate over prescribed observers and add gateway details + local prescribedObserversWithWeights = {} + for _, gatewayAddress in pairs(prescribedObservers) do + local gateway = gar.getGateway(gatewayAddress) + if gateway then + table.insert(prescribedObserversWithWeights, { + observerAddress = gateway.observerAddress, + gatewayAddress = gatewayAddress, + normalizedCompositeWeight = gateway.weights.normalizedCompositeWeight, + stakeWeight = gateway.weights.stakeWeight, + tenureWeight = gateway.weights.tenureWeight, + gatewayRewardRatioWeight = gateway.weights.gatewayRewardRatioWeight, + observerRewardRatioWeight = gateway.weights.observerRewardRatioWeight, + compositeWeight = gateway.weights.compositeWeight, + }) + end + end + + -- sort by normalizedCompositeWeight + table.sort(prescribedObserversWithWeights, function(a, b) + return a.normalizedCompositeWeight > b.normalizedCompositeWeight + end) + return prescribedObserversWithWeights +end + --- Gets the eligible rewards for an epoch --- @param epochIndex number The epoch index --- @return Rewards # T he eligible rewards for the epoch @@ -228,7 +257,7 @@ end --- Computes the prescribed observers for an epoch --- @param epochIndex number The epoch index --- @param hashchain string The hashchain ---- @return WeightedGateway[], WeightedGateway[] # The prescribed observers for the epoch, and all the gateways with weights +--- @return table, WeightedGateway[] # The prescribed observers for the epoch, and all the gateways with weights function epochs.computePrescribedObserversForEpoch(epochIndex, hashchain) assert(epochIndex >= 0, "Epoch index must be greater than or equal to 0") assert(type(hashchain) == "string", "Hashchain must be a string") @@ -239,6 +268,7 @@ function epochs.computePrescribedObserversForEpoch(epochIndex, hashchain) -- Filter out any observers that could have a normalized composite weight of 0 local filteredObservers = {} + local prescribedObserversLookup = {} -- use ipairs as weightedObservers in array for _, observer in ipairs(weightedGateways) do if 
observer.normalizedCompositeWeight > 0 then @@ -246,7 +276,10 @@ function epochs.computePrescribedObserversForEpoch(epochIndex, hashchain) end end if #filteredObservers <= epochs.getSettings().maxObservers then - return filteredObservers, weightedGateways + for _, observer in ipairs(filteredObservers) do + prescribedObserversLookup[observer.observerAddress] = observer.gatewayAddress + end + return prescribedObserversLookup, weightedGateways end -- the hash we will use to create entropy for prescribed observers @@ -263,14 +296,12 @@ function epochs.computePrescribedObserversForEpoch(epochIndex, hashchain) -- get our prescribed observers, using the hashchain as entropy local hash = epochHash - local prescribedObserversAddressesLookup = {} - while utils.lengthOfTable(prescribedObserversAddressesLookup) < epochs.getSettings().maxObservers do + while utils.lengthOfTable(prescribedObserversLookup) < epochs.getSettings().maxObservers do local hashString = crypto.utils.array.toString(hash) local random = crypto.random(nil, nil, hashString) / 0xffffffff local cumulativeNormalizedCompositeWeight = 0 for _, observer in ipairs(filteredObservers) do - local alreadyPrescribed = prescribedObserversAddressesLookup[observer.gatewayAddress] - + local alreadyPrescribed = prescribedObserversLookup[observer.observerAddress] -- add only if observer has not already been prescribed if not alreadyPrescribed then -- add the observers normalized composite weight to the cumulative weight @@ -278,7 +309,7 @@ function epochs.computePrescribedObserversForEpoch(epochIndex, hashchain) + observer.normalizedCompositeWeight -- if the random value is less than the cumulative weight, we have found our observer if random <= cumulativeNormalizedCompositeWeight then - prescribedObserversAddressesLookup[observer.gatewayAddress] = true + prescribedObserversLookup[observer.observerAddress] = observer.gatewayAddress break end end @@ -287,22 +318,8 @@ function epochs.computePrescribedObserversForEpoch(epochIndex, hashchain) local newHash = crypto.utils.stream.fromArray(hash) hash = crypto.digest.sha2_256(newHash).asBytes() end - local prescribedObservers = {} - local filteredObserversAddressMap = utils.reduce(filteredObservers, function(acc, _, observer) - acc[observer.gatewayAddress] = observer - return acc - end, {}) - for address, _ in pairs(prescribedObserversAddressesLookup) do - table.insert(prescribedObservers, filteredObserversAddressMap[address]) - end - - -- sort them in place - table.sort(prescribedObservers, function(a, b) - return a.normalizedCompositeWeight > b.normalizedCompositeWeight -- sort by descending weight - end) - -- return the prescribed observers and the weighted observers - return prescribedObservers, weightedGateways + return prescribedObserversLookup, weightedGateways end --- Gets the epoch timestamps for an epoch index @@ -433,17 +450,13 @@ function epochs.saveObservations(observerAddress, reportTxId, failedGatewayAddre "Observations for the current epoch cannot be submitted before: " .. 
epochDistributionTimestamp ) - local prescribedObservers = epochs.getPrescribedObserversForEpoch(epochIndex) - assert(#prescribedObservers > 0, "No prescribed observers for the current epoch.") + local prescribedObserversLookup = epochs.getPrescribedObserversForEpoch(epochIndex) + assert(utils.lengthOfTable(prescribedObserversLookup) > 0, "No prescribed observers for the current epoch.") - local observerIndex = utils.findInArray(prescribedObservers, function(prescribedObserver) - return prescribedObserver.observerAddress == observerAddress - end) + local gatewayAddressForObserver = prescribedObserversLookup[observerAddress] + assert(gatewayAddressForObserver, "Caller is not a prescribed observer for the current epoch.") - local observer = prescribedObservers[observerIndex] - assert(observer, "Caller is not a prescribed observer for the current epoch.") - - local observingGateway = gar.getGateway(observer.gatewayAddress) + local observingGateway = gar.getGateway(gatewayAddressForObserver) assert(observingGateway, "The associated gateway not found in the registry.") local epoch = epochs.getEpoch(epochIndex) @@ -503,20 +516,17 @@ end --- Computes the total eligible rewards for an epoch based on the protocol balance and the reward percentage and prescribed observers --- @param epochIndex number The epoch index ---- @param prescribedObservers WeightedGateway[] The prescribed observers for the epoch +--- @param prescribedObserversLookup table The prescribed observers for the epoch --- @return ComputedRewards # The total eligible rewards -function epochs.computeTotalEligibleRewardsForEpoch(epochIndex, prescribedObservers) +function epochs.computeTotalEligibleRewardsForEpoch(epochIndex, prescribedObserversLookup) local epochStartTimestamp = epochs.getEpochTimestampsForIndex(epochIndex) local activeGatewayAddresses = gar.getActiveGatewaysBeforeTimestamp(epochStartTimestamp) local protocolBalance = balances.getBalance(ao.id) local rewardRate = epochs.getRewardRateForEpoch(epochIndex) local totalEligibleRewards = math.floor(protocolBalance * rewardRate) local eligibleGatewayReward = math.floor(totalEligibleRewards * 0.90 / #activeGatewayAddresses) -- TODO: make these setting variables - local eligibleObserverReward = math.floor(totalEligibleRewards * 0.10 / #prescribedObservers) -- TODO: make these setting variables - local prescribedObserversLookup = utils.reduce(prescribedObservers, function(acc, _, observer) - acc[observer.observerAddress] = true - return acc - end, {}) + local eligibleObserverReward = + math.floor(totalEligibleRewards * 0.10 / utils.lengthOfTable(prescribedObserversLookup)) -- TODO: make these setting variables -- compute for each gateway what their potential rewards are and for their delegates local potentialRewards = {} -- use ipairs as activeGatewayAddresses is an array @@ -590,14 +600,7 @@ function epochs.distributeRewardsForEpoch(currentTimestamp) end local eligibleGatewaysForEpoch = epochs.getEligibleRewardsForEpoch(epochIndex) - local prescribedObserversLookup = utils.reduce( - epochs.getPrescribedObserversForEpoch(epochIndex), - function(acc, _, observer) - acc[observer.observerAddress] = true - return acc - end, - {} - ) + local prescribedObserversLookup = epochs.getPrescribedObserversForEpoch(epochIndex) local totalObservationsSubmitted = utils.lengthOfTable(epoch.observations.reports) or 0 -- get the eligible rewards for the epoch diff --git a/src/gar.lua b/src/gar.lua index 2149f4fc..58928301 100644 --- a/src/gar.lua +++ b/src/gar.lua @@ -27,7 +27,7 @@ local gar = {} 
--- @field services GatewayServices | nil --- @field status "joined"|"leaving" --- @field observerAddress WalletAddress ---- @field weights GatewayWeights | nil +--- @field weights GatewayWeights | nil // TODO: make this required and update tests to match the type --- @field slashings table | nil --- @class Gateway : CompactGateway @@ -679,8 +679,8 @@ function gar.getGatewayWeightsAtTimestamp(gatewayAddresses, timestamp) local totalCompositeWeight = 0 -- Iterate over gateways to calculate weights - for _, address in pairs(gatewayAddresses) do - local gateway = gar.getGateway(address) + for _, gatewayAddress in pairs(gatewayAddresses) do + local gateway = gar.getGateway(gatewayAddress) if gateway then local totalStake = gateway.operatorStake + gateway.totalDelegatedStake -- 100 - no cap to this local stakeWeightRatio = totalStake / gar.getSettings().operators.minStake -- this is always greater than 1 as the minOperatorStake is always less than the stake @@ -710,7 +710,7 @@ function gar.getGatewayWeightsAtTimestamp(gatewayAddresses, timestamp) * observerRewardRatioWeight table.insert(weightedObservers, { - gatewayAddress = address, + gatewayAddress = gatewayAddress, observerAddress = gateway.observerAddress, stake = totalStake, startTimestamp = gateway.startTimestamp, diff --git a/src/main.lua b/src/main.lua index 3c02d8d7..a063d4be 100644 --- a/src/main.lua +++ b/src/main.lua @@ -579,6 +579,12 @@ addEventingHandler(ActionMap.Transfer, utils.hasMatchingTag("Action", ActionMap. msg.ioEvent:addField("RecipientNewBalance", recipientNewBalance) end + -- if the sender is the protocol, then we need to update the circulating supply as tokens are now in circulation + if msg.From == ao.id then + LastKnownCirculatingSupply = LastKnownCirculatingSupply + quantity + addSupplyData(msg.ioEvent) + end + -- Casting implies that the sender does not want a response - Reference: https://elixirforum.com/t/what-is-the-etymology-of-genserver-cast/33610/3 if not msg.Cast then -- Debit-Notice message template, that is sent to the Sender of the transfer @@ -1887,12 +1893,8 @@ end) addEventingHandler(ActionMap.Epoch, utils.hasMatchingTag("Action", ActionMap.Epoch), function(msg) -- check if the epoch number is provided, if not get the epoch number from the timestamp - local providedEpochIndex = msg.Tags["Epoch-Index"] - local timestamp = msg.Timestamp - - assert(providedEpochIndex or timestamp, "Epoch index or timestamp is required") - - local epochIndex = providedEpochIndex or epochs.getEpochIndexForTimestamp(timestamp) + local epochIndex = msg.Tags["Epoch-Index"] and tonumber(msg.Tags["Epoch-Index"]) + or epochs.getEpochIndexForTimestamp(msg.Timestamp or msg.Tags.Timestamp) local epoch = epochs.getEpoch(epochIndex) Send(msg, { Target = msg.From, Action = "Epoch-Notice", Data = json.encode(epoch) }) end) @@ -1922,29 +1924,20 @@ addEventingHandler( ActionMap.PrescribedObservers, utils.hasMatchingTag("Action", ActionMap.PrescribedObservers), function(msg) - -- check if the epoch number is provided, if not get the epoch number from the timestamp - local providedEpochIndex = msg.Tags["Epoch-Index"] - local timestamp = msg.Timestamp - - assert(providedEpochIndex or timestamp, "Epoch index or timestamp is required") - - local epochIndex = providedEpochIndex or epochs.getEpochIndexForTimestamp(timestamp) - local prescribedObservers = epochs.getPrescribedObserversForEpoch(epochIndex) + local epochIndex = msg.Tags["Epoch-Index"] and tonumber(msg.Tags["Epoch-Index"]) + or epochs.getEpochIndexForTimestamp(msg.Timestamp or 
msg.Tags.Timestamp) + local prescribedObserversWithWeights = epochs.getPrescribedObserversWithWeightsForEpoch(epochIndex) Send(msg, { Target = msg.From, Action = "Prescribed-Observers-Notice", - Data = json.encode(prescribedObservers), + Data = json.encode(prescribedObserversWithWeights), }) end ) addEventingHandler(ActionMap.Observations, utils.hasMatchingTag("Action", ActionMap.Observations), function(msg) - local providedEpochIndex = msg.Tags["Epoch-Index"] - local timestamp = msg.Timestamp - - assert(providedEpochIndex or timestamp, "Epoch index or timestamp is required") - - local epochIndex = providedEpochIndex or epochs.getEpochIndexForTimestamp(timestamp) + local epochIndex = msg.Tags["Epoch-Index"] and tonumber(msg.Tags["Epoch-Index"]) + or epochs.getEpochIndexForTimestamp(msg.Timestamp or msg.Tags.Timestamp) local observations = epochs.getObservationsForEpoch(epochIndex) Send(msg, { Target = msg.From, @@ -1956,12 +1949,8 @@ end) addEventingHandler(ActionMap.PrescribedNames, utils.hasMatchingTag("Action", ActionMap.PrescribedNames), function(msg) -- check if the epoch number is provided, if not get the epoch number from the timestamp - local providedEpochIndex = msg.Tags["Epoch-Index"] - local timestamp = msg.Timestamp - - assert(providedEpochIndex or timestamp, "Epoch index or timestamp is required") - - local epochIndex = providedEpochIndex or epochs.getEpochIndexForTimestamp(timestamp) + local epochIndex = msg.Tags["Epoch-Index"] and tonumber(msg.Tags["Epoch-Index"]) + or epochs.getEpochIndexForTimestamp(msg.Timestamp or msg.Tags.Timestamp) local prescribedNames = epochs.getPrescribedNamesForEpoch(epochIndex) Send(msg, { Target = msg.From, @@ -1972,12 +1961,8 @@ end) addEventingHandler(ActionMap.Distributions, utils.hasMatchingTag("Action", ActionMap.Distributions), function(msg) -- check if the epoch number is provided, if not get the epoch number from the timestamp - local providedEpochIndex = msg.Tags["Epoch-Index"] - local timestamp = msg.Timestamp - - assert(providedEpochIndex or timestamp, "Epoch index or timestamp is required") - - local epochIndex = providedEpochIndex or epochs.getEpochIndexForTimestamp(timestamp) + local epochIndex = msg.Tags["Epoch-Index"] and tonumber(msg.Tags["Epoch-Index"]) + or epochs.getEpochIndexForTimestamp(msg.Timestamp) local distributions = epochs.getDistributionsForEpoch(epochIndex) Send(msg, { Target = msg.From, diff --git a/tests/epochs.test.mjs b/tests/epochs.test.mjs new file mode 100644 index 00000000..355b8327 --- /dev/null +++ b/tests/epochs.test.mjs @@ -0,0 +1,133 @@ +import { + buyRecord, + getEpoch, + joinNetwork, + getPrescribedObservers, + getPrescribedNames, + tick, + startMemory, + totalTokenSupply, + getEpochSettings, +} from './helpers.mjs'; +import { describe, it, before } from 'node:test'; +import assert from 'node:assert'; +import { STUB_ADDRESS, STUB_OPERATOR_ADDRESS } from '../tools/constants.mjs'; + +const firstEpochStartTimestamp = 1719900000000; +const epochLength = 1000 * 60 * 60 * 24; // 24 hours +const distributionDelay = 1000 * 60 * 40; // 40 minutes + +describe('epochs', () => { + let sharedMemory; + + before(async () => { + const { Memory: totalTokenSupplyMemory } = await totalTokenSupply({ + memory: startMemory, + }); + // have a gateway join, and add an arns name which will be used to prescribe names and observers + const { memory: gatewayJoinMemory } = await joinNetwork({ + memory: totalTokenSupplyMemory, + address: STUB_OPERATOR_ADDRESS, + }); + const { memory: buyRecordMemory } = await buyRecord({ + 
memory: gatewayJoinMemory, + name: 'prescribed-name', + type: 'permabuy', + from: STUB_OPERATOR_ADDRESS, + }); + const { memory: tickMemory } = await tick({ + memory: buyRecordMemory, + timestamp: firstEpochStartTimestamp, + }); + sharedMemory = tickMemory; + }); + + describe('Epoch', () => { + it('should return the current epoch', async () => { + const epoch = await getEpoch({ + memory: sharedMemory, + timestamp: firstEpochStartTimestamp, + }); + assert.deepStrictEqual(epoch, { + epochIndex: 0, + startTimestamp: firstEpochStartTimestamp, + endTimestamp: firstEpochStartTimestamp + epochLength, + startHeight: 1, + distributionTimestamp: + firstEpochStartTimestamp + epochLength + distributionDelay, + prescribedObservers: { + [STUB_ADDRESS]: STUB_OPERATOR_ADDRESS, + }, + prescribedNames: ['prescribed-name'], + observations: { + failureSummaries: [], + reports: [], + }, + distributions: { + totalEligibleGatewayReward: 22500900000, + totalEligibleGateways: 1, + totalEligibleObserverReward: 2500100000, + totalEligibleRewards: 25001000000, + rewards: { + eligible: { + [STUB_OPERATOR_ADDRESS]: { + delegateRewards: [], + operatorReward: 25001000000, // 0.001 of the protocol balance after the transfers and name purchase + }, + }, + }, + }, + }); + + // TODO (PE-7321): add a test for an empty epoch before names and gateways have been prescribed + }); + }); + + describe('Prescribed Observers', () => { + it('should return the correct epoch for the current epoch with weights', async () => { + const prescribedObservers = await getPrescribedObservers({ + memory: sharedMemory, + timestamp: firstEpochStartTimestamp, + }); + assert.deepStrictEqual(prescribedObservers, [ + { + compositeWeight: 4, + gatewayAddress: STUB_OPERATOR_ADDRESS, + gatewayRewardRatioWeight: 1, + normalizedCompositeWeight: 1, + observerAddress: STUB_ADDRESS, + observerRewardRatioWeight: 1, + stakeWeight: 1, + tenureWeight: 4, + }, + ]); + }); + }); + + describe('Prescribed Names', () => { + it('should return the correct epoch for the first epoch', async () => { + const prescribedNames = await getPrescribedNames({ + memory: sharedMemory, + timestamp: firstEpochStartTimestamp, + }); + assert.deepStrictEqual(prescribedNames, ['prescribed-name']); + }); + }); + + describe('Epoch-Settings', () => { + it('should return the correct epoch settings', async () => { + const epochSettings = await getEpochSettings({ + memory: sharedMemory, + timestamp: firstEpochStartTimestamp, + }); + assert.deepStrictEqual(epochSettings, { + maxObservers: 50, + epochZeroStartTimestamp: firstEpochStartTimestamp, + durationMs: epochLength, + distributionDelayMs: distributionDelay, + prescribedNameCount: 2, + pruneEpochsCount: 14, + }); + }); + }); +}); diff --git a/tests/gar.test.mjs b/tests/gar.test.mjs index a1a02d96..40b4500f 100644 --- a/tests/gar.test.mjs +++ b/tests/gar.test.mjs @@ -1315,10 +1315,11 @@ describe('GatewayRegistry', async () => { // Assert prescribed observers const prescribedObservers = JSON.parse(futureTick.Messages[0].Data) .maybeNewEpoch.prescribedObservers; - assert.equal(prescribedObservers.length, 2); - const prescribedObserverAddresses = prescribedObservers.map( - (o) => o.observerAddress, - ); + assert.deepEqual(prescribedObservers, { + [STUB_ADDRESS]: STUB_ADDRESS, + [observerAddress]: gatewayAddress, + }); + const prescribedObserverAddresses = Object.keys(prescribedObservers); assert.ok(prescribedObserverAddresses.includes(STUB_ADDRESS)); assert.ok(prescribedObserverAddresses.includes(observerAddress)); gatewayMemory = 
futureTick.Memory; diff --git a/tests/handlers.test.mjs b/tests/handlers.test.mjs index 8e2e2090..8fbc770b 100644 --- a/tests/handlers.test.mjs +++ b/tests/handlers.test.mjs @@ -1,10 +1,6 @@ import { handle } from './helpers.mjs'; import { describe, it } from 'node:test'; import assert from 'node:assert'; -import { - AO_LOADER_HANDLER_ENV, - DEFAULT_HANDLE_OPTIONS, -} from '../tools/constants.mjs'; describe('handlers', async () => { it('should maintain order of handlers, with _eval and _default first, followed by prune', async () => { diff --git a/tests/helpers.mjs b/tests/helpers.mjs index cda56bfd..10ec9a68 100644 --- a/tests/helpers.mjs +++ b/tests/helpers.mjs @@ -9,6 +9,7 @@ import { STUB_TIMESTAMP, STUB_MESSAGE_ID, validGatewayTags, + STUB_PROCESS_ID, } from '../tools/constants.mjs'; const initialOperatorStake = 100_000_000_000; @@ -143,6 +144,7 @@ export const transfer = async ({ }, memory, }); + assertNoResultError(transferResult); return transferResult.Memory; }; @@ -169,6 +171,7 @@ export const joinNetwork = async ({ }, memory: transferMemory, }); + assertNoResultError(joinNetworkResult); return { memory: joinNetworkResult.Memory, result: joinNetworkResult, @@ -708,7 +711,7 @@ export const buyRecord = async ({ memory, from, name, - processId, + processId = STUB_PROCESS_ID, type = 'lease', years = 1, timestamp = STUB_TIMESTAMP, @@ -728,6 +731,7 @@ export const buyRecord = async ({ timestamp, memory, }); + assertNoResultError(buyRecordResult); return { result: buyRecordResult, memory: buyRecordResult.Memory, @@ -776,3 +780,100 @@ export const totalTokenSupply = async ({ memory, timestamp = 0 }) => { memory, }); }; + +export const tick = async ({ + memory, + timestamp = STUB_TIMESTAMP, + forcePrune = false, +}) => { + const tickResult = await handle({ + options: { + Tags: [{ name: 'Action', value: 'Tick' }], + Timestamp: timestamp, + ...(forcePrune ? { name: 'Force-Prune', value: 'true' } : {}), + }, + memory, + }); + return { + memory: tickResult.Memory, + result: tickResult, + }; +}; + +export const getEpoch = async ({ + memory, + timestamp = STUB_TIMESTAMP, + epochIndex, +}) => { + const epochResult = await handle({ + options: { + Tags: [ + { name: 'Action', value: 'Epoch' }, + ...(epochIndex !== undefined + ? [{ name: 'Epoch-Index', value: epochIndex }] + : []), + ], + Timestamp: timestamp, + }, + memory, + }); + assertNoResultError(epochResult); + return JSON.parse(epochResult.Messages[0].Data); +}; + +export const getPrescribedObservers = async ({ + memory, + timestamp = STUB_TIMESTAMP, + epochIndex, +}) => { + const prescribedObserversResult = await handle({ + options: { + Tags: [ + { name: 'Action', value: 'Epoch-Prescribed-Observers' }, + ...(epochIndex !== undefined + ? [{ name: 'Epoch-Index', value: epochIndex }] + : []), + ], + Timestamp: timestamp, + }, + memory, + }); + assertNoResultError(prescribedObserversResult); + return JSON.parse(prescribedObserversResult.Messages[0].Data); +}; + +export const getPrescribedNames = async ({ + memory, + timestamp = STUB_TIMESTAMP, + epochIndex, +}) => { + const prescribedNamesResult = await handle({ + options: { + Tags: [ + { name: 'Action', value: 'Epoch-Prescribed-Names' }, + ...(epochIndex !== undefined + ? 
[{ name: 'Epoch-Index', value: epochIndex }] + : []), + ], + Timestamp: timestamp, + }, + memory, + }); + assertNoResultError(prescribedNamesResult); + return JSON.parse(prescribedNamesResult.Messages[0].Data); +}; + +export const getEpochSettings = async ({ + memory, + timestamp = STUB_TIMESTAMP, +}) => { + const epochSettingsResult = await handle({ + options: { + Tags: [{ name: 'Action', value: 'Epoch-Settings' }], + Timestamp: timestamp, + }, + memory, + }); + assertNoResultError(epochSettingsResult); + return JSON.parse(epochSettingsResult.Messages[0].Data); +}; diff --git a/tests/tick.test.mjs b/tests/tick.test.mjs index 56e611ac..11b0bc21 100644 --- a/tests/tick.test.mjs +++ b/tests/tick.test.mjs @@ -20,9 +20,15 @@ import { joinNetwork, buyRecord, handle, + transfer, startMemory, returnedNamesPeriod, totalTokenSupply, + getEpoch, + tick, + saveObservations, + getEpochSettings, + leaveNetwork, } from './helpers.mjs'; import { assertNoInvariants } from './invariants.mjs'; @@ -46,49 +52,18 @@ describe('Tick', async () => { }); }); - const transfer = async ({ - recipient = STUB_ADDRESS, - quantity = 100_000_000_000, - memory = sharedMemory, - timestamp = STUB_TIMESTAMP, - } = {}) => { - const transferResult = await handle({ - options: { - From: PROCESS_OWNER, - Owner: PROCESS_OWNER, - Tags: [ - { name: 'Action', value: 'Transfer' }, - { name: 'Recipient', value: recipient }, - { name: 'Quantity', value: quantity }, - { name: 'Cast', value: false }, - ], - }, - memory, - timestamp, - }); - - // assert no error tag - const errorTag = transferResult.Messages?.[0]?.Tags?.find( - (tag) => tag.Name === 'Error', - ); - assert.strictEqual(errorTag, undefined); - - return transferResult.Memory; - }; - it('should prune record that are expired and after the grace period and create returned names for them', async () => { - let memory = sharedMemory; - const buyRecordResult = await handle({ - options: { - Tags: [ - { name: 'Action', value: 'Buy-Record' }, - { name: 'Name', value: 'test-name' }, - { name: 'Purchase-Type', value: 'lease' }, - { name: 'Years', value: '1' }, - { name: 'Process-Id', value: ''.padEnd(43, 'a') }, - ], - }, + const memory = await transfer({ + recipient: STUB_ADDRESS, + quantity: 100_000_000_000, + memory: sharedMemory, + }); + const buyRecordResult = await buyRecord({ memory, + name: 'test-name', + type: 'lease', + from: STUB_ADDRESS, + processId: ''.padEnd(43, 'a'), }); const realRecord = await handle({ options: { @@ -97,7 +72,7 @@ describe('Tick', async () => { { name: 'Name', value: 'test-name' }, ], }, - memory: buyRecordResult.Memory, + memory: buyRecordResult.memory, }); const buyRecordData = JSON.parse(realRecord.Messages[0].Data); assert.deepEqual(buyRecordData, { @@ -112,12 +87,9 @@ describe('Tick', async () => { // mock the passage of time and tick with a future timestamp const futureTimestamp = buyRecordData.endTimestamp + 1000 * 60 * 60 * 24 * 14 + 1; - const futureTickResult = await handle({ - options: { - Tags: [{ name: 'Action', value: 'Tick' }], - Timestamp: futureTimestamp, - }, - memory: buyRecordResult.Memory, + const { result: futureTickResult } = await tick({ + memory: buyRecordResult.memory, + timestamp: futureTimestamp, }); const tickEvent = JSON.parse( @@ -171,40 +143,29 @@ describe('Tick', async () => { const memory = await transfer({ recipient: STUB_ADDRESS, quantity: 100_000_000_000, + memory: sharedMemory, }); - - const joinNetworkResult = await handle({ - options: { - Tags: validGatewayTags(), - From: STUB_ADDRESS, - Owner: STUB_ADDRESS, - }, + 
const joinNetworkResult = await joinNetwork({ memory, + address: STUB_ADDRESS, }); - // assert no error tag - assertNoResultError(joinNetworkResult); - // check the gateway record from contract const gateway = await getGateway({ - memory: joinNetworkResult.Memory, + memory: joinNetworkResult.memory, address: STUB_ADDRESS, }); assert.deepEqual(gateway.status, 'joined'); // leave the network - const leaveNetworkResult = await handle({ - options: { - From: STUB_ADDRESS, - Owner: STUB_ADDRESS, - Tags: [{ name: 'Action', value: 'Leave-Network' }], - }, - memory: joinNetworkResult.Memory, + const leaveNetworkResult = await leaveNetwork({ + memory: joinNetworkResult.memory, + address: STUB_ADDRESS, }); // check the gateways status is leaving const leavingGateway = await getGateway({ - memory: leaveNetworkResult.Memory, + memory: leaveNetworkResult.memory, address: STUB_ADDRESS, }); assert.deepEqual(leavingGateway.status, 'leaving'); @@ -212,17 +173,14 @@ describe('Tick', async () => { // expedite the timestamp to the future const futureTimestamp = leavingGateway.endTimestamp + 1; - const futureTick = await handle({ - options: { - Tags: [{ name: 'Action', value: 'Tick' }], - Timestamp: futureTimestamp, - }, - memory: leaveNetworkResult.Memory, + const futureTick = await tick({ + memory: leaveNetworkResult.memory, + timestamp: futureTimestamp, }); // check the gateway is pruned const prunedGateway = await getGateway({ - memory: futureTick.Memory, + memory: futureTick.memory, address: STUB_ADDRESS, timestamp: futureTimestamp, }); @@ -315,12 +273,9 @@ describe('Tick', async () => { ); // mock the passage of time and tick with a future timestamp const futureTimestamp = vaultData.endTimestamp + 1; - const futureTick = await handle({ - options: { - Tags: [{ name: 'Action', value: 'Tick' }], - Timestamp: futureTimestamp, - }, + const futureTick = await tick({ memory: createVaultResult.Memory, + timestamp: futureTimestamp, }); // check the vault is pruned @@ -370,6 +325,7 @@ describe('Tick', async () => { const initialMemory = await transfer({ recipient: STUB_ADDRESS, quantity: 100_000_000_000, + memory: sharedMemory, }); const delegateAddress = 'delegate-address-'.padEnd(43, '1'); @@ -403,36 +359,26 @@ describe('Tick', async () => { assertNoResultError(newDelegateResult); // fast forward to the start of the first epoch - const epochSettings = await handle({ - options: { - Tags: [{ name: 'Action', value: 'Epoch-Settings' }], - }, + const epochSettings = await getEpochSettings({ + memory: newDelegateResult.Memory, + timestamp: delegateTimestamp, }); - const epochSettingsData = JSON.parse(epochSettings.Messages?.[0]?.Data); - const genesisEpochTimestamp = epochSettingsData.epochZeroStartTimestamp; + const genesisEpochTimestamp = epochSettings.epochZeroStartTimestamp; // now tick to create the first epoch after the epoch start timestamp const createEpochTimestamp = genesisEpochTimestamp + 1; - const newEpochTick = await handle({ - options: { - Timestamp: createEpochTimestamp, // one millisecond after the epoch start timestamp, should create the epoch and set the prescribed observers and names - Tags: [ - { name: 'Action', value: 'Tick' }, - { name: 'Force-Prune', value: 'true' }, // simply exercise this though it's not critical to the test - ], - }, + const newEpochTick = await tick({ memory: newDelegateResult.Memory, + timestamp: createEpochTimestamp, + forcePrune: true, }); // assert no error tag assertNoResultError(newEpochTick); // assert the new epoch is created - const epoch = await handle({ - options: 
{ - Timestamp: createEpochTimestamp, // one millisecond after the epoch start timestamp - Tags: [{ name: 'Action', value: 'Epoch' }], - }, - memory: newEpochTick.Memory, + const epochData = await getEpoch({ + memory: newEpochTick.memory, + timestamp: createEpochTimestamp, }); // get the epoch timestamp and assert it is in 24 hours @@ -444,7 +390,6 @@ describe('Tick', async () => { (totalGatewayRewards + totalObserverRewards) / 1; // only one gateway in the network const expectedGatewayOperatorReward = totalEligibleGatewayRewards * 0.75; // 75% of the eligible rewards go to the operator const expectedGatewayDelegateReward = totalEligibleGatewayRewards * 0.25; // 25% of the eligible rewards go to the delegates - const epochData = JSON.parse(epoch.Messages[0].Data); assert.deepStrictEqual(epochData, { epochIndex: 0, startHeight: 1, @@ -456,21 +401,9 @@ describe('Tick', async () => { failureSummaries: [], reports: [], }, - prescribedObservers: [ - { - // TODO: we could just return the addresses here - observerAddress: STUB_ADDRESS, - observerRewardRatioWeight: 1, - normalizedCompositeWeight: 1, - gatewayRewardRatioWeight: 1, - gatewayAddress: STUB_ADDRESS, - stake: INITIAL_OPERATOR_STAKE * 3, - tenureWeight: 4, - compositeWeight: 12, - startTimestamp: 21600000, - stakeWeight: 3, - }, - ], // the only gateway in the network + prescribedObservers: { + [STUB_ADDRESS]: STUB_ADDRESS, + }, prescribedNames: [], // no names in the network distributions: { totalEligibleGateways: 1, @@ -493,20 +426,11 @@ describe('Tick', async () => { // have the gateway submit an observation const reportTxId = 'report-tx-id-'.padEnd(43, '1'); const observationTimestamp = createEpochTimestamp + 7 * 1000 * 60 * 60; // 7 hours after the epoch start timestamp - const observation = await handle({ - options: { - From: STUB_ADDRESS, - Owner: STUB_ADDRESS, - Timestamp: observationTimestamp, - Tags: [ - { name: 'Action', value: 'Save-Observations' }, - { - name: 'Report-Tx-Id', - value: reportTxId, - }, - ], - }, - memory: epoch.Memory, + const observation = await saveObservations({ + memory: newEpochTick.memory, + timestamp: observationTimestamp, + from: STUB_ADDRESS, + reportTxId, }); // assert no error tag @@ -514,33 +438,21 @@ describe('Tick', async () => { // now jump ahead to the epoch distribution timestamp const distributionTimestamp = epochData.distributionTimestamp; - const distributionTick = await handle({ - options: { - Tags: [{ name: 'Action', value: 'Tick' }], - Timestamp: distributionTimestamp, - }, - memory: observation.Memory, + const distributionTick = await tick({ + memory: observation.memory, + timestamp: distributionTimestamp, }); // assert no error tag assertNoResultError(distributionTick); // check the rewards were distributed correctly - const rewards = await handle({ - options: { - Timestamp: distributionTimestamp, - Tags: [ - { name: 'Action', value: 'Epoch' }, - { - name: 'Epoch-Index', - value: '0', - }, - ], - }, - memory: distributionTick.Memory, + const distributedEpochData = await getEpoch({ + memory: distributionTick.memory, + timestamp: distributionTimestamp, + epochIndex: 0, }); - const distributedEpochData = JSON.parse(rewards.Messages[0].Data); assert.deepStrictEqual(distributedEpochData, { ...epochData, distributions: { @@ -563,18 +475,15 @@ describe('Tick', async () => { }, }); // assert the new epoch was created - const newEpoch = await handle({ - options: { - Tags: [{ name: 'Action', value: 'Epoch' }], - Timestamp: distributionTimestamp, - }, - memory: distributionTick.Memory, + const 
newEpoch = await getEpoch({ + memory: distributionTick.memory, + timestamp: distributionTimestamp, + epochIndex: 1, }); - const newEpochData = JSON.parse(newEpoch.Messages[0].Data); - assert.equal(newEpochData.epochIndex, 1); + assert.equal(newEpoch.epochIndex, 1); // assert the gateway stakes were updated and match the distributed rewards const gateway = await getGateway({ - memory: distributionTick.Memory, + memory: distributionTick.memory, address: STUB_ADDRESS, timestamp: distributionTimestamp, }); @@ -616,7 +525,7 @@ describe('Tick', async () => { }); const delegateItems = await getDelegatesItems({ - memory: distributionTick.Memory, + memory: distributionTick.memory, gatewayAddress: STUB_ADDRESS, timestamp: distributionTimestamp, }); @@ -737,14 +646,11 @@ describe('Tick', async () => { // Tick to the epoch where demandFactor is 0.5 for (let i = 0; i <= 49; i++) { const epochTimestamp = genesisEpochStart + (epochDurationMs + 1) * i; - const { Memory } = await handle({ - options: { - Tags: [{ name: 'Action', value: 'Tick' }], - Timestamp: epochTimestamp, - }, + const { result: tickResult } = await tick({ memory: tickMemory, + timestamp: epochTimestamp, }); - tickMemory = Memory; + tickMemory = tickResult.Memory; if (i === 45) { const demandFactor = await getDemandFactor({ diff --git a/tools/constants.mjs b/tools/constants.mjs index 8af14cc8..b6ed8039 100644 --- a/tools/constants.mjs +++ b/tools/constants.mjs @@ -14,6 +14,7 @@ export const INITIAL_OPERATOR_STAKE = 10_000_000_000; // 10K ARIO export const INITIAL_DELEGATE_STAKE = 10_000_000; // 10K ARIO export const INITIAL_OWNER_BALANCE = 950_000_000_000_000; // 950M ARIO export const STUB_TIMESTAMP = 21600000; // 01-01-1970 00:00:00 +export const STUB_PROCESS_ID = 'process-id-stub-'.padEnd(43, '0'); export const STUB_MESSAGE_ID = ''.padEnd(43, 'm'); export const STUB_HASH_CHAIN = 'NGU1fq_ssL9m6kRbRU1bqiIDBht79ckvAwRMGElkSOg'; /* ao READ-ONLY Env Variables */
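
Editor's note (illustrative, not part of the PR): the central data-structure change above is that `epoch.prescribedObservers` is no longer an array of fully expanded weighted-gateway records but a plain lookup map of observer address to gateway address, with the new `epochs.getPrescribedObserversWithWeightsForEpoch` rebuilding the weighted view from the gateway registry on demand. The standalone Lua sketch below shows that new shape and a simplified version of the rebuild; the gateway table, addresses, and weight values are stubs invented for the example and only mirror the field names used in the diff.

-- Sketch only: new prescribedObservers shape and a simplified weighted-view rebuild.
-- New shape: observerAddress -> gatewayAddress.
local prescribedObserversLookup = {
	["observer-address-1"] = "gateway-address-1",
}

-- Stub registry keyed by gateway address, standing in for gar.getGateway(...);
-- the cached weights field mirrors the GatewayWeights shape used in the diff.
local stubGateways = {
	["gateway-address-1"] = {
		observerAddress = "observer-address-1",
		weights = {
			stakeWeight = 1,
			tenureWeight = 1,
			gatewayRewardRatioWeight = 1,
			observerRewardRatioWeight = 1,
			compositeWeight = 1,
			normalizedCompositeWeight = 1,
		},
	},
}

-- Roughly what getPrescribedObserversWithWeightsForEpoch does against the real registry:
-- expand each lookup entry into a weighted record, then sort by descending weight.
local function prescribedObserversWithWeights(lookup, gateways)
	local result = {}
	for observerAddress, gatewayAddress in pairs(lookup) do
		local gateway = gateways[gatewayAddress]
		if gateway then
			table.insert(result, {
				observerAddress = observerAddress,
				gatewayAddress = gatewayAddress,
				stakeWeight = gateway.weights.stakeWeight,
				tenureWeight = gateway.weights.tenureWeight,
				gatewayRewardRatioWeight = gateway.weights.gatewayRewardRatioWeight,
				observerRewardRatioWeight = gateway.weights.observerRewardRatioWeight,
				compositeWeight = gateway.weights.compositeWeight,
				normalizedCompositeWeight = gateway.weights.normalizedCompositeWeight,
			})
		end
	end
	table.sort(result, function(a, b)
		return a.normalizedCompositeWeight > b.normalizedCompositeWeight
	end)
	return result
end

-- Observer authorization (as in saveObservations) becomes a constant-time lookup
-- instead of a scan over an array of records.
local gatewayAddressForObserver = prescribedObserversLookup["observer-address-1"]
assert(gatewayAddressForObserver == "gateway-address-1")
print(#prescribedObserversWithWeights(prescribedObserversLookup, stubGateways)) -- 1

Storing only the lookup keeps per-epoch state small and makes the "is this caller prescribed?" check an O(1) table access, at the cost of recomputing weights when a read handler (such as the Prescribed-Observers notice) needs the full weighted view.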