From 6b76ee63c65ff556b45cd2f70c9782bd26ec5b93 Mon Sep 17 00:00:00 2001 From: Razvan Barbascu Date: Wed, 25 Sep 2024 12:12:27 +0100 Subject: [PATCH] feat(release): Add epoch start estimator script (#12114) Usage: ``` python3 estimate_epoch_start_time.py --url https://archival-rpc.mainnet.near.org --num_future_epochs 10 --num_past_epochs 20 ```
Example output ``` Epoch 1: 48616.817671756 seconds Epoch 2: 48008.022047789 seconds Epoch 3: 51080.074328883 seconds Epoch 4: 50294.362538916 seconds Epoch 5: 50321.011121149 seconds Epoch 6: 47953.952447657 seconds Epoch 7: 49785.977865728 seconds Epoch 8: 48770.992102473 seconds Epoch 9: 47790.67163107 seconds Epoch 10: 47881.711552879 seconds Epoch 11: 47893.72761854 seconds Epoch 12: 48008.929334719 seconds Epoch 13: 48079.328908341 seconds Epoch 14: 49524.900702396 seconds Epoch 15: 51087.296069337 seconds Epoch 16: 50241.39822162 seconds Epoch 17: 48424.705606747 seconds Epoch 18: 49929.137882208 seconds Epoch 19: 47886.499544935 seconds Epoch 20: 47977.195690047 seconds Exponential weighted average epoch length: 49064.81392174163 seconds Predicted start of epoch 1: 2024-09-20 13:07:05 Friday Predicted start of epoch 2: 2024-09-21 02:44:50 Saturday Predicted start of epoch 3: 2024-09-21 16:22:35 Saturday Predicted start of epoch 4: 2024-09-22 06:00:20 Sunday Predicted start of epoch 5: 2024-09-22 19:38:04 Sunday Predicted start of epoch 6: 2024-09-23 09:15:49 Monday Predicted start of epoch 7: 2024-09-23 22:53:34 Monday Predicted start of epoch 8: 2024-09-24 12:31:19 Tuesday Predicted start of epoch 9: 2024-09-25 02:09:04 Wednesday Predicted start of epoch 10: 2024-09-25 15:46:48 Wednesday ```
import requests
import time
import math
import argparse


def get_block(url, block_hash, timeout=30):
    """Return the header of a block from a NEAR JSON-RPC endpoint.

    Args:
        url: RPC endpoint to query.
        block_hash: Hash of the block to fetch, or None for the latest
            finalized block.
        timeout: Per-request timeout in seconds, so the script cannot
            hang forever on an unresponsive endpoint.

    Returns:
        The `result.header` dict of the RPC response.

    Raises:
        requests.HTTPError: on a non-2xx HTTP response.
        RuntimeError: when the RPC answers with a JSON-RPC error object.
    """
    payload = {
        "jsonrpc": "2.0",
        "id": "dontcare",
        "method": "block",
        # No hash means "give me the latest finalized block".
        "params": {"block_id": block_hash}
                  if block_hash is not None else {"finality": "final"},
    }

    response = requests.post(url, json=payload, timeout=timeout)
    response.raise_for_status()
    body = response.json()
    # Surface JSON-RPC errors explicitly instead of dying with a
    # KeyError on 'result'.
    if 'error' in body:
        raise RuntimeError(f"RPC error from {url}: {body['error']}")
    return body['result']['header']


def ns_to_seconds(ns):
    """Convert a duration in nanoseconds to float seconds."""
    return ns / 1e9


def format_time(seconds):
    """Render a duration as 'HH hours, MM minutes'.

    Computed by hand rather than via time.strftime/gmtime so that
    durations of 24 hours or more are not silently wrapped modulo 24.
    """
    hours, minutes = divmod(int(seconds) // 60, 60)
    return f"{hours:02d} hours, {minutes:02d} minutes"


# Fetch epoch lengths for the past n epochs and calculate the weighted
# average using exponential decay.
def get_exponential_weighted_epoch_lengths(url,
                                           starting_block_hash,
                                           num_epochs,
                                           decay_rate=0.1):
    """Measure the last `num_epochs` epoch lengths and their decayed average.

    Walks backwards from `starting_block_hash` (expected to be the first
    block of the current epoch), one epoch per step, and weights the
    observed lengths with exponential decay so recent epochs dominate.

    Returns:
        (epoch_lengths, weighted_average): epoch lengths in seconds,
        most recent first, and their exponentially weighted average.

    Raises:
        ValueError: if num_epochs is not positive (the average would
            otherwise be a division by zero).
    """
    if num_epochs <= 0:
        raise ValueError("num_epochs must be a positive integer")

    epoch_lengths = []
    block_data = get_block(url, starting_block_hash)

    for i in range(num_epochs):
        # Timestamp (nanoseconds) of the first block of this epoch.
        current_timestamp = int(block_data['timestamp'])

        # NOTE(review): the walk relies on `next_epoch_id` of an epoch's
        # first block being the hash of the previous epoch's first block —
        # confirm against NEAR's epoch-id derivation rules.
        previous_hash = block_data['next_epoch_id']
        previous_block_data = get_block(url, previous_hash)
        previous_timestamp = int(previous_block_data['timestamp'])

        epoch_length_seconds = ns_to_seconds(current_timestamp -
                                             previous_timestamp)
        epoch_lengths.append(epoch_length_seconds)

        print(f"Epoch -{i+1}: {format_time(epoch_length_seconds)}")

        # Reuse the block we just fetched as the next iteration's starting
        # point instead of re-requesting it (halves the RPC round-trips).
        block_data = previous_block_data

    # weight = e^(-decay_rate * i): the most recent epoch (i == 0) gets
    # weight 1.0 and older epochs decay towards 0.
    weights = [math.exp(-decay_rate * i) for i in range(num_epochs)]
    exponential_weighted_average_epoch_length = (
        sum(length * weight for length, weight in zip(epoch_lengths, weights))
        / sum(weights))

    print(f"\nExponential weighted average epoch length: "
          f"{format_time(exponential_weighted_average_epoch_length)}")

    return epoch_lengths, exponential_weighted_average_epoch_length


def predict_future_epochs(starting_epoch_timestamp, avg_epoch_length,
                          num_future_epochs):
    """Print and return approximate start dates of upcoming epochs.

    Args:
        starting_epoch_timestamp: start of the current epoch, nanoseconds.
        avg_epoch_length: expected epoch duration in seconds.
        num_future_epochs: how many upcoming epochs to project.

    Returns:
        List of human-readable UTC date strings, one per future epoch.
    """
    future_epochs = []
    # Convert the anchor timestamp from nanoseconds to seconds.
    current_timestamp = ns_to_seconds(starting_epoch_timestamp)

    for i in range(1, num_future_epochs + 1):
        # Epoch i starts i average-lengths after the current epoch started.
        future_timestamp = current_timestamp + i * avg_epoch_length

        future_date = time.strftime('%Y-%m-%d %H:%M:%S %A',
                                    time.gmtime(future_timestamp))
        future_epochs.append(future_date)

        print(f"Predicted start of epoch {i}: {future_date}")

    return future_epochs


def main(args):
    """Fetch recent epoch history and print predicted future epoch starts."""
    latest_block = get_block(args.url, None)
    next_epoch_id = latest_block['next_epoch_id']
    # First block of the current epoch; its timestamp anchors the forecast.
    current_epoch_first_block = get_block(args.url, next_epoch_id)
    current_timestamp = int(current_epoch_first_block['timestamp'])

    # Epoch lengths themselves are only printed; the forecast needs the
    # exponentially weighted average.
    _, exponential_weighted_average_epoch_length = (
        get_exponential_weighted_epoch_lengths(args.url, next_epoch_id,
                                               args.num_past_epochs,
                                               args.decay_rate))

    predict_future_epochs(current_timestamp,
                          exponential_weighted_average_epoch_length,
                          args.num_future_epochs)


class SetURLFromChainID(argparse.Action):
    """argparse action mapping --chain_id to the matching archival RPC URL."""

    def __call__(self, parser, namespace, values, option_string=None):
        if values == 'mainnet':
            setattr(namespace, 'url', 'https://archival-rpc.mainnet.near.org')
        elif values == 'testnet':
            setattr(namespace, 'url', 'https://archival-rpc.testnet.near.org')


if __name__ == "__main__":
    parser = argparse.ArgumentParser(
        description="Approximate future epoch start dates for NEAR Protocol.")
    # --url and --chain_id are alternative ways to choose the endpoint.
    group = parser.add_mutually_exclusive_group(required=False)
    group.add_argument("--url", help="The RPC URL to query.")
    group.add_argument(
        "--chain_id",
        choices=['mainnet', 'testnet'],
        action=SetURLFromChainID,
        help=
        "The chain ID (either 'mainnet' or 'testnet'). Sets the corresponding URL."
    )

    parser.add_argument("--num_past_epochs",
                        type=int,
                        default=4,
                        help="Number of past epochs to analyze.")
    parser.add_argument("--decay_rate",
                        type=float,
                        default=0.1,
                        help="Decay rate for exponential weighting.")
    parser.add_argument("--num_future_epochs",
                        type=int,
                        default=3,
                        help="Number of future epochs to predict.")

    args = parser.parse_args()
    # Fail fast with a clear message when neither --url nor --chain_id was
    # supplied; otherwise requests would raise a confusing error on url=None.
    if args.url is None:
        parser.error("one of --url or --chain_id is required")
    main(args)