Merge pull request #139 from Synthetixio/feat/simplify-indexer-service
Simplify Indexer Service
Tburm authored Nov 12, 2024
2 parents 9abc444 + af3cce6 commit 6e2bf2a
Showing 12 changed files with 768 additions and 118 deletions.
4 changes: 4 additions & 0 deletions Makefile
@@ -24,6 +24,10 @@ extract:
docker compose run extractors python main.py configs/arbitrum_mainnet.yaml
docker compose run extractors python main.py configs/arbitrum_sepolia.yaml

index:
docker compose run indexers-v2 --network_name base_mainnet --config_name synthetix-v3
docker compose run indexers-v2 --network_name arbitrum_mainnet --config_name synthetix-v3

synths:
docker compose run transformer python scripts/get_synths.py

34 changes: 4 additions & 30 deletions docker-compose.yml
@@ -29,44 +29,18 @@ services:
tmpfs:
- /dev/shm:size=4g

arbitrum-mainnet-indexer:
indexer-v2:
build:
context: ./indexers-v2
dockerfile: Dockerfile
networks:
- data
depends_on:
- db
restart: always
environment:
DB_HOST: db
DB_PORT: 5432
DB_NAME: arbitrum_mainnet
DB_PASS: $PG_PASSWORD
GQL_PORT: 4350
RPC_ENDPOINT: wss://arbitrum-one-rpc.publicnode.com
NETWORK_NAME: arbitrum_mainnet
volumes:
- ./parquet-data:/parquet-data

base-mainnet-indexer:
build:
context: ./indexers-v2
dockerfile: Dockerfile
networks:
- data
depends_on:
- db
restart: always
env_file:
- .env
environment:
DB_HOST: db
DB_PORT: 5432
DB_NAME: base_mainnet
DB_PASS: $PG_PASSWORD
GQL_PORT: 4350
RPC_ENDPOINT: https://mainnet.base.org
NETWORK_NAME: base_mainnet
CONFIG_NAME: base_mainnet_parquet
CONFIG_NAME: synthetix-v3
volumes:
- ./parquet-data:/parquet-data

2 changes: 1 addition & 1 deletion indexers-v2/entrypoint.sh
@@ -4,7 +4,7 @@
set -e

# Get contract data from SDK and generate squidgen.yaml and squid.yaml
python3 main.py --network_name "$NETWORK_NAME" --rpc_endpoint "$RPC_ENDPOINT" --config_name "$CONFIG_NAME"
python3 main.py --network_name $NETWORK_NAME --config_name $CONFIG_NAME "$@"

# Generate squid processor
npm run generate:processor
64 changes: 44 additions & 20 deletions indexers-v2/main.py
@@ -1,9 +1,13 @@
import json
import os
import argparse
from dotenv import load_dotenv
import yaml
from synthetix import Synthetix

# load environment variables
load_dotenv()


def save_abi(abi, contract_name):
os.makedirs("abi", exist_ok=True)
@@ -12,13 +16,22 @@ def save_abi(abi, contract_name):


def create_squidgen_config(
rpc_url, archive_url, contracts_info, block_range, config_name, rate_limit=10
rpc_url,
archive_url,
network_name,
contracts_info,
block_range,
config_name,
rate_limit=10,
):
config = {
"archive": archive_url,
"finalityConfirmation": 1,
"chain": {"url": rpc_url, "rateLimit": rate_limit},
"target": {"type": "parquet", "path": f"/parquet-data/{config_name}"},
"target": {
"type": "parquet",
"path": f"/parquet-data/{network_name}/{config_name}",
},
"contracts": [],
}
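The parquet target path now nests the config name under the network name, so output for two networks sharing the same config no longer collides. A minimal sketch of the new path composition follows; the helper function is illustrative and not part of the diff:

def parquet_target(network_name: str, config_name: str) -> dict:
    # Mirrors the "target" block built in create_squidgen_config after this change
    return {
        "type": "parquet",
        "path": f"/parquet-data/{network_name}/{config_name}",
    }

# Previously the path was /parquet-data/<config_name>; now it is partitioned by network:
print(parquet_target("base_mainnet", "synthetix-v3")["path"])
# -> /parquet-data/base_mainnet/synthetix-v3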

@@ -73,32 +86,34 @@ def load_network_config(path):
"--config_name",
type=str,
help="Name of the configuration to use",
required=True,
)
parser.add_argument(
"--contract_names",
type=str,
help="Comma-separated list of contract names to index.",
)
parser.add_argument("--rpc_endpoint", type=str, help="RPC URL", required=True)
args = parser.parse_args()

rpc_endpoint = args.rpc_endpoint
if rpc_endpoint is None:
message = "RPC_ENDPOINT environment variable is not set"
raise Exception(message)

# Load config file for network
network_name = args.network_name
config_name = args.config_name
contract_names = args.contract_names

# Get contract names
if contract_names is not None:
parsed_contract_names = [name.strip() for name in contract_names.split(",")]

# Load network config
path = f"networks/{network_name}"
config_file = load_network_config(path)

# Get config name
if args.config_name is None:
config_name = "default"
else:
config_name = args.config_name

# Load shared network-level details
network_params = config_file["network"]
if network_params is None:
message = f"Network '{network_name}' not found in {path}/network_config.yaml"
raise Exception(message)
network_id = network_params["network_id"]
rpc_endpoint = os.getenv(f"NETWORK_{network_id}_RPC")
archive_url = network_params.get("archive_url", "None")
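With the --rpc_endpoint flag removed, the RPC URL is now resolved from a per-network environment variable keyed by the network_id from the network config, which the new load_dotenv call lets a .env file supply. A minimal sketch of that lookup, assuming the id is the chain id (e.g. 8453 for Base mainnet); the literal values below are illustrative, not from the diff:

import os

network_id = 8453  # illustrative; main.py reads this from networks/<network_name>/network_config.yaml
rpc_endpoint = os.getenv(f"NETWORK_{network_id}_RPC")  # e.g. NETWORK_8453_RPC=https://mainnet.base.org
print(rpc_endpoint)  # None if the variable is not set in the environment or .env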

# Load custom config
@@ -132,19 +147,27 @@ def load_network_config(path):
if "contracts_from_sdk" in custom_config:
contracts_from_sdk = custom_config["contracts_from_sdk"]
for contract in contracts_from_sdk:
if contract_names is not None:
if contract["name"] not in parsed_contract_names:
continue
name = contract["name"]
package = contract["package"]
contract_data = snx.contracts[package][name]
save_abi(contract_data["abi"], name)
contracts.append({"name": name, "address": contract_data["address"]})
abi = contract_data["abi"]
address = contract_data["address"]
save_abi(abi, name)
contracts.append({"name": name, "address": address})
elif "contracts_from_abi" in custom_config:
contracts_from_abi = custom_config["contracts_from_abi"]
for contract in contracts_from_abi:
if contract_names is not None:
if contract["name"] not in parsed_contract_names:
continue
name = contract["name"]
with open(f"{path}/abi/{name}.json", "r") as file:
contract_data = json.load(file)
save_abi(contract_data["abi"], name)
contracts.append({"name": name, "address": contract_data["address"]})
abi = json.load(file)
save_abi(abi, name)
contracts.append({"name": name, "address": contract["address"]})
else:
message = "No contracts found in network config"
raise Exception(message)
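The new --contract_names flag limits indexing to a named subset: any contract whose name is not in the comma-separated list is skipped. A small standalone sketch of that filter; the contract names and packages here are examples, not taken from the diff:

contract_names = "CoreProxy,PerpsMarketProxy"  # as passed on the command line
contracts_from_sdk = [
    {"name": "CoreProxy", "package": "system"},
    {"name": "PerpsMarketProxy", "package": "perpsFactory"},
    {"name": "AccountProxy", "package": "system"},
]

parsed_contract_names = [name.strip() for name in contract_names.split(",")]
# Mirrors the diff: contracts whose name is not listed are skipped
selected = [c for c in contracts_from_sdk if c["name"] in parsed_contract_names]
print([c["name"] for c in selected])  # ['CoreProxy', 'PerpsMarketProxy']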
@@ -154,6 +177,7 @@ def load_network_config(path):
squidgen_config = create_squidgen_config(
rpc_endpoint,
archive_url,
network_name,
contracts,
block_range,
config_name,