#!/usr/bin/env python3
"""
Import Public Haus Members using Optimism Etherscan API

This script fetches holders of the Public Haus shares token using the Optimism
Etherscan API, imports them into the database, and links them to the Public Haus DAO.

Reads ETHERSCAN_API_KEY from the environment (or a .env file); if it is not set,
the public, rate-limited Etherscan endpoint is used.

Usage:
    python import_public_haus_etherscan.py
"""

import os
import sys
import logging
import json
import time
import requests
from typing import Dict, Any, List, Optional
from dotenv import load_dotenv

# Add parent directory to path to import utils
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from utils.db_connector import DatabaseConnector
from utils.logger import setup_logger

# Load environment variables
load_dotenv()

# Setup logging
logger = setup_logger("public_haus_etherscan_importer")

# Constants
PUBLIC_HAUS_DAO_ID = "0xf5d6b637a9185707f52d40d452956ca49018247a"  # Public Haus DAO ID on Optimism
SHARES_TOKEN_ADDRESS = "0x4950c436F69c8b4F68ed814A70a5E1D94495c4a7"  # Public Haus sharesToken contract address

# Optimism Etherscan API
OPTIMISM_ETHERSCAN_API_URL = "https://api-optimistic.etherscan.io/api"
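
# The Etherscan endpoints used below are expected to return the standard
# Etherscan JSON envelope, roughly:
#   {"status": "1", "message": "OK", "result": [...]}
# "status" == "1" signals success; anything else sends the importer down its
# fallback paths. The same query the importer issues can be checked by hand
# (YourApiKey is a placeholder):
#   curl "https://api-optimistic.etherscan.io/api?module=token&action=tokenholderlist&contractaddress=0x4950c436F69c8b4F68ed814A70a5E1D94495c4a7&page=1&offset=100&apikey=YourApiKey"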


class PublicHausEtherscanImporter:
    """Importer for Public Haus members using Optimism Etherscan API"""

    def __init__(self):
        """Initialize the importer"""
        # Initialize database
        self.db = DatabaseConnector()

        # Get Etherscan API key
        self.etherscan_api_key = os.getenv("ETHERSCAN_API_KEY")
        if not self.etherscan_api_key:
            logger.warning("ETHERSCAN_API_KEY not set, using API without key (rate limited)")
            self.etherscan_api_key = ""

        # Register data source
        self.data_source_id = self.register_data_source()

        # Initialize scraping job
        self.job_id = self.db.create_scraping_job(
            source_name="Public Haus DAO Etherscan",
            status="running"
        )
        logger.info(f"Created scraping job with ID: {self.job_id}")

    def register_data_source(self) -> str:
        """Register the Public Haus data source in the database"""
        return self.db.upsert_data_source(
            name="Public Haus DAO Etherscan",
            source_type="blockchain",
            description="Public Haus DAO members identified by token holdings via Etherscan"
        )

    def get_token_info(self) -> Dict[str, Any]:
        """
        Get information about the shares token from Etherscan

        Returns:
            Token information
        """
        try:
            # Get token info from Etherscan
            params = {
                "module": "token",
                "action": "tokeninfo",
                "contractaddress": SHARES_TOKEN_ADDRESS,
                "apikey": self.etherscan_api_key
            }

            response = requests.get(OPTIMISM_ETHERSCAN_API_URL, params=params, timeout=30)
            data = response.json()

            if data["status"] == "1":
                token_info = data["result"][0]
                logger.info(f"Token info: {token_info.get('name')} ({token_info.get('symbol')})")
                return token_info
            else:
                # If the Etherscan API fails, fall back to hardcoded values
                logger.warning(f"Error getting token info from Etherscan: {data.get('message')}")
                return {
                    "name": "Public Haus Shares",
                    "symbol": "SHARES",
                    "decimals": "18",
                    "totalSupply": "0"
                }

        except Exception as e:
            logger.error(f"Error getting token info: {e}")
            # Return default values
            return {
                "name": "Public Haus Shares",
                "symbol": "SHARES",
                "decimals": "18",
                "totalSupply": "0"
            }

    def fetch_token_holders(self) -> List[Dict[str, Any]]:
        """
        Fetch holders of the shares token using the Etherscan API

        Returns:
            List of token holders with their balances
        """
        try:
            # Get token info
            token_info = self.get_token_info()
            decimals = int(token_info.get("decimals", 18))

            # Get token holders from Etherscan
            params = {
                "module": "token",
                "action": "tokenholderlist",
                "contractaddress": SHARES_TOKEN_ADDRESS,
                "page": 1,
                "offset": 100,  # Get up to 100 holders
                "apikey": self.etherscan_api_key
            }

            response = requests.get(OPTIMISM_ETHERSCAN_API_URL, params=params, timeout=30)
            data = response.json()

            holders = []

            if data["status"] == "1":
                for holder in data["result"]:
                    # The tokenholderlist endpoint reports the address as
                    # "TokenHolderAddress"; fall back to "address" for safety.
                    address = holder.get("TokenHolderAddress") or holder.get("address")
                    balance = int(holder["TokenHolderQuantity"])

                    # Skip zero balances
                    if balance > 0:
                        holders.append({
                            "address": address,
                            "balance": balance,
                            "balanceFormatted": balance / (10 ** decimals),
                            "dao": "Public Haus"
                        })

                logger.info(f"Found {len(holders)} token holders with non-zero balance")
            else:
                # If the Etherscan API fails, try an alternative approach
                logger.warning(f"Error getting token holders from Etherscan: {data.get('message')}")

                # If the tokenholderlist endpoint is not available, try getting transfers
                params = {
                    "module": "account",
                    "action": "tokentx",
                    "contractaddress": SHARES_TOKEN_ADDRESS,
                    "page": 1,
                    "offset": 1000,  # Get up to 1000 transfers
                    "sort": "desc",
                    "apikey": self.etherscan_api_key
                }

                response = requests.get(OPTIMISM_ETHERSCAN_API_URL, params=params, timeout=30)
                data = response.json()

                if data["status"] == "1":
                    # Extract unique addresses from transfers
                    addresses = set()
                    for tx in data["result"]:
                        addresses.add(tx["to"])
                        addresses.add(tx["from"])

                    # Remove the zero address
                    addresses.discard("0x0000000000000000000000000000000000000000")

                    # Create holder objects
                    for address in addresses:
                        holders.append({
                            "address": address,
                            "balance": 1,  # We don't know the actual balance
                            "balanceFormatted": 1,
                            "dao": "Public Haus"
                        })

                    logger.info(f"Found {len(holders)} unique addresses from token transfers")

            # If we still don't have any holders, use the DAO address itself
            if not holders:
                logger.warning("No token holders found, using DAO address as fallback")
                holders.append({
                    "address": PUBLIC_HAUS_DAO_ID,
                    "balance": 1,
                    "balanceFormatted": 1,
                    "dao": "Public Haus"
                })

            return holders

        except Exception as e:
            logger.error(f"Error fetching token holders: {e}")
            raise
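
    # Each record returned by fetch_token_holders() has the shape that
    # process_holder() below consumes:
    #   {"address": "0x...", "balance": <raw token units>,
    #    "balanceFormatted": <balance / 10**decimals>, "dao": "Public Haus"}
    # (balance and balanceFormatted are 1 when only the transfer-based fallback
    # or the DAO-address fallback is available).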

    def process_holder(self, holder: Dict[str, Any]) -> Optional[str]:
        """
        Process a token holder and import it into the database

        Args:
            holder: Token holder information

        Returns:
            Contact ID if successful, None otherwise
        """
        try:
            # Extract holder information
            address = holder["address"]
            balance = holder["balance"]
            balance_formatted = holder["balanceFormatted"]
            dao_name = holder["dao"]

            # Check if the contact exists
            query = 'SELECT id, name, "ensName" FROM "Contact" WHERE "ethereumAddress" ILIKE %(address)s'
            existing_contacts = self.db.execute_query(query, {"address": address})

            contact_id = None

            if existing_contacts:
                # Use existing contact
                contact_id = existing_contacts[0]["id"]
                logger.info(f"Found existing contact {contact_id} for address {address}")
            else:
                # Create new contact
                contact_id = self.db.upsert_contact(
                    ethereum_address=address,
                    ens_name=None
                )
                logger.info(f"Created new contact {contact_id} for address {address}")

            # Add DAO membership (the ON CONFLICT upsert assumes a unique
            # constraint on ("contactId", "daoName"))
            self.db.execute_update(
                """
                INSERT INTO "DaoMembership" ("contactId", "daoName", "shares", "loot", "delegatingTo")
                VALUES (%(contact_id)s, %(dao_name)s, %(shares)s, %(loot)s, %(delegating_to)s)
                ON CONFLICT ("contactId", "daoName")
                DO UPDATE SET
                    "shares" = %(shares)s,
                    "loot" = %(loot)s,
                    "updatedAt" = NOW()
                """,
                {
                    "contact_id": contact_id,
                    "dao_name": dao_name,
                    "shares": balance,  # Use token balance as shares
                    "loot": 0,  # We don't have loot information
                    "delegating_to": None
                }
            )

            # Add note about membership
            note_content = f"Public Haus DAO Member\nShares Token Balance: {balance_formatted}"

            self.db.add_note_to_contact(
                contact_id=contact_id,
                content=note_content
            )

            # Add tag for the DAO
            self.db.add_tag_to_contact(
                contact_id=contact_id,
                tag_name=dao_name
            )

            # Link to data source
            self.db.link_contact_to_data_source(contact_id, self.data_source_id)

            return contact_id

        except Exception as e:
            logger.error(f"Error processing holder {holder.get('address')}: {e}")
            return None

    def run(self) -> int:
        """
        Run the importer

        Returns:
            Number of holders imported
        """
        try:
            # Fetch token holders
            holders = self.fetch_token_holders()

            if not holders:
                logger.info("No token holders found")
                self.db.update_scraping_job(self.job_id, "completed")
                return 0

            # Process holders
            imported_count = 0
            existing_count = 0  # Updated-vs-added counts are not tracked separately yet

            for holder in holders:
                try:
                    contact_id = self.process_holder(holder)
                    if contact_id:
                        imported_count += 1
                except Exception as e:
                    logger.exception(f"Error processing holder {holder.get('address')}: {e}")

                # Add a small delay to avoid overwhelming the database
                time.sleep(0.1)

            # Complete the scraping job
            self.db.update_scraping_job(
                self.job_id,
                "completed",
                records_processed=len(holders),
                records_added=imported_count,
                records_updated=existing_count
            )

            logger.info(f"Imported {imported_count} holders out of {len(holders)} processed")
            return imported_count

        except Exception as e:
            # Update the scraping job with the error
            self.db.update_scraping_job(self.job_id, "failed", error_message=str(e))
            logger.exception(f"Error importing holders: {e}")
            raise


def main():
    """Main function"""
    try:
        importer = PublicHausEtherscanImporter()
        imported_count = importer.run()
        logger.info(f"Import completed successfully. Imported {imported_count} token holders.")
        return 0
    except Exception as e:
        logger.exception(f"Error importing token holders: {e}")
        return 1


if __name__ == "__main__":
    sys.exit(main())