#!/usr/bin/env python3
"""
Import Public Haus Members from DAOhaus API

This script fetches members of Public Haus DAO from the DAOhaus API on Optimism mainnet,
imports them into the database, and links them to the Public Haus DAO.

Usage:
    python import_public_haus_members_api.py
"""

import os
import sys
import requests
import time
from typing import Dict, Any, List, Optional, Tuple
from dotenv import load_dotenv

# Add parent directory to path to import utils
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from utils.db_connector import DatabaseConnector
from utils.logger import setup_logger

# Load environment variables
load_dotenv()

# Setup logging
logger = setup_logger("public_haus_importer")

# Constants
DAOHAUS_API_URL = "https://admin.daohaus.club/api"
PUBLIC_HAUS_DAO_ID = "0xf5d6b637a9185707f52d40d452956ca49018247a"  # Public Haus DAO ID on Optimism
CHAIN_ID = "10"  # Optimism chain ID
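
# Timeout (seconds) applied to all DAOhaus API requests below so a stalled
# connection cannot hang the import indefinitely. The 30-second value is an
# arbitrary default; adjust as needed.
REQUEST_TIMEOUT = 30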

class PublicHausImporter:
    """Importer for Public Haus members from DAOhaus API"""

    def __init__(self):
        """Initialize the importer"""
        # Initialize database
        self.db = DatabaseConnector()

        # Register data source
        self.data_source_id = self.register_data_source()

    def register_data_source(self) -> str:
        """Register the Public Haus data source in the database"""
        return self.db.upsert_data_source(
            name="Public Haus DAO API",
            source_type="api",
            description="Public Haus DAO members from DAOhaus API on Optimism mainnet"
        )

    def fetch_dao_info(self) -> Dict[str, Any]:
        """
        Fetch Public Haus DAO information from the DAOhaus API

        Returns:
            DAO information
        """
        # Make request to DAOhaus API
        url = f"{DAOHAUS_API_URL}/dao/{CHAIN_ID}/{PUBLIC_HAUS_DAO_ID}"
        response = requests.get(url, timeout=REQUEST_TIMEOUT)

        # Check for errors
        if response.status_code != 200:
            logger.error(f"Error fetching DAO info: {response.text}")
            raise Exception(f"Error fetching DAO info: {response.status_code}")

        data = response.json()
        logger.info(f"Fetched DAO info: {data.get('name')}")

        return data

    def fetch_members(self) -> List[Dict[str, Any]]:
        """
        Fetch Public Haus members from the DAOhaus API

        Returns:
            List of member data from the API
        """
        # Make request to DAOhaus API
        url = f"{DAOHAUS_API_URL}/dao/{CHAIN_ID}/{PUBLIC_HAUS_DAO_ID}/members"
        response = requests.get(url, timeout=REQUEST_TIMEOUT)

        # Check for errors
        if response.status_code != 200:
            logger.error(f"Error fetching members: {response.text}")
            raise Exception(f"Error fetching members: {response.status_code}")

        data = response.json()

        # Check if members exist
        if not data:
            logger.error(f"No members found for DAO: {PUBLIC_HAUS_DAO_ID}")
            return []

        logger.info(f"Fetched {len(data)} members from API")

        return data

    def process_member(self, member: Dict[str, Any]) -> Optional[Tuple[str, bool]]:
        """
        Process a single member and import into database

        Args:
            member: Member data from the API

        Returns:
            Tuple of (contact ID, True if a new contact was created) if successful,
            None otherwise
        """
        # Extract member data
        address = member.get("memberAddress")
        shares = int(member.get("shares", 0))
        loot = int(member.get("loot", 0))
        joined_at = member.get("createdAt")
        delegating_to = member.get("delegatingTo")

        # Skip if no address
        if not address:
            logger.warning(f"Member has no address: {member}")
            return None

        # Check if contact already exists
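        # ILIKE makes the lookup case-insensitive, so addresses stored in EIP-55
        # checksummed (mixed-case) form still match regardless of casing.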
        query = 'SELECT id, name, "ensName" FROM "Contact" WHERE "ethereumAddress" ILIKE %(address)s'
        existing_contacts = self.db.execute_query(query, {"address": address})

        contact_id = None
        created = False

        if existing_contacts:
            # Use existing contact
            contact_id = existing_contacts[0]["id"]
            logger.info(f"Found existing contact {contact_id} for address {address}")
        else:
            # Create new contact
            contact_data = {
                "ethereumAddress": address,
                "name": f"Public Haus Member {address[:8]}",  # Default name
            }

            contact_id = self.db.upsert_contact(contact_data)
            created = True
            logger.info(f"Created new contact {contact_id} for address {address}")

        # Add DAO membership
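        # Note: the ON CONFLICT clause below assumes the "DaoMembership" table has a
        # unique constraint on ("contactId", "daoName"); the upsert fails without it.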
        self.db.execute_update(
            """
            INSERT INTO "DaoMembership" ("contactId", "daoName", "shares", "loot", "delegatingTo")
            VALUES (%(contact_id)s, %(dao_name)s, %(shares)s, %(loot)s, %(delegating_to)s)
            ON CONFLICT ("contactId", "daoName")
            DO UPDATE SET
                "shares" = %(shares)s,
                "loot" = %(loot)s,
                "delegatingTo" = %(delegating_to)s,
                "updatedAt" = NOW()
            """,
            {
                "contact_id": contact_id,
                "dao_name": "Public Haus",
                "shares": shares,
                "loot": loot,
                "delegating_to": delegating_to
            }
        )

        # Add note about membership
        note_content = f"Public Haus DAO Member\nShares: {shares}\nLoot: {loot}"
        if joined_at:
            note_content += f"\nJoined: {joined_at}"
        if delegating_to:
            note_content += f"\nDelegating to: {delegating_to}"

        self.db.add_note_to_contact(
            contact_id=contact_id,
            content=note_content,
            source="Public Haus DAO API"
        )

        # Link to data source
        self.db.link_contact_to_data_source(contact_id, self.data_source_id)

        return contact_id, created

    def run(self) -> int:
        """
        Run the importer

        Returns:
            Number of members imported
        """
        # Create a scraping job
        job_id = self.db.create_scraping_job("Public Haus DAO Importer", "running")
        logger.info(f"Created scraping job with ID: {job_id}")

        try:
            # Fetch DAO info
            dao_info = self.fetch_dao_info()
            logger.info(f"DAO Name: {dao_info.get('name')}")

            # Fetch members
            members = self.fetch_members()

            if not members:
                logger.info("No members found")
                self.db.update_scraping_job(job_id, "completed")
                return 0

            # Process members
            imported_count = 0  # members for whom a new contact was created
            existing_count = 0  # members matched to an existing contact

            for member in members:
                try:
                    result = self.process_member(member)
                    if result:
                        _, created = result
                        if created:
                            imported_count += 1
                        else:
                            existing_count += 1
                except Exception as e:
                    logger.exception(f"Error processing member {member.get('memberAddress')}: {e}")

                # Add a small delay to avoid overwhelming the database
                time.sleep(0.1)

            # Complete the scraping job
            self.db.update_scraping_job(
                job_id,
                "completed",
                records_processed=len(members),
                records_added=imported_count,
                records_updated=existing_count
            )

            logger.info(
                f"Imported {imported_count + existing_count} members out of {len(members)} processed "
                f"({imported_count} new, {existing_count} updated)"
            )
            return imported_count + existing_count

        except Exception as e:
            # Update the scraping job with error
            self.db.update_scraping_job(job_id, "failed", error_message=str(e))
            logger.exception(f"Error importing members: {e}")
            raise

def main():
    """Main function"""
    try:
        importer = PublicHausImporter()
        imported_count = importer.run()
        logger.info(f"Import completed successfully. Imported {imported_count} members.")
        return 0
    except Exception as e:
        logger.exception(f"Error importing members: {e}")
        return 1

if __name__ == "__main__":
    sys.exit(main())