diff --git a/.env.production.template b/.env.production.template
new file mode 100644
index 0000000..c943d61
--- /dev/null
+++ b/.env.production.template
@@ -0,0 +1,23 @@
+# Database
+DATABASE_URL="postgresql://stones@66.179.188.130:5433/stones?schema=public"
+PYTHON_DATABASE_URL="postgresql://stones@66.179.188.130:5433/stones"
+
+# API Keys (replace with your actual API keys)
+ETHEREUM_ETHERSCAN_API_KEY="YOUR_ETHERSCAN_API_KEY"
+ALCHEMY_API_KEY="YOUR_ALCHEMY_API_KEY"
+
+# Web3 Provider
+WEB3_PROVIDER_URL="https://eth-mainnet.g.alchemy.com/v2/${ALCHEMY_API_KEY}"
+OPTIMISM_RPC_URL="https://opt-mainnet.g.alchemy.com/v2/YOUR_OPTIMISM_KEY"
+
+# Application
+NODE_ENV="production"
+PORT=3000
+
+# Next.js
+NEXT_PUBLIC_API_URL="http://your-domain.com/api"
+OPTIMISM_ETHERSCAN_API_KEY="YOUR_OPTIMISM_ETHERSCAN_KEY"
+
+# IMPORTANT: Add your database password to DATABASE_URL and PYTHON_DATABASE_URL
+# by inserting ':your_password' between 'stones' and '@66.179'
+# Example: DATABASE_URL="postgresql://stones:your_password_here@66.179.188.130:5433/stones?schema=public"
\ No newline at end of file
diff --git a/DEPLOYMENT.md b/DEPLOYMENT.md
new file mode 100644
index 0000000..9b86068
--- /dev/null
+++ b/DEPLOYMENT.md
@@ -0,0 +1,214 @@
+# Stones Database - Deployment Guide
+
+This guide provides step-by-step instructions for deploying the Stones Database application to a VPS with Nginx and PostgreSQL.
+
+## Prerequisites
+
+- A VPS server with Ubuntu/Debian
+- A domain or subdomain (e.g., contact.boilerhaus.org)
+- SSH access to your server
+- PostgreSQL database server
+- Node.js and npm installed on the server
+- Nginx web server
+- Let's Encrypt SSL certificates for your domain
+- SSH key set up for Gitea access
+
+## Server Setup Checklist
+
+### 1. Update your server
+```bash
+sudo apt update && sudo apt upgrade -y
+```
+
+### 2. Install Node.js using NVM
+```bash
+# Install NVM
+curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.39.3/install.sh | bash
+source ~/.bashrc
+
+# Install Node.js v18
+nvm install 18
+nvm use 18
+nvm alias default 18
+```
+
+### 3. Install and configure PostgreSQL
+```bash
+sudo apt install postgresql postgresql-contrib -y
+
+# Create a database user and database
+sudo -u postgres psql -c "CREATE USER stonesadmin WITH PASSWORD 'your-secure-password';"
+sudo -u postgres psql -c "CREATE DATABASE stones;"
+sudo -u postgres psql -c "GRANT ALL PRIVILEGES ON DATABASE stones TO stonesadmin;"
+```
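+
+Note: on PostgreSQL 15 and newer, `GRANT ALL PRIVILEGES ON DATABASE` alone does not let the application user create tables, because the `public` schema no longer grants `CREATE` to ordinary users by default. If Prisma migrations fail with a permission error, grant schema access explicitly (assuming the `stonesadmin` user created above):
+
+```bash
+# Allow stonesadmin to create and use objects in the public schema
+sudo -u postgres psql -d stones -c "GRANT ALL ON SCHEMA public TO stonesadmin;"
+# Alternatively, make stonesadmin the owner of the database
+sudo -u postgres psql -c "ALTER DATABASE stones OWNER TO stonesadmin;"
+```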
+
+### 4. Install Nginx
+```bash
+sudo apt install nginx -y
+```
+
+### 5. Install Let's Encrypt Certbot
+```bash
+sudo apt install certbot python3-certbot-nginx -y
+```
+
+### 6. Generate SSL certificate
+```bash
+sudo certbot --nginx -d contact.boilerhaus.org
+```
+
+### 7. Set up SSH key for Gitea
+If you don't already have an SSH key:
+```bash
+ssh-keygen -t rsa -b 4096 -C "your_email@example.com"
+```
+
+Add your public key to your Gitea account at git.boilerhaus.org:
+```bash
+cat ~/.ssh/id_rsa.pub
+```
+
+Copy the output and add it to your Gitea account settings.
+
+## Deployment Process
+
+### 1. Set up your repository on Gitea (git.boilerhaus.org)
+
+Make sure your project is pushed to your Gitea repository at git.boilerhaus.org/boiler/stones. This repository will be used for deployment.
+
+### 2. Clone the repository to your local machine to prepare for deployment
+```bash
+git clone git@git.boilerhaus.org:boiler/stones.git
+cd stones
+```
+
+### 3. Prepare the deployment files
+
+Copy the Nginx configuration and deployment script to your repository:
+
+- `contact-boilerhaus-org.conf`: Nginx configuration for your subdomain
+- `deploy.sh`: Deployment script to automate the deployment process
+
+The deployment script is already configured to use your Gitea server:
+```bash
+REPO_URL="git@git.boilerhaus.org:boiler/stones.git"
+```
+
+### 4. Make the deployment scripts executable
+```bash
+chmod +x deploy.sh backup-db.sh
+```
+
+### 5. Commit and push these files to your repository
+```bash
+git add contact-boilerhaus-org.conf deploy.sh backup-db.sh DEPLOYMENT.md
+git commit -m "Add deployment files"
+git push origin main
+```
+
+### 6. Upload the repository to your server
+
+You can clone the repository directly to your server:
+
+```bash
+ssh your-server-user@your-server-ip
+git clone git@git.boilerhaus.org:boiler/stones.git
+cd stones
+```
+
+Make sure your server has the proper SSH key set up to access your Gitea repository.
+
+### 7. Run the deployment script
+```bash
+./deploy.sh
+```
+
+The script will:
+- Check if SSH key is set up for git user access
+- Clone or update the repository
+- Install dependencies
+- Build the application
+- Create a .env.production file if it doesn't exist
+- Set up PM2 for process management
+- Configure Nginx
+
+### 8. Update the .env.production file with your actual values
+```bash
+nano .env.production
+```
+
+Make sure to update:
+- `DATABASE_URL` with your PostgreSQL credentials
+- `AUTH_SECRET` with a strong random string (see the command below)
+- Any other configuration variables
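+
+A quick way to generate a strong random string for `AUTH_SECRET` (any source of 32+ random bytes works):
+
+```bash
+openssl rand -base64 32
+```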
+
+### 9. Import your database dump (if you have one)
+```bash
+psql -U stonesadmin -d stones -f path/to/your/dump.sql
+```
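+
+If you are restoring from a compressed backup produced by `backup-db.sh`, decompress it on the fly (the timestamped filename below is an example):
+
+```bash
+gunzip -c /var/backups/stones-database/stones_db_backup_2025-01-01_02-00-00.sql.gz | psql -U stonesadmin -d stones
+```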
+
+## Updating the Application
+
+When you need to update the application, you can either:
+
+1. Run the deployment script again, which will pull the latest changes:
+```bash
+cd /path/to/repository
+./deploy.sh
+```
+
+2. Or manually update:
+```bash
+cd /var/www/stones-database
+git pull origin main
+npm ci
+npm run build
+pm2 restart stones-database
+```
+
+## Troubleshooting
+
+### Git Access Issues
+If you encounter issues with Git access:
+```bash
+# Test SSH connection to Gitea
+ssh -T git@git.boilerhaus.org
+
+# Check if SSH agent is running
+eval $(ssh-agent -s)
+ssh-add ~/.ssh/id_rsa
+```
+
+### Nginx Configuration
+If you encounter issues with Nginx:
+```bash
+sudo nginx -t # Test Nginx configuration
+sudo systemctl reload nginx # Reload Nginx
+sudo systemctl status nginx # Check Nginx status
+```
+
+### PM2 Issues
+```bash
+pm2 logs stones-database # View application logs
+pm2 list # Check running processes
+pm2 restart stones-database # Restart the application
+```
+
+### Database Connection
+If your application can't connect to the database:
+1. Check if PostgreSQL is running: `sudo systemctl status postgresql`
+2. Verify your `.env.production` file has the correct DATABASE_URL
+3. Make sure your PostgreSQL configuration allows connections (check pg_hba.conf); a quick connection test is shown below
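+
+A minimal connection test with `psql`, using the same credentials as your `DATABASE_URL` (adjust user, password, host, and port to match your setup):
+
+```bash
+psql "postgresql://stonesadmin:your_password@localhost:5432/stones" -c "SELECT 1;"
+```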
+
+### SSL Certificate
+If you have issues with SSL:
+```bash
+sudo certbot renew --dry-run # Test certificate renewal
+sudo certbot certificates # List certificates
+```
+
+## Notes
+
+- The application runs on port 3001 locally and is proxied through Nginx
+- PM2 is used to keep the application running and restart it if it crashes
+- Make sure to back up your database regularly
\ No newline at end of file
diff --git a/README.md b/README.md
index f9c9e3f..94ab696 100644
--- a/README.md
+++ b/README.md
@@ -1,57 +1,138 @@
# Stones Database
-A database application for collecting Ethereum addresses and contact information for the Farcastle $Stones token launch.
+A web application for managing contacts and their blockchain-related information, including NFT holdings, DAO memberships, and token holdings.
-## Project Overview
+## Development Setup
-This application provides:
-- A database to store Ethereum addresses, ENS names, and contact information
-- Data collection scripts to gather information from various sources (NFT holders, ERC20 holders, Moloch DAO members)
-- A web interface for accessing and managing the database at stones.boilerhaus.org
+### Prerequisites
-## Tech Stack
+- Node.js (v18 or higher; the deploy script and Prisma 6 both target Node 18)
+- PostgreSQL database
+- Git
-- **Backend**: Node.js with Express
-- **Frontend**: Next.js with App Router, React, Shadcn UI, and Tailwind CSS
-- **Database**: PostgreSQL
-- **Data Collection**: Python scripts for blockchain data scraping
-- **Deployment**: Docker for containerization
-
-## Project Structure
-
-```
-/
-├── src/ # Source code
-│ ├── app/ # Next.js app router pages
-│ ├── components/ # React components
-│ ├── lib/ # Shared utilities
-│ └── server/ # Server-side code
-├── scripts/ # Python scripts for data collection
-│ ├── nft_holders/ # Scripts to collect NFT holder data
-│ ├── erc20_holders/ # Scripts to collect ERC20 token holder data
-│ ├── moloch_dao/ # Scripts to collect Moloch DAO member data
-│ └── utils/ # Shared utilities for scripts
-├── prisma/ # Database schema and migrations
-├── public/ # Static assets
-└── docker/ # Docker configuration
-```
-
-## Getting Started
+### Setup Instructions
1. Clone the repository
-2. Install dependencies: `npm install`
-3. Set up environment variables
-4. Run the development server: `npm run dev`
-5. Access the application at http://localhost:3000
+```bash
+git clone git@git.boilerhaus.org:boiler/stones.git
+cd stones
+```
-## Data Collection
+2. Install dependencies
+```bash
+npm install
+```
-The application includes various Python scripts to collect data from:
-- NFT holders
-- ERC20 token holders
-- Moloch DAO members (Raid Guild, DAOhaus, Metacartel)
-- ENS resolution for contact information
+3. Set up the database
+- Create a PostgreSQL database named `stones` (an example command is shown below)
+- Update the `.env.local` file with your database connection string:
+```
+DATABASE_URL="postgresql://username:password@localhost:5432/stones"
+```
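+
+One way to create the database, assuming a local `postgres` superuser (any equivalent method works):
+
+```bash
+createdb -U postgres stones
+# or, from within psql:
+# CREATE DATABASE stones;
+```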
+
+4. Run database migrations
+```bash
+npx prisma migrate dev
+npx prisma generate
+```
+
+5. Start the development server
+```bash
+npm run dev
+# or
+./run-dev.sh
+```
+
+This will start the application at http://localhost:3000.
+
+## Utility Scripts
+
+This project includes several utility scripts to streamline the development and deployment process:
+
+- `run-dev.sh`: Starts the development server with all necessary checks
+- `check-db.sh`: Tests database connectivity and displays database statistics
+- `push-to-gitea.sh`: Pushes changes to the Gitea repository
+- `deploy.sh`: Deploys the application to a production server
+- `backup-db.sh`: Creates a backup of the PostgreSQL database
+
+### Using the Development Server
+
+To run the development server with automatic checks for dependencies and database setup:
+
+```bash
+./run-dev.sh
+```
+
+This script will:
+- Check for Node.js and npm
+- Create a `.env.local` file if it doesn't exist
+- Install dependencies if needed
+- Run database migrations
+- Start the development server
+
+### Checking Database Connectivity
+
+To test your database connection and view statistics:
+
+```bash
+./check-db.sh
+```
+
+This script will connect to your database and display the number of contacts, NFT holdings, DAO memberships, and token holdings.
+
+## Authentication
+
+The application uses a simple authentication system with a hardcoded admin user:
+
+- Username: `admin`
+- Password: `stones1234`
+
+These credentials are documented in the repository, so replace this with a proper authentication system before exposing the application in production.
## Deployment
-The application is deployed at stones.boilerhaus.org
\ No newline at end of file
+For detailed deployment instructions, see [DEPLOYMENT.md](DEPLOYMENT.md).
+
+To deploy to a server:
+
+1. Push changes to Gitea:
+```bash
+./push-to-gitea.sh
+```
+
+2. Connect to your server and clone the repository:
+```bash
+ssh your-server-user@your-server-ip
+git clone git@git.boilerhaus.org:boiler/stones.git
+cd stones
+```
+
+3. Run the deployment script:
+```bash
+./deploy.sh
+```
+
+4. Update the `.env.production` file with your production settings:
+```bash
+nano .env.production
+```
+
+5. Access your application at https://contact.boilerhaus.org (or your configured domain).
+
+## Database Backups
+
+To back up your database:
+
+```bash
+./backup-db.sh
+```
+
+This creates a compressed SQL backup in `/var/backups/stones-database/` with a timestamp.
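+
+To run backups automatically, you can add a cron entry along the lines of the example in `backup-db.sh` (the install path below is an assumption; adjust it to where the script lives on your server):
+
+```bash
+# Run the backup daily at 2:00 AM
+0 2 * * * /var/www/stones-database/backup-db.sh
+```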
+
+## Project Structure
+
+- `src/app`: Next.js App Router pages and API routes
+- `src/components`: React components
+- `src/lib`: Utility functions and libraries
+- `prisma`: Database schema and migrations
+- `scripts`: Data collection and processing scripts
\ No newline at end of file
diff --git a/backup-db.sh b/backup-db.sh
new file mode 100755
index 0000000..75b15e3
--- /dev/null
+++ b/backup-db.sh
@@ -0,0 +1,69 @@
+#!/bin/bash
+# Backup script for Stones Database PostgreSQL database
+
+set -e # Exit immediately if a command exits with a non-zero status
+
+# Configuration
+DB_NAME="stones"
+DB_USER="stonesadmin" # Replace with your actual database user
+BACKUP_DIR="/var/backups/stones-database"
+DATETIME=$(date +"%Y-%m-%d_%H-%M-%S")
+BACKUP_FILE="$BACKUP_DIR/stones_db_backup_$DATETIME.sql"
+LOG_FILE="$BACKUP_DIR/backup.log"
+ROTATION_DAYS=30 # Number of days to keep backups
+
+# Colors for pretty output
+GREEN='\033[0;32m'
+YELLOW='\033[1;33m'
+RED='\033[0;31m'
+NC='\033[0m' # No Color
+
+# Create backup directory if it doesn't exist
+if [ ! -d "$BACKUP_DIR" ]; then
+ echo -e "${YELLOW}Creating backup directory...${NC}"
+ mkdir -p $BACKUP_DIR
+fi
+
+# Log start of backup
+echo -e "$(date): Starting backup of $DB_NAME database" | tee -a $LOG_FILE
+
+# Export database
+echo -e "${YELLOW}Exporting database to $BACKUP_FILE...${NC}"
+if pg_dump -U $DB_USER $DB_NAME > $BACKUP_FILE; then
+ # Compress the backup file
+ echo -e "${YELLOW}Compressing backup file...${NC}"
+ gzip $BACKUP_FILE
+ BACKUP_FILE="$BACKUP_FILE.gz"
+
+ # Calculate file size
+ FILE_SIZE=$(du -h "$BACKUP_FILE" | cut -f1)
+
+ echo -e "${GREEN}Backup completed successfully: $BACKUP_FILE (Size: $FILE_SIZE)${NC}" | tee -a $LOG_FILE
+else
+ echo -e "${RED}Backup failed!${NC}" | tee -a $LOG_FILE
+ exit 1
+fi
+
+# Remove old backups
+echo -e "${YELLOW}Removing backups older than $ROTATION_DAYS days...${NC}"
+find $BACKUP_DIR -name "stones_db_backup_*.sql.gz" -type f -mtime +$ROTATION_DAYS -delete
+echo -e "$(date): Removed old backups" | tee -a $LOG_FILE
+
+# Optional: Copy backup to another location (e.g., remote server or cloud storage)
+# Uncomment and modify these lines to enable remote backup
+
+# SCP to remote server
+# echo -e "${YELLOW}Copying backup to remote server...${NC}"
+# scp $BACKUP_FILE user@remote-server:/path/to/backup/dir/
+
+# Or upload to S3 (requires AWS CLI)
+# echo -e "${YELLOW}Uploading backup to S3...${NC}"
+# aws s3 cp $BACKUP_FILE s3://your-bucket/stones-database/
+
+echo -e "${GREEN}Backup process completed!${NC}"
+
+# To use this script as a cronjob, add a line like this to your crontab:
+# Run daily at 2:00 AM: 0 2 * * * /path/to/backup-db.sh
+
+# To setup the cronjob automatically, uncomment these lines:
+# (crontab -l 2>/dev/null; echo "0 2 * * * $(readlink -f $0)") | crontab -
\ No newline at end of file
diff --git a/check-db.sh b/check-db.sh
new file mode 100755
index 0000000..85fd153
--- /dev/null
+++ b/check-db.sh
@@ -0,0 +1,78 @@
+#!/bin/bash
+# Script to check database connectivity for the Stones Database application
+
+set -e # Exit immediately if a command exits with a non-zero status
+
+# Colors for pretty output
+GREEN='\033[0;32m'
+YELLOW='\033[1;33m'
+RED='\033[0;31m'
+NC='\033[0m' # No Color
+
+echo -e "${YELLOW}Checking database connectivity...${NC}"
+
+# Check if Prisma is installed
+if [ ! -d "node_modules/.prisma" ]; then
+ echo -e "${YELLOW}Installing Prisma dependencies...${NC}"
+ npm install @prisma/client
+ npx prisma generate
+fi
+
+# Create temporary script to check DB connection
+cat > db-check.js << EOF
+const { PrismaClient } = require('@prisma/client');
+
+async function main() {
+ console.log('Attempting to connect to database...');
+ const prisma = new PrismaClient();
+
+ try {
+ // Test the connection
+ await prisma.\$connect();
+ console.log('Successfully connected to the database.');
+
+ // Get some database statistics
+ const contactCount = await prisma.contact.count();
+ const nftHoldingCount = await prisma.nftHolding.count();
+ const daoMembershipCount = await prisma.daoMembership.count();
+ const tokenHoldingCount = await prisma.tokenHolding.count();
+
+ console.log('Database statistics:');
+ console.log(\`Contacts: \${contactCount}\`);
+ console.log(\`NFT Holdings: \${nftHoldingCount}\`);
+ console.log(\`DAO Memberships: \${daoMembershipCount}\`);
+ console.log(\`Token Holdings: \${tokenHoldingCount}\`);
+
+ await prisma.\$disconnect();
+ process.exit(0);
+ } catch (error) {
+ console.error('Failed to connect to the database:', error.message);
+
+ if (error.message.includes('database does not exist')) {
+ console.error('The database specified in your DATABASE_URL does not exist.');
+ console.error('You may need to create it manually:');
+ console.error(' 1. Connect to PostgreSQL using: psql -U postgres');
+ console.error(' 2. Create the database: CREATE DATABASE stones;');
+ console.error(' 3. Update your .env or .env.local file with the correct DATABASE_URL');
+ } else if (error.message.includes('authentication failed')) {
+ console.error('Authentication failed. Check your username and password in DATABASE_URL.');
+ } else if (error.message.includes('connect ECONNREFUSED')) {
+ console.error('Could not connect to PostgreSQL server. Make sure it is running.');
+ }
+
+ await prisma.\$disconnect();
+ process.exit(1);
+ }
+}
+
+main();
+EOF
+
+# Clean up the temporary script on exit, even if the check fails
+# (with set -e, a failed check would otherwise skip an explicit rm)
+trap 'rm -f db-check.js' EXIT
+
+# Run the temporary script
+echo -e "${YELLOW}Running database connection test...${NC}"
+node db-check.js
+
+echo -e "${GREEN}Database check completed.${NC}"
\ No newline at end of file
diff --git a/contact-boilerhaus-org.conf b/contact-boilerhaus-org.conf
new file mode 100644
index 0000000..e773d07
--- /dev/null
+++ b/contact-boilerhaus-org.conf
@@ -0,0 +1,64 @@
+server {
+ listen 80;
+ server_name contact.boilerhaus.org;
+
+ # Redirect HTTP to HTTPS
+ location / {
+ return 301 https://$host$request_uri;
+ }
+}
+
+server {
+ listen 443 ssl http2;
+ server_name contact.boilerhaus.org;
+
+    # SSL Configuration (update these paths if needed: certbot run with
+    # -d contact.boilerhaus.org stores its certificates under
+    # /etc/letsencrypt/live/contact.boilerhaus.org/)
+ ssl_certificate /etc/letsencrypt/live/boilerhaus.org/fullchain.pem;
+ ssl_certificate_key /etc/letsencrypt/live/boilerhaus.org/privkey.pem;
+ ssl_trusted_certificate /etc/letsencrypt/live/boilerhaus.org/chain.pem;
+ ssl_session_timeout 1d;
+ ssl_session_cache shared:SSL:50m;
+ ssl_session_tickets off;
+ ssl_protocols TLSv1.2 TLSv1.3;
+ ssl_ciphers 'ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:DHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384';
+ ssl_prefer_server_ciphers on;
+
+ # HSTS
+ add_header Strict-Transport-Security "max-age=63072000" always;
+
+ # Security Headers
+ add_header X-Content-Type-Options nosniff;
+ add_header X-Frame-Options SAMEORIGIN;
+ add_header X-XSS-Protection "1; mode=block";
+
+ # Logs
+ access_log /var/log/nginx/contact.boilerhaus.org.access.log;
+ error_log /var/log/nginx/contact.boilerhaus.org.error.log;
+
+ # Proxy to Node.js application
+ location / {
+ proxy_pass http://localhost:3001; # Assuming your Next.js app will run on port 3001
+ proxy_http_version 1.1;
+ proxy_set_header Upgrade $http_upgrade;
+ proxy_set_header Connection 'upgrade';
+ proxy_set_header Host $host;
+ proxy_set_header X-Real-IP $remote_addr;
+ proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
+ proxy_set_header X-Forwarded-Proto $scheme;
+ proxy_cache_bypass $http_upgrade;
+ }
+
+ # Serve static files directly
+ location /_next/static {
+ alias /path/to/your/app/.next/static;
+ expires 365d;
+ access_log off;
+ }
+
+ # Serve public files directly
+ location /public {
+ alias /path/to/your/app/public;
+ expires 365d;
+ access_log off;
+ }
+}
\ No newline at end of file
diff --git a/deploy.sh b/deploy.sh
new file mode 100755
index 0000000..01bca16
--- /dev/null
+++ b/deploy.sh
@@ -0,0 +1,118 @@
+#!/bin/bash
+# Deployment script for Stones Database Next.js application
+
+set -e # Exit immediately if a command exits with a non-zero status
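+
+# NOTE: this script writes to /var/www and /etc/nginx and reloads nginx via
+# systemctl, so it is expected to run as root (or be invoked with sudo).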
+
+# Configuration
+APP_NAME="stones-database"
+APP_DIR="/var/www/$APP_NAME"
+REPO_URL="git@git.boilerhaus.org:boiler/stones.git" # Updated to correct Gitea repo URL
+BRANCH="main" # Or whatever branch you want to deploy
+NODE_VERSION="18" # Make sure this matches your development environment
+
+# Colors for pretty output
+GREEN='\033[0;32m'
+YELLOW='\033[1;33m'
+RED='\033[0;31m'
+NC='\033[0m' # No Color
+
+echo -e "${GREEN}Starting deployment of $APP_NAME...${NC}"
+
+# Check if SSH key is set up for git user
+if [ ! -f ~/.ssh/id_rsa ]; then
+ echo -e "${YELLOW}SSH key not found. Make sure your SSH key is set up for git user access.${NC}"
+ echo -e "${YELLOW}You may need to run: ssh-keygen -t rsa -b 4096 -C 'your_email@example.com'${NC}"
+ echo -e "${YELLOW}Then add the public key to your Gitea account.${NC}"
+ exit 1
+fi
+
+# Check if directory exists, if not create it
+if [ ! -d "$APP_DIR" ]; then
+ echo -e "${YELLOW}Creating application directory...${NC}"
+ mkdir -p $APP_DIR
+ # Clone the repository if it's the first time
+ echo -e "${YELLOW}Cloning repository...${NC}"
+ git clone --branch $BRANCH $REPO_URL $APP_DIR
+else
+ echo -e "${YELLOW}Pulling latest changes...${NC}"
+ cd $APP_DIR
+ git fetch --all
+ git reset --hard origin/$BRANCH
+fi
+
+cd $APP_DIR
+
+# Make sure we're using the right Node.js version
+echo -e "${YELLOW}Using Node.js version $NODE_VERSION...${NC}"
+export NVM_DIR="$HOME/.nvm"
+[ -s "$NVM_DIR/nvm.sh" ] && \. "$NVM_DIR/nvm.sh" # This loads nvm
+nvm use $NODE_VERSION || { echo -e "${RED}Failed to switch Node.js version. Make sure NVM is installed.${NC}"; exit 1; }
+
+# Install dependencies
+echo -e "${YELLOW}Installing dependencies...${NC}"
+npm ci || { echo -e "${RED}Failed to install dependencies.${NC}"; exit 1; }
+
+# Build the application
+echo -e "${YELLOW}Building the application...${NC}"
+npm run build || { echo -e "${RED}Build failed.${NC}"; exit 1; }
+
+# Setup environment variables
+echo -e "${YELLOW}Setting up environment variables...${NC}"
+if [ ! -f ".env.production" ]; then
+ echo -e "${YELLOW}Creating .env.production file...${NC}"
+ cat > .env.production << EOF
+# Database Connection
+DATABASE_URL="postgresql://username:password@localhost:5432/stones"
+
+# Authentication
+AUTH_SECRET="your-auth-secret" # Replace with a strong random string
+
+# Application
+NEXT_PUBLIC_APP_URL="https://contact.boilerhaus.org"
+EOF
+ echo -e "${YELLOW}Please update the .env.production file with your actual values.${NC}"
+fi
+
+# Setup PM2 process manager if not already configured
+if ! command -v pm2 &> /dev/null; then
+ echo -e "${YELLOW}Installing PM2 process manager...${NC}"
+ npm install -g pm2
+fi
+
+# Check if the PM2 process already exists
+if pm2 list | grep -q "$APP_NAME"; then
+ echo -e "${YELLOW}Restarting application with PM2...${NC}"
+ pm2 restart $APP_NAME
+else
+ echo -e "${YELLOW}Setting up application with PM2...${NC}"
+ pm2 start npm --name $APP_NAME -- start -- -p 3001
+ # Save PM2 configuration to persist across server restarts
+ pm2 save
+ pm2 startup
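+    # pm2 startup prints a command that usually must be run manually (with sudo)
+    # to register the boot service; follow its output to persist across reboots.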
+fi
+
+# Update Nginx configuration
+echo -e "${YELLOW}Setting up Nginx configuration...${NC}"
+NGINX_CONF="/etc/nginx/sites-available/contact-boilerhaus-org.conf"
+if [ ! -f "$NGINX_CONF" ]; then
+ echo -e "${YELLOW}Copying Nginx configuration file...${NC}"
+    # contact-boilerhaus-org.conf is expected at the repository root (the script has already cd'd into $APP_DIR)
+ cp ./contact-boilerhaus-org.conf $NGINX_CONF
+ # Update paths in the Nginx configuration
+ sed -i "s|/path/to/your/app|$APP_DIR|g" $NGINX_CONF
+
+ # Create symlink if it doesn't exist
+ if [ ! -f "/etc/nginx/sites-enabled/contact-boilerhaus-org.conf" ]; then
+ ln -s $NGINX_CONF /etc/nginx/sites-enabled/
+ fi
+
+ # Test Nginx configuration
+ nginx -t && systemctl reload nginx
+else
+ echo -e "${YELLOW}Nginx configuration already exists.${NC}"
+ # Test Nginx configuration
+ nginx -t && systemctl reload nginx
+fi
+
+echo -e "${GREEN}Deployment completed successfully!${NC}"
+echo -e "${GREEN}Your application should now be accessible at https://contact.boilerhaus.org${NC}"
\ No newline at end of file
diff --git a/import-data.js b/import-data.js
new file mode 100644
index 0000000..5907a61
--- /dev/null
+++ b/import-data.js
@@ -0,0 +1,58 @@
+const { PrismaClient } = require('@prisma/client');
+const prisma = new PrismaClient();
+
+// This script imports data directly via Prisma instead of using psql
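+// Usage: node import-data.js
+// Assumes DATABASE_URL is available in the environment (e.g. via your .env file).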
+async function main() {
+ console.log('Importing data sources...');
+
+ try {
+ // Create DataSource records first
+ await prisma.dataSource.createMany({
+ data: [
+ {
+ name: 'Public Nouns',
+ type: 'NFT',
+ description: 'Public Nouns NFT holders',
+ createdAt: new Date(),
+ updatedAt: new Date()
+ },
+ {
+ name: 'Raid Guild',
+ type: 'DAO',
+ description: 'Raid Guild DAO members',
+ createdAt: new Date(),
+ updatedAt: new Date()
+ },
+ {
+ name: 'Moloch DAO',
+ type: 'DAO',
+ description: 'Moloch DAO members',
+ createdAt: new Date(),
+ updatedAt: new Date()
+ },
+ {
+ name: 'MetaCartel',
+ type: 'DAO',
+ description: 'MetaCartel DAO members',
+ createdAt: new Date(),
+ updatedAt: new Date()
+ }
+ ],
+ skipDuplicates: true
+ });
+
+ console.log('Data sources imported successfully');
+
+ // You can add more import steps here if needed
+
+ } catch (error) {
+ console.error('Error importing data:', error);
+ } finally {
+ await prisma.$disconnect();
+ }
+}
+
+main().catch(error => {
+ console.error(error);
+ process.exit(1);
+});
\ No newline at end of file
diff --git a/next.config.js b/next.config.js
new file mode 100644
index 0000000..ae88795
--- /dev/null
+++ b/next.config.js
@@ -0,0 +1,7 @@
+/** @type {import('next').NextConfig} */
+const nextConfig = {
+ reactStrictMode: true,
+ swcMinify: true,
+}
+
+module.exports = nextConfig
diff --git a/package-lock.json b/package-lock.json
index 52c2f49..a02ad22 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -8,7 +8,7 @@
"name": "stones-database",
"version": "0.1.0",
"dependencies": {
- "@prisma/client": "5.10.2",
+ "@prisma/client": "^6.5.0",
"@radix-ui/react-avatar": "^1.0.4",
"@radix-ui/react-dialog": "^1.0.5",
"@radix-ui/react-dropdown-menu": "^2.0.6",
@@ -18,16 +18,16 @@
"@radix-ui/react-tabs": "^1.0.4",
"@radix-ui/react-toast": "^1.1.5",
"class-variance-authority": "^0.7.0",
- "clsx": "^2.1.0",
+ "clsx": "^2.1.1",
"express": "^4.18.2",
"framer-motion": "^11.0.5",
"lucide-react": "^0.331.0",
- "next": "14.1.0",
+ "next": "^14.2.25",
"next-themes": "^0.2.1",
"nuqs": "^1.16.0",
"react": "^18.2.0",
"react-dom": "^18.2.0",
- "tailwind-merge": "^2.2.1",
+ "tailwind-merge": "^2.6.0",
"tailwindcss-animate": "^1.0.7",
"zod": "^3.22.4"
},
@@ -38,9 +38,9 @@
"@types/react-dom": "^18.2.19",
"autoprefixer": "^10.4.17",
"eslint": "^8.56.0",
- "eslint-config-next": "14.1.0",
+ "eslint-config-next": "^14.2.25",
"postcss": "^8.4.35",
- "prisma": "^5.10.2",
+ "prisma": "^6.5.0",
"tailwindcss": "^3.4.1",
"typescript": "^5.3.3"
}
@@ -57,6 +57,431 @@
"url": "https://github.com/sponsors/sindresorhus"
}
},
+ "node_modules/@esbuild/aix-ppc64": {
+ "version": "0.25.1",
+ "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.25.1.tgz",
+ "integrity": "sha512-kfYGy8IdzTGy+z0vFGvExZtxkFlA4zAxgKEahG9KE1ScBjpQnFsNOX8KTU5ojNru5ed5CVoJYXFtoxaq5nFbjQ==",
+ "cpu": [
+ "ppc64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "aix"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/android-arm": {
+ "version": "0.25.1",
+ "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.25.1.tgz",
+ "integrity": "sha512-dp+MshLYux6j/JjdqVLnMglQlFu+MuVeNrmT5nk6q07wNhCdSnB7QZj+7G8VMUGh1q+vj2Bq8kRsuyA00I/k+Q==",
+ "cpu": [
+ "arm"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "android"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/android-arm64": {
+ "version": "0.25.1",
+ "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.25.1.tgz",
+ "integrity": "sha512-50tM0zCJW5kGqgG7fQ7IHvQOcAn9TKiVRuQ/lN0xR+T2lzEFvAi1ZcS8DiksFcEpf1t/GYOeOfCAgDHFpkiSmA==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "android"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/android-x64": {
+ "version": "0.25.1",
+ "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.25.1.tgz",
+ "integrity": "sha512-GCj6WfUtNldqUzYkN/ITtlhwQqGWu9S45vUXs7EIYf+7rCiiqH9bCloatO9VhxsL0Pji+PF4Lz2XXCES+Q8hDw==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "android"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/darwin-arm64": {
+ "version": "0.25.1",
+ "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.25.1.tgz",
+ "integrity": "sha512-5hEZKPf+nQjYoSr/elb62U19/l1mZDdqidGfmFutVUjjUZrOazAtwK+Kr+3y0C/oeJfLlxo9fXb1w7L+P7E4FQ==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "darwin"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/darwin-x64": {
+ "version": "0.25.1",
+ "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.25.1.tgz",
+ "integrity": "sha512-hxVnwL2Dqs3fM1IWq8Iezh0cX7ZGdVhbTfnOy5uURtao5OIVCEyj9xIzemDi7sRvKsuSdtCAhMKarxqtlyVyfA==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "darwin"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/freebsd-arm64": {
+ "version": "0.25.1",
+ "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.25.1.tgz",
+ "integrity": "sha512-1MrCZs0fZa2g8E+FUo2ipw6jw5qqQiH+tERoS5fAfKnRx6NXH31tXBKI3VpmLijLH6yriMZsxJtaXUyFt/8Y4A==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "freebsd"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/freebsd-x64": {
+ "version": "0.25.1",
+ "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.25.1.tgz",
+ "integrity": "sha512-0IZWLiTyz7nm0xuIs0q1Y3QWJC52R8aSXxe40VUxm6BB1RNmkODtW6LHvWRrGiICulcX7ZvyH6h5fqdLu4gkww==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "freebsd"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/linux-arm": {
+ "version": "0.25.1",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.25.1.tgz",
+ "integrity": "sha512-NdKOhS4u7JhDKw9G3cY6sWqFcnLITn6SqivVArbzIaf3cemShqfLGHYMx8Xlm/lBit3/5d7kXvriTUGa5YViuQ==",
+ "cpu": [
+ "arm"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/linux-arm64": {
+ "version": "0.25.1",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.25.1.tgz",
+ "integrity": "sha512-jaN3dHi0/DDPelk0nLcXRm1q7DNJpjXy7yWaWvbfkPvI+7XNSc/lDOnCLN7gzsyzgu6qSAmgSvP9oXAhP973uQ==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/linux-ia32": {
+ "version": "0.25.1",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.25.1.tgz",
+ "integrity": "sha512-OJykPaF4v8JidKNGz8c/q1lBO44sQNUQtq1KktJXdBLn1hPod5rE/Hko5ugKKZd+D2+o1a9MFGUEIUwO2YfgkQ==",
+ "cpu": [
+ "ia32"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/linux-loong64": {
+ "version": "0.25.1",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.25.1.tgz",
+ "integrity": "sha512-nGfornQj4dzcq5Vp835oM/o21UMlXzn79KobKlcs3Wz9smwiifknLy4xDCLUU0BWp7b/houtdrgUz7nOGnfIYg==",
+ "cpu": [
+ "loong64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/linux-mips64el": {
+ "version": "0.25.1",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.25.1.tgz",
+ "integrity": "sha512-1osBbPEFYwIE5IVB/0g2X6i1qInZa1aIoj1TdL4AaAb55xIIgbg8Doq6a5BzYWgr+tEcDzYH67XVnTmUzL+nXg==",
+ "cpu": [
+ "mips64el"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/linux-ppc64": {
+ "version": "0.25.1",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.25.1.tgz",
+ "integrity": "sha512-/6VBJOwUf3TdTvJZ82qF3tbLuWsscd7/1w+D9LH0W/SqUgM5/JJD0lrJ1fVIfZsqB6RFmLCe0Xz3fmZc3WtyVg==",
+ "cpu": [
+ "ppc64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/linux-riscv64": {
+ "version": "0.25.1",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.25.1.tgz",
+ "integrity": "sha512-nSut/Mx5gnilhcq2yIMLMe3Wl4FK5wx/o0QuuCLMtmJn+WeWYoEGDN1ipcN72g1WHsnIbxGXd4i/MF0gTcuAjQ==",
+ "cpu": [
+ "riscv64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/linux-s390x": {
+ "version": "0.25.1",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.25.1.tgz",
+ "integrity": "sha512-cEECeLlJNfT8kZHqLarDBQso9a27o2Zd2AQ8USAEoGtejOrCYHNtKP8XQhMDJMtthdF4GBmjR2au3x1udADQQQ==",
+ "cpu": [
+ "s390x"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/linux-x64": {
+ "version": "0.25.1",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.25.1.tgz",
+ "integrity": "sha512-xbfUhu/gnvSEg+EGovRc+kjBAkrvtk38RlerAzQxvMzlB4fXpCFCeUAYzJvrnhFtdeyVCDANSjJvOvGYoeKzFA==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/netbsd-arm64": {
+ "version": "0.25.1",
+ "resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.25.1.tgz",
+ "integrity": "sha512-O96poM2XGhLtpTh+s4+nP7YCCAfb4tJNRVZHfIE7dgmax+yMP2WgMd2OecBuaATHKTHsLWHQeuaxMRnCsH8+5g==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "netbsd"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/netbsd-x64": {
+ "version": "0.25.1",
+ "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.25.1.tgz",
+ "integrity": "sha512-X53z6uXip6KFXBQ+Krbx25XHV/NCbzryM6ehOAeAil7X7oa4XIq+394PWGnwaSQ2WRA0KI6PUO6hTO5zeF5ijA==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "netbsd"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/openbsd-arm64": {
+ "version": "0.25.1",
+ "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.25.1.tgz",
+ "integrity": "sha512-Na9T3szbXezdzM/Kfs3GcRQNjHzM6GzFBeU1/6IV/npKP5ORtp9zbQjvkDJ47s6BCgaAZnnnu/cY1x342+MvZg==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "openbsd"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/openbsd-x64": {
+ "version": "0.25.1",
+ "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.25.1.tgz",
+ "integrity": "sha512-T3H78X2h1tszfRSf+txbt5aOp/e7TAz3ptVKu9Oyir3IAOFPGV6O9c2naym5TOriy1l0nNf6a4X5UXRZSGX/dw==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "openbsd"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/sunos-x64": {
+ "version": "0.25.1",
+ "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.25.1.tgz",
+ "integrity": "sha512-2H3RUvcmULO7dIE5EWJH8eubZAI4xw54H1ilJnRNZdeo8dTADEZ21w6J22XBkXqGJbe0+wnNJtw3UXRoLJnFEg==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "sunos"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/win32-arm64": {
+ "version": "0.25.1",
+ "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.25.1.tgz",
+ "integrity": "sha512-GE7XvrdOzrb+yVKB9KsRMq+7a2U/K5Cf/8grVFRAGJmfADr/e/ODQ134RK2/eeHqYV5eQRFxb1hY7Nr15fv1NQ==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "win32"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/win32-ia32": {
+ "version": "0.25.1",
+ "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.25.1.tgz",
+ "integrity": "sha512-uOxSJCIcavSiT6UnBhBzE8wy3n0hOkJsBOzy7HDAuTDE++1DJMRRVCPGisULScHL+a/ZwdXPpXD3IyFKjA7K8A==",
+ "cpu": [
+ "ia32"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "win32"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
+ "node_modules/@esbuild/win32-x64": {
+ "version": "0.25.1",
+ "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.25.1.tgz",
+ "integrity": "sha512-Y1EQdcfwMSeQN/ujR5VayLOJ1BHaK+ssyk0AEzPjC+t1lITgsnccPqFjb6V+LsTp/9Iov4ysfjxLaGJ9RPtkVg==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "win32"
+ ],
+ "engines": {
+ "node": ">=18"
+ }
+ },
"node_modules/@eslint-community/eslint-utils": {
"version": "4.5.1",
"resolved": "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.5.1.tgz",
@@ -289,15 +714,15 @@
}
},
"node_modules/@next/env": {
- "version": "14.1.0",
- "resolved": "https://registry.npmjs.org/@next/env/-/env-14.1.0.tgz",
- "integrity": "sha512-Py8zIo+02ht82brwwhTg36iogzFqGLPXlRGKQw5s+qP/kMNc4MAyDeEwBKDijk6zTIbegEgu8Qy7C1LboslQAw==",
+ "version": "14.2.25",
+ "resolved": "https://registry.npmjs.org/@next/env/-/env-14.2.25.tgz",
+ "integrity": "sha512-JnzQ2cExDeG7FxJwqAksZ3aqVJrHjFwZQAEJ9gQZSoEhIow7SNoKZzju/AwQ+PLIR4NY8V0rhcVozx/2izDO0w==",
"license": "MIT"
},
"node_modules/@next/eslint-plugin-next": {
- "version": "14.1.0",
- "resolved": "https://registry.npmjs.org/@next/eslint-plugin-next/-/eslint-plugin-next-14.1.0.tgz",
- "integrity": "sha512-x4FavbNEeXx/baD/zC/SdrvkjSby8nBn8KcCREqk6UuwvwoAPZmaV8TFCAuo/cpovBRTIY67mHhe86MQQm/68Q==",
+ "version": "14.2.25",
+ "resolved": "https://registry.npmjs.org/@next/eslint-plugin-next/-/eslint-plugin-next-14.2.25.tgz",
+ "integrity": "sha512-L2jcdEEa0bTv1DhE67Cdx1kLLkL0iLL9ILdBYx0j7noi2AUJM7bwcqmcN8awGg+8uyKGAGof/OkFom50x+ZyZg==",
"dev": true,
"license": "MIT",
"dependencies": {
@@ -305,9 +730,9 @@
}
},
"node_modules/@next/swc-darwin-arm64": {
- "version": "14.1.0",
- "resolved": "https://registry.npmjs.org/@next/swc-darwin-arm64/-/swc-darwin-arm64-14.1.0.tgz",
- "integrity": "sha512-nUDn7TOGcIeyQni6lZHfzNoo9S0euXnu0jhsbMOmMJUBfgsnESdjN97kM7cBqQxZa8L/bM9om/S5/1dzCrW6wQ==",
+ "version": "14.2.25",
+ "resolved": "https://registry.npmjs.org/@next/swc-darwin-arm64/-/swc-darwin-arm64-14.2.25.tgz",
+ "integrity": "sha512-09clWInF1YRd6le00vt750s3m7SEYNehz9C4PUcSu3bAdCTpjIV4aTYQZ25Ehrr83VR1rZeqtKUPWSI7GfuKZQ==",
"cpu": [
"arm64"
],
@@ -321,9 +746,9 @@
}
},
"node_modules/@next/swc-darwin-x64": {
- "version": "14.1.0",
- "resolved": "https://registry.npmjs.org/@next/swc-darwin-x64/-/swc-darwin-x64-14.1.0.tgz",
- "integrity": "sha512-1jgudN5haWxiAl3O1ljUS2GfupPmcftu2RYJqZiMJmmbBT5M1XDffjUtRUzP4W3cBHsrvkfOFdQ71hAreNQP6g==",
+ "version": "14.2.25",
+ "resolved": "https://registry.npmjs.org/@next/swc-darwin-x64/-/swc-darwin-x64-14.2.25.tgz",
+ "integrity": "sha512-V+iYM/QR+aYeJl3/FWWU/7Ix4b07ovsQ5IbkwgUK29pTHmq+5UxeDr7/dphvtXEq5pLB/PucfcBNh9KZ8vWbug==",
"cpu": [
"x64"
],
@@ -337,9 +762,9 @@
}
},
"node_modules/@next/swc-linux-arm64-gnu": {
- "version": "14.1.0",
- "resolved": "https://registry.npmjs.org/@next/swc-linux-arm64-gnu/-/swc-linux-arm64-gnu-14.1.0.tgz",
- "integrity": "sha512-RHo7Tcj+jllXUbK7xk2NyIDod3YcCPDZxj1WLIYxd709BQ7WuRYl3OWUNG+WUfqeQBds6kvZYlc42NJJTNi4tQ==",
+ "version": "14.2.25",
+ "resolved": "https://registry.npmjs.org/@next/swc-linux-arm64-gnu/-/swc-linux-arm64-gnu-14.2.25.tgz",
+ "integrity": "sha512-LFnV2899PJZAIEHQ4IMmZIgL0FBieh5keMnriMY1cK7ompR+JUd24xeTtKkcaw8QmxmEdhoE5Mu9dPSuDBgtTg==",
"cpu": [
"arm64"
],
@@ -353,9 +778,9 @@
}
},
"node_modules/@next/swc-linux-arm64-musl": {
- "version": "14.1.0",
- "resolved": "https://registry.npmjs.org/@next/swc-linux-arm64-musl/-/swc-linux-arm64-musl-14.1.0.tgz",
- "integrity": "sha512-v6kP8sHYxjO8RwHmWMJSq7VZP2nYCkRVQ0qolh2l6xroe9QjbgV8siTbduED4u0hlk0+tjS6/Tuy4n5XCp+l6g==",
+ "version": "14.2.25",
+ "resolved": "https://registry.npmjs.org/@next/swc-linux-arm64-musl/-/swc-linux-arm64-musl-14.2.25.tgz",
+ "integrity": "sha512-QC5y5PPTmtqFExcKWKYgUNkHeHE/z3lUsu83di488nyP0ZzQ3Yse2G6TCxz6nNsQwgAx1BehAJTZez+UQxzLfw==",
"cpu": [
"arm64"
],
@@ -369,9 +794,9 @@
}
},
"node_modules/@next/swc-linux-x64-gnu": {
- "version": "14.1.0",
- "resolved": "https://registry.npmjs.org/@next/swc-linux-x64-gnu/-/swc-linux-x64-gnu-14.1.0.tgz",
- "integrity": "sha512-zJ2pnoFYB1F4vmEVlb/eSe+VH679zT1VdXlZKX+pE66grOgjmKJHKacf82g/sWE4MQ4Rk2FMBCRnX+l6/TVYzQ==",
+ "version": "14.2.25",
+ "resolved": "https://registry.npmjs.org/@next/swc-linux-x64-gnu/-/swc-linux-x64-gnu-14.2.25.tgz",
+ "integrity": "sha512-y6/ML4b9eQ2D/56wqatTJN5/JR8/xdObU2Fb1RBidnrr450HLCKr6IJZbPqbv7NXmje61UyxjF5kvSajvjye5w==",
"cpu": [
"x64"
],
@@ -385,9 +810,9 @@
}
},
"node_modules/@next/swc-linux-x64-musl": {
- "version": "14.1.0",
- "resolved": "https://registry.npmjs.org/@next/swc-linux-x64-musl/-/swc-linux-x64-musl-14.1.0.tgz",
- "integrity": "sha512-rbaIYFt2X9YZBSbH/CwGAjbBG2/MrACCVu2X0+kSykHzHnYH5FjHxwXLkcoJ10cX0aWCEynpu+rP76x0914atg==",
+ "version": "14.2.25",
+ "resolved": "https://registry.npmjs.org/@next/swc-linux-x64-musl/-/swc-linux-x64-musl-14.2.25.tgz",
+ "integrity": "sha512-sPX0TSXHGUOZFvv96GoBXpB3w4emMqKeMgemrSxI7A6l55VBJp/RKYLwZIB9JxSqYPApqiREaIIap+wWq0RU8w==",
"cpu": [
"x64"
],
@@ -401,9 +826,9 @@
}
},
"node_modules/@next/swc-win32-arm64-msvc": {
- "version": "14.1.0",
- "resolved": "https://registry.npmjs.org/@next/swc-win32-arm64-msvc/-/swc-win32-arm64-msvc-14.1.0.tgz",
- "integrity": "sha512-o1N5TsYc8f/HpGt39OUQpQ9AKIGApd3QLueu7hXk//2xq5Z9OxmV6sQfNp8C7qYmiOlHYODOGqNNa0e9jvchGQ==",
+ "version": "14.2.25",
+ "resolved": "https://registry.npmjs.org/@next/swc-win32-arm64-msvc/-/swc-win32-arm64-msvc-14.2.25.tgz",
+ "integrity": "sha512-ReO9S5hkA1DU2cFCsGoOEp7WJkhFzNbU/3VUF6XxNGUCQChyug6hZdYL/istQgfT/GWE6PNIg9cm784OI4ddxQ==",
"cpu": [
"arm64"
],
@@ -417,9 +842,9 @@
}
},
"node_modules/@next/swc-win32-ia32-msvc": {
- "version": "14.1.0",
- "resolved": "https://registry.npmjs.org/@next/swc-win32-ia32-msvc/-/swc-win32-ia32-msvc-14.1.0.tgz",
- "integrity": "sha512-XXIuB1DBRCFwNO6EEzCTMHT5pauwaSj4SWs7CYnME57eaReAKBXCnkUE80p/pAZcewm7hs+vGvNqDPacEXHVkw==",
+ "version": "14.2.25",
+ "resolved": "https://registry.npmjs.org/@next/swc-win32-ia32-msvc/-/swc-win32-ia32-msvc-14.2.25.tgz",
+ "integrity": "sha512-DZ/gc0o9neuCDyD5IumyTGHVun2dCox5TfPQI/BJTYwpSNYM3CZDI4i6TOdjeq1JMo+Ug4kPSMuZdwsycwFbAw==",
"cpu": [
"ia32"
],
@@ -433,9 +858,9 @@
}
},
"node_modules/@next/swc-win32-x64-msvc": {
- "version": "14.1.0",
- "resolved": "https://registry.npmjs.org/@next/swc-win32-x64-msvc/-/swc-win32-x64-msvc-14.1.0.tgz",
- "integrity": "sha512-9WEbVRRAqJ3YFVqEZIxUqkiO8l1nool1LmNxygr5HWF8AcSYsEpneUDhmjUVJEzO2A04+oPtZdombzzPPkTtgg==",
+ "version": "14.2.25",
+ "resolved": "https://registry.npmjs.org/@next/swc-win32-x64-msvc/-/swc-win32-x64-msvc-14.2.25.tgz",
+ "integrity": "sha512-KSznmS6eFjQ9RJ1nEc66kJvtGIL1iZMYmGEXsZPh2YtnLtqrgdVvKXJY2ScjjoFnG6nGLyPFR0UiEvDwVah4Tw==",
"cpu": [
"x64"
],
@@ -504,71 +929,86 @@
}
},
"node_modules/@prisma/client": {
- "version": "5.10.2",
- "resolved": "https://registry.npmjs.org/@prisma/client/-/client-5.10.2.tgz",
- "integrity": "sha512-ef49hzB2yJZCvM5gFHMxSFL9KYrIP9udpT5rYo0CsHD4P9IKj473MbhU1gjKKftiwWBTIyrt9jukprzZXazyag==",
+ "version": "6.5.0",
+ "resolved": "https://registry.npmjs.org/@prisma/client/-/client-6.5.0.tgz",
+ "integrity": "sha512-M6w1Ql/BeiGoZmhMdAZUXHu5sz5HubyVcKukbLs3l0ELcQb8hTUJxtGEChhv4SVJ0QJlwtLnwOLgIRQhpsm9dw==",
"hasInstallScript": true,
"license": "Apache-2.0",
"engines": {
- "node": ">=16.13"
+ "node": ">=18.18"
},
"peerDependencies": {
- "prisma": "*"
+ "prisma": "*",
+ "typescript": ">=5.1.0"
},
"peerDependenciesMeta": {
"prisma": {
"optional": true
+ },
+ "typescript": {
+ "optional": true
}
}
},
+ "node_modules/@prisma/config": {
+ "version": "6.5.0",
+ "resolved": "https://registry.npmjs.org/@prisma/config/-/config-6.5.0.tgz",
+ "integrity": "sha512-sOH/2Go9Zer67DNFLZk6pYOHj+rumSb0VILgltkoxOjYnlLqUpHPAN826vnx8HigqnOCxj9LRhT6U7uLiIIWgw==",
+ "devOptional": true,
+ "license": "Apache-2.0",
+ "dependencies": {
+ "esbuild": ">=0.12 <1",
+ "esbuild-register": "3.6.0"
+ }
+ },
"node_modules/@prisma/debug": {
- "version": "5.10.2",
- "resolved": "https://registry.npmjs.org/@prisma/debug/-/debug-5.10.2.tgz",
- "integrity": "sha512-bkBOmH9dpEBbMKFJj8V+Zp8IZHIBjy3fSyhLhxj4FmKGb/UBSt9doyfA6k1UeUREsMJft7xgPYBbHSOYBr8XCA==",
+ "version": "6.5.0",
+ "resolved": "https://registry.npmjs.org/@prisma/debug/-/debug-6.5.0.tgz",
+ "integrity": "sha512-fc/nusYBlJMzDmDepdUtH9aBsJrda2JNErP9AzuHbgUEQY0/9zQYZdNlXmKoIWENtio+qarPNe/+DQtrX5kMcQ==",
"devOptional": true,
"license": "Apache-2.0"
},
"node_modules/@prisma/engines": {
- "version": "5.10.2",
- "resolved": "https://registry.npmjs.org/@prisma/engines/-/engines-5.10.2.tgz",
- "integrity": "sha512-HkSJvix6PW8YqEEt3zHfCYYJY69CXsNdhU+wna+4Y7EZ+AwzeupMnUThmvaDA7uqswiHkgm5/SZ6/4CStjaGmw==",
+ "version": "6.5.0",
+ "resolved": "https://registry.npmjs.org/@prisma/engines/-/engines-6.5.0.tgz",
+ "integrity": "sha512-FVPQYHgOllJklN9DUyujXvh3hFJCY0NX86sDmBErLvoZjy2OXGiZ5FNf3J/C4/RZZmCypZBYpBKEhx7b7rEsdw==",
"devOptional": true,
"hasInstallScript": true,
"license": "Apache-2.0",
"dependencies": {
- "@prisma/debug": "5.10.2",
- "@prisma/engines-version": "5.10.0-34.5a9203d0590c951969e85a7d07215503f4672eb9",
- "@prisma/fetch-engine": "5.10.2",
- "@prisma/get-platform": "5.10.2"
+ "@prisma/debug": "6.5.0",
+ "@prisma/engines-version": "6.5.0-73.173f8d54f8d52e692c7e27e72a88314ec7aeff60",
+ "@prisma/fetch-engine": "6.5.0",
+ "@prisma/get-platform": "6.5.0"
}
},
"node_modules/@prisma/engines-version": {
- "version": "5.10.0-34.5a9203d0590c951969e85a7d07215503f4672eb9",
- "resolved": "https://registry.npmjs.org/@prisma/engines-version/-/engines-version-5.10.0-34.5a9203d0590c951969e85a7d07215503f4672eb9.tgz",
- "integrity": "sha512-uCy/++3Jx/O3ufM+qv2H1L4tOemTNqcP/gyEVOlZqTpBvYJUe0tWtW0y3o2Ueq04mll4aM5X3f6ugQftOSLdFQ==",
+ "version": "6.5.0-73.173f8d54f8d52e692c7e27e72a88314ec7aeff60",
+ "resolved": "https://registry.npmjs.org/@prisma/engines-version/-/engines-version-6.5.0-73.173f8d54f8d52e692c7e27e72a88314ec7aeff60.tgz",
+ "integrity": "sha512-iK3EmiVGFDCmXjSpdsKGNqy9hOdLnvYBrJB61far/oP03hlIxrb04OWmDjNTwtmZ3UZdA5MCvI+f+3k2jPTflQ==",
"devOptional": true,
"license": "Apache-2.0"
},
"node_modules/@prisma/fetch-engine": {
- "version": "5.10.2",
- "resolved": "https://registry.npmjs.org/@prisma/fetch-engine/-/fetch-engine-5.10.2.tgz",
- "integrity": "sha512-dSmXcqSt6DpTmMaLQ9K8ZKzVAMH3qwGCmYEZr/uVnzVhxRJ1EbT/w2MMwIdBNq1zT69Rvh0h75WMIi0mrIw7Hg==",
+ "version": "6.5.0",
+ "resolved": "https://registry.npmjs.org/@prisma/fetch-engine/-/fetch-engine-6.5.0.tgz",
+ "integrity": "sha512-3LhYA+FXP6pqY8FLHCjewyE8pGXXJ7BxZw2rhPq+CZAhvflVzq4K8Qly3OrmOkn6wGlz79nyLQdknyCG2HBTuA==",
"devOptional": true,
"license": "Apache-2.0",
"dependencies": {
- "@prisma/debug": "5.10.2",
- "@prisma/engines-version": "5.10.0-34.5a9203d0590c951969e85a7d07215503f4672eb9",
- "@prisma/get-platform": "5.10.2"
+ "@prisma/debug": "6.5.0",
+ "@prisma/engines-version": "6.5.0-73.173f8d54f8d52e692c7e27e72a88314ec7aeff60",
+ "@prisma/get-platform": "6.5.0"
}
},
"node_modules/@prisma/get-platform": {
- "version": "5.10.2",
- "resolved": "https://registry.npmjs.org/@prisma/get-platform/-/get-platform-5.10.2.tgz",
- "integrity": "sha512-nqXP6vHiY2PIsebBAuDeWiUYg8h8mfjBckHh6Jezuwej0QJNnjDiOq30uesmg+JXxGk99nqyG3B7wpcOODzXvg==",
+ "version": "6.5.0",
+ "resolved": "https://registry.npmjs.org/@prisma/get-platform/-/get-platform-6.5.0.tgz",
+ "integrity": "sha512-xYcvyJwNMg2eDptBYFqFLUCfgi+wZLcj6HDMsj0Qw0irvauG4IKmkbywnqwok0B+k+W+p+jThM2DKTSmoPCkzw==",
"devOptional": true,
"license": "Apache-2.0",
"dependencies": {
- "@prisma/debug": "5.10.2"
+ "@prisma/debug": "6.5.0"
}
},
"node_modules/@radix-ui/number": {
@@ -1335,12 +1775,19 @@
"dev": true,
"license": "MIT"
},
+ "node_modules/@swc/counter": {
+ "version": "0.1.3",
+ "resolved": "https://registry.npmjs.org/@swc/counter/-/counter-0.1.3.tgz",
+ "integrity": "sha512-e2BR4lsJkkRlKZ/qCHPw9ZaSxc0MVUd7gtbtaB7aMvHeJVYe8sOB8DBZkP2DtISHGSku9sCK6T6cnY0CtXrOCQ==",
+ "license": "Apache-2.0"
+ },
"node_modules/@swc/helpers": {
- "version": "0.5.2",
- "resolved": "https://registry.npmjs.org/@swc/helpers/-/helpers-0.5.2.tgz",
- "integrity": "sha512-E4KcWTpoLHqwPHLxidpOqQbcrZVgi0rsmmZXUle1jXmJfuIf/UWpczUJ7MZZ5tlxytgJXyp0w4PGkkeLiuIdZw==",
+ "version": "0.5.5",
+ "resolved": "https://registry.npmjs.org/@swc/helpers/-/helpers-0.5.5.tgz",
+ "integrity": "sha512-KGYxvIOXcceOAbEk4bi/dVLEK9z8sZ0uBB3Il5b1rhfClSpcX0yfRO0KmTkqR2cnQDymwLB+25ZyMzICg/cm/A==",
"license": "Apache-2.0",
"dependencies": {
+ "@swc/counter": "^0.1.3",
"tslib": "^2.4.0"
}
},
@@ -1487,61 +1934,111 @@
"@types/send": "*"
}
},
- "node_modules/@typescript-eslint/parser": {
- "version": "6.21.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-6.21.0.tgz",
- "integrity": "sha512-tbsV1jPne5CkFQCgPBcDOt30ItF7aJoZL997JSF7MhGQqOeT3svWRYxiqlfA5RUdlHN6Fi+EI9bxqbdyAUZjYQ==",
+ "node_modules/@typescript-eslint/eslint-plugin": {
+ "version": "8.26.1",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.26.1.tgz",
+ "integrity": "sha512-2X3mwqsj9Bd3Ciz508ZUtoQQYpOhU/kWoUqIf49H8Z0+Vbh6UF/y0OEYp0Q0axOGzaBGs7QxRwq0knSQ8khQNA==",
"dev": true,
- "license": "BSD-2-Clause",
+ "license": "MIT",
"dependencies": {
- "@typescript-eslint/scope-manager": "6.21.0",
- "@typescript-eslint/types": "6.21.0",
- "@typescript-eslint/typescript-estree": "6.21.0",
- "@typescript-eslint/visitor-keys": "6.21.0",
- "debug": "^4.3.4"
+ "@eslint-community/regexpp": "^4.10.0",
+ "@typescript-eslint/scope-manager": "8.26.1",
+ "@typescript-eslint/type-utils": "8.26.1",
+ "@typescript-eslint/utils": "8.26.1",
+ "@typescript-eslint/visitor-keys": "8.26.1",
+ "graphemer": "^1.4.0",
+ "ignore": "^5.3.1",
+ "natural-compare": "^1.4.0",
+ "ts-api-utils": "^2.0.1"
},
"engines": {
- "node": "^16.0.0 || >=18.0.0"
+ "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/typescript-eslint"
},
"peerDependencies": {
- "eslint": "^7.0.0 || ^8.0.0"
- },
- "peerDependenciesMeta": {
- "typescript": {
- "optional": true
- }
+ "@typescript-eslint/parser": "^8.0.0 || ^8.0.0-alpha.0",
+ "eslint": "^8.57.0 || ^9.0.0",
+ "typescript": ">=4.8.4 <5.9.0"
}
},
- "node_modules/@typescript-eslint/scope-manager": {
- "version": "6.21.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-6.21.0.tgz",
- "integrity": "sha512-OwLUIWZJry80O99zvqXVEioyniJMa+d2GrqpUTqi5/v5D5rOrppJVBPa0yKCblcigC0/aYAzxxqQ1B+DS2RYsg==",
+ "node_modules/@typescript-eslint/parser": {
+ "version": "8.26.1",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.26.1.tgz",
+ "integrity": "sha512-w6HZUV4NWxqd8BdeFf81t07d7/YV9s7TCWrQQbG5uhuvGUAW+fq1usZ1Hmz9UPNLniFnD8GLSsDpjP0hm1S4lQ==",
"dev": true,
"license": "MIT",
"dependencies": {
- "@typescript-eslint/types": "6.21.0",
- "@typescript-eslint/visitor-keys": "6.21.0"
+ "@typescript-eslint/scope-manager": "8.26.1",
+ "@typescript-eslint/types": "8.26.1",
+ "@typescript-eslint/typescript-estree": "8.26.1",
+ "@typescript-eslint/visitor-keys": "8.26.1",
+ "debug": "^4.3.4"
},
"engines": {
- "node": "^16.0.0 || >=18.0.0"
+ "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/typescript-eslint"
+ },
+ "peerDependencies": {
+ "eslint": "^8.57.0 || ^9.0.0",
+ "typescript": ">=4.8.4 <5.9.0"
+ }
+ },
+ "node_modules/@typescript-eslint/scope-manager": {
+ "version": "8.26.1",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.26.1.tgz",
+ "integrity": "sha512-6EIvbE5cNER8sqBu6V7+KeMZIC1664d2Yjt+B9EWUXrsyWpxx4lEZrmvxgSKRC6gX+efDL/UY9OpPZ267io3mg==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@typescript-eslint/types": "8.26.1",
+ "@typescript-eslint/visitor-keys": "8.26.1"
+ },
+ "engines": {
+ "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/typescript-eslint"
}
},
+ "node_modules/@typescript-eslint/type-utils": {
+ "version": "8.26.1",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.26.1.tgz",
+ "integrity": "sha512-Kcj/TagJLwoY/5w9JGEFV0dclQdyqw9+VMndxOJKtoFSjfZhLXhYjzsQEeyza03rwHx2vFEGvrJWJBXKleRvZg==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@typescript-eslint/typescript-estree": "8.26.1",
+ "@typescript-eslint/utils": "8.26.1",
+ "debug": "^4.3.4",
+ "ts-api-utils": "^2.0.1"
+ },
+ "engines": {
+ "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/typescript-eslint"
+ },
+ "peerDependencies": {
+ "eslint": "^8.57.0 || ^9.0.0",
+ "typescript": ">=4.8.4 <5.9.0"
+ }
+ },
"node_modules/@typescript-eslint/types": {
- "version": "6.21.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-6.21.0.tgz",
- "integrity": "sha512-1kFmZ1rOm5epu9NZEZm1kckCDGj5UJEf7P1kliH4LKu/RkwpsfqqGmY2OOcUs18lSlQBKLDYBOGxRVtrMN5lpg==",
+ "version": "8.26.1",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.26.1.tgz",
+ "integrity": "sha512-n4THUQW27VmQMx+3P+B0Yptl7ydfceUj4ON/AQILAASwgYdZ/2dhfymRMh5egRUrvK5lSmaOm77Ry+lmXPOgBQ==",
"dev": true,
"license": "MIT",
"engines": {
- "node": "^16.0.0 || >=18.0.0"
+ "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
},
"funding": {
"type": "opencollective",
@@ -1549,32 +2046,30 @@
}
},
"node_modules/@typescript-eslint/typescript-estree": {
- "version": "6.21.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-6.21.0.tgz",
- "integrity": "sha512-6npJTkZcO+y2/kr+z0hc4HwNfrrP4kNYh57ek7yCNlrBjWQ1Y0OS7jiZTkgumrvkX5HkEKXFZkkdFNkaW2wmUQ==",
+ "version": "8.26.1",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.26.1.tgz",
+ "integrity": "sha512-yUwPpUHDgdrv1QJ7YQal3cMVBGWfnuCdKbXw1yyjArax3353rEJP1ZA+4F8nOlQ3RfS2hUN/wze3nlY+ZOhvoA==",
"dev": true,
- "license": "BSD-2-Clause",
+ "license": "MIT",
"dependencies": {
- "@typescript-eslint/types": "6.21.0",
- "@typescript-eslint/visitor-keys": "6.21.0",
+ "@typescript-eslint/types": "8.26.1",
+ "@typescript-eslint/visitor-keys": "8.26.1",
"debug": "^4.3.4",
- "globby": "^11.1.0",
+ "fast-glob": "^3.3.2",
"is-glob": "^4.0.3",
- "minimatch": "9.0.3",
- "semver": "^7.5.4",
- "ts-api-utils": "^1.0.1"
+ "minimatch": "^9.0.4",
+ "semver": "^7.6.0",
+ "ts-api-utils": "^2.0.1"
},
"engines": {
- "node": "^16.0.0 || >=18.0.0"
+ "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/typescript-eslint"
},
- "peerDependenciesMeta": {
- "typescript": {
- "optional": true
- }
+ "peerDependencies": {
+ "typescript": ">=4.8.4 <5.9.0"
}
},
"node_modules/@typescript-eslint/typescript-estree/node_modules/brace-expansion": {
@@ -1588,9 +2083,9 @@
}
},
"node_modules/@typescript-eslint/typescript-estree/node_modules/minimatch": {
- "version": "9.0.3",
- "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.3.tgz",
- "integrity": "sha512-RHiac9mvaRw0x3AYRgDC1CxAP7HTcNrrECeA8YYJeWnpo+2Q5CegtZjaotWTWxDG3UeGA1coE05iH1mPjT/2mg==",
+ "version": "9.0.5",
+ "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz",
+ "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==",
"dev": true,
"license": "ISC",
"dependencies": {
@@ -1603,22 +2098,59 @@
"url": "https://github.com/sponsors/isaacs"
}
},
- "node_modules/@typescript-eslint/visitor-keys": {
- "version": "6.21.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-6.21.0.tgz",
- "integrity": "sha512-JJtkDduxLi9bivAB+cYOVMtbkqdPOhZ+ZI5LC47MIRrDV4Yn2o+ZnW10Nkmr28xRpSpdJ6Sm42Hjf2+REYXm0A==",
+ "node_modules/@typescript-eslint/utils": {
+ "version": "8.26.1",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.26.1.tgz",
+ "integrity": "sha512-V4Urxa/XtSUroUrnI7q6yUTD3hDtfJ2jzVfeT3VK0ciizfK2q/zGC0iDh1lFMUZR8cImRrep6/q0xd/1ZGPQpg==",
"dev": true,
"license": "MIT",
"dependencies": {
- "@typescript-eslint/types": "6.21.0",
- "eslint-visitor-keys": "^3.4.1"
+ "@eslint-community/eslint-utils": "^4.4.0",
+ "@typescript-eslint/scope-manager": "8.26.1",
+ "@typescript-eslint/types": "8.26.1",
+ "@typescript-eslint/typescript-estree": "8.26.1"
},
"engines": {
- "node": "^16.0.0 || >=18.0.0"
+ "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/typescript-eslint"
+ },
+ "peerDependencies": {
+ "eslint": "^8.57.0 || ^9.0.0",
+ "typescript": ">=4.8.4 <5.9.0"
+ }
+ },
+ "node_modules/@typescript-eslint/visitor-keys": {
+ "version": "8.26.1",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.26.1.tgz",
+ "integrity": "sha512-AjOC3zfnxd6S4Eiy3jwktJPclqhFHNyd8L6Gycf9WUPoKZpgM5PjkxY1X7uSy61xVpiJDhhk7XT2NVsN3ALTWg==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@typescript-eslint/types": "8.26.1",
+ "eslint-visitor-keys": "^4.2.0"
+ },
+ "engines": {
+ "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/typescript-eslint"
+ }
+ },
+ "node_modules/@typescript-eslint/visitor-keys/node_modules/eslint-visitor-keys": {
+ "version": "4.2.0",
+ "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.2.0.tgz",
+ "integrity": "sha512-UyLnSehNt62FFhSwjZlHmeokpRK59rcz29j+F1/aDgbkbRTk7wIc9XzdoasMUbRNKDM0qQt/+BJ4BrpFeABemw==",
+ "dev": true,
+ "license": "Apache-2.0",
+ "engines": {
+ "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
+ },
+ "funding": {
+ "url": "https://opencollective.com/eslint"
}
},
"node_modules/@ungap/structured-clone": {
@@ -1803,16 +2335,6 @@
"url": "https://github.com/sponsors/ljharb"
}
},
- "node_modules/array-union": {
- "version": "2.1.0",
- "resolved": "https://registry.npmjs.org/array-union/-/array-union-2.1.0.tgz",
- "integrity": "sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">=8"
- }
- },
"node_modules/array.prototype.findlast": {
"version": "1.2.5",
"resolved": "https://registry.npmjs.org/array.prototype.findlast/-/array.prototype.findlast-1.2.5.tgz",
@@ -2491,7 +3013,7 @@
"version": "4.4.0",
"resolved": "https://registry.npmjs.org/debug/-/debug-4.4.0.tgz",
"integrity": "sha512-6WTZ/IxCY/T6BALoZHaE4ctp9xm+Z5kY/pzYaCHRFeyVhojxlrm+46y68HA6hr0TcwEssoxNiDEUJQjfPZ/RYA==",
- "dev": true,
+ "devOptional": true,
"license": "MIT",
"dependencies": {
"ms": "^2.1.3"
@@ -2579,19 +3101,6 @@
"integrity": "sha512-gxtyfqMg7GKyhQmb056K7M3xszy/myH8w+B4RT+QXBQsvAOdc3XymqDDPHx1BgPgsdAA5SIifona89YtRATDzw==",
"license": "Apache-2.0"
},
- "node_modules/dir-glob": {
- "version": "3.0.1",
- "resolved": "https://registry.npmjs.org/dir-glob/-/dir-glob-3.0.1.tgz",
- "integrity": "sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "path-type": "^4.0.0"
- },
- "engines": {
- "node": ">=8"
- }
- },
"node_modules/dlv": {
"version": "1.1.3",
"resolved": "https://registry.npmjs.org/dlv/-/dlv-1.1.3.tgz",
@@ -2844,6 +3353,60 @@
"url": "https://github.com/sponsors/ljharb"
}
},
+ "node_modules/esbuild": {
+ "version": "0.25.1",
+ "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.25.1.tgz",
+ "integrity": "sha512-BGO5LtrGC7vxnqucAe/rmvKdJllfGaYWdyABvyMoXQlfYMb2bbRuReWR5tEGE//4LcNJj9XrkovTqNYRFZHAMQ==",
+ "devOptional": true,
+ "hasInstallScript": true,
+ "license": "MIT",
+ "bin": {
+ "esbuild": "bin/esbuild"
+ },
+ "engines": {
+ "node": ">=18"
+ },
+ "optionalDependencies": {
+ "@esbuild/aix-ppc64": "0.25.1",
+ "@esbuild/android-arm": "0.25.1",
+ "@esbuild/android-arm64": "0.25.1",
+ "@esbuild/android-x64": "0.25.1",
+ "@esbuild/darwin-arm64": "0.25.1",
+ "@esbuild/darwin-x64": "0.25.1",
+ "@esbuild/freebsd-arm64": "0.25.1",
+ "@esbuild/freebsd-x64": "0.25.1",
+ "@esbuild/linux-arm": "0.25.1",
+ "@esbuild/linux-arm64": "0.25.1",
+ "@esbuild/linux-ia32": "0.25.1",
+ "@esbuild/linux-loong64": "0.25.1",
+ "@esbuild/linux-mips64el": "0.25.1",
+ "@esbuild/linux-ppc64": "0.25.1",
+ "@esbuild/linux-riscv64": "0.25.1",
+ "@esbuild/linux-s390x": "0.25.1",
+ "@esbuild/linux-x64": "0.25.1",
+ "@esbuild/netbsd-arm64": "0.25.1",
+ "@esbuild/netbsd-x64": "0.25.1",
+ "@esbuild/openbsd-arm64": "0.25.1",
+ "@esbuild/openbsd-x64": "0.25.1",
+ "@esbuild/sunos-x64": "0.25.1",
+ "@esbuild/win32-arm64": "0.25.1",
+ "@esbuild/win32-ia32": "0.25.1",
+ "@esbuild/win32-x64": "0.25.1"
+ }
+ },
+ "node_modules/esbuild-register": {
+ "version": "3.6.0",
+ "resolved": "https://registry.npmjs.org/esbuild-register/-/esbuild-register-3.6.0.tgz",
+ "integrity": "sha512-H2/S7Pm8a9CL1uhp9OvjwrBh5Pvx0H8qVOxNu8Wed9Y7qv56MPtq+GGM8RJpq6glYJn9Wspr8uw7l55uyinNeg==",
+ "devOptional": true,
+ "license": "MIT",
+ "dependencies": {
+ "debug": "^4.3.4"
+ },
+ "peerDependencies": {
+ "esbuild": ">=0.12 <1"
+ }
+ },
"node_modules/escalade": {
"version": "3.2.0",
"resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz",
@@ -2931,15 +3494,16 @@
}
},
"node_modules/eslint-config-next": {
- "version": "14.1.0",
- "resolved": "https://registry.npmjs.org/eslint-config-next/-/eslint-config-next-14.1.0.tgz",
- "integrity": "sha512-SBX2ed7DoRFXC6CQSLc/SbLY9Ut6HxNB2wPTcoIWjUMd7aF7O/SIE7111L8FdZ9TXsNV4pulUDnfthpyPtbFUg==",
+ "version": "14.2.25",
+ "resolved": "https://registry.npmjs.org/eslint-config-next/-/eslint-config-next-14.2.25.tgz",
+ "integrity": "sha512-BwuRQJeqw4xP/fkul/WWjivwbaLs8AjvuMzQCC+nJI65ZVhnVolWs6tk5VSD92xPHu96gSTahfaSkQjIRtJ3ag==",
"dev": true,
"license": "MIT",
"dependencies": {
- "@next/eslint-plugin-next": "14.1.0",
+ "@next/eslint-plugin-next": "14.2.25",
"@rushstack/eslint-patch": "^1.3.3",
- "@typescript-eslint/parser": "^5.4.2 || ^6.0.0",
+ "@typescript-eslint/eslint-plugin": "^5.4.2 || ^6.0.0 || ^7.0.0 || ^8.0.0",
+ "@typescript-eslint/parser": "^5.4.2 || ^6.0.0 || ^7.0.0 || ^8.0.0",
"eslint-import-resolver-node": "^0.3.6",
"eslint-import-resolver-typescript": "^3.5.2",
"eslint-plugin-import": "^2.28.1",
@@ -3865,27 +4429,6 @@
"url": "https://github.com/sponsors/ljharb"
}
},
- "node_modules/globby": {
- "version": "11.1.0",
- "resolved": "https://registry.npmjs.org/globby/-/globby-11.1.0.tgz",
- "integrity": "sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "array-union": "^2.1.0",
- "dir-glob": "^3.0.1",
- "fast-glob": "^3.2.9",
- "ignore": "^5.2.0",
- "merge2": "^1.4.1",
- "slash": "^3.0.0"
- },
- "engines": {
- "node": ">=10"
- },
- "funding": {
- "url": "https://github.com/sponsors/sindresorhus"
- }
- },
"node_modules/gopd": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz",
@@ -4960,13 +5503,13 @@
}
},
"node_modules/next": {
- "version": "14.1.0",
- "resolved": "https://registry.npmjs.org/next/-/next-14.1.0.tgz",
- "integrity": "sha512-wlzrsbfeSU48YQBjZhDzOwhWhGsy+uQycR8bHAOt1LY1bn3zZEcDyHQOEoN3aWzQ8LHCAJ1nqrWCc9XF2+O45Q==",
+ "version": "14.2.25",
+ "resolved": "https://registry.npmjs.org/next/-/next-14.2.25.tgz",
+ "integrity": "sha512-N5M7xMc4wSb4IkPvEV5X2BRRXUmhVHNyaXwEM86+voXthSZz8ZiRyQW4p9mwAoAPIm6OzuVZtn7idgEJeAJN3Q==",
"license": "MIT",
"dependencies": {
- "@next/env": "14.1.0",
- "@swc/helpers": "0.5.2",
+ "@next/env": "14.2.25",
+ "@swc/helpers": "0.5.5",
"busboy": "1.6.0",
"caniuse-lite": "^1.0.30001579",
"graceful-fs": "^4.2.11",
@@ -4980,18 +5523,19 @@
"node": ">=18.17.0"
},
"optionalDependencies": {
- "@next/swc-darwin-arm64": "14.1.0",
- "@next/swc-darwin-x64": "14.1.0",
- "@next/swc-linux-arm64-gnu": "14.1.0",
- "@next/swc-linux-arm64-musl": "14.1.0",
- "@next/swc-linux-x64-gnu": "14.1.0",
- "@next/swc-linux-x64-musl": "14.1.0",
- "@next/swc-win32-arm64-msvc": "14.1.0",
- "@next/swc-win32-ia32-msvc": "14.1.0",
- "@next/swc-win32-x64-msvc": "14.1.0"
+ "@next/swc-darwin-arm64": "14.2.25",
+ "@next/swc-darwin-x64": "14.2.25",
+ "@next/swc-linux-arm64-gnu": "14.2.25",
+ "@next/swc-linux-arm64-musl": "14.2.25",
+ "@next/swc-linux-x64-gnu": "14.2.25",
+ "@next/swc-linux-x64-musl": "14.2.25",
+ "@next/swc-win32-arm64-msvc": "14.2.25",
+ "@next/swc-win32-ia32-msvc": "14.2.25",
+ "@next/swc-win32-x64-msvc": "14.2.25"
},
"peerDependencies": {
"@opentelemetry/api": "^1.1.0",
+ "@playwright/test": "^1.41.2",
"react": "^18.2.0",
"react-dom": "^18.2.0",
"sass": "^1.3.0"
@@ -5000,6 +5544,9 @@
"@opentelemetry/api": {
"optional": true
},
+ "@playwright/test": {
+ "optional": true
+ },
"sass": {
"optional": true
}
@@ -5380,16 +5927,6 @@
"integrity": "sha512-RA1GjUVMnvYFxuqovrEqZoxxW5NUZqbwKtYz/Tt7nXerk0LbLblQmrsgdeOxV5SFHf0UDggjS/bSeOZwt1pmEQ==",
"license": "MIT"
},
- "node_modules/path-type": {
- "version": "4.0.0",
- "resolved": "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz",
- "integrity": "sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">=8"
- }
- },
"node_modules/picocolors": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz",
@@ -5590,20 +6127,32 @@
}
},
"node_modules/prisma": {
- "version": "5.10.2",
- "resolved": "https://registry.npmjs.org/prisma/-/prisma-5.10.2.tgz",
- "integrity": "sha512-hqb/JMz9/kymRE25pMWCxkdyhbnIWrq+h7S6WysJpdnCvhstbJSNP/S6mScEcqiB8Qv2F+0R3yG+osRaWqZacQ==",
+ "version": "6.5.0",
+ "resolved": "https://registry.npmjs.org/prisma/-/prisma-6.5.0.tgz",
+ "integrity": "sha512-yUGXmWqv5F4PByMSNbYFxke/WbnyTLjnJ5bKr8fLkcnY7U5rU9rUTh/+Fja+gOrRxEgtCbCtca94IeITj4j/pg==",
"devOptional": true,
"hasInstallScript": true,
"license": "Apache-2.0",
"dependencies": {
- "@prisma/engines": "5.10.2"
+ "@prisma/config": "6.5.0",
+ "@prisma/engines": "6.5.0"
},
"bin": {
"prisma": "build/index.js"
},
"engines": {
- "node": ">=16.13"
+ "node": ">=18.18"
+ },
+ "optionalDependencies": {
+ "fsevents": "2.3.3"
+ },
+ "peerDependencies": {
+ "typescript": ">=5.1.0"
+ },
+ "peerDependenciesMeta": {
+ "typescript": {
+ "optional": true
+ }
}
},
"node_modules/prop-types": {
@@ -6304,16 +6853,6 @@
"url": "https://github.com/sponsors/isaacs"
}
},
- "node_modules/slash": {
- "version": "3.0.0",
- "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz",
- "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">=8"
- }
- },
"node_modules/source-map-js": {
"version": "1.2.1",
"resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.1.tgz",
@@ -6804,16 +7343,16 @@
}
},
"node_modules/ts-api-utils": {
- "version": "1.4.3",
- "resolved": "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-1.4.3.tgz",
- "integrity": "sha512-i3eMG77UTMD0hZhgRS562pv83RC6ukSAC2GMNWc+9dieh/+jDM5u5YG+NHX6VNDRHQcHwmsTHctP9LhbC3WxVw==",
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-2.0.1.tgz",
+ "integrity": "sha512-dnlgjFSVetynI8nzgJ+qF62efpglpWRk8isUEWZGWlJYySCTD6aKvbUDu+zbPeDakk3bg5H4XpitHukgfL1m9w==",
"dev": true,
"license": "MIT",
"engines": {
- "node": ">=16"
+ "node": ">=18.12"
},
"peerDependencies": {
- "typescript": ">=4.2.0"
+ "typescript": ">=4.8.4"
}
},
"node_modules/ts-interface-checker": {
@@ -6962,7 +7501,7 @@
"version": "5.8.2",
"resolved": "https://registry.npmjs.org/typescript/-/typescript-5.8.2.tgz",
"integrity": "sha512-aJn6wq13/afZp/jT9QZmwEjDqqvSGp1VT5GVg+f/t6/oVyrgXM6BY1h9BRh/O5p3PlUPAe+WuiEZOmb/49RqoQ==",
- "dev": true,
+ "devOptional": true,
"license": "Apache-2.0",
"bin": {
"tsc": "bin/tsc",
diff --git a/package.json b/package.json
index 5acc48f..eec1993 100644
--- a/package.json
+++ b/package.json
@@ -12,7 +12,7 @@
"prisma:studio": "prisma studio"
},
"dependencies": {
- "@prisma/client": "5.10.2",
+ "@prisma/client": "^6.5.0",
"@radix-ui/react-avatar": "^1.0.4",
"@radix-ui/react-dialog": "^1.0.5",
"@radix-ui/react-dropdown-menu": "^2.0.6",
@@ -22,16 +22,16 @@
"@radix-ui/react-tabs": "^1.0.4",
"@radix-ui/react-toast": "^1.1.5",
"class-variance-authority": "^0.7.0",
- "clsx": "^2.1.0",
+ "clsx": "^2.1.1",
"express": "^4.18.2",
"framer-motion": "^11.0.5",
"lucide-react": "^0.331.0",
- "next": "14.1.0",
+ "next": "^14.2.25",
"next-themes": "^0.2.1",
"nuqs": "^1.16.0",
"react": "^18.2.0",
"react-dom": "^18.2.0",
- "tailwind-merge": "^2.2.1",
+ "tailwind-merge": "^2.6.0",
"tailwindcss-animate": "^1.0.7",
"zod": "^3.22.4"
},
@@ -42,10 +42,10 @@
"@types/react-dom": "^18.2.19",
"autoprefixer": "^10.4.17",
"eslint": "^8.56.0",
- "eslint-config-next": "14.1.0",
+ "eslint-config-next": "^14.2.25",
"postcss": "^8.4.35",
- "prisma": "^5.10.2",
+ "prisma": "^6.5.0",
"tailwindcss": "^3.4.1",
"typescript": "^5.3.3"
}
-}
\ No newline at end of file
+}
diff --git a/prisma/schema.prisma b/prisma/schema.prisma
index 6679af9..bac4395 100644
--- a/prisma/schema.prisma
+++ b/prisma/schema.prisma
@@ -8,27 +8,26 @@ datasource db {
}
model Contact {
- id String @id @default(cuid())
- ethereumAddress String @unique
+ id String @id @default(cuid())
+ ethereumAddress String @unique
+ ensName String?
+ name String?
+ email String?
+ twitter String?
+ discord String?
+ telegram String?
+ createdAt DateTime @default(now())
+ updatedAt DateTime @updatedAt
+ farcaster String?
+ otherSocial String?
+ warpcastAddress String?
ethereumAddress2 String?
- warpcastAddress String?
- ensName String?
- name String?
- farcaster String?
- twitter String?
- discord String?
- telegram String?
- email String?
- otherSocial String?
- createdAt DateTime @default(now())
- updatedAt DateTime @updatedAt
-
- // Relations
- nftHoldings NftHolding[]
- tokenHoldings TokenHolding[]
- daoMemberships DaoMembership[]
- notes Note[]
- tags TagsOnContacts[]
+ ContactSource ContactSource[]
+ daoMemberships DaoMembership[]
+ nftHoldings NftHolding[]
+ notes Note[]
+ tags TagsOnContacts[]
+ tokenHoldings TokenHolding[]
}
model NftHolding {
@@ -101,13 +100,14 @@ model TagsOnContacts {
}
model DataSource {
- id String @id @default(cuid())
- name String @unique
- type String
- description String?
- lastScraped DateTime?
- createdAt DateTime @default(now())
- updatedAt DateTime @updatedAt
+ id String @id @default(cuid())
+ name String @unique
+ type String
+ description String?
+ lastScraped DateTime?
+ createdAt DateTime @default(now())
+ updatedAt DateTime @updatedAt
+ ContactSource ContactSource[]
}
model ScrapingJob {
@@ -123,3 +123,17 @@ model ScrapingJob {
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
}
+
+model ContactSource {
+ id String @id
+ contactId String
+ dataSourceId String
+ createdAt DateTime @db.Timestamp(6)
+ updatedAt DateTime @db.Timestamp(6)
+ Contact Contact @relation(fields: [contactId], references: [id], onDelete: Cascade, onUpdate: NoAction)
+ DataSource DataSource @relation(fields: [dataSourceId], references: [id], onDelete: Cascade, onUpdate: NoAction)
+
+ @@unique([contactId, dataSourceId])
+ @@index([contactId])
+ @@index([dataSourceId])
+}
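+
+// A sketch of how this join table is expected to be populated. The importer
+// scripts call a link_contact_to_data_source helper whose SQL is not shown
+// here; an idempotent insert along these lines would satisfy the schema
+// (the id column has no default, so the caller must supply one):
+//
+//   INSERT INTO "ContactSource" ("id", "contactId", "dataSourceId", "createdAt", "updatedAt")
+//   VALUES ($1, $2, $3, NOW(), NOW())
+//   ON CONFLICT ("contactId", "dataSourceId") DO NOTHING;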
diff --git a/push-to-gitea.sh b/push-to-gitea.sh
new file mode 100755
index 0000000..7bdf7e0
--- /dev/null
+++ b/push-to-gitea.sh
@@ -0,0 +1,56 @@
+#!/bin/bash
+# Script to push changes to Gitea repository
+
+set -e # Exit immediately if a command exits with a non-zero status
+
+# Colors for pretty output
+GREEN='\033[0;32m'
+YELLOW='\033[1;33m'
+RED='\033[0;31m'
+NC='\033[0m' # No Color
+
+# Check if git is installed
+if ! command -v git &> /dev/null; then
+ echo -e "${RED}Error: git is not installed. Please install git first.${NC}"
+ exit 1
+fi
+
+# Check if we're in a git repository
+if ! git rev-parse --is-inside-work-tree &> /dev/null; then
+ echo -e "${RED}Error: Not a git repository. Please run this script from within a git repository.${NC}"
+ exit 1
+fi
+
+# Check if remote already exists
+if git remote | grep -q "gitea"; then
+ echo -e "${YELLOW}Remote 'gitea' already exists.${NC}"
+else
+ echo -e "${YELLOW}Adding 'gitea' remote...${NC}"
+ git remote add gitea git@git.boilerhaus.org:boiler/stones.git
+fi
+
+# Get current branch
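+# Note: symbolic-ref fails on a detached HEAD, and with set -e the script
+# exits at that point, so check out a branch before running this script.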
+CURRENT_BRANCH=$(git symbolic-ref --short HEAD)
+echo -e "${YELLOW}Current branch: ${CURRENT_BRANCH}${NC}"
+
+# Check for uncommitted changes
+if ! git diff-index --quiet HEAD --; then
+ echo -e "${YELLOW}You have uncommitted changes.${NC}"
+ read -p "Do you want to commit them? (y/n): " -n 1 -r
+ echo
+ if [[ $REPLY =~ ^[Yy]$ ]]; then
+ read -p "Enter commit message: " COMMIT_MSG
+ git add .
+ git commit -m "$COMMIT_MSG"
+ else
+ echo -e "${YELLOW}Continuing without committing changes.${NC}"
+ fi
+fi
+
+# Push to Gitea
+echo -e "${YELLOW}Pushing to Gitea...${NC}"
+git push -u gitea "$CURRENT_BRANCH"
+
+echo -e "${GREEN}Successfully pushed to Gitea!${NC}"
+echo -e "${GREEN}Repository URL: git@git.boilerhaus.org:boiler/stones.git${NC}"
+echo -e "${YELLOW}To deploy, run the deploy.sh script on your server.${NC}"
\ No newline at end of file
diff --git a/run-dev.sh b/run-dev.sh
new file mode 100755
index 0000000..a26c791
--- /dev/null
+++ b/run-dev.sh
@@ -0,0 +1,119 @@
+#!/bin/bash
+# Script to run a development server for the Stones Database application
+
+set -e # Exit immediately if a command exits with a non-zero status
+
+# Colors for pretty output
+GREEN='\033[0;32m'
+YELLOW='\033[1;33m'
+RED='\033[0;31m'
+NC='\033[0m' # No Color
+
+# Check for required tools
+echo -e "${YELLOW}Checking for required tools...${NC}"
+
+# Check if Node.js is installed
+if ! command -v node &> /dev/null; then
+ echo -e "${RED}Error: Node.js is not installed. Please install Node.js first.${NC}"
+ exit 1
+fi
+
+# Check Node.js version
+NODE_VERSION=$(node -v | cut -d "v" -f 2 | cut -d "." -f 1)
+if [ "$NODE_VERSION" -lt "16" ]; then
+ echo -e "${RED}Error: Node.js version 16 or higher is required. Current version: $(node -v)${NC}"
+ exit 1
+fi
+
+# Check if npm is installed
+if ! command -v npm &> /dev/null; then
+ echo -e "${RED}Error: npm is not installed. Please install npm first.${NC}"
+ exit 1
+fi
+
+# Print npm and Node.js versions for diagnostic purposes
+echo -e "${YELLOW}Node.js version: $(node -v)${NC}"
+echo -e "${YELLOW}npm version: $(npm -v)${NC}"
+
+# Check Next.js version for diagnostic purposes
+echo -e "${YELLOW}Next.js version: $(npm list next | grep next@ | head -1)${NC}"
+
+# Check for .env file
+if [ ! -f ".env.local" ] && [ ! -f ".env" ]; then
+ echo -e "${YELLOW}Creating .env.local file with development settings...${NC}"
+ cat > .env.local << EOF
+# Database Connection for Development
+DATABASE_URL="postgresql://postgres:postgres@localhost:5432/stones"
+
+# Authentication
+AUTH_SECRET="dev-secret-key-for-testing"
+
+# Application
+NEXT_PUBLIC_APP_URL="http://localhost:3000"
+EOF
+ echo -e "${YELLOW}Please update the .env.local file with your actual development database values.${NC}"
+fi
+
+# Install dependencies if node_modules doesn't exist
+if [ ! -d "node_modules" ]; then
+ echo -e "${YELLOW}Installing dependencies...${NC}"
+ npm install
+else
+ echo -e "${YELLOW}Dependencies already installed. To reinstall, remove the node_modules directory.${NC}"
+fi
+
+# Run database migrations if schema.prisma exists
+if [ -f "prisma/schema.prisma" ]; then
+ echo -e "${YELLOW}Running database migrations...${NC}"
+ npx prisma migrate dev --name dev-migration
+
+ echo -e "${YELLOW}Generating Prisma client...${NC}"
+ npx prisma generate
+fi
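+# (prisma migrate dev is intended for development; a production deployment
+# would typically run `npx prisma migrate deploy` instead.)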
+
+# Check PostgreSQL connectivity
+echo -e "${YELLOW}Checking PostgreSQL connectivity...${NC}"
+if command -v pg_isready &> /dev/null; then
+ if pg_isready -h localhost -p 5432; then
+ echo -e "${GREEN}PostgreSQL server is running at localhost:5432${NC}"
+ else
+ echo -e "${RED}Warning: PostgreSQL server at localhost:5432 is not responding${NC}"
+ echo -e "${RED}Please ensure your PostgreSQL server is running${NC}"
+ fi
+else
+ echo -e "${YELLOW}pg_isready not found, skipping PostgreSQL connectivity check${NC}"
+fi
+
+# Add network debug information
+echo -e "${YELLOW}Network interfaces:${NC}"
+ip addr | grep "inet " | awk '{print $2}' | cut -d/ -f1
+echo ""
+
+# Create next.config.js file with proper configuration
+echo -e "${YELLOW}Creating/updating next.config.js file...${NC}"
+if [ -f "next.config.js" ]; then
+ mv next.config.js next.config.js.bak
+fi
+
+cat > next.config.js << EOF
+/** @type {import('next').NextConfig} */
+const nextConfig = {
+ reactStrictMode: true,
+ swcMinify: true,
+}
+
+module.exports = nextConfig
+EOF
+
+# Bind the Next.js dev server to all interfaces for network access
+echo -e "${YELLOW}Starting Next.js bound to 0.0.0.0 to enable network access${NC}"
+echo -e "${GREEN}Starting development server...${NC}"
+echo -e "${GREEN}The application will be available at:${NC}"
+echo -e "${GREEN} - http://localhost:3000${NC}"
+ip addr | grep "inet " | grep -v "127.0.0.1" | awk '{print " - http://" $2 ":3000"}' | cut -d/ -f1
+
+# next dev takes the bind address from the -H flag rather than a HOST variable
+npm run dev -- -H 0.0.0.0
+
+# This part will execute when the server is stopped
+echo -e "${YELLOW}Development server stopped.${NC}"
\ No newline at end of file
diff --git a/scripts/moloch_dao/check_daohaus_api.py b/scripts/moloch_dao/check_daohaus_api.py
new file mode 100644
index 0000000..9eb6909
--- /dev/null
+++ b/scripts/moloch_dao/check_daohaus_api.py
@@ -0,0 +1,78 @@
+#!/usr/bin/env python3
+"""
+Check DAOhaus API Response
+
+This script checks the response from the DAOhaus API for Public Haus DAO.
+"""
+
+import requests
+import sys
+
+# Constants
+DAOHAUS_API_URL = "https://admin.daohaus.club/api"
+PUBLIC_HAUS_DAO_ID = "0xf5d6b637a9185707f52d40d452956ca49018247a" # Public Haus DAO ID on Optimism
+CHAIN_ID = "10" # Optimism chain ID
+
+def check_api_response(url):
+ """
+ Check the response from an API URL
+
+ Args:
+ url: The URL to check
+ """
+ print(f"Checking URL: {url}")
+
+ try:
+ # Make request to API
+ response = requests.get(url)
+
+ # Print status code
+ print(f"Status code: {response.status_code}")
+
+ # Print headers
+ print("Headers:")
+ for key, value in response.headers.items():
+ print(f" {key}: {value}")
+
+ # Print content
+ print("\nContent:")
+ print(response.text)
+
+ # Try to parse as JSON
+ try:
+ data = response.json()
+ print("\nJSON data:")
+ print(data)
+ except Exception as e:
+ print(f"\nError parsing JSON: {e}")
+
+ except Exception as e:
+ print(f"Exception checking API: {e}")
+
+def main():
+ """Main function"""
+ # Check DAO info endpoint
+ dao_url = f"{DAOHAUS_API_URL}/dao/{CHAIN_ID}/{PUBLIC_HAUS_DAO_ID}"
+ print(f"Checking DAO info endpoint: {dao_url}")
+ check_api_response(dao_url)
+
+ print("\n" + "=" * 80 + "\n")
+
+ # Check members endpoint
+ members_url = f"{DAOHAUS_API_URL}/dao/{CHAIN_ID}/{PUBLIC_HAUS_DAO_ID}/members"
+ print(f"Checking members endpoint: {members_url}")
+ check_api_response(members_url)
+
+ # Try alternative API URL
+ print("\n" + "=" * 80 + "\n")
+ print("Trying alternative API URL...")
+
+ alt_api_url = "https://api.daohaus.club/api"
+ alt_dao_url = f"{alt_api_url}/dao/{CHAIN_ID}/{PUBLIC_HAUS_DAO_ID}"
+ print(f"Checking alternative DAO info endpoint: {alt_dao_url}")
+ check_api_response(alt_dao_url)
+
+ return 0
+
+if __name__ == "__main__":
+ sys.exit(main())
\ No newline at end of file
diff --git a/scripts/moloch_dao/check_subgraph.py b/scripts/moloch_dao/check_subgraph.py
new file mode 100644
index 0000000..87f10e7
--- /dev/null
+++ b/scripts/moloch_dao/check_subgraph.py
@@ -0,0 +1,147 @@
+#!/usr/bin/env python3
+"""
+Check DAOhaus v3 Subgraph
+
+This script checks if the DAOhaus v3 subgraph on Optimism is responding
+and lists any available DAOs without filtering.
+"""
+
+import sys
+import requests
+
+# Constants
+SUBGRAPH_URLS = [
+ "https://api.thegraph.com/subgraphs/name/hausdao/daohaus-v3-optimism",
+ "https://api.thegraph.com/subgraphs/name/hausdao/daohaus-v3",
+ "https://api.thegraph.com/subgraphs/name/hausdao/daohaus-v3-goerli",
+ "https://api.thegraph.com/subgraphs/name/hausdao/daohaus-v3-gnosis",
+ "https://api.thegraph.com/subgraphs/name/hausdao/daohaus-v3-arbitrum",
+ "https://api.thegraph.com/subgraphs/name/hausdao/daohaus-v3-polygon",
+ "https://api.thegraph.com/subgraphs/name/hausdao/daohaus-v3-celo"
+]
+
+def check_subgraph(url):
+ """
+ Check if a subgraph is responding
+
+ Args:
+ url: The subgraph URL to check
+
+ Returns:
+ True if responding, False otherwise
+ """
+ # Simple query to check if subgraph is responding
+ query = """
+ query {
+ _meta {
+ block {
+ number
+ }
+ deployment
+ hasIndexingErrors
+ }
+ }
+ """
+
+ try:
+ # Make request to subgraph
+ response = requests.post(
+ url,
+ json={"query": query}
+ )
+
+ # Check for errors
+ if response.status_code != 200:
+ print(f"Error checking subgraph: {response.text}")
+ return False
+
+ data = response.json()
+
+ # Check if response has data
+ if not data.get("data") or not data["data"].get("_meta"):
+ print(f"Invalid response from subgraph: {data}")
+ return False
+
+ # Get meta data
+ meta = data["data"]["_meta"]
+ print(f"Subgraph is responding at {url}")
+ print(f"Block number: {meta['block']['number']}")
+ print(f"Deployment: {meta['deployment']}")
+ print(f"Has indexing errors: {meta['hasIndexingErrors']}")
+ print("-" * 50)
+
+ return True
+ except Exception as e:
+ print(f"Exception checking subgraph: {e}")
+ return False
+
+def list_daos(url):
+ """
+ List all DAOs in a subgraph
+
+ Args:
+ url: The subgraph URL to query
+ """
+ # GraphQL query to list all DAOs
+ query = """
+ query {
+ daos(first: 10) {
+ id
+ name
+ createdAt
+ totalShares
+ totalLoot
+ activeMemberCount
+ }
+ }
+ """
+
+ try:
+ # Make request to subgraph
+ response = requests.post(
+ url,
+ json={"query": query}
+ )
+
+ # Check for errors
+ if response.status_code != 200:
+ print(f"Error listing DAOs: {response.text}")
+ return
+
+ data = response.json()
+
+ # Check if DAOs exist
+ if not data.get("data") or not data["data"].get("daos"):
+ print("No DAOs found")
+ return
+
+ # Get DAOs
+ daos = data["data"]["daos"]
+ print(f"Found {len(daos)} DAOs")
+
+ # Print results
+ for dao in daos:
+ print(f"ID: {dao['id']}")
+ print(f"Name: {dao['name']}")
+ print(f"Created: {dao['createdAt']}")
+ print(f"Members: {dao['activeMemberCount']}")
+ print(f"Shares: {dao['totalShares']}")
+ print(f"Loot: {dao['totalLoot']}")
+ print("-" * 50)
+ except Exception as e:
+ print(f"Exception listing DAOs: {e}")
+
+def main():
+ """Main function"""
+ print("Checking DAOhaus v3 subgraphs...")
+
+ for url in SUBGRAPH_URLS:
+ print(f"\nChecking subgraph at {url}...")
+ if check_subgraph(url):
+ print("\nListing DAOs in subgraph...")
+ list_daos(url)
+
+ return 0
+
+if __name__ == "__main__":
+    sys.exit(main())
\ No newline at end of file
diff --git a/scripts/moloch_dao/check_subgraph_status.py b/scripts/moloch_dao/check_subgraph_status.py
new file mode 100644
index 0000000..2226de3
--- /dev/null
+++ b/scripts/moloch_dao/check_subgraph_status.py
@@ -0,0 +1,70 @@
+#!/usr/bin/env python3
+"""
+Check DAOhaus v3 Subgraph Status
+
+This script checks the status of the DAOhaus v3 subgraph with a simple query.
+"""
+
+import sys
+import requests
+import json
+
+# Constants
+SUBGRAPH_URL = "https://api.thegraph.com/subgraphs/id/HouDe2pTdyKM9CTG1aodnPPPhm7U148BCH7eJ4HHwpdQ"
+
+def check_subgraph_status():
+ """
+ Check the status of the subgraph
+ """
+ # Simple query to check if subgraph is responding
+ query = """
+ query {
+ _meta {
+ block {
+ number
+ }
+ deployment
+ hasIndexingErrors
+ }
+ }
+ """
+
+ print(f"Checking subgraph at {SUBGRAPH_URL}...")
+
+ try:
+ # Make request to subgraph
+ response = requests.post(
+ SUBGRAPH_URL,
+ json={"query": query}
+ )
+
+ # Print status code
+ print(f"Status code: {response.status_code}")
+
+ # Print headers
+ print("Headers:")
+ for key, value in response.headers.items():
+ print(f" {key}: {value}")
+
+ # Print content
+ print("\nContent:")
+ print(response.text)
+
+ # Try to parse as JSON
+ try:
+ data = response.json()
+ print("\nJSON data:")
+ print(json.dumps(data, indent=2))
+ except Exception as e:
+ print(f"\nError parsing JSON: {e}")
+
+ except Exception as e:
+ print(f"Exception checking subgraph: {e}")
+
+def main():
+ """Main function"""
+ check_subgraph_status()
+
+ return 0
+
+if __name__ == "__main__":
+    sys.exit(main())
\ No newline at end of file
diff --git a/scripts/moloch_dao/explore_daohaus_subgraph.py b/scripts/moloch_dao/explore_daohaus_subgraph.py
new file mode 100644
index 0000000..f8ec569
--- /dev/null
+++ b/scripts/moloch_dao/explore_daohaus_subgraph.py
@@ -0,0 +1,227 @@
+#!/usr/bin/env python3
+"""
+Explore DAOhaus v3 Subgraph
+
+This script explores the DAOhaus v3 subgraph and lists all available DAOs.
+It can also search for DAOs by name or ID.
+"""
+
+import sys
+import requests
+import argparse
+
+# Constants
+SUBGRAPH_URL = "https://api.thegraph.com/subgraphs/id/HouDe2pTdyKM9CTG1aodnPPPhm7U148BCH7eJ4HHwpdQ"
+
+def list_daos(limit=100, skip=0):
+ """
+ List all DAOs in the subgraph
+
+ Args:
+ limit: Maximum number of DAOs to return
+ skip: Number of DAOs to skip
+
+ Returns:
+ List of DAOs
+ """
+ # GraphQL query to list all DAOs
+ query = """
+ query {
+ daos(first: %d, skip: %d, orderBy: createdAt, orderDirection: desc) {
+ id
+ name
+ createdAt
+ totalShares
+ totalLoot
+ activeMemberCount
+ }
+ }
+ """ % (limit, skip)
+
+ # Make request to subgraph
+ response = requests.post(
+ SUBGRAPH_URL,
+ json={"query": query}
+ )
+
+ # Check for errors
+ if response.status_code != 200:
+ print(f"Error listing DAOs: {response.text}")
+ return []
+
+ data = response.json()
+
+ # Check if DAOs exist
+ if not data.get("data") or not data["data"].get("daos"):
+ print("No DAOs found")
+ return []
+
+ # Get DAOs
+ daos = data["data"]["daos"]
+ print(f"Found {len(daos)} DAOs")
+
+ return daos
+
+def search_daos_by_name(name, limit=100):
+ """
+ Search for DAOs by name
+
+ Args:
+ name: Name to search for
+ limit: Maximum number of DAOs to return
+
+ Returns:
+ List of matching DAOs
+ """
+ # GraphQL query to search for DAOs by name
+ query = """
+ query {
+ daos(first: %d, where: {name_contains_nocase: "%s"}, orderBy: createdAt, orderDirection: desc) {
+ id
+ name
+ createdAt
+ totalShares
+ totalLoot
+ activeMemberCount
+ }
+ }
+ """ % (limit, name)
+
+ # Make request to subgraph
+ response = requests.post(
+ SUBGRAPH_URL,
+ json={"query": query}
+ )
+
+ # Check for errors
+ if response.status_code != 200:
+ print(f"Error searching DAOs: {response.text}")
+ return []
+
+ data = response.json()
+
+ # Check if DAOs exist
+ if not data.get("data") or not data["data"].get("daos"):
+ print(f"No DAOs found with name containing '{name}'")
+ return []
+
+ # Get DAOs
+ daos = data["data"]["daos"]
+ print(f"Found {len(daos)} DAOs with name containing '{name}'")
+
+ return daos
+
+def get_dao_by_id(dao_id):
+ """
+ Get a DAO by ID
+
+ Args:
+ dao_id: ID of the DAO to get
+
+ Returns:
+ DAO data if found, None otherwise
+ """
+ # GraphQL query to get a DAO by ID
+ query = """
+ query {
+ dao(id: "%s") {
+ id
+ name
+ createdAt
+ totalShares
+ totalLoot
+ activeMemberCount
+ members {
+ id
+ memberAddress
+ shares
+ loot
+ createdAt
+ }
+ }
+ }
+ """ % dao_id.lower()
+
+ # Make request to subgraph
+ response = requests.post(
+ SUBGRAPH_URL,
+ json={"query": query}
+ )
+
+ # Check for errors
+ if response.status_code != 200:
+ print(f"Error getting DAO: {response.text}")
+ return None
+
+ data = response.json()
+
+ # Check if DAO exists
+ if not data.get("data") or not data["data"].get("dao"):
+ print(f"DAO not found with ID: {dao_id}")
+ return None
+
+ # Get DAO
+ dao = data["data"]["dao"]
+ print(f"Found DAO with ID: {dao_id}")
+ print(f"Name: {dao['name']}")
+ print(f"Created: {dao['createdAt']}")
+ print(f"Members: {dao['activeMemberCount']}")
+ print(f"Shares: {dao['totalShares']}")
+ print(f"Loot: {dao['totalLoot']}")
+ print(f"Member count: {len(dao['members'])}")
+
+ return dao
+
+def print_dao_info(dao):
+ """
+ Print information about a DAO
+
+ Args:
+ dao: DAO data to print
+ """
+ print(f"ID: {dao['id']}")
+ print(f"Name: {dao['name']}")
+ print(f"Created: {dao['createdAt']}")
+ print(f"Members: {dao['activeMemberCount']}")
+ print(f"Shares: {dao['totalShares']}")
+ print(f"Loot: {dao['totalLoot']}")
+ print("-" * 50)
+
+def main():
+ """Main function"""
+ parser = argparse.ArgumentParser(description="Explore DAOhaus v3 Subgraph")
+ parser.add_argument("--list", action="store_true", help="List all DAOs")
+ parser.add_argument("--search", type=str, help="Search for DAOs by name")
+ parser.add_argument("--id", type=str, help="Get a DAO by ID")
+ parser.add_argument("--limit", type=int, default=100, help="Maximum number of DAOs to return")
+ parser.add_argument("--skip", type=int, default=0, help="Number of DAOs to skip")
+
+ args = parser.parse_args()
+
+ if args.id:
+ # Get a DAO by ID
+ dao = get_dao_by_id(args.id)
+ if dao:
+ print_dao_info(dao)
+ elif args.search:
+ # Search for DAOs by name
+ daos = search_daos_by_name(args.search, args.limit)
+ for dao in daos:
+ print_dao_info(dao)
+ elif args.list:
+ # List all DAOs
+ daos = list_daos(args.limit, args.skip)
+ for dao in daos:
+ print_dao_info(dao)
+ else:
+ # Default to listing all DAOs
+ print("Listing all DAOs...")
+ daos = list_daos(args.limit, args.skip)
+ for dao in daos:
+ print_dao_info(dao)
+
+ return 0
+
+if __name__ == "__main__":
+ sys.exit(main())
\ No newline at end of file
diff --git a/scripts/moloch_dao/find_public_haus_dao.py b/scripts/moloch_dao/find_public_haus_dao.py
new file mode 100644
index 0000000..7879ec8
--- /dev/null
+++ b/scripts/moloch_dao/find_public_haus_dao.py
@@ -0,0 +1,205 @@
+#!/usr/bin/env python3
+"""
+Find Public Haus DAO ID
+
+This script queries the DAOhaus v3 subgraph on Optimism mainnet to find the Public Haus DAO ID.
+It searches for DAOs with names containing 'Public Haus' or similar terms, and also checks
+a specific DAO ID if provided.
+"""
+
+import sys
+import requests
+from dotenv import load_dotenv
+
+# Load environment variables
+load_dotenv()
+
+# Constants
+SUBGRAPH_URL = "https://api.thegraph.com/subgraphs/name/hausdao/daohaus-v3-optimism"
+SPECIFIC_DAO_ID = "0xf5d6b637a9185707f52d40d452956ca49018247a" # Public Haus DAO ID to check
+
+def check_specific_dao(dao_id):
+ """
+ Check if a specific DAO ID exists
+
+ Args:
+ dao_id: The DAO ID to check
+
+ Returns:
+ DAO data if found, None otherwise
+ """
+ # GraphQL query to check a specific DAO
+ query = """
+ query {
+ dao(id: "%s") {
+ id
+ name
+ createdAt
+ totalShares
+ totalLoot
+ activeMemberCount
+ }
+ }
+ """ % dao_id.lower()
+
+ # Make request to subgraph
+ response = requests.post(
+ SUBGRAPH_URL,
+ json={"query": query}
+ )
+
+ # Check for errors
+ if response.status_code != 200:
+ print(f"Error checking DAO: {response.text}")
+ return None
+
+ data = response.json()
+
+ # Check if DAO exists
+ if not data.get("data") or not data["data"].get("dao"):
+ print(f"DAO not found with ID: {dao_id}")
+ return None
+
+ # Get DAO
+ dao = data["data"]["dao"]
+ print(f"Found DAO with ID: {dao_id}")
+ print(f"Name: {dao['name']}")
+ print(f"Created: {dao['createdAt']}")
+ print(f"Members: {dao['activeMemberCount']}")
+ print(f"Shares: {dao['totalShares']}")
+ print(f"Loot: {dao['totalLoot']}")
+ print("-" * 50)
+
+ return dao
+
+def search_daos(search_term):
+ """
+ Search for DAOs with names containing the search term
+
+ Args:
+ search_term: Term to search for in DAO names
+
+ Returns:
+ List of matching DAOs
+ """
+ # GraphQL query to search for DAOs
+ query = """
+ query {
+ daos(where: {name_contains_nocase: "%s"}, first: 100) {
+ id
+ name
+ createdAt
+ totalShares
+ totalLoot
+ activeMemberCount
+ }
+ }
+ """ % search_term
+
+ # Make request to subgraph
+ response = requests.post(
+ SUBGRAPH_URL,
+ json={"query": query}
+ )
+
+ # Check for errors
+ if response.status_code != 200:
+ print(f"Error searching DAOs: {response.text}")
+ return []
+
+ data = response.json()
+
+ # Check if DAOs exist
+ if not data.get("data") or not data["data"].get("daos"):
+ print(f"No DAOs found with name containing '{search_term}'")
+ return []
+
+ # Get DAOs
+ daos = data["data"]["daos"]
+ print(f"Found {len(daos)} DAOs with name containing '{search_term}'")
+
+ return daos
+
+def main():
+ """Main function"""
+ # First check the specific DAO ID
+ print(f"Checking specific DAO ID: {SPECIFIC_DAO_ID}...")
+ specific_dao = check_specific_dao(SPECIFIC_DAO_ID)
+
+ # Search terms to try
+ search_terms = ["Public Haus", "PublicHaus", "Public", "Haus"]
+
+ all_daos = []
+
+ # Try each search term
+ for term in search_terms:
+ print(f"\nSearching for DAOs with name containing '{term}'...")
+ daos = search_daos(term)
+ all_daos.extend(daos)
+
+ # Print results
+ for dao in daos:
+ print(f"ID: {dao['id']}")
+ print(f"Name: {dao['name']}")
+ print(f"Created: {dao['createdAt']}")
+ print(f"Members: {dao['activeMemberCount']}")
+ print(f"Shares: {dao['totalShares']}")
+ print(f"Loot: {dao['totalLoot']}")
+ print("-" * 50)
+
+ # If no DAOs found, try listing all DAOs
+ if not all_daos and not specific_dao:
+ print("\nNo DAOs found with the search terms. Listing all DAOs...")
+
+ # GraphQL query to list all DAOs
+ query = """
+ query {
+ daos(first: 100) {
+ id
+ name
+ createdAt
+ totalShares
+ totalLoot
+ activeMemberCount
+ }
+ }
+ """
+
+ # Make request to subgraph
+ response = requests.post(
+ SUBGRAPH_URL,
+ json={"query": query}
+ )
+
+ # Check for errors
+ if response.status_code != 200:
+ print(f"Error listing DAOs: {response.text}")
+ return 1
+
+ data = response.json()
+
+ # Check if DAOs exist
+ if not data.get("data") or not data["data"].get("daos"):
+ print("No DAOs found")
+ return 1
+
+ # Get DAOs
+ daos = data["data"]["daos"]
+ print(f"Found {len(daos)} DAOs")
+
+ # Print results
+ for dao in daos:
+ print(f"ID: {dao['id']}")
+ print(f"Name: {dao['name']}")
+ print(f"Created: {dao['createdAt']}")
+ print(f"Members: {dao['activeMemberCount']}")
+ print(f"Shares: {dao['totalShares']}")
+ print(f"Loot: {dao['totalLoot']}")
+ print("-" * 50)
+
+ return 0
+
+if __name__ == "__main__":
+ sys.exit(main())
\ No newline at end of file
diff --git a/scripts/moloch_dao/import_public_haus_etherscan.py b/scripts/moloch_dao/import_public_haus_etherscan.py
new file mode 100755
index 0000000..f595608
--- /dev/null
+++ b/scripts/moloch_dao/import_public_haus_etherscan.py
@@ -0,0 +1,351 @@
+#!/usr/bin/env python3
+"""
+Import Public Haus Members using Optimism Etherscan API
+
+This script fetches holders of the Public Haus shares token using the Optimism Etherscan API,
+imports them into the database, and links them to the Public Haus DAO.
+
+Usage:
+ python import_public_haus_etherscan.py
+"""
+
+import os
+import sys
+import time
+import requests
+from typing import Dict, Any, List, Optional
+from dotenv import load_dotenv
+
+# Add parent directory to path to import utils
+sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+from utils.db_connector import DatabaseConnector
+from utils.logger import setup_logger
+
+# Load environment variables
+load_dotenv()
+
+# Setup logging
+logger = setup_logger("public_haus_etherscan_importer")
+
+# Constants
+PUBLIC_HAUS_DAO_ID = "0xf5d6b637a9185707f52d40d452956ca49018247a" # Public Haus DAO ID on Optimism
+SHARES_TOKEN_ADDRESS = "0x4950c436F69c8b4F68ed814A70a5E1D94495c4a7" # From the image, sharesToken address
+
+# Optimism Etherscan API
+OPTIMISM_ETHERSCAN_API_URL = "https://api-optimistic.etherscan.io/api"
+
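+# A small retry helper, shown as a sketch only: Etherscan-family endpoints
+# throttle keyless clients, so a short backoff keeps repeated calls resilient.
+# It is not wired into the importer below; the name and defaults are
+# illustrative assumptions.
+def _get_with_retry(params: Dict[str, Any], retries: int = 3, backoff: float = 1.0) -> Dict[str, Any]:
+    """Call the Optimism Etherscan API, retrying while it reports failure."""
+    data: Dict[str, Any] = {}
+    for attempt in range(retries):
+        response = requests.get(OPTIMISM_ETHERSCAN_API_URL, params=params, timeout=30)
+        data = response.json()
+        if data.get("status") == "1":  # Etherscan signals success with status "1"
+            return data
+        time.sleep(backoff * (attempt + 1))
+    return data
+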
+class PublicHausEtherscanImporter:
+ """Importer for Public Haus members using Optimism Etherscan API"""
+
+ def __init__(self):
+ """Initialize the importer"""
+ # Initialize database
+ self.db = DatabaseConnector()
+
+ # Get Etherscan API key
+ self.etherscan_api_key = os.getenv("ETHERSCAN_API_KEY")
+ if not self.etherscan_api_key:
+ logger.warning("ETHERSCAN_API_KEY not set, using API without key (rate limited)")
+ self.etherscan_api_key = ""
+
+ # Register data source
+ self.data_source_id = self.register_data_source()
+
+ # Initialize scraping job
+ self.job_id = self.db.create_scraping_job(
+ source_name="Public Haus DAO Etherscan",
+ status="running"
+ )
+ logger.info(f"Created scraping job with ID: {self.job_id}")
+
+ def register_data_source(self) -> str:
+ """Register the Public Haus data source in the database"""
+ return self.db.upsert_data_source(
+ name="Public Haus DAO Etherscan",
+ source_type="blockchain",
+ description="Public Haus DAO members identified by token holdings via Etherscan"
+ )
+
+ def get_token_info(self) -> Dict[str, Any]:
+ """
+ Get information about the shares token from Etherscan
+
+ Returns:
+ Token information
+ """
+ try:
+ # Get token info from Etherscan
+ params = {
+ "module": "token",
+ "action": "tokeninfo",
+ "contractaddress": SHARES_TOKEN_ADDRESS,
+ "apikey": self.etherscan_api_key
+ }
+
+ response = requests.get(OPTIMISM_ETHERSCAN_API_URL, params=params)
+ data = response.json()
+
+ if data["status"] == "1":
+ token_info = data["result"][0]
+ logger.info(f"Token info: {token_info.get('name')} ({token_info.get('symbol')})")
+ return token_info
+ else:
+ # If Etherscan API fails, use hardcoded values
+ logger.warning(f"Error getting token info from Etherscan: {data.get('message')}")
+ return {
+ "name": "Public Haus Shares",
+ "symbol": "SHARES",
+ "decimals": "18",
+ "totalSupply": "0"
+ }
+
+ except Exception as e:
+ logger.error(f"Error getting token info: {e}")
+ # Return default values
+ return {
+ "name": "Public Haus Shares",
+ "symbol": "SHARES",
+ "decimals": "18",
+ "totalSupply": "0"
+ }
+
+ def fetch_token_holders(self) -> List[Dict[str, Any]]:
+ """
+ Fetch holders of the shares token using Etherscan API
+
+ Returns:
+ List of token holders with their balances
+ """
+ try:
+ # Get token info
+ token_info = self.get_token_info()
+ decimals = int(token_info.get("decimals", 18))
+
+ # Get token holders from Etherscan
+ params = {
+ "module": "token",
+ "action": "tokenholderlist",
+ "contractaddress": SHARES_TOKEN_ADDRESS,
+ "page": 1,
+ "offset": 100, # Get up to 100 holders
+ "apikey": self.etherscan_api_key
+ }
+
+ response = requests.get(OPTIMISM_ETHERSCAN_API_URL, params=params)
+ data = response.json()
+
+ holders = []
+
+ if data["status"] == "1":
+ for holder in data["result"]:
+ address = holder["address"]
+ balance = int(holder["TokenHolderQuantity"])
+
+ # Skip zero balances
+ if balance > 0:
+ holders.append({
+ "address": address,
+ "balance": balance,
+ "balanceFormatted": balance / (10 ** decimals),
+ "dao": "Public Haus"
+ })
+
+ logger.info(f"Found {len(holders)} token holders with non-zero balance")
+ else:
+ # If Etherscan API fails, try alternative approach
+ logger.warning(f"Error getting token holders from Etherscan: {data.get('message')}")
+
+ # If the tokenholderlist endpoint is not available, try getting transfers
+ params = {
+ "module": "account",
+ "action": "tokentx",
+ "contractaddress": SHARES_TOKEN_ADDRESS,
+ "page": 1,
+ "offset": 1000, # Get up to 1000 transfers
+ "sort": "desc",
+ "apikey": self.etherscan_api_key
+ }
+
+ response = requests.get(OPTIMISM_ETHERSCAN_API_URL, params=params)
+ data = response.json()
+
+ if data["status"] == "1":
+ # Extract unique addresses from transfers
+ addresses = set()
+ for tx in data["result"]:
+ addresses.add(tx["to"])
+ addresses.add(tx["from"])
+
+ # Remove zero address
+ if "0x0000000000000000000000000000000000000000" in addresses:
+ addresses.remove("0x0000000000000000000000000000000000000000")
+
+ # Create holder objects
+ for address in addresses:
+ holders.append({
+ "address": address,
+ "balance": 1, # We don't know the actual balance
+ "balanceFormatted": 1,
+ "dao": "Public Haus"
+ })
+
+ logger.info(f"Found {len(holders)} unique addresses from token transfers")
+
+ # If we still don't have any holders, use the DAO address itself
+ if not holders:
+ logger.warning("No token holders found, using DAO address as fallback")
+ holders.append({
+ "address": PUBLIC_HAUS_DAO_ID,
+ "balance": 1,
+ "balanceFormatted": 1,
+ "dao": "Public Haus"
+ })
+
+ return holders
+
+ except Exception as e:
+ logger.error(f"Error fetching token holders: {e}")
+ raise
+
+ def process_holder(self, holder: Dict[str, Any]) -> Optional[str]:
+ """
+ Process a token holder and import into the database
+
+ Args:
+ holder: Token holder information
+
+ Returns:
+ Contact ID if successful, None otherwise
+ """
+ try:
+ # Extract holder information
+ address = holder["address"]
+ balance = holder["balance"]
+ balance_formatted = holder["balanceFormatted"]
+ dao_name = holder["dao"]
+
+ # Check if contact exists
+ query = 'SELECT id, name, "ensName" FROM "Contact" WHERE "ethereumAddress" ILIKE %(address)s'
+ existing_contacts = self.db.execute_query(query, {"address": address})
+
+ contact_id = None
+
+ if existing_contacts:
+ # Use existing contact
+ contact_id = existing_contacts[0]["id"]
+ logger.info(f"Found existing contact {contact_id} for address {address}")
+ else:
+ # Create new contact
+ contact_id = self.db.upsert_contact(
+ ethereum_address=address,
+ ens_name=None
+ )
+ logger.info(f"Created new contact {contact_id} for address {address}")
+
+ # Add DAO membership
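+        # (The ON CONFLICT clause below assumes a unique constraint on
+        # ("contactId", "daoName") in DaoMembership; Postgres rejects the
+        # statement if no such constraint exists.)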
+ self.db.execute_update(
+ """
+ INSERT INTO "DaoMembership" ("contactId", "daoName", "shares", "loot", "delegatingTo")
+ VALUES (%(contact_id)s, %(dao_name)s, %(shares)s, %(loot)s, %(delegating_to)s)
+ ON CONFLICT ("contactId", "daoName")
+ DO UPDATE SET
+ "shares" = %(shares)s,
+ "loot" = %(loot)s,
+ "updatedAt" = NOW()
+ """,
+ {
+ "contact_id": contact_id,
+ "dao_name": dao_name,
+ "shares": balance, # Use token balance as shares
+ "loot": 0, # We don't have loot information
+ "delegating_to": None
+ }
+ )
+
+ # Add note about membership
+ note_content = f"Public Haus DAO Member\nShares Token Balance: {balance_formatted}"
+
+ self.db.add_note_to_contact(
+ contact_id=contact_id,
+ content=note_content
+ )
+
+ # Add tag for the DAO
+ self.db.add_tag_to_contact(
+ contact_id=contact_id,
+ tag_name=dao_name
+ )
+
+ # Link to data source
+ self.db.link_contact_to_data_source(contact_id, self.data_source_id)
+
+ return contact_id
+
+ except Exception as e:
+ logger.error(f"Error processing holder {holder.get('address')}: {e}")
+ return None
+
+ def run(self) -> int:
+ """
+ Run the importer
+
+ Returns:
+ Number of holders imported
+ """
+ try:
+ # Fetch token holders
+ holders = self.fetch_token_holders()
+
+ if not holders:
+ logger.info("No token holders found")
+ self.db.update_scraping_job(self.job_id, "completed")
+ return 0
+
+ # Process holders
+ imported_count = 0
+ existing_count = 0
+
+ for holder in holders:
+ try:
+ contact_id = self.process_holder(holder)
+ if contact_id:
+ imported_count += 1
+ except Exception as e:
+ logger.exception(f"Error processing holder {holder.get('address')}: {e}")
+
+ # Add a small delay to avoid overwhelming the database
+ time.sleep(0.1)
+
+ # Complete the scraping job
+ self.db.update_scraping_job(
+ self.job_id,
+ "completed",
+ records_processed=len(holders),
+ records_added=imported_count,
+ records_updated=existing_count
+ )
+
+ logger.info(f"Imported {imported_count} holders out of {len(holders)} processed")
+ return imported_count
+
+ except Exception as e:
+ # Update the scraping job with error
+ self.db.update_scraping_job(self.job_id, "failed", error_message=str(e))
+ logger.exception(f"Error importing holders: {e}")
+ raise
+
+def main():
+ """Main function"""
+ try:
+ importer = PublicHausEtherscanImporter()
+ imported_count = importer.run()
+ logger.info(f"Import completed successfully. Imported {imported_count} token holders.")
+ return 0
+ except Exception as e:
+ logger.exception(f"Error importing token holders: {e}")
+ return 1
+
+if __name__ == "__main__":
+ sys.exit(main())
\ No newline at end of file
diff --git a/scripts/moloch_dao/import_public_haus_members.py b/scripts/moloch_dao/import_public_haus_members.py
new file mode 100644
index 0000000..7b2489e
--- /dev/null
+++ b/scripts/moloch_dao/import_public_haus_members.py
@@ -0,0 +1,248 @@
+#!/usr/bin/env python3
+"""
+Import Public Haus Members from DAOhaus v3 Subgraph
+
+This script fetches members of Public Haus DAO from the DAOhaus v3 subgraph on Optimism mainnet,
+imports them into the database, and links them to the Public Haus DAO.
+
+Usage:
+ python import_public_haus_members.py
+"""
+
+import os
+import sys
+import requests
+from typing import Dict, Any, List, Optional
+from dotenv import load_dotenv
+
+# Add parent directory to path to import utils
+sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+from utils.db_connector import DatabaseConnector
+from utils.logger import setup_logger
+
+# Load environment variables
+load_dotenv()
+
+# Setup logging
+logger = setup_logger("public_haus_importer")
+
+# Constants
+SUBGRAPH_URL = "https://api.thegraph.com/subgraphs/name/hausdao/daohaus-v3-optimism"
+PUBLIC_HAUS_DAO_ID = "0xf5d6b637a9185707f52d40d452956ca49018247a" # Public Haus DAO ID on Optimism
+
+class PublicHausImporter:
+ """Importer for Public Haus members from DAOhaus v3 subgraph"""
+
+ def __init__(self):
+ """Initialize the importer"""
+ # Initialize database
+ self.db = DatabaseConnector()
+
+ # Register data source
+ self.data_source_id = self.register_data_source()
+
+ def register_data_source(self) -> str:
+ """Register the Public Haus data source in the database"""
+ return self.db.upsert_data_source(
+ name="Public Haus DAO Subgraph",
+ source_type="subgraph",
+ description="Public Haus DAO members from DAOhaus v3 subgraph on Optimism mainnet"
+ )
+
+ def fetch_members_from_subgraph(self) -> List[Dict[str, Any]]:
+ """
+ Fetch Public Haus members from the DAOhaus v3 subgraph
+
+ Returns:
+ List of member data from the subgraph
+ """
+ # GraphQL query to fetch members
+ query = """
+ query {
+ dao(id: "%s") {
+ id
+ name
+ members {
+ id
+ memberAddress
+ shares
+ loot
+ createdAt
+ delegatingTo
+ delegateOfCount
+ delegateOf {
+ memberAddress
+ }
+ }
+ }
+ }
+ """ % PUBLIC_HAUS_DAO_ID.lower()
+
+ # Make request to subgraph
+ response = requests.post(
+ SUBGRAPH_URL,
+ json={"query": query}
+ )
+
+ # Check for errors
+ if response.status_code != 200:
+ logger.error(f"Error fetching members: {response.text}")
+ raise Exception(f"Error fetching members: {response.status_code}")
+
+ data = response.json()
+
+ # Check if DAO exists
+ if not data.get("data") or not data["data"].get("dao"):
+ logger.error(f"DAO not found: {PUBLIC_HAUS_DAO_ID}")
+ raise Exception(f"DAO not found: {PUBLIC_HAUS_DAO_ID}")
+
+ # Get members
+ members = data["data"]["dao"]["members"]
+ logger.info(f"Fetched {len(members)} members from subgraph")
+
+ return members
+
+ def process_member(self, member: Dict[str, Any]) -> Optional[str]:
+ """
+ Process a single member and import into database
+
+ Args:
+ member: Member data from the subgraph
+
+ Returns:
+ Contact ID if successful, None otherwise
+ """
+ # Extract member data
+ address = member["memberAddress"]
+ shares = int(member["shares"])
+ loot = int(member["loot"])
+ created_at = member["createdAt"]
+ delegating_to = member.get("delegatingTo")
+
+ # Skip if no address
+ if not address:
+ logger.warning(f"Member has no address: {member}")
+ return None
+
+ # Check if contact already exists
+ query = 'SELECT id, name, "ensName" FROM "Contact" WHERE "ethereumAddress" ILIKE %(address)s'
+ existing_contacts = self.db.execute_query(query, {"address": address})
+
+ contact_id = None
+
+ if existing_contacts:
+ # Use existing contact
+ contact_id = existing_contacts[0]["id"]
+ logger.info(f"Found existing contact {contact_id} for address {address}")
+ else:
+ # Create new contact
+ contact_data = {
+ "ethereumAddress": address,
+ "name": f"Public Haus Member {address[:8]}", # Default name
+ }
+
+ contact_id = self.db.upsert_contact(contact_data)
+ logger.info(f"Created new contact {contact_id} for address {address}")
+
+ # Add DAO membership
+ self.db.execute_update(
+ """
+ INSERT INTO "DaoMembership" ("contactId", "daoName", "shares", "loot", "delegatingTo")
+ VALUES (%(contact_id)s, %(dao_name)s, %(shares)s, %(loot)s, %(delegating_to)s)
+ ON CONFLICT ("contactId", "daoName")
+ DO UPDATE SET
+ "shares" = %(shares)s,
+ "loot" = %(loot)s,
+ "delegatingTo" = %(delegating_to)s,
+ "updatedAt" = NOW()
+ """,
+ {
+ "contact_id": contact_id,
+ "dao_name": "Public Haus",
+ "shares": shares,
+ "loot": loot,
+ "delegating_to": delegating_to
+ }
+ )
+
+ # Add note about membership
+ note_content = f"Public Haus DAO Member\nShares: {shares}\nLoot: {loot}\nJoined: {created_at}"
+ if delegating_to:
+ note_content += f"\nDelegating to: {delegating_to}"
+
+ self.db.add_note_to_contact(
+ contact_id=contact_id,
+ content=note_content,
+ source="Public Haus DAO Subgraph"
+ )
+
+ # Link to data source
+ self.db.link_contact_to_data_source(contact_id, self.data_source_id)
+
+ return contact_id
+
+ def run(self) -> int:
+ """
+ Run the importer
+
+ Returns:
+ Number of members imported
+ """
+ # Create a scraping job
+ job_id = self.db.create_scraping_job("Public Haus DAO Importer", "running")
+ logger.info(f"Created scraping job with ID: {job_id}")
+
+ try:
+ # Fetch members
+ members = self.fetch_members_from_subgraph()
+
+ if not members:
+ logger.info("No members found")
+ self.db.update_scraping_job(job_id, "completed")
+ return 0
+
+ # Process members
+ imported_count = 0
+ existing_count = 0
+
+ for member in members:
+ try:
+ contact_id = self.process_member(member)
+ if contact_id:
+ imported_count += 1
+ except Exception as e:
+ logger.exception(f"Error processing member {member.get('memberAddress')}: {e}")
+
+ # Complete the scraping job
+ self.db.update_scraping_job(
+ job_id,
+ "completed",
+ records_processed=len(members),
+ records_added=imported_count,
+ records_updated=existing_count
+ )
+
+ logger.info(f"Imported {imported_count} members out of {len(members)} processed")
+ return imported_count
+
+ except Exception as e:
+ # Update the scraping job with error
+ self.db.update_scraping_job(job_id, "failed", error_message=str(e))
+ logger.exception(f"Error importing members: {e}")
+ raise
+
+def main():
+ """Main function"""
+ try:
+ importer = PublicHausImporter()
+ imported_count = importer.run()
+ logger.info(f"Import completed successfully. Imported {imported_count} members.")
+ return 0
+ except Exception as e:
+ logger.exception(f"Error importing members: {e}")
+ return 1
+
+if __name__ == "__main__":
+ sys.exit(main())
\ No newline at end of file
diff --git a/scripts/moloch_dao/import_public_haus_members_api.py b/scripts/moloch_dao/import_public_haus_members_api.py
new file mode 100644
index 0000000..ceb0ad4
--- /dev/null
+++ b/scripts/moloch_dao/import_public_haus_members_api.py
@@ -0,0 +1,254 @@
+#!/usr/bin/env python3
+"""
+Import Public Haus Members from DAOhaus API
+
+This script fetches members of Public Haus DAO from the DAOhaus API on Optimism mainnet,
+imports them into the database, and links them to the Public Haus DAO.
+
+Usage:
+ python import_public_haus_members_api.py
+"""
+
+import os
+import sys
+import logging
+import requests
+import json
+import time
+from typing import Dict, Any, List, Optional
+from dotenv import load_dotenv
+
+# Add parent directory to path to import utils
+sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+from utils.db_connector import DatabaseConnector
+from utils.logger import setup_logger
+
+# Load environment variables
+load_dotenv()
+
+# Setup logging
+logger = setup_logger("public_haus_importer")
+
+# Constants
+DAOHAUS_API_URL = "https://admin.daohaus.club/api"
+PUBLIC_HAUS_DAO_ID = "0xf5d6b637a9185707f52d40d452956ca49018247a" # Public Haus DAO ID on Optimism
+CHAIN_ID = "10" # Optimism chain ID
+
+class PublicHausImporter:
+ """Importer for Public Haus members from DAOhaus API"""
+
+ def __init__(self):
+ """Initialize the importer"""
+ # Initialize database
+ self.db = DatabaseConnector()
+
+ # Register data source
+ self.data_source_id = self.register_data_source()
+
+ def register_data_source(self) -> str:
+ """Register the Public Haus data source in the database"""
+ return self.db.upsert_data_source(
+ name="Public Haus DAO API",
+ source_type="api",
+ description="Public Haus DAO members from DAOhaus API on Optimism mainnet"
+ )
+
+ def fetch_dao_info(self) -> Dict[str, Any]:
+ """
+ Fetch Public Haus DAO information from the DAOhaus API
+
+ Returns:
+ DAO information
+ """
+ # Make request to DAOhaus API
+ url = f"{DAOHAUS_API_URL}/dao/{CHAIN_ID}/{PUBLIC_HAUS_DAO_ID}"
+ response = requests.get(url)
+
+ # Check for errors
+ if response.status_code != 200:
+ logger.error(f"Error fetching DAO info: {response.text}")
+ raise Exception(f"Error fetching DAO info: {response.status_code}")
+
+ data = response.json()
+ logger.info(f"Fetched DAO info: {data.get('name')}")
+
+ return data
+
+ def fetch_members(self) -> List[Dict[str, Any]]:
+ """
+ Fetch Public Haus members from the DAOhaus API
+
+ Returns:
+ List of member data from the API
+ """
+ # Make request to DAOhaus API
+ url = f"{DAOHAUS_API_URL}/dao/{CHAIN_ID}/{PUBLIC_HAUS_DAO_ID}/members"
+        response = requests.get(url, timeout=30)
+
+ # Check for errors
+ if response.status_code != 200:
+ logger.error(f"Error fetching members: {response.text}")
+ raise Exception(f"Error fetching members: {response.status_code}")
+
+ data = response.json()
+
+ # Check if members exist
+ if not data:
+ logger.error(f"No members found for DAO: {PUBLIC_HAUS_DAO_ID}")
+ return []
+
+ logger.info(f"Fetched {len(data)} members from API")
+
+ return data
+
+ def process_member(self, member: Dict[str, Any]) -> Optional[str]:
+ """
+ Process a single member and import into database
+
+ Args:
+ member: Member data from the API
+
+ Returns:
+ Contact ID if successful, None otherwise
+ """
+ # Extract member data
+ address = member.get("memberAddress")
+ shares = int(member.get("shares", 0))
+ loot = int(member.get("loot", 0))
+ joined_at = member.get("createdAt")
+ delegating_to = member.get("delegatingTo")
+
+ # Skip if no address
+ if not address:
+ logger.warning(f"Member has no address: {member}")
+ return None
+
+ # Check if contact already exists
+ query = 'SELECT id, name, "ensName" FROM "Contact" WHERE "ethereumAddress" ILIKE %(address)s'
+ existing_contacts = self.db.execute_query(query, {"address": address})
+
+ contact_id = None
+
+ if existing_contacts:
+ # Use existing contact
+ contact_id = existing_contacts[0]["id"]
+ logger.info(f"Found existing contact {contact_id} for address {address}")
+ else:
+ # Create new contact
+ contact_data = {
+ "ethereumAddress": address,
+ "name": f"Public Haus Member {address[:8]}", # Default name
+ }
+
+ contact_id = self.db.upsert_contact(contact_data)
+ logger.info(f"Created new contact {contact_id} for address {address}")
+
+ # Add DAO membership
+ self.db.execute_update(
+ """
+ INSERT INTO "DaoMembership" ("contactId", "daoName", "shares", "loot", "delegatingTo")
+ VALUES (%(contact_id)s, %(dao_name)s, %(shares)s, %(loot)s, %(delegating_to)s)
+ ON CONFLICT ("contactId", "daoName")
+ DO UPDATE SET
+ "shares" = %(shares)s,
+ "loot" = %(loot)s,
+ "delegatingTo" = %(delegating_to)s,
+ "updatedAt" = NOW()
+ """,
+ {
+ "contact_id": contact_id,
+ "dao_name": "Public Haus",
+ "shares": shares,
+ "loot": loot,
+ "delegating_to": delegating_to
+ }
+ )
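+        # NOTE: the upsert above assumes a unique constraint on
+        # ("contactId", "daoName") and a database-side default for the id
+        # column; without the constraint ON CONFLICT will never match, and
+        # without the default the INSERT itself fails.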
+
+ # Add note about membership
+ note_content = f"Public Haus DAO Member\nShares: {shares}\nLoot: {loot}"
+ if joined_at:
+ note_content += f"\nJoined: {joined_at}"
+ if delegating_to:
+ note_content += f"\nDelegating to: {delegating_to}"
+
+ self.db.add_note_to_contact(
+ contact_id=contact_id,
+ content=note_content,
+ source="Public Haus DAO API"
+ )
+
+ # Link to data source
+ self.db.link_contact_to_data_source(contact_id, self.data_source_id)
+
+ return contact_id
+
+ def run(self) -> int:
+ """
+ Run the importer
+
+ Returns:
+ Number of members imported
+ """
+ # Create a scraping job
+ job_id = self.db.create_scraping_job("Public Haus DAO Importer", "running")
+ logger.info(f"Created scraping job with ID: {job_id}")
+
+ try:
+ # Fetch DAO info
+ dao_info = self.fetch_dao_info()
+ logger.info(f"DAO Name: {dao_info.get('name')}")
+
+ # Fetch members
+ members = self.fetch_members()
+
+ if not members:
+ logger.info("No members found")
+ self.db.update_scraping_job(job_id, "completed")
+ return 0
+
+ # Process members
+ imported_count = 0
+ existing_count = 0
+
+ for member in members:
+ try:
+ contact_id = self.process_member(member)
+ if contact_id:
+ imported_count += 1
+ except Exception as e:
+ logger.exception(f"Error processing member {member.get('memberAddress')}: {e}")
+
+ # Add a small delay to avoid overwhelming the database
+ time.sleep(0.1)
+
+ # Complete the scraping job
+ self.db.update_scraping_job(
+ job_id,
+ "completed",
+ records_processed=len(members),
+ records_added=imported_count,
+ records_updated=existing_count
+ )
+
+ logger.info(f"Imported {imported_count} members out of {len(members)} processed")
+ return imported_count
+
+ except Exception as e:
+ # Update the scraping job with error
+ self.db.update_scraping_job(job_id, "failed", error_message=str(e))
+ logger.exception(f"Error importing members: {e}")
+ raise
+
+def main():
+ """Main function"""
+ try:
+ importer = PublicHausImporter()
+ imported_count = importer.run()
+ logger.info(f"Import completed successfully. Imported {imported_count} members.")
+ return 0
+ except Exception as e:
+ logger.exception(f"Error importing members: {e}")
+ return 1
+
+if __name__ == "__main__":
+ sys.exit(main())
\ No newline at end of file
diff --git a/scripts/moloch_dao/import_public_haus_members_events.py b/scripts/moloch_dao/import_public_haus_members_events.py
new file mode 100644
index 0000000..1aab491
--- /dev/null
+++ b/scripts/moloch_dao/import_public_haus_members_events.py
@@ -0,0 +1,304 @@
+#!/usr/bin/env python3
+"""
+Import Public Haus Members from Optimism Blockchain using Events
+
+This script fetches members of Public Haus DAO by querying events from the Optimism blockchain,
+imports them into the database, and links them to the Public Haus DAO.
+
+Usage:
+ python import_public_haus_members_events.py
+"""
+
+import os
+import sys
+import logging
+import json
+import time
+from typing import Dict, Any, List, Optional
+from web3 import Web3
+from dotenv import load_dotenv
+
+# Add parent directory to path to import utils
+sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+from utils.db_connector import DatabaseConnector
+from utils.logger import setup_logger
+
+# Load environment variables
+load_dotenv()
+
+# Setup logging
+logger = setup_logger("public_haus_importer")
+
+# Constants
+PUBLIC_HAUS_DAO_ID = "0xf5d6b637a9185707f52d40d452956ca49018247a" # Public Haus DAO ID on Optimism
+
+# Moloch DAO V3 ABI (partial, only what we need for events)
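+# NOTE: these event names and signatures are best-effort guesses rather than a
+# verified Baal (Moloch v3) ABI; a mismatched signature yields zero matching
+# logs instead of an error, so confirm them against the deployed contract on
+# Optimistic Etherscan before trusting an empty result.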
+MOLOCH_V3_ABI = [
+ {
+ "anonymous": False,
+ "inputs": [
+ {"indexed": True, "internalType": "address", "name": "member", "type": "address"},
+ {"indexed": False, "internalType": "uint256", "name": "shares", "type": "uint256"}
+ ],
+ "name": "SharingEvent",
+ "type": "event"
+ },
+ {
+ "anonymous": False,
+ "inputs": [
+ {"indexed": True, "internalType": "address", "name": "applicant", "type": "address"}
+ ],
+ "name": "MembershipProposalSubmitted",
+ "type": "event"
+ },
+ {
+ "anonymous": False,
+ "inputs": [
+ {"indexed": True, "internalType": "address", "name": "member", "type": "address"}
+ ],
+ "name": "MemberAdded",
+ "type": "event"
+ },
+ {
+ "anonymous": False,
+ "inputs": [
+ {"indexed": True, "internalType": "address", "name": "memberAddress", "type": "address"},
+ {"indexed": False, "internalType": "uint256", "name": "shares", "type": "uint256"},
+ {"indexed": False, "internalType": "uint256", "name": "loot", "type": "uint256"}
+ ],
+ "name": "ProcessProposal",
+ "type": "event"
+ }
+]
+
+class PublicHausImporter:
+ """Importer for Public Haus members from Optimism blockchain using events"""
+
+ def __init__(self):
+ """Initialize the importer"""
+ # Initialize database
+ self.db = DatabaseConnector()
+
+ # Initialize Web3
+ optimism_rpc_url = os.getenv("OPTIMISM_RPC_URL")
+ if not optimism_rpc_url:
+ raise ValueError("OPTIMISM_RPC_URL environment variable not set")
+
+ self.web3 = Web3(Web3.HTTPProvider(optimism_rpc_url))
+ if not self.web3.is_connected():
+ raise ValueError("Failed to connect to Optimism RPC")
+
+ logger.info(f"Connected to Optimism: {self.web3.is_connected()}")
+
+ # Initialize contract
+ self.contract = self.web3.eth.contract(
+ address=self.web3.to_checksum_address(PUBLIC_HAUS_DAO_ID),
+ abi=MOLOCH_V3_ABI
+ )
+
+ # Register data source
+ self.data_source_id = self.register_data_source()
+
+ # Initialize scraping job
+ self.job_id = self.db.create_scraping_job(
+ source_name="Public Haus DAO Blockchain Events",
+ status="running"
+ )
+ logger.info(f"Created scraping job with ID: {self.job_id}")
+
+ def register_data_source(self) -> str:
+ """Register the Public Haus data source in the database"""
+ return self.db.upsert_data_source(
+ name="Public Haus DAO Blockchain",
+ source_type="blockchain",
+ description="Public Haus DAO members from Optimism blockchain"
+ )
+
+ def fetch_members_from_events(self) -> List[Dict[str, Any]]:
+ """
+ Fetch Public Haus members by querying events
+
+ Returns:
+ List of member information
+ """
+ try:
+ # Get the latest block number
+ latest_block = self.web3.eth.block_number
+
+ # Calculate the starting block (approximately 6 months ago)
+ # Optimism has ~1 block every 2 seconds
+ blocks_per_day = 43200 # 86400 seconds / 2 seconds per block
+ start_block = max(0, latest_block - (blocks_per_day * 180)) # 180 days
+
+ logger.info(f"Fetching events from block {start_block} to {latest_block}")
+
+ # Get all member-related events
+ member_addresses = set()
+
+ # Try different event types that might indicate membership
+ for event_name in ["MemberAdded", "ProcessProposal", "SharingEvent", "MembershipProposalSubmitted"]:
+ try:
+                    # web3.py v6 expects snake_case from_block/to_block kwargs
+                    event_filter = self.contract.events[event_name].create_filter(
+                        from_block=start_block,
+                        to_block=latest_block
+                    )
+ events = event_filter.get_all_entries()
+
+ logger.info(f"Found {len(events)} {event_name} events")
+
+ for event in events:
+ if hasattr(event.args, 'member'):
+ member_addresses.add(event.args.member)
+ elif hasattr(event.args, 'memberAddress'):
+ member_addresses.add(event.args.memberAddress)
+ elif hasattr(event.args, 'applicant'):
+ member_addresses.add(event.args.applicant)
+ except Exception as e:
+ logger.warning(f"Error fetching {event_name} events: {e}")
+ continue
+
+ # If we didn't find any members through events, try a different approach
+ if not member_addresses:
+ logger.warning("No members found through events, trying alternative approach")
+
+                # Try to get members by checking recent transactions to the DAO
+                for block_num in range(latest_block - 1000, latest_block):
+                    block = self.web3.eth.get_block(block_num, full_transactions=True)
+                    for tx in block.transactions:
+                        if tx.to and tx.to.lower() == PUBLIC_HAUS_DAO_ID.lower():
+                            member_addresses.add(tx['from'])
+
+ # Convert addresses to member objects
+ members = []
+ for address in member_addresses:
+ members.append({
+ "address": address,
+ "dao": "Public Haus",
+ "shares": 0, # We don't have share information from events
+ "loot": 0 # We don't have loot information from events
+ })
+
+ logger.info(f"Found {len(members)} unique members")
+ return members
+
+ except Exception as e:
+ logger.error(f"Error fetching members from events: {e}")
+ raise
+
+ def process_member(self, member: Dict[str, Any]) -> Optional[str]:
+ """
+ Process a member and import into the database
+
+ Args:
+ member: Member information
+
+ Returns:
+ Contact ID if successful, None otherwise
+ """
+ try:
+ # Extract member information
+ address = member["address"]
+ dao_name = member["dao"]
+
+ # Check if contact exists
+ contact_id = self.db.get_contact_by_ethereum_address(address)
+
+ if contact_id:
+ logger.info(f"Contact already exists for address {address}")
+ else:
+ # Create new contact
+ contact_id = self.db.create_contact(
+ name=f"Public Haus Member {address[:8]}",
+ ethereum_address=address,
+ email=None,
+ twitter=None,
+ github=None,
+ telegram=None,
+ discord=None
+ )
+ logger.info(f"Created new contact with ID {contact_id} for address {address}")
+
+ # Link contact to data source
+ self.db.link_contact_to_data_source(
+ contact_id=contact_id,
+ data_source_id=self.data_source_id,
+ external_id=address
+ )
+
+ # Add tag for the DAO
+ self.db.add_tag_to_contact(
+ contact_id=contact_id,
+ tag_name=dao_name
+ )
+
+ # Add note about membership
+            self.db.add_note_to_contact(
+                contact_id=contact_id,
+                content=f"Member of {dao_name} DAO on Optimism"
+            )
+
+ return contact_id
+
+ except Exception as e:
+ logger.error(f"Error processing member {member['address']}: {e}")
+ return None
+
+ def run(self) -> int:
+ """
+ Run the importer
+
+ Returns:
+ Number of imported members
+ """
+ try:
+ # Fetch members
+ members = self.fetch_members_from_events()
+
+ # Process members
+ imported_count = 0
+ for member in members:
+ contact_id = self.process_member(member)
+ if contact_id:
+ imported_count += 1
+
+ # Sleep to avoid rate limiting
+ time.sleep(0.1)
+
+ # Update scraping job
+ self.db.update_scraping_job(
+ job_id=self.job_id,
+ status="completed",
+ records_processed=len(members),
+ records_added=imported_count,
+ records_updated=0
+ )
+
+ logger.info(f"Imported {imported_count} members out of {len(members)}")
+ return imported_count
+
+ except Exception as e:
+ logger.error(f"Error importing members: {e}")
+
+ # Update scraping job with error
+ self.db.update_scraping_job(
+ job_id=self.job_id,
+ status="failed",
+ error_message=str(e)
+ )
+
+ raise
+
+def main():
+    """Main entry point"""
+    try:
+        importer = PublicHausImporter()
+        imported_count = importer.run()
+        logger.info(f"Successfully imported {imported_count} Public Haus members")
+        return 0
+    except Exception as e:
+        logger.exception(f"Error importing members: {e}")
+        return 1
+
+if __name__ == "__main__":
+    sys.exit(main())
\ No newline at end of file
diff --git a/scripts/moloch_dao/import_public_haus_members_graph.py b/scripts/moloch_dao/import_public_haus_members_graph.py
new file mode 100644
index 0000000..4a5cb67
--- /dev/null
+++ b/scripts/moloch_dao/import_public_haus_members_graph.py
@@ -0,0 +1,298 @@
+#!/usr/bin/env python3
+"""
+Import Public Haus Members from DAOhaus v3 Subgraph
+
+This script fetches members of Public Haus DAO from the DAOhaus v3 subgraph,
+imports them into the database, and links them to the Public Haus DAO.
+
+Usage:
+ python import_public_haus_members_graph.py
+"""
+
+import os
+import sys
+import logging
+import requests
+import json
+import time
+from typing import Dict, Any, List, Optional
+from dotenv import load_dotenv
+
+# Add parent directory to path to import utils
+sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+from utils.db_connector import DatabaseConnector
+from utils.logger import setup_logger
+
+# Load environment variables
+load_dotenv()
+
+# Setup logging
+logger = setup_logger("public_haus_importer")
+
+# Constants
+SUBGRAPH_URL = "https://api.thegraph.com/subgraphs/id/HouDe2pTdyKM9CTG1aodnPPPhm7U148BCH7eJ4HHwpdQ"
+PUBLIC_HAUS_DAO_ID = "0xf5d6b637a9185707f52d40d452956ca49018247a" # Public Haus DAO ID
+
+class PublicHausImporter:
+ """Importer for Public Haus members from DAOhaus v3 subgraph"""
+
+ def __init__(self):
+ """Initialize the importer"""
+ # Initialize database
+ self.db = DatabaseConnector()
+
+ # Register data source
+ self.data_source_id = self.register_data_source()
+
+ def register_data_source(self) -> str:
+ """Register the Public Haus data source in the database"""
+ return self.db.upsert_data_source(
+ name="Public Haus DAO Subgraph",
+ source_type="subgraph",
+ description="Public Haus DAO members from DAOhaus v3 subgraph"
+ )
+
+ def fetch_dao_info(self) -> Dict[str, Any]:
+ """
+ Fetch Public Haus DAO information from the subgraph
+
+ Returns:
+ DAO information
+ """
+ # GraphQL query to fetch DAO info
+ query = """
+ query {
+ dao(id: "%s") {
+ id
+ name
+ createdAt
+ totalShares
+ totalLoot
+ activeMemberCount
+ }
+ }
+ """ % PUBLIC_HAUS_DAO_ID.lower()
+
+ # Make request to subgraph
+        response = requests.post(
+            SUBGRAPH_URL,
+            json={"query": query},
+            timeout=30
+        )
+
+ # Check for errors
+ if response.status_code != 200:
+ logger.error(f"Error fetching DAO info: {response.text}")
+ raise Exception(f"Error fetching DAO info: {response.status_code}")
+
+ data = response.json()
+
+ # Check if DAO exists
+ if not data.get("data") or not data["data"].get("dao"):
+ logger.error(f"DAO not found: {PUBLIC_HAUS_DAO_ID}")
+ raise Exception(f"DAO not found: {PUBLIC_HAUS_DAO_ID}")
+
+ dao = data["data"]["dao"]
+ logger.info(f"Fetched DAO info: {dao.get('name')}")
+
+ return dao
+
+ def fetch_members(self) -> List[Dict[str, Any]]:
+ """
+ Fetch Public Haus members from the subgraph
+
+ Returns:
+ List of member data from the subgraph
+ """
+ # GraphQL query to fetch members
+ query = """
+ query {
+ dao(id: "%s") {
+ members {
+ id
+ memberAddress
+ shares
+ loot
+ createdAt
+ delegatingTo
+ delegateOfCount
+ delegateOf {
+ memberAddress
+ }
+ }
+ }
+ }
+ """ % PUBLIC_HAUS_DAO_ID.lower()
+
+ # Make request to subgraph
+        response = requests.post(
+            SUBGRAPH_URL,
+            json={"query": query},
+            timeout=30
+        )
+
+ # Check for errors
+ if response.status_code != 200:
+ logger.error(f"Error fetching members: {response.text}")
+ raise Exception(f"Error fetching members: {response.status_code}")
+
+ data = response.json()
+
+ # Check if DAO exists
+ if not data.get("data") or not data["data"].get("dao"):
+ logger.error(f"DAO not found: {PUBLIC_HAUS_DAO_ID}")
+ raise Exception(f"DAO not found: {PUBLIC_HAUS_DAO_ID}")
+
+ # Get members
+ members = data["data"]["dao"]["members"]
+ logger.info(f"Fetched {len(members)} members from subgraph")
+
+ return members
+
+ def process_member(self, member: Dict[str, Any]) -> Optional[str]:
+ """
+ Process a single member and import into database
+
+ Args:
+ member: Member data from the subgraph
+
+ Returns:
+ Contact ID if successful, None otherwise
+ """
+ # Extract member data
+ address = member["memberAddress"]
+ shares = int(member["shares"])
+ loot = int(member["loot"])
+ created_at = member["createdAt"]
+ delegating_to = member.get("delegatingTo")
+
+ # Skip if no address
+ if not address:
+ logger.warning(f"Member has no address: {member}")
+ return None
+
+ # Check if contact already exists
+ query = 'SELECT id, name, "ensName" FROM "Contact" WHERE "ethereumAddress" ILIKE %(address)s'
+ existing_contacts = self.db.execute_query(query, {"address": address})
+
+ contact_id = None
+
+ if existing_contacts:
+ # Use existing contact
+ contact_id = existing_contacts[0]["id"]
+ logger.info(f"Found existing contact {contact_id} for address {address}")
+ else:
+ # Create new contact
+ contact_data = {
+ "ethereumAddress": address,
+ "name": f"Public Haus Member {address[:8]}", # Default name
+ }
+
+ contact_id = self.db.upsert_contact(contact_data)
+ logger.info(f"Created new contact {contact_id} for address {address}")
+
+ # Add DAO membership
+ self.db.execute_update(
+ """
+ INSERT INTO "DaoMembership" ("contactId", "daoName", "shares", "loot", "delegatingTo")
+ VALUES (%(contact_id)s, %(dao_name)s, %(shares)s, %(loot)s, %(delegating_to)s)
+ ON CONFLICT ("contactId", "daoName")
+ DO UPDATE SET
+ "shares" = %(shares)s,
+ "loot" = %(loot)s,
+ "delegatingTo" = %(delegating_to)s,
+ "updatedAt" = NOW()
+ """,
+ {
+ "contact_id": contact_id,
+ "dao_name": "Public Haus",
+ "shares": shares,
+ "loot": loot,
+ "delegating_to": delegating_to
+ }
+ )
+
+ # Add note about membership
+ note_content = f"Public Haus DAO Member\nShares: {shares}\nLoot: {loot}\nJoined: {created_at}"
+ if delegating_to:
+ note_content += f"\nDelegating to: {delegating_to}"
+
+ self.db.add_note_to_contact(
+ contact_id=contact_id,
+ content=note_content,
+ source="Public Haus DAO Subgraph"
+ )
+
+ # Link to data source
+ self.db.link_contact_to_data_source(contact_id, self.data_source_id)
+
+ return contact_id
+
+ def run(self) -> int:
+ """
+ Run the importer
+
+ Returns:
+ Number of members imported
+ """
+ # Create a scraping job
+ job_id = self.db.create_scraping_job("Public Haus DAO Importer", "running")
+ logger.info(f"Created scraping job with ID: {job_id}")
+
+ try:
+ # Fetch DAO info
+ dao_info = self.fetch_dao_info()
+ logger.info(f"DAO Name: {dao_info.get('name')}")
+
+ # Fetch members
+ members = self.fetch_members()
+
+ if not members:
+ logger.info("No members found")
+ self.db.update_scraping_job(job_id, "completed")
+ return 0
+
+ # Process members
+ imported_count = 0
+ existing_count = 0
+
+ for member in members:
+ try:
+ contact_id = self.process_member(member)
+ if contact_id:
+ imported_count += 1
+ except Exception as e:
+ logger.exception(f"Error processing member {member.get('memberAddress')}: {e}")
+
+ # Add a small delay to avoid overwhelming the database
+ time.sleep(0.1)
+
+ # Complete the scraping job
+ self.db.update_scraping_job(
+ job_id,
+ "completed",
+ records_processed=len(members),
+ records_added=imported_count,
+ records_updated=existing_count
+ )
+
+ logger.info(f"Imported {imported_count} members out of {len(members)} processed")
+ return imported_count
+
+ except Exception as e:
+ # Update the scraping job with error
+ self.db.update_scraping_job(job_id, "failed", error_message=str(e))
+ logger.exception(f"Error importing members: {e}")
+ raise
+
+def main():
+ """Main function"""
+ try:
+ importer = PublicHausImporter()
+ imported_count = importer.run()
+ logger.info(f"Import completed successfully. Imported {imported_count} members.")
+ return 0
+ except Exception as e:
+ logger.exception(f"Error importing members: {e}")
+ return 1
+
+if __name__ == "__main__":
+ sys.exit(main())
\ No newline at end of file
diff --git a/scripts/moloch_dao/import_public_haus_members_web3.py b/scripts/moloch_dao/import_public_haus_members_web3.py
new file mode 100644
index 0000000..50b7f57
--- /dev/null
+++ b/scripts/moloch_dao/import_public_haus_members_web3.py
@@ -0,0 +1,333 @@
+#!/usr/bin/env python3
+"""
+Import Public Haus Members from Optimism Blockchain
+
+This script fetches members of Public Haus DAO by directly querying the Optimism blockchain,
+imports them into the database, and links them to the Public Haus DAO.
+
+Usage:
+ python import_public_haus_members_web3.py
+"""
+
+import os
+import sys
+import logging
+import json
+import time
+from typing import Dict, Any, List, Optional
+from web3 import Web3
+from dotenv import load_dotenv
+
+# Add parent directory to path to import utils
+sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+from utils.db_connector import DatabaseConnector
+from utils.logger import setup_logger
+
+# Load environment variables
+load_dotenv()
+
+# Setup logging
+logger = setup_logger("public_haus_importer")
+
+# Constants
+PUBLIC_HAUS_DAO_ID = "0xf5d6b637a9185707f52d40d452956ca49018247a" # Public Haus DAO ID on Optimism
+
+# Moloch DAO V3 ABI (partial, only what we need)
+MOLOCH_V3_ABI = [
+ {
+ "inputs": [{"internalType": "address", "name": "memberAddress", "type": "address"}],
+ "name": "members",
+ "outputs": [
+ {"internalType": "address", "name": "delegateKey", "type": "address"},
+ {"internalType": "uint256", "name": "shares", "type": "uint256"},
+ {"internalType": "uint256", "name": "loot", "type": "uint256"},
+ {"internalType": "bool", "name": "exists", "type": "bool"},
+ {"internalType": "uint256", "name": "highestIndexYesVote", "type": "uint256"},
+ {"internalType": "uint256", "name": "jailed", "type": "uint256"}
+ ],
+ "stateMutability": "view",
+ "type": "function"
+ },
+ {
+ "inputs": [],
+ "name": "getMemberAddresses",
+ "outputs": [{"internalType": "address[]", "name": "", "type": "address[]"}],
+ "stateMutability": "view",
+ "type": "function"
+ },
+ {
+ "inputs": [],
+ "name": "totalShares",
+ "outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}],
+ "stateMutability": "view",
+ "type": "function"
+ },
+ {
+ "inputs": [],
+ "name": "totalLoot",
+ "outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}],
+ "stateMutability": "view",
+ "type": "function"
+ }
+]
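+
+# NOTE: getMemberAddresses is assumed here; not every Moloch deployment
+# exposes it, and if the contract lacks the function the fetch_members call
+# fails (web3 typically raises BadFunctionCallOutput), in which case fall back
+# to the event- or token-based importers in this directory.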
+
+class PublicHausImporter:
+ """Importer for Public Haus members from Optimism blockchain"""
+
+ def __init__(self):
+ """Initialize the importer"""
+ # Initialize database
+ self.db = DatabaseConnector()
+
+ # Initialize Web3
+ optimism_rpc_url = os.getenv("OPTIMISM_RPC_URL")
+ if not optimism_rpc_url:
+ raise ValueError("OPTIMISM_RPC_URL environment variable not set")
+
+ self.web3 = Web3(Web3.HTTPProvider(optimism_rpc_url))
+ if not self.web3.is_connected():
+ raise ValueError("Failed to connect to Optimism RPC")
+
+ logger.info(f"Connected to Optimism: {self.web3.is_connected()}")
+
+ # Initialize contract
+ self.contract = self.web3.eth.contract(
+ address=self.web3.to_checksum_address(PUBLIC_HAUS_DAO_ID),
+ abi=MOLOCH_V3_ABI
+ )
+
+ # Register data source
+ self.data_source_id = self.register_data_source()
+
+ def register_data_source(self) -> str:
+ """Register the Public Haus data source in the database"""
+ return self.db.upsert_data_source(
+ name="Public Haus DAO Blockchain",
+ source_type="blockchain",
+ description="Public Haus DAO members from Optimism blockchain"
+ )
+
+ def fetch_dao_info(self) -> Dict[str, Any]:
+ """
+ Fetch Public Haus DAO information from the blockchain
+
+ Returns:
+ DAO information
+ """
+ try:
+ # Get total shares and loot
+ total_shares = self.contract.functions.totalShares().call()
+ total_loot = self.contract.functions.totalLoot().call()
+
+ dao_info = {
+ "id": PUBLIC_HAUS_DAO_ID,
+ "name": "Public Haus",
+ "totalShares": total_shares,
+ "totalLoot": total_loot
+ }
+
+ logger.info(f"Fetched DAO info: Public Haus")
+ logger.info(f"Total Shares: {total_shares}")
+ logger.info(f"Total Loot: {total_loot}")
+
+ return dao_info
+ except Exception as e:
+ logger.error(f"Error fetching DAO info: {e}")
+ raise
+
+ def fetch_members(self) -> List[Dict[str, Any]]:
+ """
+ Fetch Public Haus members from the blockchain
+
+ Returns:
+ List of member data
+ """
+ try:
+ # Get member addresses
+ member_addresses = self.contract.functions.getMemberAddresses().call()
+ logger.info(f"Fetched {len(member_addresses)} member addresses")
+
+ members = []
+
+ # Get member details
+ for address in member_addresses:
+ try:
+ member = self.contract.functions.members(address).call()
+
+ # Extract member data
+ delegate_key, shares, loot, exists, highest_index_yes_vote, jailed = member
+
+ # Skip if not a member
+ if not exists:
+ continue
+
+ # Create member object
+ member_data = {
+ "memberAddress": address,
+ "delegateKey": delegate_key,
+ "shares": shares,
+ "loot": loot,
+ "jailed": jailed > 0
+ }
+
+ members.append(member_data)
+ except Exception as e:
+ logger.error(f"Error fetching member {address}: {e}")
+
+ logger.info(f"Fetched {len(members)} members with details")
+
+ return members
+ except Exception as e:
+ logger.error(f"Error fetching members: {e}")
+ raise
+
+ def process_member(self, member: Dict[str, Any]) -> Optional[str]:
+ """
+ Process a single member and import into database
+
+ Args:
+ member: Member data from the blockchain
+
+ Returns:
+ Contact ID if successful, None otherwise
+ """
+ # Extract member data
+ address = member["memberAddress"]
+ shares = int(member["shares"])
+ loot = int(member["loot"])
+ delegate_key = member["delegateKey"]
+ jailed = member["jailed"]
+
+ # Skip if no address
+ if not address:
+ logger.warning(f"Member has no address: {member}")
+ return None
+
+ # Check if contact already exists
+ query = 'SELECT id, name, "ensName" FROM "Contact" WHERE "ethereumAddress" ILIKE %(address)s'
+ existing_contacts = self.db.execute_query(query, {"address": address})
+
+ contact_id = None
+
+ if existing_contacts:
+ # Use existing contact
+ contact_id = existing_contacts[0]["id"]
+ logger.info(f"Found existing contact {contact_id} for address {address}")
+ else:
+ # Create new contact
+ contact_data = {
+ "ethereumAddress": address,
+ "name": f"Public Haus Member {address[:8]}", # Default name
+ }
+
+ contact_id = self.db.upsert_contact(contact_data)
+ logger.info(f"Created new contact {contact_id} for address {address}")
+
+ # Add DAO membership
+ self.db.execute_update(
+ """
+ INSERT INTO "DaoMembership" ("contactId", "daoName", "shares", "loot", "delegatingTo")
+ VALUES (%(contact_id)s, %(dao_name)s, %(shares)s, %(loot)s, %(delegating_to)s)
+ ON CONFLICT ("contactId", "daoName")
+ DO UPDATE SET
+ "shares" = %(shares)s,
+ "loot" = %(loot)s,
+ "delegatingTo" = %(delegating_to)s,
+ "updatedAt" = NOW()
+ """,
+ {
+ "contact_id": contact_id,
+ "dao_name": "Public Haus",
+ "shares": shares,
+ "loot": loot,
+ "delegating_to": delegate_key if delegate_key != address else None
+ }
+ )
+
+ # Add note about membership
+ note_content = f"Public Haus DAO Member\nShares: {shares}\nLoot: {loot}"
+ if delegate_key != address:
+ note_content += f"\nDelegating to: {delegate_key}"
+ if jailed:
+ note_content += "\nJailed: Yes"
+
+ self.db.add_note_to_contact(
+ contact_id=contact_id,
+ content=note_content,
+ source="Public Haus DAO Blockchain"
+ )
+
+ # Link to data source
+ self.db.link_contact_to_data_source(contact_id, self.data_source_id)
+
+ return contact_id
+
+ def run(self) -> int:
+ """
+ Run the importer
+
+ Returns:
+ Number of members imported
+ """
+ # Create a scraping job
+ job_id = self.db.create_scraping_job("Public Haus DAO Importer", "running")
+ logger.info(f"Created scraping job with ID: {job_id}")
+
+ try:
+ # Fetch DAO info
+ dao_info = self.fetch_dao_info()
+
+ # Fetch members
+ members = self.fetch_members()
+
+ if not members:
+ logger.info("No members found")
+ self.db.update_scraping_job(job_id, "completed")
+ return 0
+
+ # Process members
+ imported_count = 0
+ existing_count = 0
+
+ for member in members:
+ try:
+ contact_id = self.process_member(member)
+ if contact_id:
+ imported_count += 1
+ except Exception as e:
+ logger.exception(f"Error processing member {member.get('memberAddress')}: {e}")
+
+ # Add a small delay to avoid overwhelming the database
+ time.sleep(0.1)
+
+ # Complete the scraping job
+ self.db.update_scraping_job(
+ job_id,
+ "completed",
+ records_processed=len(members),
+ records_added=imported_count,
+ records_updated=existing_count
+ )
+
+ logger.info(f"Imported {imported_count} members out of {len(members)} processed")
+ return imported_count
+
+ except Exception as e:
+ # Update the scraping job with error
+ self.db.update_scraping_job(job_id, "failed", error_message=str(e))
+ logger.exception(f"Error importing members: {e}")
+ raise
+
+def main():
+ """Main function"""
+ try:
+ importer = PublicHausImporter()
+ imported_count = importer.run()
+ logger.info(f"Import completed successfully. Imported {imported_count} members.")
+ return 0
+ except Exception as e:
+ logger.exception(f"Error importing members: {e}")
+ return 1
+
+if __name__ == "__main__":
+ sys.exit(main())
\ No newline at end of file
diff --git a/scripts/moloch_dao/import_public_haus_token_holders.py b/scripts/moloch_dao/import_public_haus_token_holders.py
new file mode 100755
index 0000000..2644e59
--- /dev/null
+++ b/scripts/moloch_dao/import_public_haus_token_holders.py
@@ -0,0 +1,392 @@
+#!/usr/bin/env python3
+"""
+Import Public Haus Members by Querying Token Holders
+
+This script fetches members of Public Haus DAO by querying holders of the shares token,
+imports them into the database, and links them to the Public Haus DAO.
+
+Usage:
+ python import_public_haus_token_holders.py
+"""
+
+import os
+import sys
+import logging
+import json
+import time
+from typing import Dict, Any, List, Optional
+from web3 import Web3
+from dotenv import load_dotenv
+
+# Add parent directory to path to import utils
+sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+from utils.db_connector import DatabaseConnector
+from utils.logger import setup_logger
+
+# Load environment variables
+load_dotenv()
+
+# Setup logging
+logger = setup_logger("public_haus_token_importer")
+
+# Constants
+PUBLIC_HAUS_DAO_ID = "0xf5d6b637a9185707f52d40d452956ca49018247a" # Public Haus DAO ID on Optimism
+SHARES_TOKEN_ADDRESS = "0x4950c436F69c8b4F68ed814A70a5E1D94495c4a7" # From the image, sharesToken address
+
+# ERC20 ABI (minimal for balance checking)
+ERC20_ABI = [
+ {
+ "constant": True,
+ "inputs": [{"name": "_owner", "type": "address"}],
+ "name": "balanceOf",
+ "outputs": [{"name": "balance", "type": "uint256"}],
+ "type": "function"
+ },
+ {
+ "constant": True,
+ "inputs": [],
+ "name": "totalSupply",
+ "outputs": [{"name": "", "type": "uint256"}],
+ "type": "function"
+ },
+ {
+ "constant": True,
+ "inputs": [],
+ "name": "name",
+ "outputs": [{"name": "", "type": "string"}],
+ "type": "function"
+ },
+ {
+ "constant": True,
+ "inputs": [],
+ "name": "symbol",
+ "outputs": [{"name": "", "type": "string"}],
+ "type": "function"
+ },
+ {
+ "constant": True,
+ "inputs": [],
+ "name": "decimals",
+ "outputs": [{"name": "", "type": "uint8"}],
+ "type": "function"
+ },
+ {
+ "constant": True,
+ "inputs": [{"name": "_owner", "type": "address"}, {"name": "_spender", "type": "address"}],
+ "name": "allowance",
+ "outputs": [{"name": "", "type": "uint256"}],
+ "type": "function"
+ },
+ {
+ "constant": False,
+ "inputs": [{"name": "_to", "type": "address"}, {"name": "_value", "type": "uint256"}],
+ "name": "transfer",
+ "outputs": [{"name": "", "type": "bool"}],
+ "type": "function"
+ }
+]
+
+# Transfer event ABI for querying token transfers
+TRANSFER_EVENT_ABI = [
+ {
+ "anonymous": False,
+ "inputs": [
+ {"indexed": True, "name": "from", "type": "address"},
+ {"indexed": True, "name": "to", "type": "address"},
+ {"indexed": False, "name": "value", "type": "uint256"}
+ ],
+ "name": "Transfer",
+ "type": "event"
+ }
+]
+
+class PublicHausTokenImporter:
+ """Importer for Public Haus members by querying token holders"""
+
+ def __init__(self):
+ """Initialize the importer"""
+ # Initialize database
+ self.db = DatabaseConnector()
+
+ # Initialize Web3
+ optimism_rpc_url = os.getenv("OPTIMISM_RPC_URL")
+ if not optimism_rpc_url:
+ raise ValueError("OPTIMISM_RPC_URL environment variable not set")
+
+ self.web3 = Web3(Web3.HTTPProvider(optimism_rpc_url))
+ if not self.web3.is_connected():
+ raise ValueError("Failed to connect to Optimism RPC")
+
+ logger.info(f"Connected to Optimism: {self.web3.is_connected()}")
+
+ # Initialize token contract
+ self.shares_token = self.web3.eth.contract(
+ address=self.web3.to_checksum_address(SHARES_TOKEN_ADDRESS),
+ abi=ERC20_ABI
+ )
+
+ # Register data source
+ self.data_source_id = self.register_data_source()
+
+ # Initialize scraping job
+ self.job_id = self.db.create_scraping_job(
+ source_name="Public Haus Token Holders",
+ status="running"
+ )
+ logger.info(f"Created scraping job with ID: {self.job_id}")
+
+ def register_data_source(self) -> str:
+ """Register the Public Haus data source in the database"""
+ return self.db.upsert_data_source(
+ name="Public Haus DAO Token Holders",
+ source_type="blockchain",
+ description="Public Haus DAO members identified by token holdings"
+ )
+
+ def get_token_info(self) -> Dict[str, Any]:
+ """
+ Get information about the shares token
+
+ Returns:
+ Token information
+ """
+ try:
+ name = self.shares_token.functions.name().call()
+ symbol = self.shares_token.functions.symbol().call()
+ decimals = self.shares_token.functions.decimals().call()
+ total_supply = self.shares_token.functions.totalSupply().call()
+
+ token_info = {
+ "address": SHARES_TOKEN_ADDRESS,
+ "name": name,
+ "symbol": symbol,
+ "decimals": decimals,
+ "totalSupply": total_supply
+ }
+
+ logger.info(f"Token info: {name} ({symbol})")
+ logger.info(f"Total supply: {total_supply / (10 ** decimals):.2f} {symbol}")
+
+ return token_info
+ except Exception as e:
+ logger.error(f"Error getting token info: {e}")
+ raise
+
+ def fetch_token_holders(self) -> List[Dict[str, Any]]:
+ """
+ Fetch holders of the shares token by analyzing transfer events
+
+ Returns:
+ List of token holders with their balances
+ """
+ try:
+ # Get token info
+ token_info = self.get_token_info()
+ decimals = token_info["decimals"]
+
+ # Get the latest block number
+ latest_block = self.web3.eth.block_number
+
+ # Calculate the starting block (approximately 6 months ago)
+ # Optimism has ~1 block every 2 seconds
+ blocks_per_day = 43200 # 86400 seconds / 2 seconds per block
+ start_block = max(0, latest_block - (blocks_per_day * 180)) # 180 days
+
+ logger.info(f"Fetching Transfer events from block {start_block} to {latest_block}")
+
+ # Create a contract instance with the Transfer event ABI
+ token_events = self.web3.eth.contract(
+ address=self.web3.to_checksum_address(SHARES_TOKEN_ADDRESS),
+ abi=TRANSFER_EVENT_ABI
+ )
+
+ # Get Transfer events
+            # web3.py v6 expects snake_case from_block/to_block kwargs
+            transfer_filter = token_events.events.Transfer.create_filter(
+                from_block=start_block,
+                to_block=latest_block
+            )
+
+ transfers = transfer_filter.get_all_entries()
+ logger.info(f"Found {len(transfers)} Transfer events")
+
+ # Track addresses that have received tokens
+ holder_addresses = set()
+
+            for transfer in transfers:
+                to_address = transfer.args.get('to')
+
+                # Track recipients only; transfers to the zero address are burns
+                if to_address != '0x0000000000000000000000000000000000000000':
+                    holder_addresses.add(to_address)
+
+ # Check current balances for all potential holders
+ holders = []
+ for address in holder_addresses:
+ try:
+ balance = self.shares_token.functions.balanceOf(address).call()
+
+ # Only include addresses with non-zero balance
+ if balance > 0:
+ holders.append({
+ "address": address,
+ "balance": balance,
+ "balanceFormatted": balance / (10 ** decimals),
+ "dao": "Public Haus"
+ })
+ except Exception as e:
+ logger.error(f"Error checking balance for {address}: {e}")
+
+ # Sort holders by balance (descending)
+ holders.sort(key=lambda x: x["balance"], reverse=True)
+
+ logger.info(f"Found {len(holders)} token holders with non-zero balance")
+ return holders
+
+ except Exception as e:
+ logger.error(f"Error fetching token holders: {e}")
+ raise
+
+ def process_holder(self, holder: Dict[str, Any]) -> Optional[str]:
+ """
+ Process a token holder and import into the database
+
+ Args:
+ holder: Token holder information
+
+ Returns:
+ Contact ID if successful, None otherwise
+ """
+ try:
+ # Extract holder information
+ address = holder["address"]
+ balance = holder["balance"]
+ balance_formatted = holder["balanceFormatted"]
+ dao_name = holder["dao"]
+
+ # Check if contact exists
+ query = 'SELECT id, name, "ensName" FROM "Contact" WHERE "ethereumAddress" ILIKE %(address)s'
+ existing_contacts = self.db.execute_query(query, {"address": address})
+
+ contact_id = None
+
+ if existing_contacts:
+ # Use existing contact
+ contact_id = existing_contacts[0]["id"]
+ logger.info(f"Found existing contact {contact_id} for address {address}")
+ else:
+ # Create new contact
+ contact_data = {
+ "ethereumAddress": address,
+ "name": f"Public Haus Member {address[:8]}", # Default name
+ }
+
+ contact_id = self.db.upsert_contact(contact_data)
+ logger.info(f"Created new contact {contact_id} for address {address}")
+
+ # Add DAO membership
+ self.db.execute_update(
+ """
+ INSERT INTO "DaoMembership" ("contactId", "daoName", "shares", "loot", "delegatingTo")
+ VALUES (%(contact_id)s, %(dao_name)s, %(shares)s, %(loot)s, %(delegating_to)s)
+ ON CONFLICT ("contactId", "daoName")
+ DO UPDATE SET
+ "shares" = %(shares)s,
+ "loot" = %(loot)s,
+ "updatedAt" = NOW()
+ """,
+ {
+ "contact_id": contact_id,
+ "dao_name": dao_name,
+ "shares": balance, # Use token balance as shares
+ "loot": 0, # We don't have loot information
+ "delegating_to": None
+ }
+ )
+
+ # Add note about membership
+ note_content = f"Public Haus DAO Member\nShares Token Balance: {balance_formatted}"
+
+ self.db.add_note_to_contact(
+ contact_id=contact_id,
+ content=note_content,
+ source="Public Haus DAO Token Holders"
+ )
+
+ # Add tag for the DAO
+ self.db.add_tag_to_contact(
+ contact_id=contact_id,
+ tag_name=dao_name
+ )
+
+ # Link to data source
+ self.db.link_contact_to_data_source(contact_id, self.data_source_id)
+
+ return contact_id
+
+ except Exception as e:
+ logger.error(f"Error processing holder {holder.get('address')}: {e}")
+ return None
+
+ def run(self) -> int:
+ """
+ Run the importer
+
+ Returns:
+ Number of holders imported
+ """
+ try:
+ # Fetch token holders
+ holders = self.fetch_token_holders()
+
+ if not holders:
+ logger.info("No token holders found")
+ self.db.update_scraping_job(self.job_id, "completed")
+ return 0
+
+ # Process holders
+ imported_count = 0
+ existing_count = 0
+
+ for holder in holders:
+ try:
+ contact_id = self.process_holder(holder)
+ if contact_id:
+ imported_count += 1
+ except Exception as e:
+ logger.exception(f"Error processing holder {holder.get('address')}: {e}")
+
+ # Add a small delay to avoid overwhelming the database
+ time.sleep(0.1)
+
+ # Complete the scraping job
+ self.db.update_scraping_job(
+ self.job_id,
+ "completed",
+ records_processed=len(holders),
+ records_added=imported_count,
+ records_updated=existing_count
+ )
+
+ logger.info(f"Imported {imported_count} holders out of {len(holders)} processed")
+ return imported_count
+
+ except Exception as e:
+ # Update the scraping job with error
+ self.db.update_scraping_job(self.job_id, "failed", error_message=str(e))
+ logger.exception(f"Error importing holders: {e}")
+ raise
+
+def main():
+ """Main function"""
+ try:
+ importer = PublicHausTokenImporter()
+ imported_count = importer.run()
+ logger.info(f"Import completed successfully. Imported {imported_count} token holders.")
+ return 0
+ except Exception as e:
+ logger.exception(f"Error importing token holders: {e}")
+ return 1
+
+if __name__ == "__main__":
+ sys.exit(main())
\ No newline at end of file
diff --git a/scripts/moloch_dao/import_public_haus_tokens.py b/scripts/moloch_dao/import_public_haus_tokens.py
new file mode 100755
index 0000000..283faa6
--- /dev/null
+++ b/scripts/moloch_dao/import_public_haus_tokens.py
@@ -0,0 +1,756 @@
+#!/usr/bin/env python3
+"""
+Import Public Haus Members by Querying Token Holders
+
+This script fetches members of Public Haus DAO by querying holders of both the voting (shares)
+and non-voting (loot) tokens, imports them into the database, and links them to the Public Haus DAO.
+
+Usage:
+ python import_public_haus_tokens.py
+"""
+
+import os
+import sys
+import logging
+import json
+import time
+import requests
+from typing import Dict, Any, List, Optional, Set
+from web3 import Web3
+from dotenv import load_dotenv
+
+# Add parent directory to path to import utils
+sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+from utils.db_connector import DatabaseConnector
+from utils.logger import setup_logger
+from utils.ens_resolver import ENSResolver
+
+# Load environment variables
+load_dotenv()
+
+# Setup logging
+logger = setup_logger("public_haus_tokens_importer")
+
+# Constants
+PUBLIC_HAUS_DAO_ID = "0xf5d6b637a9185707f52d40d452956ca49018247a" # Public Haus DAO ID on Optimism
+SHARES_TOKEN_ADDRESS = "0x4950c436F69c8b4F80f688edc814C5bA84Aa70f5" # Voting token (shares)
+LOOT_TOKEN_ADDRESS = "0xab6033E3EC2144FB279fe68dA92B7aC6a42Da6d8" # Non-voting token (loot)
+
+# Optimism Etherscan API
+OPTIMISM_ETHERSCAN_API_URL = "https://api-optimistic.etherscan.io/api"
+
+# ERC20 ABI (minimal for balance checking)
+ERC20_ABI = [
+ {
+ "constant": True,
+ "inputs": [{"name": "_owner", "type": "address"}],
+ "name": "balanceOf",
+ "outputs": [{"name": "balance", "type": "uint256"}],
+ "type": "function"
+ },
+ {
+ "constant": True,
+ "inputs": [],
+ "name": "totalSupply",
+ "outputs": [{"name": "", "type": "uint256"}],
+ "type": "function"
+ },
+ {
+ "constant": True,
+ "inputs": [],
+ "name": "name",
+ "outputs": [{"name": "", "type": "string"}],
+ "type": "function"
+ },
+ {
+ "constant": True,
+ "inputs": [],
+ "name": "symbol",
+ "outputs": [{"name": "", "type": "string"}],
+ "type": "function"
+ },
+ {
+ "constant": True,
+ "inputs": [],
+ "name": "decimals",
+ "outputs": [{"name": "", "type": "uint8"}],
+ "type": "function"
+ }
+]
+
+class PublicHausTokensImporter:
+ """Importer for Public Haus DAO members based on token holdings"""
+
+ def __init__(self):
+ """Initialize the importer"""
+ # Initialize database
+ self.db = DatabaseConnector()
+
+ # Initialize Web3
+ optimism_rpc_url = os.getenv("OPTIMISM_RPC_URL")
+ if not optimism_rpc_url:
+ raise ValueError("OPTIMISM_RPC_URL environment variable not set")
+
+ self.web3 = Web3(Web3.HTTPProvider(optimism_rpc_url))
+ if not self.web3.is_connected():
+ raise ValueError("Failed to connect to Optimism RPC")
+
+ logger.info(f"Connected to Optimism: {self.web3.is_connected()}")
+
+ # Initialize token contracts
+ self.shares_token = self.web3.eth.contract(
+ address=self.web3.to_checksum_address(SHARES_TOKEN_ADDRESS),
+ abi=ERC20_ABI
+ )
+
+ self.loot_token = self.web3.eth.contract(
+ address=self.web3.to_checksum_address(LOOT_TOKEN_ADDRESS),
+ abi=ERC20_ABI
+ )
+
+ # Get Etherscan API key
+ self.etherscan_api_key = os.getenv("OPTIMISM_ETHERSCAN_API_KEY")
+ if not self.etherscan_api_key:
+ logger.warning("OPTIMISM_ETHERSCAN_API_KEY not set, using API without key (rate limited)")
+ self.etherscan_api_key = ""
+ else:
+ logger.info("Using Optimism Etherscan API key")
+
+        # Initialize ENS resolver for Ethereum mainnet
+        # (read the RPC URL from the environment; never commit provider keys)
+        ethereum_rpc_url = os.getenv("ETHEREUM_RPC_URL")
+        self.ens_resolver = None
+        if not ethereum_rpc_url:
+            logger.warning("ETHEREUM_RPC_URL not set, ENS resolution disabled")
+        else:
+            self.eth_web3 = Web3(Web3.HTTPProvider(ethereum_rpc_url))
+            if not self.eth_web3.is_connected():
+                logger.warning("Failed to connect to Ethereum RPC for ENS resolution")
+            else:
+                logger.info("Connected to Ethereum for ENS resolution")
+                self.ens_resolver = ENSResolver(self.eth_web3)
+
+ # Register data source
+ self.data_source_id = self.register_data_source()
+
+ # Initialize scraping job
+ self.job_id = self.db.create_scraping_job(
+ source_name="Public Haus DAO Tokens",
+ status="running"
+ )
+ logger.info(f"Created scraping job with ID: {self.job_id}")
+
+ def register_data_source(self) -> str:
+ """Register the Public Haus data source in the database"""
+ return self.db.upsert_data_source(
+ name="Public Haus DAO Tokens",
+ source_type="blockchain",
+ description="Public Haus DAO members identified by token holdings"
+ )
+
+ def get_token_info(self, token_contract, token_name) -> Dict[str, Any]:
+ """
+ Get information about a token
+
+ Args:
+ token_contract: Web3 contract instance
+ token_name: Name of the token for logging
+
+ Returns:
+ Token information
+ """
+ try:
+ name = token_contract.functions.name().call()
+ symbol = token_contract.functions.symbol().call()
+ decimals = token_contract.functions.decimals().call()
+ total_supply = token_contract.functions.totalSupply().call()
+
+ token_info = {
+ "name": name,
+ "symbol": symbol,
+ "decimals": decimals,
+ "totalSupply": total_supply
+ }
+
+ logger.info(f"{token_name} info: {name} ({symbol})")
+ logger.info(f"{token_name} total supply: {total_supply / (10 ** decimals):.2f} {symbol}")
+
+ return token_info
+ except Exception as e:
+ logger.error(f"Error getting {token_name} info via Web3: {e}")
+
+ # Try Etherscan API as fallback
+ try:
+ token_address = token_contract.address
+ params = {
+ "module": "token",
+ "action": "tokeninfo",
+ "contractaddress": token_address,
+ "apikey": self.etherscan_api_key
+ }
+
+                response = requests.get(OPTIMISM_ETHERSCAN_API_URL, params=params, timeout=30)
+ data = response.json()
+
+ if data["status"] == "1":
+ token_info = data["result"][0]
+ logger.info(f"{token_name} info from Etherscan: {token_info.get('name')} ({token_info.get('symbol')})")
+ return {
+ "name": token_info.get("name", f"Public Haus {token_name}"),
+ "symbol": token_info.get("symbol", token_name.upper()),
+ "decimals": int(token_info.get("decimals", 18)),
+ "totalSupply": int(token_info.get("totalSupply", "0"))
+ }
+ except Exception as etherscan_error:
+ logger.error(f"Error getting {token_name} info via Etherscan: {etherscan_error}")
+
+ # Return default values if both methods fail
+ return {
+ "name": f"Public Haus {token_name}",
+ "symbol": token_name.upper(),
+ "decimals": 18,
+ "totalSupply": 0
+ }
+
+ def fetch_token_holders_via_etherscan(self, token_address, token_name, decimals) -> List[Dict[str, Any]]:
+ """
+ Fetch holders of a token using Etherscan API
+
+ Args:
+ token_address: Address of the token
+ token_name: Name of the token for logging
+ decimals: Token decimals
+
+ Returns:
+ List of token holders with their balances
+ """
+ try:
+ # Get token holders from Etherscan
+ params = {
+ "module": "token",
+ "action": "tokenholderlist",
+ "contractaddress": token_address,
+ "page": 1,
+ "offset": 100, # Get up to 100 holders
+ "apikey": self.etherscan_api_key
+ }
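+            # NOTE: the tokenholderlist action is assumed available; on the
+            # Etherscan family of APIs it is often restricted to paid keys,
+            # which is why fetch_all_token_holders falls back to transfer scans
+            # and direct balance checks when this returns nothing.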
+
+            response = requests.get(OPTIMISM_ETHERSCAN_API_URL, params=params, timeout=30)
+ data = response.json()
+
+ holders = []
+
+ if data["status"] == "1":
+ for holder in data["result"]:
+ address = holder["address"]
+ balance = int(holder["TokenHolderQuantity"])
+
+ # Skip zero balances
+ if balance > 0:
+ holders.append({
+ "address": address,
+ "balance": balance,
+ "balanceFormatted": balance / (10 ** decimals),
+ "tokenType": token_name
+ })
+
+ logger.info(f"Found {len(holders)} {token_name} holders with non-zero balance via Etherscan")
+ return holders
+ else:
+ logger.warning(f"Error getting {token_name} holders from Etherscan: {data.get('message')}")
+ return []
+
+ except Exception as e:
+ logger.error(f"Error fetching {token_name} holders via Etherscan: {e}")
+ return []
+
+ def fetch_token_transfers_via_etherscan(self, token_address, token_name, decimals) -> List[Dict[str, Any]]:
+ """
+ Fetch token transfers using Etherscan API and extract unique addresses
+
+ Args:
+ token_address: Address of the token
+ token_name: Name of the token for logging
+ decimals: Token decimals
+
+ Returns:
+ List of token holders with their balances
+ """
+ try:
+ # Get token transfers from Etherscan
+ params = {
+ "module": "account",
+ "action": "tokentx",
+ "contractaddress": token_address,
+ "page": 1,
+ "offset": 1000, # Get up to 1000 transfers
+ "sort": "desc",
+ "apikey": self.etherscan_api_key
+ }
+
+            response = requests.get(OPTIMISM_ETHERSCAN_API_URL, params=params, timeout=30)
+ data = response.json()
+
+ addresses = set()
+
+ if data["status"] == "1":
+ for tx in data["result"]:
+ addresses.add(tx["to"])
+ addresses.add(tx["from"])
+
+ # Remove zero address
+ if "0x0000000000000000000000000000000000000000" in addresses:
+ addresses.remove("0x0000000000000000000000000000000000000000")
+
+ # Create holder objects
+ holders = []
+ for address in addresses:
+ holders.append({
+ "address": address,
+ "balance": 1, # We don't know the actual balance
+ "balanceFormatted": 1,
+ "tokenType": token_name
+ })
+
+ logger.info(f"Found {len(holders)} unique addresses from {token_name} transfers via Etherscan")
+ return holders
+ else:
+ logger.warning(f"Error getting {token_name} transfers from Etherscan: {data.get('message')}")
+ return []
+
+ except Exception as e:
+ logger.error(f"Error fetching {token_name} transfers via Etherscan: {e}")
+ return []
+
+ def fetch_token_holders_via_web3(self, token_contract, token_name, decimals) -> List[Dict[str, Any]]:
+ """
+ Fetch holders of a token by checking balances of known addresses
+
+ Args:
+ token_contract: Web3 contract instance
+ token_name: Name of the token for logging
+ decimals: Token decimals
+
+ Returns:
+ List of token holders with their balances
+ """
+ try:
+ # Get the latest block number
+ latest_block = self.web3.eth.block_number
+
+ # Try to get some known addresses from recent transactions to the token contract
+ known_addresses = set()
+
+ # Check the last 100 blocks for transactions to the token contract
+ for block_num in range(max(0, latest_block - 100), latest_block + 1):
+ try:
+ block = self.web3.eth.get_block(block_num, full_transactions=True)
+ for tx in block.transactions:
+ if tx.to and tx.to.lower() == token_contract.address.lower():
+ known_addresses.add(tx['from'])
+ except Exception as e:
+ logger.warning(f"Error getting block {block_num}: {e}")
+ continue
+
+ # Add the DAO address as a known address
+ known_addresses.add(self.web3.to_checksum_address(PUBLIC_HAUS_DAO_ID))
+
+ # Check balances for known addresses
+ holders = []
+ for address in known_addresses:
+ try:
+ balance = token_contract.functions.balanceOf(address).call()
+
+ # Only include addresses with non-zero balance
+ if balance > 0:
+ holders.append({
+ "address": address,
+ "balance": balance,
+ "balanceFormatted": balance / (10 ** decimals),
+ "tokenType": token_name
+ })
+ except Exception as e:
+ logger.error(f"Error checking {token_name} balance for {address}: {e}")
+
+ logger.info(f"Found {len(holders)} {token_name} holders with non-zero balance via Web3")
+ return holders
+
+ except Exception as e:
+ logger.error(f"Error fetching {token_name} holders via Web3: {e}")
+ return []
+
+ def fetch_all_token_holders(self) -> List[Dict[str, Any]]:
+ """
+ Fetch holders of both shares and loot tokens
+
+ Returns:
+ List of token holders with their balances
+ """
+ all_holders = []
+
+ # Get token info
+ shares_info = self.get_token_info(self.shares_token, "Shares")
+ loot_info = self.get_token_info(self.loot_token, "Loot")
+
+ shares_decimals = shares_info["decimals"]
+ loot_decimals = loot_info["decimals"]
+
+ # Try different methods to get token holders
+
+ # 1. Try Etherscan tokenholderlist endpoint
+ shares_holders = self.fetch_token_holders_via_etherscan(
+ SHARES_TOKEN_ADDRESS, "Shares", shares_decimals
+ )
+ loot_holders = self.fetch_token_holders_via_etherscan(
+ LOOT_TOKEN_ADDRESS, "Loot", loot_decimals
+ )
+
+ # 2. If that fails, try getting transfers
+ if not shares_holders:
+ shares_holders = self.fetch_token_transfers_via_etherscan(
+ SHARES_TOKEN_ADDRESS, "Shares", shares_decimals
+ )
+
+ if not loot_holders:
+ loot_holders = self.fetch_token_transfers_via_etherscan(
+ LOOT_TOKEN_ADDRESS, "Loot", loot_decimals
+ )
+
+ # 3. If that fails, try Web3
+ if not shares_holders:
+ shares_holders = self.fetch_token_holders_via_web3(
+ self.shares_token, "Shares", shares_decimals
+ )
+
+ if not loot_holders:
+ loot_holders = self.fetch_token_holders_via_web3(
+ self.loot_token, "Loot", loot_decimals
+ )
+
+ # Combine holders
+ all_holders.extend(shares_holders)
+ all_holders.extend(loot_holders)
+
+ # If we still don't have any holders, use the DAO address itself
+ if not all_holders:
+ logger.warning("No token holders found, using DAO address as fallback")
+ all_holders.append({
+ "address": PUBLIC_HAUS_DAO_ID,
+ "balance": 1,
+ "balanceFormatted": 1,
+ "tokenType": "Fallback"
+ })
+
+ return all_holders
+
+ def merge_holders(self, holders: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
+ """
+ Merge holders by address, combining shares and loot
+
+ Args:
+ holders: List of token holders
+
+ Returns:
+ List of merged holders
+ """
+ merged = {}
+
+ for holder in holders:
+ address = holder["address"]
+ balance = holder["balance"]
+ token_type = holder["tokenType"]
+
+ if address not in merged:
+ merged[address] = {
+ "address": address,
+ "shares": 0,
+ "sharesFormatted": 0,
+ "loot": 0,
+ "lootFormatted": 0,
+ "dao": "Public Haus"
+ }
+
+ if token_type == "Shares":
+ merged[address]["shares"] = balance
+ merged[address]["sharesFormatted"] = holder["balanceFormatted"]
+ elif token_type == "Loot":
+ merged[address]["loot"] = balance
+ merged[address]["lootFormatted"] = holder["balanceFormatted"]
+ else:
+ # Fallback
+ merged[address]["shares"] = balance
+ merged[address]["sharesFormatted"] = holder["balanceFormatted"]
+
+ return list(merged.values())
+
+ def resolve_ens_name(self, address: str) -> Optional[str]:
+ """
+ Resolve ENS name for an Ethereum address
+
+ Args:
+ address: Ethereum address to resolve
+
+ Returns:
+ ENS name if found, None otherwise
+ """
+ if not self.ens_resolver:
+ return None
+
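+        # NOTE: reverse records are user-set; a stricter implementation would
+        # forward-resolve the returned name and confirm it maps back to the
+        # same address before trusting it (assumed not handled by ENSResolver).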
+ try:
+ ens_name = self.ens_resolver.get_ens_name(address)
+ if ens_name:
+ logger.info(f"Resolved ENS name for {address}: {ens_name}")
+ return ens_name
+ except Exception as e:
+ logger.error(f"Error resolving ENS name for {address}: {e}")
+ return None
+
+ def process_holder(self, holder: Dict[str, Any]) -> Optional[str]:
+ """
+ Process a token holder and import into the database
+
+ Args:
+ holder: Token holder information
+
+ Returns:
+ Contact ID if successful, None otherwise
+ """
+ try:
+ # Extract holder information
+ address = holder["address"]
+ shares = holder["shares"]
+ shares_formatted = holder["sharesFormatted"]
+ loot = holder["loot"]
+ loot_formatted = holder["lootFormatted"]
+ dao_name = holder["dao"]
+
+ # Check if contact exists
+ query = 'SELECT id, name, "ensName" FROM "Contact" WHERE "ethereumAddress" ILIKE %(address)s'
+ existing_contacts = self.db.execute_query(query, {"address": address})
+
+ contact_id = None
+
+ if existing_contacts:
+ # Use existing contact
+ contact_id = existing_contacts[0]["id"]
+ logger.info(f"Found existing contact {contact_id} for address {address}")
+ else:
+ # Create new contact
+ contact_id = self.db.upsert_contact(
+ ethereum_address=address,
+ ens_name=None
+ )
+ logger.info(f"Created new contact {contact_id} for address {address}")
+
+ # Add DAO membership
+ self.db.execute_update(
+ """
+ INSERT INTO "DaoMembership" (id, "contactId", "daoName", "daoType", "createdAt", "updatedAt")
+ VALUES (gen_random_uuid(), %(contact_id)s, %(dao_name)s, %(dao_type)s, NOW(), NOW())
+ ON CONFLICT ("contactId", "daoName")
+ DO UPDATE SET
+ "updatedAt" = NOW()
+ """,
+ {
+ "contact_id": contact_id,
+ "dao_name": dao_name,
+ "dao_type": "Moloch V3"
+ }
+ )
+
+ # Add note about membership with token holdings
+ note_content = f"Public Haus DAO Member\nShares: {shares_formatted}\nLoot: {loot_formatted}"
+
+ self.db.add_note_to_contact(
+ contact_id=contact_id,
+ content=note_content
+ )
+
+ # Add tags for the DAO and token holdings
+ self.db.add_tag_to_contact(
+ contact_id=contact_id,
+ tag_name=dao_name
+ )
+
+ if shares > 0:
+ self.db.add_tag_to_contact(
+ contact_id=contact_id,
+ tag_name=f"{dao_name} Voting Member"
+ )
+
+ if loot > 0:
+ self.db.add_tag_to_contact(
+ contact_id=contact_id,
+ tag_name=f"{dao_name} Non-Voting Member"
+ )
+
+ # Link to data source
+ self.db.link_contact_to_data_source(contact_id, self.data_source_id)
+
+ return contact_id
+
+ except Exception as e:
+ logger.error(f"Error processing holder {holder.get('address')}: {e}")
+ return None
+
+ def process_address(self, address: str, shares_balance: float = 0, loot_balance: float = 0) -> Optional[str]:
+ """
+ Process a single address, creating or updating a contact and linking to the DAO
+
+ Args:
+ address: Ethereum address
+ shares_balance: Balance of shares token
+ loot_balance: Balance of loot token
+
+ Returns:
+ Contact ID if successful, None otherwise
+ """
+ try:
+ # Check if contact already exists
+ query = 'SELECT id, name, "ensName" FROM "Contact" WHERE "ethereumAddress" ILIKE %(address)s'
+ result = self.db.execute_query(query, {"address": address})
+
+ contact_id = None
+
+ # Resolve ENS name if needed
+ ens_name = None
+ if not result or not result[0].get("ensName"):
+ ens_name = self.resolve_ens_name(address)
+
+ if result:
+ # Contact exists, get ID
+ contact_id = result[0]["id"]
+ logger.info(f"Found existing contact for {address}: {contact_id}")
+
+ # Update ENS name if we found one and it's not already set
+ if ens_name and not result[0].get("ensName"):
+ self.db.update_contact(contact_id, {"ensName": ens_name})
+ logger.info(f"Updated ENS name for contact {contact_id}: {ens_name}")
+ else:
+                # Create new contact
+
+ contact_id = self.db.upsert_contact(
+ ethereum_address=address,
+ ens_name=ens_name
+ )
+ logger.info(f"Created new contact for {address}: {contact_id}")
+
+ # Add DAO membership
+ self.db.add_dao_membership(
+ contact_id=contact_id,
+ dao_name="Public Haus",
+ dao_type="Moloch V3"
+ )
+
+ # Add note about membership with token holdings
+ note_content = f"Public Haus DAO Member\nShares: {shares_balance}\nLoot: {loot_balance}"
+
+ self.db.add_note_to_contact(
+ contact_id=contact_id,
+ content=note_content
+ )
+
+ # Add tags for the DAO and token holdings
+ self.db.add_tag_to_contact(
+ contact_id=contact_id,
+ tag_name="Public Haus"
+ )
+
+ if shares_balance > 0:
+ self.db.add_tag_to_contact(
+ contact_id=contact_id,
+ tag_name="Public Haus Voting Member"
+ )
+
+ if loot_balance > 0:
+ self.db.add_tag_to_contact(
+ contact_id=contact_id,
+ tag_name="Public Haus Non-Voting Member"
+ )
+
+ # Link to data source
+ self.db.link_contact_to_data_source(contact_id, self.data_source_id)
+
+ return contact_id
+
+ except Exception as e:
+ logger.error(f"Error processing address {address}: {e}")
+ return None
+
+ def run(self) -> int:
+ """
+ Run the importer
+
+ Returns:
+ Number of holders imported
+ """
+ try:
+ # Fetch token holders
+ all_holders = self.fetch_all_token_holders()
+
+ # Merge holders by address
+ merged_holders = self.merge_holders(all_holders)
+
+ if not merged_holders:
+ logger.info("No token holders found")
+ self.db.update_scraping_job(self.job_id, "completed")
+ return 0
+
+ # Process holders
+ imported_count = 0
+            existing_count = 0  # not separately tracked yet; reported to the job as 0
+
+ for holder in merged_holders:
+ try:
+ contact_id = self.process_address(
+ address=holder["address"],
+ shares_balance=holder["shares"],
+ loot_balance=holder["loot"]
+ )
+ if contact_id:
+ imported_count += 1
+ except Exception as e:
+ logger.exception(f"Error processing holder {holder.get('address')}: {e}")
+
+ # Add a small delay to avoid overwhelming the database
+ time.sleep(0.1)
+
+ # Complete the scraping job
+ self.db.update_scraping_job(
+ self.job_id,
+ "completed",
+ records_processed=len(merged_holders),
+ records_added=imported_count,
+ records_updated=existing_count
+ )
+
+ logger.info(f"Imported {imported_count} holders out of {len(merged_holders)} processed")
+
+ # Run ENS resolution for any contacts that don't have ENS names
+ if self.ens_resolver:
+ logger.info("Running ENS resolution for contacts without ENS names...")
+ from utils.resolve_ens_names import ENSResolver as BatchENSResolver
+ batch_resolver = BatchENSResolver()
+ batch_resolver.run(batch_size=50, delay_seconds=0.5)
+
+ return imported_count
+
+ except Exception as e:
+ # Update the scraping job with error
+ self.db.update_scraping_job(self.job_id, "failed", error_message=str(e))
+ logger.exception(f"Error importing holders: {e}")
+ raise
+
+def main():
+ """Main function"""
+ try:
+ importer = PublicHausTokensImporter()
+ imported_count = importer.run()
+ logger.info(f"Import completed successfully. Imported {imported_count} token holders.")
+ return 0
+ except Exception as e:
+ logger.exception(f"Error importing token holders: {e}")
+ return 1
+
+if __name__ == "__main__":
+ sys.exit(main())
\ No newline at end of file
diff --git a/scripts/moloch_dao/resolve_metacartel_ens.py b/scripts/moloch_dao/resolve_metacartel_ens.py
new file mode 100644
index 0000000..e10092e
--- /dev/null
+++ b/scripts/moloch_dao/resolve_metacartel_ens.py
@@ -0,0 +1,147 @@
+#!/usr/bin/env python3
+"""
+Resolve ENS Names for Meta Cartel Members
+
+This script resolves ENS names for Meta Cartel members imported from the CSV file.
+It updates the contacts with ENS names and profile information, and links them to the data source.
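+
+Usage:
+    python resolve_metacartel_ens.py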
+"""
+
+import os
+import sys
+import logging
+from typing import Dict, Any, List, Optional
+from web3 import Web3
+from dotenv import load_dotenv
+
+# Add parent directory to path to import utils
+sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+from utils.db_connector import DatabaseConnector
+from utils.ens_resolver import ENSResolver
+from utils.logger import setup_logger
+
+# Load environment variables
+load_dotenv()
+
+# Setup logging
+logger = setup_logger("metacartel_ens_resolver")
+
+class MetaCartelENSResolver:
+ """Resolver for ENS names of Meta Cartel members"""
+
+ def __init__(self):
+ """Initialize the resolver"""
+ # Initialize database
+ self.db = DatabaseConnector()
+
+ # Initialize Web3 and ENS resolver
+ alchemy_api_key = os.getenv("ALCHEMY_API_KEY")
+ if not alchemy_api_key:
+ raise ValueError("ALCHEMY_API_KEY not found in environment variables")
+
+ self.web3 = Web3(Web3.HTTPProvider(f"https://eth-mainnet.g.alchemy.com/v2/{alchemy_api_key}"))
+ self.ens_resolver = ENSResolver(self.web3)
+
+ # Get data source ID
+ self.data_source_id = self.get_data_source_id()
+
+ def get_data_source_id(self) -> str:
+ """Get the ID of the Meta Cartel DAO CSV data source"""
+ query = 'SELECT id FROM "DataSource" WHERE name = %(name)s'
+ result = self.db.execute_query(query, {"name": "Meta Cartel DAO CSV"})
+
+ if not result:
+ raise ValueError("Meta Cartel DAO CSV data source not found")
+
+ return result[0]["id"]
+
+ def get_metacartel_members(self) -> List[Dict[str, Any]]:
+ """Get all Meta Cartel members from the database"""
+ query = """
+ SELECT c.id, c."ethereumAddress", c."ensName"
+ FROM "Contact" c
+ JOIN "DaoMembership" dm ON c.id = dm."contactId"
+ WHERE dm."daoName" = 'Meta Cartel'
+ """
+ return self.db.execute_query(query)
+
+ def resolve_ens_for_member(self, contact_id: str, ethereum_address: str, current_ens: Optional[str] = None) -> bool:
+ """
+ Resolve ENS name for a member and update their profile.
+
+ Args:
+ contact_id: ID of the contact
+ ethereum_address: Ethereum address of the member
+ current_ens: Current ENS name of the member, if any
+
+ Returns:
+ True if ENS was resolved or already exists, False otherwise
+ """
+ # Skip if already has ENS
+ if current_ens:
+ logger.info(f"Contact {contact_id} already has ENS: {current_ens}")
+
+ # Still update profile from ENS if needed
+ self.ens_resolver.update_contact_from_ens(contact_id, current_ens)
+
+ # Link to data source
+ self.db.link_contact_to_data_source(contact_id, self.data_source_id)
+
+ return True
+
+ # Resolve ENS name
+ ens_name = self.ens_resolver.get_ens_name(ethereum_address)
+
+ if not ens_name:
+ logger.info(f"No ENS name found for {ethereum_address}")
+
+ # Still link to data source
+ self.db.link_contact_to_data_source(contact_id, self.data_source_id)
+
+ return False
+
+ # Update contact with ENS name
+ self.db.update_contact(contact_id, {"ensName": ens_name})
+ logger.info(f"Updated contact {contact_id} with ENS name: {ens_name}")
+
+ # Update profile from ENS
+ self.ens_resolver.update_contact_from_ens(contact_id, ens_name)
+
+ # Link to data source
+ self.db.link_contact_to_data_source(contact_id, self.data_source_id)
+
+ return True
+
+ def run(self):
+ """Run the resolver"""
+ logger.info("Starting ENS resolution for Meta Cartel members")
+
+ # Get all Meta Cartel members
+ members = self.get_metacartel_members()
+ logger.info(f"Found {len(members)} Meta Cartel members")
+
+ # Resolve ENS for each member
+ resolved_count = 0
+ for member in members:
+ if self.resolve_ens_for_member(
+ member["id"],
+ member["ethereumAddress"],
+ member.get("ensName")
+ ):
+ resolved_count += 1
+
+ logger.info(f"Resolved ENS for {resolved_count} out of {len(members)} members")
+ return resolved_count
+
+def main():
+ """Main function"""
+ try:
+ resolver = MetaCartelENSResolver()
+ resolved_count = resolver.run()
+ logger.info(f"ENS resolution completed successfully. Resolved {resolved_count} members.")
+ return 0
+ except Exception as e:
+ logger.exception(f"Error resolving ENS names: {e}")
+ return 1
+
+if __name__ == "__main__":
+ sys.exit(main())
\ No newline at end of file
diff --git a/scripts/moloch_dao/resolve_public_haus_ens.py b/scripts/moloch_dao/resolve_public_haus_ens.py
new file mode 100644
index 0000000..8f82fe1
--- /dev/null
+++ b/scripts/moloch_dao/resolve_public_haus_ens.py
@@ -0,0 +1,147 @@
+#!/usr/bin/env python3
+"""
+Resolve ENS Names for Public Haus Members
+
+This script resolves ENS names for Public Haus members imported from the DAOhaus API on Optimism mainnet.
+It updates the contacts with ENS names and profile information, and links them to the data source.
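+
+Usage:
+    python resolve_public_haus_ens.py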
+"""
+
+import os
+import sys
+import logging
+from typing import Dict, Any, List, Optional
+from web3 import Web3
+from dotenv import load_dotenv
+
+# Add parent directory to path to import utils
+sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+from utils.db_connector import DatabaseConnector
+from utils.ens_resolver import ENSResolver
+from utils.logger import setup_logger
+
+# Load environment variables
+load_dotenv()
+
+# Setup logging
+logger = setup_logger("public_haus_ens_resolver")
+
+class PublicHausENSResolver:
+ """Resolver for ENS names of Public Haus members"""
+
+ def __init__(self):
+ """Initialize the resolver"""
+ # Initialize database
+ self.db = DatabaseConnector()
+
+ # Initialize Web3 and ENS resolver
+ alchemy_api_key = os.getenv("ALCHEMY_API_KEY")
+ if not alchemy_api_key:
+ raise ValueError("ALCHEMY_API_KEY not found in environment variables")
+
+ self.web3 = Web3(Web3.HTTPProvider(f"https://eth-mainnet.g.alchemy.com/v2/{alchemy_api_key}"))
+ self.ens_resolver = ENSResolver(self.web3)
+
+ # Get data source ID
+ self.data_source_id = self.get_data_source_id()
+
+ def get_data_source_id(self) -> str:
+ """Get the ID of the Public Haus DAO API data source"""
+ query = 'SELECT id FROM "DataSource" WHERE name = %(name)s'
+ result = self.db.execute_query(query, {"name": "Public Haus DAO Tokens"})
+
+ if not result:
+ raise ValueError("Public Haus DAO Tokens data source not found")
+
+ return result[0]["id"]
+
+ def get_public_haus_members(self) -> List[Dict[str, Any]]:
+ """Get all Public Haus members from the database"""
+ query = """
+ SELECT c.id, c."ethereumAddress", c."ensName"
+ FROM "Contact" c
+ JOIN "DaoMembership" dm ON c.id = dm."contactId"
+ WHERE dm."daoName" = 'Public Haus'
+ """
+ return self.db.execute_query(query)
+
+ def resolve_ens_for_member(self, contact_id: str, ethereum_address: str, current_ens: Optional[str] = None) -> bool:
+ """
+ Resolve ENS name for a member and update their profile.
+
+ Args:
+ contact_id: ID of the contact
+ ethereum_address: Ethereum address of the member
+ current_ens: Current ENS name of the member, if any
+
+ Returns:
+ True if ENS was resolved or already exists, False otherwise
+ """
+ # Skip if already has ENS
+ if current_ens:
+ logger.info(f"Contact {contact_id} already has ENS: {current_ens}")
+
+ # Still update profile from ENS if needed
+ self.ens_resolver.update_contact_from_ens(contact_id, current_ens)
+
+ # Link to data source
+ self.db.link_contact_to_data_source(contact_id, self.data_source_id)
+
+ return True
+
+ # Resolve ENS name
+ ens_name = self.ens_resolver.get_ens_name(ethereum_address)
+
+ if not ens_name:
+ logger.info(f"No ENS name found for {ethereum_address}")
+
+ # Still link to data source
+ self.db.link_contact_to_data_source(contact_id, self.data_source_id)
+
+ return False
+
+ # Update contact with ENS name
+ self.db.update_contact(contact_id, {"ensName": ens_name})
+ logger.info(f"Updated contact {contact_id} with ENS name: {ens_name}")
+
+ # Update profile from ENS
+ self.ens_resolver.update_contact_from_ens(contact_id, ens_name)
+
+ # Link to data source
+ self.db.link_contact_to_data_source(contact_id, self.data_source_id)
+
+ return True
+
+ def run(self):
+ """Run the resolver"""
+ logger.info("Starting ENS resolution for Public Haus members")
+
+ # Get all Public Haus members
+ members = self.get_public_haus_members()
+ logger.info(f"Found {len(members)} Public Haus members")
+
+ # Resolve ENS for each member
+ resolved_count = 0
+ for member in members:
+ if self.resolve_ens_for_member(
+ member["id"],
+ member["ethereumAddress"],
+ member.get("ensName")
+ ):
+ resolved_count += 1
+
+ logger.info(f"Resolved ENS for {resolved_count} out of {len(members)} members")
+ return resolved_count
+
+def main():
+ """Main function"""
+ try:
+ resolver = PublicHausENSResolver()
+ resolved_count = resolver.run()
+ logger.info(f"ENS resolution completed successfully. Resolved {resolved_count} members.")
+ return 0
+ except Exception as e:
+ logger.exception(f"Error resolving ENS names: {e}")
+ return 1
+
+if __name__ == "__main__":
+ sys.exit(main())
\ No newline at end of file
diff --git a/scripts/nft_holders/public_nouns_scraper.py b/scripts/nft_holders/public_nouns_scraper.py
index bb0366b..dd475dd 100755
--- a/scripts/nft_holders/public_nouns_scraper.py
+++ b/scripts/nft_holders/public_nouns_scraper.py
@@ -46,7 +46,7 @@ class PublicNounsHoldersScraper:
"""
self.contract_address = Web3.to_checksum_address(contract_address)
self.collection_name = collection_name
- self.etherscan_api_key = os.getenv("ETHERSCAN_API_KEY")
+ self.etherscan_api_key = os.getenv("ETHEREUM_ETHERSCAN_API_KEY")
self.alchemy_api_key = os.getenv("ALCHEMY_API_KEY")
self.web3 = Web3(Web3.HTTPProvider(f"https://eth-mainnet.g.alchemy.com/v2/{self.alchemy_api_key}"))
self.db = DatabaseConnector()
@@ -54,7 +54,7 @@ class PublicNounsHoldersScraper:
# Validate API keys
if not self.etherscan_api_key:
- logger.error("ETHERSCAN_API_KEY not found in environment variables")
+ logger.error("ETHEREUM_ETHERSCAN_API_KEY not found in environment variables")
sys.exit(1)
if not self.alchemy_api_key:
logger.error("ALCHEMY_API_KEY not found in environment variables")
diff --git a/scripts/utils/fix_contact_issues.py b/scripts/utils/fix_contact_issues.py
new file mode 100644
index 0000000..d761aa8
--- /dev/null
+++ b/scripts/utils/fix_contact_issues.py
@@ -0,0 +1,368 @@
+#!/usr/bin/env python3
+"""
+Fix Contact Issues
+
+This script addresses two main issues with the contacts in the database:
+1. Removes prefixed names like "RG_0x..." and "MC_0x..." and replaces them with NULL
+ if they don't have ENS names
+2. Merges duplicate contacts that have the same Ethereum address but different records
+
+Usage:
+    python fix_contact_issues.py [--names-only | --duplicates-only]
+"""
+
+import os
+import sys
+import argparse
+from typing import Dict, List, Any, Optional
+from dotenv import load_dotenv
+
+# Add parent directory to path to import utils
+sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+from utils.db_connector import DatabaseConnector
+from utils.logger import setup_logger
+
+# Load environment variables
+load_dotenv()
+
+# Setup logging
+logger = setup_logger("fix_contact_issues")
+
+class ContactFixer:
+ """Fixes issues with contacts in the database."""
+
+ def __init__(self):
+ """Initialize the contact fixer."""
+ self.db = DatabaseConnector()
+
+ def fix_prefixed_names(self) -> int:
+ """
+ Replace prefixed names like "RG_0x..." and "MC_0x..." with NULL.
+ Only do this for contacts that don't have ENS names.
+
+ Returns:
+ Number of contacts fixed
+ """
+ logger.info("Fixing prefixed names...")
+
+ # Find contacts with prefixed names
+ query = """
+ SELECT id, name, "ethereumAddress", "ensName"
+ FROM "Contact"
+        WHERE (name LIKE 'RG\\_%' OR name LIKE 'MC\\_%' OR name LIKE 'ETH\\_%'
+               OR name LIKE '%\\_0x%')
+ AND "ensName" IS NULL
+ """
+
+ contacts = self.db.execute_query(query, {})
+ logger.info(f"Found {len(contacts)} contacts with prefixed names")
+
+ # Update contacts to set name to NULL
+ fixed_count = 0
+ for contact in contacts:
+ update_query = """
+ UPDATE "Contact"
+ SET name = NULL,
+ "updatedAt" = NOW()
+ WHERE id = %(contact_id)s
+ """
+
+ rows_updated = self.db.execute_update(update_query, {
+ "contact_id": contact["id"]
+ })
+
+ if rows_updated > 0:
+ logger.info(f"Cleared name for contact {contact['id']} (was '{contact['name']}')")
+ fixed_count += 1
+
+ logger.info(f"Fixed {fixed_count} contacts with prefixed names")
+ return fixed_count
+
+ def find_duplicate_contacts(self) -> List[Dict[str, Any]]:
+ """
+ Find contacts with duplicate Ethereum addresses.
+
+ Returns:
+            Rows of ("ethereumAddress", count) for addresses that have more than one contact
+ """
+ query = """
+ SELECT "ethereumAddress", COUNT(*) as count
+ FROM "Contact"
+ GROUP BY "ethereumAddress"
+ HAVING COUNT(*) > 1
+ ORDER BY COUNT(*) DESC
+ """
+
+ duplicates = self.db.execute_query(query, {})
+ logger.info(f"Found {len(duplicates)} Ethereum addresses with duplicate contacts")
+
+ return duplicates
+
+ def merge_duplicate_contacts(self) -> int:
+ """
+ Merge duplicate contacts by keeping the most complete record.
+
+ Returns:
+ Number of contacts merged
+ """
+ logger.info("Merging duplicate contacts...")
+
+ # Find duplicate contacts
+ duplicates = self.find_duplicate_contacts()
+
+ # For each duplicate address
+ total_merged = 0
+ for duplicate in duplicates:
+ eth_address = duplicate["ethereumAddress"]
+
+ # Get all contacts with this address
+ query = """
+ SELECT id, "ethereumAddress", "ensName", name, email,
+ twitter, discord, telegram, farcaster, "otherSocial",
+ "warpcastAddress", "ethereumAddress2", "createdAt"
+ FROM "Contact"
+ WHERE "ethereumAddress" = %(eth_address)s
+ ORDER BY "createdAt" ASC
+ """
+
+ contacts = self.db.execute_query(query, {"eth_address": eth_address})
+
+ if len(contacts) <= 1:
+ continue
+
+ # Determine the primary contact (the one to keep)
+ # We'll keep the oldest one (first created) as the primary
+ primary_contact = contacts[0]
+ primary_id = primary_contact["id"]
+
+ # Merge data from other contacts into the primary
+ for contact in contacts[1:]:
+ # Update primary contact with any non-null fields from this contact
+ update_data = {}
+ for field in ["ensName", "name", "email", "twitter", "discord",
+ "telegram", "farcaster", "otherSocial", "warpcastAddress",
+ "ethereumAddress2"]:
+ if contact[field] is not None and primary_contact[field] is None:
+ update_data[field] = contact[field]
+
+ if update_data:
+ self.db.update_contact(primary_id, update_data)
+ logger.info(f"Updated primary contact {primary_id} with data from {contact['id']}")
+
+ # Move all related data to the primary contact
+ self.move_related_data(contact["id"], primary_id)
+
+ # Delete the duplicate contact
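+                # (assumes related rows were copied above or that the schema cascades
+                # deletes; otherwise this DELETE fails on foreign-key references)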
+ delete_query = """
+ DELETE FROM "Contact"
+ WHERE id = %(contact_id)s
+ """
+
+ self.db.execute_update(delete_query, {"contact_id": contact["id"]})
+ logger.info(f"Deleted duplicate contact {contact['id']}")
+ total_merged += 1
+
+ logger.info(f"Merged {total_merged} duplicate contacts")
+ return total_merged
+
+ def move_related_data(self, from_id: str, to_id: str) -> None:
+ """
+ Move all related data from one contact to another.
+
+ Args:
+ from_id: ID of the contact to move data from
+ to_id: ID of the contact to move data to
+ """
+ # Move NFT holdings
+ self.move_nft_holdings(from_id, to_id)
+
+ # Move token holdings
+ self.move_token_holdings(from_id, to_id)
+
+ # Move DAO memberships
+ self.move_dao_memberships(from_id, to_id)
+
+ # Move notes
+ self.move_notes(from_id, to_id)
+
+ # Move tags
+ self.move_tags(from_id, to_id)
+
+ # Move contact sources
+ self.move_contact_sources(from_id, to_id)
+
+ def move_nft_holdings(self, from_id: str, to_id: str) -> None:
+ """
+ Move NFT holdings from one contact to another.
+
+ Args:
+ from_id: ID of the contact to move holdings from
+ to_id: ID of the contact to move holdings to
+ """
+ query = """
+ INSERT INTO "NftHolding" (
+ id, "contactId", "contractAddress", "tokenId", "collectionName",
+ "acquiredAt", "createdAt", "updatedAt"
+ )
+ SELECT
+ gen_random_uuid(), %(to_id)s, "contractAddress", "tokenId", "collectionName",
+ "acquiredAt", "createdAt", NOW()
+ FROM "NftHolding"
+ WHERE "contactId" = %(from_id)s
+ ON CONFLICT ("contactId", "contractAddress", "tokenId") DO NOTHING
+ """
+
+ self.db.execute_update(query, {"from_id": from_id, "to_id": to_id})
+
+ def move_token_holdings(self, from_id: str, to_id: str) -> None:
+ """
+ Move token holdings from one contact to another.
+
+ Args:
+ from_id: ID of the contact to move holdings from
+ to_id: ID of the contact to move holdings to
+ """
+ query = """
+ INSERT INTO "TokenHolding" (
+ id, "contactId", "contractAddress", "tokenSymbol", balance,
+ "lastUpdated", "createdAt", "updatedAt"
+ )
+ SELECT
+ gen_random_uuid(), %(to_id)s, "contractAddress", "tokenSymbol", balance,
+ "lastUpdated", "createdAt", NOW()
+ FROM "TokenHolding"
+ WHERE "contactId" = %(from_id)s
+ ON CONFLICT ("contactId", "contractAddress") DO NOTHING
+ """
+
+ self.db.execute_update(query, {"from_id": from_id, "to_id": to_id})
+
+ def move_dao_memberships(self, from_id: str, to_id: str) -> None:
+ """
+ Move DAO memberships from one contact to another.
+
+ Args:
+ from_id: ID of the contact to move memberships from
+ to_id: ID of the contact to move memberships to
+ """
+ query = """
+ INSERT INTO "DaoMembership" (
+ id, "contactId", "daoName", "daoType", "joinedAt", "createdAt", "updatedAt"
+ )
+ SELECT
+ gen_random_uuid(), %(to_id)s, "daoName", "daoType", "joinedAt", "createdAt", NOW()
+ FROM "DaoMembership"
+ WHERE "contactId" = %(from_id)s
+ ON CONFLICT ("contactId", "daoName") DO NOTHING
+ """
+
+ self.db.execute_update(query, {"from_id": from_id, "to_id": to_id})
+
+ def move_notes(self, from_id: str, to_id: str) -> None:
+ """
+ Move notes from one contact to another.
+
+ Args:
+ from_id: ID of the contact to move notes from
+ to_id: ID of the contact to move notes to
+ """
+ query = """
+ INSERT INTO "Note" (
+ id, "contactId", content, "createdAt", "updatedAt"
+ )
+ SELECT
+ gen_random_uuid(), %(to_id)s, content, "createdAt", NOW()
+ FROM "Note"
+ WHERE "contactId" = %(from_id)s
+ """
+
+ self.db.execute_update(query, {"from_id": from_id, "to_id": to_id})
+
+ def move_tags(self, from_id: str, to_id: str) -> None:
+ """
+ Move tags from one contact to another.
+
+ Args:
+ from_id: ID of the contact to move tags from
+ to_id: ID of the contact to move tags to
+ """
+ query = """
+ INSERT INTO "TagsOnContacts" (
+ "contactId", "tagId", "assignedAt"
+ )
+ SELECT
+ %(to_id)s, "tagId", "assignedAt"
+ FROM "TagsOnContacts"
+ WHERE "contactId" = %(from_id)s
+ ON CONFLICT ("contactId", "tagId") DO NOTHING
+ """
+
+ self.db.execute_update(query, {"from_id": from_id, "to_id": to_id})
+
+ def move_contact_sources(self, from_id: str, to_id: str) -> None:
+ """
+ Move contact sources from one contact to another.
+
+ Args:
+ from_id: ID of the contact to move sources from
+ to_id: ID of the contact to move sources to
+ """
+ # Check if the ContactSource table exists
+ query = """
+ SELECT EXISTS (
+ SELECT FROM information_schema.tables
+ WHERE table_name = 'ContactSource'
+ ) as exists
+ """
+
+ result = self.db.execute_query(query, {})
+ if not result or not result[0]["exists"]:
+ logger.info("ContactSource table does not exist, skipping contact sources migration")
+ return
+
+ query = """
+ INSERT INTO "ContactSource" (
+ id, "contactId", "dataSourceId", "createdAt", "updatedAt"
+ )
+ SELECT
+ gen_random_uuid(), %(to_id)s, "dataSourceId", "createdAt", NOW()
+ FROM "ContactSource"
+ WHERE "contactId" = %(from_id)s
+ ON CONFLICT ("contactId", "dataSourceId") DO NOTHING
+ """
+
+ self.db.execute_update(query, {"from_id": from_id, "to_id": to_id})
+
+ def run(self) -> None:
+ """Run all fixes."""
+ logger.info("Starting contact fixes...")
+
+ # Fix prefixed names
+ fixed_names = self.fix_prefixed_names()
+
+ # Merge duplicate contacts
+ merged_contacts = self.merge_duplicate_contacts()
+
+ logger.info(f"Completed fixes: {fixed_names} name prefixes removed, {merged_contacts} duplicate contacts merged")
+
+def main():
+ """Main entry point for the script."""
+ parser = argparse.ArgumentParser(description="Fix contact issues")
+ parser.add_argument("--names-only", action="store_true",
+ help="Only fix prefixed names, don't merge duplicates")
+ parser.add_argument("--duplicates-only", action="store_true",
+ help="Only merge duplicate contacts, don't fix names")
+
+ args = parser.parse_args()
+
+ fixer = ContactFixer()
+
+ if args.names_only:
+ fixer.fix_prefixed_names()
+ elif args.duplicates_only:
+ fixer.merge_duplicate_contacts()
+ else:
+ fixer.run()
+
+if __name__ == "__main__":
+ main()
\ No newline at end of file
diff --git a/scripts/utils/fix_contact_names.py b/scripts/utils/fix_contact_names.py
new file mode 100644
index 0000000..0421749
--- /dev/null
+++ b/scripts/utils/fix_contact_names.py
@@ -0,0 +1,80 @@
+#!/usr/bin/env python3
+"""
+Fix Contact Names
+
+This script removes prefixed names like "RG_0x...", "MC_0x...", and "ETH_0x..."
+and replaces them with NULL if they don't have ENS names.
+
+Usage:
+ python fix_contact_names.py
+"""
+
+import os
+import sys
+import argparse
+import psycopg2
+from psycopg2.extras import RealDictCursor
+from dotenv import load_dotenv
+
+# Load environment variables
+load_dotenv()
+
+def fix_contact_names():
+ """
+ Fix contact names by removing prefixed names and replacing with NULL.
+ """
+ # Get database connection string from environment variables
+    db_url = os.getenv("PYTHON_DATABASE_URL")
+    if not db_url:
+        database_url = os.getenv("DATABASE_URL")
+        if not database_url:
+            raise ValueError("Set PYTHON_DATABASE_URL or DATABASE_URL before running this script")
+        db_url = database_url.split("?schema=")[0]
+
+ # Connect to the database
+ conn = psycopg2.connect(db_url)
+ conn.autocommit = True
+
+ try:
+ with conn.cursor(cursor_factory=RealDictCursor) as cursor:
+ # Find contacts with prefixed names
+ query = """
+ SELECT id, name, "ethereumAddress", "ensName"
+ FROM "Contact"
+ WHERE (name LIKE 'RG\\_%' OR name LIKE 'MC\\_%' OR name LIKE 'ETH\\_%'
+ OR name LIKE '%\\_0x%' ESCAPE '\\')
+ AND "ensName" IS NULL
+ """
+
+ cursor.execute(query)
+ contacts = cursor.fetchall()
+ print(f"Found {len(contacts)} contacts with prefixed names")
+
+ # Update contacts to set name to NULL
+ fixed_count = 0
+ for contact in contacts:
+ update_query = """
+ UPDATE "Contact"
+ SET name = NULL,
+ "updatedAt" = NOW()
+ WHERE id = %s
+ """
+
+ cursor.execute(update_query, (contact["id"],))
+ rows_updated = cursor.rowcount
+
+ if rows_updated > 0:
+ print(f"Cleared name for contact {contact['id']} (was '{contact['name']}')")
+ fixed_count += 1
+
+ print(f"Fixed {fixed_count} contacts with prefixed names")
+
+ finally:
+ conn.close()
+
+def main():
+ """Main entry point for the script."""
+ parser = argparse.ArgumentParser(description="Fix contact names")
+ args = parser.parse_args()
+
+ fix_contact_names()
+
+if __name__ == "__main__":
+ main()
\ No newline at end of file
diff --git a/scripts/utils/merge_duplicate_contacts.py b/scripts/utils/merge_duplicate_contacts.py
new file mode 100644
index 0000000..149ad41
--- /dev/null
+++ b/scripts/utils/merge_duplicate_contacts.py
@@ -0,0 +1,224 @@
+#!/usr/bin/env python3
+"""
+Merge Duplicate Contacts
+
+This script finds and merges duplicate contacts in the database.
+Duplicates are defined as contacts with the same Ethereum address.
+
+Usage:
+ python merge_duplicate_contacts.py
+"""
+
+import os
+import sys
+import argparse
+import psycopg2
+from psycopg2.extras import RealDictCursor
+from dotenv import load_dotenv
+
+# Load environment variables
+load_dotenv()
+
+def merge_duplicate_contacts():
+ """
+ Find and merge duplicate contacts.
+ """
+ # Get database connection string from environment variables
+    db_url = os.getenv("PYTHON_DATABASE_URL")
+    if not db_url:
+        database_url = os.getenv("DATABASE_URL")
+        if not database_url:
+            raise ValueError("Set PYTHON_DATABASE_URL or DATABASE_URL before running this script")
+        db_url = database_url.split("?schema=")[0]
+
+ # Connect to the database
+ conn = psycopg2.connect(db_url)
+ conn.autocommit = True
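+    # NOTE: autocommit commits each statement immediately, so a failure mid-merge leaves
+    # partially copied rows behind; wrap the work in a transaction if merges must be atomic.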
+
+ try:
+ with conn.cursor(cursor_factory=RealDictCursor) as cursor:
+ # Find duplicate Ethereum addresses
+ query = """
+ SELECT "ethereumAddress", COUNT(*) as count
+ FROM "Contact"
+ GROUP BY "ethereumAddress"
+ HAVING COUNT(*) > 1
+ ORDER BY COUNT(*) DESC
+ """
+
+ cursor.execute(query)
+ duplicates = cursor.fetchall()
+ print(f"Found {len(duplicates)} Ethereum addresses with duplicate contacts")
+
+ # Process each set of duplicates
+ total_merged = 0
+ for duplicate in duplicates:
+ eth_address = duplicate["ethereumAddress"]
+
+ # Get all contacts with this address
+ query = """
+ SELECT id, "ethereumAddress", "ensName", name, email,
+ twitter, discord, telegram, farcaster, "otherSocial",
+ "warpcastAddress", "ethereumAddress2", "createdAt"
+ FROM "Contact"
+ WHERE "ethereumAddress" = %s
+ ORDER BY "createdAt" ASC
+ """
+
+ cursor.execute(query, (eth_address,))
+ contacts = cursor.fetchall()
+
+ # Skip if we somehow don't have duplicates
+ if len(contacts) <= 1:
+ continue
+
+ # Choose the oldest contact as the primary
+ primary_contact = contacts[0]
+ primary_id = primary_contact["id"]
+
+ print(f"Processing {len(contacts)} duplicates for address {eth_address}")
+ print(f" Primary contact: {primary_id}")
+
+ # Merge data from other contacts into the primary
+ for contact in contacts[1:]:
+ contact_id = contact["id"]
+
+ # Move NFT holdings
+ print(f" Moving NFT holdings from {contact_id} to {primary_id}")
+ query = """
+ INSERT INTO "NftHolding" (
+ id, "contactId", "contractAddress", "tokenId", "collectionName",
+ "acquiredAt", "createdAt", "updatedAt"
+ )
+ SELECT
+ gen_random_uuid(), %s, "contractAddress", "tokenId", "collectionName",
+ "acquiredAt", "createdAt", NOW()
+ FROM "NftHolding"
+ WHERE "contactId" = %s
+ ON CONFLICT ("contactId", "contractAddress", "tokenId") DO NOTHING
+ """
+ cursor.execute(query, (primary_id, contact_id))
+
+ # Move token holdings
+ print(f" Moving token holdings from {contact_id} to {primary_id}")
+ query = """
+ INSERT INTO "TokenHolding" (
+ id, "contactId", "contractAddress", "tokenSymbol", balance,
+ "lastUpdated", "createdAt", "updatedAt"
+ )
+ SELECT
+ gen_random_uuid(), %s, "contractAddress", "tokenSymbol", balance,
+ "lastUpdated", "createdAt", NOW()
+ FROM "TokenHolding"
+ WHERE "contactId" = %s
+ ON CONFLICT ("contactId", "contractAddress") DO NOTHING
+ """
+ cursor.execute(query, (primary_id, contact_id))
+
+ # Move DAO memberships
+ print(f" Moving DAO memberships from {contact_id} to {primary_id}")
+ query = """
+ INSERT INTO "DaoMembership" (
+ id, "contactId", "daoName", "daoType", "joinedAt", "createdAt", "updatedAt"
+ )
+ SELECT
+ gen_random_uuid(), %s, "daoName", "daoType", "joinedAt", "createdAt", NOW()
+ FROM "DaoMembership"
+ WHERE "contactId" = %s
+ ON CONFLICT ("contactId", "daoName") DO NOTHING
+ """
+ cursor.execute(query, (primary_id, contact_id))
+
+ # Move notes
+ print(f" Moving notes from {contact_id} to {primary_id}")
+ query = """
+ INSERT INTO "Note" (
+ id, "contactId", content, "createdAt", "updatedAt"
+ )
+ SELECT
+ gen_random_uuid(), %s, content, "createdAt", NOW()
+ FROM "Note"
+ WHERE "contactId" = %s
+ """
+ cursor.execute(query, (primary_id, contact_id))
+
+ # Move tags
+ print(f" Moving tags from {contact_id} to {primary_id}")
+ query = """
+ INSERT INTO "TagsOnContacts" (
+ "contactId", "tagId", "assignedAt"
+ )
+ SELECT
+ %s, "tagId", "assignedAt"
+ FROM "TagsOnContacts"
+ WHERE "contactId" = %s
+ ON CONFLICT ("contactId", "tagId") DO NOTHING
+ """
+ cursor.execute(query, (primary_id, contact_id))
+
+ # Check if ContactSource table exists
+ query = """
+ SELECT EXISTS (
+ SELECT FROM information_schema.tables
+ WHERE table_name = 'ContactSource'
+ ) as exists
+ """
+ cursor.execute(query)
+ result = cursor.fetchone()
+
+ # Move contact sources if table exists
+ if result and result["exists"]:
+ print(f" Moving contact sources from {contact_id} to {primary_id}")
+ query = """
+ INSERT INTO "ContactSource" (
+ id, "contactId", "dataSourceId", "createdAt", "updatedAt"
+ )
+ SELECT
+ gen_random_uuid(), %s, "dataSourceId", "createdAt", NOW()
+ FROM "ContactSource"
+ WHERE "contactId" = %s
+ ON CONFLICT ("contactId", "dataSourceId") DO NOTHING
+ """
+ cursor.execute(query, (primary_id, contact_id))
+
+ # Update primary contact with non-null values from this contact
+ update_fields = []
+ update_values = []
+
+ for field in ["ensName", "name", "email", "twitter", "discord",
+ "telegram", "farcaster", "otherSocial", "warpcastAddress",
+ "ethereumAddress2"]:
+ if contact[field] is not None and primary_contact[field] is None:
+ update_fields.append(f'"{field}" = %s')
+ update_values.append(contact[field])
+ print(f" Updating primary contact {field} to {contact[field]}")
+
+ if update_fields:
+ update_values.append(primary_id)
+ query = f"""
+ UPDATE "Contact"
+ SET {', '.join(update_fields)}, "updatedAt" = NOW()
+ WHERE id = %s
+ """
+ cursor.execute(query, update_values)
+
+ # Delete the duplicate contact
+ print(f" Deleting duplicate contact {contact_id}")
+ query = """
+ DELETE FROM "Contact"
+ WHERE id = %s
+ """
+ cursor.execute(query, (contact_id,))
+ total_merged += 1
+
+ print(f"Merged {total_merged} duplicate contacts")
+
+ finally:
+ conn.close()
+
+def main():
+ """Main entry point for the script."""
+ parser = argparse.ArgumentParser(description="Merge duplicate contacts")
+ args = parser.parse_args()
+
+ merge_duplicate_contacts()
+
+if __name__ == "__main__":
+ main()
\ No newline at end of file
diff --git a/scripts/utils/resolve_all_ens.py b/scripts/utils/resolve_all_ens.py
new file mode 100644
index 0000000..ea597c8
--- /dev/null
+++ b/scripts/utils/resolve_all_ens.py
@@ -0,0 +1,224 @@
+#!/usr/bin/env python3
+"""
+Resolve ENS Names and Contact Information for All Contacts
+
+This script resolves ENS names and additional contact information for all contacts
+in the database that have Ethereum addresses. It uses the existing ENS resolver utility
+to fetch ENS names and text records containing social profiles and contact information.
+
+Usage:
+    python resolve_all_ens.py [--all] [--batch-size BATCH_SIZE] [--delay DELAY]
+"""
+
+import os
+import sys
+import logging
+import time
+import argparse
+from typing import Dict, Any, List, Optional, Tuple
+from web3 import Web3
+from dotenv import load_dotenv
+
+# Add parent directory to path to import utils
+sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+from utils.db_connector import DatabaseConnector
+from utils.ens_resolver import ENSResolver
+from utils.logger import setup_logger
+
+# Load environment variables
+load_dotenv()
+
+# Setup logging
+logger = setup_logger("all_ens_resolver")
+
+class AllContactsENSResolver:
+ """Resolver for ENS names and contact information for all contacts"""
+
+ def __init__(self):
+ """Initialize the resolver"""
+ # Initialize database
+ self.db = DatabaseConnector()
+
+ # Initialize Web3 and ENS resolver
+ alchemy_api_key = os.getenv("ALCHEMY_API_KEY")
+ if not alchemy_api_key:
+ raise ValueError("ALCHEMY_API_KEY not found in environment variables")
+
+ self.web3 = Web3(Web3.HTTPProvider(f"https://eth-mainnet.g.alchemy.com/v2/{alchemy_api_key}"))
+ self.ens_resolver = ENSResolver(self.web3)
+
+ # Register data source
+ self.data_source_id = self.register_data_source()
+
+ def register_data_source(self) -> str:
+ """Register the ENS data source in the database"""
+ return self.db.upsert_data_source(
+ name="ENS Resolver",
+ source_type="blockchain",
+ description="ENS names and profile information resolved from Ethereum addresses"
+ )
+
+ def get_contacts_without_ens(self) -> List[Dict[str, Any]]:
+ """Get all contacts that have an Ethereum address but no ENS name"""
+ query = """
+ SELECT id, "ethereumAddress", name
+ FROM "Contact"
+ WHERE "ethereumAddress" IS NOT NULL
+ AND "ensName" IS NULL
+ """
+
+ result = self.db.execute_query(query)
+ logger.info(f"Found {len(result)} contacts without ENS names")
+ return result
+
+ def get_all_contacts_with_eth_address(self) -> List[Dict[str, Any]]:
+ """Get all contacts that have an Ethereum address"""
+ query = """
+ SELECT id, "ethereumAddress", "ensName", name, twitter, discord, telegram, email, farcaster
+ FROM "Contact"
+ WHERE "ethereumAddress" IS NOT NULL
+ """
+
+ result = self.db.execute_query(query)
+ logger.info(f"Found {len(result)} contacts with Ethereum addresses")
+ return result
+
+ def process_contact(self, contact: Dict[str, Any]) -> Tuple[bool, bool]:
+ """
+ Process a single contact to resolve ENS name and contact info
+
+ Args:
+ contact: Contact data from the database
+
+ Returns:
+ Tuple of (ens_updated, info_updated) booleans
+ """
+ contact_id = contact["id"]
+ address = contact["ethereumAddress"]
+ current_ens = contact.get("ensName")
+
+ ens_updated = False
+ info_updated = False
+
+ # Skip if no address
+ if not address:
+ return ens_updated, info_updated
+
+ # Resolve ENS name if not already set
+ if not current_ens:
+ ens_name = self.ens_resolver.get_ens_name(address)
+ if ens_name:
+ # Update contact with ENS name
+ self.db.update_contact(contact_id, {"ensName": ens_name})
+ logger.info(f"Updated contact {contact_id} with ENS name: {ens_name}")
+ current_ens = ens_name
+ ens_updated = True
+
+ # Get contact info from ENS text records if we have an ENS name
+ if current_ens:
+ # Update profile from ENS
+ self.ens_resolver.update_contact_from_ens(contact_id, current_ens)
+ info_updated = True
+
+ # Link to data source
+ self.db.link_contact_to_data_source(contact_id, self.data_source_id)
+
+ return ens_updated, info_updated
+
+ def run(self, batch_size: int = 50, delay_seconds: float = 0.5, resolve_all: bool = False):
+ """
+ Run the resolver for contacts
+
+ Args:
+ batch_size: Number of contacts to process in each batch
+ delay_seconds: Delay between processing contacts
+ resolve_all: Whether to process all contacts or just those without ENS names
+
+ Returns:
+ Tuple of (ens_updated_count, info_updated_count)
+ """
+ # Create a scraping job
+ job_id = self.db.create_scraping_job("ENS Resolver", "running")
+ logger.info(f"Created scraping job with ID: {job_id}")
+
+ try:
+ if resolve_all:
+ contacts = self.get_all_contacts_with_eth_address()
+ else:
+ contacts = self.get_contacts_without_ens()
+
+ if not contacts:
+ logger.info("No contacts found to process")
+ self.db.update_scraping_job(job_id, "completed")
+ return 0, 0
+
+ ens_updated_count = 0
+ info_updated_count = 0
+
+ # Process in batches to avoid rate limiting
+ for i in range(0, len(contacts), batch_size):
+ batch = contacts[i:i+batch_size]
+ logger.info(f"Processing batch {i//batch_size + 1}/{(len(contacts) + batch_size - 1)//batch_size}")
+
+                batch_updated = 0
+                for contact in batch:
+                    ens_updated, info_updated = self.process_contact(contact)
+
+                    if ens_updated:
+                        ens_updated_count += 1
+                    if info_updated:
+                        info_updated_count += 1
+                    if ens_updated or info_updated:
+                        batch_updated += 1
+
+                    # Add a small delay to avoid rate limiting
+                    time.sleep(delay_seconds)
+
+                # Record this batch's progress without reprocessing any contacts
+                self.db.update_scraping_job(
+                    job_id,
+                    "running",
+                    records_processed=i + len(batch),
+                    records_updated=batch_updated
+                )
+
+ # Complete the scraping job
+ self.db.update_scraping_job(
+ job_id,
+ "completed",
+ records_processed=len(contacts),
+ records_added=ens_updated_count,
+ records_updated=info_updated_count
+ )
+
+ logger.info(f"Updated ENS names for {ens_updated_count} contacts and contact info for {info_updated_count} contacts out of {len(contacts)} processed")
+ return ens_updated_count, info_updated_count
+
+ except Exception as e:
+ # Update the scraping job with error
+ self.db.update_scraping_job(job_id, "failed", error_message=str(e))
+ logger.exception(f"Error resolving ENS names: {e}")
+ raise
+
+def main():
+ """Main function"""
+ try:
+ parser = argparse.ArgumentParser(description="Resolve ENS names and contact information for all contacts")
+ parser.add_argument("--all", action="store_true", help="Process all contacts with Ethereum addresses, not just those without ENS names")
+ parser.add_argument("--batch-size", type=int, default=50, help="Number of contacts to process in each batch")
+ parser.add_argument("--delay", type=float, default=0.5, help="Delay in seconds between processing contacts")
+
+ args = parser.parse_args()
+
+ resolver = AllContactsENSResolver()
+ ens_count, info_count = resolver.run(
+ batch_size=args.batch_size,
+ delay_seconds=args.delay,
+ resolve_all=args.all
+ )
+
+ logger.info(f"ENS resolution completed successfully. Updated {ens_count} ENS names and {info_count} contact info records.")
+ return 0
+ except Exception as e:
+ logger.exception(f"Error running ENS resolver: {e}")
+ return 1
+
+if __name__ == "__main__":
+ sys.exit(main())
\ No newline at end of file
diff --git a/scripts/utils/resolve_ens_names.py b/scripts/utils/resolve_ens_names.py
new file mode 100644
index 0000000..801bfb7
--- /dev/null
+++ b/scripts/utils/resolve_ens_names.py
@@ -0,0 +1,361 @@
+#!/usr/bin/env python3
+"""
+Resolve ENS Names and Contact Information
+
+This script fetches ENS names and additional contact information for Ethereum addresses
+in the database. It uses the Web3 library to query the Ethereum blockchain for ENS records
+and text records containing social profiles and contact information.
+
+Usage:
+    python resolve_ens_names.py [--all] [--batch-size N] [--delay SECONDS]
+"""
+
+import os
+import sys
+import logging
+import time
+from typing import List, Dict, Any, Optional, Tuple
+from web3 import Web3
+from dotenv import load_dotenv
+
+# Add parent directory to path to import utils
+sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+from utils.db_connector import DatabaseConnector
+from utils.logger import setup_logger
+
+# Load environment variables
+load_dotenv()
+
+# Setup logging
+logger = setup_logger("ens_resolver")
+
+class ENSResolver:
+ """Resolver for ENS names and contact information from Ethereum addresses"""
+
+ # ENS text record keys to check
+ TEXT_RECORDS = [
+ "name", # Display name
+ "email", # Email address
+ "url", # Website URL
+ "avatar", # Avatar URL
+ "description", # Bio/description
+ "notice", # Notice
+ "keywords", # Keywords/tags
+ "com.twitter", # Twitter handle
+ "com.github", # GitHub username
+ "org.telegram", # Telegram username
+ "com.discord", # Discord username
+ "com.reddit", # Reddit username
+ "xyz.farcaster", # Farcaster handle
+ "social.picture", # Profile picture
+ "vnd.twitter", # Alternative Twitter
+ "vnd.github", # Alternative GitHub
+ ]
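+    # Illustrative only: for a name with text records set, get_ens_text_records() might
+    # return e.g. {"com.twitter": "some_handle", "url": "https://example.org"}
+    # (hypothetical values; most names have few or no records).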
+
+ def __init__(self):
+ """Initialize the resolver"""
+ # Initialize database
+ self.db = DatabaseConnector()
+
+ # Initialize Web3 connection
+        # Prefer Infura when configured; otherwise fall back to the Alchemy key used elsewhere
+        infura_key = os.getenv("INFURA_API_KEY")
+        alchemy_key = os.getenv("ALCHEMY_API_KEY")
+        if infura_key:
+            provider_url = f"https://mainnet.infura.io/v3/{infura_key}"
+        elif alchemy_key:
+            provider_url = f"https://eth-mainnet.g.alchemy.com/v2/{alchemy_key}"
+        else:
+            raise ValueError("INFURA_API_KEY or ALCHEMY_API_KEY environment variable is required")
+
+        self.w3 = Web3(Web3.HTTPProvider(provider_url))
+ if not self.w3.is_connected():
+ raise ConnectionError("Failed to connect to Ethereum node")
+
+ logger.info(f"Connected to Ethereum node: {self.w3.client_version}")
+
+ def get_contacts_without_ens(self) -> List[Dict[str, Any]]:
+ """Get all contacts that have an Ethereum address but no ENS name"""
+ query = """
+ SELECT id, "ethereumAddress", name
+ FROM "Contact"
+ WHERE "ethereumAddress" IS NOT NULL
+ AND "ensName" IS NULL
+ """
+
+ result = self.db.execute_query(query)
+ logger.info(f"Found {len(result)} contacts without ENS names")
+ return result
+
+ def get_all_contacts_with_eth_address(self) -> List[Dict[str, Any]]:
+ """Get all contacts that have an Ethereum address to check for additional info"""
+ query = """
+ SELECT id, "ethereumAddress", "ensName", name, twitter, discord, telegram, email, farcaster
+ FROM "Contact"
+ WHERE "ethereumAddress" IS NOT NULL
+ """
+
+ result = self.db.execute_query(query)
+ logger.info(f"Found {len(result)} contacts with Ethereum addresses")
+ return result
+
+ def resolve_ens_name(self, address: str) -> Optional[str]:
+ """Resolve ENS name for an Ethereum address"""
+ try:
+ # Ensure the address is properly formatted
+ checksum_address = self.w3.to_checksum_address(address)
+
+ # Try to get the ENS name
+ ens_name = self.w3.ens.name(checksum_address)
+
+ # If we got a name, verify it resolves back to the same address
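+            # (recent web3.py releases already perform this forward check inside ens.name();
+            # repeating it here keeps the guarantee explicit)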
+ if ens_name:
+ resolved_address = self.w3.ens.address(ens_name)
+ if resolved_address and resolved_address.lower() == address.lower():
+ logger.info(f"Resolved ENS name for {address}: {ens_name}")
+ return ens_name
+ else:
+ logger.warning(f"ENS name {ens_name} for {address} resolves to different address {resolved_address}")
+
+ return None
+ except Exception as e:
+ logger.error(f"Error resolving ENS name for {address}: {e}")
+ return None
+
+ def get_ens_text_records(self, ens_name: str) -> Dict[str, str]:
+ """Get text records for an ENS name"""
+ text_records = {}
+
+ try:
+ for key in self.TEXT_RECORDS:
+ try:
+ value = self.w3.ens.get_text(ens_name, key)
+ if value:
+ text_records[key] = value
+ except Exception as e:
+ logger.debug(f"Error getting text record '{key}' for {ens_name}: {e}")
+
+ if text_records:
+ logger.info(f"Found {len(text_records)} text records for {ens_name}: {', '.join(text_records.keys())}")
+
+ return text_records
+ except Exception as e:
+ logger.error(f"Error getting text records for {ens_name}: {e}")
+ return {}
+
+ def map_text_records_to_contact_fields(self, text_records: Dict[str, str]) -> Dict[str, str]:
+ """Map ENS text records to Contact model fields"""
+ contact_fields = {}
+
+ # Map known fields
+ if "name" in text_records:
+ contact_fields["name"] = text_records["name"]
+
+ if "email" in text_records:
+ contact_fields["email"] = text_records["email"]
+
+ # Twitter can be in different text records
+ for twitter_key in ["com.twitter", "vnd.twitter"]:
+ if twitter_key in text_records:
+ twitter = text_records[twitter_key]
+ # Remove @ if present
+ if twitter.startswith("@"):
+ twitter = twitter[1:]
+ contact_fields["twitter"] = twitter
+ break
+
+ # Discord
+ if "com.discord" in text_records:
+ contact_fields["discord"] = text_records["com.discord"]
+
+ # Telegram
+ if "org.telegram" in text_records:
+ contact_fields["telegram"] = text_records["org.telegram"]
+
+ # Farcaster
+ if "xyz.farcaster" in text_records:
+ contact_fields["farcaster"] = text_records["xyz.farcaster"]
+
+ # Collect other social profiles
+ other_social = []
+
+ if "com.github" in text_records or "vnd.github" in text_records:
+ github = text_records.get("com.github") or text_records.get("vnd.github")
+ other_social.append(f"GitHub: {github}")
+
+ if "com.reddit" in text_records:
+ other_social.append(f"Reddit: {text_records['com.reddit']}")
+
+ if "url" in text_records:
+ other_social.append(f"Website: {text_records['url']}")
+
+ if other_social:
+ contact_fields["otherSocial"] = "; ".join(other_social)
+
+ return contact_fields
+
+ def update_contact_info(self, contact_id: str, ens_name: Optional[str] = None, contact_info: Optional[Dict[str, str]] = None) -> bool:
+ """Update a contact with ENS name and additional contact information"""
+ try:
+ # Build the update query dynamically based on what fields we have
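+            # Field names here come from our own whitelist in map_text_records_to_contact_fields,
+            # so interpolating them into the SQL is safe; all values still go through bound parameters.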
+ update_fields = []
+ params = {"contact_id": contact_id}
+
+ if ens_name:
+ update_fields.append('"ensName" = %(ens_name)s')
+ params["ens_name"] = ens_name
+
+ if contact_info:
+ for field, value in contact_info.items():
+ update_fields.append(f'"{field}" = %({field})s')
+ params[field] = value
+
+ if not update_fields:
+ logger.warning(f"No fields to update for contact {contact_id}")
+ return False
+
+ query = f"""
+ UPDATE "Contact"
+ SET {", ".join(update_fields)},
+ "updatedAt" = NOW()
+ WHERE id = %(contact_id)s
+ """
+
+ self.db.execute_update(query, params)
+
+ # Also update the name if it's currently a generic name and we have a better name
+            if ens_name and (not contact_info or "name" not in contact_info):
+ name_query = """
+ SELECT name FROM "Contact" WHERE id = %(contact_id)s
+ """
+ result = self.db.execute_query(name_query, {"contact_id": contact_id})
+ current_name = result[0]["name"] if result else None
+
+ # If the current name is generic (starts with MC_ or ETH_ or RG_), update it
+ if current_name and (current_name.startswith("MC_") or current_name.startswith("ETH_") or current_name.startswith("RG_")):
+ # Use ENS name without .eth suffix as the name
+ name = ens_name[:-4] if ens_name.endswith('.eth') else ens_name
+
+ update_name_query = """
+ UPDATE "Contact"
+ SET name = %(name)s,
+ "updatedAt" = NOW()
+ WHERE id = %(contact_id)s
+ """
+
+ self.db.execute_update(update_name_query, {
+ "contact_id": contact_id,
+ "name": name
+ })
+
+ logger.info(f"Updated contact {contact_id} name from '{current_name}' to '{name}'")
+
+ fields_updated = []
+ if ens_name:
+ fields_updated.append("ENS name")
+ if contact_info:
+ fields_updated.extend(list(contact_info.keys()))
+
+ logger.info(f"Updated contact {contact_id} with: {', '.join(fields_updated)}")
+ return True
+ except Exception as e:
+ logger.error(f"Error updating contact {contact_id}: {e}")
+ return False
+
+ def process_contact(self, contact: Dict[str, Any]) -> Tuple[bool, bool]:
+ """Process a single contact to resolve ENS name and contact info"""
+ contact_id = contact["id"]
+ address = contact["ethereumAddress"]
+ current_ens = contact.get("ensName")
+
+ ens_updated = False
+ info_updated = False
+
+ # Skip if no address
+ if not address:
+ return ens_updated, info_updated
+
+ # Resolve ENS name if not already set
+ ens_name = None
+ if not current_ens:
+ ens_name = self.resolve_ens_name(address)
+ if ens_name:
+ ens_updated = True
+ else:
+ ens_name = current_ens
+
+ # Get contact info from ENS text records if we have an ENS name
+ contact_info = {}
+ if ens_name:
+ text_records = self.get_ens_text_records(ens_name)
+ if text_records:
+ contact_info = self.map_text_records_to_contact_fields(text_records)
+
+ # Only include fields that are different from what we already have
+ for field in list(contact_info.keys()):
+ if field in contact and contact[field] == contact_info[field]:
+ del contact_info[field]
+
+ if contact_info:
+ info_updated = True
+
+ # Update the contact if we have new information
+ if ens_updated or info_updated:
+ self.update_contact_info(contact_id, ens_name if ens_updated else None, contact_info if info_updated else None)
+
+ return ens_updated, info_updated
+
+ def run(self, batch_size: int = 50, delay_seconds: float = 0.5, resolve_all: bool = False):
+ """Run the resolver for contacts"""
+ if resolve_all:
+ contacts = self.get_all_contacts_with_eth_address()
+ else:
+ contacts = self.get_contacts_without_ens()
+
+ if not contacts:
+ logger.info("No contacts found to process")
+ return 0, 0
+
+ ens_updated_count = 0
+ info_updated_count = 0
+
+ # Process in batches to avoid rate limiting
+ for i in range(0, len(contacts), batch_size):
+ batch = contacts[i:i+batch_size]
+ logger.info(f"Processing batch {i//batch_size + 1}/{(len(contacts) + batch_size - 1)//batch_size}")
+
+ for contact in batch:
+ ens_updated, info_updated = self.process_contact(contact)
+
+ if ens_updated:
+ ens_updated_count += 1
+ if info_updated:
+ info_updated_count += 1
+
+ # Add a small delay to avoid rate limiting
+ time.sleep(delay_seconds)
+
+ logger.info(f"Updated ENS names for {ens_updated_count} contacts and contact info for {info_updated_count} contacts out of {len(contacts)} processed")
+ return ens_updated_count, info_updated_count
+
+def main():
+ """Main function"""
+ try:
+ import argparse
+
+ parser = argparse.ArgumentParser(description="Resolve ENS names and contact information")
+ parser.add_argument("--all", action="store_true", help="Process all contacts with Ethereum addresses, not just those without ENS names")
+ parser.add_argument("--batch-size", type=int, default=50, help="Number of contacts to process in each batch")
+ parser.add_argument("--delay", type=float, default=0.5, help="Delay in seconds between processing contacts")
+
+ args = parser.parse_args()
+
+ resolver = ENSResolver()
+ ens_count, info_count = resolver.run(
+ batch_size=args.batch_size,
+ delay_seconds=args.delay,
+ resolve_all=args.all
+ )
+
+ logger.info(f"ENS resolution completed successfully. Updated {ens_count} ENS names and {info_count} contact info records.")
+ return 0
+ except Exception as e:
+ logger.exception(f"Error running ENS resolver: {e}")
+ return 1
+
+if __name__ == "__main__":
+ sys.exit(main())
\ No newline at end of file
diff --git a/src/app/api/auth/login/route.ts b/src/app/api/auth/login/route.ts
new file mode 100644
index 0000000..1136d13
--- /dev/null
+++ b/src/app/api/auth/login/route.ts
@@ -0,0 +1,65 @@
+import { NextRequest, NextResponse } from "next/server";
+import { cookies } from "next/headers";
+
+// Mock user data - in a real app this would come from a database
+const USERS = [
+ {
+ id: "1",
+ name: "Admin",
+ role: "admin",
+ username: "admin",
+ password: "stones1234" // In production, use hashed passwords
+ }
+];
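+
+// NOTE: a real deployment should store only a password hash (e.g. bcrypt) and verify it
+// with a timing-safe comparison instead of the plaintext equality check below.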
+
+export async function POST(request: NextRequest) {
+ try {
+ const body = await request.json();
+ const { username, password } = body;
+
+ // Validate inputs
+ if (!username || !password) {
+ return NextResponse.json(
+ { success: false, message: "Missing required fields" },
+ { status: 400 }
+ );
+ }
+
+ // Find user
+ const user = USERS.find(u => u.username === username && u.password === password);
+
+ if (!user) {
+ return NextResponse.json(
+ { success: false, message: "Invalid credentials" },
+ { status: 401 }
+ );
+ }
+
+ // Set auth cookie
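+    // The payload below is neither signed nor encrypted, so a client could craft its own
+    // "auth" cookie; a production setup would use a signed session token (e.g. JWT or iron-session).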
+ const cookieStore = cookies();
+ cookieStore.set('auth', JSON.stringify({
+ id: user.id,
+ name: user.name,
+ role: user.role
+ }), {
+ httpOnly: true,
+ secure: process.env.NODE_ENV === 'production',
+ maxAge: 60 * 60 * 24 * 7, // 1 week
+ path: '/'
+ });
+
+ return NextResponse.json({
+ success: true,
+ user: {
+ name: user.name,
+ role: user.role
+ }
+ });
+ } catch (error) {
+ console.error("Login error:", error);
+ return NextResponse.json(
+ { success: false, message: "Internal server error" },
+ { status: 500 }
+ );
+ }
+}
\ No newline at end of file
diff --git a/src/app/api/auth/logout/route.ts b/src/app/api/auth/logout/route.ts
new file mode 100644
index 0000000..f5b6158
--- /dev/null
+++ b/src/app/api/auth/logout/route.ts
@@ -0,0 +1,20 @@
+import { NextRequest, NextResponse } from "next/server";
+import { cookies } from "next/headers";
+
+export async function POST(request: NextRequest) {
+ try {
+ // Delete auth cookie
+ const cookieStore = cookies();
+ cookieStore.delete('auth');
+
+ return NextResponse.json({
+ success: true
+ });
+ } catch (error) {
+ console.error("Logout error:", error);
+ return NextResponse.json(
+ { success: false, message: "Internal server error" },
+ { status: 500 }
+ );
+ }
+}
\ No newline at end of file
diff --git a/src/app/contacts/[id]/page.tsx b/src/app/contacts/[id]/page.tsx
new file mode 100644
index 0000000..de94628
--- /dev/null
+++ b/src/app/contacts/[id]/page.tsx
@@ -0,0 +1,313 @@
+import { Metadata } from "next";
+import Link from "next/link";
+import { notFound } from "next/navigation";
+import { getUser } from "@/lib/auth";
+import { prisma } from "@/lib/prisma";
+import { Button } from "@/components/ui/button";
+import { Card, CardContent, CardDescription, CardHeader, CardTitle } from "@/components/ui/card";
+import { Tabs, TabsContent, TabsList, TabsTrigger } from "@/components/ui/tabs";
+import { LogoutButton } from "@/components/auth/logout-button";
+import { Badge } from "@/components/ui/badge";
+
+export const metadata: Metadata = {
+  title: "Contact Details - Stones Database",
+  description: "View and manage contact details",
+};
+
+interface ContactDetailPageProps {
+  params: {
+    id: string;
+  };
+}
+
+export default async function ContactDetailPage({
+  params,
+}: ContactDetailPageProps) {
+  const user = await getUser();
+
+  // Get contact with all related data
+  const contact = await prisma.contact.findUnique({
+    where: {
+      id: params.id,
+    },
+    include: {
+      nftHoldings: true,
+      daoMemberships: true,
+      tokenHoldings: true,
+      notes: true,
+      tags: {
+        include: {
+          tag: true,
+        },
+      },
+    },
+  });
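+
+  // One findUnique with a nested `include` fetches the contact and all of its
+  // related records in a single Prisma call, so the render below needs no
+  // follow-up queries.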
+
+  if (!contact) {
+    notFound();
+  }
+
+  return (
+    <div className="min-h-screen">
+      <header className="border-b">
+        <div className="container mx-auto flex items-center justify-between py-4">
+          <Link href="/dashboard" className="text-xl font-bold">
+            Stones Database
+          </Link>
+          <div className="flex items-center gap-4">
+            <span>{user?.name}</span>
+            <LogoutButton />
+          </div>
+        </div>
+      </header>
+
+      <main className="container mx-auto py-8">
+        <div className="mb-8 flex items-start justify-between">
+          <div>
+            <h1 className="text-2xl font-bold">
+              {contact.name || contact.ensName || "Unnamed Contact"}
+            </h1>
+            <p className="font-mono text-sm">{contact.ethereumAddress}</p>
+          </div>
+          <Button asChild variant="outline">
+            <Link href="/contacts">Back to Contacts</Link>
+          </Button>
+        </div>
+
+        <Card className="mb-8">
+          <CardHeader>
+            <CardTitle>Contact Information</CardTitle>
+          </CardHeader>
+          <CardContent>
+            <dl className="grid gap-4 md:grid-cols-2">
+              <div>
+                <dt className="text-sm font-medium">Name</dt>
+                <dd>{contact.name || "-"}</dd>
+              </div>
+              <div>
+                <dt className="text-sm font-medium">ENS Name</dt>
+                <dd>{contact.ensName || "-"}</dd>
+              </div>
+              <div>
+                <dt className="text-sm font-medium">Ethereum Address</dt>
+                <dd className="font-mono">{contact.ethereumAddress}</dd>
+              </div>
+              {contact.ethereumAddress2 && (
+                <div>
+                  <dt className="text-sm font-medium">Secondary Ethereum Address</dt>
+                  <dd className="font-mono">{contact.ethereumAddress2}</dd>
+                </div>
+              )}
+              <div>
+                <dt className="text-sm font-medium">Email</dt>
+                <dd>{contact.email || "-"}</dd>
+              </div>
+              <div>
+                <dt className="text-sm font-medium">Twitter</dt>
+                <dd>{contact.twitter || "-"}</dd>
+              </div>
+              <div>
+                <dt className="text-sm font-medium">Discord</dt>
+                <dd>{contact.discord || "-"}</dd>
+              </div>
+              <div>
+                <dt className="text-sm font-medium">Telegram</dt>
+                <dd>{contact.telegram || "-"}</dd>
+              </div>
+              <div>
+                <dt className="text-sm font-medium">Farcaster</dt>
+                <dd>{contact.farcaster || "-"}</dd>
+              </div>
+              {contact.tags.length > 0 && (
+                <div>
+                  <dt className="text-sm font-medium">Tags</dt>
+                  <dd className="flex flex-wrap gap-2">
+                    {contact.tags.map((tagItem) => (
+                      <Badge key={tagItem.tag.id}>{tagItem.tag.name}</Badge>
+                    ))}
+                  </dd>
+                </div>
+              )}
+              <div>
+                <dt className="text-sm font-medium">Added On</dt>
+                <dd>{new Date(contact.createdAt).toLocaleDateString()}</dd>
+              </div>
+            </dl>
+          </CardContent>
+        </Card>
+
+        <Tabs defaultValue="nfts">
+          <TabsList>
+            <TabsTrigger value="nfts">
+              NFT Holdings ({contact.nftHoldings.length})
+            </TabsTrigger>
+            <TabsTrigger value="daos">
+              DAO Memberships ({contact.daoMemberships.length})
+            </TabsTrigger>
+            <TabsTrigger value="tokens">
+              Token Holdings ({contact.tokenHoldings.length})
+            </TabsTrigger>
+            <TabsTrigger value="notes">
+              Notes ({contact.notes.length})
+            </TabsTrigger>
+          </TabsList>
+
+          <TabsContent value="nfts">
+            {contact.nftHoldings.length > 0 ? (
+              <table className="w-full text-left">
+                <thead>
+                  <tr>
+                    <th>Collection</th>
+                    <th>Token ID</th>
+                    <th>Contract Address</th>
+                  </tr>
+                </thead>
+                <tbody>
+                  {contact.nftHoldings.map((nft) => (
+                    <tr key={nft.id}>
+                      <td>{nft.collectionName || "Unknown Collection"}</td>
+                      <td>{nft.tokenId}</td>
+                      <td className="font-mono">
+                        {nft.contractAddress.substring(0, 6)}...
+                        {nft.contractAddress.substring(nft.contractAddress.length - 4)}
+                      </td>
+                    </tr>
+                  ))}
+                </tbody>
+              </table>
+            ) : (
+              <p>No NFT holdings found.</p>
+            )}
+          </TabsContent>
+
+          <TabsContent value="daos">
+            {contact.daoMemberships.length > 0 ? (
+              <table className="w-full text-left">
+                <thead>
+                  <tr>
+                    <th>DAO Name</th>
+                    <th>DAO Type</th>
+                    <th>Joined Date</th>
+                  </tr>
+                </thead>
+                <tbody>
+                  {contact.daoMemberships.map((dao) => (
+                    <tr key={dao.id}>
+                      <td>{dao.daoName}</td>
+                      <td>{dao.daoType}</td>
+                      <td>
+                        {dao.joinedAt
+                          ? new Date(dao.joinedAt).toLocaleDateString()
+                          : "Unknown"}
+                      </td>
+                    </tr>
+                  ))}
+                </tbody>
+              </table>
+            ) : (
+              <p>No DAO memberships found.</p>
+            )}
+          </TabsContent>
+
+          <TabsContent value="tokens">
+            {contact.tokenHoldings.length > 0 ? (
+              <table className="w-full text-left">
+                <thead>
+                  <tr>
+                    <th>Token</th>
+                    <th>Balance</th>
+                    <th>Last Updated</th>
+                  </tr>
+                </thead>
+                <tbody>
+                  {contact.tokenHoldings.map((token) => (
+                    <tr key={token.id}>
+                      <td>{token.tokenSymbol || "Unknown Token"}</td>
+                      <td>{token.balance}</td>
+                      <td>{new Date(token.lastUpdated).toLocaleDateString()}</td>
+                    </tr>
+                  ))}
+                </tbody>
+              </table>
+            ) : (
+              <p>No token holdings found.</p>
+            )}
+          </TabsContent>
+
+          <TabsContent value="notes">
+            {contact.notes.length > 0 ? (
+              <div className="space-y-4">
+                {contact.notes.map((note) => (
+                  <Card key={note.id}>
+                    <CardHeader>
+                      <CardDescription>
+                        {new Date(note.createdAt).toLocaleDateString()}
+                      </CardDescription>
+                    </CardHeader>
+                    <CardContent>
+                      <p>{note.content}</p>
+                    </CardContent>
+                  </Card>
+                ))}
+              </div>
+            ) : (
+              <p>No notes found.</p>
+            )}
+          </TabsContent>
+        </Tabs>
+      </main>
+    </div>
+  );
+}
\ No newline at end of file
diff --git a/src/app/contacts/page.tsx b/src/app/contacts/page.tsx
new file mode 100644
index 0000000..7563c3d
--- /dev/null
+++ b/src/app/contacts/page.tsx
@@ -0,0 +1,105 @@
+import { Metadata } from "next";
+import Link from "next/link";
+import { getUser } from "@/lib/auth";
+import { prisma } from "@/lib/prisma";
+import { Button } from "@/components/ui/button";
+import { Input } from "@/components/ui/input";
+import { LogoutButton } from "@/components/auth/logout-button";
+import { ContactsList } from "@/components/contacts/contacts-list";
+
+export const metadata: Metadata = {
+  title: "Contacts - Stones Database",
+  description: "Manage contacts in the Stones Database",
+};
+
+interface ContactsPageProps {
+  searchParams: { [key: string]: string | string[] | undefined };
+}
+
+export default async function ContactsPage({ searchParams }: ContactsPageProps) {
+  const user = await getUser();
+  const page = Number(searchParams.page) || 1;
+  const limit = 25;
+  const skip = (page - 1) * limit;
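+  // Offset pagination: page N maps to rows [(N - 1) * limit, N * limit).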
+
+  // Get contacts with pagination
+  const contacts = await prisma.contact.findMany({
+    skip,
+    take: limit,
+    orderBy: {
+      createdAt: "desc",
+    },
+    include: {
+      nftHoldings: {
+        take: 1,
+      },
+      daoMemberships: {
+        take: 1,
+      },
+    },
+  });
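+  // `take: 1` per relation keeps the payload small: the list view only needs
+  // to know whether holdings/memberships exist, not every record.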
+
+  // Get total count for pagination
+  const totalContacts = await prisma.contact.count();
+  const totalPages = Math.ceil(totalContacts / limit);
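+  // The list and the count are two separate queries; if consistency under
+  // concurrent writes mattered, both could run inside prisma.$transaction([...]).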
+
+  return (
+    <div className="min-h-screen">
+      <header className="border-b">
+        <div className="container mx-auto flex items-center justify-between py-4">
+          <Link href="/dashboard" className="text-xl font-bold">
+            Stones Database
+          </Link>
+          <div className="flex items-center gap-4">
+            <span>{user?.name}</span>
+            <LogoutButton />
+          </div>
+        </div>
+      </header>
+
+      <main className="container mx-auto py-8">
+        <h1 className="mb-6 text-2xl font-bold">Contacts</h1>
+
+        <form action="/contacts" method="get" className="mb-6 flex gap-2">
+          <Input name="search" placeholder="Search contacts..." />
+          <Button type="submit">Search</Button>
+        </form>
+
+        <ContactsList
+          contacts={contacts}
+          currentPage={page}
+          totalPages={totalPages}
+        />
+      </main>
+    </div>
+  );
+}
\ No newline at end of file
diff --git a/src/app/dashboard/page.tsx b/src/app/dashboard/page.tsx
new file mode 100644
index 0000000..ff3ca3f
--- /dev/null
+++ b/src/app/dashboard/page.tsx
@@ -0,0 +1,150 @@
+import { Metadata } from "next";
+import Link from "next/link";
+import { getUser } from "@/lib/auth";
+import { Button } from "@/components/ui/button";
+import { Card, CardContent, CardDescription, CardFooter, CardHeader, CardTitle } from "@/components/ui/card";
+import { prisma } from "@/lib/prisma";
+import { LogoutButton } from "@/components/auth/logout-button";
+
+export const metadata: Metadata = {
+  title: "Dashboard - Stones Database",
+  description: "Dashboard for Stones Database",
+};
+
+export default async function DashboardPage() {
+  const user = await getUser();
+
+  // Get counts from database
+  const contactCount = await prisma.contact.count();
+  const nftHoldingCount = await prisma.nftHolding.count();
+  const daoMembershipCount = await prisma.daoMembership.count();
+  const tokenHoldingCount = await prisma.tokenHolding.count();
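+  // The four counts are independent; they could be fetched concurrently, e.g.:
+  //   const [contactCount, nftHoldingCount, daoMembershipCount, tokenHoldingCount] =
+  //     await Promise.all([prisma.contact.count(), prisma.nftHolding.count(),
+  //                        prisma.daoMembership.count(), prisma.tokenHolding.count()]);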
+
+  return (
+    <div className="min-h-screen">
+      <header className="border-b">
+        <div className="container mx-auto flex items-center justify-between py-4">
+          <Link href="/dashboard" className="text-xl font-bold">
+            Stones Database
+          </Link>
+          <div className="flex items-center gap-4">
+            <span>{user?.name}</span>
+            <LogoutButton />
+          </div>
+        </div>
+      </header>
+
+      <main className="container mx-auto py-8">
+        <h1 className="mb-6 text-2xl font-bold">Dashboard</h1>
+
+        <div className="grid gap-4 md:grid-cols-2 lg:grid-cols-4">
+          <Card>
+            <CardHeader>
+              <CardTitle>Total Contacts</CardTitle>
+            </CardHeader>
+            <CardContent>
+              <p className="text-3xl font-bold">{contactCount}</p>
+            </CardContent>
+            <CardFooter>
+              <Link href="/contacts">View contacts</Link>
+            </CardFooter>
+          </Card>
+
+          <Card>
+            <CardHeader>
+              <CardTitle>NFT Holdings</CardTitle>
+            </CardHeader>
+            <CardContent>
+              <p className="text-3xl font-bold">{nftHoldingCount}</p>
+            </CardContent>
+          </Card>
+
+          <Card>
+            <CardHeader>
+              <CardTitle>DAO Memberships</CardTitle>
+            </CardHeader>
+            <CardContent>
+              <p className="text-3xl font-bold">{daoMembershipCount}</p>
+            </CardContent>
+          </Card>
+
+          <Card>
+            <CardHeader>
+              <CardTitle>Token Holdings</CardTitle>
+            </CardHeader>
+            <CardContent>
+              <p className="text-3xl font-bold">{tokenHoldingCount}</p>
+            </CardContent>
+          </Card>
+        </div>
+
+        <section className="mt-8">
+          <h2 className="mb-4 text-xl font-semibold">Quick Actions</h2>
+          <div className="flex gap-4">
+            <Button asChild>
+              <Link href="/contacts">View Contacts</Link>
+            </Button>
+          </div>
+        </section>
+      </main>
+    </div>
+  );
+}
\ No newline at end of file
diff --git a/src/app/layout.tsx b/src/app/layout.tsx
index b41a23a..3687093 100644
--- a/src/app/layout.tsx
+++ b/src/app/layout.tsx
@@ -1,8 +1,6 @@
import type { Metadata } from "next";
import { Inter } from "next/font/google";
import "./globals.css";
-import { ThemeProvider } from "@/components/theme-provider";
-import { Toaster } from "@/components/ui/toaster";
const inter = Inter({ subsets: ["latin"] });
@@ -17,17 +15,9 @@ export default function RootLayout({
 children: React.ReactNode;
 }>) {
   return (
-    <html lang="en" suppressHydrationWarning>
+    <html lang="en">
       <body className={inter.className}>
-        <ThemeProvider attribute="class" defaultTheme="system" enableSystem>
-          {children}
-          <Toaster />
-        </ThemeProvider>
+        {children}
       </body>
     </html>
   );
diff --git a/src/app/login/page.tsx b/src/app/login/page.tsx
new file mode 100644
index 0000000..7541051
--- /dev/null
+++ b/src/app/login/page.tsx
@@ -0,0 +1,25 @@
+import { Metadata } from "next";
+import { LoginForm } from "@/components/auth/login-form";
+
+export const metadata: Metadata = {
+  title: "Login - Stones Database",
+  description: "Login to the Stones Database",
+};
+
+export default function LoginPage() {
+  return (
+    <div className="flex min-h-screen items-center justify-center">
+      <div className="w-full max-w-md space-y-4">
+        <div className="text-center">
+          <h1 className="text-2xl font-bold">Login to Stones Database</h1>
+          <p className="text-muted-foreground">
+            Enter your credentials to access the database
+          </p>
+        </div>
+        <LoginForm />
+      </div>
+    </div>
+  );
+}
\ No newline at end of file
diff --git a/src/app/page.tsx b/src/app/page.tsx
index 7165247..28bcf2f 100644
--- a/src/app/page.tsx
+++ b/src/app/page.tsx
@@ -1,7 +1,5 @@
import { Metadata } from "next";
import Link from "next/link";
-import { Button } from "@/components/ui/button";
-import { Card, CardContent, CardDescription, CardFooter, CardHeader, CardTitle } from "@/components/ui/card";
export const metadata: Metadata = {
title: "Stones Database",
@@ -11,103 +9,85 @@ export const metadata: Metadata = {
export default function Home() {
return (
-
+
Stones Database
-