Northern-Thailand-Ping-Rive…/scripts/migrate_geolocation.py
Initial commit: Northern Thailand Ping River Monitor v3.1.0
Features:
- Real-time water level monitoring for Ping River Basin (16 stations)
- Coverage from Chiang Dao to Nakhon Sawan in Northern Thailand
- FastAPI web interface with interactive dashboard and station management
- Multi-database support (SQLite, MySQL, PostgreSQL, InfluxDB, VictoriaMetrics)
- Comprehensive monitoring with health checks and metrics collection
- Docker deployment with Grafana integration
- Production-ready architecture with enterprise-grade observability

CI/CD & Automation:
- Complete Gitea Actions workflows for CI/CD, security, and releases
- Multi-Python version testing (3.9-3.12)
- Multi-architecture Docker builds (amd64, arm64)
- Daily security scanning and dependency monitoring
- Automated documentation generation
- Performance testing and validation

Production Ready:
- Type safety with Pydantic models and comprehensive type hints (a minimal model sketch follows this list)
- Data validation layer with range checking and error handling
- Rate limiting and request tracking for API protection
- Enhanced logging with rotation, colors, and performance metrics
- Station management API for dynamic CRUD operations
- Comprehensive documentation and deployment guides
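
A minimal sketch of what such a Pydantic validation model might look like (hypothetical field names and constraints; the project's actual models may differ):

from typing import Optional
from pydantic import BaseModel, Field

class Station(BaseModel):
    station_code: str = Field(..., min_length=1)                # e.g. "P.1"
    latitude: Optional[float] = Field(None, ge=-90, le=90)      # valid latitude range
    longitude: Optional[float] = Field(None, ge=-180, le=180)   # valid longitude range
    geohash: Optional[str] = Field(None, max_length=20)

# Out-of-range input raises a ValidationError before it reaches the database:
# Station(station_code="P.1", latitude=118.0)  -> pydantic.ValidationError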

Technical Stack:
- Python 3.9+ with FastAPI and Pydantic
- Multi-database architecture with adapter pattern (sketched after this list)
- Docker containerization with multi-stage builds
- Grafana dashboards for visualization
- Gitea Actions for CI/CD automation
- Enterprise monitoring and alerting
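
The adapter pattern mentioned above can be sketched roughly as follows (illustrative names only, not the project's real interface):

from abc import ABC, abstractmethod
from typing import Any, Dict, List

class DatabaseAdapter(ABC):
    """Common interface that each storage backend implements."""

    @abstractmethod
    def connect(self) -> None: ...

    @abstractmethod
    def save_measurements(self, rows: List[Dict[str, Any]]) -> None: ...

class SQLiteAdapter(DatabaseAdapter):
    def __init__(self, db_path: str) -> None:
        self.db_path = db_path

    def connect(self) -> None:
        import sqlite3
        self.conn = sqlite3.connect(self.db_path)

    def save_measurements(self, rows: List[Dict[str, Any]]) -> None:
        ...  # INSERT rows using self.conn

Application code depends only on DatabaseAdapter, so swapping SQLite for PostgreSQL or InfluxDB means supplying another adapter rather than touching call sites.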

Ready for deployment to B4L infrastructure!

#!/usr/bin/env python3
"""
Migration script to add geolocation columns to an existing water monitoring database.
"""

import os
import sys
import sqlite3
import logging
from typing import Dict, Any

# Configure logging
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(levelname)s - %(message)s'
)


def migrate_sqlite(db_path: str = 'water_monitoring.db') -> bool:
    """Migrate a SQLite database to add geolocation columns."""
    try:
        logging.info(f"Migrating SQLite database: {db_path}")

        # Connect to database
        conn = sqlite3.connect(db_path)
        cursor = conn.cursor()

        # Check if columns already exist
        cursor.execute("PRAGMA table_info(stations)")
        columns = [column[1] for column in cursor.fetchall()]
        logging.info(f"Current columns in stations table: {columns}")

        # Add columns if they don't exist
        columns_added = []
        if 'latitude' not in columns:
            cursor.execute("ALTER TABLE stations ADD COLUMN latitude REAL")
            columns_added.append('latitude')
            logging.info("Added latitude column")
        if 'longitude' not in columns:
            cursor.execute("ALTER TABLE stations ADD COLUMN longitude REAL")
            columns_added.append('longitude')
            logging.info("Added longitude column")
        if 'geohash' not in columns:
            cursor.execute("ALTER TABLE stations ADD COLUMN geohash TEXT")
            columns_added.append('geohash')
            logging.info("Added geohash column")

        if columns_added:
            # Update P.1 station with sample geolocation data.
            # NOTE: sample values only; verify against the real station location
            # before relying on them (the geohash prefix 'w5q6' decodes to the
            # Chiang Mai area, while 15.69 N / 100.20 E lies near Nakhon Sawan).
            cursor.execute("""
                UPDATE stations
                SET latitude = 15.6944, longitude = 100.2028, geohash = 'w5q6uuhvfcfp25'
                WHERE station_code = 'P.1'
            """)

            # Commit changes
            conn.commit()
            logging.info(f"Successfully added columns: {', '.join(columns_added)}")
            logging.info("Updated P.1 station with sample geolocation data")
        else:
            logging.info("All geolocation columns already exist")

        # Verify the changes
        cursor.execute("SELECT station_code, latitude, longitude, geohash FROM stations WHERE station_code = 'P.1'")
        result = cursor.fetchone()
        if result:
            logging.info(f"P.1 station geolocation: {result}")

        conn.close()
        return True
    except Exception as e:
        logging.error(f"Error migrating SQLite database: {e}")
        return False
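
# SQLite's ALTER TABLE has no "ADD COLUMN IF NOT EXISTS", so the PRAGMA
# table_info check above is what keeps this migration idempotent: re-running
# the script once the columns exist is a harmless no-op.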


def migrate_postgresql(connection_string: str) -> bool:
    """Migrate a PostgreSQL database to add geolocation columns."""
    try:
        import psycopg2
        from urllib.parse import urlparse

        logging.info("Migrating PostgreSQL database")

        # Parse connection string
        parsed = urlparse(connection_string)

        # Connect to database
        conn = psycopg2.connect(
            host=parsed.hostname,
            port=parsed.port or 5432,
            database=parsed.path[1:],  # Remove leading slash
            user=parsed.username,
            password=parsed.password
        )
        cursor = conn.cursor()

        # Check if columns exist
        cursor.execute("""
            SELECT column_name
            FROM information_schema.columns
            WHERE table_name = 'stations'
        """)
        columns = [row[0] for row in cursor.fetchall()]
        logging.info(f"Current columns in stations table: {columns}")

        # Add columns if they don't exist
        columns_added = []
        if 'latitude' not in columns:
            cursor.execute("ALTER TABLE stations ADD COLUMN latitude DECIMAL(10,8)")
            columns_added.append('latitude')
            logging.info("Added latitude column")
        if 'longitude' not in columns:
            cursor.execute("ALTER TABLE stations ADD COLUMN longitude DECIMAL(11,8)")
            columns_added.append('longitude')
            logging.info("Added longitude column")
        if 'geohash' not in columns:
            cursor.execute("ALTER TABLE stations ADD COLUMN geohash VARCHAR(20)")
            columns_added.append('geohash')
            logging.info("Added geohash column")

        if columns_added:
            # Update P.1 station with sample geolocation data
            cursor.execute("""
                UPDATE stations
                SET latitude = 15.6944, longitude = 100.2028, geohash = 'w5q6uuhvfcfp25'
                WHERE station_code = 'P.1'
            """)

            # Commit changes
            conn.commit()
            logging.info(f"Successfully added columns: {', '.join(columns_added)}")
            logging.info("Updated P.1 station with sample geolocation data")
        else:
            logging.info("All geolocation columns already exist")

        conn.close()
        return True
    except ImportError:
        logging.error("psycopg2 not installed. Run: pip install psycopg2-binary")
        return False
    except Exception as e:
        logging.error(f"Error migrating PostgreSQL database: {e}")
        return False
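
# psycopg2 runs in a transaction by default; if any statement above fails
# before conn.commit(), the uncommitted changes are discarded when the
# connection closes, so a failed run leaves the schema untouched.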


def migrate_mysql(connection_string: str) -> bool:
    """Migrate a MySQL database to add geolocation columns."""
    try:
        import pymysql
        from urllib.parse import urlparse

        logging.info("Migrating MySQL database")

        # Parse connection string
        parsed = urlparse(connection_string)

        # Connect to database
        conn = pymysql.connect(
            host=parsed.hostname,
            port=parsed.port or 3306,
            database=parsed.path[1:],  # Remove leading slash
            user=parsed.username,
            password=parsed.password
        )
        cursor = conn.cursor()

        # Check if columns exist
        cursor.execute("DESCRIBE stations")
        columns = [row[0] for row in cursor.fetchall()]
        logging.info(f"Current columns in stations table: {columns}")

        # Add columns if they don't exist
        columns_added = []
        if 'latitude' not in columns:
            cursor.execute("ALTER TABLE stations ADD COLUMN latitude DECIMAL(10,8)")
            columns_added.append('latitude')
            logging.info("Added latitude column")
        if 'longitude' not in columns:
            cursor.execute("ALTER TABLE stations ADD COLUMN longitude DECIMAL(11,8)")
            columns_added.append('longitude')
            logging.info("Added longitude column")
        if 'geohash' not in columns:
            cursor.execute("ALTER TABLE stations ADD COLUMN geohash VARCHAR(20)")
            columns_added.append('geohash')
            logging.info("Added geohash column")

        if columns_added:
            # Update P.1 station with sample geolocation data
            cursor.execute("""
                UPDATE stations
                SET latitude = 15.6944, longitude = 100.2028, geohash = 'w5q6uuhvfcfp25'
                WHERE station_code = 'P.1'
            """)

            # Commit changes
            conn.commit()
            logging.info(f"Successfully added columns: {', '.join(columns_added)}")
            logging.info("Updated P.1 station with sample geolocation data")
        else:
            logging.info("All geolocation columns already exist")

        conn.close()
        return True
    except ImportError:
        logging.error("pymysql not installed. Run: pip install pymysql")
        return False
    except Exception as e:
        logging.error(f"Error migrating MySQL database: {e}")
        return False
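
# Unlike PostgreSQL, MySQL DDL statements such as ALTER TABLE trigger an
# implicit commit, so a failure partway through can leave some columns in
# place; the DESCRIBE check above lets a re-run pick up where it left off.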


def load_config_from_env() -> Dict[str, Any]:
    """Load database configuration from environment variables."""
    db_type = os.getenv('DB_TYPE', 'sqlite').lower()

    if db_type == 'postgresql':
        return {
            'type': 'postgresql',
            'connection_string': os.getenv('POSTGRES_CONNECTION_STRING',
                                           'postgresql://postgres:password@localhost/water_monitoring')
        }
    elif db_type == 'mysql':
        return {
            'type': 'mysql',
            'connection_string': os.getenv('MYSQL_CONNECTION_STRING',
                                           'mysql://root:password@localhost/water_monitoring')
        }
    elif db_type == 'victoriametrics':
        logging.info("VictoriaMetrics doesn't require schema migration")
        return {'type': 'victoriametrics'}
    elif db_type == 'influxdb':
        logging.info("InfluxDB doesn't require schema migration")
        return {'type': 'influxdb'}
    else:
        # Default to SQLite
        return {
            'type': 'sqlite',
            'db_path': os.getenv('SQLITE_DB_PATH', 'water_monitoring.db')
        }
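
# Recognized environment variables (all optional; defaults shown above):
#   DB_TYPE                     sqlite | postgresql | mysql | victoriametrics | influxdb
#   SQLITE_DB_PATH              path to the SQLite database file
#   POSTGRES_CONNECTION_STRING  postgresql://user:pass@host[:port]/dbname
#   MYSQL_CONNECTION_STRING     mysql://user:pass@host[:port]/dbname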


def main():
    """Main migration function"""
    logging.info("Starting geolocation column migration...")

    # Load configuration
    config = load_config_from_env()
    db_type = config['type']
    logging.info(f"Detected database type: {db_type.upper()}")

    success = False
    if db_type == 'sqlite':
        db_path = config.get('db_path', 'water_monitoring.db')
        if not os.path.exists(db_path):
            logging.error(f"Database file not found: {db_path}")
            sys.exit(1)
        success = migrate_sqlite(db_path)
    elif db_type == 'postgresql':
        success = migrate_postgresql(config['connection_string'])
    elif db_type == 'mysql':
        success = migrate_mysql(config['connection_string'])
    elif db_type in ['victoriametrics', 'influxdb']:
        logging.info(f"{db_type.upper()} doesn't require schema migration")
        success = True
    else:
        logging.error(f"Unsupported database type: {db_type}")
        sys.exit(1)

    if success:
        logging.info("✅ Migration completed successfully!")
        logging.info("You can now restart your water monitoring application")
        logging.info("The system will automatically use the new geolocation columns")
    else:
        logging.error("❌ Migration failed!")
        sys.exit(1)


if __name__ == "__main__":
    main()
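
# Example invocations (credentials are hypothetical placeholders):
#   python scripts/migrate_geolocation.py
#   DB_TYPE=postgresql POSTGRES_CONNECTION_STRING='postgresql://user:pass@db-host:5432/water_monitoring' \
#       python scripts/migrate_geolocation.py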