Initial commit: Northern Thailand Ping River Monitor v3.1.0
Some checks failed
Security & Dependency Updates / Dependency Security Scan (push) Successful in 29s
Security & Dependency Updates / Docker Security Scan (push) Failing after 53s
Security & Dependency Updates / License Compliance (push) Successful in 13s
Security & Dependency Updates / Check for Dependency Updates (push) Successful in 19s
Security & Dependency Updates / Code Quality Metrics (push) Successful in 11s
Security & Dependency Updates / Security Summary (push) Successful in 7s
Features:
- Real-time water level monitoring for Ping River Basin (16 stations)
- Coverage from Chiang Dao to Nakhon Sawan in Northern Thailand
- FastAPI web interface with interactive dashboard and station management
- Multi-database support (SQLite, MySQL, PostgreSQL, InfluxDB, VictoriaMetrics)
- Comprehensive monitoring with health checks and metrics collection
- Docker deployment with Grafana integration
- Production-ready architecture with enterprise-grade observability

CI/CD & Automation:
- Complete Gitea Actions workflows for CI/CD, security, and releases
- Multi-Python version testing (3.9-3.12)
- Multi-architecture Docker builds (amd64, arm64)
- Daily security scanning and dependency monitoring
- Automated documentation generation
- Performance testing and validation

Production Ready:
- Type safety with Pydantic models and comprehensive type hints
- Data validation layer with range checking and error handling
- Rate limiting and request tracking for API protection
- Enhanced logging with rotation, colors, and performance metrics
- Station management API for dynamic CRUD operations
- Comprehensive documentation and deployment guides

Technical Stack:
- Python 3.9+ with FastAPI and Pydantic
- Multi-database architecture with adapter pattern
- Docker containerization with multi-stage builds
- Grafana dashboards for visualization
- Gitea Actions for CI/CD automation
- Enterprise monitoring and alerting

Ready for deployment to B4L infrastructure!
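The "multi-database architecture with adapter pattern" mentioned in the commit message is not part of this diff (only src/main.py is included), so the following is a minimal sketch of how such an adapter layer might look. All class, method, and config-key names here are illustrative assumptions, not the project's actual API:

# Illustrative sketch only: names below are assumptions, not the real adapters
# used by EnhancedWaterMonitorScraper.
import sqlite3
from abc import ABC, abstractmethod
from typing import Any, Dict, List, Optional


class DatabaseAdapter(ABC):
    """Common interface that each backend (SQLite, PostgreSQL, InfluxDB, ...) would implement."""

    @abstractmethod
    def connect(self) -> None:
        """Open a connection to the backend."""

    @abstractmethod
    def save_measurements(self, measurements: List[Dict[str, Any]]) -> int:
        """Persist a batch of station measurements; return the number of rows written."""


class SQLiteAdapter(DatabaseAdapter):
    """Example backend: stores measurements in a local SQLite file."""

    def __init__(self, path: str):
        self.path = path
        self.conn: Optional[sqlite3.Connection] = None

    def connect(self) -> None:
        self.conn = sqlite3.connect(self.path)
        self.conn.execute(
            "CREATE TABLE IF NOT EXISTS measurements ("
            "station_code TEXT, timestamp TEXT, water_level REAL, discharge REAL)"
        )

    def save_measurements(self, measurements: List[Dict[str, Any]]) -> int:
        assert self.conn is not None, "call connect() first"
        self.conn.executemany(
            "INSERT INTO measurements VALUES "
            "(:station_code, :timestamp, :water_level, :discharge)",
            measurements,
        )
        self.conn.commit()
        return len(measurements)


def make_adapter(db_config: Dict[str, Any]) -> DatabaseAdapter:
    """Pick an adapter from a config dict, e.g. {'type': 'sqlite', 'path': 'ping_river.db'}."""
    if db_config.get("type") == "sqlite":
        return SQLiteAdapter(db_config["path"])
    raise ValueError(f"Unsupported database type: {db_config.get('type')}")

Other backends (PostgreSQL, InfluxDB, VictoriaMetrics) would plug in by implementing the same interface and adding a branch to make_adapter(), which is presumably what Config.get_database_config() feeds into.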
src/main.py (new file, +337 lines)
@@ -0,0 +1,337 @@
#!/usr/bin/env python3
"""
Main entry point for the Thailand Water Monitor system
"""

import argparse
import asyncio
import sys
import signal
import time
from datetime import datetime
from typing import Optional

from .config import Config
from .water_scraper_v3 import EnhancedWaterMonitorScraper
from .logging_config import setup_logging, get_logger
from .exceptions import ConfigurationError, DatabaseConnectionError
from .metrics import get_metrics_collector

logger = get_logger(__name__)

def setup_signal_handlers(scraper: Optional[EnhancedWaterMonitorScraper] = None):
    """Setup signal handlers for graceful shutdown"""
    def signal_handler(signum, frame):
        logger.info(f"Received signal {signum}, shutting down gracefully...")
        if scraper:
            logger.info("Stopping scraper...")
        sys.exit(0)

    signal.signal(signal.SIGINT, signal_handler)
    signal.signal(signal.SIGTERM, signal_handler)

def run_test_cycle():
    """Run a single test cycle"""
    logger.info("Running test cycle...")

    try:
        # Validate configuration
        Config.validate_config()

        # Initialize scraper
        db_config = Config.get_database_config()
        scraper = EnhancedWaterMonitorScraper(db_config)

        # Run single scraping cycle
        result = scraper.run_scraping_cycle()

        if result:
            logger.info("✅ Test cycle completed successfully")

            # Show some statistics
            latest_data = scraper.get_latest_data(5)
            if latest_data:
                logger.info(f"Latest data points: {len(latest_data)}")
                for data in latest_data[:3]:  # Show first 3
                    logger.info(f"  • {data['station_code']}: {data['water_level']:.2f}m, {data['discharge']:.1f} cms")
        else:
            logger.warning("⚠️ Test cycle completed but no new data was found")

        return True

    except Exception as e:
        logger.error(f"❌ Test cycle failed: {e}")
        return False

def run_continuous_monitoring():
    """Run continuous monitoring with scheduling"""
    logger.info("Starting continuous monitoring...")

    try:
        # Validate configuration
        Config.validate_config()

        # Initialize scraper
        db_config = Config.get_database_config()
        scraper = EnhancedWaterMonitorScraper(db_config)

        # Setup signal handlers
        setup_signal_handlers(scraper)

        logger.info(f"Monitoring started with {Config.SCRAPING_INTERVAL_HOURS}h interval")
        logger.info("Press Ctrl+C to stop")

        # Run initial cycle
        logger.info("Running initial data collection...")
        scraper.run_scraping_cycle()

        # Start scheduled monitoring
        import schedule

        schedule.every(Config.SCRAPING_INTERVAL_HOURS).hours.do(scraper.run_scraping_cycle)

        while True:
            schedule.run_pending()
            time.sleep(60)  # Check every minute

    except KeyboardInterrupt:
        logger.info("Monitoring stopped by user")
    except Exception as e:
        logger.error(f"Monitoring failed: {e}")
        return False

    return True

def run_gap_filling(days_back: int):
    """Run gap filling for missing data"""
    logger.info(f"Checking for data gaps in the last {days_back} days...")

    try:
        # Validate configuration
        Config.validate_config()

        # Initialize scraper
        db_config = Config.get_database_config()
        scraper = EnhancedWaterMonitorScraper(db_config)

        # Fill gaps
        filled_count = scraper.fill_data_gaps(days_back)

        if filled_count > 0:
            logger.info(f"✅ Filled {filled_count} missing data points")
        else:
            logger.info("✅ No data gaps found")

        return True

    except Exception as e:
        logger.error(f"❌ Gap filling failed: {e}")
        return False

def run_data_update(days_back: int):
    """Update existing data with latest values"""
    logger.info(f"Updating existing data for the last {days_back} days...")

    try:
        # Validate configuration
        Config.validate_config()

        # Initialize scraper
        db_config = Config.get_database_config()
        scraper = EnhancedWaterMonitorScraper(db_config)

        # Update data
        updated_count = scraper.update_existing_data(days_back)

        if updated_count > 0:
            logger.info(f"✅ Updated {updated_count} data points")
        else:
            logger.info("✅ No data updates needed")

        return True

    except Exception as e:
        logger.error(f"❌ Data update failed: {e}")
        return False

def run_web_api():
    """Run the FastAPI web interface"""
    logger.info("Starting web API server...")

    try:
        import uvicorn
        from .web_api import app

        # Validate configuration
        Config.validate_config()

        # Run the server
        uvicorn.run(
            app,
            host="0.0.0.0",
            port=8000,
            log_config=None  # Use our custom logging
        )

        # uvicorn.run() blocks until the server stops; report a clean shutdown
        return True

    except ImportError:
        logger.error("FastAPI not installed. Run: pip install fastapi uvicorn")
        return False
    except Exception as e:
        logger.error(f"Web API failed: {e}")
        return False

def show_status():
    """Show current system status"""
    logger.info("=== Northern Thailand Ping River Monitor Status ===")

    try:
        # Show configuration
        Config.print_settings()

        # Test database connection
        logger.info("\n=== Database Connection Test ===")
        db_config = Config.get_database_config()
        scraper = EnhancedWaterMonitorScraper(db_config)

        if scraper.db_adapter:
            logger.info("✅ Database connection successful")

            # Show latest data
            latest_data = scraper.get_latest_data(3)
            if latest_data:
                logger.info(f"\n=== Latest Data ({len(latest_data)} points) ===")
                for data in latest_data:
                    timestamp = data['timestamp']
                    if isinstance(timestamp, str):
                        timestamp = datetime.fromisoformat(timestamp.replace('Z', '+00:00'))
                    logger.info(f"  • {data['station_code']} ({timestamp}): {data['water_level']:.2f}m")
            else:
                logger.info("No data found in database")
        else:
            logger.error("❌ Database connection failed")

        # Show metrics if available
        metrics_collector = get_metrics_collector()
        metrics = metrics_collector.get_all_metrics()

        if any(metrics.values()):
            logger.info("\n=== Metrics Summary ===")
            for metric_type, values in metrics.items():
                if values:
                    logger.info(f"{metric_type.title()}: {len(values)} metrics")

        return True

    except Exception as e:
        logger.error(f"Status check failed: {e}")
        return False

def main():
    """Main entry point"""
    parser = argparse.ArgumentParser(
        description="Northern Thailand Ping River Monitor",
        formatter_class=argparse.RawDescriptionHelpFormatter,
        epilog="""
Examples:
  %(prog)s --test             # Run single test cycle
  %(prog)s                    # Run continuous monitoring
  %(prog)s --web-api          # Start web API server
  %(prog)s --fill-gaps 7      # Fill missing data for last 7 days
  %(prog)s --update-data 2    # Update existing data for last 2 days
  %(prog)s --status           # Show system status
        """
    )

    parser.add_argument(
        "--test",
        action="store_true",
        help="Run a single test cycle"
    )

    parser.add_argument(
        "--web-api",
        action="store_true",
        help="Start the web API server"
    )

    parser.add_argument(
        "--fill-gaps",
        type=int,
        metavar="DAYS",
        help="Fill missing data gaps for the specified number of days back"
    )

    parser.add_argument(
        "--update-data",
        type=int,
        metavar="DAYS",
        help="Update existing data for the specified number of days back"
    )

    parser.add_argument(
        "--status",
        action="store_true",
        help="Show current system status"
    )

    parser.add_argument(
        "--log-level",
        choices=["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"],
        default=Config.LOG_LEVEL,
        help="Set logging level"
    )

    parser.add_argument(
        "--log-file",
        default=Config.LOG_FILE,
        help="Log file path"
    )

    args = parser.parse_args()

    # Setup logging
    setup_logging(
        log_level=args.log_level,
        log_file=args.log_file,
        enable_console=True,
        enable_colors=True
    )

    logger.info("🏔️ Northern Thailand Ping River Monitor starting...")
    logger.info("Version: 3.1.0")
    logger.info(f"Log level: {args.log_level}")

    try:
        success = False

        if args.test:
            success = run_test_cycle()
        elif args.web_api:
            success = run_web_api()
        elif args.fill_gaps is not None:
            success = run_gap_filling(args.fill_gaps)
        elif args.update_data is not None:
            success = run_data_update(args.update_data)
        elif args.status:
            success = show_status()
        else:
            success = run_continuous_monitoring()

        if success:
            logger.info("✅ Operation completed successfully")
            sys.exit(0)
        else:
            logger.error("❌ Operation failed")
            sys.exit(1)

    except ConfigurationError as e:
        logger.error(f"Configuration error: {e}")
        sys.exit(1)
    except KeyboardInterrupt:
        logger.info("Operation cancelled by user")
        sys.exit(0)
    except Exception as e:
        logger.error(f"Unexpected error: {e}")
        sys.exit(1)

if __name__ == "__main__":
    main()