Initial commit: Northern Thailand Ping River Monitor v3.1.0
Some checks failed
Security & Dependency Updates / Dependency Security Scan (push) Successful in 29s
Security & Dependency Updates / Docker Security Scan (push) Failing after 53s
Security & Dependency Updates / License Compliance (push) Successful in 13s
Security & Dependency Updates / Check for Dependency Updates (push) Successful in 19s
Security & Dependency Updates / Code Quality Metrics (push) Successful in 11s
Security & Dependency Updates / Security Summary (push) Successful in 7s
Some checks failed
Security & Dependency Updates / Dependency Security Scan (push) Successful in 29s
Security & Dependency Updates / Docker Security Scan (push) Failing after 53s
Security & Dependency Updates / License Compliance (push) Successful in 13s
Security & Dependency Updates / Check for Dependency Updates (push) Successful in 19s
Security & Dependency Updates / Code Quality Metrics (push) Successful in 11s
Security & Dependency Updates / Security Summary (push) Successful in 7s
Features: - Real-time water level monitoring for Ping River Basin (16 stations) - Coverage from Chiang Dao to Nakhon Sawan in Northern Thailand - FastAPI web interface with interactive dashboard and station management - Multi-database support (SQLite, MySQL, PostgreSQL, InfluxDB, VictoriaMetrics) - Comprehensive monitoring with health checks and metrics collection - Docker deployment with Grafana integration - Production-ready architecture with enterprise-grade observability CI/CD & Automation: - Complete Gitea Actions workflows for CI/CD, security, and releases - Multi-Python version testing (3.9-3.12) - Multi-architecture Docker builds (amd64, arm64) - Daily security scanning and dependency monitoring - Automated documentation generation - Performance testing and validation Production Ready: - Type safety with Pydantic models and comprehensive type hints - Data validation layer with range checking and error handling - Rate limiting and request tracking for API protection - Enhanced logging with rotation, colors, and performance metrics - Station management API for dynamic CRUD operations - Comprehensive documentation and deployment guides Technical Stack: - Python 3.9+ with FastAPI and Pydantic - Multi-database architecture with adapter pattern - Docker containerization with multi-stage builds - Grafana dashboards for visualization - Gitea Actions for CI/CD automation - Enterprise monitoring and alerting Ready for deployment to B4L infrastructure!
This commit is contained in:
51
scripts/generate_badges.py
Normal file
51
scripts/generate_badges.py
Normal file
@@ -0,0 +1,51 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Generate status badges for README.md
|
||||
"""
|
||||
|
||||
import json
|
||||
import requests
|
||||
from datetime import datetime
|
||||
|
||||
def generate_badge_url(label, message, color="brightgreen"):
    """Build a static shields.io badge URL for *label*/*message* in *color*."""
    # shields.io static badge path format: /badge/<label>-<message>-<color>
    badge_path = "-".join([label, message, color])
    return "https://img.shields.io/badge/" + badge_path
def generate_workflow_badge(repo_url, workflow_name, branch="main"):
    """Build the status-badge SVG URL for a Gitea Actions workflow."""
    # Gitea serves one badge SVG per workflow file; adjust the path if your
    # instance uses a different Actions layout.
    return "{}/actions/workflows/{}/badge.svg?branch={}".format(
        repo_url, workflow_name, branch
    )
def main():
    """Generate status badges for the project and print them as markdown.

    Prints one markdown image-link per badge, then a single combined line
    suitable for pasting at the top of README.md.
    """
    repo_url = "https://git.b4l.co.th/B4L/Northern-Thailand-Ping-River-Monitor"

    badges = {
        "CI/CD": generate_workflow_badge(repo_url, "ci.yml"),
        "Security": generate_workflow_badge(repo_url, "security.yml"),
        "Documentation": generate_workflow_badge(repo_url, "docs.yml"),
        "Python": generate_badge_url("Python", "3.9%2B", "blue"),
        "FastAPI": generate_badge_url("FastAPI", "0.104%2B", "green"),
        "Docker": generate_badge_url("Docker", "Ready", "blue"),
        "License": generate_badge_url("License", "MIT", "green"),
        "Version": generate_badge_url("Version", "v3.1.0", "blue"),
    }

    print("# Status Badges")
    print()
    print("Add these badges to your README.md:")
    print()

    # Fix: `name` and `url` were previously unused in the f-string, so every
    # line printed an empty "[](...)" link. Emit the badge image markdown.
    for name, url in badges.items():
        print(f"[![{name}]({url})]({repo_url})")

    print()
    print("# Markdown Format")
    print()

    # Same fix for the single combined badge line.
    badge_line = " ".join(
        f"[![{name}]({url})]({repo_url})" for name, url in badges.items()
    )
    print(badge_line)


if __name__ == "__main__":
    main()
35
scripts/init_git.bat
Normal file
35
scripts/init_git.bat
Normal file
@@ -0,0 +1,35 @@
|
||||
@echo off
REM Git initialization script for Northern Thailand Ping River Monitor
REM Creates the local repo, wires up the B4L remote, and makes the initial
REM commit. Run from the project root.

echo 🏔️ Initializing Git repository for Northern Thailand Ping River Monitor

REM Initialize git repository
git init

REM Add remote origin
git remote add origin https://git.b4l.co.th/B4L/Northern-Thailand-Ping-River-Monitor.git

REM Add all files
git add .

REM Initial commit.
REM Fix: cmd.exe cannot pass a literal multi-line string to `git commit -m`;
REM every line after the first was executed as a separate command. Build the
REM message in a temp file and commit with `git commit -F` instead.
REM (The `^)` escapes the close-paren so it does not end the echo block.)
(
echo Initial commit: Northern Thailand Ping River Monitor v3.1.0
echo.
echo Features:
echo - Real-time water level monitoring for Ping River Basin
echo - 16 monitoring stations from Chiang Dao to Nakhon Sawan
echo - FastAPI web interface with station management
echo - Multi-database support (SQLite, MySQL, PostgreSQL, InfluxDB, VictoriaMetrics^)
echo - Comprehensive monitoring and health checks
echo - Docker deployment with Grafana integration
echo - Production-ready architecture with CI/CD pipeline
) > "%TEMP%\commit_msg.txt"
git commit -F "%TEMP%\commit_msg.txt"
del "%TEMP%\commit_msg.txt"

echo ✅ Git repository initialized successfully!
echo.
echo Next steps:
echo 1. Review and edit .env file with your configuration
echo 2. Push to remote repository:
echo    git push -u origin main
echo.
echo 3. Start the application:
echo    python run.py --web-api
89
scripts/init_git.sh
Normal file
89
scripts/init_git.sh
Normal file
@@ -0,0 +1,89 @@
|
||||
#!/bin/bash
# Git initialization script for Northern Thailand Ping River Monitor
# Creates the local repo, a default .gitignore, the B4L remote, and the
# initial commit. Run from the project root.
# NOTE(review): not idempotent — `git remote add` fails if run twice; confirm
# whether a guard is wanted.

echo "🏔️ Initializing Git repository for Northern Thailand Ping River Monitor"

# Initialize git repository
git init

# Add remote origin
git remote add origin https://git.b4l.co.th/B4L/Northern-Thailand-Ping-River-Monitor.git

# Create .gitignore if it doesn't exist.
# The quoted 'EOF' heredoc writes the content literally (no shell expansion).
if [ ! -f .gitignore ]; then
    echo "Creating .gitignore file..."
    cat > .gitignore << 'EOF'
# Python
__pycache__/
*.py[cod]
*.so
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
*.egg-info/
.installed.cfg
*.egg

# Virtual environments
.env
.venv
env/
venv/
ENV/

# IDE
.vscode/
.idea/
*.swp
*.swo

# Logs
*.log
logs/

# Database files
*.db
*.sqlite
*.sqlite3

# OS
.DS_Store
Thumbs.db
EOF
fi

# Add all files
git add .

# Initial commit (multi-line -m message is valid in bash, unlike cmd.exe)
git commit -m "Initial commit: Northern Thailand Ping River Monitor v3.1.0

Features:
- Real-time water level monitoring for Ping River Basin
- 16 monitoring stations from Chiang Dao to Nakhon Sawan
- FastAPI web interface with station management
- Multi-database support (SQLite, MySQL, PostgreSQL, InfluxDB, VictoriaMetrics)
- Comprehensive monitoring and health checks
- Docker deployment with Grafana integration
- Production-ready architecture with CI/CD pipeline"

# Final user guidance
echo "✅ Git repository initialized successfully!"
echo ""
echo "Next steps:"
echo "1. Review and edit .env file with your configuration"
echo "2. Push to remote repository:"
echo "   git push -u origin main"
echo ""
echo "3. Start the application:"
echo "   make run-api"
echo "   # or: python run.py --web-api"
294
scripts/migrate_geolocation.py
Normal file
294
scripts/migrate_geolocation.py
Normal file
@@ -0,0 +1,294 @@
|
||||
#!/usr/bin/env python3
"""
Migration script to add geolocation columns to existing water monitoring database
"""

import os
import sys
import sqlite3
import logging
from typing import Dict, Any

# Configure logging
# Root-logger setup shared by every migrate_* helper in this script: INFO
# level with timestamped messages on stderr.
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(levelname)s - %(message)s'
)
def migrate_sqlite(db_path: str = 'water_monitoring.db') -> bool:
    """Migrate a SQLite database to add geolocation columns to ``stations``.

    Adds ``latitude``/``longitude`` (REAL) and ``geohash`` (TEXT) columns when
    missing, then seeds station P.1 with sample coordinates. Safe to re-run:
    existing columns are left untouched.

    Args:
        db_path: Path to the SQLite database file.

    Returns:
        True on success, False if the migration failed (the error is logged).
    """
    try:
        logging.info(f"Migrating SQLite database: {db_path}")

        conn = sqlite3.connect(db_path)
        try:
            cursor = conn.cursor()

            # Inspect the existing schema so the migration is idempotent.
            cursor.execute("PRAGMA table_info(stations)")
            columns = [column[1] for column in cursor.fetchall()]
            logging.info(f"Current columns in stations table: {columns}")

            columns_added = []
            # Add each missing geolocation column; order matches the original
            # migration (latitude, longitude, geohash).
            for name, sql_type in (('latitude', 'REAL'),
                                   ('longitude', 'REAL'),
                                   ('geohash', 'TEXT')):
                if name not in columns:
                    cursor.execute(f"ALTER TABLE stations ADD COLUMN {name} {sql_type}")
                    columns_added.append(name)
                    logging.info(f"Added {name} column")

            if columns_added:
                # Seed P.1 with sample geolocation data.
                # NOTE(review): these coordinates look like Nakhon Sawan —
                # confirm they are correct for station P.1.
                cursor.execute("""
                    UPDATE stations
                    SET latitude = 15.6944, longitude = 100.2028, geohash = 'w5q6uuhvfcfp25'
                    WHERE station_code = 'P.1'
                """)
                conn.commit()
                logging.info(f"Successfully added columns: {', '.join(columns_added)}")
                logging.info("Updated P.1 station with sample geolocation data")
            else:
                logging.info("All geolocation columns already exist")

            # Verify the changes took effect.
            cursor.execute("SELECT station_code, latitude, longitude, geohash FROM stations WHERE station_code = 'P.1'")
            result = cursor.fetchone()
            if result:
                logging.info(f"P.1 station geolocation: {result}")

            return True
        finally:
            # Fix: previously the connection leaked when any statement raised.
            conn.close()

    except Exception as e:
        logging.error(f"Error migrating SQLite database: {e}")
        return False
def migrate_postgresql(connection_string: str) -> bool:
    """Migrate a PostgreSQL database to add geolocation columns to ``stations``.

    Args:
        connection_string: A ``postgresql://user:pass@host:port/dbname`` URL.

    Returns:
        True on success, False if psycopg2 is missing or the migration failed
        (the error is logged).
    """
    try:
        # Imported lazily so the script still runs for non-PostgreSQL backends.
        import psycopg2
        from urllib.parse import urlparse

        logging.info("Migrating PostgreSQL database")

        # Parse connection string
        parsed = urlparse(connection_string)

        conn = psycopg2.connect(
            host=parsed.hostname,
            port=parsed.port or 5432,   # default PostgreSQL port
            database=parsed.path[1:],   # remove leading slash
            user=parsed.username,
            password=parsed.password
        )
        try:
            cursor = conn.cursor()

            # Discover existing columns so re-running the migration is a no-op.
            cursor.execute("""
                SELECT column_name
                FROM information_schema.columns
                WHERE table_name = 'stations'
            """)
            columns = [row[0] for row in cursor.fetchall()]
            logging.info(f"Current columns in stations table: {columns}")

            columns_added = []
            # DECIMAL precision follows the common lat/lon convention.
            for name, sql_type in (('latitude', 'DECIMAL(10,8)'),
                                   ('longitude', 'DECIMAL(11,8)'),
                                   ('geohash', 'VARCHAR(20)')):
                if name not in columns:
                    cursor.execute(f"ALTER TABLE stations ADD COLUMN {name} {sql_type}")
                    columns_added.append(name)
                    logging.info(f"Added {name} column")

            if columns_added:
                # Seed P.1 with sample geolocation data.
                cursor.execute("""
                    UPDATE stations
                    SET latitude = 15.6944, longitude = 100.2028, geohash = 'w5q6uuhvfcfp25'
                    WHERE station_code = 'P.1'
                """)
                conn.commit()
                logging.info(f"Successfully added columns: {', '.join(columns_added)}")
                logging.info("Updated P.1 station with sample geolocation data")
            else:
                logging.info("All geolocation columns already exist")

            return True
        finally:
            # Fix: previously the connection leaked when any statement raised.
            conn.close()

    except ImportError:
        logging.error("psycopg2 not installed. Run: pip install psycopg2-binary")
        return False
    except Exception as e:
        logging.error(f"Error migrating PostgreSQL database: {e}")
        return False
def migrate_mysql(connection_string: str) -> bool:
    """Migrate a MySQL database to add geolocation columns to ``stations``.

    Args:
        connection_string: A ``mysql://user:pass@host:port/dbname`` URL.

    Returns:
        True on success, False if pymysql is missing or the migration failed
        (the error is logged).
    """
    try:
        # Imported lazily so the script still runs for non-MySQL backends.
        import pymysql
        from urllib.parse import urlparse

        logging.info("Migrating MySQL database")

        # Parse connection string
        parsed = urlparse(connection_string)

        conn = pymysql.connect(
            host=parsed.hostname,
            port=parsed.port or 3306,   # default MySQL port
            database=parsed.path[1:],   # remove leading slash
            user=parsed.username,
            password=parsed.password
        )
        try:
            cursor = conn.cursor()

            # Discover existing columns so re-running the migration is a no-op.
            cursor.execute("DESCRIBE stations")
            columns = [row[0] for row in cursor.fetchall()]
            logging.info(f"Current columns in stations table: {columns}")

            columns_added = []
            # DECIMAL precision follows the common lat/lon convention.
            for name, sql_type in (('latitude', 'DECIMAL(10,8)'),
                                   ('longitude', 'DECIMAL(11,8)'),
                                   ('geohash', 'VARCHAR(20)')):
                if name not in columns:
                    cursor.execute(f"ALTER TABLE stations ADD COLUMN {name} {sql_type}")
                    columns_added.append(name)
                    logging.info(f"Added {name} column")

            if columns_added:
                # Seed P.1 with sample geolocation data.
                cursor.execute("""
                    UPDATE stations
                    SET latitude = 15.6944, longitude = 100.2028, geohash = 'w5q6uuhvfcfp25'
                    WHERE station_code = 'P.1'
                """)
                conn.commit()
                logging.info(f"Successfully added columns: {', '.join(columns_added)}")
                logging.info("Updated P.1 station with sample geolocation data")
            else:
                logging.info("All geolocation columns already exist")

            return True
        finally:
            # Fix: previously the connection leaked when any statement raised.
            conn.close()

    except ImportError:
        logging.error("pymysql not installed. Run: pip install pymysql")
        return False
    except Exception as e:
        logging.error(f"Error migrating MySQL database: {e}")
        return False
def load_config_from_env() -> Dict[str, Any]:
    """Load database configuration from environment variables"""
    # DB_TYPE selects the backend; comparison is case-insensitive.
    selected = os.getenv('DB_TYPE', 'sqlite').lower()

    if selected == 'postgresql':
        dsn = os.getenv('POSTGRES_CONNECTION_STRING',
                        'postgresql://postgres:password@localhost/water_monitoring')
        return {'type': 'postgresql', 'connection_string': dsn}

    if selected == 'mysql':
        dsn = os.getenv('MYSQL_CONNECTION_STRING',
                        'mysql://root:password@localhost/water_monitoring')
        return {'type': 'mysql', 'connection_string': dsn}

    if selected == 'victoriametrics':
        logging.info("VictoriaMetrics doesn't require schema migration")
        return {'type': 'victoriametrics'}

    if selected == 'influxdb':
        logging.info("InfluxDB doesn't require schema migration")
        return {'type': 'influxdb'}

    # Anything else (including unset) falls back to SQLite.
    return {
        'type': 'sqlite',
        'db_path': os.getenv('SQLITE_DB_PATH', 'water_monitoring.db'),
    }
def _run_migration(db_type, config):
    """Dispatch to the backend-specific migration; exits on fatal config errors."""
    if db_type == 'sqlite':
        db_path = config.get('db_path', 'water_monitoring.db')
        if not os.path.exists(db_path):
            logging.error(f"Database file not found: {db_path}")
            sys.exit(1)
        return migrate_sqlite(db_path)
    if db_type == 'postgresql':
        return migrate_postgresql(config['connection_string'])
    if db_type == 'mysql':
        return migrate_mysql(config['connection_string'])
    if db_type in ('victoriametrics', 'influxdb'):
        # Time-series backends have no relational schema to alter.
        logging.info(f"{db_type.upper()} doesn't require schema migration")
        return True
    logging.error(f"Unsupported database type: {db_type}")
    sys.exit(1)


def main():
    """Main migration function"""
    logging.info("Starting geolocation column migration...")

    # Resolve which backend to migrate from the environment.
    config = load_config_from_env()
    db_type = config['type']
    logging.info(f"Detected database type: {db_type.upper()}")

    if not _run_migration(db_type, config):
        logging.error("❌ Migration failed!")
        sys.exit(1)

    logging.info("✅ Migration completed successfully!")
    logging.info("You can now restart your water monitoring application")
    logging.info("The system will automatically use the new geolocation columns")


if __name__ == "__main__":
    main()
39
scripts/water-monitor.service
Normal file
39
scripts/water-monitor.service
Normal file
@@ -0,0 +1,39 @@
|
||||
[Unit]
# NOTE(review): description says "Thailand Water Level Monitor" while the
# project is branded "Northern Thailand Ping River Monitor" — confirm which
# name is intended.
Description=Thailand Water Level Monitor
# NOTE(review): placeholder GitHub URL; the repo appears to live on
# git.b4l.co.th — verify before deploying.
Documentation=https://github.com/your-username/thailand-water-monitor
# Start only after basic networking is up.
After=network.target
Wants=network-online.target

[Service]
Type=simple
# Run under a dedicated unprivileged account.
User=water-monitor
Group=water-monitor
WorkingDirectory=/opt/thailand-water-monitor
ExecStart=/opt/thailand-water-monitor/venv/bin/python src/water_scraper_v3.py
ExecReload=/bin/kill -HUP $MAINPID
# Restart on any exit, waiting 60s between attempts; give 30s to stop cleanly.
Restart=always
RestartSec=60
TimeoutStopSec=30

# Environment variables
# Default backend is VictoriaMetrics on localhost:8428.
Environment=DB_TYPE=victoriametrics
Environment=VM_HOST=localhost
Environment=VM_PORT=8428
Environment=PYTHONPATH=/opt/thailand-water-monitor
Environment=PYTHONUNBUFFERED=1

# Security settings (systemd sandboxing)
NoNewPrivileges=true
PrivateTmp=true
# Filesystem is read-only except the paths in ReadWritePaths.
ProtectSystem=strict
ProtectHome=true
ReadWritePaths=/opt/thailand-water-monitor
# Empty value: drop all capabilities.
CapabilityBoundingSet=

# Logging
StandardOutput=journal
StandardError=journal
SyslogIdentifier=water-monitor

[Install]
WantedBy=multi-user.target
Reference in New Issue
Block a user