diff --git a/src/main.py b/src/main.py
index 357cfac..1f50d5c 100644
--- a/src/main.py
+++ b/src/main.py
@@ -84,12 +84,21 @@ def run_continuous_monitoring():
     # Run initial cycle
     logger.info("Running initial data collection...")
     scraper.run_scraping_cycle()
-
+
     # Start scheduled monitoring
     import schedule
-
-    schedule.every(Config.SCRAPING_INTERVAL_HOURS).hours.do(scraper.run_scraping_cycle)
-
+    from datetime import datetime, timedelta
+
+    # Calculate next full hour
+    now = datetime.now()
+    next_hour = (now + timedelta(hours=1)).replace(minute=0, second=0, microsecond=0)
+    minutes_to_wait = (next_hour - now).total_seconds() / 60
+
+    logger.info(f"Next scheduled run at {next_hour.strftime('%H:%M')} (waiting {minutes_to_wait:.1f} minutes)")
+
+    # Schedule at the top of each hour
+    schedule.every().hour.at(":00").do(scraper.run_scraping_cycle)
+
     while True:
         schedule.run_pending()
         time.sleep(60)  # Check every minute
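
Note: the sketch below is a minimal, standalone illustration of the hourly scheduling pattern introduced in this diff, assuming only the third-party schedule package (pip install schedule). run_job is a hypothetical stand-in for scraper.run_scraping_cycle, and print replaces the project logger for brevity.

import time
from datetime import datetime, timedelta

import schedule


def run_job():
    # Placeholder for scraper.run_scraping_cycle()
    print(f"[{datetime.now():%H:%M:%S}] running scraping cycle")


def main():
    # Informational only, as in the diff: report when the next full hour falls.
    now = datetime.now()
    next_hour = (now + timedelta(hours=1)).replace(minute=0, second=0, microsecond=0)
    minutes_to_wait = (next_hour - now).total_seconds() / 60
    print(f"Next scheduled run at {next_hour:%H:%M} (waiting {minutes_to_wait:.1f} minutes)")

    # ":00" pins the hourly job to minute 00 of every hour.
    schedule.every().hour.at(":00").do(run_job)

    while True:
        schedule.run_pending()
        time.sleep(60)  # poll once per minute


if __name__ == "__main__":
    main()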