Initial commit
.gitignore vendored (Normal file, 59 lines)
@@ -0,0 +1,59 @@
# File created using '.gitignore Generator' for Visual Studio Code: https://bit.ly/vscode-gig
# Created by https://www.toptal.com/developers/gitignore/api/visualstudiocode,macos
# Edit at https://www.toptal.com/developers/gitignore?templates=visualstudiocode,macos

### macOS ###
# General
.DS_Store
.AppleDouble
.LSOverride

# Icon must end with two \r
Icon


# Thumbnails
._*

# Files that might appear in the root of a volume
.DocumentRevisions-V100
.fseventsd
.Spotlight-V100
.TemporaryItems
.Trashes
.VolumeIcon.icns
.com.apple.timemachine.donotpresent

# Directories potentially created on remote AFP share
.AppleDB
.AppleDesktop
Network Trash Folder
Temporary Items
.apdisk

### macOS Patch ###
# iCloud generated files
*.icloud

### VisualStudioCode ###
.vscode/*
!.vscode/settings.json
!.vscode/tasks.json
!.vscode/launch.json
!.vscode/extensions.json
!.vscode/*.code-snippets

# Local History for Visual Studio Code
.history/

# Built Visual Studio Code Extensions
*.vsix

### VisualStudioCode Patch ###
# Ignore all local history of files
.history
.ionide

# End of https://www.toptal.com/developers/gitignore/api/visualstudiocode,macos

# Custom rules (everything added below won't be overriden by 'Generate .gitignore File' if you use 'Update' option)
config.conf (Normal file, 158 lines)
@@ -0,0 +1,158 @@
{
  "trading_pairs": [
    {
      "symbol": "BTCUSDT",
      "enabled": true,
      "priority": 1,
      "record_from_date": "2020-01-01T00:00:00Z"
    },
    {
      "symbol": "ETHUSDT",
      "enabled": true,
      "priority": 1,
      "record_from_date": "2020-01-01T00:00:00Z"
    },
    {
      "symbol": "BNBUSDT",
      "enabled": true,
      "priority": 2,
      "record_from_date": "2020-01-01T00:00:00Z"
    },
    {
      "symbol": "XRPUSDT",
      "enabled": true,
      "priority": 3,
      "record_from_date": "2020-01-01T00:00:00Z"
    },
    {
      "symbol": "SOLUSDT",
      "enabled": true,
      "priority": 2,
      "record_from_date": "2020-01-01T00:00:00Z"
    },
    {
      "symbol": "HBARUSDT",
      "enabled": true,
      "priority": 1,
      "record_from_date": "2020-01-01T00:00:00Z"
    },
    {
      "symbol": "HBARBTC",
      "enabled": true,
      "priority": 1,
      "record_from_date": "2020-01-01T00:00:00Z"
    }
  ],
  "technical_indicators": {
    "enabled": ["sma", "ema", "rsi", "macd", "bb", "atr"],
    "periods": {
      "sma": [20, 50, 200],
      "ema": [12, 26],
      "rsi": [14],
      "macd": {
        "fast": 12,
        "slow": 26,
        "signal": 9
      },
      "bb": {
        "period": 20,
        "std": 2
      },
      "atr": [14],
      "stoch": {
        "k_period": 14,
        "d_period": 3
      },
      "adx": [14]
    },
    "calculation_intervals": ["1m", "5m", "15m", "1h", "4h", "1d"]
  },
  "collection": {
    "bulk_chunk_size": 1000,
    "websocket_reconnect_delay": 5,
    "tick_batch_size": 100,
    "candle_intervals": ["1m", "5m", "15m", "1h", "4h", "1d"],
    "max_retries": 3,
    "retry_delay": 1,
    "rate_limit_requests_per_minute": 2000,
    "concurrent_symbol_limit": 10,
    "default_record_from_date": "2020-01-01T00:00:00Z"
  },
  "gap_filling": {
    "enable_auto_gap_filling": true,
    "auto_fill_schedule_hours": 24,
    "max_gap_size_candles": 1000,
    "min_gap_size_candles": 2,
    "enable_intelligent_averaging": true,
    "averaging_lookback_candles": 10,
    "max_consecutive_empty_candles": 5,
    "max_fill_attempts": 3,
    "intervals_to_monitor": ["1m", "5m", "15m", "1h", "4h", "1d"]
  },
  "database": {
    "batch_insert_size": 1000,
    "compression_after_days": 7,
    "retention_policy_days": 365,
    "vacuum_analyze_interval_hours": 24,
    "connection_pool": {
      "min_size": 10,
      "max_size": 50,
      "command_timeout": 60
    },
    "partitioning": {
      "chunk_time_interval": "1 day",
      "compress_chunk_time_interval": "7 days"
    }
  },
  "ui": {
    "refresh_interval_seconds": 5,
    "max_chart_points": 1000,
    "default_timeframe": "1d",
    "theme": "dark",
    "enable_realtime_updates": true
  },
  "monitoring": {
    "enable_performance_metrics": true,
    "log_slow_queries": true,
    "slow_query_threshold_ms": 1000,
    "enable_health_checks": true,
    "health_check_interval_seconds": 30
  },
  "alerts": {
    "enable_price_alerts": false,
    "enable_volume_alerts": false,
    "enable_system_alerts": true,
    "price_change_threshold_percent": 5.0,
    "volume_change_threshold_percent": 50.0
  },
  "data_quality": {
    "enable_data_validation": true,
    "max_price_deviation_percent": 10.0,
    "min_volume_threshold": 0.001,
    "enable_outlier_detection": true,
    "outlier_detection_window": 100
  },
  "features": {
    "enable_candle_generation_from_ticks": true,
    "enable_technical_indicator_alerts": false,
    "enable_market_analysis": true,
    "enable_backtesting": false,
    "enable_paper_trading": false
  },
  "system": {
    "max_memory_usage_mb": 8192,
    "max_cpu_usage_percent": 80,
    "enable_auto_scaling": false,
    "enable_caching": true,
    "cache_ttl_seconds": 300
  }
}
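The Python modules in this commit read this file through load_config() from utils, which is not part of the commit, so the exact loader is not shown here. A minimal sketch of how the file could be loaded, assuming it is the plain JSON shown above; the function name load_config_sketch is illustrative, not the project's API.

# Minimal sketch (assumption): config.conf is plain JSON, so a loader along
# these lines would suffice. The repo's own utils.load_config may differ.
import json
from pathlib import Path

def load_config_sketch(path: str = "config.conf") -> dict:
    """Read the JSON configuration file shown above."""
    with open(Path(path), "r", encoding="utf-8") as f:
        return json.load(f)

if __name__ == "__main__":
    cfg = load_config_sketch()
    # Pick out the enabled pairs and the candle intervals used by the collector.
    enabled = [p["symbol"] for p in cfg["trading_pairs"] if p["enabled"]]
    print("Enabled pairs:", enabled)
    print("Candle intervals:", cfg["collection"]["candle_intervals"])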
ui.py (Normal file, 179 lines)
@@ -0,0 +1,179 @@
#!/usr/bin/env python3
"""
ui.py - Main Application Entry Point and FastAPI App Initialization
Orchestrates all components and initializes the FastAPI application
"""

import asyncio
import logging
from typing import Dict, Any, Optional
from pathlib import Path
from fastapi import FastAPI, WebSocket
from fastapi.middleware.cors import CORSMiddleware
import uvicorn
from dotenv import load_dotenv

# Import application modules
from db import DatabaseManager
from utils import load_config, setup_logging
from main import BinanceDataCollector

# Import UI modules
from ui_models import serialize_for_json
from ui_routes import APIRoutes
from ui_websocket import handle_websocket_connection, broadcast_status_updates, websocket_connections
from ui_state import state_manager, get_current_status

# Load environment variables
load_dotenv('variables.env')

# Setup logging
setup_logging()
logger = logging.getLogger(__name__)

# Global application components
app = FastAPI(
    title="Crypto Trading Data Collector",
    version="3.1.0",
    description="Real-time cryptocurrency market data collection and analysis platform"
)

db_manager: Optional[DatabaseManager] = None
data_collector: Optional[BinanceDataCollector] = None
config: Dict[str, Any] = {}
api_routes: Optional[APIRoutes] = None

# Add CORS middleware
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)


@app.on_event("startup")
async def startup_event():
    """Initialize application on startup"""
    global db_manager, data_collector, config, api_routes

    try:
        logger.info("=" * 80)
        logger.info("Starting Crypto Trading Data Collector v3.1.0")
        logger.info("=" * 80)

        # Load configuration
        config = load_config()
        logger.info("✓ Configuration loaded successfully")

        # Initialize database
        db_manager = DatabaseManager()
        await db_manager.initialize()
        logger.info("✓ Database initialized successfully")

        # Initialize data collector
        data_collector = BinanceDataCollector()
        await data_collector.initialize()
        logger.info("✓ Data collector initialized successfully")

        # Initialize API routes
        api_routes = APIRoutes(
            app,
            db_manager,
            data_collector,
            config,
            state_manager
        )
        logger.info("✓ API routes registered successfully")

        # Restore collection state if it was running before reload
        if state_manager.get("is_collecting", False):
            logger.info("Restoring collection state from persistent storage...")
            try:
                await data_collector.start_continuous_collection()
                logger.info("✓ Collection state restored successfully")
            except Exception as e:
                logger.error(f"✗ Error restoring collection state: {e}")
                state_manager.update(is_collecting=False)

        # Start WebSocket broadcaster
        async def status_getter():
            return await get_current_status(db_manager, data_collector, config)

        asyncio.create_task(broadcast_status_updates(status_getter))
        logger.info("✓ WebSocket broadcaster started")

        logger.info("=" * 80)
        logger.info("FastAPI application startup complete - Ready to serve requests")
        logger.info("=" * 80)

    except Exception as e:
        logger.error("=" * 80)
        logger.error(f"FATAL ERROR during startup: {e}", exc_info=True)
        logger.error("=" * 80)
        raise


@app.on_event("shutdown")
async def shutdown_event():
    """Clean shutdown"""
    global db_manager, data_collector

    try:
        logger.info("=" * 80)
        logger.info("Shutting down Crypto Trading Data Collector")
        logger.info("=" * 80)

        # Save current state before shutdown
        if data_collector:
            state_manager.update(
                is_collecting=data_collector.is_collecting if hasattr(data_collector, 'is_collecting') else False,
                websocket_collection_running=data_collector.websocket_collection_running if hasattr(data_collector, 'websocket_collection_running') else False
            )
            logger.info("✓ State saved")

        # Close database connections
        if db_manager:
            try:
                await db_manager.close()
                logger.info("✓ Database connections closed")
            except Exception as e:
                logger.error(f"✗ Error closing database: {e}")

        logger.info("=" * 80)
        logger.info("Shutdown complete")
        logger.info("=" * 80)

    except Exception as e:
        logger.error(f"Error during shutdown: {e}", exc_info=True)


@app.websocket("/ws")
async def websocket_endpoint(websocket: WebSocket):
    """WebSocket endpoint for real-time updates"""
    await handle_websocket_connection(websocket)


def main():
    """Main entry point for running the application"""
    import os

    # Get configuration from environment or use defaults
    host = os.getenv("WEB_HOST", "0.0.0.0")
    port = int(os.getenv("WEB_PORT", "8000"))
    reload = os.getenv("WEB_RELOAD", "False").lower() == "true"

    logger.info(f"Starting server on {host}:{port} (reload={reload})")

    uvicorn.run(
        "ui:app",
        host=host,
        port=port,
        reload=reload,
        log_level="info"
    )


if __name__ == "__main__":
    main()
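ui.py exposes a /ws WebSocket endpoint fed by broadcast_status_updates, which lives in ui_websocket and is not included in this commit, so the exact message framing is not shown. A hedged client sketch, assuming JSON text frames and the default host/port from main(); the websockets package is an assumption of this example, not a project dependency stated in the commit.

# Hedged sketch of a client for the /ws endpoint above.
import asyncio
import json
import websockets  # pip install websockets

async def watch_status(url: str = "ws://localhost:8000/ws") -> None:
    async with websockets.connect(url) as ws:
        while True:
            raw = await ws.recv()      # one broadcast frame from the server
            status = json.loads(raw)   # assumed to be a JSON status payload
            print(status.get("status"), status.get("total_records"))

if __name__ == "__main__":
    asyncio.run(watch_status())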
ui_models.py (Normal file, 114 lines)
@@ -0,0 +1,114 @@
#!/usr/bin/env python3

"""
ui_models.py - Pydantic Models and Data Structures

Defines all API request/response models and data validation schemas
"""

from datetime import datetime
from decimal import Decimal
from typing import Dict, List, Optional, Any, Union
from pydantic import BaseModel, Field, validator


# Pydantic models for API requests/responses

class TradingPairConfig(BaseModel):
    """Configuration for a trading pair"""
    symbol: str
    enabled: bool
    priority: int = 1
    record_from_date: Optional[str] = None


class TradingPairAdd(BaseModel):
    """Request to add a new trading pair"""
    symbol: str
    priority: int = 1
    record_from_date: Optional[str] = None


class BulkDownloadRequest(BaseModel):
    """Request for bulk historical data download"""
    symbols: List[str]  # Changed from 'symbol' to 'symbols' to support multiple
    start_date: str
    end_date: Optional[str] = None
    intervals: Optional[List[str]] = None


class GapFillRequest(BaseModel):
    """Request to fill data gaps"""
    symbol: str
    interval: str
    gap_start: str
    gap_end: str


class AutoGapFillRequest(BaseModel):
    """Request to automatically fill gaps for a symbol"""
    symbol: str
    intervals: Optional[List[str]] = None
    fill_genuine_gaps: bool = True


class GapDetectionRequest(BaseModel):
    """Request to detect gaps"""
    symbol: Optional[str] = None
    interval: Optional[str] = None


class TechnicalIndicatorsConfig(BaseModel):
    """Complete technical indicators configuration (matching config.conf structure)"""
    enabled: Optional[List[str]] = None
    periods: Optional[Dict[str, Any]] = None
    calculation_intervals: Optional[List[str]] = None

    class Config:
        extra = "allow"  # Allow additional fields


class ConfigUpdate(BaseModel):
    """Update application configuration - accepts partial updates"""
    trading_pairs: Optional[List[TradingPairConfig]] = None
    technical_indicators: Optional[Dict[str, Any]] = None
    collection: Optional[Dict[str, Any]] = None
    gap_filling: Optional[Dict[str, Any]] = None
    database: Optional[Dict[str, Any]] = None
    ui: Optional[Dict[str, Any]] = None
    monitoring: Optional[Dict[str, Any]] = None
    alerts: Optional[Dict[str, Any]] = None
    data_quality: Optional[Dict[str, Any]] = None
    features: Optional[Dict[str, Any]] = None
    system: Optional[Dict[str, Any]] = None

    class Config:
        extra = "allow"  # Allow additional config sections


class EnvVarUpdate(BaseModel):
    """Update environment variable"""
    key: str
    value: str


class ChartDataRequest(BaseModel):
    """Request chart data for visualization"""
    symbol: str
    interval: str = "1h"
    limit: int = 500


# Utility functions for JSON serialization

def serialize_for_json(obj: Any) -> Any:
    """Recursively serialize datetime and Decimal objects in nested structures"""
    if isinstance(obj, datetime):
        return obj.isoformat()
    elif isinstance(obj, Decimal):
        return float(obj)
    elif isinstance(obj, dict):
        return {k: serialize_for_json(v) for k, v in obj.items()}
    elif isinstance(obj, list):
        return [serialize_for_json(item) for item in obj]
    return obj
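serialize_for_json is what the route handlers lean on to make database rows JSON-safe before wrapping them in a JSONResponse. A short usage illustration based only on the function above; the sample values are made up.

# Illustration of serialize_for_json on a nested structure containing the two
# types it converts (datetime and Decimal); everything else passes through.
from datetime import datetime
from decimal import Decimal

sample = {
    "symbol": "BTCUSDT",
    "candles": [{"close": Decimal("43250.10"), "time": datetime(2024, 1, 1)}],
}
print(serialize_for_json(sample))
# {'symbol': 'BTCUSDT', 'candles': [{'close': 43250.1, 'time': '2024-01-01T00:00:00'}]}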
ui_routes.py (Normal file, 721 lines)
@@ -0,0 +1,721 @@
#!/usr/bin/env python3

"""
ui_routes.py - API Endpoints and Route Handlers

Defines all FastAPI routes and business logic for API endpoints
"""

import asyncio
import logging
from datetime import datetime, timedelta, timezone
from typing import Dict, Any, Optional, List

from fastapi import HTTPException, Request
from fastapi.responses import HTMLResponse, JSONResponse

from dotenv import set_key, dotenv_values

# UI and models
from ui_models import (
    TradingPairConfig,
    TradingPairAdd,
    BulkDownloadRequest,
    GapFillRequest,
    ConfigUpdate,
    EnvVarUpdate,
    ChartDataRequest,
    AutoGapFillRequest,
    GapDetectionRequest,
    serialize_for_json,
)

from ui_template_dashboard import get_dashboard_html
from ui_template_config import get_config_html
from ui_state import get_current_status
from utils import load_config, save_config, validate_symbol, reload_env_vars

logger = logging.getLogger(__name__)


def _tz_aware(dt: datetime) -> datetime:
    if dt.tzinfo is None:
        return dt.replace(tzinfo=timezone.utc)
    return dt.astimezone(timezone.utc)


def _ok(data: Any, status: str = "success", http_status: int = 200) -> JSONResponse:
    return JSONResponse(content={"status": status, "data": serialize_for_json(data)}, status_code=http_status)


def _err(message: str, http_status: int = 500, extra: Optional[Dict[str, Any]] = None) -> JSONResponse:
    payload = {"status": "error", "message": message}
    if extra:
        payload.update(extra)
    return JSONResponse(content=payload, status_code=http_status)


class APIRoutes:
    """Encapsulates all API route handlers"""

    def __init__(self, app, db_manager, data_collector, config, state_manager):
        self.app = app
        self.db_manager = db_manager
        self.data_collector = data_collector
        self.config = config
        self.state_manager = state_manager

        # Register all routes
        self._register_routes()

    def _register_routes(self):
        """Register all API routes"""

        # ---------------------------
        # Pages
        # ---------------------------

        @self.app.get("/", response_class=HTMLResponse)
        async def dashboard():
            """Serve the main dashboard"""
            return get_dashboard_html()

        @self.app.get("/config", response_class=HTMLResponse)
        async def config_page():
            """Serve the configuration management page"""
            return get_config_html()

        @self.app.get("/gaps", response_class=HTMLResponse)
        async def gaps_page():
            """Serve the gap monitoring page"""
            from ui_template_gaps import get_gaps_monitoring_html
            return get_gaps_monitoring_html()

        # ---------------------------
        # Status
        # ---------------------------

        @self.app.get("/api/stats")
        async def get_stats():
            """Get current system statistics"""
            try:
                status = await get_current_status(self.db_manager, self.data_collector, self.config)
                return JSONResponse(content=serialize_for_json(status))
            except Exception as e:
                logger.error(f"Error getting stats: {e}", exc_info=True)
                raise HTTPException(status_code=500, detail=str(e))

        # ---------------------------
        # Gaps and Coverage
        # ---------------------------

        @self.app.get("/api/gaps/all-pairs")
        async def get_all_pairs_gaps():
            """Get gap status for all trading pairs"""
            try:
                if not self.db_manager:
                    logger.error("Database manager not initialized")
                    return _err("Database not initialized", 500)
                logger.info("Fetching gap status for all pairs")
                status = await self.db_manager.get_all_pairs_gap_status()
                logger.info(f"Retrieved gap status for {len(status)} pair-interval combinations")
                return _ok(status)
            except Exception as e:
                logger.error(f"Error getting all pairs gaps: {e}", exc_info=True)
                return _err(str(e), 500)

        @self.app.get("/api/gaps/details/{symbol}/{interval}")
        async def get_gap_details(symbol: str, interval: str):
            """Get detailed gap information including daily coverage"""
            try:
                if not self.db_manager:
                    raise HTTPException(status_code=500, detail="Database not initialized")

                sym = symbol.upper()
                gap_info = await self.db_manager.detect_gaps(sym, interval)
                end_date = datetime.utcnow()
                start_date = end_date - timedelta(days=90)
                daily_coverage = await self.db_manager.get_data_coverage_by_day(sym, interval, start_date, end_date)

                data = {
                    "coverage_percent": gap_info.get('coverage', {}).get('coverage_percent', 0),
                    "total_records": gap_info.get('coverage', {}).get('total_records', 0),
                    "missing_records": gap_info.get('coverage', {}).get('missing_records', 0),
                    "gaps": gap_info.get('gaps', []),
                    "daily_coverage": daily_coverage.get('daily_coverage', []),
                }
                return _ok(data)
            except Exception as e:
                logger.error(f"Error getting gap details: {e}", exc_info=True)
                return _err(str(e), 500)

        @self.app.post("/api/gaps/fill-intelligent")
        async def fill_gaps_intelligent(request: Request):
            """Intelligently fill gaps with multiple attempts and averaging fallback"""
            try:
                body = await request.json()
                symbol = body.get('symbol')
                interval = body.get('interval')
                max_attempts = int(body.get('max_attempts', 3))

                if not symbol or not interval:
                    return _err("Missing symbol or interval", 400)

                if not self.db_manager:
                    raise HTTPException(status_code=500, detail="Database not initialized")

                result = await self.db_manager.fill_gaps_intelligently(symbol.upper(), interval, max_attempts)
                logger.info(f"Intelligent gap fill completed: {result}")
                return _ok(result)
            except Exception as e:
                logger.error(f"Error in intelligent gap fill: {e}", exc_info=True)
                return _err(str(e), 500)

        @self.app.get("/api/gaps/prioritized/{symbol}/{interval}")
        async def get_prioritized_gaps(symbol: str, interval: str):
            """Get gaps sorted by priority (recent and small gaps first)"""
            try:
                if not self.db_manager:
                    raise HTTPException(status_code=500, detail="Database not initialized")
                prioritized = await self.db_manager.get_prioritized_gaps(symbol.upper(), interval)
                return _ok(prioritized)
            except Exception as e:
                logger.error(f"Error getting prioritized gaps: {e}", exc_info=True)
                return _err(str(e), 500)

        @self.app.get("/api/gaps/progress/{symbol}/{interval}")
        async def get_gap_progress(symbol: str, interval: str):
            """Get real-time progress and estimated completion time"""
            try:
                if not self.db_manager:
                    raise HTTPException(status_code=500, detail="Database not initialized")
                progress = await self.db_manager.get_gap_fill_progress(symbol.upper(), interval)
                return _ok(progress)
            except Exception as e:
                logger.error(f"Error getting gap progress: {e}", exc_info=True)
                return _err(str(e), 500)

        @self.app.get("/api/gaps/health/{symbol}/{interval}")
        async def get_data_health(symbol: str, interval: str):
            """Get comprehensive data health analysis"""
            try:
                if not self.db_manager:
                    raise HTTPException(status_code=500, detail="Database not initialized")
                health = await self.db_manager.check_data_health(symbol.upper(), interval)
                return _ok(health)
            except Exception as e:
                logger.error(f"Error checking data health: {e}", exc_info=True)
                return _err(str(e), 500)

        @self.app.post("/api/gaps/smart-fill/{symbol}")
        async def smart_fill_gaps(symbol: str):
            """Intelligently fill gaps starting with highest priority"""
            try:
                if not self.db_manager:
                    raise HTTPException(status_code=500, detail="Database not initialized")

                from utils import load_config
                cfg = load_config()
                intervals = cfg.get('collection', {}).get('candle_intervals', ['1m', '5m', '15m', '1h', '4h', '1d'])

                results: List[Dict[str, Any]] = []
                for interval in intervals:
                    prioritized = await self.db_manager.get_prioritized_gaps(symbol.upper(), interval)
                    if not prioritized:
                        continue
                    filled = 0
                    for gap in prioritized[:5]:
                        if gap.get('missing_candles', 0) <= 100:
                            try:
                                await self.db_manager.fill_gaps_intelligently(symbol.upper(), interval, max_attempts=3)
                                filled += 1
                            except Exception as e:
                                logger.error(f"Error filling gap: {e}")
                    results.append({'interval': interval, 'gaps_filled': filled, 'total_gaps': len(prioritized)})

                return JSONResponse(content={"status": "success", "message": f"Smart fill completed for {symbol}", "data": results})
            except Exception as e:
                logger.error(f"Error in smart fill: {e}", exc_info=True)
                return _err(str(e), 500)

        @self.app.post("/api/gaps/fill")
        async def fill_gaps(request: GapFillRequest):
            """Fill data gaps"""
            try:
                if not self.data_collector:
                    raise HTTPException(status_code=500, detail="Data collector not initialized")

                gap_start = datetime.fromisoformat(request.gap_start)
                gap_end = datetime.fromisoformat(request.gap_end)
                gap_start = _tz_aware(gap_start)
                gap_end = _tz_aware(gap_end)

                await self.data_collector.bulk_download_historical_data(
                    request.symbol.upper(),
                    gap_start,
                    gap_end,
                    [request.interval],
                )
                logger.info(f"Gap filled for {request.symbol} {request.interval}")
                return JSONResponse(content={"status": "success", "message": "Gap filled successfully"})
            except Exception as e:
                logger.error(f"Error filling gap: {e}", exc_info=True)
                return _err(str(e), 500)

        @self.app.post("/api/gaps/auto-fill")
        async def auto_fill_gaps(request: AutoGapFillRequest):
            """Automatically fill gaps for a symbol"""
            try:
                if not self.data_collector:
                    raise HTTPException(status_code=500, detail="Data collector not initialized")
                result = await self.data_collector.auto_fill_gaps(
                    request.symbol.upper(),
                    request.intervals,
                    request.fill_genuine_gaps,
                )
                logger.info(f"Auto gap fill completed for {request.symbol}: {result}")
                return JSONResponse(content={"status": "success", "message": f"Filled gaps for {request.symbol}", "result": serialize_for_json(result)})
            except Exception as e:
                logger.error(f"Error in auto gap fill: {e}", exc_info=True)
                return _err(str(e), 500)

        @self.app.get("/api/gaps/summary")
        async def get_gaps_summary():
            """Get summary of all gaps across all symbols"""
            try:
                if not self.db_manager:
                    raise HTTPException(status_code=500, detail="Database not initialized")
                summary = await self.db_manager.get_all_gaps_summary()
                return _ok(summary)
            except Exception as e:
                logger.error(f"Error getting gaps summary: {e}", exc_info=True)
                return _err(str(e), 500)

        @self.app.get("/api/gaps/status/{symbol}/{interval}")
        async def get_gap_status(symbol: str, interval: str):
            """Get gap fill status for a specific symbol/interval"""
            try:
                if not self.db_manager:
                    raise HTTPException(status_code=500, detail="Database not initialized")
                status = await self.db_manager.get_gap_fill_status(symbol.upper(), interval)
                return _ok(status)
            except Exception as e:
                logger.error(f"Error getting gap status: {e}", exc_info=True)
                return _err(str(e), 500)

        @self.app.get("/api/gaps/{symbol}/{interval}")
        async def detect_gaps(symbol: str, interval: str):
            """Detect data gaps"""
            try:
                if not self.db_manager:
                    raise HTTPException(status_code=500, detail="Database not initialized")
                gaps = await self.db_manager.detect_gaps(symbol.upper(), interval)
                return JSONResponse(content={"status": "success", "gaps": serialize_for_json(gaps)})
            except Exception as e:
                logger.error(f"Error detecting gaps: {e}", exc_info=True)
                return _err(str(e), 500)

        @self.app.post("/api/gaps/fill-genuine/{symbol}/{interval}")
        async def fill_genuine_gaps(symbol: str, interval: str):
            """Fill genuine empty gaps with intelligent averaging"""
            try:
                if not self.db_manager:
                    raise HTTPException(status_code=500, detail="Database not initialized")
                gap_config = self.config.get('gap_filling', {})
                max_consecutive = int(gap_config.get('max_consecutive_empty_candles', 5))
                lookback = int(gap_config.get('averaging_lookback_candles', 10))
                filled_count = await self.db_manager.fill_genuine_gaps_with_averages(
                    symbol.upper(), interval, max_consecutive, lookback
                )
                logger.info(f"Filled {filled_count} genuine gaps for {symbol} {interval}")
                return JSONResponse(
                    content={
                        "status": "success",
                        "message": f"Filled {filled_count} genuine empty candles",
                        "filled_count": filled_count,
                    }
                )
            except Exception as e:
                logger.error(f"Error filling genuine gaps: {e}", exc_info=True)
                return _err(str(e), 500)

        # ---------------------------
        # Symbols and Prices
        # ---------------------------

        @self.app.get("/api/symbols")
        async def get_symbols():
            """Get list of all available symbols"""
            try:
                if not self.db_manager:
                    logger.error("Database manager not initialized")
                    return JSONResponse(content={"status": "error", "symbols": []}, status_code=500)
                symbols = await self.db_manager.get_available_symbols()
                logger.info(f"Retrieved {len(symbols)} symbols from database")
                return JSONResponse(content={"status": "success", "symbols": symbols})
            except Exception as e:
                logger.error(f"Error getting symbols: {e}", exc_info=True)
                return JSONResponse(content={"status": "error", "symbols": []}, status_code=500)

        @self.app.get("/api/price-trends/{symbol}")
        async def get_price_trends(symbol: str):
            """Get current price and trend indicators for multiple timeframes"""
            try:
                if not self.db_manager:
                    logger.error("Database manager not initialized")
                    return _err("Database not initialized", 500)
                logger.info(f"Price trends request for {symbol}")
                data = await self.db_manager.get_current_price_and_trends_with_volume(symbol.upper())
                if not data:
                    logger.warning(f"No price data found for {symbol}")
                    return _err(f"No data found for {symbol}. Please start data collection first.", 404)
                pair_config = next((p for p in self.config.get('trading_pairs', []) if p['symbol'] == symbol.upper()), None)
                data['enabled'] = pair_config.get('enabled', False) if pair_config else False
                logger.info(f"Returning price trends for {symbol}: price={data.get('current_price')}")
                return _ok(data)
            except Exception as e:
                logger.error(f"Error getting price trends: {e}", exc_info=True)
                return _err(f"Error retrieving price trends: {str(e)}", 500)

        # ---------------------------
        # Collection control
        # ---------------------------

        @self.app.post("/api/collection/start")
        async def start_collection():
            """Start data collection"""
            try:
                if not self.data_collector:
                    raise HTTPException(status_code=500, detail="Data collector not initialized")
                if self.state_manager.get("is_collecting", False):
                    return JSONResponse(content={"status": "info", "message": "Collection already running"})
                await self.data_collector.start_continuous_collection()
                self.state_manager.update(is_collecting=True)
                logger.info("Collection started via API")
                return JSONResponse(content={"status": "success", "message": "Collection started"})
            except Exception as e:
                logger.error(f"Error starting collection: {e}", exc_info=True)
                return JSONResponse(content={"status": "error", "message": str(e)}, status_code=500)

        @self.app.post("/api/collection/stop")
        async def stop_collection():
            """Stop data collection"""
            try:
                if not self.data_collector:
                    raise HTTPException(status_code=500, detail="Data collector not initialized")
                if not self.state_manager.get("is_collecting", False):
                    return JSONResponse(content={"status": "info", "message": "Collection not running"})
                await self.data_collector.stop_continuous_collection()
                self.state_manager.update(is_collecting=False)
                logger.info("Collection stopped via API")
                return JSONResponse(content={"status": "success", "message": "Collection stopped"})
            except Exception as e:
                logger.error(f"Error stopping collection: {e}", exc_info=True)
                return JSONResponse(content={"status": "error", "message": str(e)}, status_code=500)

        # ---------------------------
        # Configuration
        # ---------------------------

        @self.app.get("/api/config")
        async def get_configuration():
            """Get current configuration"""
            try:
                cfg = load_config()
                return JSONResponse(content=serialize_for_json(cfg))
            except Exception as e:
                logger.error(f"Error getting config: {e}", exc_info=True)
                raise HTTPException(status_code=500, detail=str(e))

        @self.app.post("/api/config")
        async def update_configuration(request: Request):
            """Update configuration - accepts raw JSON body"""
            try:
                body = await request.json()
                logger.info(f"Received config update keys: {list(body.keys())}")

                current_config = load_config()
                # Deep merge/replace top-level keys
                for key, value in body.items():
                    if key in current_config and isinstance(current_config[key], dict) and isinstance(value, dict):
                        current_config[key].update(value)
                    else:
                        current_config[key] = value

                save_config(current_config)
                self.config.clear()
                self.config.update(current_config)
                logger.info("Configuration updated successfully")
                return JSONResponse(content={"status": "success", "message": "Configuration updated"})
            except Exception as e:
                logger.error(f"Error updating config: {e}", exc_info=True)
                return _err(str(e), 500)

        @self.app.post("/api/trading-pairs")
        async def add_trading_pair(pair: TradingPairAdd):
            """Add a new trading pair"""
            try:
                if not validate_symbol(pair.symbol.upper()):
                    return JSONResponse(content={"status": "error", "message": "Invalid symbol format"}, status_code=400)

                cfg = load_config()
                existing = [p for p in cfg.get('trading_pairs', []) if p['symbol'] == pair.symbol.upper()]
                if existing:
                    return JSONResponse(content={"status": "error", "message": "Trading pair already exists"}, status_code=409)

                record_from_date = pair.record_from_date or cfg.get('collection', {}).get('default_record_from_date', '2020-01-01T00:00:00Z')
                cfg.setdefault('trading_pairs', []).append({
                    'symbol': pair.symbol.upper(),
                    'enabled': True,
                    'priority': pair.priority,
                    'record_from_date': record_from_date,
                })
                save_config(cfg)
                self.config.clear()
                self.config.update(cfg)
                logger.info(f"Added trading pair: {pair.symbol}")
                return JSONResponse(content={"status": "success", "message": f"Added {pair.symbol}"})
            except Exception as e:
                logger.error(f"Error adding trading pair: {e}", exc_info=True)
                return JSONResponse(content={"status": "error", "message": str(e)}, status_code=500)

        @self.app.put("/api/trading-pairs/{symbol}")
        async def update_trading_pair(symbol: str, request: Request):
            """Update a trading pair's configuration"""
            try:
                update = await request.json()
                logger.info(f"Updating trading pair {symbol}: {update}")
                cfg = load_config()

                pair_found = False
                for pair in cfg.get('trading_pairs', []):
                    if pair['symbol'] == symbol.upper():
                        if 'enabled' in update:
                            pair['enabled'] = bool(update['enabled'])
                        if 'priority' in update:
                            pair['priority'] = int(update['priority'])
                        if 'record_from_date' in update:
                            pair['record_from_date'] = update['record_from_date']
                        pair_found = True
                        break

                if not pair_found:
                    return JSONResponse(content={"status": "error", "message": "Trading pair not found"}, status_code=404)

                save_config(cfg)
                self.config.clear()
                self.config.update(cfg)
                logger.info(f"Updated trading pair: {symbol}")
                return JSONResponse(content={"status": "success", "message": f"Updated {symbol}"})
            except Exception as e:
                logger.error(f"Error updating trading pair: {e}", exc_info=True)
                return JSONResponse(content={"status": "error", "message": str(e)}, status_code=500)

        @self.app.delete("/api/trading-pairs/{symbol}")
        async def remove_trading_pair(symbol: str):
            """Remove a trading pair"""
            try:
                cfg = load_config()
                original_count = len(cfg.get('trading_pairs', []))
                cfg['trading_pairs'] = [p for p in cfg.get('trading_pairs', []) if p['symbol'] != symbol.upper()]

                if len(cfg['trading_pairs']) == original_count:
                    return JSONResponse(content={"status": "error", "message": "Trading pair not found"}, status_code=404)

                save_config(cfg)
                self.config.clear()
                self.config.update(cfg)
                logger.info(f"Removed trading pair: {symbol}")
                return JSONResponse(content={"status": "success", "message": f"Removed {symbol}"})
            except Exception as e:
                logger.error(f"Error removing trading pair: {e}", exc_info=True)
                return JSONResponse(content={"status": "error", "message": str(e)}, status_code=500)

        @self.app.post("/api/indicators/toggle/{indicator_name}")
        async def toggle_indicator(indicator_name: str):
            """Toggle a technical indicator on/off"""
            try:
                cfg = load_config()
                enabled_indicators = cfg.setdefault('technical_indicators', {}).setdefault('enabled', [])
                if indicator_name in enabled_indicators:
                    enabled_indicators.remove(indicator_name)
                    action = "disabled"
                else:
                    enabled_indicators.append(indicator_name)
                    action = "enabled"
                save_config(cfg)
                self.config.clear()
                self.config.update(cfg)
                logger.info(f"Indicator {indicator_name} {action}")
                return JSONResponse(content={"status": "success", "message": f"Indicator {indicator_name} {action}", "enabled": indicator_name in enabled_indicators})
            except Exception as e:
                logger.error(f"Error toggling indicator: {e}", exc_info=True)
                return JSONResponse(content={"status": "error", "message": str(e)}, status_code=500)

        @self.app.put("/api/indicators/{indicator_name}/periods")
        async def update_indicator_periods(indicator_name: str, request: Request):
            """Update periods for a technical indicator"""
            try:
                body = await request.json()
                periods = body.get('periods')
                if periods is None:
                    return JSONResponse(content={"status": "error", "message": "Missing 'periods' in request"}, status_code=400)

                cfg = load_config()
                periods_cfg = cfg.setdefault('technical_indicators', {}).setdefault('periods', {})
                if indicator_name not in periods_cfg:
                    return JSONResponse(content={"status": "error", "message": f"Unknown indicator: {indicator_name}"}, status_code=404)

                periods_cfg[indicator_name] = periods
                save_config(cfg)
                self.config.clear()
                self.config.update(cfg)
                logger.info(f"Updated {indicator_name} periods to {periods}")
                return JSONResponse(content={"status": "success", "message": f"Updated {indicator_name} periods"})
            except Exception as e:
                logger.error(f"Error updating indicator periods: {e}", exc_info=True)
                return JSONResponse(content={"status": "error", "message": str(e)}, status_code=500)

        # ---------------------------
        # Chart and Data
        # ---------------------------

        @self.app.post("/api/chart-data")
        async def get_chart_data(request: ChartDataRequest):
            """Get chart data for visualization"""
            try:
                if not self.db_manager:
                    logger.error("Database manager not initialized")
                    return JSONResponse(content={"status": "error", "message": "Database not initialized"}, status_code=500)

                logger.info(f"Chart data request: symbol={request.symbol}, interval={request.interval}, limit={request.limit}")
                data = await self.db_manager.get_recent_candles(request.symbol.upper(), request.interval, request.limit)
                logger.info(f"Retrieved {len(data) if data else 0} candles from database")
                if not data:
                    logger.warning(f"No data found for {request.symbol} at {request.interval}")
                    return JSONResponse(content={"status": "error", "message": f"No data found for {request.symbol} at {request.interval}. Please start data collection or download historical data first."}, status_code=404)
                logger.info(f"Returning {len(data)} candles for {request.symbol}")
                return JSONResponse(content={"status": "success", "data": data})
            except Exception as e:
                logger.error(f"Error getting chart data: {e}", exc_info=True)
                return JSONResponse(content={"status": "error", "message": f"Error retrieving chart data: {str(e)}"}, status_code=500)

        @self.app.post("/api/bulk-download")
        async def bulk_download(request: BulkDownloadRequest):
            """Download historical data in bulk"""
            try:
                if not self.data_collector:
                    raise HTTPException(status_code=500, detail="Data collector not initialized")

                start_date = datetime.fromisoformat(request.start_date)
                end_date = datetime.fromisoformat(request.end_date) if request.end_date else datetime.utcnow()
                start_date = _tz_aware(start_date)
                end_date = _tz_aware(end_date)

                intervals = request.intervals or ['1h', '4h', '1d']
                results = []

                for symbol in request.symbols:
                    try:
                        symu = symbol.upper()
                        # Initialize progress for UI
                        self.data_collector.download_progress[symu] = {
                            'status': 'pending',
                            'intervals': {i: {'status': 'pending', 'records': 0} for i in intervals},
                            'start_time': datetime.now(timezone.utc).isoformat(),
                        }
                        # Spawn task
                        task = asyncio.create_task(
                            self.data_collector.bulk_download_historical_data(symu, start_date, end_date, intervals)
                        )
                        results.append({'symbol': symu, 'status': 'started', 'intervals': intervals})
                        logger.info(f"Bulk download started for {symbol}")
                    except Exception as ie:
                        logger.error(f"Error starting bulk download for {symbol}: {ie}")
                        results.append({'symbol': symu, 'status': 'error', 'error': str(ie)})

                return JSONResponse(content={"status": "success", "message": f"Bulk download started for {len(request.symbols)} symbol(s)", "results": results})
            except Exception as e:
                logger.error(f"Error starting bulk download: {e}", exc_info=True)
                return JSONResponse(content={"status": "error", "message": str(e)}, status_code=500)

        @self.app.get("/api/download-progress")
        async def get_download_progress():
            """Get progress for all active downloads"""
            try:
                if not self.data_collector:
                    return JSONResponse(content={"status": "error", "message": "Data collector not initialized"}, status_code=500)
                progress = await self.data_collector.get_download_progress()
                return JSONResponse(content={"status": "success", "downloads": serialize_for_json(progress)})
            except Exception as e:
                logger.error(f"Error getting download progress: {e}", exc_info=True)
                return JSONResponse(content={"status": "error", "message": str(e)}, status_code=500)

        # ---------------------------
        # Environment variables
        # ---------------------------

        @self.app.get("/api/env")
        async def get_env_vars():
            """Get environment variables"""
            try:
                env_vars = dotenv_values('variables.env') or {}
                safe_vars = {
                    k: ('***' if any(s in k.upper() for s in ['SECRET', 'KEY', 'PASSWORD', 'TOKEN']) else v)
                    for k, v in env_vars.items()
                }
                return JSONResponse(content=safe_vars)
            except Exception as e:
                logger.error(f"Error getting env vars: {e}", exc_info=True)
                raise HTTPException(status_code=500, detail=str(e))

        @self.app.post("/api/env")
        async def update_env_var(env_update: EnvVarUpdate):
            """Update environment variable"""
            try:
                key_upper = env_update.key.upper()
                display_value = env_update.value if not any(s in key_upper for s in ['PASSWORD', 'SECRET', 'KEY', 'TOKEN']) else '***'
                logger.info(f"Updating env var: {env_update.key} = {display_value}")
                set_key('variables.env', env_update.key, env_update.value)
                reload_env_vars('variables.env')
                logger.info(f"Updated and reloaded env var: {env_update.key}")
                return JSONResponse(content={"status": "success", "message": f"Updated {env_update.key}"})
            except Exception as e:
                logger.error(f"Error updating env var: {e}", exc_info=True)
                return JSONResponse(content={"status": "error", "message": str(e)}, status_code=500)

        @self.app.delete("/api/env/{key}")
        async def delete_env_var(key: str):
            """Delete environment variable"""
            try:
                # Manual edit due to lack of delete in python-dotenv API
                try:
                    with open('variables.env', 'r', encoding='utf-8') as f:
                        lines = f.readlines()
                except FileNotFoundError:
                    lines = []
                new_lines = [line for line in lines if not line.startswith(f"{key}=")]
                with open('variables.env', 'w', encoding='utf-8') as f:
                    f.writelines(new_lines)
                reload_env_vars('variables.env')
                logger.info(f"Deleted env var: {key}")
                return JSONResponse(content={"status": "success", "message": f"Deleted {key}"})
            except Exception as e:
                logger.error(f"Error deleting env var: {e}", exc_info=True)
                return JSONResponse(content={"status": "error", "message": str(e)}, status_code=500)

        # ---------------------------
        # Database stats
        # ---------------------------

        @self.app.get("/api/database/stats")
        async def get_database_stats():
            """Get detailed database statistics"""
            try:
                if not self.db_manager:
                    raise HTTPException(status_code=500, detail="Database not initialized")
                stats = await self.db_manager.get_detailed_statistics()
                return JSONResponse(content={"status": "success", "stats": serialize_for_json(stats)})
            except Exception as e:
                logger.error(f"Error getting database stats: {e}", exc_info=True)
                return JSONResponse(content={"status": "error", "message": str(e)}, status_code=500)
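The routes above can be exercised with any HTTP client once the server from ui.py is running. A short hedged sketch using the requests library against the default host/port; the field names come from ChartDataRequest and the handlers' JSONResponse payloads, and the base URL is an assumption of this example.

# Hedged sketch of driving a few of the routes above over HTTP.
import requests

BASE = "http://localhost:8000"  # default host/port from ui.py's main()

# GET /api/stats returns the serialized status dict from get_current_status().
stats = requests.get(f"{BASE}/api/stats").json()
print(stats["status"], stats["total_records"])

# POST /api/chart-data takes the ChartDataRequest fields shown in ui_models.py.
chart = requests.post(
    f"{BASE}/api/chart-data",
    json={"symbol": "BTCUSDT", "interval": "1h", "limit": 100},
).json()
print(chart["status"], len(chart.get("data", [])))

# POST /api/collection/start toggles collection on; the handler replies with
# a {"status": ..., "message": ...} payload.
print(requests.post(f"{BASE}/api/collection/start").json())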
ui_state.py (Normal file, 136 lines)
@@ -0,0 +1,136 @@
#!/usr/bin/env python3
"""
ui_state.py - State Management and Persistence

Handles persistent state across application reloads with file-based storage
"""

import json
import logging
from datetime import datetime
from pathlib import Path
from typing import Dict, Any, Optional

logger = logging.getLogger(__name__)

STATE_FILE = Path(".collector_state.json")


class StateManager:
    """Thread-safe state manager that persists across uvicorn reloads"""

    def __init__(self):
        self.state = self._load_state()

    def _load_state(self) -> Dict[str, Any]:
        """Load state from disk with integrity checks"""
        try:
            if STATE_FILE.exists():
                with open(STATE_FILE, 'r') as f:
                    state = json.load(f)

                # Check if state is recent (within last 60 seconds)
                if 'timestamp' in state:
                    saved_time = datetime.fromisoformat(state['timestamp'])
                    age = (datetime.utcnow() - saved_time).total_seconds()

                    if age < 60:  # Extended validity window
                        logger.info(
                            f"Loaded persistent state (age: {age:.1f}s): "
                            f"collecting={state.get('is_collecting')}"
                        )
                        return state
                    else:
                        logger.info(f"State too old ({age:.1f}s), starting fresh")
        except Exception as e:
            logger.error(f"Error loading state: {e}")

        return {
            "is_collecting": False,
            "websocket_collection_running": False,
            "timestamp": datetime.utcnow().isoformat()
        }

    def _save_state(self):
        """Save state to disk atomically"""
        try:
            self.state['timestamp'] = datetime.utcnow().isoformat()

            # Atomic write using temp file
            temp_file = STATE_FILE.with_suffix('.tmp')
            with open(temp_file, 'w') as f:
                json.dump(self.state, f)
            temp_file.replace(STATE_FILE)

            logger.debug(f"Saved state: {self.state}")
        except Exception as e:
            logger.error(f"Error saving state: {e}")

    def update(self, **kwargs):
        """Update state and persist"""
        self.state.update(kwargs)
        self._save_state()

    def get(self, key: str, default=None):
        """Get state value"""
        return self.state.get(key, default)

    def get_all(self) -> Dict[str, Any]:
        """Get all state"""
        return self.state.copy()


# Global state manager instance
state_manager = StateManager()


async def get_current_status(db_manager, data_collector, config) -> Dict[str, Any]:
    """Get current system status - robust against reload issues"""
    try:
        # Use state manager as source of truth
        is_collecting = state_manager.get("is_collecting", False)

        # Double-check with data collector if available
        if data_collector and hasattr(data_collector, 'is_collecting'):
            actual_collecting = data_collector.is_collecting

            # Sync state if mismatch detected
            if actual_collecting != is_collecting:
                logger.warning(
                    f"State mismatch detected! State: {is_collecting}, "
                    f"Actual: {actual_collecting}"
                )
                is_collecting = actual_collecting
                state_manager.update(is_collecting=actual_collecting)

        # Get database statistics
        total_records = await db_manager.get_total_records() if db_manager else 0
        last_update = await db_manager.get_last_update_time() if db_manager else "Never"

        # Get active trading pairs
        active_pairs = []
        if config and 'trading_pairs' in config:
            active_pairs = [
                pair['symbol']
                for pair in config['trading_pairs']
                if pair.get('enabled', False)
            ]

        return {
            "status": "Active" if is_collecting else "Stopped",
            "total_records": total_records,
            "last_update": last_update,
            "active_pairs": len(active_pairs),
            "active_pair_list": active_pairs,
            "is_collecting": is_collecting
        }
    except Exception as e:
        logger.error(f"Error getting status: {e}")
        return {
            "status": "Error",
            "total_records": 0,
            "last_update": "Never",
            "active_pairs": 0,
            "active_pair_list": [],
            "is_collecting": False,
            "error": str(e)
        }
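Usage note (not part of the committed file): a minimal sketch of the persistence round-trip StateManager provides, assuming ui_state.py is importable from the project root and the state file was written within the 60-second validity window. It only uses names defined above (StateManager, state_manager).

# Sketch: two StateManager instances share state through .collector_state.json.
from ui_state import StateManager, state_manager

# Mark collection as running; this persists immediately via the atomic temp-file write.
state_manager.update(is_collecting=True, websocket_collection_running=True)

# Simulate what happens after a uvicorn reload: a fresh instance re-reads the file.
reloaded = StateManager()
assert reloaded.get("is_collecting") is True
print(reloaded.get_all())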
981
ui_template_config.py
Normal file
@@ -0,0 +1,981 @@
#!/usr/bin/env python3
"""
ui_template_config.py - Configuration Management HTML Template

Contains the configuration interface for managing trading pairs, indicators,
gap filling settings, and system configuration
"""


def get_config_html():
    """Return the configuration management HTML"""
    return """
<!DOCTYPE html>
|
||||||
|
<html lang="en">
|
||||||
|
<head>
|
||||||
|
<meta charset="UTF-8">
|
||||||
|
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||||
|
<title>Configuration - Trading Intelligence System</title>
|
||||||
|
|
||||||
|
<style>
|
||||||
|
* {
|
||||||
|
margin: 0;
|
||||||
|
padding: 0;
|
||||||
|
box-sizing: border-box;
|
||||||
|
}
|
||||||
|
|
||||||
|
body {
|
||||||
|
font-family: 'Segoe UI', Tahoma, Geneva, Verdana, sans-serif;
|
||||||
|
background: linear-gradient(135deg, #1e3c72 0%, #2a5298 100%);
|
||||||
|
color: #e0e0e0;
|
||||||
|
min-height: 100vh;
|
||||||
|
padding: 20px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.container {
|
||||||
|
max-width: 1400px;
|
||||||
|
margin: 0 auto;
|
||||||
|
}
|
||||||
|
|
||||||
|
.header {
|
||||||
|
background: rgba(255, 255, 255, 0.1);
|
||||||
|
backdrop-filter: blur(10px);
|
||||||
|
border-radius: 15px;
|
||||||
|
padding: 25px;
|
||||||
|
margin-bottom: 25px;
|
||||||
|
box-shadow: 0 8px 32px rgba(0, 0, 0, 0.3);
|
||||||
|
border: 1px solid rgba(255, 255, 255, 0.1);
|
||||||
|
}
|
||||||
|
|
||||||
|
.header h1 {
|
||||||
|
font-size: 2.2em;
|
||||||
|
background: linear-gradient(to right, #4facfe, #00f2fe);
|
||||||
|
-webkit-background-clip: text;
|
||||||
|
-webkit-text-fill-color: transparent;
|
||||||
|
margin-bottom: 10px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.nav-tabs {
|
||||||
|
display: flex;
|
||||||
|
gap: 15px;
|
||||||
|
margin-top: 20px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.nav-tab {
|
||||||
|
padding: 12px 24px;
|
||||||
|
background: rgba(255, 255, 255, 0.1);
|
||||||
|
border: 1px solid rgba(255, 255, 255, 0.2);
|
||||||
|
border-radius: 8px;
|
||||||
|
color: #e0e0e0;
|
||||||
|
text-decoration: none;
|
||||||
|
font-weight: 500;
|
||||||
|
transition: all 0.3s ease;
|
||||||
|
}
|
||||||
|
|
||||||
|
.nav-tab:hover {
|
||||||
|
background: rgba(255, 255, 255, 0.2);
|
||||||
|
transform: translateY(-2px);
|
||||||
|
}
|
||||||
|
|
||||||
|
.nav-tab.active {
|
||||||
|
background: linear-gradient(135deg, #667eea 0%, #764ba2 100%);
|
||||||
|
border-color: #667eea;
|
||||||
|
}
|
||||||
|
|
||||||
|
.section {
|
||||||
|
background: rgba(255, 255, 255, 0.1);
|
||||||
|
backdrop-filter: blur(10px);
|
||||||
|
border-radius: 15px;
|
||||||
|
padding: 25px;
|
||||||
|
margin-bottom: 25px;
|
||||||
|
box-shadow: 0 8px 32px rgba(0, 0, 0, 0.3);
|
||||||
|
border: 1px solid rgba(255, 255, 255, 0.1);
|
||||||
|
}
|
||||||
|
|
||||||
|
.section h2 {
|
||||||
|
font-size: 1.5em;
|
||||||
|
margin-bottom: 20px;
|
||||||
|
color: #ffffff;
|
||||||
|
}
|
||||||
|
|
||||||
|
.btn {
|
||||||
|
padding: 12px 24px;
|
||||||
|
border: none;
|
||||||
|
border-radius: 8px;
|
||||||
|
font-weight: 600;
|
||||||
|
cursor: pointer;
|
||||||
|
transition: all 0.3s ease;
|
||||||
|
font-size: 0.95em;
|
||||||
|
display: inline-block;
|
||||||
|
margin-right: 10px;
|
||||||
|
margin-bottom: 10px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.btn-primary {
|
||||||
|
background: linear-gradient(135deg, #667eea 0%, #764ba2 100%);
|
||||||
|
color: white;
|
||||||
|
}
|
||||||
|
|
||||||
|
.btn-primary:hover {
|
||||||
|
transform: translateY(-2px);
|
||||||
|
box-shadow: 0 6px 20px rgba(102, 126, 234, 0.4);
|
||||||
|
}
|
||||||
|
|
||||||
|
.btn-secondary {
|
||||||
|
background: rgba(255, 255, 255, 0.1);
|
||||||
|
color: #e0e0e0;
|
||||||
|
border: 1px solid rgba(255, 255, 255, 0.2);
|
||||||
|
}
|
||||||
|
|
||||||
|
.btn-secondary:hover {
|
||||||
|
background: rgba(255, 255, 255, 0.2);
|
||||||
|
}
|
||||||
|
|
||||||
|
.btn-success {
|
||||||
|
background: linear-gradient(135deg, #11998e 0%, #38ef7d 100%);
|
||||||
|
color: white;
|
||||||
|
}
|
||||||
|
|
||||||
|
.btn-success:hover {
|
||||||
|
transform: translateY(-2px);
|
||||||
|
box-shadow: 0 6px 20px rgba(56, 239, 125, 0.4);
|
||||||
|
}
|
||||||
|
|
||||||
|
.btn-danger {
|
||||||
|
background: linear-gradient(135deg, #eb3349 0%, #f45c43 100%);
|
||||||
|
color: white;
|
||||||
|
}
|
||||||
|
|
||||||
|
.btn-danger:hover {
|
||||||
|
transform: translateY(-2px);
|
||||||
|
box-shadow: 0 6px 20px rgba(235, 51, 73, 0.4);
|
||||||
|
}
|
||||||
|
|
||||||
|
.btn-sm {
|
||||||
|
padding: 8px 16px;
|
||||||
|
font-size: 0.85em;
|
||||||
|
}
|
||||||
|
|
||||||
|
table {
|
||||||
|
width: 100%;
|
||||||
|
border-collapse: collapse;
|
||||||
|
margin-top: 20px;
|
||||||
|
background: rgba(0, 0, 0, 0.3);
|
||||||
|
border-radius: 12px;
|
||||||
|
overflow: hidden;
|
||||||
|
}
|
||||||
|
|
||||||
|
th, td {
|
||||||
|
padding: 15px;
|
||||||
|
text-align: left;
|
||||||
|
border-bottom: 1px solid rgba(255, 255, 255, 0.1);
|
||||||
|
}
|
||||||
|
|
||||||
|
th {
|
||||||
|
background: rgba(102, 126, 234, 0.3);
|
||||||
|
font-weight: 600;
|
||||||
|
color: #ffffff;
|
||||||
|
}
|
||||||
|
|
||||||
|
tr:hover {
|
||||||
|
background: rgba(255, 255, 255, 0.05);
|
||||||
|
}
|
||||||
|
|
||||||
|
input, select, textarea {
|
||||||
|
padding: 12px;
|
||||||
|
border-radius: 8px;
|
||||||
|
border: 1px solid rgba(255, 255, 255, 0.2);
|
||||||
|
background: rgba(255, 255, 255, 0.1);
|
||||||
|
color: #e0e0e0;
|
||||||
|
font-size: 0.95em;
|
||||||
|
width: 100%;
|
||||||
|
margin-bottom: 10px;
|
||||||
|
}
|
||||||
|
|
||||||
|
input:focus, select:focus, textarea:focus {
|
||||||
|
outline: none;
|
||||||
|
border-color: #667eea;
|
||||||
|
background: rgba(255, 255, 255, 0.15);
|
||||||
|
}
|
||||||
|
|
||||||
|
.form-group {
|
||||||
|
margin-bottom: 20px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.form-group label {
|
||||||
|
display: block;
|
||||||
|
margin-bottom: 8px;
|
||||||
|
font-weight: 500;
|
||||||
|
color: #e0e0e0;
|
||||||
|
}
|
||||||
|
|
||||||
|
.indicator-card {
|
||||||
|
background: rgba(255, 255, 255, 0.05);
|
||||||
|
border: 1px solid rgba(255, 255, 255, 0.1);
|
||||||
|
border-radius: 10px;
|
||||||
|
padding: 15px;
|
||||||
|
margin-bottom: 15px;
|
||||||
|
transition: all 0.3s ease;
|
||||||
|
}
|
||||||
|
|
||||||
|
.indicator-card:hover {
|
||||||
|
background: rgba(255, 255, 255, 0.1);
|
||||||
|
}
|
||||||
|
|
||||||
|
.indicator-card.enabled {
|
||||||
|
border-color: #38ef7d;
|
||||||
|
background: rgba(56, 239, 125, 0.1);
|
||||||
|
}
|
||||||
|
|
||||||
|
.indicator-card.disabled {
|
||||||
|
border-color: #eb3349;
|
||||||
|
background: rgba(235, 51, 73, 0.1);
|
||||||
|
}
|
||||||
|
|
||||||
|
.indicator-header {
|
||||||
|
display: flex;
|
||||||
|
justify-content: space-between;
|
||||||
|
align-items: center;
|
||||||
|
margin-bottom: 10px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.indicator-name {
|
||||||
|
font-weight: bold;
|
||||||
|
font-size: 1.1em;
|
||||||
|
text-transform: uppercase;
|
||||||
|
color: #4facfe;
|
||||||
|
}
|
||||||
|
|
||||||
|
.indicator-periods {
|
||||||
|
font-size: 0.9em;
|
||||||
|
color: #a0a0a0;
|
||||||
|
margin-top: 5px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.status-badge {
|
||||||
|
padding: 5px 15px;
|
||||||
|
border-radius: 20px;
|
||||||
|
font-size: 0.85em;
|
||||||
|
font-weight: 600;
|
||||||
|
}
|
||||||
|
|
||||||
|
.status-enabled {
|
||||||
|
background: rgba(56, 239, 125, 0.3);
|
||||||
|
color: #38ef7d;
|
||||||
|
}
|
||||||
|
|
||||||
|
.status-disabled {
|
||||||
|
background: rgba(235, 51, 73, 0.3);
|
||||||
|
color: #eb3349;
|
||||||
|
}
|
||||||
|
|
||||||
|
.alert {
|
||||||
|
padding: 15px;
|
||||||
|
border-radius: 8px;
|
||||||
|
margin-bottom: 20px;
|
||||||
|
display: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
.alert-success {
|
||||||
|
background: rgba(56, 239, 125, 0.2);
|
||||||
|
border: 1px solid #38ef7d;
|
||||||
|
color: #38ef7d;
|
||||||
|
}
|
||||||
|
|
||||||
|
.alert-error {
|
||||||
|
background: rgba(235, 51, 73, 0.2);
|
||||||
|
border: 1px solid #eb3349;
|
||||||
|
color: #eb3349;
|
||||||
|
}
|
||||||
|
|
||||||
|
.alert-info {
|
||||||
|
background: rgba(79, 172, 254, 0.2);
|
||||||
|
border: 1px solid #4facfe;
|
||||||
|
color: #4facfe;
|
||||||
|
}
|
||||||
|
|
||||||
|
.config-grid {
|
||||||
|
display: grid;
|
||||||
|
grid-template-columns: repeat(auto-fit, minmax(300px, 1fr));
|
||||||
|
gap: 20px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.config-item {
|
||||||
|
background: rgba(0, 0, 0, 0.3);
|
||||||
|
padding: 15px;
|
||||||
|
border-radius: 10px;
|
||||||
|
border: 1px solid rgba(255, 255, 255, 0.1);
|
||||||
|
}
|
||||||
|
|
||||||
|
.config-label {
|
||||||
|
font-weight: 600;
|
||||||
|
color: #4facfe;
|
||||||
|
margin-bottom: 8px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.config-value {
|
||||||
|
font-size: 0.95em;
|
||||||
|
color: #e0e0e0;
|
||||||
|
}
|
||||||
|
|
||||||
|
.modal {
|
||||||
|
display: none;
|
||||||
|
position: fixed;
|
||||||
|
z-index: 1000;
|
||||||
|
left: 0;
|
||||||
|
top: 0;
|
||||||
|
width: 100%;
|
||||||
|
height: 100%;
|
||||||
|
background: rgba(0, 0, 0, 0.7);
|
||||||
|
backdrop-filter: blur(5px);
|
||||||
|
}
|
||||||
|
|
||||||
|
.modal-content {
|
||||||
|
background: linear-gradient(135deg, #1e3c72 0%, #2a5298 100%);
|
||||||
|
margin: 5% auto;
|
||||||
|
padding: 30px;
|
||||||
|
border: 1px solid rgba(255, 255, 255, 0.2);
|
||||||
|
border-radius: 15px;
|
||||||
|
width: 90%;
|
||||||
|
max-width: 600px;
|
||||||
|
box-shadow: 0 10px 40px rgba(0, 0, 0, 0.5);
|
||||||
|
}
|
||||||
|
|
||||||
|
.close {
|
||||||
|
color: #aaa;
|
||||||
|
float: right;
|
||||||
|
font-size: 28px;
|
||||||
|
font-weight: bold;
|
||||||
|
cursor: pointer;
|
||||||
|
}
|
||||||
|
|
||||||
|
.close:hover {
|
||||||
|
color: #fff;
|
||||||
|
}
|
||||||
|
|
||||||
|
::-webkit-scrollbar {
|
||||||
|
width: 10px;
|
||||||
|
}
|
||||||
|
|
||||||
|
::-webkit-scrollbar-track {
|
||||||
|
background: rgba(255, 255, 255, 0.1);
|
||||||
|
border-radius: 5px;
|
||||||
|
}
|
||||||
|
|
||||||
|
::-webkit-scrollbar-thumb {
|
||||||
|
background: rgba(102, 126, 234, 0.5);
|
||||||
|
border-radius: 5px;
|
||||||
|
}
|
||||||
|
|
||||||
|
::-webkit-scrollbar-thumb:hover {
|
||||||
|
background: rgba(102, 126, 234, 0.7);
|
||||||
|
}
|
||||||
|
</style>
|
||||||
|
</head>
|
||||||
|
<body>
|
||||||
|
<div class="container">
|
||||||
|
<!-- Header -->
|
||||||
|
<div class="header">
|
||||||
|
<h1>⚙️ System Configuration</h1>
|
||||||
|
<p>Manage trading pairs, indicators, and system settings</p>
|
||||||
|
<div class="nav-tabs">
|
||||||
|
<a href="/" class="nav-tab">Dashboard</a>
|
||||||
|
<a href="/config" class="nav-tab active">Configuration</a>
|
||||||
|
<a href="/gaps" class="nav-tab">Gaps</a>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<!-- Alert Container -->
|
||||||
|
<div id="alertContainer"></div>
|
||||||
|
|
||||||
|
<!-- Trading Pairs Section -->
|
||||||
|
<div class="section">
|
||||||
|
<h2>📊 Trading Pairs</h2>
|
||||||
|
<button class="btn btn-success" onclick="showAddPairModal()">+ Add Trading Pair</button>
|
||||||
|
<button class="btn btn-primary" onclick="loadConfig()">🔄 Refresh</button>
|
||||||
|
|
||||||
|
<table id="pairsTable">
|
||||||
|
<thead>
|
||||||
|
<tr>
|
||||||
|
<th>Symbol</th>
|
||||||
|
<th>Enabled</th>
|
||||||
|
<th>Priority</th>
|
||||||
|
<th>Record From Date</th>
|
||||||
|
<th>Actions</th>
|
||||||
|
</tr>
|
||||||
|
</thead>
|
||||||
|
<tbody id="pairsTableBody">
|
||||||
|
<tr>
|
||||||
|
<td colspan="5" style="text-align: center; padding: 40px;">Loading...</td>
|
||||||
|
</tr>
|
||||||
|
</tbody>
|
||||||
|
</table>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<!-- Technical Indicators Section -->
|
||||||
|
<div class="section">
|
||||||
|
<h2>📈 Technical Indicators</h2>
|
||||||
|
<p style="margin-bottom: 20px; color: #a0a0a0;">Enable or disable technical indicators and configure their parameters</p>
|
||||||
|
<div id="indicatorsSection">
|
||||||
|
<div style="text-align: center; padding: 40px;">Loading indicators...</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<!-- Gap Filling Configuration -->
|
||||||
|
<div class="section">
|
||||||
|
<h2>🔧 Gap Filling Configuration</h2>
|
||||||
|
<div id="gapFillingConfig">
|
||||||
|
<div style="text-align: center; padding: 40px;">Loading gap filling settings...</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<!-- Collection Settings -->
|
||||||
|
<div class="section">
|
||||||
|
<h2>📥 Collection Settings</h2>
|
||||||
|
<div id="collectionSettings">
|
||||||
|
<div style="text-align: center; padding: 40px;">Loading collection settings...</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<!-- Environment Variables Section -->
|
||||||
|
<div class="section">
|
||||||
|
<h2>🔧 Environment Variables</h2>
|
||||||
|
|
||||||
|
<div style="margin-bottom: 20px;">
|
||||||
|
<input type="text" id="newEnvKey" placeholder="Key (e.g., DB_HOST)" style="width: 200px; display: inline-block; margin-right: 10px;">
|
||||||
|
<input type="text" id="newEnvValue" placeholder="Value" style="width: 300px; display: inline-block; margin-right: 10px;">
|
||||||
|
<button class="btn btn-primary" onclick="addEnvVar()">+ Add Variable</button>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<table id="envTable">
|
||||||
|
<thead>
|
||||||
|
<tr>
|
||||||
|
<th>Key</th>
|
||||||
|
<th>Value</th>
|
||||||
|
<th>Actions</th>
|
||||||
|
</tr>
|
||||||
|
</thead>
|
||||||
|
<tbody id="envTableBody">
|
||||||
|
<tr>
|
||||||
|
<td colspan="3" style="text-align: center; padding: 40px;">Loading...</td>
|
||||||
|
</tr>
|
||||||
|
</tbody>
|
||||||
|
</table>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<!-- Save Configuration -->
|
||||||
|
<div class="section">
|
||||||
|
<h2>💾 Save Configuration</h2>
|
||||||
|
<p style="margin-bottom: 20px; color: #a0a0a0;">Save all configuration changes to disk</p>
|
||||||
|
<button class="btn btn-success" onclick="saveAllConfig()">💾 Save All Changes</button>
|
||||||
|
<button class="btn btn-secondary" onclick="loadConfig()">🔄 Reload Configuration</button>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<!-- Add Trading Pair Modal -->
|
||||||
|
<div id="addPairModal" class="modal">
|
||||||
|
<div class="modal-content">
|
||||||
|
<span class="close" onclick="closeAddPairModal()">×</span>
|
||||||
|
<h2>Add Trading Pair</h2>
|
||||||
|
<div class="form-group">
|
||||||
|
<label for="newPairSymbol">Symbol:</label>
|
||||||
|
<input type="text" id="newPairSymbol" placeholder="e.g., BTCUSDT" style="text-transform: uppercase;">
|
||||||
|
</div>
|
||||||
|
<div class="form-group">
|
||||||
|
<label for="newPairPriority">Priority:</label>
|
||||||
|
<select id="newPairPriority">
|
||||||
|
<option value="1">High (1)</option>
|
||||||
|
<option value="2">Medium (2)</option>
|
||||||
|
<option value="3">Low (3)</option>
|
||||||
|
</select>
|
||||||
|
</div>
|
||||||
|
<div class="form-group">
|
||||||
|
<label for="newPairRecordFrom">Record From Date:</label>
|
||||||
|
<input type="datetime-local" id="newPairRecordFrom">
|
||||||
|
<small style="color: #a0a0a0; display: block; margin-top: 5px;">Leave empty to use default (2020-01-01)</small>
|
||||||
|
</div>
|
||||||
|
<button class="btn btn-primary" onclick="addTradingPair()">Add Pair</button>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<!-- Edit Indicator Modal -->
|
||||||
|
<div id="editIndicatorModal" class="modal">
|
||||||
|
<div class="modal-content">
|
||||||
|
<span class="close" onclick="closeEditIndicatorModal()">×</span>
|
||||||
|
<h2>Edit Indicator: <span id="editIndicatorName"></span></h2>
|
||||||
|
<div class="form-group">
|
||||||
|
<label for="editIndicatorPeriods">Periods (JSON format):</label>
|
||||||
|
<textarea id="editIndicatorPeriods" rows="5"></textarea>
|
||||||
|
<small style="color: #a0a0a0; display: block; margin-top: 5px;">
|
||||||
|
Examples: [20, 50, 200] for arrays or {"fast": 12, "slow": 26} for objects
|
||||||
|
</small>
|
||||||
|
</div>
|
||||||
|
<button class="btn btn-primary" onclick="saveIndicatorPeriods()">Save Changes</button>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<script>
|
||||||
|
let currentConfig = {};
|
||||||
|
let currentIndicator = '';
|
||||||
|
|
||||||
|
// Load configuration on page load
|
||||||
|
document.addEventListener('DOMContentLoaded', function() {
|
||||||
|
loadConfig();
|
||||||
|
loadEnvVars();
|
||||||
|
});
|
||||||
|
|
||||||
|
// Show alert message
|
||||||
|
function showAlert(message, type = 'info') {
|
||||||
|
const container = document.getElementById('alertContainer');
|
||||||
|
const alert = document.createElement('div');
|
||||||
|
alert.className = `alert alert-${type}`;
|
||||||
|
alert.textContent = message;
|
||||||
|
alert.style.display = 'block';
|
||||||
|
container.appendChild(alert);
|
||||||
|
|
||||||
|
setTimeout(() => {
|
||||||
|
alert.remove();
|
||||||
|
}, 5000);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Load configuration
|
||||||
|
async function loadConfig() {
|
||||||
|
try {
|
||||||
|
const response = await fetch('/api/config');
|
||||||
|
currentConfig = await response.json();
|
||||||
|
|
||||||
|
displayTradingPairs(currentConfig.trading_pairs || []);
|
||||||
|
displayIndicators(currentConfig.technical_indicators || {});
|
||||||
|
displayGapFillingConfig(currentConfig.gap_filling || {});
|
||||||
|
displayCollectionSettings(currentConfig.collection || {});
|
||||||
|
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Error loading config:', error);
|
||||||
|
showAlert('Error loading configuration: ' + error.message, 'error');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Display trading pairs
|
||||||
|
function displayTradingPairs(pairs) {
|
||||||
|
const tbody = document.getElementById('pairsTableBody');
|
||||||
|
|
||||||
|
if (!pairs || pairs.length === 0) {
|
||||||
|
tbody.innerHTML = '<tr><td colspan="5" style="text-align: center;">No trading pairs configured</td></tr>';
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
tbody.innerHTML = pairs.map(pair => `
|
||||||
|
<tr>
|
||||||
|
<td><strong>${pair.symbol}</strong></td>
|
||||||
|
<td>
|
||||||
|
<span class="status-badge ${pair.enabled ? 'status-enabled' : 'status-disabled'}">
|
||||||
|
${pair.enabled ? '✅ Enabled' : '❌ Disabled'}
|
||||||
|
</span>
|
||||||
|
</td>
|
||||||
|
<td>${pair.priority}</td>
|
||||||
|
<td>${pair.record_from_date || 'Not set'}</td>
|
||||||
|
<td>
|
||||||
|
<button class="btn btn-sm ${pair.enabled ? 'btn-danger' : 'btn-success'}"
|
||||||
|
onclick="togglePair('${pair.symbol}', ${!pair.enabled})">
|
||||||
|
${pair.enabled ? 'Disable' : 'Enable'}
|
||||||
|
</button>
|
||||||
|
<button class="btn btn-sm btn-danger" onclick="removePair('${pair.symbol}')">
|
||||||
|
Delete
|
||||||
|
</button>
|
||||||
|
</td>
|
||||||
|
</tr>
|
||||||
|
`).join('');
|
||||||
|
}
|
||||||
|
|
||||||
|
// Display technical indicators
|
||||||
|
function displayIndicators(indicators) {
|
||||||
|
const container = document.getElementById('indicatorsSection');
|
||||||
|
|
||||||
|
if (!indicators || !indicators.enabled || !indicators.periods) {
|
||||||
|
container.innerHTML = '<div style="text-align: center; color: #a0a0a0;">No indicators configured</div>';
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const enabledList = indicators.enabled || [];
|
||||||
|
const periods = indicators.periods || {};
|
||||||
|
|
||||||
|
// Get all available indicators from periods
|
||||||
|
const allIndicators = Object.keys(periods);
|
||||||
|
|
||||||
|
container.innerHTML = allIndicators.map(indicator => {
|
||||||
|
const isEnabled = enabledList.includes(indicator);
|
||||||
|
const periodValue = periods[indicator];
|
||||||
|
|
||||||
|
// Format period display
|
||||||
|
let periodDisplay = '';
|
||||||
|
if (Array.isArray(periodValue)) {
|
||||||
|
periodDisplay = `Periods: ${periodValue.join(', ')}`;
|
||||||
|
} else if (typeof periodValue === 'object') {
|
||||||
|
periodDisplay = `Config: ${JSON.stringify(periodValue)}`;
|
||||||
|
} else {
|
||||||
|
periodDisplay = `Period: ${periodValue}`;
|
||||||
|
}
|
||||||
|
|
||||||
|
return `
|
||||||
|
<div class="indicator-card ${isEnabled ? 'enabled' : 'disabled'}">
|
||||||
|
<div class="indicator-header">
|
||||||
|
<div>
|
||||||
|
<div class="indicator-name">${indicator.toUpperCase()}</div>
|
||||||
|
<div class="indicator-periods">${periodDisplay}</div>
|
||||||
|
</div>
|
||||||
|
<div>
|
||||||
|
<button class="btn btn-sm ${isEnabled ? 'btn-danger' : 'btn-success'}"
|
||||||
|
onclick="toggleIndicator('${indicator}')">
|
||||||
|
${isEnabled ? 'Disable' : 'Enable'}
|
||||||
|
</button>
|
||||||
|
<button class="btn btn-sm btn-primary" onclick="editIndicator('${indicator}')">
|
||||||
|
Edit
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
`;
|
||||||
|
}).join('');
|
||||||
|
|
||||||
|
// Show calculation intervals
|
||||||
|
if (indicators.calculation_intervals) {
|
||||||
|
container.innerHTML += `
|
||||||
|
<div style="margin-top: 20px; padding: 15px; background: rgba(79, 172, 254, 0.1); border-radius: 8px; border: 1px solid rgba(79, 172, 254, 0.3);">
|
||||||
|
<strong style="color: #4facfe;">Calculation Intervals:</strong>
|
||||||
|
<div style="margin-top: 10px; color: #e0e0e0;">
|
||||||
|
${indicators.calculation_intervals.join(', ')}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
`;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Display gap filling configuration
|
||||||
|
function displayGapFillingConfig(gapConfig) {
|
||||||
|
const container = document.getElementById('gapFillingConfig');
|
||||||
|
|
||||||
|
container.innerHTML = `
|
||||||
|
<div class="config-grid">
|
||||||
|
<div class="config-item">
|
||||||
|
<div class="config-label">Auto Gap Filling</div>
|
||||||
|
<div class="config-value">${gapConfig.enable_auto_gap_filling ? '✅ Enabled' : '❌ Disabled'}</div>
|
||||||
|
</div>
|
||||||
|
<div class="config-item">
|
||||||
|
<div class="config-label">Schedule (hours)</div>
|
||||||
|
<div class="config-value">${gapConfig.auto_fill_schedule_hours || 24} hours</div>
|
||||||
|
</div>
|
||||||
|
<div class="config-item">
|
||||||
|
<div class="config-label">Max Gap Size</div>
|
||||||
|
<div class="config-value">${gapConfig.max_gap_size_candles || 1000} candles</div>
|
||||||
|
</div>
|
||||||
|
<div class="config-item">
|
||||||
|
<div class="config-label">Min Gap Size</div>
|
||||||
|
<div class="config-value">${gapConfig.min_gap_size_candles || 2} candles</div>
|
||||||
|
</div>
|
||||||
|
<div class="config-item">
|
||||||
|
<div class="config-label">Intelligent Averaging</div>
|
||||||
|
<div class="config-value">${gapConfig.enable_intelligent_averaging ? '✅ Enabled' : '❌ Disabled'}</div>
|
||||||
|
</div>
|
||||||
|
<div class="config-item">
|
||||||
|
<div class="config-label">Lookback Candles</div>
|
||||||
|
<div class="config-value">${gapConfig.averaging_lookback_candles || 10} candles</div>
|
||||||
|
</div>
|
||||||
|
<div class="config-item">
|
||||||
|
<div class="config-label">Max Consecutive Empty</div>
|
||||||
|
<div class="config-value">${gapConfig.max_consecutive_empty_candles || 5} candles</div>
|
||||||
|
</div>
|
||||||
|
<div class="config-item">
|
||||||
|
<div class="config-label">Monitored Intervals</div>
|
||||||
|
<div class="config-value">${gapConfig.intervals_to_monitor ? gapConfig.intervals_to_monitor.join(', ') : 'Not set'}</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
`;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Display collection settings
|
||||||
|
function displayCollectionSettings(collectionConfig) {
|
||||||
|
const container = document.getElementById('collectionSettings');
|
||||||
|
|
||||||
|
container.innerHTML = `
|
||||||
|
<div class="config-grid">
|
||||||
|
<div class="config-item">
|
||||||
|
<div class="config-label">Bulk Chunk Size</div>
|
||||||
|
<div class="config-value">${collectionConfig.bulk_chunk_size || 1000} records</div>
|
||||||
|
</div>
|
||||||
|
<div class="config-item">
|
||||||
|
<div class="config-label">Tick Batch Size</div>
|
||||||
|
<div class="config-value">${collectionConfig.tick_batch_size || 100} ticks</div>
|
||||||
|
</div>
|
||||||
|
<div class="config-item">
|
||||||
|
<div class="config-label">WebSocket Reconnect Delay</div>
|
||||||
|
<div class="config-value">${collectionConfig.websocket_reconnect_delay || 5} seconds</div>
|
||||||
|
</div>
|
||||||
|
<div class="config-item">
|
||||||
|
<div class="config-label">Max Retries</div>
|
||||||
|
<div class="config-value">${collectionConfig.max_retries || 3} attempts</div>
|
||||||
|
</div>
|
||||||
|
<div class="config-item">
|
||||||
|
<div class="config-label">Rate Limit</div>
|
||||||
|
<div class="config-value">${collectionConfig.rate_limit_requests_per_minute || 2000} req/min</div>
|
||||||
|
</div>
|
||||||
|
<div class="config-item">
|
||||||
|
<div class="config-label">Default Record From Date</div>
|
||||||
|
<div class="config-value">${collectionConfig.default_record_from_date || 'Not set'}</div>
|
||||||
|
</div>
|
||||||
|
<div class="config-item">
|
||||||
|
<div class="config-label">Candle Intervals</div>
|
||||||
|
<div class="config-value">${collectionConfig.candle_intervals ? collectionConfig.candle_intervals.join(', ') : 'Not set'}</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
`;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Toggle trading pair
|
||||||
|
async function togglePair(symbol, enabled) {
|
||||||
|
try {
|
||||||
|
const response = await fetch(`/api/trading-pairs/${symbol}`, {
|
||||||
|
method: 'PUT',
|
||||||
|
headers: { 'Content-Type': 'application/json' },
|
||||||
|
body: JSON.stringify({ enabled })
|
||||||
|
});
|
||||||
|
|
||||||
|
const data = await response.json();
|
||||||
|
showAlert(data.message, data.status === 'success' ? 'success' : 'error');
|
||||||
|
|
||||||
|
if (data.status === 'success') {
|
||||||
|
loadConfig();
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
showAlert('Error toggling pair: ' + error.message, 'error');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Remove trading pair
|
||||||
|
async function removePair(symbol) {
|
||||||
|
if (!confirm(`Are you sure you want to remove ${symbol}?`)) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
const response = await fetch(`/api/trading-pairs/${symbol}`, {
|
||||||
|
method: 'DELETE'
|
||||||
|
});
|
||||||
|
|
||||||
|
const data = await response.json();
|
||||||
|
showAlert(data.message, data.status === 'success' ? 'success' : 'error');
|
||||||
|
|
||||||
|
if (data.status === 'success') {
|
||||||
|
loadConfig();
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
showAlert('Error removing pair: ' + error.message, 'error');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Add trading pair modal
|
||||||
|
function showAddPairModal() {
|
||||||
|
document.getElementById('addPairModal').style.display = 'block';
|
||||||
|
}
|
||||||
|
|
||||||
|
function closeAddPairModal() {
|
||||||
|
document.getElementById('addPairModal').style.display = 'none';
|
||||||
|
}
|
||||||
|
|
||||||
|
async function addTradingPair() {
|
||||||
|
const symbol = document.getElementById('newPairSymbol').value.toUpperCase();
|
||||||
|
const priority = parseInt(document.getElementById('newPairPriority').value);
|
||||||
|
const recordFrom = document.getElementById('newPairRecordFrom').value;
|
||||||
|
|
||||||
|
if (!symbol) {
|
||||||
|
showAlert('Please enter a symbol', 'error');
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
const body = { symbol, priority };
|
||||||
|
if (recordFrom) {
|
||||||
|
body.record_from_date = recordFrom + ':00Z';
|
||||||
|
}
|
||||||
|
|
||||||
|
const response = await fetch('/api/trading-pairs', {
|
||||||
|
method: 'POST',
|
||||||
|
headers: { 'Content-Type': 'application/json' },
|
||||||
|
body: JSON.stringify(body)
|
||||||
|
});
|
||||||
|
|
||||||
|
const data = await response.json();
|
||||||
|
showAlert(data.message, data.status === 'success' ? 'success' : 'error');
|
||||||
|
|
||||||
|
if (data.status === 'success') {
|
||||||
|
closeAddPairModal();
|
||||||
|
loadConfig();
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
showAlert('Error adding pair: ' + error.message, 'error');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Toggle indicator
|
||||||
|
async function toggleIndicator(indicator) {
|
||||||
|
try {
|
||||||
|
const response = await fetch(`/api/indicators/toggle/${indicator}`, {
|
||||||
|
method: 'POST'
|
||||||
|
});
|
||||||
|
|
||||||
|
const data = await response.json();
|
||||||
|
showAlert(data.message, data.status === 'success' ? 'success' : 'error');
|
||||||
|
|
||||||
|
if (data.status === 'success') {
|
||||||
|
loadConfig();
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
showAlert('Error toggling indicator: ' + error.message, 'error');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Edit indicator
|
||||||
|
function editIndicator(indicator) {
|
||||||
|
currentIndicator = indicator;
|
||||||
|
document.getElementById('editIndicatorName').textContent = indicator.toUpperCase();
|
||||||
|
|
||||||
|
const periods = currentConfig.technical_indicators.periods[indicator];
|
||||||
|
document.getElementById('editIndicatorPeriods').value = JSON.stringify(periods, null, 2);
|
||||||
|
|
||||||
|
document.getElementById('editIndicatorModal').style.display = 'block';
|
||||||
|
}
|
||||||
|
|
||||||
|
function closeEditIndicatorModal() {
|
||||||
|
document.getElementById('editIndicatorModal').style.display = 'none';
|
||||||
|
}
|
||||||
|
|
||||||
|
async function saveIndicatorPeriods() {
|
||||||
|
try {
|
||||||
|
const periodsText = document.getElementById('editIndicatorPeriods').value;
|
||||||
|
const periods = JSON.parse(periodsText);
|
||||||
|
|
||||||
|
const response = await fetch(`/api/indicators/${currentIndicator}/periods`, {
|
||||||
|
method: 'PUT',
|
||||||
|
headers: { 'Content-Type': 'application/json' },
|
||||||
|
body: JSON.stringify({ periods })
|
||||||
|
});
|
||||||
|
|
||||||
|
const data = await response.json();
|
||||||
|
showAlert(data.message, data.status === 'success' ? 'success' : 'error');
|
||||||
|
|
||||||
|
if (data.status === 'success') {
|
||||||
|
closeEditIndicatorModal();
|
||||||
|
loadConfig();
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
showAlert('Error saving indicator: ' + error.message, 'error');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Environment variables
|
||||||
|
async function loadEnvVars() {
|
||||||
|
try {
|
||||||
|
const response = await fetch('/api/env');
|
||||||
|
const envVars = await response.json();
|
||||||
|
|
||||||
|
const tbody = document.getElementById('envTableBody');
|
||||||
|
|
||||||
|
if (Object.keys(envVars).length === 0) {
|
||||||
|
tbody.innerHTML = '<tr><td colspan="3" style="text-align: center;">No environment variables</td></tr>';
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
tbody.innerHTML = Object.entries(envVars).map(([key, value]) => `
|
||||||
|
<tr>
|
||||||
|
<td><strong>${key}</strong></td>
|
||||||
|
<td><code>${value}</code></td>
|
||||||
|
<td>
|
||||||
|
<button class="btn btn-sm btn-danger" onclick="deleteEnvVar('${key}')">Delete</button>
|
||||||
|
</td>
|
||||||
|
</tr>
|
||||||
|
`).join('');
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Error loading env vars:', error);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async function addEnvVar() {
|
||||||
|
const key = document.getElementById('newEnvKey').value.trim();
|
||||||
|
const value = document.getElementById('newEnvValue').value.trim();
|
||||||
|
|
||||||
|
if (!key || !value) {
|
||||||
|
showAlert('Please enter both key and value', 'error');
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
const response = await fetch('/api/env', {
|
||||||
|
method: 'POST',
|
||||||
|
headers: { 'Content-Type': 'application/json' },
|
||||||
|
body: JSON.stringify({ key, value })
|
||||||
|
});
|
||||||
|
|
||||||
|
const data = await response.json();
|
||||||
|
showAlert(data.message, data.status === 'success' ? 'success' : 'error');
|
||||||
|
|
||||||
|
if (data.status === 'success') {
|
||||||
|
document.getElementById('newEnvKey').value = '';
|
||||||
|
document.getElementById('newEnvValue').value = '';
|
||||||
|
loadEnvVars();
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
showAlert('Error adding env var: ' + error.message, 'error');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async function deleteEnvVar(key) {
|
||||||
|
if (!confirm(`Delete environment variable ${key}?`)) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
const response = await fetch(`/api/env/${key}`, {
|
||||||
|
method: 'DELETE'
|
||||||
|
});
|
||||||
|
|
||||||
|
const data = await response.json();
|
||||||
|
showAlert(data.message, data.status === 'success' ? 'success' : 'error');
|
||||||
|
|
||||||
|
if (data.status === 'success') {
|
||||||
|
loadEnvVars();
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
showAlert('Error deleting env var: ' + error.message, 'error');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Save all configuration
|
||||||
|
async function saveAllConfig() {
|
||||||
|
if (!confirm('Save all configuration changes?')) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
const response = await fetch('/api/config', {
|
||||||
|
method: 'POST',
|
||||||
|
headers: { 'Content-Type': 'application/json' },
|
||||||
|
body: JSON.stringify(currentConfig)
|
||||||
|
});
|
||||||
|
|
||||||
|
const data = await response.json();
|
||||||
|
showAlert(data.message, data.status === 'success' ? 'success' : 'error');
|
||||||
|
} catch (error) {
|
||||||
|
showAlert('Error saving config: ' + error.message, 'error');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Close modals on outside click
|
||||||
|
window.onclick = function(event) {
|
||||||
|
if (event.target.classList.contains('modal')) {
|
||||||
|
event.target.style.display = 'none';
|
||||||
|
}
|
||||||
|
}
|
||||||
|
</script>
|
||||||
|
</body>
|
||||||
|
</html>
|
||||||
|
"""
|
||||||
|
|
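Note (not part of the commit): the configuration page above talks to backend routes such as GET/POST /api/config, PUT/DELETE /api/trading-pairs/{symbol} and POST /api/indicators/toggle/{indicator}, none of which appear in this excerpt. The sketch below only illustrates the JSON contract the template assumes, i.e. a {"status": ..., "message": ...} reply on mutations; FastAPI (plausible since ui_state.py mentions uvicorn) and reading config.conf directly are assumptions, not the project's confirmed implementation.

# Sketch of two of the routes ui_template_config.py expects, under the assumptions above.
import json
from pathlib import Path

from fastapi import APIRouter
from pydantic import BaseModel

router = APIRouter()
CONFIG_PATH = Path("config.conf")  # JSON configuration file included in this commit


class PairUpdate(BaseModel):
    enabled: bool


def _read_config() -> dict:
    return json.loads(CONFIG_PATH.read_text())


def _write_config(cfg: dict) -> None:
    CONFIG_PATH.write_text(json.dumps(cfg, indent=2))


@router.get("/api/config")
async def get_config() -> dict:
    # The page renders trading_pairs, technical_indicators, gap_filling and collection.
    return _read_config()


@router.put("/api/trading-pairs/{symbol}")
async def toggle_pair(symbol: str, update: PairUpdate) -> dict:
    cfg = _read_config()
    for pair in cfg.get("trading_pairs", []):
        if pair["symbol"] == symbol:
            pair["enabled"] = update.enabled
            _write_config(cfg)
            return {"status": "success", "message": f"{symbol} updated"}
    return {"status": "error", "message": f"{symbol} not found"}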
1601
ui_template_dashboard.py
Normal file
File diff suppressed because it is too large
780
ui_template_gaps.py
Normal file
@@ -0,0 +1,780 @@
#!/usr/bin/env python3
"""
ui_template_gaps.py - Data Gap Monitoring Interface

Provides visual interface for tracking and filling data gaps
"""


def get_gaps_monitoring_html():
    """Return the gaps monitoring page HTML"""
    return """
<!DOCTYPE html>
|
||||||
|
<html lang="en">
|
||||||
|
<head>
|
||||||
|
<meta charset="UTF-8">
|
||||||
|
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||||
|
<title>Gap Monitoring - Trading System</title>
|
||||||
|
<style>
|
||||||
|
* {
|
||||||
|
margin: 0;
|
||||||
|
padding: 0;
|
||||||
|
box-sizing: border-box;
|
||||||
|
}
|
||||||
|
|
||||||
|
body {
|
||||||
|
font-family: 'Segoe UI', Tahoma, Geneva, Verdana, sans-serif;
|
||||||
|
background: linear-gradient(135deg, #667eea 0%, #764ba2 100%);
|
||||||
|
min-height: 100vh;
|
||||||
|
padding: 20px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.container {
|
||||||
|
max-width: 1400px;
|
||||||
|
margin: 0 auto;
|
||||||
|
background: white;
|
||||||
|
border-radius: 10px;
|
||||||
|
box-shadow: 0 10px 40px rgba(0,0,0,0.1);
|
||||||
|
overflow: hidden;
|
||||||
|
}
|
||||||
|
|
||||||
|
.header {
|
||||||
|
background: linear-gradient(135deg, #667eea 0%, #764ba2 100%);
|
||||||
|
color: white;
|
||||||
|
padding: 30px;
|
||||||
|
text-align: center;
|
||||||
|
}
|
||||||
|
|
||||||
|
.header h1 {
|
||||||
|
font-size: 32px;
|
||||||
|
margin-bottom: 10px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.header p {
|
||||||
|
opacity: 0.9;
|
||||||
|
font-size: 16px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.nav-bar {
|
||||||
|
background: #2d3748;
|
||||||
|
padding: 15px 30px;
|
||||||
|
display: flex;
|
||||||
|
gap: 10px;
|
||||||
|
flex-wrap: wrap;
|
||||||
|
}
|
||||||
|
|
||||||
|
.nav-button {
|
||||||
|
background: #4a5568;
|
||||||
|
color: white;
|
||||||
|
border: none;
|
||||||
|
padding: 10px 20px;
|
||||||
|
border-radius: 5px;
|
||||||
|
cursor: pointer;
|
||||||
|
font-size: 14px;
|
||||||
|
transition: all 0.3s;
|
||||||
|
}
|
||||||
|
|
||||||
|
.nav-button:hover {
|
||||||
|
background: #667eea;
|
||||||
|
transform: translateY(-2px);
|
||||||
|
}
|
||||||
|
|
||||||
|
.nav-button.active {
|
||||||
|
background: #667eea;
|
||||||
|
}
|
||||||
|
|
||||||
|
.stats-grid {
|
||||||
|
display: grid;
|
||||||
|
grid-template-columns: repeat(auto-fit, minmax(200px, 1fr));
|
||||||
|
gap: 20px;
|
||||||
|
padding: 30px;
|
||||||
|
background: #f7fafc;
|
||||||
|
}
|
||||||
|
|
||||||
|
.stat-card {
|
||||||
|
background: white;
|
||||||
|
padding: 20px;
|
||||||
|
border-radius: 8px;
|
||||||
|
box-shadow: 0 2px 8px rgba(0,0,0,0.1);
|
||||||
|
text-align: center;
|
||||||
|
}
|
||||||
|
|
||||||
|
.stat-value {
|
||||||
|
font-size: 32px;
|
||||||
|
font-weight: bold;
|
||||||
|
color: #667eea;
|
||||||
|
margin: 10px 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
.stat-label {
|
||||||
|
color: #718096;
|
||||||
|
font-size: 14px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.content {
|
||||||
|
padding: 30px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.section {
|
||||||
|
margin-bottom: 30px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.section-title {
|
||||||
|
font-size: 20px;
|
||||||
|
font-weight: bold;
|
||||||
|
color: #2d3748;
|
||||||
|
margin-bottom: 15px;
|
||||||
|
padding-bottom: 10px;
|
||||||
|
border-bottom: 2px solid #e2e8f0;
|
||||||
|
}
|
||||||
|
|
||||||
|
.pairs-table {
|
||||||
|
width: 100%;
|
||||||
|
border-collapse: collapse;
|
||||||
|
background: white;
|
||||||
|
box-shadow: 0 2px 8px rgba(0,0,0,0.1);
|
||||||
|
border-radius: 8px;
|
||||||
|
overflow: hidden;
|
||||||
|
}
|
||||||
|
|
||||||
|
.pairs-table thead {
|
||||||
|
background: #2d3748;
|
||||||
|
color: white;
|
||||||
|
}
|
||||||
|
|
||||||
|
.pairs-table th {
|
||||||
|
padding: 15px;
|
||||||
|
text-align: left;
|
||||||
|
font-weight: 600;
|
||||||
|
}
|
||||||
|
|
||||||
|
.pairs-table td {
|
||||||
|
padding: 12px 15px;
|
||||||
|
border-bottom: 1px solid #e2e8f0;
|
||||||
|
}
|
||||||
|
|
||||||
|
.pairs-table tbody tr:hover {
|
||||||
|
background: #f7fafc;
|
||||||
|
}
|
||||||
|
|
||||||
|
.status-badge {
|
||||||
|
display: inline-block;
|
||||||
|
padding: 4px 12px;
|
||||||
|
border-radius: 12px;
|
||||||
|
font-size: 12px;
|
||||||
|
font-weight: 600;
|
||||||
|
}
|
||||||
|
|
||||||
|
.status-excellent {
|
||||||
|
background: #c6f6d5;
|
||||||
|
color: #22543d;
|
||||||
|
}
|
||||||
|
|
||||||
|
.status-good {
|
||||||
|
background: #bee3f8;
|
||||||
|
color: #2c5282;
|
||||||
|
}
|
||||||
|
|
||||||
|
.status-partial {
|
||||||
|
background: #feebc8;
|
||||||
|
color: #744210;
|
||||||
|
}
|
||||||
|
|
||||||
|
.status-poor {
|
||||||
|
background: #fed7d7;
|
||||||
|
color: #742a2a;
|
||||||
|
}
|
||||||
|
|
||||||
|
.action-button {
|
||||||
|
background: #667eea;
|
||||||
|
color: white;
|
||||||
|
border: none;
|
||||||
|
padding: 6px 12px;
|
||||||
|
border-radius: 4px;
|
||||||
|
cursor: pointer;
|
||||||
|
font-size: 12px;
|
||||||
|
margin-right: 5px;
|
||||||
|
transition: all 0.3s;
|
||||||
|
}
|
||||||
|
|
||||||
|
.action-button:hover {
|
||||||
|
background: #5a67d8;
|
||||||
|
transform: translateY(-1px);
|
||||||
|
}
|
||||||
|
|
||||||
|
.action-button.secondary {
|
||||||
|
background: #48bb78;
|
||||||
|
}
|
||||||
|
|
||||||
|
.action-button.secondary:hover {
|
||||||
|
background: #38a169;
|
||||||
|
}
|
||||||
|
|
||||||
|
.loading {
|
||||||
|
text-align: center;
|
||||||
|
padding: 40px;
|
||||||
|
color: #718096;
|
||||||
|
}
|
||||||
|
|
||||||
|
.spinner {
|
||||||
|
border: 4px solid #e2e8f0;
|
||||||
|
border-top: 4px solid #667eea;
|
||||||
|
border-radius: 50%;
|
||||||
|
width: 40px;
|
||||||
|
height: 40px;
|
||||||
|
animation: spin 1s linear infinite;
|
||||||
|
margin: 0 auto 20px;
|
||||||
|
}
|
||||||
|
|
||||||
|
@keyframes spin {
|
||||||
|
0% { transform: rotate(0deg); }
|
||||||
|
100% { transform: rotate(360deg); }
|
||||||
|
}
|
||||||
|
|
||||||
|
.modal {
|
||||||
|
display: none;
|
||||||
|
position: fixed;
|
||||||
|
z-index: 1000;
|
||||||
|
left: 0;
|
||||||
|
top: 0;
|
||||||
|
width: 100%;
|
||||||
|
height: 100%;
|
||||||
|
background: rgba(0,0,0,0.5);
|
||||||
|
}
|
||||||
|
|
||||||
|
.modal-content {
|
||||||
|
background: white;
|
||||||
|
margin: 5% auto;
|
||||||
|
padding: 30px;
|
||||||
|
border-radius: 10px;
|
||||||
|
max-width: 800px;
|
||||||
|
max-height: 80vh;
|
||||||
|
overflow-y: auto;
|
||||||
|
box-shadow: 0 10px 40px rgba(0,0,0,0.2);
|
||||||
|
}
|
||||||
|
|
||||||
|
.modal-header {
|
||||||
|
display: flex;
|
||||||
|
justify-content: space-between;
|
||||||
|
align-items: center;
|
||||||
|
margin-bottom: 20px;
|
||||||
|
padding-bottom: 15px;
|
||||||
|
border-bottom: 2px solid #e2e8f0;
|
||||||
|
}
|
||||||
|
|
||||||
|
.modal-title {
|
||||||
|
font-size: 24px;
|
||||||
|
font-weight: bold;
|
||||||
|
color: #2d3748;
|
||||||
|
}
|
||||||
|
|
||||||
|
.close {
|
||||||
|
font-size: 28px;
|
||||||
|
font-weight: bold;
|
||||||
|
color: #718096;
|
||||||
|
cursor: pointer;
|
||||||
|
transition: color 0.3s;
|
||||||
|
}
|
||||||
|
|
||||||
|
.close:hover {
|
||||||
|
color: #2d3748;
|
||||||
|
}
|
||||||
|
|
||||||
|
.gap-details {
|
||||||
|
margin-top: 20px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.gap-item {
|
||||||
|
background: #f7fafc;
|
||||||
|
padding: 15px;
|
||||||
|
border-radius: 8px;
|
||||||
|
margin-bottom: 10px;
|
||||||
|
border-left: 4px solid #667eea;
|
||||||
|
}
|
||||||
|
|
||||||
|
.gap-info {
|
||||||
|
display: grid;
|
||||||
|
grid-template-columns: repeat(2, 1fr);
|
||||||
|
gap: 10px;
|
||||||
|
margin-top: 10px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.gap-info-item {
|
||||||
|
display: flex;
|
||||||
|
justify-content: space-between;
|
||||||
|
}
|
||||||
|
|
||||||
|
.gap-label {
|
||||||
|
color: #718096;
|
||||||
|
font-weight: 600;
|
||||||
|
}
|
||||||
|
|
||||||
|
.gap-value {
|
||||||
|
color: #2d3748;
|
||||||
|
}
|
||||||
|
|
||||||
|
.progress-bar {
|
||||||
|
width: 100%;
|
||||||
|
height: 8px;
|
||||||
|
background: #e2e8f0;
|
||||||
|
border-radius: 4px;
|
||||||
|
overflow: hidden;
|
||||||
|
margin: 5px 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
.progress-fill {
|
||||||
|
height: 100%;
|
||||||
|
background: linear-gradient(90deg, #667eea, #48bb78);
|
||||||
|
transition: width 0.3s;
|
||||||
|
}
|
||||||
|
|
||||||
|
.interval-badge {
|
||||||
|
display: inline-block;
|
||||||
|
padding: 2px 8px;
|
||||||
|
background: #edf2f7;
|
||||||
|
border-radius: 4px;
|
||||||
|
font-size: 11px;
|
||||||
|
font-weight: 600;
|
||||||
|
color: #2d3748;
|
||||||
|
margin-right: 5px;
|
||||||
|
}
|
||||||
|
</style>
|
||||||
|
</head>
|
||||||
|
<body>
|
||||||
|
<div class="container">
|
||||||
|
<div class="header">
|
||||||
|
<h1>📊 Data Gap Monitoring</h1>
|
||||||
|
<p>Track and fill data gaps across all trading pairs</p>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="nav-bar">
|
||||||
|
<button onclick="window.location.href='/'" class="nav-button">Dashboard</button>
|
||||||
|
<button onclick="window.location.href='/config'" class="nav-button">Config</button>
|
||||||
|
<button onclick="window.location.href='/gaps'" class="nav-button active">Gaps</button>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="stats-grid">
|
||||||
|
<div class="stat-card">
|
||||||
|
<div class="stat-label">Total Pairs</div>
|
||||||
|
<div class="stat-value" id="totalPairs">-</div>
|
||||||
|
</div>
|
||||||
|
<div class="stat-card">
|
||||||
|
<div class="stat-label">Pairs with Gaps</div>
|
||||||
|
<div class="stat-value" id="pairsWithGaps">-</div>
|
||||||
|
</div>
|
||||||
|
<div class="stat-card">
|
||||||
|
<div class="stat-label">Total Missing Records</div>
|
||||||
|
<div class="stat-value" id="totalMissing">-</div>
|
||||||
|
</div>
|
||||||
|
<div class="stat-card">
|
||||||
|
<div class="stat-label">Avg Coverage</div>
|
||||||
|
<div class="stat-value" id="avgCoverage">-</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="content">
|
||||||
|
<div class="section">
|
||||||
|
<div class="section-title">Trading Pairs Gap Status</div>
|
||||||
|
<div id="loadingIndicator" class="loading">
|
||||||
|
<div class="spinner"></div>
|
||||||
|
<p>Loading gap data...</p>
|
||||||
|
</div>
|
||||||
|
<table class="pairs-table" id="pairsTable" style="display:none;">
|
||||||
|
<thead>
|
||||||
|
<tr>
|
||||||
|
<th>Symbol</th>
|
||||||
|
<th>From Date</th>
|
||||||
|
<th>To Date</th>
|
||||||
|
<th>Intervals</th>
|
||||||
|
<th>Coverage</th>
|
||||||
|
<th>Status</th>
|
||||||
|
<th>Actions</th>
|
||||||
|
</tr>
|
||||||
|
</thead>
|
||||||
|
<tbody id="pairsTableBody">
|
||||||
|
</tbody>
|
||||||
|
</table>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<!-- Gap Details Modal -->
|
||||||
|
<div id="gapModal" class="modal">
|
||||||
|
<div class="modal-content">
|
||||||
|
<div class="modal-header">
|
||||||
|
<div class="modal-title" id="modalTitle">Gap Details</div>
|
||||||
|
<span class="close" onclick="closeModal()">×</span>
|
||||||
|
</div>
|
||||||
|
<div id="modalBody">
|
||||||
|
<div class="loading">
|
||||||
|
<div class="spinner"></div>
|
||||||
|
<p>Loading details...</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<script>
|
||||||
|
let gapData = [];
|
||||||
|
|
||||||
|
async function loadGapData() {
|
||||||
|
try {
|
||||||
|
const response = await fetch('/api/gaps/all-pairs');
|
||||||
|
const result = await response.json();
|
||||||
|
|
||||||
|
if (result.status === 'success' && result.data) {
|
||||||
|
gapData = result.data;
|
||||||
|
processAndDisplayData(gapData);
|
||||||
|
} else {
|
||||||
|
showError('Failed to load gap data: ' + (result.message || 'Unknown error'));
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Error loading gap data:', error);
|
||||||
|
showError('Error loading gap data: ' + error.message);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function processAndDisplayData(data) {
|
||||||
|
// Group by symbol
|
||||||
|
const symbolMap = new Map();
|
||||||
|
|
||||||
|
data.forEach(item => {
|
||||||
|
if (!symbolMap.has(item.symbol)) {
|
||||||
|
symbolMap.set(item.symbol, {
|
||||||
|
symbol: item.symbol,
|
||||||
|
intervals: [],
|
||||||
|
totalCoverage: 0,
|
||||||
|
totalMissing: 0,
|
||||||
|
totalRecords: 0,
|
||||||
|
firstDate: item.first_record,
|
||||||
|
lastDate: item.last_record,
|
||||||
|
healthScore: null
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
const symbolData = symbolMap.get(item.symbol);
|
||||||
|
symbolData.intervals.push({
|
||||||
|
interval: item.interval,
|
||||||
|
coverage: item.coverage_percent || 0,
|
||||||
|
missing: item.missing_records || 0,
|
||||||
|
total: item.total_records || 0,
|
||||||
|
gaps: item.gaps || []
|
||||||
|
});
|
||||||
|
|
||||||
|
symbolData.totalMissing += item.missing_records || 0;
|
||||||
|
symbolData.totalRecords += item.total_records || 0;
|
||||||
|
|
||||||
|
// Update date ranges
|
||||||
|
if (!symbolData.firstDate || (item.first_record && item.first_record < symbolData.firstDate)) {
|
||||||
|
symbolData.firstDate = item.first_record;
|
||||||
|
}
|
||||||
|
if (!symbolData.lastDate || (item.last_record && item.last_record > symbolData.lastDate)) {
|
||||||
|
symbolData.lastDate = item.last_record;
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// Calculate average coverage per symbol and load health scores
|
||||||
|
symbolMap.forEach(async (symbolData) => {
|
||||||
|
if (symbolData.intervals.length > 0) {
|
||||||
|
const totalCoverage = symbolData.intervals.reduce((sum, i) => sum + i.coverage, 0);
|
||||||
|
symbolData.totalCoverage = totalCoverage / symbolData.intervals.length;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Load health score for first interval (as a sample)
|
||||||
|
if (symbolData.intervals[0]) {
|
||||||
|
await loadHealthScore(symbolData.symbol, symbolData.intervals[0].interval, symbolData);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// Update statistics
|
||||||
|
updateStatistics(symbolMap);
|
||||||
|
|
||||||
|
// Display table
|
||||||
|
displayPairsTable(symbolMap);
|
||||||
|
|
||||||
|
// Hide loading indicator
|
||||||
|
document.getElementById('loadingIndicator').style.display = 'none';
|
||||||
|
document.getElementById('pairsTable').style.display = 'table';
|
||||||
|
}
|
||||||
|
|
||||||
|
async function loadHealthScore(symbol, interval, symbolData) {
|
||||||
|
try {
|
||||||
|
const response = await fetch(`/api/gaps/health/${symbol}/${interval}`);
|
||||||
|
const result = await response.json();
|
||||||
|
|
||||||
|
if (result.status === 'success' && result.data) {
|
||||||
|
symbolData.healthScore = result.data.health_score;
|
||||||
|
symbolData.healthIssues = result.data.issues || [];
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
console.error(`Error loading health for ${symbol}:`, error);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function updateStatistics(symbolMap) {
|
||||||
|
const totalPairs = symbolMap.size;
|
||||||
|
const pairsWithGaps = Array.from(symbolMap.values()).filter(s => s.totalMissing > 0).length;
|
||||||
|
const totalMissing = Array.from(symbolMap.values()).reduce((sum, s) => sum + s.totalMissing, 0);
|
||||||
|
const avgCoverage = totalPairs > 0
|
||||||
|
? Array.from(symbolMap.values()).reduce((sum, s) => sum + s.totalCoverage, 0) / totalPairs
|
||||||
|
: 0;
|
||||||
|
|
||||||
|
document.getElementById('totalPairs').textContent = totalPairs;
|
||||||
|
document.getElementById('pairsWithGaps').textContent = pairsWithGaps;
|
||||||
|
document.getElementById('totalMissing').textContent = totalMissing.toLocaleString();
|
||||||
|
document.getElementById('avgCoverage').textContent = avgCoverage.toFixed(1) + '%';
|
||||||
|
}
|
||||||
|
|
||||||
|
function displayPairsTable(symbolMap) {
|
||||||
|
const tbody = document.getElementById('pairsTableBody');
|
||||||
|
tbody.innerHTML = '';
|
||||||
|
|
||||||
|
// Sort by coverage (worst first)
|
||||||
|
const sortedSymbols = Array.from(symbolMap.values()).sort((a, b) => a.totalCoverage - b.totalCoverage);
|
||||||
|
|
||||||
|
sortedSymbols.forEach(symbolData => {
|
||||||
|
const row = document.createElement('tr');
|
||||||
|
|
||||||
|
const coverage = symbolData.totalCoverage;
|
||||||
|
let statusClass, statusText;
|
||||||
|
if (coverage >= 95) {
|
||||||
|
statusClass = 'status-excellent';
|
||||||
|
statusText = 'Excellent';
|
||||||
|
} else if (coverage >= 80) {
|
||||||
|
statusClass = 'status-good';
|
||||||
|
statusText = 'Good';
|
||||||
|
} else if (coverage >= 50) {
|
||||||
|
statusClass = 'status-partial';
|
||||||
|
statusText = 'Partial';
|
||||||
|
} else {
|
||||||
|
statusClass = 'status-poor';
|
||||||
|
statusText = 'Poor';
|
||||||
|
}
|
||||||
|
|
||||||
|
// Format dates
|
||||||
|
const fromDate = symbolData.firstDate ? new Date(symbolData.firstDate).toLocaleDateString() : 'N/A';
|
||||||
|
const toDate = symbolData.lastDate ? new Date(symbolData.lastDate).toLocaleDateString() : 'N/A';
|
||||||
|
|
||||||
|
// Create interval badges
|
||||||
|
const intervalBadges = symbolData.intervals.map(i =>
|
||||||
|
                    `<span class="interval-badge">${i.interval}</span>`
                ).join('');

                // Health indicator
                const healthIndicator = symbolData.healthScore !== null
                    ? `<span style="color: ${getHealthColor(symbolData.healthScore)}">❤ ${symbolData.healthScore}</span>`
                    : '';

                row.innerHTML = `
                    <td><strong>${symbolData.symbol}</strong> ${healthIndicator}</td>
                    <td>${fromDate}</td>
                    <td>${toDate}</td>
                    <td>${intervalBadges}</td>
                    <td>
                        <div class="progress-bar">
                            <div class="progress-fill" style="width: ${coverage}%"></div>
                        </div>
                        <small>${coverage.toFixed(1)}% (${symbolData.totalMissing.toLocaleString()} missing)</small>
                    </td>
                    <td><span class="status-badge ${statusClass}">${statusText}</span></td>
                    <td>
                        <button class="action-button" onclick="showDetails('${symbolData.symbol}')">Details</button>
                        ${symbolData.totalMissing > 0 ? `<button class="action-button secondary" onclick="smartFillGaps('${symbolData.symbol}')">Smart Fill</button>` : ''}
                    </td>
                `;

                tbody.appendChild(row);
            });
        }

        function getHealthColor(score) {
            if (score >= 90) return '#48bb78';
            if (score >= 70) return '#4299e1';
            if (score >= 50) return '#ed8936';
            return '#f56565';
        }

        async function showDetails(symbol) {
            const modal = document.getElementById('gapModal');
            const modalTitle = document.getElementById('modalTitle');
            const modalBody = document.getElementById('modalBody');

            modalTitle.textContent = `${symbol} - Gap Details & Analytics`;
            modalBody.innerHTML = '<div class="loading"><div class="spinner"></div><p>Loading details...</p></div>';
            modal.style.display = 'block';

            // Get data for this symbol
            const symbolData = gapData.filter(item => item.symbol === symbol);

            let detailsHTML = '';

            for (const item of symbolData) {
                const coverage = item.coverage_percent || 0;
                const statusClass = coverage >= 95 ? 'status-excellent' : coverage >= 80 ? 'status-good' : coverage >= 50 ? 'status-partial' : 'status-poor';

                // Get progress and health data
                const progressData = await fetchProgress(symbol, item.interval);
                const healthData = await fetchHealth(symbol, item.interval);
                const prioritizedGaps = await fetchPrioritizedGaps(symbol, item.interval);

                detailsHTML += `
                    <div class="gap-item">
                        <h3>${item.interval} <span class="status-badge ${statusClass}">${coverage.toFixed(1)}%</span></h3>

                        ${healthData ? `
                            <div style="background: #fff5f5; padding: 10px; border-radius: 4px; margin: 10px 0; border-left: 3px solid ${getHealthColor(healthData.health_score)}">
                                <strong>Health Score: ${healthData.health_score}/100</strong> (${healthData.status})
                                ${healthData.issues.length > 0 ? `
                                    <ul style="margin: 5px 0 0 20px; font-size: 12px;">
                                        ${healthData.issues.map(issue => `<li>${issue.message}</li>`).join('')}
                                    </ul>
                                ` : '<p style="margin: 5px 0 0 0; font-size: 12px; color: #48bb78;">✓ No issues detected</p>'}
                            </div>
                        ` : ''}

                        ${progressData && progressData.missing_records > 0 ? `
                            <div style="background: #ebf8ff; padding: 10px; border-radius: 4px; margin: 10px 0;">
                                <strong>⏱ Estimated Time to Complete:</strong> ${progressData.estimated_time_human}
                                <small style="display: block; margin-top: 5px; color: #718096;">
                                    ${progressData.missing_records.toLocaleString()} records remaining
                                </small>
                            </div>
                        ` : ''}

                        <div class="gap-info">
                            <div class="gap-info-item">
                                <span class="gap-label">First Record:</span>
                                <span class="gap-value">${item.first_record ? new Date(item.first_record).toLocaleString() : 'N/A'}</span>
                            </div>
                            <div class="gap-info-item">
                                <span class="gap-label">Last Record:</span>
                                <span class="gap-value">${item.last_record ? new Date(item.last_record).toLocaleString() : 'N/A'}</span>
                            </div>
                            <div class="gap-info-item">
                                <span class="gap-label">Total Records:</span>
                                <span class="gap-value">${(item.total_records || 0).toLocaleString()}</span>
                            </div>
                            <div class="gap-info-item">
                                <span class="gap-label">Expected Records:</span>
                                <span class="gap-value">${(item.expected_records || 0).toLocaleString()}</span>
                            </div>
                            <div class="gap-info-item">
                                <span class="gap-label">Missing Records:</span>
                                <span class="gap-value">${(item.missing_records || 0).toLocaleString()}</span>
                            </div>
                            <div class="gap-info-item">
                                <span class="gap-label">Number of Gaps:</span>
                                <span class="gap-value">${(item.gaps || []).length}</span>
                            </div>
                        </div>
                `;

                if (prioritizedGaps && prioritizedGaps.length > 0) {
                    detailsHTML += '<h4 style="margin-top: 15px; color: #2d3748;">🎯 Priority Gaps (Smartly Sorted):</h4>';
                    prioritizedGaps.slice(0, 10).forEach((gap, idx) => {
                        const priorityColor = gap.priority_score > 150 ? '#48bb78' : gap.priority_score > 100 ? '#4299e1' : '#ed8936';
                        detailsHTML += `
                            <div style="background: #edf2f7; padding: 10px; margin: 5px 0; border-radius: 4px; font-size: 13px; border-left: 3px solid ${priorityColor}">
                                <strong>#${idx + 1} Priority:</strong> ${gap.priority_score.toFixed(1)} |
                                <strong>Age:</strong> ${gap.days_old} days<br>
                                <strong>Gap:</strong> ${new Date(gap.gap_start).toLocaleString()} → ${new Date(gap.gap_end).toLocaleString()}<br>
                                <strong>Missing:</strong> ${gap.missing_candles} candles (${gap.duration_hours}h)
                            </div>
                        `;
                    });
                    if (prioritizedGaps.length > 10) {
                        detailsHTML += `<p style="color: #718096; font-size: 13px; margin-top: 5px;">... and ${prioritizedGaps.length - 10} more gaps</p>`;
                    }
                }

                detailsHTML += '</div>';
            }

            modalBody.innerHTML = detailsHTML || '<p>No gap data available</p>';
        }

        async function fetchProgress(symbol, interval) {
            try {
                const response = await fetch(`/api/gaps/progress/${symbol}/${interval}`);
                const result = await response.json();
                return result.status === 'success' ? result.data : null;
            } catch (error) {
                console.error('Error fetching progress:', error);
                return null;
            }
        }

        async function fetchHealth(symbol, interval) {
            try {
                const response = await fetch(`/api/gaps/health/${symbol}/${interval}`);
                const result = await response.json();
                return result.status === 'success' ? result.data : null;
            } catch (error) {
                console.error('Error fetching health:', error);
                return null;
            }
        }

        async function fetchPrioritizedGaps(symbol, interval) {
            try {
                const response = await fetch(`/api/gaps/prioritized/${symbol}/${interval}`);
                const result = await response.json();
                return result.status === 'success' ? result.data : null;
            } catch (error) {
                console.error('Error fetching prioritized gaps:', error);
                return null;
            }
        }
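
        // The three fetch helpers above all expect the gap API's standard envelope,
        // i.e. a JSON body of the form { "status": "success", "data": ... }; any other
        // status, or a network error, is collapsed to null so callers can skip that block.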

        async function smartFillGaps(symbol) {
            if (!confirm(`Smart fill will automatically prioritize and fill the most important gaps for ${symbol}. Continue?`)) {
                return;
            }

            try {
                const response = await fetch(`/api/gaps/smart-fill/${symbol}`, {
                    method: 'POST',
                    headers: { 'Content-Type': 'application/json' }
                });

                const result = await response.json();

                if (result.status === 'success') {
                    alert(`Smart fill completed for ${symbol}!\\n\\n` +
                        result.data.map(r => `${r.interval}: ${r.gaps_filled}/${r.total_gaps} filled`).join('\\n'));

                    // Reload data
                    setTimeout(loadGapData, 3000);
                } else {
                    alert('Error: ' + result.message);
                }
            } catch (error) {
                alert('Error filling gaps: ' + error.message);
            }
        }

        function closeModal() {
            document.getElementById('gapModal').style.display = 'none';
        }

        function showError(message) {
            document.getElementById('loadingIndicator').innerHTML = `
                <p style="color: #e53e3e; font-weight: bold;">${message}</p>
            `;
        }

        // Close modal when clicking outside
        window.onclick = function(event) {
            const modal = document.getElementById('gapModal');
            if (event.target == modal) {
                closeModal();
            }
        }

        // Load data on page load
        loadGapData();

        // Auto-refresh every 30 seconds
        setInterval(loadGapData, 30000);
    </script>
</body>
</html>
"""
72
ui_websocket.py
Normal file
@@ -0,0 +1,72 @@
#!/usr/bin/env python3
"""
ui_websocket.py - WebSocket Connections and Real-time Updates
Handles WebSocket connections and broadcasts real-time status updates
"""

import asyncio
import logging
from typing import List
from fastapi import WebSocket, WebSocketDisconnect

logger = logging.getLogger(__name__)

# Global WebSocket connection pool
websocket_connections: List[WebSocket] = []


async def broadcast_to_websockets(message: dict):
    """Send message to all connected WebSocket clients"""
    disconnected = []

    for ws in websocket_connections:
        try:
            await ws.send_json(message)
        except Exception:
            disconnected.append(ws)

    # Remove disconnected clients
    for ws in disconnected:
        if ws in websocket_connections:
            websocket_connections.remove(ws)


async def broadcast_status_updates(get_status_func):
    """Background task to broadcast status updates to all WebSocket clients"""
    while True:
        try:
            await asyncio.sleep(2)  # Broadcast every 2 seconds

            if websocket_connections:
                status = await get_status_func()
                await broadcast_to_websockets({
                    "type": "status_update",
                    "data": status
                })
        except Exception as e:
            logger.error(f"Error in broadcast task: {e}")


async def handle_websocket_connection(websocket: WebSocket):
    """Handle individual WebSocket connection"""
    await websocket.accept()
    websocket_connections.append(websocket)
    logger.info(f"WebSocket connected. Total connections: {len(websocket_connections)}")

    try:
        while True:
            # Keep connection alive and handle incoming messages
            data = await websocket.receive_text()
            logger.debug(f"Received WebSocket message: {data}")

            # Echo or handle specific commands if needed
            # await websocket.send_json({"type": "ack", "message": "received"})

    except WebSocketDisconnect:
        logger.info("WebSocket disconnected normally")
    except Exception as e:
        logger.error(f"WebSocket error: {e}")
    finally:
        if websocket in websocket_connections:
            websocket_connections.remove(websocket)
        logger.info(f"WebSocket removed. Total connections: {len(websocket_connections)}")
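
# Wiring sketch (assumption, not part of this module): the FastAPI app defined
# elsewhere in this project would hook these helpers up roughly like the commented
# example below. Names such as `app` and `get_status` are placeholders for illustration.
#
#   from fastapi import FastAPI, WebSocket
#   import asyncio
#   import ui_websocket
#
#   app = FastAPI()
#
#   @app.websocket("/ws")
#   async def ws_endpoint(websocket: WebSocket):
#       await ui_websocket.handle_websocket_connection(websocket)
#
#   @app.on_event("startup")
#   async def start_broadcaster():
#       # get_status() is assumed to return the dict broadcast as "status_update" messages
#       asyncio.create_task(ui_websocket.broadcast_status_updates(get_status))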
854
utils.py
Normal file
@@ -0,0 +1,854 @@
#!/usr/bin/env python3

"""
utils.py - Utility Functions for Data Processing and Technical Indicators

Utility functions for data processing, technical indicators, validation, and configuration management
"""

import json
import logging
import os
import re
import tempfile
import shutil
from datetime import datetime, timezone
from typing import Dict, List, Optional, Any, Union
import pandas as pd
import pandas_ta as ta
import numpy as np
from decimal import Decimal, ROUND_HALF_UP, InvalidOperation as DecimalException
from dotenv import load_dotenv

# Load environment variables
load_dotenv('variables.env')


def setup_logging(log_level: str = None, log_file: str = None):
    """Setup logging configuration"""
    # Use environment variables if parameters not provided
    if log_level is None:
        log_level = os.getenv('LOG_LEVEL', 'INFO')
    if log_file is None:
        log_file = os.getenv('LOG_FILE', 'crypto_collector.log')

    # Create logs directory if it doesn't exist
    os.makedirs("logs", exist_ok=True)

    log_format = "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
    date_format = "%Y-%m-%d %H:%M:%S"

    # Configure root logger
    logging.basicConfig(
        level=getattr(logging, log_level.upper()),
        format=log_format,
        datefmt=date_format,
        handlers=[
            logging.FileHandler(f"logs/{log_file}"),
            logging.StreamHandler()
        ]
    )

    # Set specific log levels for external libraries
    logging.getLogger("websockets").setLevel(logging.WARNING)
    logging.getLogger("asyncio").setLevel(logging.WARNING)
    logging.getLogger("urllib3").setLevel(logging.WARNING)
    logging.getLogger("binance").setLevel(logging.WARNING)


def load_config(config_file: str = "config.conf") -> Dict[str, Any]:
    """Load configuration from JSON file"""
    logger = logging.getLogger(__name__)
    try:
        with open(config_file, 'r') as f:
            config = json.load(f)

        # Validate configuration structure
        validate_config(config)
        logger.debug(f"Successfully loaded config from {config_file}")
        return config

    except FileNotFoundError:
        logger.warning(f"Config file {config_file} not found, creating default")
        # Create default configuration if file doesn't exist
        default_config = create_default_config()
        save_config(default_config, config_file)
        return default_config

    except json.JSONDecodeError as e:
        logger.error(f"Invalid JSON in configuration file: {e}")
        raise ValueError(f"Invalid JSON in configuration file: {e}")


def create_default_config() -> Dict[str, Any]:
    """Create default configuration"""
    return {
        "trading_pairs": [
            {"symbol": "BTCUSDT", "enabled": True, "priority": 1},
            {"symbol": "ETHUSDT", "enabled": True, "priority": 1},
            {"symbol": "BNBUSDT", "enabled": True, "priority": 2},
            {"symbol": "XRPUSDT", "enabled": True, "priority": 3},
            {"symbol": "SOLUSDT", "enabled": True, "priority": 2}
        ],
        "technical_indicators": {
            "enabled": ["sma", "ema", "rsi", "macd", "bb", "atr"],
            "periods": {
                "sma": [20, 50, 200],
                "ema": [12, 26],
                "rsi": [14],
                "macd": {"fast": 12, "slow": 26, "signal": 9},
                "bb": {"period": 20, "std": 2},
                "atr": [14],
                "stoch": {"k_period": 14, "d_period": 3},
                "adx": [14]
            },
            "calculation_intervals": ["1m", "5m", "15m", "1h", "4h", "1d"]
        },
        "collection": {
            "bulk_chunk_size": 1000,
            "websocket_reconnect_delay": 5,
            "tick_batch_size": 100,
            "candle_intervals": ["1m", "5m", "15m", "1h", "4h", "1d"],
            "max_retries": 3,
            "retry_delay": 1,
            "rate_limit_requests_per_minute": 2000,
            "concurrent_symbol_limit": 10
        },
        "database": {
            "batch_insert_size": 1000,
            "compression_after_days": 7,
            "retention_policy_days": 365,
            "vacuum_analyze_interval_hours": 24,
            "connection_pool": {
                "min_size": 10,
                "max_size": 50,
                "command_timeout": 60
            }
        },
        "ui": {
            "refresh_interval_seconds": 5,
            "max_chart_points": 1000,
            "default_timeframe": "1d",
            "theme": "dark",
            "enable_realtime_updates": True
        },
        "gap_filling": {
            "enable_auto_gap_filling": True,
            "auto_fill_schedule_hours": 24,
            "intervals_to_monitor": ["1m", "5m", "15m", "1h", "4h", "1d"],
            "max_gap_size_candles": 1000,
            "max_consecutive_empty_candles": 5,
            "averaging_lookback_candles": 10,
            "enable_intelligent_averaging": True,
            "max_fill_attempts": 3
        }
    }


def save_config(config: Dict[str, Any], config_file: str = "config.conf"):
    """Save configuration to JSON file using atomic write"""
    logger = logging.getLogger(__name__)

    try:
        # Validate before saving
        validate_config(config)

        # Get the directory of the config file
        config_dir = os.path.dirname(config_file) or '.'

        # Create a temporary file in the same directory
        temp_fd, temp_path = tempfile.mkstemp(
            dir=config_dir,
            prefix='.tmp_config_',
            suffix='.conf',
            text=True
        )

        try:
            # Write to temporary file
            with os.fdopen(temp_fd, 'w') as f:
                json.dump(config, f, indent=2, sort_keys=False)
                f.flush()
                os.fsync(f.fileno())  # Force write to disk

            # Atomic rename
            shutil.move(temp_path, config_file)
            logger.info(f"Configuration saved successfully to {config_file}")

        except Exception as e:
            # Clean up temp file on error
            try:
                os.unlink(temp_path)
            except:
                pass
            raise

    except Exception as e:
        logger.error(f"Error saving config: {e}", exc_info=True)
        raise
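
# Usage sketch for the config round-trip (illustration only; uses the default
# "config.conf" path from the functions above):
#
#   cfg = load_config()              # falls back to create_default_config() if the file is missing
#   cfg["ui"]["theme"] = "light"
#   save_config(cfg)                 # re-validated, then written atomically via a temp file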


def validate_config(config: Dict[str, Any]):
    """Validate configuration structure"""
    required_sections = ["trading_pairs", "technical_indicators", "collection", "database"]

    for section in required_sections:
        if section not in config:
            raise ValueError(f"Missing required configuration section: {section}")

    # Validate trading pairs
    if not isinstance(config["trading_pairs"], list):
        raise ValueError("trading_pairs must be a list")

    for pair in config["trading_pairs"]:
        if not isinstance(pair, dict) or "symbol" not in pair:
            raise ValueError("Invalid trading pair configuration")
        if not validate_symbol(pair["symbol"]):
            raise ValueError(f"Invalid symbol format: {pair['symbol']}")

        # Ensure required fields with defaults
        if "enabled" not in pair:
            pair["enabled"] = True
        if "priority" not in pair:
            pair["priority"] = 1

    # Validate technical indicators
    indicators_config = config["technical_indicators"]
    if "enabled" not in indicators_config or "periods" not in indicators_config:
        raise ValueError("Invalid technical indicators configuration")

    if not isinstance(indicators_config["enabled"], list):
        raise ValueError("technical_indicators.enabled must be a list")


def validate_symbol(symbol: str) -> bool:
    """Validate trading pair symbol format"""
    # Binance symbol format: base currency + quote currency (e.g., BTCUSDT)
    if not symbol or len(symbol) < 6:
        return False

    # Should be uppercase letters/numbers only
    if not re.match(r'^[A-Z0-9]+$', symbol):
        return False

    # Should end with common quote currencies
    quote_currencies = ['USDT', 'BUSD', 'BTC', 'ETH', 'BNB', 'USDC', 'TUSD', 'DAI']
    if not any(symbol.endswith(quote) for quote in quote_currencies):
        return False

    return True
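
# Examples of the rule above (uppercase alphanumerics, length >= 6, known quote suffix):
#   validate_symbol("BTCUSDT")  -> True
#   validate_symbol("HBARBTC")  -> True
#   validate_symbol("btcusdt")  -> False  (lowercase fails the regex)
#   validate_symbol("BTC-USD")  -> False  (hyphen, unknown quote suffix)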


def reload_env_vars(env_file: str = 'variables.env'):
    """Reload environment variables from file"""
    from dotenv import load_dotenv
    load_dotenv(env_file, override=True)


def format_timestamp(timestamp: Union[int, float, str, datetime]) -> datetime:
    """Format timestamp to datetime object"""
    if isinstance(timestamp, datetime):
        # Ensure timezone awareness
        if timestamp.tzinfo is None:
            return timestamp.replace(tzinfo=timezone.utc)
        return timestamp

    if isinstance(timestamp, str):
        try:
            # Try parsing ISO format first
            return datetime.fromisoformat(timestamp.replace('Z', '+00:00'))
        except ValueError:
            try:
                # Try parsing as timestamp
                timestamp = float(timestamp)
            except ValueError:
                raise ValueError(f"Invalid timestamp string format: {timestamp}")

    if isinstance(timestamp, (int, float)):
        # Handle both seconds and milliseconds timestamps
        if timestamp > 1e10:  # Milliseconds
            timestamp = timestamp / 1000
        return datetime.fromtimestamp(timestamp, tz=timezone.utc)

    raise ValueError(f"Invalid timestamp format: {type(timestamp)}")
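
# All of the following normalize to 2021-01-01 00:00:00+00:00 (UTC):
#   format_timestamp(1609459200)              # epoch seconds
#   format_timestamp(1609459200000)           # epoch milliseconds (> 1e10 heuristic)
#   format_timestamp("2021-01-01T00:00:00Z")  # ISO-8601 with a trailing Z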


def parse_kline_data(data: Dict[str, Any]) -> Dict[str, Any]:
    """Parse Binance kline/candlestick data"""
    kline = data['k']
    return {
        'time': format_timestamp(kline['t']),
        'symbol': kline['s'],
        'exchange': 'binance',
        'interval': kline['i'],
        'open_price': Decimal(str(kline['o'])),
        'high_price': Decimal(str(kline['h'])),
        'low_price': Decimal(str(kline['l'])),
        'close_price': Decimal(str(kline['c'])),
        'volume': Decimal(str(kline['v'])),
        'quote_volume': Decimal(str(kline['q'])) if 'q' in kline else None,
        'trade_count': int(kline['n']) if 'n' in kline else None
    }


def parse_trade_data(data: Dict[str, Any]) -> Dict[str, Any]:
    """Parse Binance trade data"""
    return {
        'time': format_timestamp(data['T']),
        'symbol': data['s'],
        'exchange': 'binance',
        'price': Decimal(str(data['p'])),
        'quantity': Decimal(str(data['q'])),
        'trade_id': int(data['t']),
        'is_buyer_maker': bool(data['m'])
    }


def calculate_technical_indicators(df: pd.DataFrame, indicators_config: Dict[str, Any]) -> List[Dict[str, Any]]:
    """
    Calculate technical indicators using pandas_ta

    Args:
        df: DataFrame with OHLCV data (index: time, columns: open, high, low, close, volume)
        indicators_config: Configuration for indicators to calculate

    Returns:
        List of dictionaries with indicator data
    """
    if len(df) < 50:  # Need enough data for most indicators
        return []

    # Create a copy and ensure proper data types
    df_ta = df.copy()

    # Rename columns to match pandas_ta expectations if needed
    column_mapping = {
        'open_price': 'open',
        'high_price': 'high',
        'low_price': 'low',
        'close_price': 'close'
    }

    for old_col, new_col in column_mapping.items():
        if old_col in df_ta.columns and new_col not in df_ta.columns:
            df_ta.rename(columns={old_col: new_col}, inplace=True)

    # **CRITICAL FIX**: Convert all columns to float64 to avoid numba pyobject errors
    # This ensures pandas_ta's numba-compiled functions receive proper numeric types
    required_columns = ['open', 'high', 'low', 'close', 'volume']
    for col in required_columns:
        if col in df_ta.columns:
            df_ta[col] = pd.to_numeric(df_ta[col], errors='coerce').astype(np.float64)

    # Remove any NaN values that may have been introduced
    df_ta = df_ta.dropna()

    if len(df_ta) < 50:  # Check again after cleaning
        return []

    indicators_data = []
    enabled_indicators = indicators_config.get('enabled', [])
    periods = indicators_config.get('periods', {})
    logger = logging.getLogger(__name__)

    try:
        for indicator in enabled_indicators:
            if indicator == 'sma':
                # Simple Moving Average
                for period in periods.get('sma', [20]):
                    try:
                        sma_values = ta.sma(df_ta['close'], length=period)
                        if sma_values is not None:
                            for idx, value in sma_values.dropna().items():
                                indicators_data.append({
                                    'time': idx,
                                    'indicator_name': f'sma_{period}',
                                    'indicator_value': round(float(value), 8),
                                    'metadata': json.dumps({'period': period})
                                })
                    except Exception as e:
                        logger.error(f"Error calculating SMA-{period}: {e}")

            elif indicator == 'ema':
                # Exponential Moving Average
                for period in periods.get('ema', [12, 26]):
                    try:
                        ema_values = ta.ema(df_ta['close'], length=period)
                        if ema_values is not None:
                            for idx, value in ema_values.dropna().items():
                                indicators_data.append({
                                    'time': idx,
                                    'indicator_name': f'ema_{period}',
                                    'indicator_value': round(float(value), 8),
                                    'metadata': json.dumps({'period': period})
                                })
                    except Exception as e:
                        logger.error(f"Error calculating EMA-{period}: {e}")

            elif indicator == 'rsi':
                # Relative Strength Index
                for period in periods.get('rsi', [14]):
                    try:
                        rsi_values = ta.rsi(df_ta['close'], length=period)
                        if rsi_values is not None:
                            for idx, value in rsi_values.dropna().items():
                                indicators_data.append({
                                    'time': idx,
                                    'indicator_name': f'rsi_{period}',
                                    'indicator_value': round(float(value), 8),
                                    'metadata': json.dumps({'period': period})
                                })
                    except Exception as e:
                        logger.error(f"Error calculating RSI-{period}: {e}")

            elif indicator == 'macd':
                # MACD
                macd_config = periods.get('macd', {'fast': 12, 'slow': 26, 'signal': 9})
                try:
                    macd_result = ta.macd(
                        df_ta['close'],
                        fast=macd_config['fast'],
                        slow=macd_config['slow'],
                        signal=macd_config['signal']
                    )

                    if macd_result is not None:
                        # MACD Line
                        macd_col = f"MACD_{macd_config['fast']}_{macd_config['slow']}_{macd_config['signal']}"
                        if macd_col in macd_result.columns:
                            for idx, value in macd_result[macd_col].dropna().items():
                                indicators_data.append({
                                    'time': idx,
                                    'indicator_name': 'macd_line',
                                    'indicator_value': round(float(value), 8),
                                    'metadata': json.dumps(macd_config)
                                })

                        # MACD Signal
                        signal_col = f"MACDs_{macd_config['fast']}_{macd_config['slow']}_{macd_config['signal']}"
                        if signal_col in macd_result.columns:
                            for idx, value in macd_result[signal_col].dropna().items():
                                indicators_data.append({
                                    'time': idx,
                                    'indicator_name': 'macd_signal',
                                    'indicator_value': round(float(value), 8),
                                    'metadata': json.dumps(macd_config)
                                })

                        # MACD Histogram
                        hist_col = f"MACDh_{macd_config['fast']}_{macd_config['slow']}_{macd_config['signal']}"
                        if hist_col in macd_result.columns:
                            for idx, value in macd_result[hist_col].dropna().items():
                                indicators_data.append({
                                    'time': idx,
                                    'indicator_name': 'macd_histogram',
                                    'indicator_value': round(float(value), 8),
                                    'metadata': json.dumps(macd_config)
                                })
                except Exception as e:
                    logger.error(f"Error calculating MACD: {e}")

            elif indicator == 'bb':
                # Bollinger Bands
                bb_config = periods.get('bb', {'period': 20, 'std': 2})
                try:
                    bb_result = ta.bbands(
                        df_ta['close'],
                        length=bb_config['period'],
                        std=bb_config['std']
                    )

                    if bb_result is not None:
                        # Upper Band
                        for col in bb_result.columns:
                            if col.startswith(f"BBU_{bb_config['period']}"):
                                for idx, value in bb_result[col].dropna().items():
                                    indicators_data.append({
                                        'time': idx,
                                        'indicator_name': 'bb_upper',
                                        'indicator_value': round(float(value), 8),
                                        'metadata': json.dumps(bb_config)
                                    })
                                break

                        # Middle Band
                        for col in bb_result.columns:
                            if col.startswith(f"BBM_{bb_config['period']}"):
                                for idx, value in bb_result[col].dropna().items():
                                    indicators_data.append({
                                        'time': idx,
                                        'indicator_name': 'bb_middle',
                                        'indicator_value': round(float(value), 8),
                                        'metadata': json.dumps(bb_config)
                                    })
                                break

                        # Lower Band
                        for col in bb_result.columns:
                            if col.startswith(f"BBL_{bb_config['period']}"):
                                for idx, value in bb_result[col].dropna().items():
                                    indicators_data.append({
                                        'time': idx,
                                        'indicator_name': 'bb_lower',
                                        'indicator_value': round(float(value), 8),
                                        'metadata': json.dumps(bb_config)
                                    })
                                break
                except Exception as e:
                    logger.error(f"Error calculating Bollinger Bands: {e}")

            elif indicator == 'atr':
                # Average True Range
                for period in periods.get('atr', [14]):
                    try:
                        atr_values = ta.atr(df_ta['high'], df_ta['low'], df_ta['close'], length=period)
                        if atr_values is not None:
                            for idx, value in atr_values.dropna().items():
                                indicators_data.append({
                                    'time': idx,
                                    'indicator_name': f'atr_{period}',
                                    'indicator_value': round(float(value), 8),
                                    'metadata': json.dumps({'period': period})
                                })
                    except Exception as e:
                        logger.error(f"Error calculating ATR-{period}: {e}")

            elif indicator == 'stoch':
                # Stochastic Oscillator
                stoch_config = periods.get('stoch', {'k_period': 14, 'd_period': 3})
                try:
                    stoch_result = ta.stoch(
                        df_ta['high'], df_ta['low'], df_ta['close'],
                        k=stoch_config['k_period'],
                        d=stoch_config['d_period']
                    )

                    if stoch_result is not None:
                        # %K
                        for col in stoch_result.columns:
                            if 'STOCHk' in col:
                                for idx, value in stoch_result[col].dropna().items():
                                    indicators_data.append({
                                        'time': idx,
                                        'indicator_name': 'stoch_k',
                                        'indicator_value': round(float(value), 8),
                                        'metadata': json.dumps(stoch_config)
                                    })
                                break

                        # %D
                        for col in stoch_result.columns:
                            if 'STOCHd' in col:
                                for idx, value in stoch_result[col].dropna().items():
                                    indicators_data.append({
                                        'time': idx,
                                        'indicator_name': 'stoch_d',
                                        'indicator_value': round(float(value), 8),
                                        'metadata': json.dumps(stoch_config)
                                    })
                                break
                except Exception as e:
                    logger.error(f"Error calculating Stochastic: {e}")

            elif indicator == 'adx':
                # Average Directional Index
                for period in periods.get('adx', [14]):
                    try:
                        adx_result = ta.adx(df_ta['high'], df_ta['low'], df_ta['close'], length=period)
                        if adx_result is not None:
                            adx_col = f"ADX_{period}"
                            if adx_col in adx_result.columns:
                                for idx, value in adx_result[adx_col].dropna().items():
                                    indicators_data.append({
                                        'time': idx,
                                        'indicator_name': f'adx_{period}',
                                        'indicator_value': round(float(value), 8),
                                        'metadata': json.dumps({'period': period})
                                    })
                    except Exception as e:
                        logger.error(f"Error calculating ADX-{period}: {e}")

    except Exception as e:
        logger.error(f"Error calculating technical indicators: {e}", exc_info=True)

    return indicators_data
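
# Usage sketch (illustration only; `ohlcv_df` stands for any time-indexed DataFrame
# with at least 50 rows and open/high/low/close/volume columns):
#
#   config = load_config()
#   rows = calculate_technical_indicators(ohlcv_df, config["technical_indicators"])
#   # -> [{'time': ..., 'indicator_name': 'sma_20', 'indicator_value': ...,
#   #      'metadata': '{"period": 20}'}, ...]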


def resample_ticks_to_ohlcv(ticks: List[Dict[str, Any]], interval: str) -> List[Dict[str, Any]]:
    """
    Resample tick data to OHLCV format

    Args:
        ticks: List of tick data dictionaries
        interval: Resampling interval (e.g., '1min', '5min', '1H')

    Returns:
        List of OHLCV dictionaries
    """
    if not ticks:
        return []

    # Convert to DataFrame
    df = pd.DataFrame(ticks)
    df['time'] = pd.to_datetime(df['time'])
    df.set_index('time', inplace=True)

    # Convert price and quantity to float
    df['price'] = pd.to_numeric(df['price'], errors='coerce')
    df['quantity'] = pd.to_numeric(df['quantity'], errors='coerce')

    # Group by symbol and resample
    ohlcv_data = []
    for symbol in df['symbol'].unique():
        symbol_df = df[df['symbol'] == symbol].copy()

        # Resample price data to open/high/low/close
        # (Resampler.ohlc() is used here; passing a renaming dict to Series.agg()
        # is not supported in modern pandas and raises a SpecificationError.)
        ohlcv = symbol_df['price'].resample(interval).ohlc()

        # Resample volume and trade count
        volume = symbol_df['quantity'].resample(interval).sum()
        trade_count = symbol_df.resample(interval).size()

        # Combine data
        for timestamp, row in ohlcv.iterrows():
            if pd.notna(row['open']):  # Skip empty periods
                ohlcv_data.append({
                    'time': timestamp,
                    'symbol': symbol,
                    'exchange': symbol_df['exchange'].iloc[0] if 'exchange' in symbol_df.columns else 'binance',
                    'interval': interval,
                    'open_price': Decimal(str(row['open'])).quantize(Decimal('0.00000001'), rounding=ROUND_HALF_UP),
                    'high_price': Decimal(str(row['high'])).quantize(Decimal('0.00000001'), rounding=ROUND_HALF_UP),
                    'low_price': Decimal(str(row['low'])).quantize(Decimal('0.00000001'), rounding=ROUND_HALF_UP),
                    'close_price': Decimal(str(row['close'])).quantize(Decimal('0.00000001'), rounding=ROUND_HALF_UP),
                    'volume': Decimal(str(volume.loc[timestamp])) if timestamp in volume.index else Decimal('0'),
                    'quote_volume': None,
                    'trade_count': int(trade_count.loc[timestamp]) if timestamp in trade_count.index else 0
                })

    return ohlcv_data
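
# Usage sketch (illustration only): two trades collapse into one 1-minute candle.
#
#   ticks = [
#       {'time': '2024-01-01T00:00:01Z', 'symbol': 'BTCUSDT', 'price': '42000.0', 'quantity': '0.5'},
#       {'time': '2024-01-01T00:00:30Z', 'symbol': 'BTCUSDT', 'price': '42010.0', 'quantity': '0.1'},
#   ]
#   candles = resample_ticks_to_ohlcv(ticks, '1min')
#   # -> one candle: open 42000, high 42010, low 42000, close 42010, volume 0.6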


def validate_ohlcv_data(ohlcv: Dict[str, Any]) -> bool:
    """Validate OHLCV data integrity"""
    try:
        # Check required fields
        required_fields = ['time', 'symbol', 'open_price', 'high_price', 'low_price', 'close_price', 'volume']
        for field in required_fields:
            if field not in ohlcv:
                return False

        # Check price relationships
        high = float(ohlcv['high_price'])
        low = float(ohlcv['low_price'])
        open_price = float(ohlcv['open_price'])
        close = float(ohlcv['close_price'])

        # High should be >= all other prices
        if high < max(low, open_price, close):
            return False

        # Low should be <= all other prices
        if low > min(high, open_price, close):
            return False

        # All prices should be positive
        if any(price <= 0 for price in [high, low, open_price, close]):
            return False

        # Volume should be non-negative
        if float(ohlcv['volume']) < 0:
            return False

        return True

    except (ValueError, TypeError, KeyError):
        return False


def calculate_price_change(current_price: float, previous_price: float) -> Dict[str, float]:
    """Calculate price change and percentage change"""
    if previous_price == 0:
        return {'change': 0.0, 'change_percent': 0.0}

    change = current_price - previous_price
    change_percent = (change / previous_price) * 100

    return {
        'change': round(change, 8),
        'change_percent': round(change_percent, 4)
    }


def format_volume(volume: Union[int, float, Decimal]) -> str:
    """Format volume for display"""
    volume = float(volume)

    if volume >= 1e9:
        return f"{volume / 1e9:.2f}B"
    elif volume >= 1e6:
        return f"{volume / 1e6:.2f}M"
    elif volume >= 1e3:
        return f"{volume / 1e3:.2f}K"
    else:
        return f"{volume:.2f}"


def get_interval_seconds(interval: str) -> int:
    """Convert interval string to seconds"""
    interval_map = {
        '1s': 1,
        '1m': 60,
        '3m': 180,
        '5m': 300,
        '15m': 900,
        '30m': 1800,
        '1h': 3600,
        '2h': 7200,
        '4h': 14400,
        '6h': 21600,
        '8h': 28800,
        '12h': 43200,
        '1d': 86400,
        '3d': 259200,
        '1w': 604800,
        '1M': 2592000  # Approximate
    }

    return interval_map.get(interval, 60)  # Default to 1 minute


def safe_decimal_conversion(value: Any) -> Optional[Decimal]:
    """Safely convert value to Decimal"""
    try:
        if value is None or value == '':
            return None
        return Decimal(str(value)).quantize(Decimal('0.00000001'), rounding=ROUND_HALF_UP)
    except (ValueError, TypeError, DecimalException):
        return None


def batch_data(data: List[Any], batch_size: int) -> List[List[Any]]:
    """Split data into batches"""
    batches = []
    for i in range(0, len(data), batch_size):
        batches.append(data[i:i + batch_size])
    return batches
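
# Example: batch_data([1, 2, 3, 4, 5], 2) -> [[1, 2], [3, 4], [5]]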


def get_binance_symbol_info(symbol: str) -> Dict[str, Any]:
    """Get symbol information for validation"""
    # This is a simplified version - in production you might want to fetch from Binance API
    common_symbols = {
        'BTCUSDT': {'baseAsset': 'BTC', 'quoteAsset': 'USDT', 'status': 'TRADING'},
        'ETHUSDT': {'baseAsset': 'ETH', 'quoteAsset': 'USDT', 'status': 'TRADING'},
        'BNBUSDT': {'baseAsset': 'BNB', 'quoteAsset': 'USDT', 'status': 'TRADING'},
        'XRPUSDT': {'baseAsset': 'XRP', 'quoteAsset': 'USDT', 'status': 'TRADING'},
        'SOLUSDT': {'baseAsset': 'SOL', 'quoteAsset': 'USDT', 'status': 'TRADING'},
        'ADAUSDT': {'baseAsset': 'ADA', 'quoteAsset': 'USDT', 'status': 'TRADING'},
        'DOTUSDT': {'baseAsset': 'DOT', 'quoteAsset': 'USDT', 'status': 'TRADING'},
        'LINKUSDT': {'baseAsset': 'LINK', 'quoteAsset': 'USDT', 'status': 'TRADING'},
        'LTCUSDT': {'baseAsset': 'LTC', 'quoteAsset': 'USDT', 'status': 'TRADING'},
        'HBARUSDT': {'baseAsset': 'HBAR', 'quoteAsset': 'USDT', 'status': 'TRADING'},
        'HBARBTC': {'baseAsset': 'HBAR', 'quoteAsset': 'BTC', 'status': 'TRADING'}
    }

    return common_symbols.get(symbol, {'status': 'UNKNOWN'})


class DataValidator:
    """Class for validating trading data"""

    @staticmethod
    def validate_tick_data(tick: Dict[str, Any]) -> bool:
        """Validate tick/trade data"""
        try:
            required_fields = ['time', 'symbol', 'price', 'quantity', 'trade_id']
            for field in required_fields:
                if field not in tick:
                    return False

            # Validate data types and ranges
            if float(tick['price']) <= 0:
                return False

            if float(tick['quantity']) <= 0:
                return False

            if not isinstance(tick['trade_id'], (int, str)):
                return False

            if not validate_symbol(tick['symbol']):
                return False

            return True

        except (ValueError, TypeError):
            return False

    @staticmethod
    def validate_indicators_data(indicators: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
        """Validate and clean indicators data"""
        valid_indicators = []

        for indicator in indicators:
            try:
                if ('time' in indicator and
                        'indicator_name' in indicator and
                        'indicator_value' in indicator):

                    # Check for valid numeric value
                    value = float(indicator['indicator_value'])
                    if not (np.isnan(value) or np.isinf(value)):
                        valid_indicators.append(indicator)

            except (ValueError, TypeError):
                continue

        return valid_indicators


def create_error_response(error_message: str, error_code: str = "GENERAL_ERROR") -> Dict[str, Any]:
    """Create standardized error response"""
    return {
        "success": False,
        "error": {
            "code": error_code,
            "message": error_message,
            "timestamp": datetime.utcnow().isoformat()
        }
    }


def create_success_response(data: Any = None, message: str = "Success") -> Dict[str, Any]:
    """Create standardized success response"""
    response = {
        "success": True,
        "message": message,
        "timestamp": datetime.utcnow().isoformat()
    }

    if data is not None:
        response["data"] = data

    return response


class PerformanceTimer:
    """Context manager for timing operations"""

    def __init__(self, operation_name: str):
        self.operation_name = operation_name
        self.start_time = None
        self.logger = logging.getLogger(__name__)

    def __enter__(self):
        self.start_time = datetime.utcnow()
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        if self.start_time:
            duration = (datetime.utcnow() - self.start_time).total_seconds()

            # Log slow operations
            slow_threshold = float(os.getenv('SLOW_QUERY_THRESHOLD_MS', 1000)) / 1000

            if duration > slow_threshold:
                self.logger.warning(f"SLOW OPERATION: {self.operation_name} took {duration:.3f}s")
            else:
                self.logger.debug(f"{self.operation_name} completed in {duration:.3f}s")
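
    # Usage sketch (illustration only; `insert_many(rows)` is a placeholder for any timed work):
    #
    #   with PerformanceTimer("bulk insert"):
    #       insert_many(rows)
    #
    # Durations above SLOW_QUERY_THRESHOLD_MS (default 1000 ms) are logged as warnings,
    # everything else at debug level.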


# Export main functions
__all__ = [
    'setup_logging', 'load_config', 'save_config', 'validate_config',
    'create_default_config', 'validate_symbol', 'format_timestamp',
    'parse_kline_data', 'parse_trade_data', 'calculate_technical_indicators',
    'resample_ticks_to_ohlcv', 'validate_ohlcv_data', 'calculate_price_change',
    'format_volume', 'get_interval_seconds', 'safe_decimal_conversion',
    'batch_data', 'get_binance_symbol_info', 'DataValidator',
    'create_error_response', 'create_success_response', 'PerformanceTimer',
    'reload_env_vars'
]
74
variables.env
Normal file
@@ -0,0 +1,74 @@
# Environment Variables for Crypto Trading Data Collector

# Database Configuration
DB_HOST=localhost
DB_PORT=5432
DB_NAME=crypto_trading
DB_USER=postgres
DB_PASSWORD=your_secure_password_here

# Database Connection Pool Settings
DB_POOL_MIN_SIZE=20
DB_POOL_MAX_SIZE=250
DB_COMMAND_TIMEOUT=120

# Binance API Configuration (Optional - not needed for market data)
# BINANCE_API_KEY=your_binance_api_key_here
# BINANCE_SECRET_KEY=your_binance_secret_key_here

# Application Configuration
LOG_LEVEL=INFO
LOG_FILE=crypto_collector.log

# Web UI Configuration
WEB_HOST=0.0.0.0
WEB_PORT=8000
WEB_RELOAD=true

# Performance Settings
MAX_CONCURRENT_REQUESTS=100
REQUEST_TIMEOUT=30
WEBSOCKET_PING_INTERVAL=20
WEBSOCKET_PING_TIMEOUT=60

# Data Collection Settings
BULK_DOWNLOAD_BATCH_SIZE=1000
TICK_BATCH_SIZE=100
WEBSOCKET_RECONNECT_DELAY=5
MAX_RETRIES=3

# Database Maintenance
COMPRESSION_AFTER_DAYS=7
RETENTION_POLICY_DAYS=365
VACUUM_ANALYZE_INTERVAL_HOURS=24

# Monitoring and Alerting
ENABLE_METRICS=true
METRICS_PORT=9090
ALERT_EMAIL_ENABLED=false
ALERT_EMAIL_SMTP_HOST=smtp.gmail.com
ALERT_EMAIL_SMTP_PORT=587
ALERT_EMAIL_USERNAME=your_email@gmail.com
ALERT_EMAIL_PASSWORD=your_email_password
ALERT_EMAIL_TO=admin@yourcompany.com

# Security Settings
SECRET_KEY=your_very_secure_secret_key_change_this_in_production
ALLOWED_HOSTS=localhost,127.0.0.1,0.0.0.0
CORS_ORIGINS=http://localhost:3000,http://localhost:8000

# TimescaleDB Specific Settings
TIMESCALEDB_TELEMETRY=off
SHARED_PRELOAD_LIBRARIES=timescaledb

# Memory and CPU Settings (adjust based on your 128GB RAM / 16-core setup)
WORK_MEM=1024MB
SHARED_BUFFERS=32GB
EFFECTIVE_CACHE_SIZE=64GB
MAX_CONNECTIONS=500
MAX_WORKER_PROCESSES=14
MAX_PARALLEL_WORKERS=14
MAX_PARALLEL_WORKERS_PER_GATHER=8

# NEW: Concurrency Control
MAX_CONCURRENT_DOWNLOADS=3
MAX_CONCURRENT_GAP_FILLS=2