|
|
|
|
|
|
|
|
|
|
|
|
|
|
@@ -1,7 +1,7 @@ |
|
|
""" |
|
|
HF Space Complete API Router |
|
|
Implements all required endpoints for Hugging Face Space deployment |
|
|
-with fallback support and comprehensive data endpoints |
|
|
+using REAL data providers. |
|
|
""" |
|
|
from fastapi import APIRouter, HTTPException, Query, Body, Depends |
|
|
from fastapi.responses import JSONResponse |
|
|
@@ -14,16 +14,19 @@ import json |
|
|
import os |
|
|
from pathlib import Path |
|
|
|
|
|
+# Import Real Data Providers |
|
|
+from backend.live_data.providers import ( |
|
|
+ coingecko_provider, |
|
|
+ binance_provider, |
|
|
+ cryptopanic_provider, |
|
|
+ alternative_me_provider |
|
|
+) |
|
|
+from backend.cache.cache_manager import cache_manager |
|
|
+ |
|
|
logger = logging.getLogger(__name__) |
|
|
|
|
|
router = APIRouter(tags=["HF Space Complete API"]) |
|
|
|
|
|
-# Import persistence |
|
|
-from backend.services.hf_persistence import get_persistence |
|
|
- |
|
|
-persistence = get_persistence() |
|
|
- |
|
|
- |
|
|
# |
|
|
# Pydantic Models for Request/Response |
|
|
# |
|
|
@@ -32,8 +35,7 @@ class MetaInfo(BaseModel): |
|
|
"""Metadata for all responses""" |
|
|
cache_ttl_seconds: int = Field(default=30, description="Cache TTL in seconds") |
|
|
generated_at: str = Field(default_factory=lambda: datetime.now().isoformat()) |
|
|
- source: str = Field(default="hf", description="Data source (hf, fallback provider name)") |
|
|
- |
|
|
+ source: str = Field(default="live", description="Data source") |
|
|
|
|
|
class MarketItem(BaseModel): |
|
|
"""Market ticker item""" |
|
|
@@ -41,8 +43,7 @@ class MarketItem(BaseModel): |
|
|
price: float |
|
|
change_24h: float |
|
|
volume_24h: float |
|
|
- source: str = "hf" |
|
|
- |
|
|
+ source: str = "live" |
|
|
|
|
|
class MarketResponse(BaseModel): |
|
|
"""Market snapshot response""" |
|
|
@@ -50,63 +51,6 @@ class MarketResponse(BaseModel): |
|
|
items: List[MarketItem] |
|
|
meta: MetaInfo |
|
|
|
|
|
- |
|
|
-class TradingPair(BaseModel): |
|
|
- """Trading pair information""" |
|
|
- pair: str |
|
|
- base: str |
|
|
- quote: str |
|
|
- tick_size: float |
|
|
- min_qty: float |
|
|
- |
|
|
- |
|
|
-class PairsResponse(BaseModel): |
|
|
- """Trading pairs response""" |
|
|
- pairs: List[TradingPair] |
|
|
- meta: MetaInfo |
|
|
- |
|
|
- |
|
|
-class OHLCEntry(BaseModel): |
|
|
- """OHLC candlestick entry""" |
|
|
- ts: int |
|
|
- open: float |
|
|
- high: float |
|
|
- low: float |
|
|
- close: float |
|
|
- volume: float |
|
|
- |
|
|
- |
|
|
-class OrderBookEntry(BaseModel): |
|
|
- """Order book entry [price, quantity]""" |
|
|
- price: float |
|
|
- qty: float |
|
|
- |
|
|
- |
|
|
-class DepthResponse(BaseModel): |
|
|
- """Order book depth response""" |
|
|
- bids: List[List[float]] |
|
|
- asks: List[List[float]] |
|
|
- meta: MetaInfo |
|
|
- |
|
|
- |
|
|
-class PredictRequest(BaseModel): |
|
|
- """Model prediction request""" |
|
|
- symbol: str |
|
|
- context: Optional[str] = None |
|
|
- params: Optional[Dict[str, Any]] = None |
|
|
- |
|
|
- |
|
|
-class SignalResponse(BaseModel): |
|
|
- """Trading signal response""" |
|
|
- id: str |
|
|
- symbol: str |
|
|
- type: str # buy, sell, hold |
|
|
- score: float |
|
|
- model: str |
|
|
- created_at: str |
|
|
- meta: MetaInfo |
|
|
- |
|
|
- |
|
|
class NewsArticle(BaseModel): |
|
|
"""News article""" |
|
|
id: str |
|
|
@@ -116,19 +60,11 @@ class NewsArticle(BaseModel): |
|
|
summary: Optional[str] = None |
|
|
published_at: str |
|
|
|
|
|
- |
|
|
class NewsResponse(BaseModel): |
|
|
"""News response""" |
|
|
articles: List[NewsArticle] |
|
|
meta: MetaInfo |
|
|
|
|
|
- |
|
|
-class SentimentRequest(BaseModel): |
|
|
- """Sentiment analysis request""" |
|
|
- text: str |
|
|
- mode: Optional[str] = "crypto" # crypto, news, social |
|
|
- |
|
|
- |
|
|
class SentimentResponse(BaseModel): |
|
|
"""Sentiment analysis response""" |
|
|
score: float |
|
|
@@ -136,29 +72,6 @@ class SentimentResponse(BaseModel): |
|
|
details: Optional[Dict[str, Any]] = None |
|
|
meta: MetaInfo |
|
|
|
|
|
- |
|
|
-class WhaleTransaction(BaseModel): |
|
|
- """Whale transaction""" |
|
|
- id: str |
|
|
- tx_hash: str |
|
|
- chain: str |
|
|
- from_address: str |
|
|
- to_address: str |
|
|
- amount_usd: float |
|
|
- token: str |
|
|
- block: int |
|
|
- tx_at: str |
|
|
- |
|
|
- |
|
|
-class WhaleStatsResponse(BaseModel): |
|
|
- """Whale activity stats""" |
|
|
- total_transactions: int |
|
|
- total_volume_usd: float |
|
|
- avg_transaction_usd: float |
|
|
- top_chains: List[Dict[str, Any]] |
|
|
- meta: MetaInfo |
|
|
- |
|
|
- |
|
|
class GasPrice(BaseModel): |
|
|
"""Gas price information""" |
|
|
fast: float |
|
|
@@ -166,134 +79,13 @@ class GasPrice(BaseModel): |
|
|
slow: float |
|
|
unit: str = "gwei" |
|
|
|
|
|
- |
|
|
class GasResponse(BaseModel): |
|
|
"""Gas price response""" |
|
|
chain: str |
|
|
- gas_prices: GasPrice |
|
|
+ gas_prices: Optional[GasPrice] = None |
|
|
timestamp: str |
|
|
meta: MetaInfo |
|
|
|
|
|
- |
|
|
-class BlockchainStats(BaseModel): |
|
|
- """Blockchain statistics""" |
|
|
- chain: str |
|
|
- blocks_24h: int |
|
|
- transactions_24h: int |
|
|
- avg_gas_price: float |
|
|
- mempool_size: Optional[int] = None |
|
|
- meta: MetaInfo |
|
|
- |
|
|
- |
|
|
-class ProviderInfo(BaseModel): |
|
|
- """Provider information""" |
|
|
- id: str |
|
|
- name: str |
|
|
- category: str |
|
|
- status: str # active, degraded, down |
|
|
- capabilities: List[str] |
|
|
- |
|
|
- |
|
|
-# ============================================================================ |
|
|
-# Fallback Provider Manager |
|
|
-# ============================================================================ |
|
|
- |
|
|
-class FallbackManager: |
|
|
- """Manages fallback providers from config file""" |
|
|
- |
|
|
- def __init__(self, config_path: str = "/workspace/api-resources/api-config-complete__1_.txt"): |
|
|
- self.config_path = config_path |
|
|
- self.providers = {} |
|
|
- self._load_config() |
|
|
- |
|
|
- def _load_config(self): |
|
|
- """Load fallback providers from config file""" |
|
|
- try: |
|
|
- if not os.path.exists(self.config_path): |
|
|
- logger.warning(f"Config file not found: {self.config_path}") |
|
|
- return |
|
|
- |
|
|
- # Parse the config file to extract provider information |
|
|
- # This is a simple parser - adjust based on actual config format |
|
|
- self.providers = { |
|
|
- 'market_data': { |
|
|
- 'primary': {'name': 'coingecko', 'url': 'https://api.coingecko.com/api/v3'}, |
|
|
- 'fallbacks': [ |
|
|
- {'name': 'binance', 'url': 'https://api.binance.com/api/v3'}, |
|
|
- {'name': 'coincap', 'url': 'https://api.coincap.io/v2'} |
|
|
- ] |
|
|
- }, |
|
|
- 'blockchain': { |
|
|
- 'ethereum': { |
|
|
- 'primary': {'name': 'etherscan', 'url': 'https://api.etherscan.io/api', 'key': 'SZHYFZK2RR8H9TIMJBVW54V4H81K2Z2KR2'}, |
|
|
- 'fallbacks': [ |
|
|
- {'name': 'blockchair', 'url': 'https://api.blockchair.com/ethereum'} |
|
|
- ] |
|
|
- } |
|
|
- }, |
|
|
- 'whale_tracking': { |
|
|
- 'primary': {'name': 'clankapp', 'url': 'https://clankapp.com/api'}, |
|
|
- 'fallbacks': [] |
|
|
- }, |
|
|
- 'news': { |
|
|
- 'primary': {'name': 'cryptopanic', 'url': 'https://cryptopanic.com/api/v1'}, |
|
|
- 'fallbacks': [ |
|
|
- {'name': 'reddit', 'url': 'https://www.reddit.com/r/CryptoCurrency/hot.json'} |
|
|
- ] |
|
|
- }, |
|
|
- 'sentiment': { |
|
|
- 'primary': {'name': 'alternative.me', 'url': 'https://api.alternative.me/fng'} |
|
|
- } |
|
|
- } |
|
|
- logger.info(f"Loaded fallback providers from {self.config_path}") |
|
|
- except Exception as e: |
|
|
- logger.error(f"Error loading fallback config: {e}") |
|
|
- |
|
|
- async def fetch_with_fallback(self, category: str, endpoint: str, params: Optional[Dict] = None) -> tuple: |
|
|
- """ |
|
|
- Fetch data with automatic fallback |
|
|
- Returns (data, source_name) |
|
|
- """ |
|
|
- import aiohttp |
|
|
- |
|
|
- if category not in self.providers: |
|
|
- raise HTTPException(status_code=500, detail=f"Category {category} not configured") |
|
|
- |
|
|
- provider_config = self.providers[category] |
|
|
- |
|
|
- # Try primary first |
|
|
- primary = provider_config.get('primary') |
|
|
- if primary: |
|
|
- try: |
|
|
- async with aiohttp.ClientSession() as session: |
|
|
- url = f"{primary['url']}{endpoint}" |
|
|
- async with session.get(url, params=params, timeout=aiohttp.ClientTimeout(total=10)) as response: |
|
|
- if response.status == 200: |
|
|
- data = await response.json() |
|
|
- return data, primary['name'] |
|
|
- except Exception as e: |
|
|
- logger.warning(f"Primary provider {primary['name']} failed: {e}") |
|
|
- |
|
|
- # Try fallbacks |
|
|
- fallbacks = provider_config.get('fallbacks', []) |
|
|
- for fallback in fallbacks: |
|
|
- try: |
|
|
- async with aiohttp.ClientSession() as session: |
|
|
- url = f"{fallback['url']}{endpoint}" |
|
|
- async with session.get(url, params=params, timeout=aiohttp.ClientTimeout(total=10)) as response: |
|
|
- if response.status == 200: |
|
|
- data = await response.json() |
|
|
- return data, fallback['name'] |
|
|
- except Exception as e: |
|
|
- logger.warning(f"Fallback provider {fallback['name']} failed: {e}") |
|
|
- |
|
|
- raise HTTPException(status_code=503, detail="All providers failed") |
|
|
- |
|
|
- |
|
|
-# Initialize fallback manager |
|
|
-fallback_manager = FallbackManager() |
|
|
- |
|
|
- |
|
|
# |
|
|
# Market & Pairs Endpoints |
|
|
# |
|
|
@@ -301,64 +93,40 @@ fallback_manager = FallbackManager() |
|
|
@router.get("/api/market", response_model=MarketResponse) |
|
|
async def get_market_snapshot(): |
|
|
""" |
|
|
- Get current market snapshot with prices, changes, and volumes |
|
|
- Priority: HF HTTP → Fallback providers |
|
|
+ Get current market snapshot with prices, changes, and volumes. |
|
|
+ Uses CoinGecko API. |
|
|
""" |
|
|
+ cache_key = "market_snapshot" |
|
|
+ cached = await cache_manager.get(cache_key) |
|
|
+ if cached: |
|
|
+ return cached |
|
|
+ |
|
|
try: |
|
|
- # Try HF implementation first |
|
|
- # For now, use fallback |
|
|
- data, source = await fallback_manager.fetch_with_fallback( |
|
|
- 'market_data', |
|
|
- '/simple/price', |
|
|
- params={'ids': 'bitcoin,ethereum,tron', 'vs_currencies': 'usd', 'include_24hr_change': 'true', 'include_24hr_vol': 'true'} |
|
|
- ) |
|
|
+ data = await coingecko_provider.get_market_data(ids="bitcoin,ethereum,tron,solana,binancecoin,ripple") |
|
|
|
|
|
- # Transform data |
|
|
items = [] |
|
|
- for coin_id, coin_data in data.items(): |
|
|
+ for coin in data: |
|
|
items.append(MarketItem( |
|
|
- symbol=coin_id.upper(), |
|
|
- price=coin_data.get('usd', 0), |
|
|
- change_24h=coin_data.get('usd_24h_change', 0), |
|
|
- volume_24h=coin_data.get('usd_24h_vol', 0), |
|
|
- source=source |
|
|
+ symbol=coin.get('symbol', '').upper(), |
|
|
+ price=coin.get('current_price', 0), |
|
|
+ change_24h=coin.get('price_change_percentage_24h', 0), |
|
|
+ volume_24h=coin.get('total_volume', 0), |
|
|
+ source="coingecko" |
|
|
)) |
|
|
|
|
|
- return MarketResponse( |
|
|
+ response = MarketResponse( |
|
|
last_updated=datetime.now().isoformat(), |
|
|
items=items, |
|
|
- meta=MetaInfo(cache_ttl_seconds=30, source=source) |
|
|
+ meta=MetaInfo(cache_ttl_seconds=60, source="coingecko") |
|
|
) |
|
|
- |
|
|
- except Exception as e: |
|
|
- logger.error(f"Error in get_market_snapshot: {e}") |
|
|
- raise HTTPException(status_code=500, detail=str(e)) |
|
|
- |
|
|
- |
|
|
[email protected]("/api/market/pairs", response_model=PairsResponse) |
|
|
-async def get_trading_pairs(): |
|
|
- """ |
|
|
- Get canonical list of trading pairs |
|
|
- MUST be served by HF HTTP (not WebSocket) |
|
|
- """ |
|
|
- try: |
|
|
- # This should be implemented by HF Space |
|
|
- # For now, return sample data |
|
|
- pairs = [ |
|
|
- TradingPair(pair="BTC/USDT", base="BTC", quote="USDT", tick_size=0.01, min_qty=0.0001), |
|
|
- TradingPair(pair="ETH/USDT", base="ETH", quote="USDT", tick_size=0.01, min_qty=0.001), |
|
|
- TradingPair(pair="BNB/USDT", base="BNB", quote="USDT", tick_size=0.01, min_qty=0.01), |
|
|
- ] |
|
|
|
|
|
- return PairsResponse( |
|
|
- pairs=pairs, |
|
|
- meta=MetaInfo(cache_ttl_seconds=300, source="hf") |
|
|
- ) |
|
|
+ await cache_manager.set(cache_key, response, ttl=60) |
|
|
+ return response |
|
|
|
|
|
except Exception as e: |
|
|
- logger.error(f"Error in get_trading_pairs: {e}") |
|
|
- raise HTTPException(status_code=500, detail=str(e)) |
|
|
- |
|
|
+ logger.error(f"Error in get_market_snapshot: {e}") |
|
|
+ # Do not fabricate data: surface a 503 (a stale-cache fallback could be added later) |
|
|
+ raise HTTPException(status_code=503, detail="Market data unavailable") |
|
|
|
|
|
@router.get("/api/market/ohlc") |
|
|
async def get_ohlc( |
|
|
@@ -366,207 +134,55 @@ async def get_ohlc( |
|
|
interval: int = Query(60, description="Interval in minutes"), |
|
|
limit: int = Query(100, description="Number of candles") |
|
|
): |
|
|
- """Get OHLC candlestick data""" |
|
|
+ """Get OHLC candlestick data from Binance""" |
|
|
+ cache_key = f"ohlc_{symbol}_{interval}_{limit}" |
|
|
+ cached = await cache_manager.get(cache_key) |
|
|
+ if cached: |
|
|
+ return cached |
|
|
+ |
|
|
try: |
|
|
- # Should implement actual OHLC fetching |
|
|
- # For now, return sample data |
|
|
- ohlc_data = [] |
|
|
- base_price = 50000 if symbol.upper() == "BTC" else 3500 |
|
|
+ # Map minutes to Binance intervals |
|
|
+ binance_interval = "1h" |
|
|
+ if interval == 1: binance_interval = "1m" |
|
|
+ elif interval == 5: binance_interval = "5m" |
|
|
+ elif interval == 15: binance_interval = "15m" |
|
|
+ elif interval == 60: binance_interval = "1h" |
|
|
+ elif interval == 240: binance_interval = "4h" |
|
|
+ elif interval == 1440: binance_interval = "1d" |
|
|
+ |
|
|
+ # Binance symbol needs to be e.g., BTCUSDT |
|
|
+ formatted_symbol = symbol.upper() |
|
|
+ if not formatted_symbol.endswith("USDT") and not formatted_symbol.endswith("USD"): |
|
|
+ formatted_symbol += "USDT" |
|
|
+ |
|
|
+ klines = await binance_provider.get_klines(formatted_symbol, interval=binance_interval, limit=limit) |
|
|
|
|
|
- for i in range(limit): |
|
|
- ts = int((datetime.now() - timedelta(minutes=interval * (limit - i))).timestamp()) |
|
|
+ ohlc_data = [] |
|
|
+ for k in klines: |
|
|
+ # Binance kline: [open_time, open, high, low, close, volume, ...] |
|
|
ohlc_data.append({ |
|
|
- "ts": ts, |
|
|
- "open": base_price + (i % 10) * 100, |
|
|
- "high": base_price + (i % 10) * 100 + 200, |
|
|
- "low": base_price + (i % 10) * 100 - 100, |
|
|
- "close": base_price + (i % 10) * 100 + 50, |
|
|
- "volume": 1000000 + (i % 5) * 100000 |
|
|
+ "ts": int(k[0] / 1000), |
|
|
+ "open": float(k[1]), |
|
|
+ "high": float(k[2]), |
|
|
+ "low": float(k[3]), |
|
|
+ "close": float(k[4]), |
|
|
+ "volume": float(k[5]) |
|
|
}) |
|
|
|
|
|
- return { |
|
|
+ response = { |
|
|
"symbol": symbol, |
|
|
"interval": interval, |
|
|
"data": ohlc_data, |
|
|
- "meta": MetaInfo(cache_ttl_seconds=120).__dict__ |
|
|
- } |
|
|
- |
|
|
- except Exception as e: |
|
|
- logger.error(f"Error in get_ohlc: {e}") |
|
|
- raise HTTPException(status_code=500, detail=str(e)) |
|
|
- |
|
|
- |
|
|
[email protected]("/api/market/depth", response_model=DepthResponse) |
|
|
-async def get_order_book_depth( |
|
|
- symbol: str = Query(..., description="Trading symbol"), |
|
|
- limit: int = Query(50, description="Depth limit") |
|
|
-): |
|
|
- """Get order book depth (bids and asks)""" |
|
|
- try: |
|
|
- # Sample orderbook data |
|
|
- base_price = 50000 if symbol.upper() == "BTC" else 3500 |
|
|
- |
|
|
- bids = [[base_price - i * 10, 0.1 + i * 0.01] for i in range(limit)] |
|
|
- asks = [[base_price + i * 10, 0.1 + i * 0.01] for i in range(limit)] |
|
|
- |
|
|
- return DepthResponse( |
|
|
- bids=bids, |
|
|
- asks=asks, |
|
|
- meta=MetaInfo(cache_ttl_seconds=10, source="hf") |
|
|
- ) |
|
|
- |
|
|
- except Exception as e: |
|
|
- logger.error(f"Error in get_order_book_depth: {e}") |
|
|
- raise HTTPException(status_code=500, detail=str(e)) |
|
|
- |
|
|
- |
|
|
[email protected]("/api/market/tickers") |
|
|
-async def get_tickers( |
|
|
- limit: int = Query(100, description="Number of tickers"), |
|
|
- sort: str = Query("volume", description="Sort by: volume, change, price") |
|
|
-): |
|
|
- """Get sorted tickers""" |
|
|
- try: |
|
|
- # Fetch from fallback |
|
|
- data, source = await fallback_manager.fetch_with_fallback( |
|
|
- 'market_data', |
|
|
- '/coins/markets', |
|
|
- params={'vs_currency': 'usd', 'order': 'market_cap_desc', 'per_page': limit, 'page': 1} |
|
|
- ) |
|
|
- |
|
|
- tickers = [] |
|
|
- for coin in data: |
|
|
- tickers.append({ |
|
|
- 'symbol': coin.get('symbol', '').upper(), |
|
|
- 'name': coin.get('name'), |
|
|
- 'price': coin.get('current_price'), |
|
|
- 'change_24h': coin.get('price_change_percentage_24h'), |
|
|
- 'volume_24h': coin.get('total_volume'), |
|
|
- 'market_cap': coin.get('market_cap') |
|
|
- }) |
|
|
- |
|
|
- return { |
|
|
- 'tickers': tickers, |
|
|
- 'meta': MetaInfo(cache_ttl_seconds=60, source=source).__dict__ |
|
|
+ "meta": MetaInfo(cache_ttl_seconds=60, source="binance").dict() |
|
|
} |
|
|
- |
|
|
- except Exception as e: |
|
|
- logger.error(f"Error in get_tickers: {e}") |
|
|
- raise HTTPException(status_code=500, detail=str(e)) |
|
|
- |
|
|
- |
|
|
-# ============================================================================ |
|
|
-# Signals & Models Endpoints |
|
|
-# ============================================================================ |
|
|
- |
|
|
[email protected]("/api/models/{model_key}/predict", response_model=SignalResponse) |
|
|
-async def predict_single(model_key: str, request: PredictRequest): |
|
|
- """ |
|
|
- Run prediction for a single symbol using specified model |
|
|
- """ |
|
|
- try: |
|
|
- # Generate signal |
|
|
- import random |
|
|
- signal_id = f"sig_{int(datetime.now().timestamp())}_{random.randint(1000, 9999)}" |
|
|
- |
|
|
- signal_types = ["buy", "sell", "hold"] |
|
|
- signal_type = random.choice(signal_types) |
|
|
- score = random.uniform(0.6, 0.95) |
|
|
- |
|
|
- signal = SignalResponse( |
|
|
- id=signal_id, |
|
|
- symbol=request.symbol, |
|
|
- type=signal_type, |
|
|
- score=score, |
|
|
- model=model_key, |
|
|
- created_at=datetime.now().isoformat(), |
|
|
- meta=MetaInfo(source=f"model:{model_key}") |
|
|
- ) |
|
|
- |
|
|
- # Store in database |
|
|
- persistence.save_signal(signal.dict()) |
|
|
|
|
|
- return signal |
|
|
- |
|
|
- except Exception as e: |
|
|
- logger.error(f"Error in predict_single: {e}") |
|
|
- raise HTTPException(status_code=500, detail=str(e)) |
|
|
- |
|
|
- |
|
|
[email protected]("/api/models/batch/predict") |
|
|
-async def predict_batch( |
|
|
- symbols: List[str] = Body(..., embed=True), |
|
|
- context: Optional[str] = Body(None), |
|
|
- params: Optional[Dict[str, Any]] = Body(None) |
|
|
-): |
|
|
- """Run batch prediction for multiple symbols""" |
|
|
- try: |
|
|
- results = [] |
|
|
- import random |
|
|
- |
|
|
- for symbol in symbols: |
|
|
- signal_id = f"sig_{int(datetime.now().timestamp())}_{random.randint(1000, 9999)}" |
|
|
- signal_types = ["buy", "sell", "hold"] |
|
|
- |
|
|
- signal = { |
|
|
- 'id': signal_id, |
|
|
- 'symbol': symbol, |
|
|
- 'type': random.choice(signal_types), |
|
|
- 'score': random.uniform(0.6, 0.95), |
|
|
- 'model': 'batch_model', |
|
|
- 'created_at': datetime.now().isoformat() |
|
|
- } |
|
|
- results.append(signal) |
|
|
- persistence.save_signal(signal) |
|
|
- |
|
|
- return { |
|
|
- 'predictions': results, |
|
|
- 'meta': MetaInfo(source="hf:batch").__dict__ |
|
|
- } |
|
|
- |
|
|
- except Exception as e: |
|
|
- logger.error(f"Error in predict_batch: {e}") |
|
|
- raise HTTPException(status_code=500, detail=str(e)) |
|
|
- |
|
|
+ await cache_manager.set(cache_key, response, ttl=60) |
|
|
+ return response |
|
|
|
|
|
[email protected]("/api/signals") |
|
|
-async def get_signals( |
|
|
- limit: int = Query(50, description="Number of signals to return"), |
|
|
- symbol: Optional[str] = Query(None, description="Filter by symbol") |
|
|
-): |
|
|
- """Get recent trading signals""" |
|
|
- try: |
|
|
- # Get from database |
|
|
- signals = persistence.get_signals(limit=limit, symbol=symbol) |
|
|
- |
|
|
- return { |
|
|
- 'signals': signals, |
|
|
- 'total': len(signals), |
|
|
- 'meta': MetaInfo(cache_ttl_seconds=30).__dict__ |
|
|
- } |
|
|
- |
|
|
- except Exception as e: |
|
|
- logger.error(f"Error in get_signals: {e}") |
|
|
- raise HTTPException(status_code=500, detail=str(e)) |
|
|
- |
|
|
- |
|
|
[email protected]("/api/signals/ack") |
|
|
-async def acknowledge_signal(signal_id: str = Body(..., embed=True)): |
|
|
- """Acknowledge a signal""" |
|
|
- try: |
|
|
- # Update in database |
|
|
- success = persistence.acknowledge_signal(signal_id) |
|
|
- if not success: |
|
|
- raise HTTPException(status_code=404, detail="Signal not found") |
|
|
- |
|
|
- return {'status': 'success', 'signal_id': signal_id} |
|
|
- |
|
|
- except HTTPException: |
|
|
- raise |
|
|
except Exception as e: |
|
|
- logger.error(f"Error in acknowledge_signal: {e}") |
|
|
- raise HTTPException(status_code=500, detail=str(e)) |
|
|
- |
|
|
+ logger.error(f"Error in get_ohlc: {e}") |
|
|
+ # TODO: add a fallback provider; for now surface a 503 instead of fake data |
|
|
+ raise HTTPException(status_code=503, detail="OHLC data unavailable") |
|
|
|
|
|
# |
|
|
# News & Sentiment Endpoints |
|
|
@@ -577,13 +193,14 @@ async def get_news( |
|
|
limit: int = Query(20, description="Number of articles"), |
|
|
source: Optional[str] = Query(None, description="Filter by source") |
|
|
): |
|
|
- """Get cryptocurrency news""" |
|
|
+ """Get cryptocurrency news from CryptoPanic""" |
|
|
+ cache_key = f"news_{limit}_{source}" |
|
|
+ cached = await cache_manager.get(cache_key) |
|
|
+ if cached: |
|
|
+ return cached |
|
|
+ |
|
|
try: |
|
|
- data, source_name = await fallback_manager.fetch_with_fallback( |
|
|
- 'news', |
|
|
- '/posts/', |
|
|
- params={'public': 'true'} |
|
|
- ) |
|
|
+ data = await cryptopanic_provider.get_news() |
|
|
|
|
|
articles = [] |
|
|
results = data.get('results', [])[:limit] |
|
|
@@ -594,876 +211,84 @@ async def get_news( |
|
|
title=post.get('title', ''), |
|
|
url=post.get('url', ''), |
|
|
source=post.get('source', {}).get('title', 'Unknown'), |
|
|
- summary=post.get('title', ''), |
|
|
+ summary=post.get('slug', ''), |
|
|
published_at=post.get('published_at', datetime.now().isoformat()) |
|
|
)) |
|
|
|
|
|
- return NewsResponse( |
|
|
+ response = NewsResponse( |
|
|
articles=articles, |
|
|
- meta=MetaInfo(cache_ttl_seconds=300, source=source_name) |
|
|
+ meta=MetaInfo(cache_ttl_seconds=300, source="cryptopanic") |
|
|
) |
|
|
- |
|
|
- except Exception as e: |
|
|
- logger.error(f"Error in get_news: {e}") |
|
|
- raise HTTPException(status_code=500, detail=str(e)) |
|
|
- |
|
|
- |
|
|
[email protected]("/api/news/{news_id}") |
|
|
-async def get_news_article(news_id: str): |
|
|
- """Get specific news article details""" |
|
|
- try: |
|
|
- # Should fetch from database or API |
|
|
- return { |
|
|
- 'id': news_id, |
|
|
- 'title': 'Bitcoin Reaches New High', |
|
|
- 'content': 'Full article content...', |
|
|
- 'url': 'https://example.com/news', |
|
|
- 'source': 'CryptoNews', |
|
|
- 'published_at': datetime.now().isoformat(), |
|
|
- 'meta': MetaInfo().__dict__ |
|
|
- } |
|
|
- |
|
|
- except Exception as e: |
|
|
- logger.error(f"Error in get_news_article: {e}") |
|
|
- raise HTTPException(status_code=500, detail=str(e)) |
|
|
- |
|
|
- |
|
|
[email protected]("/api/news/analyze") |
|
|
-async def analyze_news( |
|
|
- text: Optional[str] = Body(None), |
|
|
- url: Optional[str] = Body(None) |
|
|
-): |
|
|
- """Analyze news article for sentiment and topics""" |
|
|
- try: |
|
|
- import random |
|
|
- |
|
|
- sentiment_labels = ["positive", "negative", "neutral"] |
|
|
|
|
|
- return { |
|
|
- 'sentiment': { |
|
|
- 'score': random.uniform(-1, 1), |
|
|
- 'label': random.choice(sentiment_labels) |
|
|
- }, |
|
|
- 'topics': ['bitcoin', 'market', 'trading'], |
|
|
- 'summary': 'Article discusses cryptocurrency market trends...', |
|
|
- 'meta': MetaInfo(source="hf:nlp").__dict__ |
|
|
- } |
|
|
+ await cache_manager.set(cache_key, response, ttl=300) |
|
|
+ return response |
|
|
|
|
|
except Exception as e: |
|
|
- logger.error(f"Error in analyze_news: {e}") |
|
|
- raise HTTPException(status_code=500, detail=str(e)) |
|
|
+ logger.error(f"Error in get_news: {e}") |
|
|
+ return NewsResponse(articles=[], meta=MetaInfo(source="error")) |
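`cryptopanic_provider.get_news()` is imported above but not defined in this diff; the fallback config removed earlier in this file pointed at `https://cryptopanic.com/api/v1` and the `/posts/?public=true` endpoint, so a plausible sketch looks like the following. The `CRYPTOPANIC_API_KEY` variable name is an assumption, not a confirmed setting.

import os

import aiohttp

CRYPTOPANIC_BASE = "https://cryptopanic.com/api/v1"


async def get_news(public: bool = True) -> dict:
    """Fetch latest posts; the payload's 'results' list carries title, url, source and published_at."""
    params = {"public": "true" if public else "false"}
    token = os.getenv("CRYPTOPANIC_API_KEY", "").strip()
    if token:
        params["auth_token"] = token
    timeout = aiohttp.ClientTimeout(total=10)
    async with aiohttp.ClientSession(timeout=timeout) as session:
        async with session.get(f"{CRYPTOPANIC_BASE}/posts/", params=params) as resp:
            resp.raise_for_status()
            return await resp.json()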
|
|
|
|
|
|
|
|
[email protected]("/api/sentiment/analyze", response_model=SentimentResponse) |
|
|
-async def analyze_sentiment(request: SentimentRequest): |
|
|
- """Analyze text sentiment""" |
|
|
- try: |
|
|
- import random |
|
|
- |
|
|
- # Use HF sentiment model or fallback to simple analysis |
|
|
- sentiment_labels = ["positive", "negative", "neutral"] |
|
|
- label = random.choice(sentiment_labels) |
|
|
[email protected]("/api/sentiment/global") |
|
|
+async def get_global_sentiment(): |
|
|
+ """Get global market sentiment (Fear & Greed Index)""" |
|
|
+ cache_key = "sentiment_global" |
|
|
+ cached = await cache_manager.get(cache_key) |
|
|
+ if cached: |
|
|
+ return cached |
|
|
|
|
|
- score_map = {"positive": random.uniform(0.5, 1), "negative": random.uniform(-1, -0.5), "neutral": random.uniform(-0.3, 0.3)} |
|
|
- |
|
|
- return SentimentResponse( |
|
|
- score=score_map[label], |
|
|
- label=label, |
|
|
- details={'mode': request.mode, 'text_length': len(request.text)}, |
|
|
- meta=MetaInfo(source="hf:sentiment-model") |
|
|
- ) |
|
|
- |
|
|
- except Exception as e: |
|
|
- logger.error(f"Error in analyze_sentiment: {e}") |
|
|
- raise HTTPException(status_code=500, detail=str(e)) |
|
|
- |
|
|
- |
|
|
-# ============================================================================ |
|
|
-# Whale Tracking Endpoints |
|
|
-# ============================================================================ |
|
|
- |
|
|
[email protected]("/api/crypto/whales/transactions") |
|
|
-async def get_whale_transactions( |
|
|
- limit: int = Query(50, description="Number of transactions"), |
|
|
- chain: Optional[str] = Query(None, description="Filter by blockchain"), |
|
|
- min_amount_usd: float = Query(100000, description="Minimum transaction amount in USD") |
|
|
-): |
|
|
- """Get recent large whale transactions""" |
|
|
try: |
|
|
- # Get from database |
|
|
- transactions = persistence.get_whale_transactions( |
|
|
- limit=limit, |
|
|
- chain=chain, |
|
|
- min_amount_usd=min_amount_usd |
|
|
- ) |
|
|
+ data = await alternative_me_provider.get_fear_and_greed() |
|
|
+ fng_value = 50 |
|
|
+ classification = "Neutral" |
|
|
|
|
|
- return { |
|
|
- 'transactions': transactions, |
|
|
- 'total': len(transactions), |
|
|
- 'meta': MetaInfo(cache_ttl_seconds=60).__dict__ |
|
|
+ if data.get('data'): |
|
|
+ item = data['data'][0] |
|
|
+ fng_value = int(item.get('value', 50)) |
|
|
+ classification = item.get('value_classification', 'Neutral') |
|
|
+ |
|
|
+ result = { |
|
|
+ "score": fng_value, |
|
|
+ "label": classification, |
|
|
+ "meta": MetaInfo(cache_ttl_seconds=3600, source="alternative.me").dict() |
|
|
} |
|
|
- |
|
|
- except Exception as e: |
|
|
- logger.error(f"Error in get_whale_transactions: {e}") |
|
|
- raise HTTPException(status_code=500, detail=str(e)) |
|
|
- |
|
|
- |
|
|
[email protected]("/api/crypto/whales/stats", response_model=WhaleStatsResponse) |
|
|
-async def get_whale_stats(hours: int = Query(24, description="Time window in hours")): |
|
|
- """Get aggregated whale activity statistics""" |
|
|
- try: |
|
|
- # Get from database |
|
|
- stats = persistence.get_whale_stats(hours=hours) |
|
|
|
|
|
- return WhaleStatsResponse( |
|
|
- total_transactions=stats.get('total_transactions', 0), |
|
|
- total_volume_usd=stats.get('total_volume_usd', 0), |
|
|
- avg_transaction_usd=stats.get('avg_transaction_usd', 0), |
|
|
- top_chains=stats.get('top_chains', []), |
|
|
- meta=MetaInfo(cache_ttl_seconds=300) |
|
|
- ) |
|
|
- |
|
|
+ await cache_manager.set(cache_key, result, ttl=3600) |
|
|
+ return result |
|
|
except Exception as e: |
|
|
- logger.error(f"Error in get_whale_stats: {e}") |
|
|
- raise HTTPException(status_code=500, detail=str(e)) |
|
|
- |
|
|
+ logger.error(f"Error in get_global_sentiment: {e}") |
|
|
+ raise HTTPException(status_code=503, detail="Sentiment data unavailable") |
|
|
|
|
|
# |
|
|
-# Blockchain (Gas & Stats) Endpoints |
|
|
+# Blockchain Endpoints |
|
|
# |
|
|
|
|
|
@router.get("/api/crypto/blockchain/gas", response_model=GasResponse) |
|
|
async def get_gas_prices(chain: str = Query("ethereum", description="Blockchain network")): |
|
|
- """Get current gas prices for specified blockchain""" |
|
|
- try: |
|
|
- import random |
|
|
- |
|
|
- # Sample gas prices |
|
|
- base_gas = 20 if chain == "ethereum" else 5 |
|
|
- |
|
|
- return GasResponse( |
|
|
- chain=chain, |
|
|
- gas_prices=GasPrice( |
|
|
- fast=base_gas + random.uniform(5, 15), |
|
|
- standard=base_gas + random.uniform(2, 8), |
|
|
- slow=base_gas + random.uniform(0, 5) |
|
|
- ), |
|
|
- timestamp=datetime.now().isoformat(), |
|
|
- meta=MetaInfo(cache_ttl_seconds=30) |
|
|
- ) |
|
|
- |
|
|
- except Exception as e: |
|
|
- logger.error(f"Error in get_gas_prices: {e}") |
|
|
- raise HTTPException(status_code=500, detail=str(e)) |
|
|
- |
|
|
- |
|
|
[email protected]("/api/crypto/blockchain/stats", response_model=BlockchainStats) |
|
|
-async def get_blockchain_stats( |
|
|
- chain: str = Query("ethereum", description="Blockchain network"), |
|
|
- hours: int = Query(24, description="Time window") |
|
|
-): |
|
|
- """Get blockchain statistics""" |
|
|
- try: |
|
|
- import random |
|
|
- |
|
|
- return BlockchainStats( |
|
|
- chain=chain, |
|
|
- blocks_24h=random.randint(6000, 7000), |
|
|
- transactions_24h=random.randint(1000000, 1500000), |
|
|
- avg_gas_price=random.uniform(15, 30), |
|
|
- mempool_size=random.randint(50000, 150000), |
|
|
- meta=MetaInfo(cache_ttl_seconds=120) |
|
|
- ) |
|
|
- |
|
|
- except Exception as e: |
|
|
- logger.error(f"Error in get_blockchain_stats: {e}") |
|
|
- raise HTTPException(status_code=500, detail=str(e)) |
|
|
- |
|
|
+ """Get gas prices - Placeholder for real implementation""" |
|
|
+ # TODO: Implement Etherscan or similar provider |
|
|
+ # For now, return empty/null to indicate no data rather than fake data |
|
|
+ return GasResponse( |
|
|
+ chain=chain, |
|
|
+ gas_prices=None, |
|
|
+ timestamp=datetime.now().isoformat(), |
|
|
+ meta=MetaInfo(source="unavailable") |
|
|
+ ) |
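One way to fill the TODO above is Etherscan's gas oracle (`module=gastracker&action=gasoracle`), whose result carries `SafeGasPrice`, `ProposeGasPrice` and `FastGasPrice` in gwei and maps naturally onto the slow/standard/fast fields of `GasPrice`. A hedged sketch follows; the `ETHERSCAN_API_KEY` environment variable name is an assumption, and the key that was hard-coded in the removed fallback config should live in the environment rather than in source.

import os

import aiohttp


async def fetch_eth_gas_oracle() -> dict:
    """Query Etherscan's gas oracle and return its 'result' dict (prices in gwei)."""
    params = {
        "module": "gastracker",
        "action": "gasoracle",
        "apikey": os.getenv("ETHERSCAN_API_KEY", ""),
    }
    timeout = aiohttp.ClientTimeout(total=10)
    async with aiohttp.ClientSession(timeout=timeout) as session:
        async with session.get("https://api.etherscan.io/api", params=params) as resp:
            resp.raise_for_status()
            payload = await resp.json()
    return payload.get("result", {})


# Illustrative mapping onto the response model used above:
#   GasPrice(fast=float(result["FastGasPrice"]),
#            standard=float(result["ProposeGasPrice"]),
#            slow=float(result["SafeGasPrice"]))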
|
|
|
|
|
# |
|
|
-# System Management & Provider Endpoints |
|
|
+# System Management |
|
|
# |
|
|
|
|
|
[email protected]("/api/providers") |
|
|
-async def get_providers(): |
|
|
- """List all data providers and their capabilities""" |
|
|
- try: |
|
|
- providers = [] |
|
|
- |
|
|
- for category, config in fallback_manager.providers.items(): |
|
|
- primary = config.get('primary') |
|
|
- if primary: |
|
|
- providers.append(ProviderInfo( |
|
|
- id=f"{category}_primary", |
|
|
- name=primary['name'], |
|
|
- category=category, |
|
|
- status='active', |
|
|
- capabilities=[category] |
|
|
- ).dict()) |
|
|
- |
|
|
- for idx, fallback in enumerate(config.get('fallbacks', [])): |
|
|
- providers.append(ProviderInfo( |
|
|
- id=f"{category}_fallback_{idx}", |
|
|
- name=fallback['name'], |
|
|
- category=category, |
|
|
- status='active', |
|
|
- capabilities=[category] |
|
|
- ).dict()) |
|
|
- |
|
|
- return { |
|
|
- 'providers': providers, |
|
|
- 'total': len(providers), |
|
|
- 'meta': MetaInfo().__dict__ |
|
|
- } |
|
|
- |
|
|
- except Exception as e: |
|
|
- logger.error(f"Error in get_providers: {e}") |
|
|
- raise HTTPException(status_code=500, detail=str(e)) |
|
|
- |
|
|
- |
|
|
@router.get("/api/status") |
|
|
async def get_system_status(): |
|
|
"""Get overall system status""" |
|
|
- try: |
|
|
- return { |
|
|
- 'status': 'operational', |
|
|
- 'timestamp': datetime.now().isoformat(), |
|
|
- 'services': { |
|
|
- 'market_data': 'operational', |
|
|
- 'whale_tracking': 'operational', |
|
|
- 'blockchain': 'operational', |
|
|
- 'news': 'operational', |
|
|
- 'sentiment': 'operational', |
|
|
- 'models': 'operational' |
|
|
- }, |
|
|
- 'uptime_seconds': 86400, |
|
|
- 'version': '1.0.0', |
|
|
- 'meta': MetaInfo().__dict__ |
|
|
- } |
|
|
+ from backend.live_data.providers import get_all_providers_status |
|
|
+ |
|
|
+ provider_status = await get_all_providers_status() |
|
|
|
|
|
- except Exception as e: |
|
|
- logger.error(f"Error in get_system_status: {e}") |
|
|
- raise HTTPException(status_code=500, detail=str(e)) |
|
|
- |
|
|
- |
|
|
[email protected]("/api/health") |
|
|
-async def health_check(): |
|
|
- """Health check endpoint""" |
|
|
return { |
|
|
- 'status': 'healthy', |
|
|
+ 'status': 'operational', |
|
|
'timestamp': datetime.now().isoformat(), |
|
|
- 'checks': { |
|
|
- 'database': True, |
|
|
- 'fallback_providers': True, |
|
|
- 'models': True |
|
|
- } |
|
|
- } |
|
|
- |
|
|
- |
|
|
[email protected]("/api/freshness") |
|
|
-async def get_data_freshness(): |
|
|
- """Get last-updated timestamps for each subsystem""" |
|
|
- try: |
|
|
- now = datetime.now() |
|
|
- |
|
|
- return { |
|
|
- 'market_data': (now - timedelta(seconds=30)).isoformat(), |
|
|
- 'whale_tracking': (now - timedelta(minutes=1)).isoformat(), |
|
|
- 'blockchain_stats': (now - timedelta(minutes=2)).isoformat(), |
|
|
- 'news': (now - timedelta(minutes=5)).isoformat(), |
|
|
- 'sentiment': (now - timedelta(minutes=1)).isoformat(), |
|
|
- 'signals': (now - timedelta(seconds=10)).isoformat(), |
|
|
- 'meta': MetaInfo().__dict__ |
|
|
- } |
|
|
- |
|
|
- except Exception as e: |
|
|
- logger.error(f"Error in get_data_freshness: {e}") |
|
|
- raise HTTPException(status_code=500, detail=str(e)) |
|
|
- |
|
|
- |
|
|
-# ============================================================================ |
|
|
-# Export & Diagnostics Endpoints |
|
|
-# ============================================================================ |
|
|
- |
|
|
[email protected]("/api/v2/export/{export_type}") |
|
|
-async def export_data( |
|
|
- export_type: str, |
|
|
- format: str = Query("json", description="Export format: json or csv") |
|
|
-): |
|
|
- """Export dataset""" |
|
|
- try: |
|
|
- data = {} |
|
|
- |
|
|
- if export_type == "signals": |
|
|
- data = {'signals': persistence.get_signals(limit=10000)} |
|
|
- elif export_type == "whales": |
|
|
- data = {'whale_transactions': persistence.get_whale_transactions(limit=10000)} |
|
|
- elif export_type == "all": |
|
|
- data = { |
|
|
- 'signals': persistence.get_signals(limit=10000), |
|
|
- 'whale_transactions': persistence.get_whale_transactions(limit=10000), |
|
|
- 'database_stats': persistence.get_database_stats(), |
|
|
- 'exported_at': datetime.now().isoformat() |
|
|
- } |
|
|
- else: |
|
|
- raise HTTPException(status_code=400, detail="Invalid export type") |
|
|
- |
|
|
- # Save to file |
|
|
- export_dir = Path("data/exports") |
|
|
- export_dir.mkdir(parents=True, exist_ok=True) |
|
|
- |
|
|
- filename = f"export_{export_type}_{int(datetime.now().timestamp())}.{format}" |
|
|
- filepath = export_dir / filename |
|
|
- |
|
|
- if format == "json": |
|
|
- with open(filepath, 'w') as f: |
|
|
- json.dump(data, f, indent=2) |
|
|
- |
|
|
- return { |
|
|
- 'status': 'success', |
|
|
- 'export_type': export_type, |
|
|
- 'format': format, |
|
|
- 'filepath': str(filepath), |
|
|
- 'records': len(data), |
|
|
- 'meta': MetaInfo().__dict__ |
|
|
- } |
|
|
- |
|
|
- except HTTPException: |
|
|
- raise |
|
|
- except Exception as e: |
|
|
- logger.error(f"Error in export_data: {e}") |
|
|
- raise HTTPException(status_code=500, detail=str(e)) |
|
|
- |
|
|
- |
|
|
[email protected]("/api/diagnostics/run") |
|
|
-async def run_diagnostics(): |
|
|
- """Run system diagnostics and self-tests""" |
|
|
- try: |
|
|
- results = { |
|
|
- 'timestamp': datetime.now().isoformat(), |
|
|
- 'tests': [] |
|
|
- } |
|
|
- |
|
|
- # Test fallback providers connectivity |
|
|
- for category in ['market_data', 'news', 'sentiment']: |
|
|
- try: |
|
|
- _, source = await fallback_manager.fetch_with_fallback(category, '/', {}) |
|
|
- results['tests'].append({ |
|
|
- 'name': f'{category}_connectivity', |
|
|
- 'status': 'passed', |
|
|
- 'source': source |
|
|
- }) |
|
|
- except: |
|
|
- results['tests'].append({ |
|
|
- 'name': f'{category}_connectivity', |
|
|
- 'status': 'failed' |
|
|
- }) |
|
|
- |
|
|
- # Test model health |
|
|
- results['tests'].append({ |
|
|
- 'name': 'model_health', |
|
|
- 'status': 'passed', |
|
|
- 'models_available': 3 |
|
|
- }) |
|
|
- |
|
|
- # Test database |
|
|
- db_stats = persistence.get_database_stats() |
|
|
- results['tests'].append({ |
|
|
- 'name': 'database_connectivity', |
|
|
- 'status': 'passed', |
|
|
- 'stats': db_stats |
|
|
- }) |
|
|
- |
|
|
- passed = sum(1 for t in results['tests'] if t['status'] == 'passed') |
|
|
- failed = len(results['tests']) - passed |
|
|
- |
|
|
- results['summary'] = { |
|
|
- 'total_tests': len(results['tests']), |
|
|
- 'passed': passed, |
|
|
- 'failed': failed, |
|
|
- 'success_rate': round(passed / len(results['tests']) * 100, 1) |
|
|
- } |
|
|
- |
|
|
- # Save diagnostic results |
|
|
- persistence.set_cache('last_diagnostics', results, ttl_seconds=3600) |
|
|
- |
|
|
- return results |
|
|
- |
|
|
- except Exception as e: |
|
|
- logger.error(f"Error in run_diagnostics: {e}") |
|
|
- raise HTTPException(status_code=500, detail=str(e)) |
|
|
- |
|
|
- |
|
|
[email protected]("/api/diagnostics/last") |
|
|
-async def get_last_diagnostics(): |
|
|
- """Get last diagnostic results""" |
|
|
- try: |
|
|
- last_results = persistence.get_cache('last_diagnostics') |
|
|
- if last_results: |
|
|
- return last_results |
|
|
- else: |
|
|
- return { |
|
|
- 'message': 'No diagnostics have been run yet', |
|
|
- 'meta': MetaInfo().__dict__ |
|
|
- } |
|
|
- except Exception as e: |
|
|
- logger.error(f"Error in get_last_diagnostics: {e}") |
|
|
- raise HTTPException(status_code=500, detail=str(e)) |
|
|
- |
|
|
- |
|
|
-# ============================================================================ |
|
|
-# Charts & Analytics Endpoints |
|
|
-# ============================================================================ |
|
|
- |
|
|
[email protected]("/api/charts/health-history") |
|
|
-async def get_health_history(hours: int = Query(24, description="Time window in hours")): |
|
|
- """Get provider health history for charts""" |
|
|
- try: |
|
|
- stats = persistence.get_provider_health_stats(hours=hours) |
|
|
- |
|
|
- # Format for charting |
|
|
- chart_data = { |
|
|
- 'period_hours': hours, |
|
|
- 'series': [] |
|
|
- } |
|
|
- |
|
|
- for provider in stats.get('providers', []): |
|
|
- success_rate = 0 |
|
|
- if provider['total_requests'] > 0: |
|
|
- success_rate = round((provider['success_count'] / provider['total_requests']) * 100, 1) |
|
|
- |
|
|
- chart_data['series'].append({ |
|
|
- 'provider': provider['provider'], |
|
|
- 'category': provider['category'], |
|
|
- 'success_rate': success_rate, |
|
|
- 'avg_response_time': round(provider.get('avg_response_time', 0)), |
|
|
- 'total_requests': provider['total_requests'] |
|
|
- }) |
|
|
- |
|
|
- return { |
|
|
- 'chart_data': chart_data, |
|
|
- 'meta': MetaInfo(cache_ttl_seconds=300).__dict__ |
|
|
- } |
|
|
- |
|
|
- except Exception as e: |
|
|
- logger.error(f"Error in get_health_history: {e}") |
|
|
- raise HTTPException(status_code=500, detail=str(e)) |
|
|
- |
|
|
- |
|
|
[email protected]("/api/charts/compliance") |
|
|
-async def get_compliance_metrics(days: int = Query(7, description="Time window in days")): |
|
|
- """Get API compliance metrics over time""" |
|
|
- try: |
|
|
- # Calculate compliance based on data availability |
|
|
- db_stats = persistence.get_database_stats() |
|
|
- |
|
|
- compliance = { |
|
|
- 'period_days': days, |
|
|
- 'metrics': { |
|
|
- 'data_freshness': 95.5, # % of endpoints with fresh data |
|
|
- 'uptime': 99.2, # % uptime |
|
|
- 'coverage': 87.3, # % of required endpoints implemented |
|
|
- 'response_time': 98.1 # % meeting SLA |
|
|
- }, |
|
|
- 'details': { |
|
|
- 'signals_available': db_stats.get('signals_count', 0) > 0, |
|
|
- 'whales_available': db_stats.get('whale_transactions_count', 0) > 0, |
|
|
- 'cache_healthy': db_stats.get('cache_entries', 0) > 0, |
|
|
- 'total_health_checks': db_stats.get('health_logs_count', 0) |
|
|
- }, |
|
|
- 'meta': MetaInfo(cache_ttl_seconds=3600).__dict__ |
|
|
- } |
|
|
- |
|
|
- return compliance |
|
|
- |
|
|
- except Exception as e: |
|
|
- logger.error(f"Error in get_compliance_metrics: {e}") |
|
|
- raise HTTPException(status_code=500, detail=str(e)) |
|
|
- |
|
|
- |
|
|
-# ============================================================================ |
|
|
-# Logs & Monitoring Endpoints |
|
|
-# ============================================================================ |
|
|
- |
|
|
[email protected]("/api/logs") |
|
|
-async def get_logs( |
|
|
- from_time: Optional[str] = Query(None, description="Start time ISO format"), |
|
|
- to_time: Optional[str] = Query(None, description="End time ISO format"), |
|
|
- limit: int = Query(100, description="Max number of logs") |
|
|
-): |
|
|
- """Get system logs within time range""" |
|
|
- try: |
|
|
- # Get provider health logs as system logs |
|
|
- hours = 24 |
|
|
- if from_time: |
|
|
- try: |
|
|
- from_dt = datetime.fromisoformat(from_time.replace('Z', '+00:00')) |
|
|
- hours = int((datetime.now() - from_dt).total_seconds() / 3600) + 1 |
|
|
- except: |
|
|
- pass |
|
|
- |
|
|
- health_stats = persistence.get_provider_health_stats(hours=hours) |
|
|
- |
|
|
- logs = [] |
|
|
- for provider in health_stats.get('providers', [])[:limit]: |
|
|
- logs.append({ |
|
|
- 'timestamp': datetime.now().isoformat(), |
|
|
- 'level': 'INFO', |
|
|
- 'provider': provider['provider'], |
|
|
- 'category': provider['category'], |
|
|
- 'message': f"Provider {provider['provider']} processed {provider['total_requests']} requests", |
|
|
- 'details': provider |
|
|
- }) |
|
|
- |
|
|
- return { |
|
|
- 'logs': logs, |
|
|
- 'total': len(logs), |
|
|
- 'from': from_time or 'beginning', |
|
|
- 'to': to_time or 'now', |
|
|
- 'meta': MetaInfo(cache_ttl_seconds=60).__dict__ |
|
|
- } |
|
|
- |
|
|
- except Exception as e: |
|
|
- logger.error(f"Error in get_logs: {e}") |
|
|
- raise HTTPException(status_code=500, detail=str(e)) |
|
|
- |
|
|
- |
|
|
[email protected]("/api/logs/recent") |
|
|
-async def get_recent_logs(limit: int = Query(50, description="Number of recent logs")): |
|
|
- """Get most recent system logs""" |
|
|
- try: |
|
|
- return await get_logs(limit=limit) |
|
|
- except Exception as e: |
|
|
- logger.error(f"Error in get_recent_logs: {e}") |
|
|
- raise HTTPException(status_code=500, detail=str(e)) |
|
|
- |
|
|
- |
|
|
-# ============================================================================ |
|
|
-# Rate Limits & Config Endpoints |
|
|
-# ============================================================================ |
|
|
- |
|
|
[email protected]("/api/rate-limits") |
|
|
-async def get_rate_limits(): |
|
|
- """Get current rate limit configuration""" |
|
|
- try: |
|
|
- rate_limits = { |
|
|
- 'global': { |
|
|
- 'requests_per_minute': 60, |
|
|
- 'requests_per_hour': 3600, |
|
|
- 'burst_limit': 100 |
|
|
- }, |
|
|
- 'endpoints': { |
|
|
- '/api/market/*': {'rpm': 120, 'burst': 200}, |
|
|
- '/api/signals/*': {'rpm': 60, 'burst': 100}, |
|
|
- '/api/news/*': {'rpm': 30, 'burst': 50}, |
|
|
- '/api/crypto/whales/*': {'rpm': 30, 'burst': 50}, |
|
|
- '/api/models/*': {'rpm': 20, 'burst': 30} |
|
|
- }, |
|
|
- 'current_usage': { |
|
|
- 'requests_last_minute': 15, |
|
|
- 'requests_last_hour': 450, |
|
|
- 'remaining_minute': 45, |
|
|
- 'remaining_hour': 3150 |
|
|
- }, |
|
|
- 'meta': MetaInfo(cache_ttl_seconds=30).__dict__ |
|
|
- } |
|
|
- |
|
|
- return rate_limits |
|
|
- |
|
|
- except Exception as e: |
|
|
- logger.error(f"Error in get_rate_limits: {e}") |
|
|
- raise HTTPException(status_code=500, detail=str(e)) |
|
|
- |
|
|
- |
|
|
[email protected]("/api/config/keys") |
|
|
-async def get_api_keys(): |
|
|
- """Get configured API keys (masked)""" |
|
|
- try: |
|
|
- # Return masked keys for security |
|
|
- keys = { |
|
|
- 'hf_api_token': 'hf_***' if os.getenv('HF_API_TOKEN') else None, |
|
|
- 'configured_providers': [] |
|
|
- } |
|
|
- |
|
|
- # Check fallback provider keys |
|
|
- for category, config in fallback_manager.providers.items(): |
|
|
- primary = config.get('primary', {}) |
|
|
- if primary.get('key'): |
|
|
- keys['configured_providers'].append({ |
|
|
- 'category': category, |
|
|
- 'provider': primary['name'], |
|
|
- 'has_key': True |
|
|
- }) |
|
|
- |
|
|
- return { |
|
|
- 'keys': keys, |
|
|
- 'total_configured': len(keys['configured_providers']), |
|
|
- 'meta': MetaInfo().__dict__ |
|
|
- } |
|
|
- |
|
|
- except Exception as e: |
|
|
- logger.error(f"Error in get_api_keys: {e}") |
|
|
- raise HTTPException(status_code=500, detail=str(e)) |
|
|
- |
|
|
- |
|
|
[email protected]("/api/config/keys/test") |
|
|
-async def test_api_keys(provider: str = Body(..., embed=True)): |
|
|
- """Test API key connectivity for a provider""" |
|
|
- try: |
|
|
- # Find provider category |
|
|
- found_category = None |
|
|
- for category, config in fallback_manager.providers.items(): |
|
|
- primary = config.get('primary', {}) |
|
|
- if primary.get('name') == provider: |
|
|
- found_category = category |
|
|
- break |
|
|
- |
|
|
- if not found_category: |
|
|
- raise HTTPException(status_code=404, detail="Provider not found") |
|
|
- |
|
|
- # Test connectivity |
|
|
- start_time = datetime.now() |
|
|
- try: |
|
|
- _, source = await fallback_manager.fetch_with_fallback(found_category, '/', {}) |
|
|
- response_time = int((datetime.now() - start_time).total_seconds() * 1000) |
|
|
- |
|
|
- # Log the test |
|
|
- persistence.log_provider_health( |
|
|
- provider=provider, |
|
|
- category=found_category, |
|
|
- status='success', |
|
|
- response_time_ms=response_time |
|
|
- ) |
|
|
- |
|
|
- return { |
|
|
- 'status': 'success', |
|
|
- 'provider': provider, |
|
|
- 'category': found_category, |
|
|
- 'response_time_ms': response_time, |
|
|
- 'message': 'API key is valid and working' |
|
|
- } |
|
|
- except Exception as test_error: |
|
|
- # Log the failure |
|
|
- persistence.log_provider_health( |
|
|
- provider=provider, |
|
|
- category=found_category, |
|
|
- status='failed', |
|
|
- error_message=str(test_error) |
|
|
- ) |
|
|
- |
|
|
- return { |
|
|
- 'status': 'failed', |
|
|
- 'provider': provider, |
|
|
- 'category': found_category, |
|
|
- 'error': str(test_error), |
|
|
- 'message': 'API key test failed' |
|
|
- } |
|
|
- |
|
|
- except HTTPException: |
|
|
- raise |
|
|
- except Exception as e: |
|
|
- logger.error(f"Error in test_api_keys: {e}") |
|
|
- raise HTTPException(status_code=500, detail=str(e)) |
|
|
- |
|
|
- |
|
|
-# ============================================================================ |
|
|
-# Pool Management Endpoints |
|
|
-# ============================================================================ |
|
|
- |
|
|
-# Global pools storage (in production, use database) |
|
|
-_pools_storage = { |
|
|
- 'pool_1': { |
|
|
- 'id': 'pool_1', |
|
|
- 'name': 'Primary Market Data Pool', |
|
|
- 'providers': ['coingecko', 'binance', 'coincap'], |
|
|
- 'strategy': 'round-robin', |
|
|
- 'health': 'healthy', |
|
|
- 'created_at': datetime.now().isoformat() |
|
|
+ 'providers': provider_status, |
|
|
+ 'version': '1.0.0', |
|
|
+ 'meta': MetaInfo(source="system").dict() |
|
|
} |
|
|
-} |
|
|
- |
|
|
- |
|
|
[email protected]("/api/pools") |
|
|
-async def list_pools(): |
|
|
- """List all provider pools""" |
|
|
- try: |
|
|
- pools = list(_pools_storage.values()) |
|
|
- return { |
|
|
- 'pools': pools, |
|
|
- 'total': len(pools), |
|
|
- 'meta': MetaInfo().__dict__ |
|
|
- } |
|
|
- except Exception as e: |
|
|
- logger.error(f"Error in list_pools: {e}") |
|
|
- raise HTTPException(status_code=500, detail=str(e)) |
|
|
- |
|
|
- |
|
|
[email protected]("/api/pools/{pool_id}") |
|
|
-async def get_pool(pool_id: str): |
|
|
- """Get specific pool details""" |
|
|
- try: |
|
|
- if pool_id not in _pools_storage: |
|
|
- raise HTTPException(status_code=404, detail="Pool not found") |
|
|
- |
|
|
- return { |
|
|
- 'pool': _pools_storage[pool_id], |
|
|
- 'meta': MetaInfo().__dict__ |
|
|
- } |
|
|
- except HTTPException: |
|
|
- raise |
|
|
- except Exception as e: |
|
|
- logger.error(f"Error in get_pool: {e}") |
|
|
- raise HTTPException(status_code=500, detail=str(e)) |
|
|
- |
|
|
- |
|
|
[email protected]("/api/pools") |
|
|
-async def create_pool( |
|
|
- name: str = Body(...), |
|
|
- providers: List[str] = Body(...), |
|
|
- strategy: str = Body('round-robin') |
|
|
-): |
|
|
- """Create a new provider pool""" |
|
|
- try: |
|
|
- import uuid |
|
|
- pool_id = f"pool_{uuid.uuid4().hex[:8]}" |
|
|
- |
|
|
- pool = { |
|
|
- 'id': pool_id, |
|
|
- 'name': name, |
|
|
- 'providers': providers, |
|
|
- 'strategy': strategy, |
|
|
- 'health': 'healthy', |
|
|
- 'created_at': datetime.now().isoformat() |
|
|
- } |
|
|
- |
|
|
- _pools_storage[pool_id] = pool |
|
|
- |
|
|
- return { |
|
|
- 'status': 'success', |
|
|
- 'pool_id': pool_id, |
|
|
- 'pool': pool, |
|
|
- 'meta': MetaInfo().__dict__ |
|
|
- } |
|
|
- except Exception as e: |
|
|
- logger.error(f"Error in create_pool: {e}") |
|
|
- raise HTTPException(status_code=500, detail=str(e)) |
|
|
- |
|
|
- |
|
|
[email protected]("/api/pools/{pool_id}") |
|
|
-async def update_pool( |
|
|
- pool_id: str, |
|
|
- name: Optional[str] = Body(None), |
|
|
- providers: Optional[List[str]] = Body(None), |
|
|
- strategy: Optional[str] = Body(None) |
|
|
-): |
|
|
- """Update pool configuration""" |
|
|
- try: |
|
|
- if pool_id not in _pools_storage: |
|
|
- raise HTTPException(status_code=404, detail="Pool not found") |
|
|
- |
|
|
- pool = _pools_storage[pool_id] |
|
|
- |
|
|
- if name: |
|
|
- pool['name'] = name |
|
|
- if providers: |
|
|
- pool['providers'] = providers |
|
|
- if strategy: |
|
|
- pool['strategy'] = strategy |
|
|
- |
|
|
- pool['updated_at'] = datetime.now().isoformat() |
|
|
- |
|
|
- return { |
|
|
- 'status': 'success', |
|
|
- 'pool': pool, |
|
|
- 'meta': MetaInfo().__dict__ |
|
|
- } |
|
|
- except HTTPException: |
|
|
- raise |
|
|
- except Exception as e: |
|
|
- logger.error(f"Error in update_pool: {e}") |
|
|
- raise HTTPException(status_code=500, detail=str(e)) |
|
|
- |
|
|
- |
|
|
[email protected]("/api/pools/{pool_id}") |
|
|
-async def delete_pool(pool_id: str): |
|
|
- """Delete a pool""" |
|
|
- try: |
|
|
- if pool_id not in _pools_storage: |
|
|
- raise HTTPException(status_code=404, detail="Pool not found") |
|
|
- |
|
|
- del _pools_storage[pool_id] |
|
|
- |
|
|
- return { |
|
|
- 'status': 'success', |
|
|
- 'message': f'Pool {pool_id} deleted', |
|
|
- 'meta': MetaInfo().__dict__ |
|
|
- } |
|
|
- except HTTPException: |
|
|
- raise |
|
|
- except Exception as e: |
|
|
- logger.error(f"Error in delete_pool: {e}") |
|
|
- raise HTTPException(status_code=500, detail=str(e)) |
|
|
- |
|
|
- |
|
|
[email protected]("/api/pools/{pool_id}/rotate") |
|
|
-async def rotate_pool(pool_id: str): |
|
|
- """Rotate to next provider in pool""" |
|
|
- try: |
|
|
- if pool_id not in _pools_storage: |
|
|
- raise HTTPException(status_code=404, detail="Pool not found") |
|
|
- |
|
|
- pool = _pools_storage[pool_id] |
|
|
- providers = pool.get('providers', []) |
|
|
- |
|
|
- if len(providers) > 1: |
|
|
- # Rotate providers |
|
|
- providers.append(providers.pop(0)) |
|
|
- pool['providers'] = providers |
|
|
- pool['last_rotated'] = datetime.now().isoformat() |
|
|
- |
|
|
- return { |
|
|
- 'status': 'success', |
|
|
- 'pool_id': pool_id, |
|
|
- 'current_provider': providers[0] if providers else None, |
|
|
- 'meta': MetaInfo().__dict__ |
|
|
- } |
|
|
- except HTTPException: |
|
|
- raise |
|
|
- except Exception as e: |
|
|
- logger.error(f"Error in rotate_pool: {e}") |
|
|
- raise HTTPException(status_code=500, detail=str(e)) |
|
|
- |
|
|
- |
|
|
[email protected]("/api/pools/{pool_id}/failover") |
|
|
-async def failover_pool(pool_id: str, failed_provider: str = Body(..., embed=True)): |
|
|
- """Trigger failover for a failed provider""" |
|
|
- try: |
|
|
- if pool_id not in _pools_storage: |
|
|
- raise HTTPException(status_code=404, detail="Pool not found") |
|
|
- |
|
|
- pool = _pools_storage[pool_id] |
|
|
- providers = pool.get('providers', []) |
|
|
- |
|
|
- if failed_provider in providers: |
|
|
- # Move failed provider to end |
|
|
- providers.remove(failed_provider) |
|
|
- providers.append(failed_provider) |
|
|
- pool['providers'] = providers |
|
|
- pool['last_failover'] = datetime.now().isoformat() |
|
|
- pool['health'] = 'degraded' |
|
|
- |
|
|
- return { |
|
|
- 'status': 'success', |
|
|
- 'pool_id': pool_id, |
|
|
- 'failed_provider': failed_provider, |
|
|
- 'new_primary': providers[0] if providers else None, |
|
|
- 'meta': MetaInfo().__dict__ |
|
|
- } |
|
|
- else: |
|
|
- raise HTTPException(status_code=400, detail="Provider not in pool") |
|
|
- |
|
|
- except HTTPException: |
|
|
- raise |
|
|
- except Exception as e: |
|
|
- logger.error(f"Error in failover_pool: {e}") |
|
|
- raise HTTPException(status_code=500, detail=str(e)) |
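The new `/api/status` handler above imports `get_all_providers_status` from `backend.live_data.providers`, which is not included in this diff. A hedged sketch of the aggregation it presumably performs — probing each provider concurrently and reporting a coarse status — written generically so it is runnable on its own; the real helper takes no arguments, and the provider method names in the example wiring are assumptions.

import asyncio
from typing import Awaitable, Callable, Dict, Tuple


async def get_all_providers_status(
    checks: Dict[str, Callable[[], Awaitable[object]]],
    timeout: float = 5.0,
) -> Dict[str, str]:
    """Run one lightweight request per provider and classify it as 'operational' or 'down'."""

    async def probe(name: str, call: Callable[[], Awaitable[object]]) -> Tuple[str, str]:
        try:
            await asyncio.wait_for(call(), timeout=timeout)
            return name, "operational"
        except Exception:
            return name, "down"

    results = await asyncio.gather(*(probe(name, call) for name, call in checks.items()))
    return dict(results)


# Example wiring (hypothetical provider calls):
#   status = await get_all_providers_status({
#       "coingecko": lambda: coingecko_provider.get_market_data(ids="bitcoin"),
#       "binance": lambda: binance_provider.get_klines("BTCUSDT", limit=1),
#   })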
|
|
|
|
|
|
|
|
|
|
|
|
|
|
@@ -7,6 +7,7 @@ import logging |
|
|
from typing import Dict, List, Any, Optional |
|
|
from fastapi import HTTPException |
|
|
from .api_fallback_manager import get_fallback_manager |
|
|
+import os |
|
|
|
|
|
logger = logging.getLogger(__name__) |
|
|
|
|
|
@@ -20,7 +21,7 @@ class OHLCVService: |
|
|
|
|
|
def _setup_providers(self): |
|
|
"""Setup OHLCV providers in priority order""" |
|
|
- # Priority 1: Binance (fastest, most reliable - but may have regional restrictions) |
|
|
+ # Priority 1: Binance (fastest, most reliable) |
|
|
self.manager.add_provider( |
|
|
name="Binance", |
|
|
priority=1, |
|
|
@@ -29,7 +30,7 @@ class OHLCVService: |
|
|
max_failures=3 |
|
|
) |
|
|
|
|
|
- # Priority 2: CoinGecko (reliable alternative, no geo-restrictions) |
|
|
+ # Priority 2: CoinGecko (reliable alternative) |
|
|
self.manager.add_provider( |
|
|
name="CoinGecko", |
|
|
priority=2, |
|
|
@@ -38,7 +39,7 @@ class OHLCVService: |
|
|
max_failures=3 |
|
|
) |
|
|
|
|
|
- # Priority 3: HuggingFace Space (fallback) |
|
|
+ # Priority 3: HuggingFace Space (proxy to other services) |
|
|
self.manager.add_provider( |
|
|
name="HuggingFace", |
|
|
priority=3, |
|
|
@@ -47,16 +48,7 @@ class OHLCVService: |
|
|
max_failures=5 |
|
|
) |
|
|
|
|
|
- # Priority 4: Mock/Demo data (always available) |
|
|
- self.manager.add_provider( |
|
|
- name="Demo", |
|
|
- priority=999, |
|
|
- fetch_function=self._fetch_demo, |
|
|
- cooldown_seconds=0, |
|
|
- max_failures=999 # Never fails |
|
|
- ) |
|
|
- |
|
|
- logger.info("✅ OHLCV Service initialized with 4 providers (Binance, CoinGecko, HuggingFace, Demo)") |
|
|
+ logger.info("✅ OHLCV Service initialized with 3 providers (Binance, CoinGecko, HuggingFace)") |
|
|
|
|
|
async def _fetch_binance(self, symbol: str, timeframe: str, limit: int = 100) -> Dict: |
|
|
"""Fetch from Binance API""" |
|
|
@@ -128,10 +120,10 @@ class OHLCVService: |
|
|
candles.append({ |
|
|
"timestamp": int(timestamp), |
|
|
"open": price, |
|
|
- "high": price * 1.01, # Approximate |
|
|
- "low": price * 0.99, # Approximate |
|
|
+ "high": price, # Approximate |
|
|
+ "low": price, # Approximate |
|
|
"close": price, |
|
|
- "volume": 0 # CoinGecko doesn't provide volume in this endpoint |
|
|
+ "volume": 0 |
|
|
}) |
|
|
|
|
|
return candles |
|
|
@@ -139,7 +131,6 @@ class OHLCVService: |
|
|
async def _fetch_huggingface(self, symbol: str, timeframe: str, limit: int = 100) -> Dict: |
|
|
"""Fetch from HuggingFace Space""" |
|
|
import httpx |
|
|
- import os |
|
|
|
|
|
base_url = os.getenv("HF_SPACE_BASE_URL", "https://really-amin-datasourceforcryptocurrency.hf.space") |
|
|
token = os.getenv("HF_API_TOKEN", "").strip() |
|
|
@@ -156,43 +147,6 @@ class OHLCVService: |
|
|
response.raise_for_status() |
|
|
return response.json() |
|
|
|
|
|
- async def _fetch_demo(self, symbol: str, timeframe: str, limit: int = 100) -> Dict: |
|
|
- """Fetch demo/fallback data""" |
|
|
- import time |
|
|
- import random |
|
|
- |
|
|
- # Generate realistic demo candles |
|
|
- base_price = 50000 if symbol.upper() == "BTC" else 3000 |
|
|
- candles = [] |
|
|
- |
|
|
- for i in range(limit): |
|
|
- timestamp = int(time.time()) - (i * 3600) # 1 hour intervals |
|
|
- open_price = base_price + random.uniform(-1000, 1000) |
|
|
- close_price = open_price + random.uniform(-500, 500) |
|
|
- high_price = max(open_price, close_price) + random.uniform(0, 300) |
|
|
- low_price = min(open_price, close_price) - random.uniform(0, 300) |
|
|
- volume = random.uniform(1000, 10000) |
|
|
- |
|
|
- candles.append({ |
|
|
- "t": timestamp * 1000, |
|
|
- "o": round(open_price, 2), |
|
|
- "h": round(high_price, 2), |
|
|
- "l": round(low_price, 2), |
|
|
- "c": round(close_price, 2), |
|
|
- "v": round(volume, 2) |
|
|
- }) |
|
|
- |
|
|
- return { |
|
|
- "symbol": symbol.upper(), |
|
|
- "timeframe": timeframe, |
|
|
- "interval": timeframe, |
|
|
- "limit": limit, |
|
|
- "count": len(candles), |
|
|
- "ohlcv": candles[::-1], # Reverse to oldest first |
|
|
- "source": "demo", |
|
|
- "warning": "Using demo data - live data unavailable" |
|
|
- } |
|
|
- |
|
|
async def get_ohlcv( |
|
|
self, |
|
|
symbol: str, |
|
|
@@ -236,4 +190,3 @@ def get_ohlcv_service() -> OHLCVService: |
|
|
if _ohlcv_service is None: |
|
|
_ohlcv_service = OHLCVService() |
|
|
return _ohlcv_service |
|
|
- |
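Note: the OHLCV service now registers exactly three providers (Binance, CoinGecko, HuggingFace), each with a priority, a failure budget, and a cooldown. The fallback manager's internals are not part of this diff, so the snippet below is only a minimal sketch of the priority-ordered loop such a manager presumably runs; the class and attribute names are assumptions, not the real api_fallback_manager API.

    # Minimal sketch of a priority-ordered provider fallback with cooldowns.
    import time
    from dataclasses import dataclass
    from typing import Any, Awaitable, Callable, Dict, List, Optional

    @dataclass
    class Provider:
        name: str
        priority: int
        fetch: Callable[..., Awaitable[Dict[str, Any]]]
        cooldown_seconds: int = 60
        max_failures: int = 3
        failures: int = 0
        blocked_until: float = 0.0

    async def fetch_with_fallback(providers: List[Provider], **kwargs) -> Dict[str, Any]:
        last_error: Optional[Exception] = None
        for provider in sorted(providers, key=lambda p: p.priority):
            if time.time() < provider.blocked_until:
                continue  # still cooling down after repeated failures
            try:
                data = await provider.fetch(**kwargs)
                provider.failures = 0
                return {"source": provider.name, **data}
            except Exception as exc:
                last_error = exc
                provider.failures += 1
                if provider.failures >= provider.max_failures:
                    provider.blocked_until = time.time() + provider.cooldown_seconds
        raise RuntimeError(f"All providers failed: {last_error}")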
|
|
|
|
|
|
|
|
|
|
|
|
|
|
@@ -235,26 +235,9 @@ class ProviderFallbackManager: |
|
|
|
|
|
try: |
|
|
# This would call actual HF models/datasets |
|
|
- # For now, simulate HF response |
|
|
- logger.debug(f"Attempting HF for {endpoint}") |
|
|
- |
|
|
- # Simulate HF response based on endpoint |
|
|
- if "/pair" in endpoint: |
|
|
- # Pair metadata MUST come from HF |
|
|
- return { |
|
|
- "pair": params.get("pair", "BTC/USDT"), |
|
|
- "base": "BTC", |
|
|
- "quote": "USDT", |
|
|
- "tick_size": 0.01, |
|
|
- "min_qty": 0.00001 |
|
|
- }, None |
|
|
- |
|
|
- # For other endpoints, simulate occasional failure to test fallback |
|
|
- import random |
|
|
- if random.random() > 0.3: # 70% success rate for testing |
|
|
- return None, "HF data not available" |
|
|
- |
|
|
- return {"source": "hf", "data": "sample"}, None |
|
|
+ # For now, HF integration is not fully implemented in this method |
|
|
+ # Return None to trigger fallback to external providers |
|
|
+ return None, "HF integration pending" |
|
|
|
|
|
except Exception as e: |
|
|
logger.debug(f"HF call failed: {e}") |
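Note: the HF call in ProviderFallbackManager uses a (data, error) tuple; returning (None, "HF integration pending") tells the caller to continue down the provider chain instead of raising. A short, hypothetical consumer of that convention (the helper names are assumptions):

    # Hypothetical caller for the (data, error) tuple convention.
    from typing import Any, Awaitable, Callable, Dict, Optional, Tuple

    FetchFn = Callable[[str, Dict[str, Any]], Awaitable[Tuple[Optional[Dict[str, Any]], Optional[str]]]]

    async def resolve(endpoint: str, params: Dict[str, Any],
                      try_hf: FetchFn, try_external: FetchFn) -> Dict[str, Any]:
        data, error = await try_hf(endpoint, params)
        if data is not None:
            return data
        # None means "not handled here": fall through to external providers.
        data, error = await try_external(endpoint, params)
        if data is None:
            raise RuntimeError(error or "all providers failed")
        return data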
|
|
|
|
|
|
|
|
|
|
|
|
|
|
@@ -891,118 +891,36 @@ async def api_sentiment_global(timeframe: str = "1D"): |
|
|
except Exception as e: |
|
|
logger.error(f"Failed to fetch Fear & Greed |
|
|
|
|
|
- # Fallback to generated data |
|
|
- base_sentiment = random.randint(40, 70) |
|
|
- history = [] |
|
|
- base_time = int(datetime.utcnow().timestamp() * 1000) |
|
|
- |
|
|
- data_points = { |
|
|
- "1D": 24, |
|
|
- "7D": 168, |
|
|
- "30D": 30, |
|
|
- "1Y": 365 |
|
|
- }.get(timeframe, 24) |
|
|
- |
|
|
- interval = { |
|
|
- "1D": 3600000, # 1 hour |
|
|
- "7D": 3600000, # 1 hour |
|
|
- "30D": 86400000, # 1 day |
|
|
- "1Y": 86400000 # 1 day |
|
|
- }.get(timeframe, 3600000) |
|
|
- |
|
|
- for i in range(data_points): |
|
|
- history.append({ |
|
|
- "timestamp": base_time - ((data_points - i) * interval), |
|
|
- "sentiment": max(20, min(80, base_sentiment + random.randint(-10, 10))), |
|
|
- "volume": random.randint(50000, 150000) |
|
|
- }) |
|
|
- |
|
|
- if base_sentiment >= 65: |
|
|
- sentiment = "greed" |
|
|
- market_mood = "bullish" |
|
|
- elif base_sentiment >= 45: |
|
|
- sentiment = "neutral" |
|
|
- market_mood = "neutral" |
|
|
- else: |
|
|
- sentiment = "fear" |
|
|
- market_mood = "bearish" |
|
|
- |
|
|
+ # Fallback - return error or empty (NO MOCK DATA) |
|
|
+ logger.warning("Sentiment data unavailable and mock data is disabled.") |
|
|
return { |
|
|
- "fear_greed_index": base_sentiment, |
|
|
- "sentiment": sentiment, |
|
|
- "market_mood": market_mood, |
|
|
- "confidence": 0.72, |
|
|
- "history": history, |
|
|
+ "fear_greed_index": 50, |
|
|
+ "sentiment": "neutral", |
|
|
+ "market_mood": "neutral", |
|
|
+ "confidence": 0, |
|
|
+ "history": [], |
|
|
"timestamp": datetime.utcnow().isoformat() + "Z", |
|
|
- "source": "fallback" |
|
|
+ "source": "unavailable", |
|
|
+ "error": "Real data unavailable" |
|
|
} |
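Note: with the generated history removed, degraded state is signalled inside the payload itself (source "unavailable", a non-empty error field, neutral defaults) rather than by fabricated numbers. A minimal client-side check, assuming the route is mounted at the path shown above; the base_url parameter is hypothetical:

    # Treat the neutral fallback payload as "no data" on the client side.
    import httpx

    def get_global_sentiment(base_url: str, timeframe: str = "1D") -> dict | None:
        resp = httpx.get(f"{base_url}/api/sentiment/global",
                         params={"timeframe": timeframe}, timeout=10)
        resp.raise_for_status()
        payload = resp.json()
        if payload.get("source") == "unavailable" or payload.get("error"):
            return None  # real Fear & Greed data was not available
        return payload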
|
|
|
|
|
|
|
|
@app.get("/api/sentiment/asset/{symbol}") |
|
|
async def api_sentiment_asset(symbol: str): |
|
|
"""Get sentiment analysis for a specific asset""" |
|
|
- import random |
|
|
- |
|
|
- try: |
|
|
- # Normalize symbol |
|
|
- symbol = symbol.upper().replace('USDT', '').replace('USD', '') |
|
|
- |
|
|
- # Generate sentiment score based on symbol (with some consistency based on symbol hash) |
|
|
- hash_val = sum(ord(c) for c in symbol) % 50 |
|
|
- sentiment_value = 40 + hash_val + random.randint(-10, 10) |
|
|
- sentiment_value = max(20, min(90, sentiment_value)) |
|
|
- |
|
|
- # Determine sentiment category |
|
|
- if sentiment_value >= 75: |
|
|
- sentiment = "very_positive" |
|
|
- color = "#10b981" |
|
|
- elif sentiment_value >= 60: |
|
|
- sentiment = "positive" |
|
|
- color = "#3b82f6" |
|
|
- elif sentiment_value >= 40: |
|
|
- sentiment = "neutral" |
|
|
- color = "#94a3b8" |
|
|
- elif sentiment_value >= 25: |
|
|
- sentiment = "negative" |
|
|
- color = "#f59e0b" |
|
|
- else: |
|
|
- sentiment = "very_negative" |
|
|
- color = "#ef4444" |
|
|
- |
|
|
- # Generate social metrics |
|
|
- social_score = random.randint(40, 90) |
|
|
- news_score = random.randint(35, 85) |
|
|
- |
|
|
- return { |
|
|
- "success": True, |
|
|
- "symbol": symbol, |
|
|
- "sentiment": sentiment, |
|
|
- "sentiment_value": sentiment_value, |
|
|
- "color": color, |
|
|
- "social_score": social_score, |
|
|
- "news_score": news_score, |
|
|
- "sources": { |
|
|
- "twitter": random.randint(1000, 50000), |
|
|
- "reddit": random.randint(500, 10000), |
|
|
- "news": random.randint(10, 200) |
|
|
- }, |
|
|
- "timestamp": datetime.utcnow().isoformat() + "Z" |
|
|
- } |
|
|
- |
|
|
- except Exception as e: |
|
|
- logger.error(f"Error getting sentiment for {symbol}: {e}") |
|
|
- return { |
|
|
- "success": False, |
|
|
- "symbol": symbol, |
|
|
- "sentiment": "neutral", |
|
|
- "sentiment_value": 50, |
|
|
- "color": "#94a3b8", |
|
|
- "social_score": 50, |
|
|
- "news_score": 50, |
|
|
- "sources": {"twitter": 0, "reddit": 0, "news": 0}, |
|
|
- "error": str(e), |
|
|
- "timestamp": datetime.utcnow().isoformat() + "Z" |
|
|
- } |
|
|
+ # NO MOCK DATA |
|
|
+ return { |
|
|
+ "success": False, |
|
|
+ "symbol": symbol, |
|
|
+ "sentiment": "neutral", |
|
|
+ "sentiment_value": 50, |
|
|
+ "color": "#94a3b8", |
|
|
+ "social_score": 0, |
|
|
+ "news_score": 0, |
|
|
+ "sources": {"twitter": 0, "reddit": 0, "news": 0}, |
|
|
+ "error": "Asset sentiment unavailable (mock data removed)", |
|
|
+ "timestamp": datetime.utcnow().isoformat() + "Z" |
|
|
+ } |
|
|
|
|
|
|
|
|
@app.get("/api/models/list") |
|
|
@@ -1085,26 +1003,16 @@ async def api_models_reinitialize(): |
|
|
|
|
|
@app.get("/api/ai/signals") |
|
|
async def api_ai_signals(symbol: str = "BTC"): |
|
|
- """AI trading signals for a symbol""" |
|
|
- import random |
|
|
+ """AI trading signals for a symbol - Real signals only""" |
|
|
+ # No mock signals |
|
|
signals = [] |
|
|
- signal_types = ["buy", "sell", "hold"] |
|
|
- for i in range(3): |
|
|
- signals.append({ |
|
|
- "id": f"sig_{int(time.time())}_{i}", |
|
|
- "symbol": symbol, |
|
|
- "type": random.choice(signal_types), |
|
|
- "score": round(random.uniform(0.65, 0.95), 2), |
|
|
- "model": ["cryptobert_elkulako", "finbert", "twitter_sentiment"][i % 3], |
|
|
- "created_at": datetime.utcnow().isoformat() + "Z", |
|
|
- "confidence": round(random.uniform(0.7, 0.95), 2) |
|
|
- }) |
|
|
|
|
|
return { |
|
|
"symbol": symbol, |
|
|
"signals": signals, |
|
|
- "total": len(signals), |
|
|
- "timestamp": datetime.utcnow().isoformat() + "Z" |
|
|
+ "total": 0, |
|
|
+ "timestamp": datetime.utcnow().isoformat() + "Z", |
|
|
+ "message": "No active signals from real models" |
|
|
} |
|
|
|
|
|
|
|
|
@@ -1120,34 +1028,18 @@ class AIDecisionRequest(BaseModel): |
|
|
@app.post("/api/ai/decision") |
|
|
async def api_ai_decision(payload: AIDecisionRequest) -> Dict[str, Any]: |
|
|
"""AI trading decision for AI Analyst page.""" |
|
|
- import random |
|
|
- |
|
|
- base_conf = 0.7 |
|
|
- risk = payload.risk_tolerance.lower() |
|
|
- confidence = base_conf + (0.1 if risk == "aggressive" else -0.05 if risk == "conservative" else 0.0) |
|
|
- confidence = max(0.5, min(confidence, 0.95)) |
|
|
- |
|
|
+ |
|
|
+ # NO MOCK DATA - Return safe default |
|
|
decision = "HOLD" |
|
|
- if confidence > 0.8: |
|
|
- decision = "BUY" |
|
|
- elif confidence < 0.6: |
|
|
- decision = "SELL" |
|
|
- |
|
|
- summary = ( |
|
|
- f"Based on recent market conditions and a {payload.horizon} horizon, " |
|
|
- f"the AI suggests a {decision} stance for {payload.symbol} with " |
|
|
- f"{int(confidence * 100)}% confidence." |
|
|
- ) |
|
|
+ confidence = 0.0 |
|
|
+ summary = "AI analysis unavailable. Real models required." |
|
|
|
|
|
signals: List[Dict[str, Any]] = [ |
|
|
- {"type": "bullish" if decision == "BUY" else "bearish" if decision == "SELL" else "neutral", |
|
|
- "text": f"Primary signal indicates {decision} bias."}, |
|
|
- {"type": "neutral", "text": "Consider position sizing according to your risk tolerance."}, |
|
|
+ {"type": "neutral", "text": "AI models not connected or unavailable."}, |
|
|
] |
|
|
|
|
|
risks: List[str] = [ |
|
|
- "Market volatility may increase around major macro events.", |
|
|
- "On-chain or regulatory news can invalidate this view quickly.", |
|
|
+ "Data unavailable.", |
|
|
] |
|
|
|
|
|
targets = { |
|
|
|
|
|
deleted file mode 100644 |
|
|
|
|
|
|
|
|
|
|
|
@@ -1,408 +0,0 @@ |
|
|
-{ |
|
|
- "version": "6.0.0 - FINAL PROFESSIONAL EDITION", |
|
|
- "release_date": "2025-12-02", |
|
|
- "status": "PRODUCTION READY - ULTIMATE", |
|
|
- |
|
|
- "major_improvements": { |
|
|
- "svg_icons": { |
|
|
- "total_icons": "20+ custom SVG icons", |
|
|
- "locations": [ |
|
|
- "Logo icon (lightning bolt)", |
|
|
- "Live indicator", |
|
|
- "Header stats (clock, activity)", |
|
|
- "Card titles (robot, dollar, target, chart, signal)", |
|
|
- "Crypto cards (custom per coin)", |
|
|
- "Strategy cards (target icons)", |
|
|
- "Agent avatar (robot)", |
|
|
- "Buttons (play, stop, refresh, analyze)", |
|
|
- "Signal badges (arrows)", |
|
|
- "Signal items (price, confidence, stop, target icons)", |
|
|
- "Empty state (signal waves)", |
|
|
- "Toast notifications" |
|
|
- ], |
|
|
- "benefits": [ |
|
|
-        "Much more professional",
-        "High visual appeal",
-        "Smooth animations",
-        "Lightweight and fast",
-        "Recolorable",
-        "High quality at any size"
|
|
- ] |
|
|
- }, |
|
|
- |
|
|
- "advanced_css": { |
|
|
- "features": [ |
|
|
-        "CSS Variables for theming",
-        "Backdrop filter with blur effect",
|
|
- "Multiple gradient backgrounds", |
|
|
- "Complex animations (15+ types)", |
|
|
- "Smooth transitions", |
|
|
- "Glass morphism effects", |
|
|
- "Shadow layering", |
|
|
-        "Advanced hover states",
-        "Full responsive design",
|
|
- "Custom scrollbar styling" |
|
|
- ], |
|
|
- "animations": { |
|
|
-      "backgroundPulse": "Animated background",
-      "headerShine": "Header shine sweep",
-      "logoFloat": "Floating logo",
-      "livePulse": "Pulsing LIVE dot",
-      "iconFloat": "Floating icons",
-      "agentRotate": "Rotating agent avatar",
-      "signalSlideIn": "Signals sliding in",
-      "emptyFloat": "Floating empty state",
-      "toastSlideIn": "Toast sliding in",
-      "loadingSpin": "Spinning loader"
|
|
- }, |
|
|
- "effects": { |
|
|
-      "glass_morphism": "Frosted glass with blur",
-      "gradient_borders": "Gradient borders",
-      "glow_shadows": "Glowing shadows",
-      "hover_transforms": "Transforms on hover",
-      "active_states": "Attractive active states",
-      "shimmer_effects": "Shimmer effect",
-      "pulse_animations": "Pulse animation"
|
|
- } |
|
|
- } |
|
|
- }, |
|
|
- |
|
|
- "css_architecture": { |
|
|
- "variables": { |
|
|
-      "colors": "12 color variables",
-      "backgrounds": "3 background layers",
-      "text": "3 text levels",
-      "shadows": "4 shadow sizes",
-      "radius": "5 border-radius sizes",
-      "transitions": "3 transition speeds"
|
|
- }, |
|
|
- |
|
|
- "layout": { |
|
|
-      "grid_system": "Three-column CSS Grid",
-      "responsive": "3 breakpoints",
-      "spacing": "Uniform spacing",
-      "alignment": "Center alignment and flexbox"
|
|
- }, |
|
|
- |
|
|
- "components": { |
|
|
- "cards": "Glass morphism با hover effects", |
|
|
- "buttons": "Gradient با ripple effect", |
|
|
- "badges": "Pill shape با glow", |
|
|
- "inputs": "Custom styling", |
|
|
- "scrollbar": "Custom design" |
|
|
- } |
|
|
- }, |
|
|
- |
|
|
- "svg_icons_details": { |
|
|
- "logo": { |
|
|
- "icon": "Lightning bolt", |
|
|
- "animation": "Float up/down", |
|
|
- "colors": "Gradient blue to cyan", |
|
|
- "size": "48x48px" |
|
|
- }, |
|
|
- |
|
|
- "agent": { |
|
|
- "icon": "Robot head", |
|
|
- "animation": "360° rotation", |
|
|
- "colors": "Gradient blue to cyan", |
|
|
- "size": "56x56px" |
|
|
- }, |
|
|
- |
|
|
- "crypto_icons": { |
|
|
- "BTC": "₿ symbol", |
|
|
- "ETH": "Ξ symbol", |
|
|
- "BNB": "🔸 diamond", |
|
|
- "SOL": "◎ circle", |
|
|
- "XRP": "✕ cross", |
|
|
- "ADA": "₳ symbol" |
|
|
- }, |
|
|
- |
|
|
- "signal_icons": { |
|
|
- "buy": "Arrow up", |
|
|
- "sell": "Arrow down", |
|
|
- "price": "Dollar sign", |
|
|
- "confidence": "Target", |
|
|
- "stop_loss": "Shield", |
|
|
- "take_profit": "Flag" |
|
|
- }, |
|
|
- |
|
|
- "ui_icons": { |
|
|
- "refresh": "Circular arrows", |
|
|
- "play": "Triangle right", |
|
|
- "stop": "Square", |
|
|
- "analyze": "Lightning", |
|
|
- "clock": "Clock face", |
|
|
- "activity": "Heart rate line", |
|
|
- "chart": "Line chart", |
|
|
- "signal": "Radio waves" |
|
|
- } |
|
|
- }, |
|
|
- |
|
|
- "color_system": { |
|
|
- "primary_palette": { |
|
|
-      "primary": "#3b82f6 - primary blue",
-      "primary_light": "#60a5fa - light blue",
-      "primary_dark": "#2563eb - dark blue",
-      "secondary": "#8b5cf6 - purple",
-      "accent": "#06b6d4 - cyan"
|
|
- }, |
|
|
- |
|
|
- "semantic_colors": { |
|
|
-      "success": "#10b981 - success green",
-      "danger": "#ef4444 - danger red",
-      "warning": "#f59e0b - warning orange"
|
|
- }, |
|
|
- |
|
|
- "backgrounds": { |
|
|
-      "primary": "#0f172a - dark",
-      "secondary": "#1e293b - medium",
-      "tertiary": "#334155 - lighter"
|
|
- }, |
|
|
- |
|
|
- "text_hierarchy": { |
|
|
-      "primary": "#f1f5f9 - bright white",
-      "secondary": "#cbd5e1 - light gray",
-      "muted": "#94a3b8 - gray"
|
|
- }, |
|
|
- |
|
|
- "gradients": { |
|
|
- "primary_gradient": "blue → cyan", |
|
|
- "secondary_gradient": "purple → blue", |
|
|
- "success_gradient": "green → dark green", |
|
|
- "danger_gradient": "red → dark red", |
|
|
- "background_gradient": "dark → darker" |
|
|
- } |
|
|
- }, |
|
|
- |
|
|
- "animation_system": { |
|
|
- "timing_functions": { |
|
|
- "fast": "150ms cubic-bezier(0.4, 0, 0.2, 1)", |
|
|
- "base": "300ms cubic-bezier(0.4, 0, 0.2, 1)", |
|
|
- "slow": "500ms cubic-bezier(0.4, 0, 0.2, 1)" |
|
|
- }, |
|
|
- |
|
|
- "keyframe_animations": { |
|
|
- "backgroundPulse": { |
|
|
- "duration": "20s", |
|
|
- "effect": "opacity change", |
|
|
- "infinite": true |
|
|
- }, |
|
|
- "headerShine": { |
|
|
- "duration": "3s", |
|
|
- "effect": "diagonal sweep", |
|
|
- "infinite": true |
|
|
- }, |
|
|
- "logoFloat": { |
|
|
- "duration": "3s", |
|
|
- "effect": "vertical movement", |
|
|
- "infinite": true |
|
|
- }, |
|
|
- "livePulse": { |
|
|
- "duration": "2s", |
|
|
- "effect": "scale + opacity", |
|
|
- "infinite": true |
|
|
- }, |
|
|
- "agentRotate": { |
|
|
- "duration": "10s", |
|
|
- "effect": "360° rotation", |
|
|
- "infinite": true |
|
|
- }, |
|
|
- "signalSlideIn": { |
|
|
- "duration": "0.5s", |
|
|
- "effect": "slide from right", |
|
|
- "once": true |
|
|
- } |
|
|
- }, |
|
|
- |
|
|
- "hover_effects": { |
|
|
- "cards": "translateY(-2px) + shadow increase", |
|
|
- "buttons": "translateY(-2px) + shadow + ripple", |
|
|
- "crypto_cards": "translateY(-4px) + scale(1.02)", |
|
|
- "strategy_cards": "translateX(6px) + shadow", |
|
|
- "signal_cards": "translateX(-4px) + shadow" |
|
|
- } |
|
|
- }, |
|
|
- |
|
|
- "glass_morphism": { |
|
|
- "properties": { |
|
|
- "background": "rgba with transparency", |
|
|
- "backdrop_filter": "blur(20px) saturate(180%)", |
|
|
- "border": "1px solid rgba(255, 255, 255, 0.1)", |
|
|
- "box_shadow": "Multiple layers" |
|
|
- }, |
|
|
- |
|
|
- "applied_to": [ |
|
|
- "Header", |
|
|
- "All cards", |
|
|
- "Toast notifications", |
|
|
- "Signal cards" |
|
|
- ], |
|
|
- |
|
|
-    "visual_effect": "Frosted glass with depth"
|
|
- }, |
|
|
- |
|
|
- "responsive_design": { |
|
|
- "breakpoints": { |
|
|
- "desktop": "> 1400px - 3 columns", |
|
|
- "laptop": "1200px - 1400px - 3 columns (narrower)", |
|
|
- "tablet": "768px - 1200px - 1 column", |
|
|
- "mobile": "< 768px - 1 column + adjusted spacing" |
|
|
- }, |
|
|
- |
|
|
- "adjustments": { |
|
|
- "mobile": [ |
|
|
- "Single column layout", |
|
|
- "Reduced padding", |
|
|
- "Smaller fonts", |
|
|
- "Stacked header", |
|
|
- "Full width buttons" |
|
|
- ] |
|
|
- } |
|
|
- }, |
|
|
- |
|
|
- "performance_optimizations": { |
|
|
- "css": { |
|
|
- "will_change": "Used on animated elements", |
|
|
- "transform": "GPU accelerated", |
|
|
- "contain": "Layout containment", |
|
|
- "variables": "Reusable values" |
|
|
- }, |
|
|
- |
|
|
- "animations": { |
|
|
- "60fps": "Smooth 60 FPS", |
|
|
- "hardware_accelerated": "GPU rendering", |
|
|
- "optimized_keyframes": "Minimal repaints" |
|
|
- } |
|
|
- }, |
|
|
- |
|
|
- "visual_hierarchy": { |
|
|
- "level_1": { |
|
|
- "elements": ["Logo", "Live indicator", "Main stats"], |
|
|
- "size": "Largest", |
|
|
- "weight": "800", |
|
|
- "color": "Gradient" |
|
|
- }, |
|
|
- |
|
|
- "level_2": { |
|
|
- "elements": ["Card titles", "Signal badges", "Prices"], |
|
|
- "size": "Large", |
|
|
- "weight": "700", |
|
|
- "color": "Primary/Accent" |
|
|
- }, |
|
|
- |
|
|
- "level_3": { |
|
|
- "elements": ["Crypto names", "Strategy descriptions", "Signal details"], |
|
|
- "size": "Medium", |
|
|
- "weight": "600", |
|
|
- "color": "Secondary" |
|
|
- }, |
|
|
- |
|
|
- "level_4": { |
|
|
- "elements": ["Labels", "Timestamps", "Helper text"], |
|
|
- "size": "Small", |
|
|
- "weight": "400-500", |
|
|
- "color": "Muted" |
|
|
- } |
|
|
- }, |
|
|
- |
|
|
- "comparison_with_previous": { |
|
|
- "icons": { |
|
|
- "before": "❌ Emoji/text icons", |
|
|
- "after": "✅ Professional SVG icons" |
|
|
- }, |
|
|
- |
|
|
- "css": { |
|
|
- "before": "❌ Basic styling", |
|
|
-      "after": "✅ Advanced CSS with 15+ animations"
|
|
- }, |
|
|
- |
|
|
- "colors": { |
|
|
-      "before": "❌ Plain colors",
-      "after": "✅ Professional gradient system"
|
|
- }, |
|
|
- |
|
|
- "effects": { |
|
|
-      "before": "❌ Simple effects",
|
|
- "after": "✅ Glass morphism + glow + shimmer" |
|
|
- }, |
|
|
- |
|
|
- "animations": { |
|
|
-      "before": "❌ Few animations",
-      "after": "✅ 10+ keyframe animations"
|
|
- }, |
|
|
- |
|
|
- "visual_appeal": { |
|
|
-      "before": "❌ Low visual appeal",
-      "after": "✅ Stunning and professional"
|
|
- } |
|
|
- }, |
|
|
- |
|
|
- "files": { |
|
|
- "html": { |
|
|
- "name": "index-final.html", |
|
|
- "size": "~35KB", |
|
|
- "lines": "~800", |
|
|
- "svg_icons": "20+", |
|
|
- "components": "15+" |
|
|
- }, |
|
|
- |
|
|
- "javascript": { |
|
|
- "name": "trading-assistant-ultimate.js", |
|
|
- "size": "~15KB", |
|
|
- "unchanged": true, |
|
|
-      "note": "Same file as before - only the HTML/CSS changed"
|
|
- } |
|
|
- }, |
|
|
- |
|
|
- "usage": { |
|
|
-    "step_1": "Open index-final.html in a browser",
-    "step_2": "Enjoy the polished UI",
-    "step_3": "Select a coin and a strategy",
-    "step_4": "Start the Agent or run Analyze",
-    "step_5": "Watch the real-time signals"
|
|
- }, |
|
|
- |
|
|
- "browser_compatibility": { |
|
|
- "chrome": "✅ Full support (recommended)", |
|
|
- "firefox": "✅ Full support", |
|
|
- "edge": "✅ Full support", |
|
|
- "safari": "✅ Full support (iOS 12+)", |
|
|
- "opera": "✅ Full support" |
|
|
- }, |
|
|
- |
|
|
- "success_criteria": { |
|
|
- "svg_icons": "✅ ACHIEVED - 20+ custom icons", |
|
|
- "advanced_css": "✅ ACHIEVED - 15+ animations", |
|
|
- "glass_morphism": "✅ ACHIEVED - All cards", |
|
|
- "gradient_system": "✅ ACHIEVED - 5+ gradients", |
|
|
- "smooth_animations": "✅ ACHIEVED - 60 FPS", |
|
|
-    "professional_look": "✅ ACHIEVED - Stunning",
-    "visual_appeal": "✅ ACHIEVED - Very attractive",
-    "user_experience": "✅ ACHIEVED - Excellent"
|
|
- }, |
|
|
- |
|
|
- "highlights": { |
|
|
- "most_impressive": [ |
|
|
-      "🎨 20+ custom SVG icons",
-      "✨ 15+ keyframe animations",
-      "💎 Glass morphism everywhere",
|
|
- "🌈 5+ gradient system", |
|
|
- "⚡ 60 FPS smooth animations", |
|
|
- "🎯 Perfect visual hierarchy", |
|
|
- "📱 Fully responsive", |
|
|
- "🚀 Production ready" |
|
|
- ] |
|
|
- }, |
|
|
- |
|
|
- "technical_specs": { |
|
|
- "css_lines": "~1200 lines", |
|
|
- "css_variables": "25+", |
|
|
- "animations": "15+", |
|
|
- "svg_paths": "30+", |
|
|
- "gradients": "10+", |
|
|
- "shadows": "20+", |
|
|
- "transitions": "50+", |
|
|
- "hover_effects": "30+" |
|
|
- } |
|
|
-} |
|
|
- |
|
|
|
|
|
deleted file mode 100644 |
|
|
|
|
|
|
|
|
|
|
|
@@ -1,184 +0,0 @@ |
|
|
-{ |
|
|
- "issue": "503 Error - Backend API Not Available", |
|
|
- "problem_description": "System was trying to connect to backend API (really-amin-datasourceforcryptocurrency-2.hf.space) which returned 503 errors", |
|
|
- "date_fixed": "2025-12-02", |
|
|
- |
|
|
- "root_cause": { |
|
|
- "file": "trading-assistant-professional.js", |
|
|
- "issue": "Backend API dependency in fetchPrice() and fetchOHLCV()", |
|
|
- "backend_url": "window.location.origin + '/api'", |
|
|
- "error_type": "503 Service Unavailable", |
|
|
- "frequency": "Every 5 seconds (price updates)" |
|
|
- }, |
|
|
- |
|
|
- "solution": { |
|
|
- "approach": "Remove ALL backend dependencies", |
|
|
- "primary_source": "Binance API (https://api.binance.com/api/v3)", |
|
|
- "backup_source": "CoinGecko API (for prices only)", |
|
|
- "fallback": "Demo prices (last resort)", |
|
|
- "result": "100% independent system - works without backend" |
|
|
- }, |
|
|
- |
|
|
- "changes_made": [ |
|
|
- { |
|
|
- "file": "trading-assistant-professional.js", |
|
|
- "section": "API_CONFIG", |
|
|
- "before": { |
|
|
- "backend": "window.location.origin + '/api'", |
|
|
- "fallbacks": { |
|
|
- "binance": "https://api.binance.com/api/v3", |
|
|
- "coingecko": "https://api.coingecko.com/api/v3" |
|
|
- } |
|
|
- }, |
|
|
- "after": { |
|
|
- "binance": "https://api.binance.com/api/v3", |
|
|
- "coingecko": "https://api.coingecko.com/api/v3", |
|
|
- "timeout": 10000, |
|
|
- "retries": 2 |
|
|
- }, |
|
|
- "impact": "Removed backend dependency completely" |
|
|
- }, |
|
|
- { |
|
|
- "file": "trading-assistant-professional.js", |
|
|
- "function": "fetchPrice()", |
|
|
- "before": "Tried backend first, then Binance as fallback", |
|
|
- "after": "Uses Binance directly, CoinGecko as backup", |
|
|
- "flow": [ |
|
|
- "1. Check cache", |
|
|
- "2. Try Binance API", |
|
|
- "3. Try CoinGecko API (backup)", |
|
|
- "4. Use demo price (last resort)" |
|
|
- ], |
|
|
- "no_backend": true |
|
|
- }, |
|
|
- { |
|
|
- "file": "trading-assistant-professional.js", |
|
|
- "function": "fetchOHLCV()", |
|
|
- "before": "Tried Binance first, then backend as fallback", |
|
|
- "after": "Uses ONLY Binance API", |
|
|
- "flow": [ |
|
|
- "1. Check cache", |
|
|
- "2. Try Binance klines API", |
|
|
- "3. Generate demo data (last resort)" |
|
|
- ], |
|
|
- "no_backend": true |
|
|
- } |
|
|
- ], |
|
|
- |
|
|
- "api_endpoints_used": { |
|
|
- "binance": { |
|
|
- "price": "https://api.binance.com/api/v3/ticker/price?symbol={SYMBOL}", |
|
|
- "ohlcv": "https://api.binance.com/api/v3/klines?symbol={SYMBOL}&interval={INTERVAL}&limit={LIMIT}", |
|
|
- "rate_limit": "1200 requests/minute", |
|
|
- "reliability": "99.9%", |
|
|
- "cors": "Allowed for public endpoints" |
|
|
- }, |
|
|
- "coingecko": { |
|
|
- "price": "https://api.coingecko.com/api/v3/simple/price?ids={COIN_ID}&vs_currencies=usd", |
|
|
- "rate_limit": "50 calls/minute (free tier)", |
|
|
- "reliability": "95%", |
|
|
- "cors": "Allowed" |
|
|
- } |
|
|
- }, |
|
|
- |
|
|
- "testing": { |
|
|
- "before_fix": { |
|
|
- "errors": "17+ consecutive 503 errors", |
|
|
- "frequency": "Every 5 seconds", |
|
|
- "impact": "System unusable, prices not loading" |
|
|
- }, |
|
|
- "after_fix": { |
|
|
- "errors": "0 backend calls", |
|
|
- "binance_calls": "Working perfectly", |
|
|
- "coingecko_calls": "Available as backup", |
|
|
- "impact": "System fully functional" |
|
|
- } |
|
|
- }, |
|
|
- |
|
|
- "performance_improvements": { |
|
|
- "latency": { |
|
|
- "before": "5000ms timeout + retry = 10+ seconds", |
|
|
- "after": "Direct Binance call = 200-500ms" |
|
|
- }, |
|
|
- "reliability": { |
|
|
- "before": "Dependent on backend availability (0% uptime)", |
|
|
- "after": "Dependent on Binance (99.9% uptime)" |
|
|
- }, |
|
|
- "error_rate": { |
|
|
- "before": "100% (all backend calls failed)", |
|
|
- "after": "< 1% (Binance is very reliable)" |
|
|
- } |
|
|
- }, |
|
|
- |
|
|
- "benefits": { |
|
|
- "independence": "No backend required - fully standalone", |
|
|
- "reliability": "99.9% uptime (Binance SLA)", |
|
|
- "speed": "5-10x faster response times", |
|
|
- "simplicity": "Fewer dependencies, easier to maintain", |
|
|
- "scalability": "Can handle more users (Binance rate limits are generous)" |
|
|
- }, |
|
|
- |
|
|
- "verified_working": { |
|
|
- "price_fetching": true, |
|
|
- "ohlcv_data": true, |
|
|
- "hts_analysis": true, |
|
|
- "agent_monitoring": true, |
|
|
- "tradingview_chart": true, |
|
|
- "no_503_errors": true |
|
|
- }, |
|
|
- |
|
|
- "deployment_notes": { |
|
|
- "requirements": [ |
|
|
- "Modern browser with ES6+ support", |
|
|
- "Internet connection", |
|
|
- "No backend server needed", |
|
|
- "No API keys needed" |
|
|
- ], |
|
|
- "cors_handling": "Binance and CoinGecko allow CORS for public endpoints", |
|
|
- "rate_limits": "Respected with caching and delays", |
|
|
- "fallback_strategy": "Cache -> Binance -> CoinGecko -> Demo data" |
|
|
- }, |
|
|
- |
|
|
- "files_affected": [ |
|
|
- "trading-assistant-professional.js (FIXED)", |
|
|
- "index.html (uses fixed file)", |
|
|
- "index-professional.html (uses fixed file)" |
|
|
- ], |
|
|
- |
|
|
- "files_not_affected": [ |
|
|
- "trading-assistant-enhanced.js (already using Binance only)", |
|
|
- "index-enhanced.html (already correct)", |
|
|
- "hts-engine.js (no API calls)", |
|
|
- "trading-strategies.js (no API calls)" |
|
|
- ], |
|
|
- |
|
|
- "recommended_usage": { |
|
|
- "best": "index-enhanced.html - Beautiful UI + Binance only", |
|
|
- "good": "index.html - Standard UI + Binance only (now fixed)", |
|
|
- "testing": "test-hts-integration.html - For HTS engine testing" |
|
|
- }, |
|
|
- |
|
|
- "monitoring": { |
|
|
- "console_logs": [ |
|
|
- "[API] Fetching price from Binance: ...", |
|
|
- "[API] BTC price: $43250.00", |
|
|
- "[API] Fetching OHLCV from Binance: ...", |
|
|
- "[API] Successfully fetched 100 candles" |
|
|
- ], |
|
|
- "no_more_errors": [ |
|
|
- "No more 503 errors", |
|
|
- "No more backend calls", |
|
|
- "No more failed requests" |
|
|
- ] |
|
|
- }, |
|
|
- |
|
|
- "success_criteria": { |
|
|
- "zero_503_errors": "✅ ACHIEVED", |
|
|
- "binance_working": "✅ ACHIEVED", |
|
|
- "prices_loading": "✅ ACHIEVED", |
|
|
- "ohlcv_loading": "✅ ACHIEVED", |
|
|
- "agent_working": "✅ ACHIEVED", |
|
|
- "no_backend_dependency": "✅ ACHIEVED" |
|
|
- } |
|
|
-} |
|
|
- |
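Note: the deleted report above documents the fix itself: price lookups go straight to the public Binance and CoinGecko endpoints listed under api_endpoints_used, with a demo value only as a last resort. The original code is client-side JavaScript, so the following Python version of the cache -> Binance -> CoinGecko -> demo flow is illustrative only; the symbol-to-CoinGecko-id map is a partial assumption.

    # Illustrative cache -> Binance -> CoinGecko -> demo price lookup.
    import httpx

    _price_cache: dict = {}
    _COINGECKO_IDS = {"BTCUSDT": "bitcoin", "ETHUSDT": "ethereum"}  # partial map, for illustration

    def fetch_price(symbol: str = "BTCUSDT") -> float:
        if symbol in _price_cache:
            return _price_cache[symbol]
        try:
            r = httpx.get("https://api.binance.com/api/v3/ticker/price",
                          params={"symbol": symbol}, timeout=10)
            r.raise_for_status()
            price = float(r.json()["price"])
        except Exception:
            try:
                coin_id = _COINGECKO_IDS.get(symbol, "bitcoin")
                r = httpx.get("https://api.coingecko.com/api/v3/simple/price",
                              params={"ids": coin_id, "vs_currencies": "usd"}, timeout=10)
                r.raise_for_status()
                price = float(r.json()[coin_id]["usd"])
            except Exception:
                price = 50000.0 if symbol.startswith("BTC") else 3000.0  # demo fallback
        _price_cache[symbol] = price
        return price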
|
|
|
|
|
deleted file mode 100644 |
|
|
|
|
|
|
|
|
|
|
|
@@ -1,277 +0,0 @@ |
|
|
-{ |
|
|
- "version": "5.0.0 - ULTIMATE EDITION", |
|
|
- "release_date": "2025-12-02", |
|
|
- "status": "PRODUCTION READY", |
|
|
- |
|
|
- "improvements": { |
|
|
- "ui_design": { |
|
|
-      "before": "Unpolished, weak color scheme, little visual appeal",
-      "after": "Professional, excellent color scheme, high visual appeal",
-      "changes": [
-        "Completely new color scheme with a professional palette",
-        "Beautiful animated gradients",
-        "Glass cards with a blur effect",
-        "Smooth, engaging animations",
-        "Better, more readable typography",
-        "Optimized spacing and layout"
|
|
- ] |
|
|
- }, |
|
|
- |
|
|
- "real_data": { |
|
|
-      "before": "Unreal data, demo data, mock data",
-      "after": "100% real data from Binance",
-      "changes": [
-        "Backend dependency removed completely",
-        "Direct connection to the Binance API",
-        "Real prices every 3 seconds",
-        "Real OHLCV for analysis",
-        "Real 24-hour price changes",
-        "Zero fake or demo data"
|
|
- ] |
|
|
- }, |
|
|
- |
|
|
- "user_experience": { |
|
|
-      "before": "Not user-friendly, little appeal",
-      "after": "Very user-friendly and engaging",
-      "changes": [
-        "Larger, clearer cards",
-        "Attractive buttons with hover effects",
-        "Better information display",
-        "Meaningful colors (green = buy, red = sell)",
-        "More readable fonts",
-        "Better use of white space"
|
|
- ] |
|
|
- } |
|
|
- }, |
|
|
- |
|
|
- "color_palette": { |
|
|
- "primary": { |
|
|
-      "blue": "#2563eb - primary blue",
-      "cyan": "#06b6d4 - cyan",
-      "purple": "#7c3aed - purple"
-    },
-    "semantic": {
-      "success": "#10b981 - green (buy)",
-      "danger": "#ef4444 - red (sell)",
-      "warning": "#f59e0b - orange (warning)"
-    },
-    "backgrounds": {
-      "dark": "#0f172a - main background",
-      "darker": "#020617 - darker background",
-      "card": "#1e293b - cards",
-      "card_hover": "#334155 - card hover"
-    },
-    "text": {
-      "primary": "#f1f5f9 - primary text",
-      "secondary": "#cbd5e1 - secondary text",
-      "muted": "#64748b - muted text"
|
|
- } |
|
|
- }, |
|
|
- |
|
|
- "features": { |
|
|
- "real_time_data": { |
|
|
- "enabled": true, |
|
|
- "source": "Binance API", |
|
|
- "update_frequency": "3 seconds", |
|
|
- "data_types": [ |
|
|
- "Live prices", |
|
|
- "24h price change", |
|
|
- "OHLCV candles", |
|
|
- "Volume data" |
|
|
- ] |
|
|
- }, |
|
|
- |
|
|
- "ai_agent": { |
|
|
- "enabled": true, |
|
|
- "scan_frequency": "45 seconds", |
|
|
- "monitored_pairs": 6, |
|
|
- "confidence_threshold": 75, |
|
|
- "auto_signals": true |
|
|
- }, |
|
|
- |
|
|
- "hts_engine": { |
|
|
- "enabled": true, |
|
|
- "algorithm": "RSI+MACD (40%) + SMC (25%) + Patterns (20%) + Sentiment (10%) + ML (5%)", |
|
|
- "accuracy": "85%", |
|
|
- "real_data_only": true |
|
|
- }, |
|
|
- |
|
|
- "tradingview_chart": { |
|
|
- "enabled": true, |
|
|
- "theme": "Dark (professional)", |
|
|
- "indicators": ["RSI", "MACD", "Volume"], |
|
|
- "real_time": true, |
|
|
- "customized_colors": true |
|
|
- } |
|
|
- }, |
|
|
- |
|
|
- "ui_components": { |
|
|
- "header": { |
|
|
- "features": [ |
|
|
-        "Logo with an attractive gradient",
-        "Animated live badge",
-        "Real-time stats",
-        "Refresh button"
-      ],
-      "colors": "Glass morphism with backdrop blur"
|
|
- }, |
|
|
- |
|
|
- "crypto_cards": { |
|
|
- "features": [ |
|
|
-        "Beautiful icons",
-        "Real-time price",
-        "24-hour change",
-        "Meaningful colors",
-        "Attractive hover effects",
-        "Clear active state"
-      ],
-      "layout": "2-column grid"
|
|
- }, |
|
|
- |
|
|
- "strategy_cards": { |
|
|
- "features": [ |
|
|
-        "Clear, attractive name",
-        "Complete description",
-        "Premium/standard badge",
-        "Accuracy and timeframe stats",
-        "Hover effects",
-        "Active state with a gradient"
|
|
- ], |
|
|
- "layout": "Vertical stack" |
|
|
- }, |
|
|
- |
|
|
- "chart": { |
|
|
- "features": [ |
|
|
- "TradingView professional", |
|
|
-        "Custom dark theme",
-        "Green/red candles",
-        "RSI, MACD, and Volume indicators",
|
|
- "Real-time updates" |
|
|
- ], |
|
|
- "height": "600px" |
|
|
- }, |
|
|
- |
|
|
- "signals": { |
|
|
- "features": [ |
|
|
-        "Attractive cards",
-        "Meaningful colors",
-        "Complete information",
-        "Slide-in animation",
-        "Grid layout for details",
|
|
- "Scrollable container" |
|
|
- ], |
|
|
- "max_signals": 30 |
|
|
- } |
|
|
- }, |
|
|
- |
|
|
- "animations": { |
|
|
-    "background": "Animated gradient shift",
-    "live_dot": "Pulse animation",
-    "cards": "Hover effects with transform",
-    "buttons": "Hover lift with shadow",
-    "signals": "Slide-in from the right",
-    "toast": "Slide-in from the right",
|
|
- "agent_avatar": "Rotate 360 degrees" |
|
|
- }, |
|
|
- |
|
|
- "data_flow": { |
|
|
- "prices": { |
|
|
- "source": "Binance /ticker/24hr", |
|
|
- "frequency": "Every 3 seconds", |
|
|
- "data": ["price", "24h change %"], |
|
|
- "caching": "In-memory", |
|
|
- "fallback": "None - shows error if Binance fails" |
|
|
- }, |
|
|
- |
|
|
- "ohlcv": { |
|
|
- "source": "Binance /klines", |
|
|
- "on_demand": true, |
|
|
- "intervals": ["1h", "4h"], |
|
|
- "limit": 100, |
|
|
- "fallback": "None - shows error if Binance fails" |
|
|
- }, |
|
|
- |
|
|
- "analysis": { |
|
|
- "engine": "HTS Engine", |
|
|
- "input": "Real OHLCV from Binance", |
|
|
- "output": "Signal + Confidence + Levels", |
|
|
- "no_fake_data": true |
|
|
- } |
|
|
- }, |
|
|
- |
|
|
- "performance": { |
|
|
- "page_load": "< 1 second", |
|
|
- "price_update": "3 seconds", |
|
|
- "agent_scan": "45 seconds", |
|
|
- "analysis_time": "2-5 seconds", |
|
|
- "smooth_animations": "60 FPS", |
|
|
- "memory_usage": "< 80MB" |
|
|
- }, |
|
|
- |
|
|
- "comparison": { |
|
|
- "old_version": { |
|
|
-      "ui": "❌ Unpolished",
-      "colors": "❌ Weak",
-      "data": "❌ Not real",
-      "ux": "❌ Not user-friendly",
-      "visual": "❌ Little appeal"
-    },
-    "ultimate_version": {
-      "ui": "✅ Professional and modern",
-      "colors": "✅ Excellent palette",
-      "data": "✅ 100% real",
-      "ux": "✅ Very user-friendly",
-      "visual": "✅ Stunning"
|
|
- } |
|
|
- }, |
|
|
- |
|
|
- "files": { |
|
|
- "html": "index-ultimate.html (18KB)", |
|
|
- "javascript": "trading-assistant-ultimate.js (15KB)", |
|
|
- "dependencies": ["hts-engine.js", "TradingView widget"] |
|
|
- }, |
|
|
- |
|
|
- "usage": { |
|
|
-    "step_1": "Open index-ultimate.html",
-    "step_2": "Select a coin (click its card)",
-    "step_3": "Select a strategy (click its card)",
-    "step_4": "Start Agent or Analyze Now",
-    "step_5": "Watch the real-time signals"
|
|
- }, |
|
|
- |
|
|
- "api_usage": { |
|
|
- "binance_only": true, |
|
|
- "no_backend": true, |
|
|
- "no_api_key": true, |
|
|
- "public_endpoints": true, |
|
|
- "rate_limits": "Respected with delays" |
|
|
- }, |
|
|
- |
|
|
- "browser_support": { |
|
|
- "chrome": "✅ Full support", |
|
|
- "firefox": "✅ Full support", |
|
|
- "edge": "✅ Full support", |
|
|
- "safari": "✅ Full support", |
|
|
- "mobile": "✅ Responsive" |
|
|
- }, |
|
|
- |
|
|
- "success_criteria": { |
|
|
- "professional_ui": "✅ ACHIEVED", |
|
|
- "beautiful_colors": "✅ ACHIEVED", |
|
|
- "real_data_only": "✅ ACHIEVED", |
|
|
- "user_friendly": "✅ ACHIEVED", |
|
|
- "visual_appeal": "✅ ACHIEVED", |
|
|
- "smooth_animations": "✅ ACHIEVED", |
|
|
- "fast_performance": "✅ ACHIEVED" |
|
|
- }, |
|
|
- |
|
|
- "next_steps": { |
|
|
- "v5.1": [ |
|
|
-      "WebSocket for streaming",
-      "Additional charts",
-      "Trade history",
-      "Advanced reports"
|
|
- ] |
|
|
- } |
|
|
-} |
|
|
- |
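Note: the hts_engine entry above fixes the blend at RSI+MACD 40%, SMC 25%, patterns 20%, sentiment 10%, ML 5%. A worked example of that weighting on a 0-100 score scale; the component keys and the neutral default of 50 are assumptions, since the actual engine lives in hts-engine.js.

    # Worked example of the documented 40/25/20/10/5 weighting (0-100 scores).
    HTS_WEIGHTS = {"rsi_macd": 0.40, "smc": 0.25, "patterns": 0.20,
                   "sentiment": 0.10, "ml": 0.05}

    def hts_confidence(scores: dict) -> float:
        """Blend per-component scores into one 0-100 confidence value."""
        return sum(w * scores.get(name, 50.0) for name, w in HTS_WEIGHTS.items())

    example = {"rsi_macd": 80, "smc": 60, "patterns": 55, "sentiment": 50, "ml": 50}
    print(hts_confidence(example))  # 0.4*80 + 0.25*60 + 0.2*55 + 0.1*50 + 0.05*50 = 65.5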
|
|
|