import csv
import json
import os
import re
import sys
import time
from datetime import datetime, timedelta
from pathlib import Path
from zoneinfo import ZoneInfo

import pandas as pd
import pandas_market_calendars as mcal
import pytz
import requests
from dotenv import load_dotenv

# Ops notes — check whether the daemon is running / relaunch it:
#   ps aux | grep python | grep "/var/www/html"
#   nohup /bin/python3 -u /var/www/html/process/gettingChains.py > logs/gettingChains.log 2>&1 &

# Load environment variables (Schwab API credentials) from .env
load_dotenv()

# ─── Global configuration ────────────────────────────────────────────────────
# Output folder for the per-day chain files (CSV + Parquet).
PATH_UBUNTU = "/var/www/html/flask_project/chains/"
# Retry policy for option-chain requests.
MAX_RETRIES = 3
RETRY_DELAY_SECONDS = 30
# Number of strikes requested per chain snapshot.
TOTAL_STRIKES = 120
# Session window used by the scheduler. NOTE(review): these are compared
# against naive datetime.now(), so the host clock is assumed to be set to
# US/Eastern — confirm on deployment.
TIME_OPEN = "09:29:00"
TIME_CLOSE = "16:02:00"


def get_nyse_holidays(years=None):
    """Build the set of NYSE holiday dates as MM/DD/YYYY strings.

    Defaults to the current year and the next one. A "holiday" here is any
    Monday-Friday on which the NYSE calendar (pandas_market_calendars) says
    the exchange does not open, so the set stays current automatically.
    """
    if years is None:
        this_year = datetime.now().year
        years = [this_year, this_year + 1]

    calendar = mcal.get_calendar("NYSE")
    closed_days = set()

    for year in years:
        first_day = f"{year}-01-01"
        last_day = f"{year}-12-31"
        # Days the NYSE actually opens, as naive dates for comparison.
        open_dates = {
            d.date()
            for d in calendar.valid_days(start_date=first_day, end_date=last_day)
        }
        # Holidays = Mon-Fri days that are NOT among the open days.
        closed_days.update(
            weekday.strftime("%m/%d/%Y")
            for weekday in pd.bdate_range(start=first_day, end=last_day)
            if weekday.date() not in open_dates
        )

    return closed_days


# Generated once at process start-up; the next-year holidays are included, so
# a long-running daemon stays correct across the year boundary.
HOLIDAYS = get_nyse_holidays()
print(f"[INFO] Feriados NYSE cargados: {sorted(HOLIDAYS)}")


# Schwab client — credentials come from the .env file loaded above.
try:
    import schwabdev
    client = schwabdev.Client(
        os.getenv('appKey'),
        os.getenv('appSecret'),
        os.getenv('callbackUrl'),
        verbose=True,
    )
except ImportError:
    # NOTE(review): only ImportError is handled here; a failing Client()
    # constructor (e.g. missing/invalid credentials) would propagate and
    # kill the process at import time — confirm that is intended.
    print("Error: No se pudo importar 'schwabdev'.")
    sys.exit(1)


# ─── Funciones: gettingChains ────────────────────────────────────────────────

def obtener_cadena_con_reintento(symbol, from_date, to_date, total_strikes):
    """Fetch the option chain for *symbol* from the Schwab client, with retries.

    Retries up to MAX_RETRIES times on network or JSON-decoding errors,
    sleeping RETRY_DELAY_SECONDS between attempts — but not after the last
    one, so a definitive failure is reported immediately.

    Args:
        symbol: ticker to query (e.g. 'SPY', '$SPX').
        from_date, to_date: expiration date range (YYYY-MM-DD strings).
        total_strikes: number of strikes to request around the money.

    Returns:
        dict: the decoded JSON option-chain payload.

    Raises:
        RuntimeError: when every attempt fails.
    """
    for intento in range(MAX_RETRIES):
        try:
            response = client.option_chains(
                symbol, fromDate=from_date, toDate=to_date, strikeCount=total_strikes
            )
            return response.json()
        except (json.JSONDecodeError, requests.RequestException) as e:
            print(f"[WARN] Intento {intento + 1}/{MAX_RETRIES} fallido para {symbol}: {e}")
            # Fix: previously we also slept after the FINAL failed attempt,
            # delaying the RuntimeError by RETRY_DELAY_SECONDS for nothing.
            if intento < MAX_RETRIES - 1:
                time.sleep(RETRY_DELAY_SECONDS)
    print(f"[ERROR] No se pudo obtener la cadena para {symbol} tras {MAX_RETRIES} intentos.")
    raise RuntimeError(f"Fallo definitivo al obtener cadena para {symbol}")


def extract_data(option_data):
    """Flatten a Schwab option-chain payload into one record per strike.

    Each record merges the call and put legs of a strike and carries the
    snapshot timestamp (America/New_York), the underlying price, and the
    bid/ask, delta, gamma, open interest and volume of both sides.

    Args:
        option_data: dict as returned by the Schwab option_chains endpoint
            (keys 'underlyingPrice', 'callExpDateMap', 'putExpDateMap').

    Returns:
        list[dict]: one dict per strike, in first-seen order.
    """
    extracted_data = {}
    # zoneinfo is the stdlib replacement for the third-party pytz (3.9+).
    timestamp = datetime.now(ZoneInfo('America/New_York')).strftime('%Y-%m-%d %H:%M:%S')
    underlying_price = option_data.get('underlyingPrice', '')
    underlying_price = round(float(underlying_price), 3) if underlying_price else None

    # Same extraction for both sides; only the column suffix differs.
    for option_type, suffix in (('callExpDateMap', 'call'), ('putExpDateMap', 'put')):
        for date_key, strikes in option_data.get(option_type, {}).items():
            for strike, details in strikes.items():
                for detail in details:
                    strike_price = detail.get('strikePrice', None)
                    if strike_price is None:
                        continue
                    # First sighting of this strike: create the skeleton row.
                    record = extracted_data.setdefault(strike_price, {
                        'timestamp': timestamp,
                        'underlying_price': underlying_price,
                        'strike': strike_price,
                        'bid_call': None, 'ask_call': None,
                        'delta_call': None, 'gamma_call': None,
                        'open_interest_call': None, 'volume_call': None,
                        'bid_put': None, 'ask_put': None,
                        'delta_put': None, 'gamma_put': None,
                        'open_interest_put': None, 'volume_put': None,
                    })
                    record.update({
                        f'bid_{suffix}': detail.get('bid', None),
                        f'ask_{suffix}': detail.get('ask', None),
                        f'delta_{suffix}': detail.get('delta', None),
                        f'gamma_{suffix}': detail.get('gamma', None),
                        f'open_interest_{suffix}': detail.get('openInterest', None),
                        f'volume_{suffix}': detail.get('totalVolume', None),
                    })

    return list(extracted_data.values())


def calculate_gex(data):
    """Compute the simplified Gamma Exposure (GEX) per strike, in place.

    Per row: GEX = (gamma_call * OI_call + gamma_put * OI_put) * 100.
    Rows with no gamma on either side get 0.

    Returns:
        tuple: (total GEX across all rows, the same row list, each row with
        a new 'gex_per_strike' key).
    """
    total = 0
    for fila in data:
        g_call = fila.get('gamma_call') or 0
        g_put = fila.get('gamma_put') or 0
        oi_call = fila.get('open_interest_call') or 0
        oi_put = fila.get('open_interest_put') or 0

        if g_call or g_put:
            fila['gex_per_strike'] = ((g_call * oi_call) + (g_put * oi_put)) * 100
        else:
            fila['gex_per_strike'] = 0
        total += fila['gex_per_strike']

    return total, data


def calculate_gex_precise(data, underlying_price):
    """Compute the 'precise' Gamma Exposure per strike, in place.

    Per row: GEX = (gamma_call*OI_call - gamma_put*OI_put) * 100 * spot * 1%,
    i.e. the dollar gamma for a 1% move of the underlying. Rows with no
    gamma on either side get 0.

    Returns:
        tuple: (total precise GEX, the row list with 'gex_precise' added).
    """
    CONTRACT_MULTIPLIER = 100
    ONE_PERCENT = 0.01
    total = 0

    for fila in data:
        g_call = fila.get('gamma_call') or 0
        g_put = fila.get('gamma_put') or 0
        oi_c = fila.get('open_interest_call') or 0
        oi_p = fila.get('open_interest_put') or 0

        if not (g_call or g_put):
            fila['gex_precise'] = 0
        else:
            fila['gex_precise'] = (
                (g_call * oi_c - g_put * oi_p)
                * CONTRACT_MULTIPLIER * underlying_price * ONE_PERCENT
            )
        total += fila['gex_precise']

    return total, data


def save_chain_to_cvs(data, symbol):
    """Persist one option-chain snapshot to the daily CSV and Parquet files.

    Computes both GEX variants (simple and precise) and stamps the totals on
    every row before writing. For SPY/QQQ only, rows with delta_put < -1000
    or gamma_put < -1000 are discarded as feed outliers.

    Args:
        data: list of per-strike dicts (see extract_data()).
        symbol: ticker, possibly prefixed with '$' (e.g. '$SPX').
    """
    # --- Outlier filter (SPY/QQQ only) ---
    sym = str(symbol).upper().replace('$', '').strip()
    if sym in {'SPY', 'QQQ'} and data:
        filtered, dropped = [], 0
        for r in data:
            # Coerce defensively; malformed values count as missing.
            try:
                dp = float(r.get('delta_put')) if r.get('delta_put') is not None else None
            except Exception:
                dp = None
            try:
                gp = float(r.get('gamma_put')) if r.get('gamma_put') is not None else None
            except Exception:
                gp = None

            if (dp is not None and dp < -1000) or (gp is not None and gp < -1000):
                dropped += 1
                continue
            filtered.append(r)

        if dropped:
            print(f"[{sym}] {dropped} filas descartadas (delta_put/gamma_put < -1000).")

        data = filtered
        if not data:
            print(f"[{sym}] Snapshot vacío tras filtrar; no se guarda.")
            return

    # Fix: bail out on an empty snapshot for ANY symbol. Previously an empty
    # list created a header-only CSV and then logged a spurious KeyError when
    # building the empty DataFrame below.
    if not data:
        return

    fecha_actual = datetime.now().strftime("%Y-%m-%d")
    filename_csv = PATH_UBUNTU + f"optionChain_{symbol}_{fecha_actual}.csv"
    filename_parquet = PATH_UBUNTU + f"optionChain_{symbol}_{fecha_actual}.parquet"

    header = [
        'timestamp', 'underlying_price', 'strike',
        'bid_call', 'ask_call', 'bid_put', 'ask_put',
        'delta_call', 'delta_put',
        'gamma_call', 'open_interest_call', 'volume_call',
        'gamma_put', 'open_interest_put', 'volume_put',
        'gex_per_strike', 'gex_total',
        'gex_precise', 'gex_total_precise'
    ]

    # Must be checked BEFORE opening in append mode (which creates the file).
    file_exists = os.path.isfile(filename_csv)

    # Simple GEX + per-row total
    total_gex, data = calculate_gex(data)
    for row in data:
        row['gex_total'] = total_gex

    # Precise GEX + per-row total (data is guaranteed non-empty here)
    underlying_price = data[0].get('underlying_price', 0)
    total_gex_precise, data = calculate_gex_precise(data, underlying_price)
    for row in data:
        row['gex_total_precise'] = total_gex_precise

    try:
        # Append to today's CSV; header + permissions only on creation.
        with open(filename_csv, mode='a', newline='') as file:
            writer = csv.DictWriter(file, fieldnames=header)
            if not file_exists:
                writer.writeheader()
                os.chmod(filename_csv, 0o664)
            writer.writerows(data)

        # Parquet "append": read the existing file, concatenate, rewrite.
        df = pd.DataFrame(data)
        df["timestamp"] = pd.to_datetime(df["timestamp"], errors="coerce")
        df["underlying_price"] = pd.to_numeric(df["underlying_price"], errors="coerce")
        df["strike"] = pd.to_numeric(df["strike"], errors="coerce")

        float_cols = [
            "bid_call", "ask_call", "bid_put", "ask_put",
            "delta_call", "delta_put",
            "gamma_call", "open_interest_call", "volume_call",
            "gamma_put", "open_interest_put", "volume_put",
            "gex_per_strike", "gex_total", "gex_precise", "gex_total_precise"
        ]
        for col in float_cols:
            df[col] = pd.to_numeric(df[col], errors="coerce")

        if os.path.exists(filename_parquet):
            df_old = pd.read_parquet(filename_parquet)
            df = pd.concat([df_old, df], ignore_index=True)

        df.to_parquet(filename_parquet, index=False, compression='zstd')

    except Exception as e:
        print(f"❌ Error al guardar CSV o PARQUET para {symbol}: {e}")


def mostrar_precio_en_consola(precios_por_simbolo):
    """Print one line: current HH:MM:SS followed by 'SYMBOL: price' pairs."""
    ahora = datetime.now().strftime('%H:%M:%S')
    cuerpo = " | ".join(
        f"{simbolo}: {precio:.3f}" for simbolo, precio in precios_por_simbolo.items()
    )
    print(f"{ahora} | {cuerpo}")


def procesar_simbolos(symbols):
    """Fetch, process and persist today's option chain for each symbol.

    A failure on one symbol is logged and does not stop the others. Ends by
    printing a one-line price summary of the symbols that succeeded.
    """
    hoy = datetime.now().strftime("%Y-%m-%d")
    precios = {}

    for simbolo in symbols:
        try:
            cadena = obtener_cadena_con_reintento(simbolo, hoy, hoy, TOTAL_STRIKES)
            filas = extract_data(cadena)
            precios[simbolo] = filas[0]['underlying_price']
            save_chain_to_cvs(filas, simbolo)
        except Exception as e:
            print(f"\nError al procesar {simbolo}: {e}")

    mostrar_precio_en_consola(precios)


# ─── Funciones: addprofits ───────────────────────────────────────────────────

def extract_vertical_strikes(idea_str):
    """Parse 'NNN/NNN PUT|CALL' strikes out of a trade-idea string.

    Returns:
        tuple[str, str, str]: (strike1, strike2, option type), or three empty
        strings when the pattern is absent.
    """
    hallazgo = re.search(r"(\d{2,5})/(\d{2,5}) (PUT|CALL)", str(idea_str))
    return hallazgo.groups() if hallazgo else ("", "", "")


def extract_condor_strikes_from_verbose(text):
    """Parse '[CALLS a/b] + [PUTS c/d]' strikes out of a condor description.

    Returns:
        tuple[str, str, str, str]: (call1, call2, put1, put2), or four empty
        strings when the pattern is absent.
    """
    patron = r"\[CALLS (\d{2,5})/(\d{2,5})\] \+ \[PUTS (\d{2,5})/(\d{2,5})\]"
    hallazgo = re.search(patron, str(text))
    return hallazgo.groups() if hallazgo else ("", "", "", "")


def procesar_por_horario_global(input_folder, output_folder, symbol_value, name_value, fecha_filtro=None):
    """Aggregate prediction CSVs into per-time-slot strike files.

    Reads every 'prediction_*.csv' in *input_folder*, extracts the strikes
    for the requested strategy and appends the rows into one CSV per
    5-minute slot (10:05-15:30) inside *output_folder*.

    Args:
        input_folder: folder holding the prediction_*.csv files.
        output_folder: destination folder (created if missing).
        symbol_value: ticker label stamped on every output row.
        name_value: strategy name, 'Vertical' or 'Iron Condor'.
        fecha_filtro: optional MM/DD/YYYY day; when given, only that day's
            rows are processed and days already present in an output file
            are skipped, making re-runs idempotent.
    """
    input_path = Path(input_folder)
    output_path = Path(output_folder)
    output_path.mkdir(parents=True, exist_ok=True)

    all_data = []

    for file in sorted(input_path.glob("prediction_*.csv")):
        df = pd.read_csv(file)
        df["timestamp"] = pd.to_datetime(df["timestamp"])
        df["Day"] = df["timestamp"].dt.strftime("%m/%d/%Y")
        df["Hour"] = df["timestamp"].dt.strftime("%H:%M")
        df["HourFile"] = df["timestamp"].dt.strftime("%H%M")
        df["Symbol"] = symbol_value
        df["Raw"] = ""

        if fecha_filtro:
            df = df[df["Day"] == fecha_filtro]
            if df.empty:
                continue

        if name_value == "Vertical":
            df["Trade"] = df["IDEA"]
            # Keep only rows whose idea actually contains a vertical spread.
            vertical_mask = df["Trade"].astype(str).str.contains(
                r"\d{2,5}/\d{2,5} (?:PUT|CALL)", regex=True, na=False
            )
            df = df[vertical_mask].copy()
            df[["Strike1", "Strike2", "Option_Type"]] = df["Trade"].apply(
                lambda x: pd.Series(extract_vertical_strikes(x))
            )
            df["Name"] = "Vertical"
            final_df = df[["Day", "Hour", "Symbol", "Name", "Raw", "Trade",
                           "Strike1", "Strike2", "Option_Type", "score_30min",
                           "movimiento_esperado", "HourFile"]]

        elif name_value == "Iron Condor":
            df["Trade"] = df["IDEA_IC"]
            df = df[df["Trade"].notna()].copy()

            # Rows are only kept when all four condor strikes parse cleanly.
            condor_rows = []
            for _, row in df.iterrows():
                trade_str = str(row["Trade"])
                strikes = extract_condor_strikes_from_verbose(trade_str)
                if all(strikes):
                    condor_rows.append({
                        "Day": row["Day"],
                        "Hour": row["Hour"],
                        "Symbol": row["Symbol"],
                        "Name": "Iron Condor",
                        "Raw": row["Raw"],
                        "Trade": trade_str,
                        "Call_Strike1": strikes[0],
                        "Call_Strike2": strikes[1],
                        "Put_Strike1": strikes[2],
                        "Put_Strike2": strikes[3],
                        "score_30min": row["score_30min"],
                        "movimiento_esperado": row["movimiento_esperado"],
                        "HourFile": row["HourFile"]
                    })

            final_df = pd.DataFrame(condor_rows)

        else:
            print(f"[ERROR] Estrategia no reconocida: {name_value}")
            return

        if not final_df.empty:
            all_data.append(final_df)

    if not all_data:
        print(f"[INFO] No se encontraron datos válidos para {symbol_value} - {name_value}.")
        return

    full_df = pd.concat(all_data, ignore_index=True)
    horarios_validos = pd.date_range("10:05", "15:30", freq="5min").strftime("%H%M").tolist()

    for h in horarios_validos:
        group = full_df[full_df["HourFile"] == h]
        if not group.empty:
            filename = f"{symbol_value}_{name_value.replace(' ', '')}_strikes_{h}.csv"
            # Fix: reuse output_path instead of re-wrapping output_folder.
            filepath = output_path / filename
            new_data = group.drop(columns=["HourFile"])

            if fecha_filtro and filepath.exists():
                existing_df = pd.read_csv(filepath)
                if fecha_filtro in existing_df["Day"].values:
                    # Fix: the log messages printed the literal placeholder
                    # "(unknown)"; report the actual file instead.
                    print(f"[INFO] Fecha {fecha_filtro} ya existe en {filepath.name}, no se agrega.")
                    continue
                combined_df = pd.concat([existing_df, new_data], ignore_index=True)
                combined_df.to_csv(filepath, index=False)
                print(f"Actualizado: {filepath.name} (+{len(new_data)} filas nuevas)")
            else:
                new_data.to_csv(filepath, index=False)
                print(f"Guardado: {filepath.name} ({len(new_data)} filas)")


def ejecutar_addprofits(fecha_hoy):
    """Post-close batch run, fired automatically at each session close.

    Rebuilds the per-slot strike files for every symbol and strategy, using
    *fecha_hoy* (MM/DD/YYYY, the day that just closed) as the exact filter.
    """
    simbolos = ["SPX", "QQQ", "SPY", "RUT", "XSP"]
    estrategias = ["Vertical", "Iron Condor"]

    print(f"\n{'='*50}")
    print(f"[ADDPROFITS] Iniciando procesamiento post-cierre para {fecha_hoy}")
    print(f"{'='*50}")

    base = "/var/www/html/backtestingmarket/predictor_data"
    for simbolo in simbolos:
        print(f"\n=== Procesando {simbolo} ===")

        for estrategia in estrategias:
            print(f"> Estrategia: {estrategia}")
            procesar_por_horario_global(
                f"{base}/data/{simbolo}",
                f"{base}/makekos/{simbolo}",
                simbolo, estrategia,
                fecha_filtro=fecha_hoy
            )

    print(f"\n[ADDPROFITS] Procesamiento completado.")


# ─── Helpers de calendario ───────────────────────────────────────────────────

def esperar_hasta(destino_dt):
    """Block until *destino_dt*, printing a same-line countdown.

    The log cadence scales with the remaining time so the log file is not
    flooded: every 60 s while >10 min remain, every 10 s down to the last
    minute, then every second.
    """
    while True:
        ahora = datetime.now()
        if ahora >= destino_dt:
            break
        faltan = int((destino_dt - ahora).total_seconds())
        # Tiered sleep: coarse far away, fine-grained near the target.
        pausa = 60 if faltan > 600 else 10 if faltan > 60 else 1
        horas, resto = divmod(faltan, 3600)
        minutos, segundos = divmod(resto, 60)
        print(
            f"⏳ Esperando próxima apertura en {horas:02d}:{minutos:02d}:{segundos:02d}"
            f"  (destino: {destino_dt.strftime('%Y-%m-%d %H:%M:%S')})",
            end='\r'
        )
        time.sleep(pausa)
    print()  # newline after the carriage-return countdown


def es_habil(fecha, holidays):
    """Return True when *fecha* is a Mon-Fri day not listed in *holidays*.

    *holidays* is a collection of MM/DD/YYYY strings (see get_nyse_holidays).
    """
    es_fin_de_semana = fecha.weekday() >= 5
    return not es_fin_de_semana and fecha.strftime("%m/%d/%Y") not in holidays


def proximo_habil(fecha, holidays):
    """Return the first trading day on or after *fecha* (may be *fecha* itself)."""
    candidata = fecha
    un_dia = timedelta(days=1)
    while not es_habil(candidata, holidays):
        candidata = candidata + un_dia
    return candidata


def limites_sesion(hora_actual, hora_market_open, hora_market_close, holidays):
    """Classify *hora_actual* against the trading session.

    Returns:
        tuple: (estado, apertura, cierre) where estado is "esperar" or
        "en_mercado", and apertura/cierre bound the session to wait for
        (or the one currently running).

    Rules:
        - not a business day        -> wait for the next business-day open
        - before today's open       -> wait for today's open
        - between open and close    -> in market
        - at/after today's close    -> wait for the next business-day open
    """
    hoy = hora_actual.date()

    def _ventana(dia):
        # (open, close) datetimes for a given calendar day.
        return (datetime.combine(dia, hora_market_open),
                datetime.combine(dia, hora_market_close))

    if not es_habil(hoy, holidays):
        apertura, cierre = _ventana(proximo_habil(hoy, holidays))
        return "esperar", apertura, cierre

    apertura, cierre = _ventana(hoy)
    if hora_actual < apertura:
        return "esperar", apertura, cierre
    if hora_actual < cierre:
        return "en_mercado", apertura, cierre

    apertura, cierre = _ventana(proximo_habil(hoy + timedelta(days=1), holidays))
    return "esperar", apertura, cierre


# ─── Loop principal (daemon) ─────────────────────────────────────────────────

def main():
    """
    Daemon loop that never terminates:
    - Waits until the next NYSE business-day open while the market is closed.
    - Processes option chains every 10 s while the market is open.
    - At each session close fires ejecutar_addprofits() with that exact date.
    - Then automatically goes back to waiting for the next open.
    """
    print("Daemon iniciado. Controlando aperturas/cierres del mercado...")

    # Parse the configured open/close wall-clock times once.
    hora_market_open = datetime.strptime(TIME_OPEN, "%H:%M:%S").time()
    hora_market_close = datetime.strptime(TIME_CLOSE, "%H:%M:%S").time()

    while True:
        ahora = datetime.now()
        estado, apertura, cierre = limites_sesion(ahora, hora_market_open, hora_market_close, HOLIDAYS)

        if estado == "esperar":
            print(f"🔒 Mercado cerrado. Próxima apertura: {apertura.strftime('%Y-%m-%d %H:%M:%S')}")
            esperar_hasta(apertura)
            print("🔓 Mercado abrió. Comenzando streaming...")
            continue  # re-evaluate state; the next pass should land in 'en_mercado'

        # ── Open session ────────────────────────────────────────────────────
        # Capture the date AT SESSION START so addprofits receives the exact
        # day of the session even if the loop happens to run past midnight.
        fecha_sesion = datetime.now().strftime("%m/%d/%Y")
        print("***** Mercado abierto. Iniciando streaming *****")
        print(f"***** Fecha de procesamiento: {fecha_sesion} *****")

        try:
            symbols = ["$RUT", "$XSP", "SPY", "QQQ", "$SPX"]
            while datetime.now() < cierre:
                try:
                    procesar_simbolos(symbols)
                except Exception as e:
                    print(f"\nError inesperado en ciclo de mercado: {e}")
                time.sleep(10)

        finally:
            print("\n***** Mercado cerrado (fin de jornada). *****")
            # Fire addprofits with the exact date of the session that just closed.
            ejecutar_addprofits(fecha_sesion)

        # The while loop continues: recompute limits and wait for the next open.


# ─── Entry point ─────────────────────────────────────────────────────────────

if __name__ == "__main__":
    try:
        main()
    except KeyboardInterrupt:
        # Manual stop (Ctrl+C / SIGINT): exit quietly.
        print("\nEjecución interrumpida por el usuario.")
    except Exception as e:
        # Last-resort handler so a crash leaves a trace in the nohup log.
        print(f"Error crítico: {e}")