|
|
import gradio as gr |
|
|
import torch |
|
|
import numpy as np |
|
|
import os |
|
|
|
|
|
|
|
|
# Optional dependency: try to import the Chronos forecasting library and
# record the failure reason so the UI can report it instead of crashing
# at import time.
try:


    from chronos import ChronosPipeline


    LIB_AVAILABLE = True


except ImportError as e:


    # Only defined on failure; read exclusively when LIB_AVAILABLE is False.
    LIB_ERROR = str(e)


    LIB_AVAILABLE = False
|
|
|
|
|
|
|
|
# Module-level handle to the loaded ChronosPipeline; stays None until
# load_model() succeeds.
pipeline = None


# Human-readable reason why model loading failed (None = no failure recorded).
MODEL_LOAD_ERROR = None
|
|
|
|
|
def load_model():
    """Load the Chronos pipeline into the module-level ``pipeline`` global.

    Never raises: any failure (missing library or load error) is recorded
    in ``MODEL_LOAD_ERROR`` so the Gradio handler can display it instead.
    """
    global pipeline, MODEL_LOAD_ERROR

    # Guard clause: the chronos package never imported — record why and stop.
    if not LIB_AVAILABLE:
        MODEL_LOAD_ERROR = f"Kütüphane Hatası: {LIB_ERROR}"
        return

    print("🚀 Chronos-2 Modeli Yükleniyor...")
    try:
        load_kwargs = {
            "device_map": "cpu",
            "dtype": torch.float32,
        }
        pipeline = ChronosPipeline.from_pretrained("amazon/chronos-2", **load_kwargs)
    except Exception as e:
        print(f"❌ Model Yükleme Hatası: {e}")
        MODEL_LOAD_ERROR = str(e)
    else:
        print("✅ Model Başarıyla Yüklendi!")
|
|
|
|
|
|
|
|
# Eagerly load the model at import time so the app is ready before serving.
load_model()
|
|
|
|
|
def predict(context_str, prediction_length):
    """Forecast future values for one or more comma-separated time series.

    Parameters
    ----------
    context_str : str
        Historical values. Values within a series are separated by commas;
        multiple series are separated by ``|`` (e.g. ``"1,2,3|4,5,6"``).
    prediction_length : int | float
        Number of future steps to forecast (Gradio "number" input, so it
        may arrive as a float; it is truncated to int).

    Returns
    -------
    str
        The median forecast value as a string, or an ``"Error: ..."``
        message. Always a string because the Gradio output is "text".
    """
    # Model never loaded (library missing or download failed) — surface why.
    if pipeline is None:
        return f"Error: {MODEL_LOAD_ERROR}"

    try:
        # Parse "1,2,3|4,5,6" into a list of 1-D float tensors.
        series_list = _parse_series(context_str)

        if not series_list:
            return "Error: Veri boş."

        # Pass a *list* of 1-D tensors: ChronosPipeline accepts this form
        # and, unlike stacking into a single tensor, it also works when the
        # series have different lengths. (The previous
        # torch.tensor(...).unsqueeze(0) produced a 3-D tensor whenever more
        # than one series was supplied, which the pipeline rejects, and
        # crashed outright on ragged lengths.)
        forecast = pipeline.predict(series_list, int(prediction_length))

        # forecast[0]: sample paths for the first series. The 0.5 quantile
        # is taken over all samples *and* timesteps, collapsing to one
        # scalar — this preserves the original single-value contract.
        future_price = forecast[0].quantile(0.5).item()
        return str(future_price)

    except Exception as e:
        # Report any parse/inference failure as text, never raise, so the
        # Gradio UI always receives a displayable string.
        return f"Error: {str(e)}"


def _parse_series(context_str):
    """Split '|'-separated series of comma-separated floats into 1-D tensors.

    Blank segments (e.g. from a trailing '|') are skipped. Non-numeric
    values raise ValueError, which the caller converts to an error message.
    """
    tensors = []
    for segment in context_str.split('|'):
        segment = segment.strip()
        if not segment:
            continue
        values = [float(x) for x in segment.split(',')]
        tensors.append(torch.tensor(values, dtype=torch.float32))
    return tensors
|
|
|
|
|
|
|
|
# Minimal Gradio UI: a text box for the series string and a number input
# for the forecast horizon, wired straight to predict().
iface = gr.Interface(fn=predict, inputs=["text", "number"], outputs="text")


# NOTE(review): launch() runs at import time — standard for hosted Spaces,
# but wrap in `if __name__ == "__main__":` if this module is ever imported.
iface.launch()