vachaspathi committed on
Commit
9c8efa2
·
verified ·
1 Parent(s): c98d690

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +40 -6
app.py CHANGED
@@ -14,6 +14,8 @@ import traceback
14
  import inspect
15
  import re
16
  import logging
 
 
17
 
18
  # Setup simple logging
19
  logging.basicConfig(level=logging.INFO)
@@ -44,6 +46,12 @@ except Exception as e:
44
  "Make sure config.py exists with CLIENT_ID, CLIENT_SECRET, REFRESH_TOKEN, API_BASE, LOCAL_MODEL (or set LOCAL_MODEL=None)."
45
  )
46
 
 
 
 
 
 
 
47
  # ----------------------------
48
  # FastMCP init
49
  # ----------------------------
@@ -89,6 +97,33 @@ def _log_llm_call(confidence: Optional[float] = None):
89
 
90
  _init_analytics()
91
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
92
  # ----------------------------
93
  # DeepSeek / HF model loader
94
  # ----------------------------
@@ -99,10 +134,6 @@ LOADED_MODEL_NAME = None
99
  def init_deepseek_model():
100
  """
101
  Try to load LOCAL_MODEL via transformers.pipeline.
102
- Expected LOCAL_MODEL examples:
103
- - "deepseek-ai/deepseek-r1-7b" (may require GPU; big)
104
- - "deepseek-ai/deepseek-r1-3b" (smaller)
105
- - "deepseek-ai/deepseek-r1-1.3b" (more likely to load on moderate machines)
106
  If loading fails, try a fallback small model (distilgpt2 or flan-t5-small if seq2seq).
107
  """
108
  global LLM_PIPELINE, TOKENIZER, LOADED_MODEL_NAME
@@ -192,7 +223,8 @@ def deepseek_generate(prompt: str, max_tokens: int = 256) -> Dict[str, Any]:
192
  if isinstance(out, list) and len(out) > 0:
193
  first = out[0]
194
  if isinstance(first, dict):
195
- text = first.get("generated_text") or first.get("generated_text", "") or first.get("text") or str(first)
 
196
  else:
197
  text = str(first)
198
  else:
@@ -369,13 +401,15 @@ def deepseek_response(message: str, history: Optional[List[Tuple[str,str]]] = No
369
  prompt = f"{system_prompt}\n{history_text}\nUser: {message}\nAssistant:"
370
  gen = deepseek_generate(prompt, max_tokens=256)
371
  text = gen.get("text", "")
372
- # if text looks like JSON with a tool action, try to invoke
373
  payload = text.strip()
374
  if payload.startswith("{") or payload.startswith("["):
375
  try:
376
  parsed = json.loads(payload)
377
  if isinstance(parsed, dict) and "tool" in parsed:
378
  tool_name = parsed.get("tool"); args = parsed.get("args", {})
 
 
379
  if tool_name in globals() and callable(globals()[tool_name]):
380
  try:
381
  out = globals()[tool_name](**args) if isinstance(args, dict) else globals()[tool_name](args)
 
14
  import inspect
15
  import re
16
  import logging
17
+ import base64
18
+ import tempfile
19
 
20
  # Setup simple logging
21
  logging.basicConfig(level=logging.INFO)
 
46
  "Make sure config.py exists with CLIENT_ID, CLIENT_SECRET, REFRESH_TOKEN, API_BASE, LOCAL_MODEL (or set LOCAL_MODEL=None)."
47
  )
48
 
49
# Safe defaults for optional config values (prevents NameError)
# If config.py did not define these names, bind them to None so later
# code can test them instead of crashing on an undefined global.
_g = globals()
LOCAL_TOKENIZER = _g.get("LOCAL_TOKENIZER")
OPENROUTER_BASE_URL = _g.get("OPENROUTER_BASE_URL")
OPENROUTER_API_KEY = _g.get("OPENROUTER_API_KEY")
OPENROUTER_MODEL = _g.get("OPENROUTER_MODEL")
54
+
55
  # ----------------------------
56
  # FastMCP init
57
  # ----------------------------
 
97
 
98
  _init_analytics()
99
 
100
+ # ----------------------------
101
+ # Helper: normalize local file_path args
102
+ # ----------------------------
103
+ def _normalize_local_path_args(args: Any) -> Any:
104
+ """
105
+ If args is a dict containing 'file_path' that points to a local file (starting with /mnt/data/),
106
+ add file_url: file://<path> and file_b64: <base64> (optional) so tools can use either.
107
+ Returns modified args (same object or new).
108
+ """
109
+ if not isinstance(args, dict):
110
+ return args
111
+ fp = args.get("file_path")
112
+ if isinstance(fp, str) and fp.startswith("/mnt/data/") and os.path.exists(fp):
113
+ try:
114
+ args["file_url"] = f"file://{fp}"
115
+ # include base64 payload for tools that prefer raw bytes
116
+ try:
117
+ with open(fp, "rb") as f:
118
+ args["file_b64"] = base64.b64encode(f.read()).decode("utf-8")
119
+ except Exception as e:
120
+ # If reading fails, still include file_url
121
+ logger.warning("Could not read file for base64 embedding: %s", e)
122
+ args.pop("file_b64", None)
123
+ except Exception as e:
124
+ logger.warning("Normalization error for file_path %s: %s", fp, e)
125
+ return args
126
+
127
  # ----------------------------
128
  # DeepSeek / HF model loader
129
  # ----------------------------
 
134
  def init_deepseek_model():
135
  """
136
  Try to load LOCAL_MODEL via transformers.pipeline.
 
 
 
 
137
  If loading fails, try a fallback small model (distilgpt2 or flan-t5-small if seq2seq).
138
  """
139
  global LLM_PIPELINE, TOKENIZER, LOADED_MODEL_NAME
 
223
  if isinstance(out, list) and len(out) > 0:
224
  first = out[0]
225
  if isinstance(first, dict):
226
+ # many HF pipelines use 'generated_text' or 'text'
227
+ text = first.get("generated_text") or first.get("text") or str(first)
228
  else:
229
  text = str(first)
230
  else:
 
401
  prompt = f"{system_prompt}\n{history_text}\nUser: {message}\nAssistant:"
402
  gen = deepseek_generate(prompt, max_tokens=256)
403
  text = gen.get("text", "")
404
+ # if text looks like JSON with a tool action, try to invoke (normalize args first)
405
  payload = text.strip()
406
  if payload.startswith("{") or payload.startswith("["):
407
  try:
408
  parsed = json.loads(payload)
409
  if isinstance(parsed, dict) and "tool" in parsed:
410
  tool_name = parsed.get("tool"); args = parsed.get("args", {})
411
+ # Normalize local path args if present
412
+ args = _normalize_local_path_args(args) if isinstance(args, dict) else args
413
  if tool_name in globals() and callable(globals()[tool_name]):
414
  try:
415
  out = globals()[tool_name](**args) if isinstance(args, dict) else globals()[tool_name](args)