Update app.py

app.py CHANGED

@@ -38,6 +38,18 @@ models = ["Helsinki-NLP", "QUICKMT", "Argos", "Google", "HPLT", "HPLT-OPUS",
 ]
 DEFAULTS = [langs[0], langs[1], models[0]]
 
+def timer(func):
+    from time import time
+    def wrapper(*args, **kwargs):
+        start_time = time()
+        translated_text, message_text = func(*args, **kwargs)
+        end_time = time()
+        execution_time = end_time - start_time
+        # print(f"Function {func.__name__!r} executed in {execution_time:.2f} seconds.")
+        message_text = f'Executed in {execution_time:.2f} seconds! {message_text}'
+        return translated_text, message_text
+    return wrapper
+
 def model_to_cuda(model):
     # Move the model to GPU if available
     if torch.cuda.is_available():

@@ -75,6 +87,7 @@ class Translators:
         self.max_new_tokens = 512
 
     def google(self):
+        self.input_text = " ".join(self.input_text.split())
         url = os.environ['GCLIENT'] + f'sl={self.sl}&tl={self.tl}&q={self.input_text}'
         response = httpx.get(url)
         return response.json()[0][0][0]
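
The single added line in google() normalizes the input before it is interpolated into the query string: str.split() with no arguments splits on any run of whitespace (spaces, tabs, newlines) and drops empty pieces, so rejoining with a single space collapses multi-line or irregularly spaced input into one clean line. A quick standalone illustration of just that expression (the sample text is made up):

    input_text = "Hello   world,\n\nhow  are\tyou?"
    normalized = " ".join(input_text.split())
    print(normalized)  # -> "Hello world, how are you?"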

@@ -518,6 +531,7 @@ class Translators:
         response = error
         return translated_text, message_text
 
+@timer
 @spaces.GPU
 def translate_text(input_text: str, s_language: str, t_language: str, model_name: str) -> tuple[str, str]:
     """
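
Because @timer is placed above @spaces.GPU, it wraps the already-decorated function, so the reported time covers the whole call (including GPU dispatch) and the (translated_text, message_text) tuple shape is preserved. A minimal standalone sketch of the same wrapping behaviour, with a made-up fake_translate standing in for translate_text and no Space runtime:

    from time import time, sleep

    def timer(func):
        # Same shape as the decorator added in this commit: it forwards the
        # (translated_text, message_text) tuple and prepends the elapsed time.
        def wrapper(*args, **kwargs):
            start_time = time()
            translated_text, message_text = func(*args, **kwargs)
            execution_time = time() - start_time
            return translated_text, f'Executed in {execution_time:.2f} seconds! {message_text}'
        return wrapper

    @timer
    def fake_translate(input_text: str) -> tuple[str, str]:
        sleep(0.3)  # stand-in for model inference / the GPU call
        return input_text.upper(), "Translation complete."

    print(fake_translate("hola"))
    # -> ('HOLA', 'Executed in 0.30 seconds! Translation complete.')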