Fix running condition of translate_llama2 (#16)
- Fix running condition of translate_llama2 (1429ca3ec2e6e904152157053e113fefc30248d5)
Co-authored-by: Changsoo Kwak <[email protected]>
src/utils.py +1 -1
@@ -17,7 +17,7 @@ def translate_llama2(text):
 
 def get_model(model_name: str, library: str, access_token: str):
     "Finds and grabs model from the Hub, and initializes on `meta`"
-    if "meta-llama" in model_name:
+    if "meta-llama" in model_name and "Llama-2" in model_name:
         model_name = translate_llama2(model_name)
     if library == "auto":
         library = None
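
For context, a minimal sketch of what the narrowed guard changes, assuming `translate_llama2` rewrites a Llama 2 repo id into a converted variant. The stub below is hypothetical and only illustrates the condition, not the Space's actual code:

# Hypothetical stand-in for translate_llama2; assumed here to map a
# Llama 2 repo id to a converted "-hf" variant for illustration only.
def translate_llama2(model_name: str) -> str:
    return model_name if model_name.endswith("-hf") else f"{model_name}-hf"

def old_guard(model_name: str) -> bool:
    # Before the fix: every repo under the meta-llama org was rewritten.
    return "meta-llama" in model_name

def new_guard(model_name: str) -> bool:
    # After the fix: only Llama 2 checkpoints go through translate_llama2.
    return "meta-llama" in model_name and "Llama-2" in model_name

for name in ("meta-llama/Llama-2-7b", "meta-llama/Meta-Llama-3-8B"):
    print(name, old_guard(name), new_guard(name))
# old_guard would also send the Llama 3 id through the Llama-2-specific
# rename; new_guard leaves non-Llama-2 meta-llama checkpoints untouched.

In short, the narrowed condition keeps other meta-llama checkpoints from being passed through a rename that only applies to Llama 2 names.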