Spaces:
Sleeping
Sleeping
Tuchuanhuhuhu
committed on
Commit
·
c6d16d4
1
Parent(s):
c26dfd8
在UI中加入StableLM
Browse files- modules/models/base_model.py +3 -0
- modules/models/models.py +3 -0
- modules/presets.py +2 -1
modules/models/base_model.py
CHANGED
|
@@ -30,6 +30,7 @@ class ModelType(Enum):
|
|
| 30 |
ChatGLM = 1
|
| 31 |
LLaMA = 2
|
| 32 |
XMChat = 3
|
|
|
|
| 33 |
|
| 34 |
@classmethod
|
| 35 |
def get_type(cls, model_name: str):
|
|
@@ -43,6 +44,8 @@ class ModelType(Enum):
|
|
| 43 |
model_type = ModelType.LLaMA
|
| 44 |
elif "xmchat" in model_name_lower:
|
| 45 |
model_type = ModelType.XMChat
|
|
|
|
|
|
|
| 46 |
else:
|
| 47 |
model_type = ModelType.Unknown
|
| 48 |
return model_type
|
|
|
|
| 30 |
ChatGLM = 1
|
| 31 |
LLaMA = 2
|
| 32 |
XMChat = 3
|
| 33 |
+
StableLM = 4
|
| 34 |
|
| 35 |
@classmethod
|
| 36 |
def get_type(cls, model_name: str):
|
|
|
|
| 44 |
model_type = ModelType.LLaMA
|
| 45 |
elif "xmchat" in model_name_lower:
|
| 46 |
model_type = ModelType.XMChat
|
| 47 |
+
elif "stablelm" in model_name_lower:
|
| 48 |
+
model_type = ModelType.StableLM
|
| 49 |
else:
|
| 50 |
model_type = ModelType.Unknown
|
| 51 |
return model_type
|
modules/models/models.py
CHANGED
|
@@ -577,6 +577,9 @@ def get_model(
|
|
| 577 |
if os.environ.get("XMCHAT_API_KEY") != "":
|
| 578 |
access_key = os.environ.get("XMCHAT_API_KEY")
|
| 579 |
model = XMChat(api_key=access_key)
|
|
|
|
|
|
|
|
|
|
| 580 |
elif model_type == ModelType.Unknown:
|
| 581 |
raise ValueError(f"未知模型: {model_name}")
|
| 582 |
logging.info(msg)
|
|
|
|
| 577 |
if os.environ.get("XMCHAT_API_KEY") != "":
|
| 578 |
access_key = os.environ.get("XMCHAT_API_KEY")
|
| 579 |
model = XMChat(api_key=access_key)
|
| 580 |
+
elif model_type == ModelType.StableLM:
|
| 581 |
+
from StableLM import StableLM_Client
|
| 582 |
+
model = StableLM_Client(model_name)
|
| 583 |
elif model_type == ModelType.Unknown:
|
| 584 |
raise ValueError(f"未知模型: {model_name}")
|
| 585 |
logging.info(msg)
|
modules/presets.py
CHANGED
|
@@ -77,7 +77,8 @@ LOCAL_MODELS = [
|
|
| 77 |
"llama-7b-hf",
|
| 78 |
"llama-13b-hf",
|
| 79 |
"llama-30b-hf",
|
| 80 |
-
"llama-65b-hf"
|
|
|
|
| 81 |
]
|
| 82 |
|
| 83 |
if os.environ.get('HIDE_LOCAL_MODELS', 'false') == 'true':
|
|
|
|
| 77 |
"llama-7b-hf",
|
| 78 |
"llama-13b-hf",
|
| 79 |
"llama-30b-hf",
|
| 80 |
+
"llama-65b-hf",
|
| 81 |
+
"StableLM"
|
| 82 |
]
|
| 83 |
|
| 84 |
if os.environ.get('HIDE_LOCAL_MODELS', 'false') == 'true':
|