Update app.py
app.py CHANGED

@@ -20,7 +20,8 @@ from transformers import LlamaForCausalLM, LlamaTokenizer
 #Alternatively, with any models:
 #base_model = "project-baize/baize-v2-7b"   #load_8bit = False (in load_tokenizer_and_model)
 #base_model = "meta-llama/Llama-2-13b"
-base_model="codellama/CodeLlama-13b-Instruct-hf"
+#base_model="codellama/CodeLlama-13b-Instruct-hf"
+base_model = "tiiuae/falcon-40b"
 #base_model = "MAGAer13/mPLUG-Owl"  #load_8bit = False (in load_tokenizer_and_model)
 #base_model = "alexkueck/li-tis-tuned-2"  #load_8bit = False (in load_tokenizer_and_model)
 #base_model = "TheBloke/airoboros-13B-HF"  #load_8bit = False (in load_tokenizer_and_model)
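The comments above point at a load_tokenizer_and_model helper and a per-model load_8bit flag, but that function is not part of this diff. The sketch below is only an assumption of how such a helper might consume base_model with Hugging Face transformers; the signature and keyword arguments are illustrative, not the app's actual code. The hunk header shows the file imports LlamaForCausalLM and LlamaTokenizer, whereas the sketch uses the Auto classes so that a non-Llama checkpoint such as Falcon also loads.

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

def load_tokenizer_and_model(base_model, load_8bit=False):
    """Hypothetical helper: pull tokenizer and model from the Hub by repo id."""
    # base_model is a Hub repo id such as "tiiuae/falcon-40b" or
    # "codellama/CodeLlama-13b-Instruct-hf" (the two values toggled in this commit).
    tokenizer = AutoTokenizer.from_pretrained(base_model)
    model = AutoModelForCausalLM.from_pretrained(
        base_model,
        load_in_8bit=load_8bit,      # the comments suggest 8-bit loading is disabled for some models
        torch_dtype=torch.float16,   # assumption: half precision to fit large checkpoints
        device_map="auto",           # assumption: spread layers across available devices
        trust_remote_code=True,      # Falcon checkpoints have shipped custom modeling code
    )
    model.eval()
    return tokenizer, model

With base_model = "tiiuae/falcon-40b" as set in this commit, a call like tokenizer, model = load_tokenizer_and_model(base_model) would download the 40B Falcon checkpoint, a considerably larger model than the 13B alternatives listed in the comments.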