Commit 208b03b
Parent(s): def7d81

Make example script consistent

Signed-off-by: GiulioZizzo <[email protected]>
granite-3.3-8b-instruct-lora-system-prompt-leakage/README.md CHANGED
@@ -46,7 +46,7 @@ from peft import PeftModel
 INVOCATION_PROMPT = "<|start_of_role|>prompt_leakage<|end_of_role|>"

 BASE_NAME = "ibm-granite/granite-3.3-8b-instruct"
-LORA_NAME = "
+LORA_NAME = "intrinsics/granite-3.3-8b-instruct-lora-system-prompt-leakage" # LoRA download location. We assume the directory shown in the top level README.md example for the lib was followed.
 device=torch.device('cuda' if torch.cuda.is_available() else 'cpu')

 # Load model
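
For context, below is a minimal sketch of how the README example presumably uses these variables to load the base model and attach the LoRA adapter. It is not part of the commit; the tokenizer/model loading calls are assumptions based on the standard transformers and peft APIs suggested by the hunk header (from peft import PeftModel), and the variable values mirror the diff above.

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer
from peft import PeftModel

INVOCATION_PROMPT = "<|start_of_role|>prompt_leakage<|end_of_role|>"
BASE_NAME = "ibm-granite/granite-3.3-8b-instruct"
LORA_NAME = "intrinsics/granite-3.3-8b-instruct-lora-system-prompt-leakage"  # assumed local LoRA download location, per the diff
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')

# Load model: tokenizer and base weights first, then the LoRA adapter on top
tokenizer = AutoTokenizer.from_pretrained(BASE_NAME)
base_model = AutoModelForCausalLM.from_pretrained(BASE_NAME).to(device)
model = PeftModel.from_pretrained(base_model, LORA_NAME)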