chansung committed
Commit f558867 · verified · 1 Parent(s): 0572869

Training in progress, step 100

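"Training in progress, step 100" is the default commit message the transformers Trainer emits when it pushes an intermediate checkpoint to the Hub during training. As a minimal sketch (not the repo's actual script), a setup like the following would produce such a push every 100 steps; the output_dir, hub_model_id, and logging values are illustrative placeholders, not values taken from this repository:

```python
# Sketch: Trainer arguments that checkpoint and push to the Hub every 100 steps,
# yielding commits titled "Training in progress, step N". The Trainer also
# serializes these arguments to training_args.bin and, with report_to="tensorboard",
# writes the events.out.tfevents.* files seen in this commit.
from transformers import TrainingArguments

training_args = TrainingArguments(
    output_dir="pissa-mistral-alpaca",              # placeholder
    save_strategy="steps",
    save_steps=100,                                 # checkpoint (and push) every 100 steps
    push_to_hub=True,
    hub_model_id="chansung/pissa-mistral-alpaca",   # placeholder repo id
    report_to="tensorboard",
    logging_steps=10,                               # illustrative
)
```
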
adapter_config.json CHANGED
@@ -3,11 +3,9 @@
  "auto_mapping": null,
  "base_model_name_or_path": "mistralai/Mistral-7B-v0.3",
  "bias": "none",
- "eva_config": null,
- "exclude_modules": null,
  "fan_in_fan_out": false,
  "inference_mode": true,
- "init_lora_weights": true,
+ "init_lora_weights": "pissa",
  "layer_replication": null,
  "layers_pattern": null,
  "layers_to_transform": null,
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:9393d9a8255bc86d958b4e3c962ce0d34dfc0829e0736a4ac4432cda71038ee2
+oid sha256:e0245342cab2794608be0c29276d16faeae265c1e6fac26cde230ad1ea2de498
 size 1087456736
runs/Nov18_07-52-35_main-pissa-mistral-alpaca-0-0/events.out.tfevents.1731936026.main-pissa-mistral-alpaca-0-0.458.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0fec628ea6c98860e1c15c01497d24b6aa9bf7962ea8989df7909e68694bdc7b
+size 9844
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:6fa939fcf9cb5205d54e6e795d3b6126c918eafec7f2b6cb32ee4e531240518f
+oid sha256:044f8ef6f32487a23e45f87feef4bfef02d9d0c93a35da3dcfeb0d591235a7aa
 size 5624