Peng committed
Commit d6956f9 · verified · 1 Parent(s): c00bdf2

Upload folder using huggingface_hub

Files changed (2):
  1. checkpoints/latest-checkpoint.pt +3 -0
  2. config.json +59 -0
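The commit message says the folder was pushed with huggingface_hub. A minimal sketch of what that upload call could look like (the repo id and local folder path are assumptions, not taken from this commit):

```python
from huggingface_hub import upload_folder

# Assumption: repo id and local folder are placeholders for this checkpoint repo.
upload_folder(
    folder_path="runs/custom-convnext+7b",  # hypothetical local run directory
    repo_id="user/custom-convnext-7b",      # hypothetical target repo
    repo_type="model",
    commit_message="Upload folder using huggingface_hub",
)
```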
checkpoints/latest-checkpoint.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:617036333bd331dcd883691d78f08e723cbd6583deaea993a672441144bc9c26
+ size 13520988270
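The .pt file is committed as a Git LFS pointer: the repository itself stores only the spec version, the SHA-256 object id, and the byte size (~13.5 GB); the actual weights live in LFS storage. A sketch of fetching the full file and checking it against the pointer's oid (the repo id is a placeholder):

```python
import hashlib
from huggingface_hub import hf_hub_download

# Assumption: placeholder repo id; hf_hub_download resolves the LFS pointer
# and returns a local path to the full checkpoint file.
path = hf_hub_download(
    repo_id="user/custom-convnext-7b",  # hypothetical
    filename="checkpoints/latest-checkpoint.pt",
)

# Verify the download against the sha256 oid recorded in the pointer file above.
h = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        h.update(chunk)
assert h.hexdigest() == "617036333bd331dcd883691d78f08e723cbd6583deaea993a672441144bc9c26"
```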
config.json ADDED
@@ -0,0 +1,59 @@
+ {
+   "dataset": {
+     "align_stage_components": [
+       "download/llava-laion-cc-sbu-558k/chat.json",
+       "download/llava-laion-cc-sbu-558k"
+     ],
+     "dataset_id": "llava-v15",
+     "dataset_root_dir": "data",
+     "finetune_stage_components": [
+       "download/llava-v1.5-instruct/llava_v1_5_mix665k.json",
+       "download/llava-v1.5-instruct"
+     ],
+     "type": "llava-v15"
+   },
+   "hf_token": ".hf_token",
+   "model": {
+     "align_epochs": 1,
+     "align_global_batch_size": 256,
+     "align_learning_rate": 0.001,
+     "align_lr_scheduler_type": "linear-warmup+cosine-decay",
+     "align_max_grad_norm": 1.0,
+     "align_max_steps": null,
+     "align_per_device_batch_size": 16,
+     "align_train_strategy": "fsdp-shard-grad-op",
+     "align_warmup_ratio": 0.03,
+     "align_weight_decay": 0.0,
+     "arch_specifier": "no-align+gelu-mlp",
+     "enable_gradient_checkpointing": true,
+     "enable_mixed_precision_training": true,
+     "finetune_epochs": 1,
+     "finetune_global_batch_size": 128,
+     "finetune_learning_rate": 2e-05,
+     "finetune_lr_scheduler_type": "linear-warmup+cosine-decay",
+     "finetune_max_grad_norm": 1.0,
+     "finetune_max_steps": null,
+     "finetune_per_device_batch_size": 16,
+     "finetune_train_strategy": "fsdp-full-shard",
+     "finetune_warmup_ratio": 0.03,
+     "finetune_weight_decay": 0.1,
+     "image_resize_strategy": "letterbox",
+     "llm_backbone_id": "vicuna-v15-7b",
+     "llm_max_length": 2048,
+     "model_id": "custom-convnext+7b",
+     "reduce_in_full_precision": false,
+     "type": "custom-convnext+7b",
+     "vision_backbone_id": "convnext-xxlarge-clip-laion2b"
+   },
+   "pretrained_checkpoint": null,
+   "run_id": "custom-convnext+7b",
+   "run_root_dir": "runs",
+   "seed": 7,
+   "stage": "finetune",
+   "trackers": [
+     "jsonl",
+     "wandb"
+   ],
+   "wandb_entity": null,
+   "wandb_project": "prismatic"
+ }
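For reference, a minimal sketch of reading this config and deriving the gradient-accumulation factor implied by its batch-size fields (the world size is an assumption; the actual training code may compute this differently):

```python
import json

# Load the training config committed above.
with open("config.json") as f:
    cfg = json.load(f)

model = cfg["model"]

# The finetune stage targets a global batch of 128 at 16 samples per device,
# so the implied accumulation factor depends on the (unstated) GPU count.
world_size = 8  # assumption: number of GPUs, not recorded in the config
accum_steps = model["finetune_global_batch_size"] // (
    model["finetune_per_device_batch_size"] * world_size
)
print(cfg["stage"], model["finetune_train_strategy"], accum_steps)
# finetune fsdp-full-shard 1
```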