lake-synctv committed on
Commit
d9e5d90
·
verified ·
1 Parent(s): 6a7b6c3

Upload folder using huggingface_hub

Browse files
Files changed (4) hide show
  1. .gitattributes +35 -0
  2. Dockerfile +19 -7
  3. README.md +1 -1
  4. sync_data.sh +127 -0
.gitattributes ADDED
@@ -0,0 +1,35 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ *.7z filter=lfs diff=lfs merge=lfs -text
2
+ *.arrow filter=lfs diff=lfs merge=lfs -text
3
+ *.bin filter=lfs diff=lfs merge=lfs -text
4
+ *.bz2 filter=lfs diff=lfs merge=lfs -text
5
+ *.ckpt filter=lfs diff=lfs merge=lfs -text
6
+ *.ftz filter=lfs diff=lfs merge=lfs -text
7
+ *.gz filter=lfs diff=lfs merge=lfs -text
8
+ *.h5 filter=lfs diff=lfs merge=lfs -text
9
+ *.joblib filter=lfs diff=lfs merge=lfs -text
10
+ *.lfs.* filter=lfs diff=lfs merge=lfs -text
11
+ *.mlmodel filter=lfs diff=lfs merge=lfs -text
12
+ *.model filter=lfs diff=lfs merge=lfs -text
13
+ *.msgpack filter=lfs diff=lfs merge=lfs -text
14
+ *.npy filter=lfs diff=lfs merge=lfs -text
15
+ *.npz filter=lfs diff=lfs merge=lfs -text
16
+ *.onnx filter=lfs diff=lfs merge=lfs -text
17
+ *.ot filter=lfs diff=lfs merge=lfs -text
18
+ *.parquet filter=lfs diff=lfs merge=lfs -text
19
+ *.pb filter=lfs diff=lfs merge=lfs -text
20
+ *.pickle filter=lfs diff=lfs merge=lfs -text
21
+ *.pkl filter=lfs diff=lfs merge=lfs -text
22
+ *.pt filter=lfs diff=lfs merge=lfs -text
23
+ *.pth filter=lfs diff=lfs merge=lfs -text
24
+ *.rar filter=lfs diff=lfs merge=lfs -text
25
+ *.safetensors filter=lfs diff=lfs merge=lfs -text
26
+ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
27
+ *.tar.* filter=lfs diff=lfs merge=lfs -text
28
+ *.tar filter=lfs diff=lfs merge=lfs -text
29
+ *.tflite filter=lfs diff=lfs merge=lfs -text
30
+ *.tgz filter=lfs diff=lfs merge=lfs -text
31
+ *.wasm filter=lfs diff=lfs merge=lfs -text
32
+ *.xz filter=lfs diff=lfs merge=lfs -text
33
+ *.zip filter=lfs diff=lfs merge=lfs -text
34
+ *.zst filter=lfs diff=lfs merge=lfs -text
35
+ *tfevents* filter=lfs diff=lfs merge=lfs -text
Dockerfile CHANGED
@@ -1,10 +1,22 @@
1
- FROM ghcr.io/yanqingyan-zuzhi/docker-heroku-synctv-hf:latest
2
 
3
- # 设置 HuggingFace Spaces 需要的端口
4
- ENV PORT=7860
5
 
6
- # 设置时区
7
- ENV TZ=Asia/Shanghai
8
 
9
- # 直接运行主脚本,不做任何权限修改
10
- CMD ["/app/sync_data.sh"]
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ FROM alpine:latest
2
 
3
+ WORKDIR /app
 
4
 
5
+ RUN apk --no-cache add ca-certificates wget python3 py3-pip curl jq tar bash
 
6
 
7
+ RUN wget https://github.com/synctv-org/synctv/releases/latest/download/synctv-linux-amd64 -O synctv
8
+
9
+ RUN chmod +x synctv
10
+
11
+ RUN mkdir -p /app/data
12
+ RUN chmod -R 777 /app/data
13
+
14
+ ENV VIRTUAL_ENV=/opt/venv
15
+ RUN python3 -m venv $VIRTUAL_ENV
16
+ ENV PATH="$VIRTUAL_ENV/bin:$PATH"
17
+ RUN pip install --no-cache-dir huggingface_hub
18
+
19
+ COPY sync_data.sh /app/
20
+ RUN chmod +x /app/sync_data.sh
21
+
22
+ CMD ["/bin/bash", "-c", "/app/sync_data.sh"]
README.md CHANGED
@@ -4,7 +4,7 @@ emoji: "🚀"
4
  colorFrom: blue
5
  colorTo: green
6
  sdk: docker
7
- app_port: 7860
8
  ---
9
 
10
  ### 🚀 一键部署
 
4
  colorFrom: blue
5
  colorTo: green
6
  sdk: docker
7
+ app_port: 8080
8
  ---
9
 
10
  ### 🚀 一键部署
sync_data.sh ADDED
@@ -0,0 +1,127 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/bin/bash
2
+
3
+ # 检查环境变量
4
+ if [[ -z "$HF_TOKEN" ]] || [[ -z "$DATASET_ID" ]]; then
5
+ echo "Starting without backup functionality - missing HF_TOKEN or DATASET_ID"
6
+ exec ./synctv server --data-dir /app/data
7
+ exit 0
8
+ fi
9
+
10
+ # 激活虚拟环境
11
+ source /opt/venv/bin/activate
12
+
13
+ # 上传备份
14
+ upload_backup() {
15
+ file_path="$1"
16
+ file_name="$2"
17
+ token="$HF_TOKEN"
18
+ repo_id="$DATASET_ID"
19
+
20
+ python3 -c "
21
+ from huggingface_hub import HfApi
22
+ import sys
23
+ import os
24
+ def manage_backups(api, repo_id, max_files=10):
25
+ files = api.list_repo_files(repo_id=repo_id, repo_type='dataset')
26
+ backup_files = [f for f in files if f.startswith('synctv_backup_') and f.endswith('.tar.gz')]
27
+ backup_files.sort()
28
+
29
+ if len(backup_files) >= max_files:
30
+ files_to_delete = backup_files[:(len(backup_files) - max_files + 1)]
31
+ for file_to_delete in files_to_delete:
32
+ try:
33
+ api.delete_file(path_in_repo=file_to_delete, repo_id=repo_id, repo_type='dataset')
34
+ print(f'Deleted old backup: {file_to_delete}')
35
+ except Exception as e:
36
+ print(f'Error deleting {file_to_delete}: {str(e)}')
37
+ api = HfApi(token='$token')
38
+ try:
39
+ api.upload_file(
40
+ path_or_fileobj='$file_path',
41
+ path_in_repo='$file_name',
42
+ repo_id='$repo_id',
43
+ repo_type='dataset'
44
+ )
45
+ print(f'Successfully uploaded $file_name')
46
+
47
+ manage_backups(api, '$repo_id')
48
+ except Exception as e:
49
+ print(f'Error uploading file: {str(e)}')
50
+ "
51
+ }
52
+
53
+ # 下载最新备份
54
+ download_latest_backup() {
55
+ token="$HF_TOKEN"
56
+ repo_id="$DATASET_ID"
57
+
58
+ python3 -c "
59
+ from huggingface_hub import HfApi
60
+ import sys
61
+ import os
62
+ import tarfile
63
+ import tempfile
64
+ api = HfApi(token='$token')
65
+ try:
66
+ files = api.list_repo_files(repo_id='$repo_id', repo_type='dataset')
67
+ backup_files = [f for f in files if f.startswith('synctv_backup_') and f.endswith('.tar.gz')]
68
+
69
+ if not backup_files:
70
+ print('No backup files found')
71
+ sys.exit()
72
+
73
+ latest_backup = sorted(backup_files)[-1]
74
+
75
+ with tempfile.TemporaryDirectory() as temp_dir:
76
+ filepath = api.hf_hub_download(
77
+ repo_id='$repo_id',
78
+ filename=latest_backup,
79
+ repo_type='dataset',
80
+ local_dir=temp_dir
81
+ )
82
+
83
+ if filepath and os.path.exists(filepath):
84
+ with tarfile.open(filepath, 'r:gz') as tar:
85
+ tar.extractall('/app/data')
86
+ print(f'Successfully restored backup from {latest_backup}')
87
+
88
+ except Exception as e:
89
+ print(f'Error downloading backup: {str(e)}')
90
+ "
91
+ }
92
+
93
+ # 首次启动时下载最新备份
94
+ echo "Downloading latest backup from HuggingFace..."
95
+ download_latest_backup
96
+
97
+ # 同步函数
98
+ sync_data() {
99
+ while true; do
100
+ echo "Starting sync process at $(date)"
101
+
102
+ if [ -d /app/data ]; then
103
+ timestamp=$(date +%Y%m%d_%H%M%S)
104
+ backup_file="synctv_backup_${timestamp}.tar.gz"
105
+
106
+ # 压缩数据目录
107
+ tar -czf "/tmp/${backup_file}" -C /app/data .
108
+
109
+ echo "Uploading backup to HuggingFace..."
110
+ upload_backup "/tmp/${backup_file}" "${backup_file}"
111
+
112
+ rm -f "/tmp/${backup_file}"
113
+ else
114
+ echo "Data directory does not exist yet, waiting for next sync..."
115
+ fi
116
+
117
+ SYNC_INTERVAL=${SYNC_INTERVAL:-3600}
118
+ echo "Next sync in ${SYNC_INTERVAL} seconds..."
119
+ sleep $SYNC_INTERVAL
120
+ done
121
+ }
122
+
123
+ # 后台启动同步进程
124
+ sync_data &
125
+
126
+ # 启动 SyncTV
127
+ exec ./synctv server --data-dir /app/data