#!/bin/bash
# Provenance (metadata scraped from the hosting page, kept as a comment so the
# file stays valid shell): Aduc-sdr-2_5 / start.sh — author Carlexxx,
# commit fb56537 ("feat: ✨ aBINC 2.2"), 6.19 kB.
#!/bin/bash
# =============================================================================
# START.SH - Complete AI Video Suite (Production Ready)
# =============================================================================
set -euo pipefail

# Fail fast with a clear message if the interpreter is missing (previously a
# bare `set -e` exit with no explanation).
command -v python3 >/dev/null 2>&1 || { echo "ERROR: python3 is required but not on PATH" >&2; exit 1; }

# Python runtime sanity check. The embedded snippet is effectively a
# placeholder: the pip-upgrade line is intentionally commented out, so today
# this only proves the stdlib imports cleanly.
# (BUGFIX: the original imported `os` twice.)
python3 -c "
import os
import subprocess
import tempfile
import sys
import shutil
from pathlib import Path
#os.system(\"pip install --upgrade --pre --extra-index-url https://download.pytorch.org/whl/nightly/cu12 'torch<2.9' spaces\")
"
# =============================================================================
# GLOBAL VARIABLE INITIALIZATION
# =============================================================================
# Seed every hardware global read later in the script so `set -u` can never
# trip on an unbound variable, regardless of which detection branches run.
export GPU_COUNT=0
export GPU_MODEL="Unknown"
export GPU_MEMORY=0
export GPU_ARCH="CPU"
# BUGFIX: COMPUTE_CAP is interpolated in the hardware summary log, but the
# no-GPU branch of detect_hardware never sets it — without this default the
# script aborted under `set -u` on any host lacking nvidia-smi.
export COMPUTE_CAP="unknown"
export HAS_GPU=false
export IS_OPTIMIZED_GPU=false
export CPU_CORES=1
export TOTAL_RAM="0G"
export AVAILABLE_RAM="0G"
export DISK_SPACE="unknown"
# TERM variable fix (some container images launch without a TERM set)
export TERM="${TERM:-xterm-256color}"
# Installer directory (modules sourced by load_installer_modules)
INSTALLER_DIR="/app/installer"
# =============================================================================
# BANNER E INFORMAÇÕES
# =============================================================================
print_banner() {
  # Best-effort screen reset; `clear` can fail on dumb/absent terminals.
  clear || true
  # Static banner emitted verbatim (quoted heredoc: no expansion).
  cat <<'BANNER'
==================================================================
 🚀 Aduc-sdr firmware
 ⚡ Multi-GPU Video Generation Suite
 🎬 LTX FP8 | Q8 Kernels | SeedVR | Wan2.2 | VINCIE | MMAudio
==================================================================

BANNER
}
#######################################
# Probe CPU, RAM, GPU and disk, exporting the results as globals
# (CPU_CORES, CPU_MODEL, TOTAL_RAM, AVAILABLE_RAM, GPU_COUNT, GPU_MODEL,
#  GPU_MEMORY, GPU_ARCH, COMPUTE_CAP, HAS_GPU, IS_OPTIMIZED_GPU, DISK_SPACE).
# Requires the log_info/log_success helpers to be defined.
#######################################
detect_hardware() {
  log_info "🔍 Detectando hardware do sistema..."

  # --- CPU info (exported for consumers in other modules) ---
  export CPU_CORES=$(nproc)
  # `grep -m1` stops at the first match (no `| head` needed); the fallback
  # covers /proc/cpuinfo layouts without a "model name" line (e.g. some ARM).
  export CPU_MODEL=$(grep -m1 "model name" /proc/cpuinfo | cut -d: -f2 | xargs || echo "Unknown")

  # --- Memory info ---
  export TOTAL_RAM=$(free -h | awk '/^Mem:/ {print $2}')
  export AVAILABLE_RAM=$(free -h | awk '/^Mem:/ {print $7}')

  # --- GPU detection ---
  if command -v nvidia-smi >/dev/null 2>&1; then
    export GPU_COUNT=$(nvidia-smi --list-gpus | wc -l)
    export GPU_MODEL=$(nvidia-smi --query-gpu=name --format=csv,noheader,nounits | head -1)
    export GPU_MEMORY=$(nvidia-smi --query-gpu=memory.total --format=csv,noheader,nounits | head -1)

    # Map the marketing name to an architecture / compute capability.
    if echo "$GPU_MODEL" | grep -q "L40S"; then
      export GPU_ARCH="ADA_LOVELACE"
      export COMPUTE_CAP="8.9"
      export IS_OPTIMIZED_GPU=true
    elif echo "$GPU_MODEL" | grep -q "A100"; then
      export GPU_ARCH="AMPERE"
      export COMPUTE_CAP="8.0"
      export IS_OPTIMIZED_GPU=true
    elif echo "$GPU_MODEL" | grep -q "H100"; then
      export GPU_ARCH="HOPPER"
      export COMPUTE_CAP="9.0"
      export IS_OPTIMIZED_GPU=true
    else
      export GPU_ARCH="OTHER"
      export COMPUTE_CAP="unknown"
      export IS_OPTIMIZED_GPU=false
    fi
    export HAS_GPU=true
  else
    export GPU_COUNT=0
    export GPU_MODEL="None"
    export GPU_MEMORY=0
    export GPU_ARCH="CPU"
    # BUGFIX: COMPUTE_CAP was not set on this branch, so the summary log
    # below aborted the script with an unbound-variable error under `set -u`.
    export COMPUTE_CAP="unknown"
    export HAS_GPU=false
    export IS_OPTIMIZED_GPU=false
  fi

  # --- Storage info ---
  export DISK_SPACE=$(df -h /app 2>/dev/null | awk 'NR==2 {print $4}' || echo "unknown")

  log_info "Hardware detectado:"
  log_info " 🖥️ CPU: $CPU_MODEL ($CPU_CORES cores)"
  log_info " 💾 RAM: $AVAILABLE_RAM / $TOTAL_RAM disponível"
  log_info " 🎮 GPU: $GPU_MODEL x$GPU_COUNT"
  log_info " 🏗️ Arquitetura: $GPU_ARCH (CC: $COMPUTE_CAP)"
  log_info " 💿 Disco: $DISK_SPACE disponível"

  if [[ "$IS_OPTIMIZED_GPU" == "true" ]]; then
    log_success "GPU otimizada detectada - performance máxima disponível!"
  fi
}
#######################################
# Source the helper modules shipped under $INSTALLER_DIR. A missing module
# is reported via log_warning but is non-fatal, so a partial install can
# still boot. Requires log_info/log_debug/log_warning/log_success.
#######################################
load_installer_modules() {
  log_info "📦 Carregando módulos do installer..."

  local candidate
  for candidate in utils.sh setup_env.sh check_gpu.sh multi_gpu_config.sh; do
    if [[ -f "$INSTALLER_DIR/$candidate" ]]; then
      # shellcheck source=/dev/null
      source "$INSTALLER_DIR/$candidate"
      log_debug "Módulo carregado: $candidate"
    else
      log_warning "Módulo não encontrado: $INSTALLER_DIR/$candidate"
    fi
  done

  log_success "Módulos do installer carregados"
}
#######################################
# Build app.py's command line from the suite flags, export the AI_SUITE_*
# environment, print the launch summary, then exec the (blocking) server.
# Reads: LISTEN SHARE PORT HOST MULTI_GPU NUM_GPUS DEBUG_MODE PROFILE
#        OPTIMIZE GPU_COUNT GPU_MODEL (all optional; sane defaults applied).
#######################################
start_application() {
  log_info "🚀 Iniciando Complete AI Video Suite..."

  # Default every flag consumed here so `set -u` cannot abort when an
  # installer module failed to export it. Defaults mirror the previous
  # hard-coded launch line (`--listen --port 7860`).
  local listen="${LISTEN:-true}"
  local share="${SHARE:-false}"
  local port="${PORT:-7860}"
  local multi_gpu="${MULTI_GPU:-false}"
  local num_gpus="${NUM_GPUS:-${GPU_COUNT:-0}}"
  local debug_mode="${DEBUG_MODE:-false}"
  local profile="${PROFILE:-false}"

  # Assemble the CLI for app.py.
  local app_args=()
  if [[ "$listen" == "true" ]]; then
    app_args+=("--listen")
  fi
  if [[ "$share" == "true" ]]; then
    app_args+=("--share")
  fi
  app_args+=("--port" "$port")
  if [[ "$multi_gpu" == "true" ]]; then
    app_args+=("--multi-gpu" "--gpus" "$num_gpus")
  fi
  if [[ "$debug_mode" == "true" ]]; then
    app_args+=("--debug")
  fi
  if [[ "$profile" == "true" ]]; then
    app_args+=("--profile")
  fi

  # Environment flags read by app.py.
  export AI_SUITE_MULTI_GPU="$multi_gpu"
  export AI_SUITE_NUM_GPUS="$num_gpus"
  export AI_SUITE_DEBUG="$debug_mode"
  export AI_SUITE_PROFILE="$profile"

  # Final summary — printed BEFORE the blocking launch (the original emitted
  # the Gradio-share note only after python3 had already exited).
  echo ""
  log_success "=================================================================="
  log_success "🎬 Complete Video Suite Ready!"
  log_success "=================================================================="
  log_info "🌐 Servidor: http://${HOST:-0.0.0.0}:$port"
  log_info "🎮 GPUs: ${GPU_COUNT:-0} x ${GPU_MODEL:-Unknown}"
  log_info "⚡ Multi-GPU: $multi_gpu"
  log_info "🚀 Otimizado: ${OPTIMIZE:-false}"
  log_info "📊 Profiling: $profile"
  if [[ "$share" == "true" ]]; then
    log_info "🌍 Link público será exibido pelo Gradio"
  fi
  log_success "=================================================================="
  echo ""

  echo "🚀 Iniciando app.py..."
  # BUGFIX: app_args was carefully assembled and then thrown away — the
  # launch line hard-coded `--listen --port ${PORT:-7860}` (unquoted), so
  # --share/--multi-gpu/--debug/--profile never reached app.py. Pass the
  # array through, properly quoted.
  python3 /app/app.py "${app_args[@]}"
}
# =============================================================================
# MAIN — wire the boot sequence together.
# =============================================================================
# Fallback loggers: these are normally provided by installer/utils.sh, but
# they must exist BEFORE load_installer_modules (which itself logs) or
# `set -e` kills the script with "command not found" on the first log call.
if ! type log_info >/dev/null 2>&1; then log_info() { printf '[INFO] %s\n' "$*"; }; fi
if ! type log_success >/dev/null 2>&1; then log_success() { printf '[OK] %s\n' "$*"; }; fi
if ! type log_warning >/dev/null 2>&1; then log_warning() { printf '[WARN] %s\n' "$*" >&2; }; fi
if ! type log_debug >/dev/null 2>&1; then log_debug() { :; }; fi

# Runtime defaults for flags read later (overridable via the environment or
# the sourced installer modules); := assigns only when unset.
: "${LISTEN:=true}" "${SHARE:=false}" "${PORT:=7860}" "${HOST:=0.0.0.0}"
: "${MULTI_GPU:=false}" "${NUM_GPUS:=0}" "${DEBUG_MODE:=false}"
: "${PROFILE:=false}" "${OPTIMIZE:=false}" "${COMPUTE_CAP:=unknown}"

# BUGFIX: print_banner, load_installer_modules and detect_hardware were all
# defined but never invoked — the script jumped straight to the launch.
print_banner
load_installer_modules
detect_hardware
start_application