#!/bin/bash
#
# DTO Archive Protocol — inventory large model/data files under /data,
# upload them to Hugging Face repositories, optionally delete the local
# copies after a successful upload, and write a summary report.
#
# Usage: archive_protocol.sh [--delete]
# Requires a .env file in the repo root defining HF_TOKEN, HF_REPO_MODELS,
# HF_REPO_DATASETS and HF_REPO_ARTIFACTS.

# Abort on command failure, unset variables, and failures inside pipelines.
set -euo pipefail
|
|
| |
| SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" |
| DTO_ROOT="$(dirname "$SCRIPT_DIR")" |
| ENV_FILE="$DTO_ROOT/.env" |
|
|
| if [ -f "$ENV_FILE" ]; then |
| |
| set -a |
| source "$ENV_FILE" |
| set +a |
| echo "β
Loaded .env file from $ENV_FILE" |
| else |
| echo "β .env file not found at $ENV_FILE" |
| exit 1 |
| fi |
|
|
| |
| PROTOCOL_LOG="/tmp/archive_protocol_$(date +%Y%m%d_%H%M%S).log" |
| BACKUP_DIR="/tmp/archive_backup_$(date +%Y%m%d_%H%M%S)" |
| UPLOAD_MANIFEST="$BACKUP_DIR/upload_manifest.txt" |
| DELETE_MANIFEST="$BACKUP_DIR/delete_manifest.txt" |
|
|
| |
| REPO_MODELS="$HF_REPO_MODELS" |
| REPO_DATASETS="$HF_REPO_DATASETS" |
| REPO_ARTIFACTS="$HF_REPO_ARTIFACTS" |
|
|
| |
| RED='\033[0;31m' |
| GREEN='\033[0;32m' |
| YELLOW='\033[1;33m' |
| BLUE='\033[0;34m' |
| NC='\033[0m' |
|
|
| |
# Log an informational message (green timestamp prefix) to stdout and
# append it to $PROTOCOL_LOG.  All arguments are joined into one message,
# so multi-word calls work and a bare `log` no longer aborts under set -u
# (the original referenced $1 unconditionally).
log() {
    # %b interprets the \033 escapes stored in the color variables,
    # matching the previous `echo -e` behavior.
    printf '%b\n' "${GREEN}[$(date '+%Y-%m-%d %H:%M:%S')]${NC} $*" | tee -a "$PROTOCOL_LOG"
}
|
|
# Log an error message (red prefix) to stderr and append it to
# $PROTOCOL_LOG.  Accepts any number of arguments ($* instead of $1, which
# aborted under set -u when called without a message).
error() {
    printf '%b\n' "${RED}[ERROR]${NC} $*" | tee -a "$PROTOCOL_LOG" >&2
}
|
|
# Log a warning message (yellow prefix) to stdout and append it to
# $PROTOCOL_LOG.  Accepts any number of arguments ($* instead of $1, which
# aborted under set -u when called without a message).
warning() {
    printf '%b\n' "${YELLOW}[WARNING]${NC} $*" | tee -a "$PROTOCOL_LOG"
}
|
|
# Log an informational note (blue prefix) to stdout and append it to
# $PROTOCOL_LOG.  Accepts any number of arguments ($* instead of $1, which
# aborted under set -u when called without a message).
info() {
    printf '%b\n' "${BLUE}[INFO]${NC} $*" | tee -a "$PROTOCOL_LOG"
}
|
|
| |
# PHASE 1: build $UPLOAD_MANIFEST — the deduplicated list of archivable
# files — and log size statistics.
#
# Fixes vs. original: the first `find` was unguarded and killed the script
# under `set -e` when /data/experiments is absent; `du` was forked once per
# file (now batched through xargs); an empty $total_size crashed numfmt.
# The scan roots are overridable via environment for testing; the defaults
# preserve the original hard-coded paths.
discovery_phase() {
    log "=== PHASE 1: DISCOVERY & INVENTORY ==="
    mkdir -p "$BACKUP_DIR"

    local exp_root="${ARCHIVE_EXPERIMENTS_ROOT:-/data/experiments}"
    local ws_root="${ARCHIVE_WORKSPACE_ROOT:-/data/data/workspace}"

    log "Discovering space optimization opportunities..."

    # Model weights and tabular data under the experiments tree.
    log "Inventory: $exp_root/"
    find "$exp_root" -type f \( -name "*.safetensors" -o -name "*.pt" -o -name "*.bin" \) 2>/dev/null > "$BACKUP_DIR/experiments_files.txt" || true
    find "$exp_root" -type f \( -name "*.parquet" -o -name "*.jsonl" -o -name "*.csv" \) 2>/dev/null >> "$BACKUP_DIR/experiments_files.txt" || true

    # Anything big in the workspace, excluding cache directories.
    log "Inventory: $ws_root (excluding cache)"
    find "$ws_root" -type f -size +100M ! -path "*/.cache/*" ! -path "*/.local/*" 2>/dev/null > "$BACKUP_DIR/data_files.txt" || true

    sort -u "$BACKUP_DIR/experiments_files.txt" "$BACKUP_DIR/data_files.txt" > "$UPLOAD_MANIFEST"

    # Split declaration from assignment so command failures aren't masked
    # by `local`'s own exit status.
    local total_files total_size
    total_files=$(wc -l < "$UPLOAD_MANIFEST")
    # One batched `du` instead of a fork per file; `+ 0` guarantees a
    # numeric result even on an empty manifest.
    total_size=$(xargs -r -d '\n' du -b -- < "$UPLOAD_MANIFEST" 2>/dev/null | awk '{sum += $1} END {print sum + 0}')

    log "Discovered $total_files files for potential archiving"
    log "Total size: $(numfmt --to=iec "${total_size:-0}")"

    log "Top 5 largest files:"
    xargs -r -d '\n' du -h -- < "$UPLOAD_MANIFEST" 2>/dev/null \
        | sort -rh | head -5 \
        | while IFS=$'\t' read -r size path; do
            printf '  %s - %s\n' "$size" "$path"
          done \
        | tee -a "$PROTOCOL_LOG"
}
|
|
| |
# PHASE 2: upload every file in $UPLOAD_MANIFEST to the appropriate HF
# repository; successfully uploaded paths are appended to $DELETE_MANIFEST.
#
# Fixes vs. original: the HF_TOKEN check ran once per file (now checked
# up front); file paths / token were interpolated directly into the Python
# source, a shell→Python injection hazard that also broke on quotes (now
# passed via the environment); the manifest is read on fd 3 so nothing in
# the loop body can consume it; two `sed` forks per file became parameter
# expansions.
upload_phase() {
    log "=== PHASE 2: UPLOAD TO HF/XET ==="

    # Fail fast before touching any file.
    if [ -z "${HF_TOKEN:-}" ]; then
        error "HF_TOKEN is not set"
        exit 1
    fi

    local uploaded_count=0
    local skipped_count=0
    local failed_count=0

    echo "# Archive Protocol Upload Log - $(date)" > "$BACKUP_DIR/upload_results.txt"

    local file_path repo relative_path
    while IFS= read -r file_path <&3; do
        if [ ! -f "$file_path" ]; then
            warning "File not found: $file_path"
            skipped_count=$((skipped_count + 1))
            continue
        fi

        # Route by extension: weights -> models repo, tabular -> datasets
        # repo, everything else -> artifacts repo.
        case "$file_path" in
            *.safetensors|*.pt|*.bin) repo="$REPO_MODELS" ;;
            *.parquet|*.jsonl|*.csv)  repo="$REPO_DATASETS" ;;
            *)                        repo="$REPO_ARTIFACTS" ;;
        esac

        # Mirror the on-disk layout under a stable prefix inside the repo.
        case "$file_path" in
            /data/experiments/*) relative_path="experiments/${file_path#/data/experiments/}" ;;
            /data/data/*)        relative_path="data/${file_path#/data/data/}" ;;
            *)                   relative_path="other/$(basename "$file_path")" ;;
        esac

        log "Uploading: $relative_path → $repo"

        # HF_TOKEN is exported by the .env loader (`set -a`); the per-file
        # values travel via the environment, never via string splicing.
        if ARCHIVE_FILE="$file_path" ARCHIVE_DEST="$relative_path" ARCHIVE_REPO="$repo" \
            python3 - 2>> "$PROTOCOL_LOG" <<'PYEOF'
import os
import sys

from integrations.huggingface_client import HuggingFaceClient

client = HuggingFaceClient(token=os.environ['HF_TOKEN'])
if not client.is_authenticated():
    print('NOT_AUTHENTICATED')
    sys.exit(1)

ok = client.upload_artifact(os.environ['ARCHIVE_FILE'],
                            os.environ['ARCHIVE_DEST'],
                            os.environ['ARCHIVE_REPO'])
print('SUCCESS' if ok else 'FAILED')
sys.exit(0 if ok else 1)
PYEOF
        then
            echo "✅ SUCCESS: $file_path → $repo/$relative_path" | tee -a "$BACKUP_DIR/upload_results.txt"
            echo "$file_path" >> "$DELETE_MANIFEST"
            uploaded_count=$((uploaded_count + 1))
        else
            error "FAILED: $file_path"
            echo "❌ FAILED: $file_path" >> "$BACKUP_DIR/upload_results.txt"
            failed_count=$((failed_count + 1))
        fi
    done 3< "$UPLOAD_MANIFEST"

    log "Upload phase completed:"
    log "  ✅ Uploaded: $uploaded_count files"
    log "  ⚠️ Skipped: $skipped_count files"
    log "  ❌ Failed: $failed_count files"

    if [ "$uploaded_count" -eq 0 ]; then
        error "No files were uploaded successfully"
        exit 1
    fi
}
|
|
| |
# PHASE 3: list each target repository on the Hub to confirm the uploads
# landed.  Best-effort: per-repo failures are printed, not fatal.
#
# Fix vs. original: the token and repo names were interpolated directly
# into the Python source (injection hazard, breaks on quotes); they now
# travel via the environment.  HF_TOKEN is exported by the .env loader.
verification_phase() {
    log "=== PHASE 3: VERIFICATION ==="

    log "Verifying uploads on HF repositories..."

    ARCHIVE_REPOS="$REPO_MODELS,$REPO_DATASETS,$REPO_ARTIFACTS" python3 - <<'PYEOF' | tee -a "$PROTOCOL_LOG"
import os

from huggingface_hub import HfApi

api = HfApi(token=os.environ.get('HF_TOKEN'))

for repo in os.environ['ARCHIVE_REPOS'].split(','):
    try:
        info = api.repo_info(repo)
        print(f'✅ {repo}: {len(info.siblings)} files, {info.size} bytes')
        # Show a small sample of the repository contents.
        for f in (info.siblings or [])[:5]:
            print(f'  - {f.rfilename} ({f.size} bytes)')
    except Exception as e:
        print(f'❌ {repo}: {e}')
PYEOF

    log "Verification completed"
}
|
|
| |
# PHASE 4 (opt-in): interactively confirm, then delete every local file
# listed in $DELETE_MANIFEST (i.e. files whose upload succeeded).
#
# Fixes vs. original: `size=$(du -b … | cut -f1 || echo 0)` attached the
# fallback to `cut`, which succeeds even on empty input, so a failed `du`
# produced an EMPTY size and `$((total_size + size))` was a syntax error —
# `stat` never emits an empty string on success.  `read -p` on a closed
# stdin aborted the whole script under `set -e`; EOF is now treated as
# "No".  A failing `df` at the end no longer kills the script.
deletion_phase() {
    log "=== PHASE 4: SAFE DELETION (OPTIONAL) ==="

    # -s implies existence, so one test covers both original conditions.
    if [ ! -s "$DELETE_MANIFEST" ]; then
        warning "No files marked for deletion - skipping deletion phase"
        return
    fi

    local delete_count total_size=0
    local file size
    delete_count=$(wc -l < "$DELETE_MANIFEST")

    log "Found $delete_count files ready for safe deletion"

    while IFS= read -r file; do
        if [ -f "$file" ]; then
            size=$(stat -c %s -- "$file" 2>/dev/null || echo 0)
            total_size=$((total_size + size))
        fi
    done < "$DELETE_MANIFEST"

    log "Total space to free: $(numfmt --to=iec "$total_size")"

    # Explicit confirmation; EOF (non-interactive stdin) counts as "No".
    REPLY=""
    read -p "Do you want to proceed with deletion? (y/N): " -n 1 -r || true
    echo
    if [[ ! $REPLY =~ ^[Yy]$ ]]; then
        log "Deletion cancelled by user"
        return
    fi

    local deleted_count=0
    local failed_count=0
    local file_path

    while IFS= read -r file_path; do
        if [ -f "$file_path" ]; then
            if rm -v -- "$file_path" 2>/dev/null; then
                echo "✅ DELETED: $file_path" | tee -a "$PROTOCOL_LOG"
                deleted_count=$((deleted_count + 1))
            else
                error "FAILED to delete: $file_path"
                failed_count=$((failed_count + 1))
            fi
        else
            warning "File already gone: $file_path"
        fi
    done < "$DELETE_MANIFEST"

    log "Deletion completed:"
    log "  ✅ Deleted: $deleted_count files"
    log "  ❌ Failed: $failed_count files"

    log "Disk space after deletion:"
    df -h /data | tee -a "$PROTOCOL_LOG" || true
}
|
|
| |
# PHASE 5: write a markdown summary report into $BACKUP_DIR.
#
# Fix vs. original: `grep -c PATTERN file || echo 0` is broken — on zero
# matches grep -c PRINTS "0" AND exits non-zero, so the fallback fired too
# and the variable became the two-line string "0\n0".  `|| true` plus a
# ${var:-0} default handles both "no match" and "file missing" cleanly.
# The match patterns avoid the status emoji so they work regardless of how
# the result lines are decorated.
cleanup_phase() {
    log "=== PHASE 5: CLEANUP & REPORT ==="

    local total_uploaded total_failed
    total_uploaded=$(grep -c "SUCCESS: " "$BACKUP_DIR/upload_results.txt" 2>/dev/null || true)
    total_failed=$(grep -c "FAILED: " "$BACKUP_DIR/upload_results.txt" 2>/dev/null || true)

    cat > "$BACKUP_DIR/summary_report.md" << EOF
# DTO Archive Protocol Summary

## Execution Details
- **Timestamp**: $(date)
- **Protocol Log**: $PROTOCOL_LOG
- **Backup Directory**: $BACKUP_DIR

## Results
- **Files Discovered**: $(wc -l 2>/dev/null < "$UPLOAD_MANIFEST" || echo 0)
- **Successfully Uploaded**: ${total_uploaded:-0}
- **Upload Failures**: ${total_failed:-0}
- **Files Deleted**: $(wc -l 2>/dev/null < "$DELETE_MANIFEST" || echo 0)

## Repository Status
- **Models Repository**: $REPO_MODELS
- **Datasets Repository**: $REPO_DATASETS
- **Artifacts Repository**: $REPO_ARTIFACTS

## Next Steps
1. Verify uploads on Hugging Face Hub
2. Monitor repository storage usage
3. Run regular archive protocols for new data
4. Review failed uploads for manual intervention

EOF

    log "Summary report created: $BACKUP_DIR/summary_report.md"
    log "Protocol completed successfully!"
}
|
|
| |
# Orchestrate all protocol phases in order.  Deletion only runs when the
# caller passes --delete; everything else is non-destructive.
# (Restores the emoji in the log banners, which were mojibaked.)
main() {
    log "🚀 STARTING DTO ARCHIVE PROTOCOL"
    log "Timestamp: $(date)"
    log "Protocol log: $PROTOCOL_LOG"
    log "Backup directory: $BACKUP_DIR"

    discovery_phase
    upload_phase
    verification_phase

    # Deletion is irreversible, so it stays behind an explicit flag.
    if [ "${1:-}" = "--delete" ]; then
        deletion_phase
    else
        log "Skipping deletion phase (use --delete to enable)"
    fi

    cleanup_phase

    log "✅ ARCHIVE PROTOCOL COMPLETED SUCCESSFULLY"
}
|
|
| |
| main "$@" |