"""
Phase 3b: Fine-tune LoRA adapter for Fula (ful).

Trains on the same frozen backbone as Bambara — base model weights are NOT modified.

Usage:
    python scripts/train_fula.py
"""
import logging
import sys
from pathlib import Path

# Make the repository root importable so `src.*` resolves when this file
# is executed directly as a script (scripts/train_fula.py → repo root).
sys.path.insert(0, str(Path(__file__).parent.parent))

from dotenv import load_dotenv

# Load environment variables (API keys, paths) before any training imports
# that may read them at import time.
load_dotenv()

logging.basicConfig(
    level=logging.INFO,
    format="%(asctime)s %(levelname)s %(name)s — %(message)s",
)

from src.training.trainer import WhisperLoRATrainer


def main() -> None:
    """Run LoRA fine-tuning for Fula on the shared frozen Whisper backbone.

    Reads the shared base config plus the Fula-specific LoRA config; the
    trainer is expected to write the adapter to adapters/fula/.
    """
    trainer = WhisperLoRATrainer(
        base_config_path="configs/base_config.yaml",
        language_config_path="configs/lora_fula.yaml",
    )
    trainer.setup()
    trainer.train()
    print("\nFula training complete. Adapter saved to adapters/fula/")


if __name__ == "__main__":
    main()