# Textual-inversion training config (kohya-ss sd-scripts style).
# NOTE(review): restored one key per line — the previous single-line form was
# invalid TOML (the first `#` comment swallowed every key after `output_dir`).

pretrained_model_name_or_path = "path/to/graupel-v1-nobody-fp32.safetensors"
output_dir = "path/to/emb_graupel"

# hyperparameters
learning_rate = 6e-4
lr_scheduler = "cosine_with_restarts"
lr_scheduler_num_cycles = 20
lr_warmup_steps = 50
max_train_epochs = 20
optimizer_type = "Lion"
seed = 3047
mixed_precision = "fp16"
xformers = true
save_every_n_epochs = 2

# textual inversion
init_word = "horror"
num_vectors_per_token = 16
token_string = "badquality"

# v2
v2 = true
v_parameterization = true

# logging
log_prefix = "test-13-"
log_tracker_name = "badquality-fix"
log_with = "wandb"
# logging_dir = ""

# sample
sample_every_n_epochs = 1
sample_prompts = "path/to/sample.txt"

# dataset
dataset_config = "path/to/dataset.toml"
debug_dataset = false