{
"audit/delta": 1e-05,
"audit/embedding/auc": 0.52,
"audit/embedding/empirical_epsilon/0.01": 3.023197554051876,
"audit/embedding/empirical_epsilon/0.05": 3.4791953936219215,
"audit/embedding/empirical_epsilon_details/0.01/correct_guesses": 100.0,
"audit/embedding/empirical_epsilon_details/0.01/epsilon": 3.023197554051876,
"audit/embedding/empirical_epsilon_details/0.01/num_guesses": 100.0,
"audit/embedding/empirical_epsilon_details/0.05/correct_guesses": 100.0,
"audit/embedding/empirical_epsilon_details/0.05/epsilon": 3.4791953936219215,
"audit/embedding/empirical_epsilon_details/0.05/num_guesses": 100.0,
"audit/loss/auc": 0.997368,
"audit/loss/empirical_epsilon/0.01": 3.023197554051876,
"audit/loss/empirical_epsilon/0.05": 3.4791953936219215,
"audit/loss/empirical_epsilon_details/0.01/correct_guesses": 100.0,
"audit/loss/empirical_epsilon_details/0.01/epsilon": 3.023197554051876,
"audit/loss/empirical_epsilon_details/0.01/num_guesses": 100.0,
"audit/loss/empirical_epsilon_details/0.05/correct_guesses": 100.0,
"audit/loss/empirical_epsilon_details/0.05/epsilon": 3.4791953936219215,
"audit/loss/empirical_epsilon_details/0.05/num_guesses": 100.0,
"audit/num_canaries": 500.0,
"audit/num_members": 250.0,
"audit/paper_guess_fraction": 0.2,
"audit/paper_guess_steps": 20.0,
"energy/codecarbon/cpu_count": 8.0,
"energy/codecarbon/cpu_energy": 0.07906645068697907,
"energy/codecarbon/cpu_power": 80.40813034674923,
"energy/codecarbon/cpu_utilization_percent": 8.855084745762712,
"energy/codecarbon/duration": 3685.56630371185,
"energy/codecarbon/emissions": 0.16952444726420043,
"energy/codecarbon/emissions_rate": 4.599685185244585e-05,
"energy/codecarbon/energy_consumed": 4.865380342226571,
"energy/codecarbon/gpu_count": 8.0,
"energy/codecarbon/gpu_energy": 4.748948588600271,
"energy/codecarbon/gpu_power": 4660.729357242909,
"energy/codecarbon/gpu_utilization_percent": 93.662247129579,
"energy/codecarbon/latitude": 47.4843,
"energy/codecarbon/longitude": 8.212,
"energy/codecarbon/pue": 1.0,
"energy/codecarbon/ram_energy": 0.03736530293931913,
"energy/codecarbon/ram_power": 38.0,
"energy/codecarbon/ram_total_size": 256.0,
"energy/codecarbon/ram_used_gb": 515.4519262068776,
"energy/codecarbon/ram_utilization_percent": 26.020803717878625,
"energy/codecarbon/water_consumed": 0.0,
"energy/codecarbon/wue": 0.0,
"eval/duration_sec": 14.38155033509247,
"eval/loss": 0.7171374095434493,
"perf/audit_duration_sec": 7.9593279850669205,
"perf/epoch_duration_sec": 1156.3163079482038,
"perf/epoch_samples": 53331.0,
"perf/epoch_samples_per_sec": 46.12146316143534,
"perf/epoch_tokens": 43842337.0,
"perf/epoch_tokens_per_sec": 37915.522507673464,
"perf/gradient_accumulation_steps": 4.0,
"perf/logical_batch_size": 32.0,
"perf/logical_token_count": 25716.0,
"perf/samples_per_sec": 7.637238639755865,
"perf/step_duration_sec": 4.189996084896848,
"perf/tokens_per_sec": 6137.475901873807,
"system/cuda_epoch_peak_memory_gb": 81.2615852355957,
"system/cuda_max_memory_allocated_gb": 81.2615852355957,
"system/cuda_memory_allocated_gb": 17.816345691680908,
"train/epoch_canary_loss": 1.9868655627132747,
"train/epoch_loss": 0.7704904898906849,
"train/epoch_real_loss": 0.762895334188057,
"train/lr": 6.50087836514208e-06,
"train/step_canary_loss": 0.08935546875,
"train/step_loss": 0.691263422369957,
"train/step_real_loss": 0.691263422369957
}