gitgraph-model / config.json
{
  "code_encoder_config": {
    "model_name": "Salesforce/codet5p-220m",
    "max_length": 512,
    "use_lora": true,
    "lora_r": 16,
    "lora_alpha": 32,
    "lora_dropout": 0.1,
    "gradient_checkpointing": false,
    "freeze_encoder": false
  },
  "gnn_config": {
    "input_dim": 768,
    "hidden_dim": 256,
    "output_dim": 256,
    "num_layers": 3,
    "num_heads": 4,
    "dropout": 0.1,
    "use_gatv2": true,
    "use_residual": true,
    "use_layer_norm": true,
    "pooling": "mean"
  },
  "fusion_type": "concat",
  "fusion_hidden_dim": 512,
  "fusion_dropout": 0.1,
  "vocab_size": 32128,
  "num_files": 1000,
  "head_hidden_dim": 256,
  "head_dropout": 0.1
}
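The sketch below illustrates one plausible way this config could be consumed when assembling the model: a CodeT5+ encoder wrapped in LoRA adapters (per code_encoder_config), a 3-layer GATv2 graph encoder (per gnn_config), and a concat fusion head. The GitGraph training code is not included in this repository, so the class and variable names (GNNEncoder, fusion_head, the per-file output head, the LoRA target_modules) are assumptions for illustration; only the Hugging Face, PEFT, and PyTorch Geometric APIs used are real.

import json

import torch
import torch.nn as nn
from peft import LoraConfig, get_peft_model
from torch_geometric.nn import GATv2Conv
from transformers import AutoTokenizer, T5EncoderModel

with open("config.json") as f:
    cfg = json.load(f)
enc_cfg, gnn_cfg = cfg["code_encoder_config"], cfg["gnn_config"]

# Code encoder: CodeT5+ encoder with LoRA adapters, per code_encoder_config.
tokenizer = AutoTokenizer.from_pretrained(enc_cfg["model_name"])
encoder = T5EncoderModel.from_pretrained(enc_cfg["model_name"])
if enc_cfg["use_lora"]:
    lora = LoraConfig(
        r=enc_cfg["lora_r"],
        lora_alpha=enc_cfg["lora_alpha"],
        lora_dropout=enc_cfg["lora_dropout"],
        target_modules=["q", "v"],  # assumption: T5 attention projections
    )
    encoder = get_peft_model(encoder, lora)

# Graph encoder: GATv2 stack matching gnn_config
# (768 -> 256 -> 256 -> 256, 4 heads, residual + LayerNorm, mean pooling).
class GNNEncoder(nn.Module):
    def __init__(self, c):
        super().__init__()
        dims = [c["input_dim"]] + [c["hidden_dim"]] * (c["num_layers"] - 1) + [c["output_dim"]]
        self.convs = nn.ModuleList(
            GATv2Conv(dims[i], dims[i + 1], heads=c["num_heads"],
                      concat=False, dropout=c["dropout"])
            for i in range(c["num_layers"])
        )
        self.norms = nn.ModuleList(nn.LayerNorm(d) for d in dims[1:])

    def forward(self, x, edge_index):
        for conv, norm in zip(self.convs, self.norms):
            h = conv(x, edge_index)
            if h.shape == x.shape:   # use_residual: skip connection when dims match
                h = h + x
            x = torch.relu(norm(h))  # use_layer_norm
        return x.mean(dim=0)         # pooling: "mean" over nodes

gnn = GNNEncoder(gnn_cfg)

# Fusion + head: "concat" joins the 768-d code embedding with the 256-d graph
# embedding, then projects through the fusion and head MLP dims. The final
# num_files-sized output layer is an assumption about what the head predicts.
fusion_head = nn.Sequential(
    nn.Linear(gnn_cfg["input_dim"] + gnn_cfg["output_dim"], cfg["fusion_hidden_dim"]),
    nn.ReLU(),
    nn.Dropout(cfg["fusion_dropout"]),
    nn.Linear(cfg["fusion_hidden_dim"], cfg["head_hidden_dim"]),
    nn.ReLU(),
    nn.Dropout(cfg["head_dropout"]),
    nn.Linear(cfg["head_hidden_dim"], cfg["num_files"]),
)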