splade-code-06B / config.json — commit 071f001 (verified) by maxoul: "Update config.json"
{
  "archi_type": "decoder",
  "architectures": [
    "Splade"
  ],
  "attn_implementation": "flash_attention_2",
  "attn_type": "causal",
  "auto_map": {
    "AutoConfig": "splade.SpladeConfig",
    "AutoModelForCausalLM": "splade.Splade"
  },
  "bidirectional": true,
  "lora": false,
  "lora_r": 0,
  "model_name_or_path": "Qwen/Qwen3-0.6B",
  "model_type": "splade",
  "n_layers": null,
  "padding_side": "left",
  "torch_dtype": "bfloat16",
  "train_head": false,
  "transformers_version": "4.53.3"
}