{
  "connector": {
    "hidden_size": 2048,
    "intermediate_size": 11946,
    "num_hidden_layers": 6,
    "num_attention_heads": 32,
    "hidden_act": "gelu_pytorch_tanh",
    "layer_norm_eps": 1e-06,
    "attention_dropout": 0.0
  },
  "num_queries": 128,
  "projector_1_in": 12288,
  "projector_1_out": 2048,
  "projector_2_in": 2048,
  "projector_2_out": 2048,
  "projector_3_in": 2048,
  "projector_3_out": 4096,
  "llm_hidden_size": 2048,
  "max_length": 1024
}