RecursiveMAS committed
Commit 7d4a8e2 · verified · 1 Parent(s): 52a744f

Clear old files before upload

README.md DELETED
@@ -1,7 +0,0 @@
- ---
- base_model:
- - BioMistral/BioMistral-7B
- license: mit
- ---
-
- The Science Agent of Mixture-Style MAS in the paper: Recursive Multi-Agent Systems. (base model: BioMistral/BioMistral-7B)
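
For context, the deleted README identified this repo as the Science Agent of the Mixture-Style MAS, built on BioMistral/BioMistral-7B. A minimal loading sketch for the base model — the repo's adapter.pt uses a custom, undocumented format, so applying it is not shown:

```python
# Minimal sketch: load the BioMistral-7B base model named in the README.
# The repo's custom adapter.pt is not a standard PEFT artifact, so only
# the base model is loaded here.
from transformers import AutoModelForCausalLM, AutoTokenizer

base_id = "BioMistral/BioMistral-7B"
tokenizer = AutoTokenizer.from_pretrained(base_id)
model = AutoModelForCausalLM.from_pretrained(base_id, torch_dtype="bfloat16")
```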
 
adapter.pt DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:2a2dbaf316e5377c292ffec7b6334a9f2b29721df734ae45796c1e48d055084c
- size 67161229
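
adapter.pt was stored as a Git LFS pointer: a three-line text stub recording the spec version, the SHA-256 of the actual blob, and its size in bytes (about 67 MB here). A sketch of validating a downloaded blob against such a pointer (file paths are illustrative):

```python
# Sketch: verify a downloaded file against a Git LFS pointer stub.
# Pointer format (3 lines): version URL, "oid sha256:<hex>", "size <bytes>".
import hashlib
from pathlib import Path

def parse_lfs_pointer(pointer_path: str) -> dict:
    fields = {}
    for line in Path(pointer_path).read_text().splitlines():
        key, _, value = line.partition(" ")
        fields[key] = value
    return fields  # {"version": ..., "oid": "sha256:<hex>", "size": "<bytes>"}

def verify_blob(blob_path: str, pointer_path: str) -> bool:
    fields = parse_lfs_pointer(pointer_path)
    data = Path(blob_path).read_bytes()
    ok_size = len(data) == int(fields["size"])
    ok_hash = hashlib.sha256(data).hexdigest() == fields["oid"].split(":", 1)[1]
    return ok_size and ok_hash
```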
 
adapter_config.json DELETED
@@ -1,3 +0,0 @@
- {
-   "adapter_type": "ln_res_adapter"
- }
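
"ln_res_adapter" is not a standard PEFT adapter type, and the repo ships no schema for it; the real definition presumably lives in the paper's code. Purely as a hypothetical illustration, one common reading of the name is a residual bottleneck adapter applied after a layer norm:

```python
# Hypothetical illustration only: "ln_res_adapter" has no published schema
# in this repo. One plausible reading is a residual bottleneck adapter
# wrapped around a layer norm; the paper's code defines the real module.
import torch
import torch.nn as nn

class LNResAdapter(nn.Module):
    def __init__(self, hidden_size: int = 4096, bottleneck: int = 64):
        super().__init__()
        self.norm = nn.LayerNorm(hidden_size)
        self.down = nn.Linear(hidden_size, bottleneck)
        self.up = nn.Linear(bottleneck, hidden_size)

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        # Residual connection around a small bottleneck MLP.
        return x + self.up(torch.relu(self.down(self.norm(x))))
```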
 
chat_template.jinja DELETED
@@ -1 +0,0 @@
- {{ bos_token }}{% for message in messages %}{% if (message['role'] == 'user') != (loop.index0 % 2 == 0) %}{{ raise_exception('Conversation roles must alternate user/assistant/user/assistant/...') }}{% endif %}{% if message['role'] == 'user' %}{{ '[INST] ' + message['content'] + ' [/INST]' }}{% elif message['role'] == 'assistant' %}{{ message['content'] + eos_token + ' ' }}{% else %}{{ raise_exception('Only user and assistant roles are supported!') }}{% endif %}{% endfor %}
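
This is the stock Mistral instruct template: it enforces strict user/assistant alternation, wraps user turns in [INST] ... [/INST], and appends eos_token after each assistant turn. A sketch of applying it through the tokenizer (assuming the base model's tokenizer carries the same template):

```python
# Sketch: rendering a conversation with this Mistral-style [INST] template.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("BioMistral/BioMistral-7B")
messages = [{"role": "user", "content": "What does BRCA1 do?"}]
prompt = tokenizer.apply_chat_template(messages, tokenize=False)
# -> "<s>[INST] What does BRCA1 do? [/INST]"
```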
 
config.json DELETED
@@ -1,30 +0,0 @@
- {
-   "architectures": [
-     "MistralForCausalLM"
-   ],
-   "attention_dropout": 0.0,
-   "bos_token_id": 1,
-   "dtype": "bfloat16",
-   "eos_token_id": 2,
-   "head_dim": 128,
-   "hidden_act": "silu",
-   "hidden_size": 4096,
-   "initializer_range": 0.02,
-   "intermediate_size": 14336,
-   "max_position_embeddings": 32768,
-   "model_type": "mistral",
-   "num_attention_heads": 32,
-   "num_hidden_layers": 32,
-   "num_key_value_heads": 8,
-   "pad_token_id": null,
-   "rms_norm_eps": 1e-05,
-   "rope_parameters": {
-     "rope_theta": 10000.0,
-     "rope_type": "default"
-   },
-   "sliding_window": 4096,
-   "tie_word_embeddings": false,
-   "transformers_version": "5.3.0",
-   "use_cache": false,
-   "vocab_size": 32000
- }
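
This is the standard Mistral-7B geometry: 32 layers, 32 query heads over 8 KV heads (grouped-query attention), a SwiGLU MLP, and sliding-window attention over 4096 tokens. A quick sanity check of the derived quantities:

```python
# Sanity checks on the Mistral-7B geometry recorded in config.json.
hidden_size, n_heads, n_kv_heads = 4096, 32, 8
assert hidden_size // n_heads == 128      # head_dim
assert n_heads % n_kv_heads == 0          # GQA: 4 query heads per KV head

# Rough parameter count (untied embeddings, 32 layers, SwiGLU MLP;
# norm weights ignored):
vocab, layers, inter = 32000, 32, 14336
attn = 2 * hidden_size * hidden_size + 2 * hidden_size * (128 * n_kv_heads)
mlp = 3 * hidden_size * inter
params = 2 * vocab * hidden_size + layers * (attn + mlp)
print(f"~{params / 1e9:.2f}B parameters")  # ~7.24B
```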
 
generation_config.json DELETED
@@ -1,6 +0,0 @@
- {
-   "_from_model_config": true,
-   "bos_token_id": 1,
-   "eos_token_id": 2,
-   "transformers_version": "5.3.0"
- }
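
The generation config only pins the special-token ids; everything else falls back to transformers defaults. Its in-code equivalent:

```python
# Sketch: the deleted generation_config.json expressed in code. Only the
# special-token ids are pinned; sampling settings use library defaults.
from transformers import GenerationConfig

gen_config = GenerationConfig(bos_token_id=1, eos_token_id=2)
# outputs = model.generate(**inputs, generation_config=gen_config)
```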
 
model.safetensors DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:a6f64111c92b726af0e1ba52d848d4c5e2bc627411c93663ca39a0fff27e6726
- size 14483498224
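
The 14,483,498,224-byte size squares with the config above: roughly 7.24B parameters at 2 bytes each in bfloat16, plus a small safetensors JSON header:

```python
# Check: the LFS-reported size matches ~7.24B bfloat16 weights.
size_bytes = 14_483_498_224            # from the LFS pointer above
approx_params = size_bytes // 2        # bfloat16 = 2 bytes per weight
print(f"~{approx_params / 1e9:.2f}B")  # ~7.24B, matching Mistral-7B
# The few tens of KB beyond the raw weights is the safetensors header.
```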
 
tokenizer.json DELETED
The diff for this file is too large to render.
 
tokenizer_config.json DELETED
@@ -1,16 +0,0 @@
- {
-   "backend": "tokenizers",
-   "bos_token": "<s>",
-   "clean_up_tokenization_spaces": false,
-   "eos_token": "</s>",
-   "extra_special_tokens": [],
-   "is_local": true,
-   "legacy": true,
-   "model_max_length": 1000000000000000019884624838656,
-   "pad_token": "</s>",
-   "sp_model_kwargs": {},
-   "spaces_between_special_tokens": false,
-   "tokenizer_class": "TokenizersBackend",
-   "unk_token": "<unk>",
-   "use_default_system_prompt": false
- }
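
Two details worth noting: pad_token is reused from eos (</s>), and the huge model_max_length is just transformers' "no limit configured" sentinel, int(1e30); the practical context limit comes from max_position_embeddings in config.json.

```python
# The odd model_max_length is transformers' VERY_LARGE_INTEGER sentinel,
# i.e. int(1e30) after float rounding, meaning "no limit configured".
assert int(1e30) == 1000000000000000019884624838656
# The effective context window is config.json's max_position_embeddings (32768).
```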