From 660604adce74a5b6b763a5b39e022dab1e2c0317 Mon Sep 17 00:00:00 2001
From: yang yingjie
Date: Mon, 24 Mar 2025 11:14:43 +0800
Subject: [PATCH] first commit

---
 added_tokens.json                |       3 +
 chat_template.json               |       3 +
 config.json                      |      62 +
 generation_config.json           |      11 +
 model-00001-of-00002.safetensors |       3 +
 model-00002-of-00002.safetensors |       3 +
 model.safetensors.index.json     |     890 +
 preprocessor_config.json         |      29 +
 processor_config.json            |       4 +
 special_tokens_map.json          |      33 +
 tokenizer.json                   | 2379611 +++++++++++++++++++++++++++
 tokenizer.model                  |       3 +
 tokenizer_config.json            |   51347 +
 13 files changed, 2432002 insertions(+)
 create mode 100644 added_tokens.json
 create mode 100644 chat_template.json
 create mode 100644 config.json
 create mode 100644 generation_config.json
 create mode 100644 model-00001-of-00002.safetensors
 create mode 100644 model-00002-of-00002.safetensors
 create mode 100644 model.safetensors.index.json
 create mode 100644 preprocessor_config.json
 create mode 100644 processor_config.json
 create mode 100644 special_tokens_map.json
 create mode 100644 tokenizer.json
 create mode 100644 tokenizer.model
 create mode 100644 tokenizer_config.json

diff --git a/added_tokens.json b/added_tokens.json
new file mode 100644
index 0000000..e17bde0
--- /dev/null
+++ b/added_tokens.json
@@ -0,0 +1,3 @@
+{
+  "<image_soft_token>": 262144
+}
diff --git a/chat_template.json b/chat_template.json
new file mode 100644
index 0000000..719b0cd
--- /dev/null
+++ b/chat_template.json
@@ -0,0 +1,3 @@
+{
+  "chat_template": "{{ bos_token }}\n{%- if messages[0]['role'] == 'system' -%}\n    {%- if messages[0]['content'] is string -%}\n        {%- set first_user_prefix = messages[0]['content'] + '\n\n' -%}\n    {%- else -%}\n        {%- set first_user_prefix = messages[0]['content'][0]['text'] + '\n\n' -%}\n    {%- endif -%}\n    {%- set loop_messages = messages[1:] -%}\n{%- else -%}\n    {%- set first_user_prefix = \"\" -%}\n    {%- set loop_messages = messages -%}\n{%- endif -%}\n{%- for message in loop_messages -%}\n    {%- if (message['role'] == 'user') != (loop.index0 % 2 == 0) -%}\n        {{ raise_exception(\"Conversation roles must alternate user/assistant/user/assistant/...\") }}\n    {%- endif -%}\n    {%- if (message['role'] == 'assistant') -%}\n        {%- set role = \"model\" -%}\n    {%- else -%}\n        {%- set role = message['role'] -%}\n    {%- endif -%}\n    {{ '<start_of_turn>' + role + '\n' + (first_user_prefix if loop.first else \"\") }}\n    {%- if message['content'] is string -%}\n        {{ message['content'] | trim }}\n    {%- elif message['content'] is iterable -%}\n        {%- for item in message['content'] -%}\n            {%- if item['type'] == 'image' -%}\n                {{ '<start_of_image>' }}\n            {%- elif item['type'] == 'text' -%}\n                {{ item['text'] | trim }}\n            {%- endif -%}\n        {%- endfor -%}\n    {%- else -%}\n        {{ raise_exception(\"Invalid content type\") }}\n    {%- endif -%}\n    {{ '<end_of_turn>\n' }}\n{%- endfor -%}\n{%- if add_generation_prompt -%}\n    {{'<start_of_turn>model\n'}}\n{%- endif -%}\n"
+}
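For clarity, a minimal sketch of how the chat template above is rendered; the local path "." and the example messages are illustrative assumptions, not part of this commit:

# Hypothetical usage of the template shipped in chat_template.json.
# Assumes a local checkout of this repo as the working directory.
from transformers import AutoProcessor

processor = AutoProcessor.from_pretrained(".")

messages = [
    {"role": "system", "content": [{"type": "text", "text": "You are a concise assistant."}]},
    {"role": "user", "content": [
        {"type": "image"},
        {"type": "text", "text": "Describe this image."},
    ]},
]

# add_generation_prompt=True appends the trailing '<start_of_turn>model\n'
# from the template's final block.
prompt = processor.apply_chat_template(messages, add_generation_prompt=True, tokenize=False)
print(prompt)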
"initializer_range": 0.02, + "intermediate_size": 10240, + "max_position_embeddings": 131072, + "model_type": "gemma3_text", + "num_attention_heads": 8, + "num_hidden_layers": 34, + "num_key_value_heads": 4, + "query_pre_attn_scalar": 256, + "rms_norm_eps": 1e-06, + "rope_local_base_freq": 10000.0, + "rope_scaling": { + "factor": 8.0, + "rope_type": "linear" + }, + "rope_theta": 1000000.0, + "sliding_window": 1024, + "sliding_window_pattern": 6, + "torch_dtype": "bfloat16", + "use_cache": true, + "vocab_size": 262208 + }, + "torch_dtype": "bfloat16", + "transformers_version": "4.50.0.dev0", + "unsloth_fixed": true, + "vision_config": { + "attention_dropout": 0.0, + "hidden_act": "gelu_pytorch_tanh", + "hidden_size": 1152, + "image_size": 896, + "intermediate_size": 4304, + "layer_norm_eps": 1e-06, + "model_type": "siglip_vision_model", + "num_attention_heads": 16, + "num_channels": 3, + "num_hidden_layers": 27, + "patch_size": 14, + "torch_dtype": "bfloat16", + "vision_use_head": false + } +} diff --git a/generation_config.json b/generation_config.json new file mode 100644 index 0000000..f60a673 --- /dev/null +++ b/generation_config.json @@ -0,0 +1,11 @@ +{ + "_from_model_config": true, + "bos_token_id": 2, + "cache_implementation": "hybrid", + "eos_token_id": [ + 1, + 106 + ], + "pad_token_id": 0, + "transformers_version": "4.50.0.dev0" +} diff --git a/model-00001-of-00002.safetensors b/model-00001-of-00002.safetensors new file mode 100644 index 0000000..325ea06 --- /dev/null +++ b/model-00001-of-00002.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:eb5fd5e97ddd07b56778733e9653c07312529cb00980a318fc3e1c4e3b5a8f1f +size 4961251752 diff --git a/model-00002-of-00002.safetensors b/model-00002-of-00002.safetensors new file mode 100644 index 0000000..300ab0d --- /dev/null +++ b/model-00002-of-00002.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:fdde0e5aa5ced0fa203b3d50f4ab78168b7e3a3e08c6349f5cc9326666e1bb13 +size 3639026128 diff --git a/model.safetensors.index.json b/model.safetensors.index.json new file mode 100644 index 0000000..4b95241 --- /dev/null +++ b/model.safetensors.index.json @@ -0,0 +1,890 @@ +{ + "metadata": { + "total_size": 8600158944 + }, + "weight_map": { + "language_model.model.embed_tokens.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.0.input_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.0.mlp.down_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.0.mlp.gate_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.0.mlp.up_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.0.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.0.post_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.0.pre_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.0.self_attn.k_norm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.0.self_attn.k_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.0.self_attn.o_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.0.self_attn.q_norm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.0.self_attn.q_proj.weight": "model-00001-of-00002.safetensors", + 
"language_model.model.layers.0.self_attn.v_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.1.input_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.1.mlp.down_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.1.mlp.gate_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.1.mlp.up_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.1.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.1.post_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.1.pre_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.1.self_attn.k_norm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.1.self_attn.k_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.1.self_attn.o_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.1.self_attn.q_norm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.1.self_attn.q_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.1.self_attn.v_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.10.input_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.10.mlp.down_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.10.mlp.gate_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.10.mlp.up_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.10.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.10.post_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.10.pre_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.10.self_attn.k_norm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.10.self_attn.k_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.10.self_attn.o_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.10.self_attn.q_norm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.10.self_attn.q_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.10.self_attn.v_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.11.input_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.11.mlp.down_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.11.mlp.gate_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.11.mlp.up_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.11.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.11.post_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.11.pre_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.11.self_attn.k_norm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.11.self_attn.k_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.11.self_attn.o_proj.weight": 
"model-00001-of-00002.safetensors", + "language_model.model.layers.11.self_attn.q_norm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.11.self_attn.q_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.11.self_attn.v_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.12.input_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.12.mlp.down_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.12.mlp.gate_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.12.mlp.up_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.12.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.12.post_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.12.pre_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.12.self_attn.k_norm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.12.self_attn.k_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.12.self_attn.o_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.12.self_attn.q_norm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.12.self_attn.q_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.12.self_attn.v_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.13.input_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.13.mlp.down_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.13.mlp.gate_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.13.mlp.up_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.13.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.13.post_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.13.pre_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.13.self_attn.k_norm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.13.self_attn.k_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.13.self_attn.o_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.13.self_attn.q_norm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.13.self_attn.q_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.13.self_attn.v_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.14.input_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.14.mlp.down_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.14.mlp.gate_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.14.mlp.up_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.14.post_attention_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.14.post_feedforward_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.14.pre_feedforward_layernorm.weight": "model-00002-of-00002.safetensors", + 
"language_model.model.layers.14.self_attn.k_norm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.14.self_attn.k_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.14.self_attn.o_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.14.self_attn.q_norm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.14.self_attn.q_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.14.self_attn.v_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.15.input_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.15.mlp.down_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.15.mlp.gate_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.15.mlp.up_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.15.post_attention_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.15.post_feedforward_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.15.pre_feedforward_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.15.self_attn.k_norm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.15.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.15.self_attn.o_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.15.self_attn.q_norm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.15.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.15.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.16.input_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.16.mlp.down_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.16.mlp.gate_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.16.mlp.up_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.16.post_attention_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.16.post_feedforward_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.16.pre_feedforward_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.16.self_attn.k_norm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.16.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.16.self_attn.o_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.16.self_attn.q_norm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.16.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.16.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.17.input_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.17.mlp.down_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.17.mlp.gate_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.17.mlp.up_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.17.post_attention_layernorm.weight": 
"model-00002-of-00002.safetensors", + "language_model.model.layers.17.post_feedforward_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.17.pre_feedforward_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.17.self_attn.k_norm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.17.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.17.self_attn.o_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.17.self_attn.q_norm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.17.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.17.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.18.input_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.18.mlp.down_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.18.mlp.gate_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.18.mlp.up_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.18.post_attention_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.18.post_feedforward_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.18.pre_feedforward_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.18.self_attn.k_norm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.18.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.18.self_attn.o_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.18.self_attn.q_norm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.18.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.18.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.19.input_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.19.mlp.down_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.19.mlp.gate_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.19.mlp.up_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.19.post_attention_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.19.post_feedforward_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.19.pre_feedforward_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.19.self_attn.k_norm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.19.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.19.self_attn.o_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.19.self_attn.q_norm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.19.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.19.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.2.input_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.2.mlp.down_proj.weight": "model-00001-of-00002.safetensors", + 
"language_model.model.layers.2.mlp.gate_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.2.mlp.up_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.2.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.2.post_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.2.pre_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.2.self_attn.k_norm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.2.self_attn.k_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.2.self_attn.o_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.2.self_attn.q_norm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.2.self_attn.q_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.2.self_attn.v_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.20.input_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.20.mlp.down_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.20.mlp.gate_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.20.mlp.up_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.20.post_attention_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.20.post_feedforward_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.20.pre_feedforward_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.20.self_attn.k_norm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.20.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.20.self_attn.o_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.20.self_attn.q_norm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.20.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.20.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.21.input_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.21.mlp.down_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.21.mlp.gate_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.21.mlp.up_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.21.post_attention_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.21.post_feedforward_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.21.pre_feedforward_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.21.self_attn.k_norm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.21.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.21.self_attn.o_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.21.self_attn.q_norm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.21.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.21.self_attn.v_proj.weight": 
"model-00002-of-00002.safetensors", + "language_model.model.layers.22.input_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.22.mlp.down_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.22.mlp.gate_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.22.mlp.up_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.22.post_attention_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.22.post_feedforward_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.22.pre_feedforward_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.22.self_attn.k_norm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.22.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.22.self_attn.o_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.22.self_attn.q_norm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.22.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.22.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.23.input_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.23.mlp.down_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.23.mlp.gate_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.23.mlp.up_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.23.post_attention_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.23.post_feedforward_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.23.pre_feedforward_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.23.self_attn.k_norm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.23.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.23.self_attn.o_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.23.self_attn.q_norm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.23.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.23.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.24.input_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.24.mlp.down_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.24.mlp.gate_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.24.mlp.up_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.24.post_attention_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.24.post_feedforward_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.24.pre_feedforward_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.24.self_attn.k_norm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.24.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.24.self_attn.o_proj.weight": "model-00002-of-00002.safetensors", + 
"language_model.model.layers.24.self_attn.q_norm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.24.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.24.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.25.input_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.25.mlp.down_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.25.mlp.gate_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.25.mlp.up_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.25.post_attention_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.25.post_feedforward_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.25.pre_feedforward_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.25.self_attn.k_norm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.25.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.25.self_attn.o_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.25.self_attn.q_norm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.25.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.25.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.26.input_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.26.mlp.down_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.26.mlp.gate_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.26.mlp.up_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.26.post_attention_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.26.post_feedforward_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.26.pre_feedforward_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.26.self_attn.k_norm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.26.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.26.self_attn.o_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.26.self_attn.q_norm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.26.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.26.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.27.input_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.27.mlp.down_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.27.mlp.gate_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.27.mlp.up_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.27.post_attention_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.27.post_feedforward_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.27.pre_feedforward_layernorm.weight": "model-00002-of-00002.safetensors", + 
"language_model.model.layers.27.self_attn.k_norm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.27.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.27.self_attn.o_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.27.self_attn.q_norm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.27.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.27.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.28.input_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.28.mlp.down_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.28.mlp.gate_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.28.mlp.up_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.28.post_attention_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.28.post_feedforward_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.28.pre_feedforward_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.28.self_attn.k_norm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.28.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.28.self_attn.o_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.28.self_attn.q_norm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.28.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.28.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.29.input_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.29.mlp.down_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.29.mlp.gate_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.29.mlp.up_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.29.post_attention_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.29.post_feedforward_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.29.pre_feedforward_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.29.self_attn.k_norm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.29.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.29.self_attn.o_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.29.self_attn.q_norm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.29.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.29.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.3.input_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.3.mlp.down_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.3.mlp.gate_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.3.mlp.up_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.3.post_attention_layernorm.weight": 
"model-00001-of-00002.safetensors", + "language_model.model.layers.3.post_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.3.pre_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.3.self_attn.k_norm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.3.self_attn.k_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.3.self_attn.o_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.3.self_attn.q_norm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.3.self_attn.q_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.3.self_attn.v_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.30.input_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.30.mlp.down_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.30.mlp.gate_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.30.mlp.up_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.30.post_attention_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.30.post_feedforward_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.30.pre_feedforward_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.30.self_attn.k_norm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.30.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.30.self_attn.o_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.30.self_attn.q_norm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.30.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.30.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.31.input_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.31.mlp.down_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.31.mlp.gate_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.31.mlp.up_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.31.post_attention_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.31.post_feedforward_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.31.pre_feedforward_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.31.self_attn.k_norm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.31.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.31.self_attn.o_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.31.self_attn.q_norm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.31.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.31.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.32.input_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.32.mlp.down_proj.weight": "model-00002-of-00002.safetensors", + 
"language_model.model.layers.32.mlp.gate_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.32.mlp.up_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.32.post_attention_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.32.post_feedforward_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.32.pre_feedforward_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.32.self_attn.k_norm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.32.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.32.self_attn.o_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.32.self_attn.q_norm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.32.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.32.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.33.input_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.33.mlp.down_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.33.mlp.gate_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.33.mlp.up_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.33.post_attention_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.33.post_feedforward_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.33.pre_feedforward_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.33.self_attn.k_norm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.33.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.33.self_attn.o_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.33.self_attn.q_norm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.33.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.33.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.4.input_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.4.mlp.down_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.4.mlp.gate_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.4.mlp.up_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.4.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.4.post_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.4.pre_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.4.self_attn.k_norm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.4.self_attn.k_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.4.self_attn.o_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.4.self_attn.q_norm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.4.self_attn.q_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.4.self_attn.v_proj.weight": 
"model-00001-of-00002.safetensors", + "language_model.model.layers.5.input_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.5.mlp.down_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.5.mlp.gate_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.5.mlp.up_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.5.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.5.post_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.5.pre_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.5.self_attn.k_norm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.5.self_attn.k_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.5.self_attn.o_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.5.self_attn.q_norm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.5.self_attn.q_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.5.self_attn.v_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.6.input_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.6.mlp.down_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.6.mlp.gate_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.6.mlp.up_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.6.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.6.post_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.6.pre_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.6.self_attn.k_norm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.6.self_attn.k_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.6.self_attn.o_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.6.self_attn.q_norm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.6.self_attn.q_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.6.self_attn.v_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.7.input_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.7.mlp.down_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.7.mlp.gate_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.7.mlp.up_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.7.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.7.post_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.7.pre_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.7.self_attn.k_norm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.7.self_attn.k_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.7.self_attn.o_proj.weight": "model-00001-of-00002.safetensors", + 
"language_model.model.layers.7.self_attn.q_norm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.7.self_attn.q_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.7.self_attn.v_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.8.input_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.8.mlp.down_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.8.mlp.gate_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.8.mlp.up_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.8.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.8.post_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.8.pre_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.8.self_attn.k_norm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.8.self_attn.k_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.8.self_attn.o_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.8.self_attn.q_norm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.8.self_attn.q_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.8.self_attn.v_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.9.input_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.9.mlp.down_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.9.mlp.gate_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.9.mlp.up_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.9.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.9.post_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.9.pre_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.9.self_attn.k_norm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.9.self_attn.k_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.9.self_attn.o_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.9.self_attn.q_norm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.9.self_attn.q_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.9.self_attn.v_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.norm.weight": "model-00002-of-00002.safetensors", + "multi_modal_projector.mm_input_projection_weight": "model-00001-of-00002.safetensors", + "multi_modal_projector.mm_soft_emb_norm.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.embeddings.patch_embedding.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.embeddings.patch_embedding.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.embeddings.position_embedding.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.0.layer_norm1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.0.layer_norm1.weight": "model-00001-of-00002.safetensors", + 
"vision_tower.vision_model.encoder.layers.0.layer_norm2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.0.layer_norm2.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.0.mlp.fc1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.0.mlp.fc1.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.0.mlp.fc2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.0.mlp.fc2.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.0.self_attn.k_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.0.self_attn.k_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.0.self_attn.out_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.0.self_attn.out_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.0.self_attn.q_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.0.self_attn.q_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.0.self_attn.v_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.0.self_attn.v_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.1.layer_norm1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.1.layer_norm1.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.1.layer_norm2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.1.layer_norm2.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.1.mlp.fc1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.1.mlp.fc1.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.1.mlp.fc2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.1.mlp.fc2.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.1.self_attn.k_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.1.self_attn.k_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.1.self_attn.out_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.1.self_attn.out_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.1.self_attn.q_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.1.self_attn.q_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.1.self_attn.v_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.1.self_attn.v_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.10.layer_norm1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.10.layer_norm1.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.10.layer_norm2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.10.layer_norm2.weight": "model-00001-of-00002.safetensors", + 
"vision_tower.vision_model.encoder.layers.10.mlp.fc1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.10.mlp.fc1.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.10.mlp.fc2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.10.mlp.fc2.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.10.self_attn.k_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.10.self_attn.k_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.10.self_attn.out_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.10.self_attn.out_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.10.self_attn.q_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.10.self_attn.q_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.10.self_attn.v_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.10.self_attn.v_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.11.layer_norm1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.11.layer_norm1.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.11.layer_norm2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.11.layer_norm2.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.11.mlp.fc1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.11.mlp.fc1.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.11.mlp.fc2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.11.mlp.fc2.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.11.self_attn.k_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.11.self_attn.k_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.11.self_attn.out_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.11.self_attn.out_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.11.self_attn.q_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.11.self_attn.q_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.11.self_attn.v_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.11.self_attn.v_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.12.layer_norm1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.12.layer_norm1.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.12.layer_norm2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.12.layer_norm2.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.12.mlp.fc1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.12.mlp.fc1.weight": "model-00001-of-00002.safetensors", + 
"vision_tower.vision_model.encoder.layers.12.mlp.fc2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.12.mlp.fc2.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.12.self_attn.k_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.12.self_attn.k_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.12.self_attn.out_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.12.self_attn.out_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.12.self_attn.q_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.12.self_attn.q_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.12.self_attn.v_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.12.self_attn.v_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.13.layer_norm1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.13.layer_norm1.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.13.layer_norm2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.13.layer_norm2.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.13.mlp.fc1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.13.mlp.fc1.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.13.mlp.fc2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.13.mlp.fc2.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.13.self_attn.k_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.13.self_attn.k_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.13.self_attn.out_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.13.self_attn.out_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.13.self_attn.q_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.13.self_attn.q_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.13.self_attn.v_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.13.self_attn.v_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.14.layer_norm1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.14.layer_norm1.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.14.layer_norm2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.14.layer_norm2.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.14.mlp.fc1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.14.mlp.fc1.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.14.mlp.fc2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.14.mlp.fc2.weight": "model-00001-of-00002.safetensors", + 
"vision_tower.vision_model.encoder.layers.14.self_attn.k_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.14.self_attn.k_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.14.self_attn.out_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.14.self_attn.out_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.14.self_attn.q_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.14.self_attn.q_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.14.self_attn.v_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.14.self_attn.v_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.15.layer_norm1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.15.layer_norm1.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.15.layer_norm2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.15.layer_norm2.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.15.mlp.fc1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.15.mlp.fc1.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.15.mlp.fc2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.15.mlp.fc2.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.15.self_attn.k_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.15.self_attn.k_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.15.self_attn.out_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.15.self_attn.out_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.15.self_attn.q_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.15.self_attn.q_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.15.self_attn.v_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.15.self_attn.v_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.16.layer_norm1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.16.layer_norm1.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.16.layer_norm2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.16.layer_norm2.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.16.mlp.fc1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.16.mlp.fc1.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.16.mlp.fc2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.16.mlp.fc2.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.16.self_attn.k_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.16.self_attn.k_proj.weight": 
"model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.16.self_attn.out_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.16.self_attn.out_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.16.self_attn.q_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.16.self_attn.q_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.16.self_attn.v_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.16.self_attn.v_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.17.layer_norm1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.17.layer_norm1.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.17.layer_norm2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.17.layer_norm2.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.17.mlp.fc1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.17.mlp.fc1.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.17.mlp.fc2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.17.mlp.fc2.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.17.self_attn.k_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.17.self_attn.k_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.17.self_attn.out_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.17.self_attn.out_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.17.self_attn.q_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.17.self_attn.q_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.17.self_attn.v_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.17.self_attn.v_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.18.layer_norm1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.18.layer_norm1.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.18.layer_norm2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.18.layer_norm2.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.18.mlp.fc1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.18.mlp.fc1.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.18.mlp.fc2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.18.mlp.fc2.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.18.self_attn.k_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.18.self_attn.k_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.18.self_attn.out_proj.bias": "model-00001-of-00002.safetensors", + 
"vision_tower.vision_model.encoder.layers.18.self_attn.out_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.18.self_attn.q_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.18.self_attn.q_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.18.self_attn.v_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.18.self_attn.v_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.19.layer_norm1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.19.layer_norm1.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.19.layer_norm2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.19.layer_norm2.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.19.mlp.fc1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.19.mlp.fc1.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.19.mlp.fc2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.19.mlp.fc2.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.19.self_attn.k_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.19.self_attn.k_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.19.self_attn.out_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.19.self_attn.out_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.19.self_attn.q_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.19.self_attn.q_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.19.self_attn.v_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.19.self_attn.v_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.2.layer_norm1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.2.layer_norm1.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.2.layer_norm2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.2.layer_norm2.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.2.mlp.fc1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.2.mlp.fc1.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.2.mlp.fc2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.2.mlp.fc2.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.2.self_attn.k_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.2.self_attn.k_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.2.self_attn.out_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.2.self_attn.out_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.2.self_attn.q_proj.bias": "model-00001-of-00002.safetensors", 
+ "vision_tower.vision_model.encoder.layers.2.self_attn.q_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.2.self_attn.v_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.2.self_attn.v_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.20.layer_norm1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.20.layer_norm1.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.20.layer_norm2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.20.layer_norm2.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.20.mlp.fc1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.20.mlp.fc1.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.20.mlp.fc2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.20.mlp.fc2.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.20.self_attn.k_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.20.self_attn.k_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.20.self_attn.out_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.20.self_attn.out_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.20.self_attn.q_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.20.self_attn.q_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.20.self_attn.v_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.20.self_attn.v_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.21.layer_norm1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.21.layer_norm1.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.21.layer_norm2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.21.layer_norm2.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.21.mlp.fc1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.21.mlp.fc1.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.21.mlp.fc2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.21.mlp.fc2.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.21.self_attn.k_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.21.self_attn.k_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.21.self_attn.out_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.21.self_attn.out_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.21.self_attn.q_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.21.self_attn.q_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.21.self_attn.v_proj.bias": 
"model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.21.self_attn.v_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.22.layer_norm1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.22.layer_norm1.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.22.layer_norm2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.22.layer_norm2.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.22.mlp.fc1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.22.mlp.fc1.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.22.mlp.fc2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.22.mlp.fc2.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.22.self_attn.k_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.22.self_attn.k_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.22.self_attn.out_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.22.self_attn.out_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.22.self_attn.q_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.22.self_attn.q_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.22.self_attn.v_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.22.self_attn.v_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.23.layer_norm1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.23.layer_norm1.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.23.layer_norm2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.23.layer_norm2.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.23.mlp.fc1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.23.mlp.fc1.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.23.mlp.fc2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.23.mlp.fc2.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.23.self_attn.k_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.23.self_attn.k_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.23.self_attn.out_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.23.self_attn.out_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.23.self_attn.q_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.23.self_attn.q_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.23.self_attn.v_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.23.self_attn.v_proj.weight": "model-00001-of-00002.safetensors", + 
"vision_tower.vision_model.encoder.layers.24.layer_norm1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.24.layer_norm1.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.24.layer_norm2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.24.layer_norm2.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.24.mlp.fc1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.24.mlp.fc1.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.24.mlp.fc2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.24.mlp.fc2.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.24.self_attn.k_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.24.self_attn.k_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.24.self_attn.out_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.24.self_attn.out_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.24.self_attn.q_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.24.self_attn.q_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.24.self_attn.v_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.24.self_attn.v_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.25.layer_norm1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.25.layer_norm1.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.25.layer_norm2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.25.layer_norm2.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.25.mlp.fc1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.25.mlp.fc1.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.25.mlp.fc2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.25.mlp.fc2.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.25.self_attn.k_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.25.self_attn.k_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.25.self_attn.out_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.25.self_attn.out_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.25.self_attn.q_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.25.self_attn.q_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.25.self_attn.v_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.25.self_attn.v_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.26.layer_norm1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.26.layer_norm1.weight": 
"model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.26.layer_norm2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.26.layer_norm2.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.26.mlp.fc1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.26.mlp.fc1.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.26.mlp.fc2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.26.mlp.fc2.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.26.self_attn.k_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.26.self_attn.k_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.26.self_attn.out_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.26.self_attn.out_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.26.self_attn.q_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.26.self_attn.q_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.26.self_attn.v_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.26.self_attn.v_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.3.layer_norm1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.3.layer_norm1.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.3.layer_norm2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.3.layer_norm2.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.3.mlp.fc1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.3.mlp.fc1.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.3.mlp.fc2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.3.mlp.fc2.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.3.self_attn.k_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.3.self_attn.k_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.3.self_attn.out_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.3.self_attn.out_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.3.self_attn.q_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.3.self_attn.q_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.3.self_attn.v_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.3.self_attn.v_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.4.layer_norm1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.4.layer_norm1.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.4.layer_norm2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.4.layer_norm2.weight": 
"model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.4.mlp.fc1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.4.mlp.fc1.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.4.mlp.fc2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.4.mlp.fc2.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.4.self_attn.k_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.4.self_attn.k_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.4.self_attn.out_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.4.self_attn.out_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.4.self_attn.q_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.4.self_attn.q_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.4.self_attn.v_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.4.self_attn.v_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.5.layer_norm1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.5.layer_norm1.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.5.layer_norm2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.5.layer_norm2.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.5.mlp.fc1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.5.mlp.fc1.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.5.mlp.fc2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.5.mlp.fc2.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.5.self_attn.k_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.5.self_attn.k_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.5.self_attn.out_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.5.self_attn.out_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.5.self_attn.q_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.5.self_attn.q_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.5.self_attn.v_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.5.self_attn.v_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.6.layer_norm1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.6.layer_norm1.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.6.layer_norm2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.6.layer_norm2.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.6.mlp.fc1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.6.mlp.fc1.weight": "model-00001-of-00002.safetensors", + 
"vision_tower.vision_model.encoder.layers.6.mlp.fc2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.6.mlp.fc2.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.6.self_attn.k_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.6.self_attn.k_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.6.self_attn.out_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.6.self_attn.out_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.6.self_attn.q_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.6.self_attn.q_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.6.self_attn.v_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.6.self_attn.v_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.7.layer_norm1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.7.layer_norm1.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.7.layer_norm2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.7.layer_norm2.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.7.mlp.fc1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.7.mlp.fc1.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.7.mlp.fc2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.7.mlp.fc2.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.7.self_attn.k_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.7.self_attn.k_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.7.self_attn.out_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.7.self_attn.out_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.7.self_attn.q_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.7.self_attn.q_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.7.self_attn.v_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.7.self_attn.v_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.8.layer_norm1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.8.layer_norm1.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.8.layer_norm2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.8.layer_norm2.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.8.mlp.fc1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.8.mlp.fc1.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.8.mlp.fc2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.8.mlp.fc2.weight": "model-00001-of-00002.safetensors", + 
"vision_tower.vision_model.encoder.layers.8.self_attn.k_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.8.self_attn.k_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.8.self_attn.out_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.8.self_attn.out_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.8.self_attn.q_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.8.self_attn.q_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.8.self_attn.v_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.8.self_attn.v_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.9.layer_norm1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.9.layer_norm1.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.9.layer_norm2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.9.layer_norm2.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.9.mlp.fc1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.9.mlp.fc1.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.9.mlp.fc2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.9.mlp.fc2.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.9.self_attn.k_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.9.self_attn.k_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.9.self_attn.out_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.9.self_attn.out_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.9.self_attn.q_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.9.self_attn.q_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.9.self_attn.v_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.9.self_attn.v_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.post_layernorm.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.post_layernorm.weight": "model-00001-of-00002.safetensors" + } +} diff --git a/preprocessor_config.json b/preprocessor_config.json new file mode 100644 index 0000000..b1e00fc --- /dev/null +++ b/preprocessor_config.json @@ -0,0 +1,29 @@ +{ + "do_convert_rgb": null, + "do_normalize": true, + "do_pan_and_scan": null, + "do_rescale": true, + "do_resize": true, + "image_mean": [ + 0.5, + 0.5, + 0.5 + ], + "image_processor_type": "Gemma3ImageProcessor", + "image_seq_length": 256, + "image_std": [ + 0.5, + 0.5, + 0.5 + ], + "pan_and_scan_max_num_crops": null, + "pan_and_scan_min_crop_size": null, + "pan_and_scan_min_ratio_to_activate": null, + "processor_class": "Gemma3Processor", + "resample": 2, + "rescale_factor": 0.00392156862745098, + "size": { + "height": 896, + "width": 896 + } +} diff --git a/processor_config.json b/processor_config.json new file mode 100644 index 0000000..453c796 --- /dev/null +++ 
diff --git a/processor_config.json b/processor_config.json new file mode 100644 index 0000000..453c796 --- /dev/null +++ b/processor_config.json @@ -0,0 +1,4 @@ +{ + "image_seq_length": 256, + "processor_class": "Gemma3Processor" +} diff --git a/special_tokens_map.json b/special_tokens_map.json new file mode 100644 index 0000000..bdd437b --- /dev/null +++ b/special_tokens_map.json @@ -0,0 +1,33 @@ +{ + "boi_token": "<start_of_image>", + "bos_token": { + "content": "<bos>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false + }, + "eoi_token": "<end_of_image>", + "eos_token": { + "content": "<eos>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false + }, + "image_token": "<image_soft_token>", + "pad_token": { + "content": "<pad>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false + }, + "unk_token": { + "content": "<unk>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false + } +} diff --git a/tokenizer.json b/tokenizer.json new file mode 100644 index 0000000..298fb15 --- /dev/null +++ b/tokenizer.json @@ -0,0 +1,2379611 @@ +{ + "version": "1.0", + "truncation": null, + "padding": null, + "added_tokens": [ + { + "id": 0, + "content": "<pad>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 1, + "content": "<eos>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 2, + "content": "<bos>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 3, + "content": "<unk>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 4, + "content": "<mask>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 5, + "content": "[multimodal]", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 6, + "content": "<unused0>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 7, + "content": "<unused1>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 8, + "content": "<unused2>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 9, + "content": "<unused3>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 10, + "content": "<unused4>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 11, + "content": "<unused5>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 12, + "content": "<unused6>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 13, + "content": "<unused7>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 14, + "content": "<unused8>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 15, + "content": "<unused9>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 16, + "content": "<unused10>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 17, + "content": "<unused11>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 18, + "content": "<unused12>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 19, + "content": "<unused13>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 20, + "content": "<unused14>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 21, + "content": "<unused15>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 22, + "content": "<unused16>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 23, + "content": "<unused17>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 24, + "content": "<unused18>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 25, + "content": "<unused19>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 26, + "content": "<unused20>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 27, + "content": "<unused21>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 28, + "content": "<unused22>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 29, + "content": "<unused23>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 30, + "content": "<unused24>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 31, + "content": "<unused25>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 32, + "content": "<unused26>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 33, + "content": "<unused27>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 34, + "content": "<unused28>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 35, + "content": "<unused29>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 36, + "content": "<unused30>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 37, + "content": "<unused31>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 38, + "content": "<unused32>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 39, + "content": "<unused33>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 40, + "content": "<unused34>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 41, + "content": "<unused35>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 42, + "content": "<unused36>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 43, + "content": "<unused37>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 44, + "content": "<unused38>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 45, + "content": "<unused39>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 46, + "content": "<unused40>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 47, + "content": "<unused41>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 48, + "content": "<unused42>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 49, + "content": "<unused43>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 50, + "content": "<unused44>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 51, + "content": "<unused45>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 52, + "content": "<unused46>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 53, + "content": "<unused47>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 54, + "content": "<unused48>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 55, + "content": "<unused49>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 56, + "content": "<unused50>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 57, + "content": "<unused51>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 58, + "content": "<unused52>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 59, + "content": "<unused53>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 60, + "content": "<unused54>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 61, + "content": "<unused55>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 62, + "content": "<unused56>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 63, + "content": "<unused57>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 64, + "content": "<unused58>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 65, + "content": "<unused59>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 66, + "content": "<unused60>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 67, + "content": "<unused61>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 68, + "content": "<unused62>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 69, + "content": "<unused63>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 70, + "content": "<unused64>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 71, + "content": "<unused65>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 72, + "content": "<unused66>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 73, + "content": "<unused67>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 74, + "content": "<unused68>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 75, + "content": "<unused69>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 76, + "content": "<unused70>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 77, + "content": "<unused71>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 78, + "content": "<unused72>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 79, + "content": "<unused73>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 80, + "content": "<unused74>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 81, + "content": "<unused75>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 82, + "content": "<unused76>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 83, + "content": "<unused77>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 84, + "content": "<unused78>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 85, + "content": "<unused79>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 86, + "content": "<unused80>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 87, + "content": "<unused81>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 88, + "content": "<unused82>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 89, + "content": "<unused83>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 90, + "content": "<unused84>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 91, + "content": "<unused85>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 92, + "content": "<unused86>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 93, + "content": "<unused87>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 94, + "content": "<unused88>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 95, + "content": "<unused89>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 96, + "content": "<unused90>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 97, + "content": "<unused91>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 98, + "content": "<unused92>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 99, + "content": "<unused93>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 100, + "content": "<unused94>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 101, + "content": "<unused95>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 102, + "content": "<unused96>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 103, + "content": "<unused97>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 104, + "content": "<unused98>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 105, + "content": "<start_of_turn>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 106, + "content": "<end_of_turn>", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 107, + "content": "\n", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 108, + "content": "\n\n", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 109, + "content": "\n\n\n", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 110, + "content": "\n\n\n\n", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 111, + "content": "\n\n\n\n\n", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 112, + "content": "\n\n\n\n\n\n", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 113, + "content": "\n\n\n\n\n\n\n", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 114, + "content": "\n\n\n\n\n\n\n\n", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 115, + "content": "\n\n\n\n\n\n\n\n\n", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 116, + "content": "\n\n\n\n\n\n\n\n\n\n", + "single_word": false, + "lstrip": false, + "rstrip": false, +
"normalized": false, + "special": false + }, + { + "id": 117, + "content": "\n\n\n\n\n\n\n\n\n\n\n", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 118, + "content": "\n\n\n\n\n\n\n\n\n\n\n\n", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 119, + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 120, + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 121, + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 122, + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 123, + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 124, + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 125, + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 126, + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 127, + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 128, + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 129, + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 130, + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 131, + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 132, + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 133, + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 134, + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 135, + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 136, + "content": 
"\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 137, + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 138, + "content": "▁▁", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 139, + "content": "▁▁▁", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 140, + "content": "▁▁▁▁", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 141, + "content": "▁▁▁▁▁", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 142, + "content": "▁▁▁▁▁▁", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 143, + "content": "▁▁▁▁▁▁▁", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 144, + "content": "▁▁▁▁▁▁▁▁", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 145, + "content": "▁▁▁▁▁▁▁▁▁", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 146, + "content": "▁▁▁▁▁▁▁▁▁▁", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 147, + "content": "▁▁▁▁▁▁▁▁▁▁▁", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 148, + "content": "▁▁▁▁▁▁▁▁▁▁▁▁", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 149, + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 150, + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 151, + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 152, + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 153, + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 154, + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 155, + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 156, + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 157, + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 158, + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "single_word": false, + 
"lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 159, + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 160, + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 161, + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 162, + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 163, + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 164, + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 165, + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 166, + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 167, + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 168, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 169, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 171, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 172, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 173, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 174, + "content": "
", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 170, + "content": "
", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 175, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 176, + "content": "
", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 177, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 178, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 179, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 180, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 181, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 182, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 183, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 184, + "content": "

", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 185, + "content": "

", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 186, + "content": "

", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 187, + "content": "

", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 188, + "content": "

", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 189, + "content": "
", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 190, + "content": "
", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 191, + "content": "
", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 192, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 193, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 194, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 195, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 196, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 197, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 198, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 199, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 200, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 201, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 202, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 203, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 204, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 205, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 206, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 207, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 208, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 209, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 210, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 211, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 212, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 213, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 214, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 215, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + 
"id": 216, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 217, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 218, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 219, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 220, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 221, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": false + }, + { + "id": 222, + "content": "