diff --git a/README.md b/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..bc5f30d6632ac0efdc7be2e9095e9e9579af2e33
--- /dev/null
+++ b/README.md
@@ -0,0 +1,199 @@
+---
+library_name: transformers
+tags: []
+---
+
+# Model Card for Model ID
+
+
+
+
+
+## Model Details
+
+### Model Description
+
+
+
+This is the model card of a 🤗 transformers model that has been pushed on the Hub. This model card has been automatically generated.
+
+- **Developed by:** [More Information Needed]
+- **Funded by [optional]:** [More Information Needed]
+- **Shared by [optional]:** [More Information Needed]
+- **Model type:** [More Information Needed]
+- **Language(s) (NLP):** [More Information Needed]
+- **License:** [More Information Needed]
+- **Finetuned from model [optional]:** [More Information Needed]
+
+### Model Sources [optional]
+
+
+
+- **Repository:** [More Information Needed]
+- **Paper [optional]:** [More Information Needed]
+- **Demo [optional]:** [More Information Needed]
+
+## Uses
+
+
+
+### Direct Use
+
+
+
+[More Information Needed]
+
+### Downstream Use [optional]
+
+
+
+[More Information Needed]
+
+### Out-of-Scope Use
+
+
+
+[More Information Needed]
+
+## Bias, Risks, and Limitations
+
+
+
+[More Information Needed]
+
+### Recommendations
+
+
+
+Users (both direct and downstream) should be made aware of the risks, biases, and limitations of the model. More information is needed for further recommendations.
+
+## How to Get Started with the Model
+
+Use the code below to get started with the model.
+
+[More Information Needed]
+
+## Training Details
+
+### Training Data
+
+
+
+[More Information Needed]
+
+### Training Procedure
+
+
+
+#### Preprocessing [optional]
+
+[More Information Needed]
+
+
+#### Training Hyperparameters
+
+- **Training regime:** [More Information Needed]
+
+#### Speeds, Sizes, Times [optional]
+
+
+
+[More Information Needed]
+
+## Evaluation
+
+
+
+### Testing Data, Factors & Metrics
+
+#### Testing Data
+
+
+
+[More Information Needed]
+
+#### Factors
+
+
+
+[More Information Needed]
+
+#### Metrics
+
+
+
+[More Information Needed]
+
+### Results
+
+[More Information Needed]
+
+#### Summary
+
+
+
+## Model Examination [optional]
+
+
+
+[More Information Needed]
+
+## Environmental Impact
+
+
+
+Carbon emissions can be estimated using the [Machine Learning Impact calculator](https://mlco2.github.io/impact#compute) presented in [Lacoste et al. (2019)](https://arxiv.org/abs/1910.09700).
+
+- **Hardware Type:** [More Information Needed]
+- **Hours used:** [More Information Needed]
+- **Cloud Provider:** [More Information Needed]
+- **Compute Region:** [More Information Needed]
+- **Carbon Emitted:** [More Information Needed]
+
+## Technical Specifications [optional]
+
+### Model Architecture and Objective
+
+[More Information Needed]
+
+### Compute Infrastructure
+
+[More Information Needed]
+
+#### Hardware
+
+[More Information Needed]
+
+#### Software
+
+[More Information Needed]
+
+## Citation [optional]
+
+
+
+**BibTeX:**
+
+[More Information Needed]
+
+**APA:**
+
+[More Information Needed]
+
+## Glossary [optional]
+
+
+
+[More Information Needed]
+
+## More Information [optional]
+
+[More Information Needed]
+
+## Model Card Authors [optional]
+
+[More Information Needed]
+
+## Model Card Contact
+
+[More Information Needed]
\ No newline at end of file
diff --git a/config.json b/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..0560ab155af3daeafd37d193438157f5383f7820
--- /dev/null
+++ b/config.json
@@ -0,0 +1,196 @@
+{
+ "_name_or_path": "/Users/zhuang/repos/ultravox/artifacts/model-zhuang.2024-08-21-ultravox.medium-1j:v5",
+ "architectures": [
+ "UltravoxModel"
+ ],
+ "audio_config": {
+ "_name_or_path": "openai/whisper-medium",
+ "activation_dropout": 0.0,
+ "activation_function": "gelu",
+ "apply_spec_augment": false,
+ "architectures": [
+ "WhisperForConditionalGeneration"
+ ],
+ "attention_dropout": 0.0,
+ "begin_suppress_tokens": [
+ 220,
+ 50257
+ ],
+ "bos_token_id": 50257,
+ "d_model": 1024,
+ "decoder_attention_heads": 16,
+ "decoder_ffn_dim": 4096,
+ "decoder_layerdrop": 0.0,
+ "decoder_layers": 24,
+ "decoder_start_token_id": 50258,
+ "dropout": 0.0,
+ "encoder_attention_heads": 16,
+ "encoder_ffn_dim": 4096,
+ "encoder_layerdrop": 0.0,
+ "encoder_layers": 24,
+ "eos_token_id": 50257,
+ "forced_decoder_ids": [
+ [
+ 1,
+ 50259
+ ],
+ [
+ 2,
+ 50359
+ ],
+ [
+ 3,
+ 50363
+ ]
+ ],
+ "init_std": 0.02,
+ "is_encoder_decoder": true,
+ "max_length": 448,
+ "max_source_positions": 1500,
+ "max_target_positions": 448,
+ "median_filter_width": 7,
+ "model_type": "whisper",
+ "num_hidden_layers": 24,
+ "num_mel_bins": 80,
+ "pad_token_id": 50257,
+ "scale_embedding": false,
+ "suppress_tokens": [
+ 1,
+ 2,
+ 7,
+ 8,
+ 9,
+ 10,
+ 14,
+ 25,
+ 26,
+ 27,
+ 28,
+ 29,
+ 31,
+ 58,
+ 59,
+ 60,
+ 61,
+ 62,
+ 63,
+ 90,
+ 91,
+ 92,
+ 93,
+ 359,
+ 503,
+ 522,
+ 542,
+ 873,
+ 893,
+ 902,
+ 918,
+ 922,
+ 931,
+ 1350,
+ 1853,
+ 1982,
+ 2460,
+ 2627,
+ 3246,
+ 3253,
+ 3268,
+ 3536,
+ 3846,
+ 3961,
+ 4183,
+ 4667,
+ 6585,
+ 6647,
+ 7273,
+ 9061,
+ 9383,
+ 10428,
+ 10929,
+ 11938,
+ 12033,
+ 12331,
+ 12562,
+ 13793,
+ 14157,
+ 14635,
+ 15265,
+ 15618,
+ 16553,
+ 16604,
+ 18362,
+ 18956,
+ 20075,
+ 21675,
+ 22520,
+ 26130,
+ 26161,
+ 26435,
+ 28279,
+ 29464,
+ 31650,
+ 32302,
+ 32470,
+ 36865,
+ 42863,
+ 47425,
+ 49870,
+ 50254,
+ 50258,
+ 50358,
+ 50359,
+ 50360,
+ 50361,
+ 50362
+ ],
+ "torch_dtype": "float32",
+ "use_cache": true,
+ "vocab_size": 51865
+ },
+ "audio_model_id": "openai/whisper-medium",
+ "audio_token_index": 32000,
+ "auto_map": {
+ "AutoConfig": "ultravox_config.UltravoxConfig",
+ "AutoModel": "ultravox_model.UltravoxModel"
+ },
+ "custom_pipelines": {
+ "ultravox-pipeline": {
+ "impl": "ultravox_pipeline.UltravoxPipeline",
+ "pt": [
+ "AutoModel"
+ ],
+ "tf": [],
+ "type": "multimodal"
+ }
+ },
+ "hidden_size": 4096,
+ "ignore_index": -100,
+ "initializer_range": 0.02,
+ "model_type": "ultravox",
+ "norm_init": 0.4,
+ "projector_act": "swiglu",
+ "stack_factor": 8,
+ "text_config": {
+ "_name_or_path": "mistralai/Mistral-Nemo-Instruct-2407",
+ "architectures": [
+ "MistralForCausalLM"
+ ],
+ "head_dim": 128,
+ "hidden_size": 5120,
+ "intermediate_size": 14336,
+ "max_position_embeddings": 1024000,
+ "model_type": "mistral",
+ "num_hidden_layers": 40,
+ "num_key_value_heads": 8,
+ "rms_norm_eps": 1e-05,
+ "rope_theta": 1000000.0,
+ "sliding_window": null,
+ "torch_dtype": "bfloat16",
+ "vocab_size": 131072
+ },
+ "text_model_id": null,
+ "torch_dtype": "bfloat16",
+ "transformers_version": "4.44.0",
+ "vocab_size": 131072
+}
diff --git a/generation_config.json b/generation_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..d76ea45f910f5fceda4f9752bfc5dfaa0e23b454
--- /dev/null
+++ b/generation_config.json
@@ -0,0 +1,7 @@
+{
+ "_from_model_config": true,
+ "bos_token_id": 1,
+ "eos_token_id": 2,
+ "pad_token_id": 2,
+ "transformers_version": "4.44.0"
+}
diff --git a/model-00001-of-00005.safetensors b/model-00001-of-00005.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..0adb4be5ddb37c358b097fff6fbdd5ba069ab7b3
--- /dev/null
+++ b/model-00001-of-00005.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8d5fd3d46b9695c013286bfd5bec2d36c174880da4ca081806885cc717b9e2a5
+size 4953630856
diff --git a/model-00002-of-00005.safetensors b/model-00002-of-00005.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..4a9a7ad0a0959e589fdcc42b009d56240d9b9b7a
--- /dev/null
+++ b/model-00002-of-00005.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:51d105843958b7619bd1a61b82342b5ae2fd98c52a30e955b3871578e3358ed2
+size 4907530640
diff --git a/model-00003-of-00005.safetensors b/model-00003-of-00005.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..3f72a264d2d45ef7d20ffe5ea6c05357c2c9cef3
--- /dev/null
+++ b/model-00003-of-00005.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:85f7e6b53b07ce006b8d86363d3ae4ac2569787ec483b206542faed1a758dae8
+size 4907530672
diff --git a/model-00004-of-00005.safetensors b/model-00004-of-00005.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..b8c02832e2437e4fdaf9d0f3fc7b611d23ed4889
--- /dev/null
+++ b/model-00004-of-00005.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:70ca37fe132206dc600cc9388ae53318cb4d63b3d3f698cfea99f8a90555dd35
+size 4907530672
diff --git a/model-00005-of-00005.safetensors b/model-00005-of-00005.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..f0ae5e0d8843e5e02f45c7c8591ed4d1c2cb2321
--- /dev/null
+++ b/model-00005-of-00005.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:99266a783cd09c5d8832a88797f30ab1a5d3c8a29ecade15662bbc4d6a153f0d
+size 4907497168
diff --git a/model.safetensors.index.json b/model.safetensors.index.json
new file mode 100644
index 0000000000000000000000000000000000000000..70de608b340289b21197c193ec29eea7c46c227d
--- /dev/null
+++ b/model.safetensors.index.json
@@ -0,0 +1,374 @@
+{
+ "metadata": {
+ "total_size": 24583671808
+ },
+ "weight_map": {
+ "language_model.lm_head.weight": "model-00005-of-00005.safetensors",
+ "language_model.model.embed_tokens.weight": "model-00001-of-00005.safetensors",
+ "language_model.model.layers.0.input_layernorm.weight": "model-00001-of-00005.safetensors",
+ "language_model.model.layers.0.mlp.down_proj.weight": "model-00001-of-00005.safetensors",
+ "language_model.model.layers.0.mlp.gate_proj.weight": "model-00001-of-00005.safetensors",
+ "language_model.model.layers.0.mlp.up_proj.weight": "model-00001-of-00005.safetensors",
+ "language_model.model.layers.0.post_attention_layernorm.weight": "model-00001-of-00005.safetensors",
+ "language_model.model.layers.0.self_attn.k_proj.weight": "model-00001-of-00005.safetensors",
+ "language_model.model.layers.0.self_attn.o_proj.weight": "model-00001-of-00005.safetensors",
+ "language_model.model.layers.0.self_attn.q_proj.weight": "model-00001-of-00005.safetensors",
+ "language_model.model.layers.0.self_attn.v_proj.weight": "model-00001-of-00005.safetensors",
+ "language_model.model.layers.1.input_layernorm.weight": "model-00001-of-00005.safetensors",
+ "language_model.model.layers.1.mlp.down_proj.weight": "model-00001-of-00005.safetensors",
+ "language_model.model.layers.1.mlp.gate_proj.weight": "model-00001-of-00005.safetensors",
+ "language_model.model.layers.1.mlp.up_proj.weight": "model-00001-of-00005.safetensors",
+ "language_model.model.layers.1.post_attention_layernorm.weight": "model-00001-of-00005.safetensors",
+ "language_model.model.layers.1.self_attn.k_proj.weight": "model-00001-of-00005.safetensors",
+ "language_model.model.layers.1.self_attn.o_proj.weight": "model-00001-of-00005.safetensors",
+ "language_model.model.layers.1.self_attn.q_proj.weight": "model-00001-of-00005.safetensors",
+ "language_model.model.layers.1.self_attn.v_proj.weight": "model-00001-of-00005.safetensors",
+ "language_model.model.layers.10.input_layernorm.weight": "model-00002-of-00005.safetensors",
+ "language_model.model.layers.10.mlp.down_proj.weight": "model-00002-of-00005.safetensors",
+ "language_model.model.layers.10.mlp.gate_proj.weight": "model-00002-of-00005.safetensors",
+ "language_model.model.layers.10.mlp.up_proj.weight": "model-00002-of-00005.safetensors",
+ "language_model.model.layers.10.post_attention_layernorm.weight": "model-00002-of-00005.safetensors",
+ "language_model.model.layers.10.self_attn.k_proj.weight": "model-00002-of-00005.safetensors",
+ "language_model.model.layers.10.self_attn.o_proj.weight": "model-00002-of-00005.safetensors",
+ "language_model.model.layers.10.self_attn.q_proj.weight": "model-00002-of-00005.safetensors",
+ "language_model.model.layers.10.self_attn.v_proj.weight": "model-00002-of-00005.safetensors",
+ "language_model.model.layers.11.input_layernorm.weight": "model-00002-of-00005.safetensors",
+ "language_model.model.layers.11.mlp.down_proj.weight": "model-00002-of-00005.safetensors",
+ "language_model.model.layers.11.mlp.gate_proj.weight": "model-00002-of-00005.safetensors",
+ "language_model.model.layers.11.mlp.up_proj.weight": "model-00002-of-00005.safetensors",
+ "language_model.model.layers.11.post_attention_layernorm.weight": "model-00002-of-00005.safetensors",
+ "language_model.model.layers.11.self_attn.k_proj.weight": "model-00002-of-00005.safetensors",
+ "language_model.model.layers.11.self_attn.o_proj.weight": "model-00002-of-00005.safetensors",
+ "language_model.model.layers.11.self_attn.q_proj.weight": "model-00002-of-00005.safetensors",
+ "language_model.model.layers.11.self_attn.v_proj.weight": "model-00002-of-00005.safetensors",
+ "language_model.model.layers.12.input_layernorm.weight": "model-00002-of-00005.safetensors",
+ "language_model.model.layers.12.mlp.down_proj.weight": "model-00002-of-00005.safetensors",
+ "language_model.model.layers.12.mlp.gate_proj.weight": "model-00002-of-00005.safetensors",
+ "language_model.model.layers.12.mlp.up_proj.weight": "model-00002-of-00005.safetensors",
+ "language_model.model.layers.12.post_attention_layernorm.weight": "model-00002-of-00005.safetensors",
+ "language_model.model.layers.12.self_attn.k_proj.weight": "model-00002-of-00005.safetensors",
+ "language_model.model.layers.12.self_attn.o_proj.weight": "model-00002-of-00005.safetensors",
+ "language_model.model.layers.12.self_attn.q_proj.weight": "model-00002-of-00005.safetensors",
+ "language_model.model.layers.12.self_attn.v_proj.weight": "model-00002-of-00005.safetensors",
+ "language_model.model.layers.13.input_layernorm.weight": "model-00002-of-00005.safetensors",
+ "language_model.model.layers.13.mlp.down_proj.weight": "model-00002-of-00005.safetensors",
+ "language_model.model.layers.13.mlp.gate_proj.weight": "model-00002-of-00005.safetensors",
+ "language_model.model.layers.13.mlp.up_proj.weight": "model-00002-of-00005.safetensors",
+ "language_model.model.layers.13.post_attention_layernorm.weight": "model-00002-of-00005.safetensors",
+ "language_model.model.layers.13.self_attn.k_proj.weight": "model-00002-of-00005.safetensors",
+ "language_model.model.layers.13.self_attn.o_proj.weight": "model-00002-of-00005.safetensors",
+ "language_model.model.layers.13.self_attn.q_proj.weight": "model-00002-of-00005.safetensors",
+ "language_model.model.layers.13.self_attn.v_proj.weight": "model-00002-of-00005.safetensors",
+ "language_model.model.layers.14.input_layernorm.weight": "model-00002-of-00005.safetensors",
+ "language_model.model.layers.14.mlp.down_proj.weight": "model-00002-of-00005.safetensors",
+ "language_model.model.layers.14.mlp.gate_proj.weight": "model-00002-of-00005.safetensors",
+ "language_model.model.layers.14.mlp.up_proj.weight": "model-00002-of-00005.safetensors",
+ "language_model.model.layers.14.post_attention_layernorm.weight": "model-00002-of-00005.safetensors",
+ "language_model.model.layers.14.self_attn.k_proj.weight": "model-00002-of-00005.safetensors",
+ "language_model.model.layers.14.self_attn.o_proj.weight": "model-00002-of-00005.safetensors",
+ "language_model.model.layers.14.self_attn.q_proj.weight": "model-00002-of-00005.safetensors",
+ "language_model.model.layers.14.self_attn.v_proj.weight": "model-00002-of-00005.safetensors",
+ "language_model.model.layers.15.input_layernorm.weight": "model-00003-of-00005.safetensors",
+ "language_model.model.layers.15.mlp.down_proj.weight": "model-00003-of-00005.safetensors",
+ "language_model.model.layers.15.mlp.gate_proj.weight": "model-00002-of-00005.safetensors",
+ "language_model.model.layers.15.mlp.up_proj.weight": "model-00003-of-00005.safetensors",
+ "language_model.model.layers.15.post_attention_layernorm.weight": "model-00003-of-00005.safetensors",
+ "language_model.model.layers.15.self_attn.k_proj.weight": "model-00002-of-00005.safetensors",
+ "language_model.model.layers.15.self_attn.o_proj.weight": "model-00002-of-00005.safetensors",
+ "language_model.model.layers.15.self_attn.q_proj.weight": "model-00002-of-00005.safetensors",
+ "language_model.model.layers.15.self_attn.v_proj.weight": "model-00002-of-00005.safetensors",
+ "language_model.model.layers.16.input_layernorm.weight": "model-00003-of-00005.safetensors",
+ "language_model.model.layers.16.mlp.down_proj.weight": "model-00003-of-00005.safetensors",
+ "language_model.model.layers.16.mlp.gate_proj.weight": "model-00003-of-00005.safetensors",
+ "language_model.model.layers.16.mlp.up_proj.weight": "model-00003-of-00005.safetensors",
+ "language_model.model.layers.16.post_attention_layernorm.weight": "model-00003-of-00005.safetensors",
+ "language_model.model.layers.16.self_attn.k_proj.weight": "model-00003-of-00005.safetensors",
+ "language_model.model.layers.16.self_attn.o_proj.weight": "model-00003-of-00005.safetensors",
+ "language_model.model.layers.16.self_attn.q_proj.weight": "model-00003-of-00005.safetensors",
+ "language_model.model.layers.16.self_attn.v_proj.weight": "model-00003-of-00005.safetensors",
+ "language_model.model.layers.17.input_layernorm.weight": "model-00003-of-00005.safetensors",
+ "language_model.model.layers.17.mlp.down_proj.weight": "model-00003-of-00005.safetensors",
+ "language_model.model.layers.17.mlp.gate_proj.weight": "model-00003-of-00005.safetensors",
+ "language_model.model.layers.17.mlp.up_proj.weight": "model-00003-of-00005.safetensors",
+ "language_model.model.layers.17.post_attention_layernorm.weight": "model-00003-of-00005.safetensors",
+ "language_model.model.layers.17.self_attn.k_proj.weight": "model-00003-of-00005.safetensors",
+ "language_model.model.layers.17.self_attn.o_proj.weight": "model-00003-of-00005.safetensors",
+ "language_model.model.layers.17.self_attn.q_proj.weight": "model-00003-of-00005.safetensors",
+ "language_model.model.layers.17.self_attn.v_proj.weight": "model-00003-of-00005.safetensors",
+ "language_model.model.layers.18.input_layernorm.weight": "model-00003-of-00005.safetensors",
+ "language_model.model.layers.18.mlp.down_proj.weight": "model-00003-of-00005.safetensors",
+ "language_model.model.layers.18.mlp.gate_proj.weight": "model-00003-of-00005.safetensors",
+ "language_model.model.layers.18.mlp.up_proj.weight": "model-00003-of-00005.safetensors",
+ "language_model.model.layers.18.post_attention_layernorm.weight": "model-00003-of-00005.safetensors",
+ "language_model.model.layers.18.self_attn.k_proj.weight": "model-00003-of-00005.safetensors",
+ "language_model.model.layers.18.self_attn.o_proj.weight": "model-00003-of-00005.safetensors",
+ "language_model.model.layers.18.self_attn.q_proj.weight": "model-00003-of-00005.safetensors",
+ "language_model.model.layers.18.self_attn.v_proj.weight": "model-00003-of-00005.safetensors",
+ "language_model.model.layers.19.input_layernorm.weight": "model-00003-of-00005.safetensors",
+ "language_model.model.layers.19.mlp.down_proj.weight": "model-00003-of-00005.safetensors",
+ "language_model.model.layers.19.mlp.gate_proj.weight": "model-00003-of-00005.safetensors",
+ "language_model.model.layers.19.mlp.up_proj.weight": "model-00003-of-00005.safetensors",
+ "language_model.model.layers.19.post_attention_layernorm.weight": "model-00003-of-00005.safetensors",
+ "language_model.model.layers.19.self_attn.k_proj.weight": "model-00003-of-00005.safetensors",
+ "language_model.model.layers.19.self_attn.o_proj.weight": "model-00003-of-00005.safetensors",
+ "language_model.model.layers.19.self_attn.q_proj.weight": "model-00003-of-00005.safetensors",
+ "language_model.model.layers.19.self_attn.v_proj.weight": "model-00003-of-00005.safetensors",
+ "language_model.model.layers.2.input_layernorm.weight": "model-00001-of-00005.safetensors",
+ "language_model.model.layers.2.mlp.down_proj.weight": "model-00001-of-00005.safetensors",
+ "language_model.model.layers.2.mlp.gate_proj.weight": "model-00001-of-00005.safetensors",
+ "language_model.model.layers.2.mlp.up_proj.weight": "model-00001-of-00005.safetensors",
+ "language_model.model.layers.2.post_attention_layernorm.weight": "model-00001-of-00005.safetensors",
+ "language_model.model.layers.2.self_attn.k_proj.weight": "model-00001-of-00005.safetensors",
+ "language_model.model.layers.2.self_attn.o_proj.weight": "model-00001-of-00005.safetensors",
+ "language_model.model.layers.2.self_attn.q_proj.weight": "model-00001-of-00005.safetensors",
+ "language_model.model.layers.2.self_attn.v_proj.weight": "model-00001-of-00005.safetensors",
+ "language_model.model.layers.20.input_layernorm.weight": "model-00003-of-00005.safetensors",
+ "language_model.model.layers.20.mlp.down_proj.weight": "model-00003-of-00005.safetensors",
+ "language_model.model.layers.20.mlp.gate_proj.weight": "model-00003-of-00005.safetensors",
+ "language_model.model.layers.20.mlp.up_proj.weight": "model-00003-of-00005.safetensors",
+ "language_model.model.layers.20.post_attention_layernorm.weight": "model-00003-of-00005.safetensors",
+ "language_model.model.layers.20.self_attn.k_proj.weight": "model-00003-of-00005.safetensors",
+ "language_model.model.layers.20.self_attn.o_proj.weight": "model-00003-of-00005.safetensors",
+ "language_model.model.layers.20.self_attn.q_proj.weight": "model-00003-of-00005.safetensors",
+ "language_model.model.layers.20.self_attn.v_proj.weight": "model-00003-of-00005.safetensors",
+ "language_model.model.layers.21.input_layernorm.weight": "model-00003-of-00005.safetensors",
+ "language_model.model.layers.21.mlp.down_proj.weight": "model-00003-of-00005.safetensors",
+ "language_model.model.layers.21.mlp.gate_proj.weight": "model-00003-of-00005.safetensors",
+ "language_model.model.layers.21.mlp.up_proj.weight": "model-00003-of-00005.safetensors",
+ "language_model.model.layers.21.post_attention_layernorm.weight": "model-00003-of-00005.safetensors",
+ "language_model.model.layers.21.self_attn.k_proj.weight": "model-00003-of-00005.safetensors",
+ "language_model.model.layers.21.self_attn.o_proj.weight": "model-00003-of-00005.safetensors",
+ "language_model.model.layers.21.self_attn.q_proj.weight": "model-00003-of-00005.safetensors",
+ "language_model.model.layers.21.self_attn.v_proj.weight": "model-00003-of-00005.safetensors",
+ "language_model.model.layers.22.input_layernorm.weight": "model-00003-of-00005.safetensors",
+ "language_model.model.layers.22.mlp.down_proj.weight": "model-00003-of-00005.safetensors",
+ "language_model.model.layers.22.mlp.gate_proj.weight": "model-00003-of-00005.safetensors",
+ "language_model.model.layers.22.mlp.up_proj.weight": "model-00003-of-00005.safetensors",
+ "language_model.model.layers.22.post_attention_layernorm.weight": "model-00003-of-00005.safetensors",
+ "language_model.model.layers.22.self_attn.k_proj.weight": "model-00003-of-00005.safetensors",
+ "language_model.model.layers.22.self_attn.o_proj.weight": "model-00003-of-00005.safetensors",
+ "language_model.model.layers.22.self_attn.q_proj.weight": "model-00003-of-00005.safetensors",
+ "language_model.model.layers.22.self_attn.v_proj.weight": "model-00003-of-00005.safetensors",
+ "language_model.model.layers.23.input_layernorm.weight": "model-00003-of-00005.safetensors",
+ "language_model.model.layers.23.mlp.down_proj.weight": "model-00003-of-00005.safetensors",
+ "language_model.model.layers.23.mlp.gate_proj.weight": "model-00003-of-00005.safetensors",
+ "language_model.model.layers.23.mlp.up_proj.weight": "model-00003-of-00005.safetensors",
+ "language_model.model.layers.23.post_attention_layernorm.weight": "model-00003-of-00005.safetensors",
+ "language_model.model.layers.23.self_attn.k_proj.weight": "model-00003-of-00005.safetensors",
+ "language_model.model.layers.23.self_attn.o_proj.weight": "model-00003-of-00005.safetensors",
+ "language_model.model.layers.23.self_attn.q_proj.weight": "model-00003-of-00005.safetensors",
+ "language_model.model.layers.23.self_attn.v_proj.weight": "model-00003-of-00005.safetensors",
+ "language_model.model.layers.24.input_layernorm.weight": "model-00004-of-00005.safetensors",
+ "language_model.model.layers.24.mlp.down_proj.weight": "model-00004-of-00005.safetensors",
+ "language_model.model.layers.24.mlp.gate_proj.weight": "model-00003-of-00005.safetensors",
+ "language_model.model.layers.24.mlp.up_proj.weight": "model-00004-of-00005.safetensors",
+ "language_model.model.layers.24.post_attention_layernorm.weight": "model-00004-of-00005.safetensors",
+ "language_model.model.layers.24.self_attn.k_proj.weight": "model-00003-of-00005.safetensors",
+ "language_model.model.layers.24.self_attn.o_proj.weight": "model-00003-of-00005.safetensors",
+ "language_model.model.layers.24.self_attn.q_proj.weight": "model-00003-of-00005.safetensors",
+ "language_model.model.layers.24.self_attn.v_proj.weight": "model-00003-of-00005.safetensors",
+ "language_model.model.layers.25.input_layernorm.weight": "model-00004-of-00005.safetensors",
+ "language_model.model.layers.25.mlp.down_proj.weight": "model-00004-of-00005.safetensors",
+ "language_model.model.layers.25.mlp.gate_proj.weight": "model-00004-of-00005.safetensors",
+ "language_model.model.layers.25.mlp.up_proj.weight": "model-00004-of-00005.safetensors",
+ "language_model.model.layers.25.post_attention_layernorm.weight": "model-00004-of-00005.safetensors",
+ "language_model.model.layers.25.self_attn.k_proj.weight": "model-00004-of-00005.safetensors",
+ "language_model.model.layers.25.self_attn.o_proj.weight": "model-00004-of-00005.safetensors",
+ "language_model.model.layers.25.self_attn.q_proj.weight": "model-00004-of-00005.safetensors",
+ "language_model.model.layers.25.self_attn.v_proj.weight": "model-00004-of-00005.safetensors",
+ "language_model.model.layers.26.input_layernorm.weight": "model-00004-of-00005.safetensors",
+ "language_model.model.layers.26.mlp.down_proj.weight": "model-00004-of-00005.safetensors",
+ "language_model.model.layers.26.mlp.gate_proj.weight": "model-00004-of-00005.safetensors",
+ "language_model.model.layers.26.mlp.up_proj.weight": "model-00004-of-00005.safetensors",
+ "language_model.model.layers.26.post_attention_layernorm.weight": "model-00004-of-00005.safetensors",
+ "language_model.model.layers.26.self_attn.k_proj.weight": "model-00004-of-00005.safetensors",
+ "language_model.model.layers.26.self_attn.o_proj.weight": "model-00004-of-00005.safetensors",
+ "language_model.model.layers.26.self_attn.q_proj.weight": "model-00004-of-00005.safetensors",
+ "language_model.model.layers.26.self_attn.v_proj.weight": "model-00004-of-00005.safetensors",
+ "language_model.model.layers.27.input_layernorm.weight": "model-00004-of-00005.safetensors",
+ "language_model.model.layers.27.mlp.down_proj.weight": "model-00004-of-00005.safetensors",
+ "language_model.model.layers.27.mlp.gate_proj.weight": "model-00004-of-00005.safetensors",
+ "language_model.model.layers.27.mlp.up_proj.weight": "model-00004-of-00005.safetensors",
+ "language_model.model.layers.27.post_attention_layernorm.weight": "model-00004-of-00005.safetensors",
+ "language_model.model.layers.27.self_attn.k_proj.weight": "model-00004-of-00005.safetensors",
+ "language_model.model.layers.27.self_attn.o_proj.weight": "model-00004-of-00005.safetensors",
+ "language_model.model.layers.27.self_attn.q_proj.weight": "model-00004-of-00005.safetensors",
+ "language_model.model.layers.27.self_attn.v_proj.weight": "model-00004-of-00005.safetensors",
+ "language_model.model.layers.28.input_layernorm.weight": "model-00004-of-00005.safetensors",
+ "language_model.model.layers.28.mlp.down_proj.weight": "model-00004-of-00005.safetensors",
+ "language_model.model.layers.28.mlp.gate_proj.weight": "model-00004-of-00005.safetensors",
+ "language_model.model.layers.28.mlp.up_proj.weight": "model-00004-of-00005.safetensors",
+ "language_model.model.layers.28.post_attention_layernorm.weight": "model-00004-of-00005.safetensors",
+ "language_model.model.layers.28.self_attn.k_proj.weight": "model-00004-of-00005.safetensors",
+ "language_model.model.layers.28.self_attn.o_proj.weight": "model-00004-of-00005.safetensors",
+ "language_model.model.layers.28.self_attn.q_proj.weight": "model-00004-of-00005.safetensors",
+ "language_model.model.layers.28.self_attn.v_proj.weight": "model-00004-of-00005.safetensors",
+ "language_model.model.layers.29.input_layernorm.weight": "model-00004-of-00005.safetensors",
+ "language_model.model.layers.29.mlp.down_proj.weight": "model-00004-of-00005.safetensors",
+ "language_model.model.layers.29.mlp.gate_proj.weight": "model-00004-of-00005.safetensors",
+ "language_model.model.layers.29.mlp.up_proj.weight": "model-00004-of-00005.safetensors",
+ "language_model.model.layers.29.post_attention_layernorm.weight": "model-00004-of-00005.safetensors",
+ "language_model.model.layers.29.self_attn.k_proj.weight": "model-00004-of-00005.safetensors",
+ "language_model.model.layers.29.self_attn.o_proj.weight": "model-00004-of-00005.safetensors",
+ "language_model.model.layers.29.self_attn.q_proj.weight": "model-00004-of-00005.safetensors",
+ "language_model.model.layers.29.self_attn.v_proj.weight": "model-00004-of-00005.safetensors",
+ "language_model.model.layers.3.input_layernorm.weight": "model-00001-of-00005.safetensors",
+ "language_model.model.layers.3.mlp.down_proj.weight": "model-00001-of-00005.safetensors",
+ "language_model.model.layers.3.mlp.gate_proj.weight": "model-00001-of-00005.safetensors",
+ "language_model.model.layers.3.mlp.up_proj.weight": "model-00001-of-00005.safetensors",
+ "language_model.model.layers.3.post_attention_layernorm.weight": "model-00001-of-00005.safetensors",
+ "language_model.model.layers.3.self_attn.k_proj.weight": "model-00001-of-00005.safetensors",
+ "language_model.model.layers.3.self_attn.o_proj.weight": "model-00001-of-00005.safetensors",
+ "language_model.model.layers.3.self_attn.q_proj.weight": "model-00001-of-00005.safetensors",
+ "language_model.model.layers.3.self_attn.v_proj.weight": "model-00001-of-00005.safetensors",
+ "language_model.model.layers.30.input_layernorm.weight": "model-00004-of-00005.safetensors",
+ "language_model.model.layers.30.mlp.down_proj.weight": "model-00004-of-00005.safetensors",
+ "language_model.model.layers.30.mlp.gate_proj.weight": "model-00004-of-00005.safetensors",
+ "language_model.model.layers.30.mlp.up_proj.weight": "model-00004-of-00005.safetensors",
+ "language_model.model.layers.30.post_attention_layernorm.weight": "model-00004-of-00005.safetensors",
+ "language_model.model.layers.30.self_attn.k_proj.weight": "model-00004-of-00005.safetensors",
+ "language_model.model.layers.30.self_attn.o_proj.weight": "model-00004-of-00005.safetensors",
+ "language_model.model.layers.30.self_attn.q_proj.weight": "model-00004-of-00005.safetensors",
+ "language_model.model.layers.30.self_attn.v_proj.weight": "model-00004-of-00005.safetensors",
+ "language_model.model.layers.31.input_layernorm.weight": "model-00004-of-00005.safetensors",
+ "language_model.model.layers.31.mlp.down_proj.weight": "model-00004-of-00005.safetensors",
+ "language_model.model.layers.31.mlp.gate_proj.weight": "model-00004-of-00005.safetensors",
+ "language_model.model.layers.31.mlp.up_proj.weight": "model-00004-of-00005.safetensors",
+ "language_model.model.layers.31.post_attention_layernorm.weight": "model-00004-of-00005.safetensors",
+ "language_model.model.layers.31.self_attn.k_proj.weight": "model-00004-of-00005.safetensors",
+ "language_model.model.layers.31.self_attn.o_proj.weight": "model-00004-of-00005.safetensors",
+ "language_model.model.layers.31.self_attn.q_proj.weight": "model-00004-of-00005.safetensors",
+ "language_model.model.layers.31.self_attn.v_proj.weight": "model-00004-of-00005.safetensors",
+ "language_model.model.layers.32.input_layernorm.weight": "model-00004-of-00005.safetensors",
+ "language_model.model.layers.32.mlp.down_proj.weight": "model-00004-of-00005.safetensors",
+ "language_model.model.layers.32.mlp.gate_proj.weight": "model-00004-of-00005.safetensors",
+ "language_model.model.layers.32.mlp.up_proj.weight": "model-00004-of-00005.safetensors",
+ "language_model.model.layers.32.post_attention_layernorm.weight": "model-00004-of-00005.safetensors",
+ "language_model.model.layers.32.self_attn.k_proj.weight": "model-00004-of-00005.safetensors",
+ "language_model.model.layers.32.self_attn.o_proj.weight": "model-00004-of-00005.safetensors",
+ "language_model.model.layers.32.self_attn.q_proj.weight": "model-00004-of-00005.safetensors",
+ "language_model.model.layers.32.self_attn.v_proj.weight": "model-00004-of-00005.safetensors",
+ "language_model.model.layers.33.input_layernorm.weight": "model-00005-of-00005.safetensors",
+ "language_model.model.layers.33.mlp.down_proj.weight": "model-00005-of-00005.safetensors",
+ "language_model.model.layers.33.mlp.gate_proj.weight": "model-00004-of-00005.safetensors",
+ "language_model.model.layers.33.mlp.up_proj.weight": "model-00005-of-00005.safetensors",
+ "language_model.model.layers.33.post_attention_layernorm.weight": "model-00005-of-00005.safetensors",
+ "language_model.model.layers.33.self_attn.k_proj.weight": "model-00004-of-00005.safetensors",
+ "language_model.model.layers.33.self_attn.o_proj.weight": "model-00004-of-00005.safetensors",
+ "language_model.model.layers.33.self_attn.q_proj.weight": "model-00004-of-00005.safetensors",
+ "language_model.model.layers.33.self_attn.v_proj.weight": "model-00004-of-00005.safetensors",
+ "language_model.model.layers.34.input_layernorm.weight": "model-00005-of-00005.safetensors",
+ "language_model.model.layers.34.mlp.down_proj.weight": "model-00005-of-00005.safetensors",
+ "language_model.model.layers.34.mlp.gate_proj.weight": "model-00005-of-00005.safetensors",
+ "language_model.model.layers.34.mlp.up_proj.weight": "model-00005-of-00005.safetensors",
+ "language_model.model.layers.34.post_attention_layernorm.weight": "model-00005-of-00005.safetensors",
+ "language_model.model.layers.34.self_attn.k_proj.weight": "model-00005-of-00005.safetensors",
+ "language_model.model.layers.34.self_attn.o_proj.weight": "model-00005-of-00005.safetensors",
+ "language_model.model.layers.34.self_attn.q_proj.weight": "model-00005-of-00005.safetensors",
+ "language_model.model.layers.34.self_attn.v_proj.weight": "model-00005-of-00005.safetensors",
+ "language_model.model.layers.35.input_layernorm.weight": "model-00005-of-00005.safetensors",
+ "language_model.model.layers.35.mlp.down_proj.weight": "model-00005-of-00005.safetensors",
+ "language_model.model.layers.35.mlp.gate_proj.weight": "model-00005-of-00005.safetensors",
+ "language_model.model.layers.35.mlp.up_proj.weight": "model-00005-of-00005.safetensors",
+ "language_model.model.layers.35.post_attention_layernorm.weight": "model-00005-of-00005.safetensors",
+ "language_model.model.layers.35.self_attn.k_proj.weight": "model-00005-of-00005.safetensors",
+ "language_model.model.layers.35.self_attn.o_proj.weight": "model-00005-of-00005.safetensors",
+ "language_model.model.layers.35.self_attn.q_proj.weight": "model-00005-of-00005.safetensors",
+ "language_model.model.layers.35.self_attn.v_proj.weight": "model-00005-of-00005.safetensors",
+ "language_model.model.layers.36.input_layernorm.weight": "model-00005-of-00005.safetensors",
+ "language_model.model.layers.36.mlp.down_proj.weight": "model-00005-of-00005.safetensors",
+ "language_model.model.layers.36.mlp.gate_proj.weight": "model-00005-of-00005.safetensors",
+ "language_model.model.layers.36.mlp.up_proj.weight": "model-00005-of-00005.safetensors",
+ "language_model.model.layers.36.post_attention_layernorm.weight": "model-00005-of-00005.safetensors",
+ "language_model.model.layers.36.self_attn.k_proj.weight": "model-00005-of-00005.safetensors",
+ "language_model.model.layers.36.self_attn.o_proj.weight": "model-00005-of-00005.safetensors",
+ "language_model.model.layers.36.self_attn.q_proj.weight": "model-00005-of-00005.safetensors",
+ "language_model.model.layers.36.self_attn.v_proj.weight": "model-00005-of-00005.safetensors",
+ "language_model.model.layers.37.input_layernorm.weight": "model-00005-of-00005.safetensors",
+ "language_model.model.layers.37.mlp.down_proj.weight": "model-00005-of-00005.safetensors",
+ "language_model.model.layers.37.mlp.gate_proj.weight": "model-00005-of-00005.safetensors",
+ "language_model.model.layers.37.mlp.up_proj.weight": "model-00005-of-00005.safetensors",
+ "language_model.model.layers.37.post_attention_layernorm.weight": "model-00005-of-00005.safetensors",
+ "language_model.model.layers.37.self_attn.k_proj.weight": "model-00005-of-00005.safetensors",
+ "language_model.model.layers.37.self_attn.o_proj.weight": "model-00005-of-00005.safetensors",
+ "language_model.model.layers.37.self_attn.q_proj.weight": "model-00005-of-00005.safetensors",
+ "language_model.model.layers.37.self_attn.v_proj.weight": "model-00005-of-00005.safetensors",
+ "language_model.model.layers.38.input_layernorm.weight": "model-00005-of-00005.safetensors",
+ "language_model.model.layers.38.mlp.down_proj.weight": "model-00005-of-00005.safetensors",
+ "language_model.model.layers.38.mlp.gate_proj.weight": "model-00005-of-00005.safetensors",
+ "language_model.model.layers.38.mlp.up_proj.weight": "model-00005-of-00005.safetensors",
+ "language_model.model.layers.38.post_attention_layernorm.weight": "model-00005-of-00005.safetensors",
+ "language_model.model.layers.38.self_attn.k_proj.weight": "model-00005-of-00005.safetensors",
+ "language_model.model.layers.38.self_attn.o_proj.weight": "model-00005-of-00005.safetensors",
+ "language_model.model.layers.38.self_attn.q_proj.weight": "model-00005-of-00005.safetensors",
+ "language_model.model.layers.38.self_attn.v_proj.weight": "model-00005-of-00005.safetensors",
+ "language_model.model.layers.39.input_layernorm.weight": "model-00005-of-00005.safetensors",
+ "language_model.model.layers.39.mlp.down_proj.weight": "model-00005-of-00005.safetensors",
+ "language_model.model.layers.39.mlp.gate_proj.weight": "model-00005-of-00005.safetensors",
+ "language_model.model.layers.39.mlp.up_proj.weight": "model-00005-of-00005.safetensors",
+ "language_model.model.layers.39.post_attention_layernorm.weight": "model-00005-of-00005.safetensors",
+ "language_model.model.layers.39.self_attn.k_proj.weight": "model-00005-of-00005.safetensors",
+ "language_model.model.layers.39.self_attn.o_proj.weight": "model-00005-of-00005.safetensors",
+ "language_model.model.layers.39.self_attn.q_proj.weight": "model-00005-of-00005.safetensors",
+ "language_model.model.layers.39.self_attn.v_proj.weight": "model-00005-of-00005.safetensors",
+ "language_model.model.layers.4.input_layernorm.weight": "model-00001-of-00005.safetensors",
+ "language_model.model.layers.4.mlp.down_proj.weight": "model-00001-of-00005.safetensors",
+ "language_model.model.layers.4.mlp.gate_proj.weight": "model-00001-of-00005.safetensors",
+ "language_model.model.layers.4.mlp.up_proj.weight": "model-00001-of-00005.safetensors",
+ "language_model.model.layers.4.post_attention_layernorm.weight": "model-00001-of-00005.safetensors",
+ "language_model.model.layers.4.self_attn.k_proj.weight": "model-00001-of-00005.safetensors",
+ "language_model.model.layers.4.self_attn.o_proj.weight": "model-00001-of-00005.safetensors",
+ "language_model.model.layers.4.self_attn.q_proj.weight": "model-00001-of-00005.safetensors",
+ "language_model.model.layers.4.self_attn.v_proj.weight": "model-00001-of-00005.safetensors",
+ "language_model.model.layers.5.input_layernorm.weight": "model-00001-of-00005.safetensors",
+ "language_model.model.layers.5.mlp.down_proj.weight": "model-00001-of-00005.safetensors",
+ "language_model.model.layers.5.mlp.gate_proj.weight": "model-00001-of-00005.safetensors",
+ "language_model.model.layers.5.mlp.up_proj.weight": "model-00001-of-00005.safetensors",
+ "language_model.model.layers.5.post_attention_layernorm.weight": "model-00001-of-00005.safetensors",
+ "language_model.model.layers.5.self_attn.k_proj.weight": "model-00001-of-00005.safetensors",
+ "language_model.model.layers.5.self_attn.o_proj.weight": "model-00001-of-00005.safetensors",
+ "language_model.model.layers.5.self_attn.q_proj.weight": "model-00001-of-00005.safetensors",
+ "language_model.model.layers.5.self_attn.v_proj.weight": "model-00001-of-00005.safetensors",
+ "language_model.model.layers.6.input_layernorm.weight": "model-00002-of-00005.safetensors",
+ "language_model.model.layers.6.mlp.down_proj.weight": "model-00002-of-00005.safetensors",
+ "language_model.model.layers.6.mlp.gate_proj.weight": "model-00001-of-00005.safetensors",
+ "language_model.model.layers.6.mlp.up_proj.weight": "model-00002-of-00005.safetensors",
+ "language_model.model.layers.6.post_attention_layernorm.weight": "model-00002-of-00005.safetensors",
+ "language_model.model.layers.6.self_attn.k_proj.weight": "model-00001-of-00005.safetensors",
+ "language_model.model.layers.6.self_attn.o_proj.weight": "model-00001-of-00005.safetensors",
+ "language_model.model.layers.6.self_attn.q_proj.weight": "model-00001-of-00005.safetensors",
+ "language_model.model.layers.6.self_attn.v_proj.weight": "model-00001-of-00005.safetensors",
+ "language_model.model.layers.7.input_layernorm.weight": "model-00002-of-00005.safetensors",
+ "language_model.model.layers.7.mlp.down_proj.weight": "model-00002-of-00005.safetensors",
+ "language_model.model.layers.7.mlp.gate_proj.weight": "model-00002-of-00005.safetensors",
+ "language_model.model.layers.7.mlp.up_proj.weight": "model-00002-of-00005.safetensors",
+ "language_model.model.layers.7.post_attention_layernorm.weight": "model-00002-of-00005.safetensors",
+ "language_model.model.layers.7.self_attn.k_proj.weight": "model-00002-of-00005.safetensors",
+ "language_model.model.layers.7.self_attn.o_proj.weight": "model-00002-of-00005.safetensors",
+ "language_model.model.layers.7.self_attn.q_proj.weight": "model-00002-of-00005.safetensors",
+ "language_model.model.layers.7.self_attn.v_proj.weight": "model-00002-of-00005.safetensors",
+ "language_model.model.layers.8.input_layernorm.weight": "model-00002-of-00005.safetensors",
+ "language_model.model.layers.8.mlp.down_proj.weight": "model-00002-of-00005.safetensors",
+ "language_model.model.layers.8.mlp.gate_proj.weight": "model-00002-of-00005.safetensors",
+ "language_model.model.layers.8.mlp.up_proj.weight": "model-00002-of-00005.safetensors",
+ "language_model.model.layers.8.post_attention_layernorm.weight": "model-00002-of-00005.safetensors",
+ "language_model.model.layers.8.self_attn.k_proj.weight": "model-00002-of-00005.safetensors",
+ "language_model.model.layers.8.self_attn.o_proj.weight": "model-00002-of-00005.safetensors",
+ "language_model.model.layers.8.self_attn.q_proj.weight": "model-00002-of-00005.safetensors",
+ "language_model.model.layers.8.self_attn.v_proj.weight": "model-00002-of-00005.safetensors",
+ "language_model.model.layers.9.input_layernorm.weight": "model-00002-of-00005.safetensors",
+ "language_model.model.layers.9.mlp.down_proj.weight": "model-00002-of-00005.safetensors",
+ "language_model.model.layers.9.mlp.gate_proj.weight": "model-00002-of-00005.safetensors",
+ "language_model.model.layers.9.mlp.up_proj.weight": "model-00002-of-00005.safetensors",
+ "language_model.model.layers.9.post_attention_layernorm.weight": "model-00002-of-00005.safetensors",
+ "language_model.model.layers.9.self_attn.k_proj.weight": "model-00002-of-00005.safetensors",
+ "language_model.model.layers.9.self_attn.o_proj.weight": "model-00002-of-00005.safetensors",
+ "language_model.model.layers.9.self_attn.q_proj.weight": "model-00002-of-00005.safetensors",
+ "language_model.model.layers.9.self_attn.v_proj.weight": "model-00002-of-00005.safetensors",
+ "language_model.model.norm.weight": "model-00005-of-00005.safetensors",
+ "multi_modal_projector.linear_1.weight": "model-00001-of-00005.safetensors",
+ "multi_modal_projector.linear_2.weight": "model-00001-of-00005.safetensors",
+ "multi_modal_projector.ln_post.weight": "model-00001-of-00005.safetensors",
+ "multi_modal_projector.ln_pre.weight": "model-00001-of-00005.safetensors"
+ }
+}
diff --git a/special_tokens_map.json b/special_tokens_map.json
new file mode 100644
index 0000000000000000000000000000000000000000..72ecfeeb7e14d244c936169d2ed139eeae235ef1
--- /dev/null
+++ b/special_tokens_map.json
@@ -0,0 +1,24 @@
+{
+ "bos_token": {
+ "content": "<s>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "eos_token": {
+ "content": "</s>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "pad_token": "<pad>",
+ "unk_token": {
+ "content": "<unk>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ }
+}
diff --git a/tokenizer.json b/tokenizer.json
new file mode 100644
index 0000000000000000000000000000000000000000..ec5acd742ca3df11b3b7933152376b737565842d
--- /dev/null
+++ b/tokenizer.json
@@ -0,0 +1,409625 @@
+{
+ "version": "1.0",
+ "truncation": null,
+ "padding": null,
+ "added_tokens": [
+ {
+ "id": 0,
+ "content": "<unk>",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 1,
+ "content": "<s>",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 2,
+ "content": "</s>",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 3,
+ "content": "[INST]",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 4,
+ "content": "[/INST]",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 5,
+ "content": "[AVAILABLE_TOOLS]",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 6,
+ "content": "[/AVAILABLE_TOOLS]",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 7,
+ "content": "[TOOL_RESULTS]",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 8,
+ "content": "[/TOOL_RESULTS]",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 9,
+ "content": "[TOOL_CALLS]",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 10,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 11,
+ "content": "[PREFIX]",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 12,
+ "content": "[MIDDLE]",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 13,
+ "content": "[SUFFIX]",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 14,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 15,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 16,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 17,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 18,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 19,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 20,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 21,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 22,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 23,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 24,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 25,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 26,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 27,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 28,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 29,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 30,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 31,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 32,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 33,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 34,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 35,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 36,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 37,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 38,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 39,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 40,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 41,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 42,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 43,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 44,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 45,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 46,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 47,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 48,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 49,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 50,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 51,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 52,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 53,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 54,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 55,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 56,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 57,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 58,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 59,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 60,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 61,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 62,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 63,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 64,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 65,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 66,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 67,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 68,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 69,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 70,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 71,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 72,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 73,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 74,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 75,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 76,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 77,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 78,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 79,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 80,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 81,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 82,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 83,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 84,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 85,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 86,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 87,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 88,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 89,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 90,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 91,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 92,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 93,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 94,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 95,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 96,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 97,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 98,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 99,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 100,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 101,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 102,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 103,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 104,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 105,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 106,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 107,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 108,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 109,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 110,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 111,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 112,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 113,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 114,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 115,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 116,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 117,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 118,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 119,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 120,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 121,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 122,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 123,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 124,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 125,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 126,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 127,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 128,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 129,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 130,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 131,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 132,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 133,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 134,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 135,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 136,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 137,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 138,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 139,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 140,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 141,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 142,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 143,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 144,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 145,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 146,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 147,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 148,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 149,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 150,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 151,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 152,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 153,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 154,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 155,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 156,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 157,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 158,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 159,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 160,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 161,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 162,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 163,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 164,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 165,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 166,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 167,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 168,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 169,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 170,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 171,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 172,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 173,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 174,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 175,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 176,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 177,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 178,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 179,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 180,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 181,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 182,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 183,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 184,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 185,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 186,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 187,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 188,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 189,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 190,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 191,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 192,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 193,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 194,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 195,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 196,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 197,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 198,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 199,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 200,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 201,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 202,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 203,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 204,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 205,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 206,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 207,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 208,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 209,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 210,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 211,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 212,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 213,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 214,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 215,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 216,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 217,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 218,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 219,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 220,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 221,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 222,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 223,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 224,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 225,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 226,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 227,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 228,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 229,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 230,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 231,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 232,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 233,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 234,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 235,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 236,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 237,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 238,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 239,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 240,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 241,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 242,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 243,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 244,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 245,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 246,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 247,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 248,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 249,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 250,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 251,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 252,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 253,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 254,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 255,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 256,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 257,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 258,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 259,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 260,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 261,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 262,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 263,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 264,
+ "content": "",
+ "single_word": false,
+ "lstrip": false,
+ "rstrip": false,
+ "normalized": false,
+ "special": true
+ },
+ {
+ "id": 265,
+ "content": "