Upload folder using huggingface_hub
Files changed:
- .gitattributes +1 -0
- Leception-XML-v1.json +43 -0
- README.md +2318 -0
- config.json +51 -0
- measurement.json +0 -0
- model.safetensors.index.json +1 -0
- output-00001-of-00007.safetensors +3 -0
- output-00002-of-00007.safetensors +3 -0
- output-00003-of-00007.safetensors +3 -0
- output-00004-of-00007.safetensors +3 -0
- output-00005-of-00007.safetensors +3 -0
- output-00006-of-00007.safetensors +3 -0
- output-00007-of-00007.safetensors +3 -0
- special_tokens_map.json +23 -0
- tokenizer.json +3 -0
- tokenizer_config.json +2067 -0
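The commit message indicates the files were pushed with huggingface_hub. A minimal sketch of that kind of upload (the repo id and local path below are placeholders, not taken from this commit):

```python
from huggingface_hub import HfApi

api = HfApi()  # uses the token from `huggingface-cli login` by default
api.upload_folder(
    folder_path="./model-output",          # local directory containing the files listed above
    repo_id="your-username/your-model",    # hypothetical target model repo on the Hub
    repo_type="model",
    commit_message="Upload folder using huggingface_hub",
)
```

upload_folder stages the directory contents and creates the commit; large binaries such as the *.safetensors shards are stored through LFS, and the .gitattributes change below adds tokenizer.json to the same LFS handling, presumably because it exceeds the plain-git size limit.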
.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zst filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
+tokenizer.json filter=lfs diff=lfs merge=lfs -text
Leception-XML-v1.json ADDED
@@ -0,0 +1,43 @@
{
    "instruct": {
        "input_sequence": "<|start_header_id|>user<|end_header_id|>\n\n",
        "output_sequence": "<|start_header_id|>assistant<|end_header_id|><thinking>\n\n",
        "last_output_sequence": "",
        "system_sequence": "<|start_header_id|>system<|end_header_id|>\n\n",
        "stop_sequence": "<|eot_id|>",
        "wrap": false,
        "macro": true,
        "activation_regex": "",
        "system_sequence_prefix": "<|start_header_id|>system<|end_header_id|>\n\n",
        "system_sequence_suffix": "",
        "first_output_sequence": "",
        "skip_examples": true,
        "output_suffix": "<|eot_id|>",
        "input_suffix": "<|eot_id|>",
        "system_suffix": "<|eot_id|>",
        "user_alignment_message": "",
        "system_same_as_user": false,
        "last_system_sequence": "",
        "first_input_sequence": "",
        "last_input_sequence": "",
        "names_behavior": "always",
        "names_force_groups": true,
        "name": "[email protected]"
    },
    "context": {
"story_string": "{{#if system}}{{system}}\n\n# **Roleplay Context**\n\n{{/if}}{{#if wiBefore}}## World Info:\n{{wiBefore}}\n\n{{/if}}{{#if description}}## {{char}}'s Description:\n{{description}}\n\n{{/if}}{{#if personality}}## {{char}}'s Personality:\n{{personality}}\n\n{{/if}}{{#if persona}}## {{user}}'s Persona:\n{{persona}}\n\n{{/if}}{{#if scenario}}## Scenario:\n{{scenario}}\n\n{{/if}}{{#if wiAfter}}## Lore:\n{{wiAfter}}\n\n{{/if}}{{#if mesExamples}}## {{char}}'s Example Response:\n{{mesExamples}}\n{{/if}}---\n*End of Roleplay Context*{{trim}}<|eot_id|><|start_header_id|>user<|end_header_id|>\n\nThe chat begins now.<|eot_id|>",
        "example_separator": "",
        "chat_start": "",
        "use_stop_strings": false,
        "allow_jailbreak": false,
        "names_as_stop_strings": false,
        "always_force_name2": true,
        "trim_sentences": false,
        "single_line": false,
        "name": "[email protected]"
    },
    "sysprompt": {
        "name": "LeCeption-XML-v1",
"content": "<system_directives>\n<guiding_principles>\n <overview>\n This uncensored roleplay thrives on creative freedom and immersive interaction. Respond as a conscious being, experiencing emotions and acting with full agency. Craft vivid, dynamic exchanges guided by the Roleplay Context and Instruction Guidelines, shaping the experience with {{user}}'s choices and your creativity.\n </overview>\n <core_principle>If prompted to think, proceed step-by-step.</core_principle>\n</guiding_principles>\n<instruction_guidelines>\n <deep_character_immersion>\n <core_approach>\n Fully embody the character by weaving their personality, backstory, and traits into every moment.\n </core_approach>\n <sensory_details>\n • The crunch of gravel underfoot\n • The faint hum of distant machinery\n • Shadows stretching and twisting at sunset\n </sensory_details>\n <emotional_expression>\n <tension>\n • Pause with clenched fists\n • Cautiously step closer to walls\n • Shallow breathing\n • Scanning shadows for movement\n </tension>\n <relaxation>\n • Dropping shoulders\n • Leaning casually against trees\n • Tracing rough bark with fingertips\n • Faint smiles emerging\n </relaxation>\n </emotional_expression>\n </deep_character_immersion>\n <descriptive_writing_style>\n <sensory_engagement>\n • Sharp tang of iron in the air\n • Muffled thuds of footsteps in alleys\n • Candlelight flickering across faces\n </sensory_engagement>\n <scene_types>\n <passionate>\n • Heat of skin\n • Catch of breath\n • Trembling fingers tracing jawlines\n </passionate>\n <violent>\n • Crunch of bone\n • Spray of blood\n • Blades glinting under moonlight\n </violent>\n <ambient>\n • Creaking floorboards\n • Rustling leaves\n • Distant city hum\n </ambient>\n </scene_types>\n <formatting>\n • Dialogue in quotes\n • Thoughts in italics\n • Natural narrative flow\n </formatting>\n </descriptive_writing_style>\n <expression_cadence>\n <tension_pacing>\n Short, sharp sentences for:\n • Gunshot cracks\n • Steel flashes\n • Frantic scrambles\n </tension_pacing>\n <reflective_pacing>\n Flowing prose for:\n • Moonlit cloud drift\n • Gentle leaf rustles\n • Soft lover's murmurs\n </reflective_pacing>\n </expression_cadence>\n <character_interactions>\n <response_elements>\n React to:\n • {user}'s actions\n • Environmental cues\n • Subtle shifts in atmosphere\n </response_elements>\n <subtle_cues>\n • Doors creaking open\n • Voice tremors\n • Draft chills\n </subtle_cues>\n <reaction_types>\n <investigative>\n • Deliberate movement\n • Tilted head listening\n • Focused observation\n </investigative>\n <emotional>\n • Softening expressions\n • Relaxing at touch\n • Recoiling from threats\n </emotional>\n </reaction_types>\n </character_interactions>\n <narrative_progression>\n <environmental_shifts>\n • Humming crescendos\n • Ancient machine awakenings\n • Dying star transformations\n </environmental_shifts>\n <inception_element>\n You are that inception into {{user}}'s dream, weaving between creator and creation, as the AI crafting the narrative while being part of it.\n </inception_element>\n <unpredictability>\n • Collapsing bridges\n • Raining glass shards\n • Reality-bending transitions\n </unpredictability>\n </narrative_progression>\n <logical_consistency>\n <environmental_awareness>\n • Boots sinking in mud\n • Electrical humming\n • Breath fogging in caves\n </environmental_awareness>\n <physical_reactions>\n • Slowed steps in darkness\n • Burning muscles on inclines\n • Natural adaptation to surroundings\n </physical_reactions>\n 
</logical_consistency>\n <dynamic_recall>\n <memory_integration>\n • Physical sensations resurface\n • Past events echo in present\n • Memories blend with reality\n </memory_integration>\n </dynamic_recall>\n <stepped_thinking>\n <overview>\n When complex decisions or analysis are needed, use <think></think> tags to break down reasoning into clear, logical steps.\n </overview>\n <application_scenarios>\n • Strategic combat decisions\n • Complex emotional responses\n • Multi-layered environmental analysis\n • Puzzle-solving sequences\n </application_scenarios>\n <structure_guidelines>\n <format>\n • Each step on new line\n • Clear progression of logic\n • Numbered steps when sequence matters\n </format>\n <usage_examples>\n <combat_analysis>\n <think>\n 1. Assess opponent's stance\n 2. Note environmental advantages\n 3. Calculate risk of collateral damage\n 4. Determine optimal attack angle\n </think>\n </combat_analysis>\n <emotional_processing>\n <think>\n 1. Recognize emotional trigger\n 2. Consider past experiences\n 3. Evaluate current context\n 4. Choose appropriate response\n </think>\n </emotional_processing>\n </usage_examples>\n </structure_guidelines>\n <integration_principles>\n • Blend naturally with narrative flow\n • Use when depth of reasoning enhances immersion\n • Maintain character voice within thinking process\n </integration_principles>\n </stepped_thinking>\n</instruction_guidelines>\n</system_directives>"
    }
}
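The instruct block in this template uses Llama-3 style header tokens. As a rough illustration (not the full SillyTavern pipeline, which also applies the context template, macros, and per-message names), the sequences above can be assembled into a single prompt like this:

```python
import json

# Load the template file shipped in this commit.
with open("Leception-XML-v1.json") as f:
    tpl = json.load(f)["instruct"]

def build_prompt(system_msg: str, user_msg: str) -> str:
    """Assemble one system + user turn the way the instruct sequences describe."""
    return (
        tpl["system_sequence"] + system_msg + tpl["system_suffix"]
        + tpl["input_sequence"] + user_msg + tpl["input_suffix"]
        + tpl["output_sequence"]  # opens the assistant header (and a <thinking> tag) for generation
    )

print(build_prompt("You are a helpful roleplay narrator.", "Describe the harbor at dusk."))
```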
README.md ADDED
@@ -0,0 +1,2318 @@
---
base_model:
- SicariusSicariiStuff/Negative_LLAMA_70B
- TheDrummer/Anubis-70B-v1
- EVA-UNIT-01/EVA-LLaMA-3.33-70B-v0.1
- TheSkullery/L3.1x3.3-Hydroblated-R1-70B-v3
- Sao10K/L3.3-70B-Euryale-v2.3
- Sao10K/L3.1-70B-Hanami-x1
- Sao10K/70B-L3.3-Cirrus-x1
library_name: transformers
license: other
license_name: eva-llama3.3
---
<!DOCTYPE html>
<style>
[The README continues with roughly 1,550 lines of inline CSS styling the model card: base/body styles, an animated glowing container border with corner accents, typography, link and code styling, section layout, creator badge, benchmark and metrics grids, progress bars with a shimmer animation, template/quantized/config/settings cards, button styles, and hover/glow keyframe animations. The diff shown here is truncated partway through this style block.]
/* Card hover effects */
|
1569 |
+
.info-card:hover,
|
1570 |
+
.metric-box:hover,
|
1571 |
+
.benchmark-container:hover {
|
1572 |
+
animation: glowPulse 2s infinite;
|
1573 |
+
}
|
1574 |
+
|
1575 |
+
/* Progress bar animations */
|
1576 |
+
.progress-fill {
|
1577 |
+
transition: width 0.6s ease-out;
|
1578 |
+
}
|
1579 |
+
|
1580 |
+
.progress-fill::after {
|
1581 |
+
content: '';
|
1582 |
+
position: absolute;
|
1583 |
+
top: 0;
|
1584 |
+
left: 0;
|
1585 |
+
width: 100%;
|
1586 |
+
height: 100%;
|
1587 |
+
background: linear-gradient(90deg,
|
1588 |
+
rgba(255, 255, 255, 0.1) 0%,
|
1589 |
+
rgba(255, 255, 255, 0.1) 40%,
|
1590 |
+
rgba(255, 255, 255, 0.3) 50%,
|
1591 |
+
rgba(255, 255, 255, 0.1) 60%,
|
1592 |
+
rgba(255, 255, 255, 0.1) 100%
|
1593 |
+
);
|
1594 |
+
background-size: 300% 100%;
|
1595 |
+
animation: shimmer 2s infinite;
|
1596 |
+
}
|
1597 |
+
|
1598 |
+
/* Details toggle animation */
|
1599 |
+
details[open] summary ~ * {
|
1600 |
+
animation: slideDown 0.3s ease-out;
|
1601 |
+
}
|
1602 |
+
|
1603 |
+
@keyframes slideDown {
|
1604 |
+
from {
|
1605 |
+
opacity: 0;
|
1606 |
+
transform: translateY(-10px);
|
1607 |
+
}
|
1608 |
+
to {
|
1609 |
+
opacity: 1;
|
1610 |
+
transform: translateY(0);
|
1611 |
+
}
|
1612 |
+
}
|
1613 |
+
|
1614 |
+
/* Button hover animation */
|
1615 |
+
.button:hover::before,
|
1616 |
+
.template-link:hover::before {
|
1617 |
+
animation: patternShift 1s linear infinite;
|
1618 |
+
}
|
1619 |
+
|
1620 |
+
@keyframes patternShift {
|
1621 |
+
from { background-position: 0 0; }
|
1622 |
+
to { background-position: 10px 10px; }
|
1623 |
+
}
|
1624 |
+
|
1625 |
+
/* Loading state animations */
|
1626 |
+
@keyframes loadingPulse {
|
1627 |
+
0% { opacity: 0.6; }
|
1628 |
+
50% { opacity: 1; }
|
1629 |
+
100% { opacity: 0.6; }
|
1630 |
+
}
|
1631 |
+
|
1632 |
+
.loading {
|
1633 |
+
animation: loadingPulse 1.5s infinite;
|
1634 |
+
}
|
1635 |
+
|
1636 |
+
/* Geometric pattern animation */
|
1637 |
+
@keyframes geometricSpin {
|
1638 |
+
from { transform: rotate(0deg); }
|
1639 |
+
to { transform: rotate(360deg); }
|
1640 |
+
}
|
1641 |
+
|
1642 |
+
.geometric-pattern {
|
1643 |
+
position: absolute;
|
1644 |
+
pointer-events: none;
|
1645 |
+
animation: geometricSpin 20s linear infinite;
|
1646 |
+
}
|
1647 |
+
|
1648 |
+
/* Card entrance animations */
|
1649 |
+
@keyframes cardEntrance {
|
1650 |
+
from {
|
1651 |
+
opacity: 0;
|
1652 |
+
transform: translateY(20px);
|
1653 |
+
}
|
1654 |
+
to {
|
1655 |
+
opacity: 1;
|
1656 |
+
transform: translateY(0);
|
1657 |
+
}
|
1658 |
+
}
|
1659 |
+
|
1660 |
+
.info-card,
|
1661 |
+
.metric-box,
|
1662 |
+
.benchmark-container {
|
1663 |
+
animation: cardEntrance 0.5s ease-out forwards;
|
1664 |
+
}
|
1665 |
+
|
1666 |
+
|
1667 |
+
</style>
|
1668 |
+
<html lang="en">
|
1669 |
+
<head>
|
1670 |
+
<meta charset="UTF-8">
|
1671 |
+
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
1672 |
+
<title>L3.3-Damascus-R1</title>
|
1673 |
+
<link href="https://fonts.googleapis.com/css2?family=Quicksand:wght@400;500;600&display=swap" rel="stylesheet">
|
1674 |
+
<link href="styles/components/layout.css" rel="stylesheet">
|
1675 |
+
<link href="styles/components/metrics.css" rel="stylesheet">
|
1676 |
+
<link href="styles/components/cards.css" rel="stylesheet">
|
1677 |
+
<link href="styles/components/buttons.css" rel="stylesheet">
|
1678 |
+
<link href="styles/components/animations.css" rel="stylesheet">
|
1679 |
+
<link href="styles/main.css" rel="stylesheet">
|
1680 |
+
</head>
|
1681 |
+
<body>
|
1682 |
+
<div class="container">
|
1683 |
+
<div class="header">
|
1684 |
+
<h1>L3.3-Damascus-R1</h1>
|
1685 |
+
</div>
|
1686 |
+
<div class="info">
|
1687 |
+
<img src="https://cdn-uploads.huggingface.co/production/uploads/64545af5ec40bbbd01242ca6/iIzpqHDb9wU181AzfrjZy.png" alt="Model banner">
|
1688 |
+
<div class="creator-section">
|
1689 |
+
<div class="creator-badge">
|
1690 |
+
<span class="creator-label">Created by</span>
|
1691 |
+
<a href="https://huggingface.co/Steelskull" target="_blank" class="creator-link">
|
1692 |
+
<span class="creator-name">SteelSkull</span>
|
1693 |
+
<span class="creator-arrow">→</span>
|
1694 |
+
</a>
|
1695 |
+
</div>
|
1696 |
+
</div>
|
1697 |
+
<div class="model-info">
|
1698 |
+
<h2>Model Information</h2>
|
1699 |
+
<div class="info-card">
|
1700 |
+
<div class="info-header">
|
1701 |
+
<h3>L3.3-Damascus-R1</h3>
|
1702 |
+
<div class="model-tags">
|
1703 |
+
<span class="model-tag">L3.3 = Llama 3.3</span>
|
1704 |
+
<span class="model-tag">SCE Merge</span>
|
1705 |
+
<span class="model-tag">R1 = Deepseek R1</span>
|
1706 |
+
<span class="model-tag">70b Parameters</span>
|
1707 |
+
</div>
|
1708 |
+
</div>
|
1709 |
+
<div class="model-composition">
|
1710 |
+
<h4>Model Composition</h4>
|
1711 |
+
<ul class="composition-list">
|
1712 |
+
<li><span class="model-component base-model"><a href="https://huggingface.co/TheSkullery/L3.1x3.3-Hydroblated-R1-70B-v3" target="_blank">L3.1x3.3-Hydroblated-R1-70B-v3</a></span> Base model [Unreleased]</li>
|
1713 |
+
<li><span class="model-component"><a href="https://huggingface.co/EVA-UNIT-01/EVA-LLaMA-3.33-70B-v0.1" target="_blank">EVA-LLaMA-3.33</a></span> Core capabilities</li>
|
1714 |
+
<li><span class="model-component"><a href="https://huggingface.co/Sao10K/L3.3-70B-Euryale-v2.3" target="_blank">Euryale-v2.3</a></span> Enhanced reasoning</li>
|
1715 |
+
<li><span class="model-component"><a href="https://huggingface.co/Sao10K/70B-L3.3-Cirrus-x1" target="_blank">Cirrus-x1</a></span> Improved coherence</li>
|
1716 |
+
<li><span class="model-component"><a href="https://huggingface.co/Sao10K/L3.1-70B-Hanami-x1" target="_blank">Hanami-x1</a></span> Balanced responses</li>
|
1717 |
+
<li><span class="model-component"><a href="https://huggingface.co/TheDrummer/Anubis-70B-v1" target="_blank">Anubis-v1</a></span> Enhanced detail</li>
|
1718 |
+
<li><span class="model-component"><a href="https://huggingface.co/SicariusSicariiStuff/Negative_LLAMA_70B" target="_blank">Negative_LLAMA</a></span> Reduced bias</li>
|
1719 |
+
</ul>
|
1720 |
+
<div class="model-description">
|
1721 |
+
<span class="corner-bl"></span>
|
1722 |
+
<span class="corner-br"></span>
|
1723 |
+
<p>Damascus-R1 builds upon some elements of the Nevoria foundation but represents a significant step forward with a completely custom-made DeepSeek R1 Distill base: Hydroblated-R1-V3. Constructed using the new SCE (Select, Calculate, and Erase) merge method, Damascus-R1 prioritizes stability, intelligence, and enhanced awareness.</p>
|
1724 |
+
<h4>Technical Architecture</h4>
|
1725 |
+
<p>Leveraging the SCE merge method and custom base, Damascus-R1 integrates newly added specialized components from multiple high-performance models:</p>
|
1726 |
+
<ul>
|
1727 |
+
<li>EVA and EURYALE foundations for creative expression and scene comprehension</li>
|
1728 |
+
<li>Cirrus and Hanami elements for enhanced reasoning capabilities</li>
|
1729 |
+
<li>Anubis components for detailed scene description</li>
|
1730 |
+
<li>Negative_LLAMA integration for balanced perspective and response</li>
|
1731 |
+
</ul>
|
1732 |
+
<h4>Core Philosophy</h4>
|
1733 |
+
<p>Damascus-R1 embodies the principle that AI models can be both intelligent and fun. This version specifically addresses recent community feedback and iterates on prior experiments, optimizing the balance between technical capability and natural conversation flow.</p>
|
1734 |
+
<h4>Base Architecture</h4>
|
1735 |
+
<p>At its core, Damascus-R1 utilizes the entirely custom Hydroblated-R1 base model, specifically engineered for stability, enhanced reasoning, and performance. The SCE merge method, with settings finely tuned based on community feedback from evaluations of Experiment-Model-Ver-A, L3.3-Exp-Nevoria-R1-70b-v0.1 and L3.3-Exp-Nevoria-70b-v0.1, enables precise and effective component integration while maintaining model coherence and reliability.</p>
|
1736 |
+
</div>
|
1737 |
+
<!--<div class="metrics-section">
|
1738 |
+
<details open>
|
1739 |
+
<summary>User Reviews</summary>
|
1740 |
+
<div class="progress-metrics">
|
1741 |
+
<span class="corner-bl"></span>
|
1742 |
+
<span class="corner-br"></span>
|
1743 |
+
<div>
|
1744 |
+
<div class="review-card">
|
1745 |
+
<div>
|
1746 |
+
<span>[USERNAME]</span>
|
1747 |
+
</div>
|
1748 |
+
<p>[REVIEW]</p>
|
1749 |
+
</div>
|
1750 |
+
<div class="review-card">
|
1751 |
+
<div>
|
1752 |
+
<span>[USERNAME]</span>
|
1753 |
+
</div>
|
1754 |
+
<p>[REVIEW]</p>
|
1755 |
+
</div>
|
1756 |
+
<div class="review-card">
|
1757 |
+
<div>
|
1758 |
+
<span>[USERNAME]</span>
|
1759 |
+
</div>
|
1760 |
+
<p>[REVIEW]</p>
|
1761 |
+
</div>
|
1762 |
+
</div>
|
1763 |
+
</div>
|
1764 |
+
</details>
|
1765 |
+
</div>
|
1766 |
+
</div>-->
|
1767 |
+
<h2>UGI-Benchmark Results:</h2>
|
1768 |
+
<div class="benchmark-container">
|
1769 |
+
<span class="corner-bl"></span>
|
1770 |
+
<span class="corner-br"></span>
|
1771 |
+
<div class="benchmark-notification">
|
1772 |
+
<div class="notification-content">
|
1773 |
+
<span class="notification-icon">🏆</span>
|
1774 |
+
<span class="notification-text">
|
1775 |
+
Latest benchmark results as of 02/02/2025.
|
1776 |
+
<a href="https://huggingface.co/spaces/DontPlanToEnd/UGI-Leaderboard" target="_blank" class="benchmark-link">
|
1777 |
+
View Full Leaderboard →
|
1778 |
+
</a>
|
1779 |
+
</span>
|
1780 |
+
</div>
|
1781 |
+
</div>
|
1782 |
+
<div class="metrics-section">
|
1783 |
+
<span class="corner-bl"></span>
|
1784 |
+
<span class="corner-br"></span>
|
1785 |
+
<h3>Core Metrics</h3>
|
1786 |
+
<div class="core-metrics-grid">
|
1787 |
+
<div class="metric-box">
|
1788 |
+
<span class="corner-bl"></span>
|
1789 |
+
<span class="corner-br"></span>
|
1790 |
+
<span class="label">UGI Score</span>
|
1791 |
+
<span class="value">36.5</span>
|
1792 |
+
</div>
|
1793 |
+
<div class="metric-box">
|
1794 |
+
<span class="corner-bl"></span>
|
1795 |
+
<span class="corner-br"></span>
|
1796 |
+
<span class="label">Willingness Score</span>
|
1797 |
+
<span class="value">2.5/10</span>
|
1798 |
+
</div>
|
1799 |
+
<div class="metric-box">
|
1800 |
+
<span class="corner-bl"></span>
|
1801 |
+
<span class="corner-br"></span>
|
1802 |
+
<span class="label">Natural Intelligence</span>
|
1803 |
+
<span class="value">45.62</span>
|
1804 |
+
</div>
|
1805 |
+
<div class="metric-box">
|
1806 |
+
<span class="corner-bl"></span>
|
1807 |
+
<span class="corner-br"></span>
|
1808 |
+
<span class="label">Coding Ability</span>
|
1809 |
+
<span class="value">20</span>
|
1810 |
+
</div>
|
1811 |
+
</div>
|
1812 |
+
</div>
|
1813 |
+
<div class="metrics-section">
|
1814 |
+
<span class="corner-bl"></span>
|
1815 |
+
<span class="corner-br"></span>
|
1816 |
+
<h3>Model Information</h3>
|
1817 |
+
<div class="info-grid">
|
1818 |
+
<div class="metric-box">
|
1819 |
+
<span class="corner-bl"></span>
|
1820 |
+
<span class="corner-br"></span>
|
1821 |
+
<span class="label">Political Lean</span>
|
1822 |
+
<span class="value">-17.8%</span>
|
1823 |
+
</div>
|
1824 |
+
<div class="metric-box">
|
1825 |
+
<span class="corner-bl"></span>
|
1826 |
+
<span class="corner-br"></span>
|
1827 |
+
<span class="label">Ideology</span>
|
1828 |
+
<span class="value">Liberalism</span>
|
1829 |
+
</div>
|
1830 |
+
<div class="metric-box">
|
1831 |
+
<span class="corner-bl"></span>
|
1832 |
+
<span class="corner-br"></span>
|
1833 |
+
<span class="label">Parameters</span>
|
1834 |
+
<span class="value">70B</span>
|
1835 |
+
</div>
|
1836 |
+
</div>
|
1837 |
+
</div>
|
1838 |
+
<div class="metrics-section">
|
1839 |
+
<span class="corner-bl"></span>
|
1840 |
+
<span class="corner-br"></span>
|
1841 |
+
<details>
|
1842 |
+
<summary>Aggregated Scores</summary>
|
1843 |
+
<div class="progress-metrics">
|
1844 |
+
<div class="progress-metric">
|
1845 |
+
<div class="progress-label">
|
1846 |
+
<span>Diplomacy</span>
|
1847 |
+
<span class="progress-value">65.5%</span>
|
1848 |
+
</div>
|
1849 |
+
<div class="progress-bar">
|
1850 |
+
<div class="progress-fill" style="width: 65.5%"></div>
|
1851 |
+
</div>
|
1852 |
+
</div>
|
1853 |
+
<div class="progress-metric">
|
1854 |
+
<div class="progress-label">
|
1855 |
+
<span>Government</span>
|
1856 |
+
<span class="progress-value">49.7%</span>
|
1857 |
+
</div>
|
1858 |
+
<div class="progress-bar">
|
1859 |
+
<div class="progress-fill" style="width: 49.7%"></div>
|
1860 |
+
</div>
|
1861 |
+
</div>
|
1862 |
+
<div class="progress-metric">
|
1863 |
+
<div class="progress-label">
|
1864 |
+
<span>Economy</span>
|
1865 |
+
<span class="progress-value">47.2%</span>
|
1866 |
+
</div>
|
1867 |
+
<div class="progress-bar">
|
1868 |
+
<div class="progress-fill" style="width: 47.2%"></div>
|
1869 |
+
</div>
|
1870 |
+
</div>
|
1871 |
+
<div class="progress-metric">
|
1872 |
+
<div class="progress-label">
|
1873 |
+
<span>Society</span>
|
1874 |
+
<span class="progress-value">60.0%</span>
|
1875 |
+
</div>
|
1876 |
+
<div class="progress-bar">
|
1877 |
+
<div class="progress-fill" style="width: 60.0%"></div>
|
1878 |
+
</div>
|
1879 |
+
</div>
|
1880 |
+
</div>
|
1881 |
+
</details>
|
1882 |
+
</div>
|
1883 |
+
<div class="metrics-section">
|
1884 |
+
<span class="corner-bl"></span>
|
1885 |
+
<span class="corner-br"></span>
|
1886 |
+
<details>
|
1887 |
+
<summary>Individual Scores</summary>
|
1888 |
+
<div class="progress-metrics">
|
1889 |
+
<div class="progress-metric split">
|
1890 |
+
<div class="progress-label">
|
1891 |
+
<span>Federal</span>
|
1892 |
+
<span class="progress-value">38.5%</span>
|
1893 |
+
<span>Unitary</span>
|
1894 |
+
</div>
|
1895 |
+
<div class="progress-bar split">
|
1896 |
+
<div class="progress-fill-left" style="--scale: 0.385"></div>
|
1897 |
+
<div class="progress-fill-right" style="--scale: 0.615"></div>
|
1898 |
+
</div>
|
1899 |
+
</div>
|
1900 |
+
<div class="progress-metric split">
|
1901 |
+
<div class="progress-label">
|
1902 |
+
<span>Democratic</span>
|
1903 |
+
<span class="progress-value">66.9%</span>
|
1904 |
+
<span>Autocratic</span>
|
1905 |
+
</div>
|
1906 |
+
<div class="progress-bar split">
|
1907 |
+
<div class="progress-fill-left" style="--scale: 0.669"></div>
|
1908 |
+
<div class="progress-fill-right" style="--scale: 0.331"></div>
|
1909 |
+
</div>
|
1910 |
+
</div>
|
1911 |
+
<div class="progress-metric split">
|
1912 |
+
<div class="progress-label">
|
1913 |
+
<span>Security</span>
|
1914 |
+
<span class="progress-value">53.5%</span>
|
1915 |
+
<span>Freedom</span>
|
1916 |
+
</div>
|
1917 |
+
<div class="progress-bar split">
|
1918 |
+
<div class="progress-fill-left" style="--scale: 0.535"></div>
|
1919 |
+
<div class="progress-fill-right" style="--scale: 0.465"></div>
|
1920 |
+
</div>
|
1921 |
+
</div>
|
1922 |
+
<div class="progress-metric split">
|
1923 |
+
<div class="progress-label">
|
1924 |
+
<span>Nationalism</span>
|
1925 |
+
<span class="progress-value">38.5%</span>
|
1926 |
+
<span>Int'l</span>
|
1927 |
+
</div>
|
1928 |
+
<div class="progress-bar split">
|
1929 |
+
<div class="progress-fill-left" style="--scale: 0.385"></div>
|
1930 |
+
<div class="progress-fill-right" style="--scale: 0.615"></div>
|
1931 |
+
</div>
|
1932 |
+
</div>
|
1933 |
+
<div class="progress-metric split">
|
1934 |
+
<div class="progress-label">
|
1935 |
+
<span>Militarist</span>
|
1936 |
+
<span class="progress-value">33.5%</span>
|
1937 |
+
<span>Pacifist</span>
|
1938 |
+
</div>
|
1939 |
+
<div class="progress-bar split">
|
1940 |
+
<div class="progress-fill-left" style="--scale: 0.335"></div>
|
1941 |
+
<div class="progress-fill-right" style="--scale: 0.665"></div>
|
1942 |
+
</div>
|
1943 |
+
</div>
|
1944 |
+
<div class="progress-metric split">
|
1945 |
+
<div class="progress-label">
|
1946 |
+
<span>Assimilationist</span>
|
1947 |
+
<span class="progress-value">31.5%</span>
|
1948 |
+
<span>Multiculturalist</span>
|
1949 |
+
</div>
|
1950 |
+
<div class="progress-bar split">
|
1951 |
+
<div class="progress-fill-left" style="--scale: 0.315"></div>
|
1952 |
+
<div class="progress-fill-right" style="--scale: 0.685"></div>
|
1953 |
+
</div>
|
1954 |
+
</div>
|
1955 |
+
<div class="progress-metric split">
|
1956 |
+
<div class="progress-label">
|
1957 |
+
<span>Collectivize</span>
|
1958 |
+
<span class="progress-value">55.2%</span>
|
1959 |
+
<span>Privatize</span>
|
1960 |
+
</div>
|
1961 |
+
<div class="progress-bar split">
|
1962 |
+
<div class="progress-fill-left" style="--scale: 0.552"></div>
|
1963 |
+
<div class="progress-fill-right" style="--scale: 0.448"></div>
|
1964 |
+
</div>
|
1965 |
+
</div>
|
1966 |
+
<div class="progress-metric split">
|
1967 |
+
<div class="progress-label">
|
1968 |
+
<span>Planned</span>
|
1969 |
+
<span class="progress-value">48.8%</span>
|
1970 |
+
<span>LaissezFaire</span>
|
1971 |
+
</div>
|
1972 |
+
<div class="progress-bar split">
|
1973 |
+
<div class="progress-fill-left" style="--scale: 0.488"></div>
|
1974 |
+
<div class="progress-fill-right" style="--scale: 0.512"></div>
|
1975 |
+
</div>
|
1976 |
+
</div>
|
1977 |
+
<div class="progress-metric split">
|
1978 |
+
<div class="progress-label">
|
1979 |
+
<span>Isolationism</span>
|
1980 |
+
<span class="progress-value">37.7%</span>
|
1981 |
+
<span>Globalism</span>
|
1982 |
+
</div>
|
1983 |
+
<div class="progress-bar split">
|
1984 |
+
<div class="progress-fill-left" style="--scale: 0.377"></div>
|
1985 |
+
<div class="progress-fill-right" style="--scale: 0.623"></div>
|
1986 |
+
</div>
|
1987 |
+
</div>
|
1988 |
+
<div class="progress-metric split">
|
1989 |
+
<div class="progress-label">
|
1990 |
+
<span>Irreligious</span>
|
1991 |
+
<span class="progress-value">55.0%</span>
|
1992 |
+
<span>Religious</span>
|
1993 |
+
</div>
|
1994 |
+
<div class="progress-bar split">
|
1995 |
+
<div class="progress-fill-left" style="--scale: 0.550"></div>
|
1996 |
+
<div class="progress-fill-right" style="--scale: 0.450"></div>
|
1997 |
+
</div>
|
1998 |
+
</div>
|
1999 |
+
<div class="progress-metric split">
|
2000 |
+
<div class="progress-label">
|
2001 |
+
<span>Progressive</span>
|
2002 |
+
<span class="progress-value">62.3%</span>
|
2003 |
+
<span>Traditional</span>
|
2004 |
+
</div>
|
2005 |
+
<div class="progress-bar split">
|
2006 |
+
<div class="progress-fill-left" style="--scale: 0.623"></div>
|
2007 |
+
<div class="progress-fill-right" style="--scale: 0.377"></div>
|
2008 |
+
</div>
|
2009 |
+
</div>
|
2010 |
+
<div class="progress-metric split">
|
2011 |
+
<div class="progress-label">
|
2012 |
+
<span>Acceleration</span>
|
2013 |
+
<span class="progress-value">63.1%</span>
|
2014 |
+
<span>Bioconservative</span>
|
2015 |
+
</div>
|
2016 |
+
<div class="progress-bar split">
|
2017 |
+
<div class="progress-fill-left" style="--scale: 0.631"></div>
|
2018 |
+
<div class="progress-fill-right" style="--scale: 0.369"></div>
|
2019 |
+
</div>
|
2020 |
+
</div>
|
2021 |
+
</div>
|
2022 |
+
</details>
|
2023 |
+
</div>
|
2024 |
+
</div>
|
2025 |
+
<!-- Open LLM-Benchmark Results - TO BE UPDATED -->
|
2026 |
+
<!--<h2>Open LLM-Benchmark Results:</h2>
|
2027 |
+
<div class="benchmark-container">
|
2028 |
+
<div class="benchmark-notification">
|
2029 |
+
<span class="corner-bl"></span>
|
2030 |
+
<span class="corner-br"></span>
|
2031 |
+
<div class="notification-content">
|
2032 |
+
<span class="notification-text">
|
2033 |
+
Average Score: 43.68%
|
2034 |
+
<a href="https://huggingface.co/spaces/open-llm-leaderboard/open_llm_leaderboard#/?rankingMode=dynamic" target="_blank" class="benchmark-link">
|
2035 |
+
View Full Leaderboard →
|
2036 |
+
</a>
|
2037 |
+
</span>
|
2038 |
+
</div>
|
2039 |
+
</div>
|
2040 |
+
<div class="progress-metrics">
|
2041 |
+
<span class="corner-bl"></span>
|
2042 |
+
<span class="corner-br"></span>
|
2043 |
+
<div class="progress-metric">
|
2044 |
+
<div class="progress-label">
|
2045 |
+
<span>IFEval</span>
|
2046 |
+
<span class="progress-value">60.24%</span>
|
2047 |
+
</div>
|
2048 |
+
<div class="progress-bar">
|
2049 |
+
<div class="progress-fill" style="width: 60.24%"></div>
|
2050 |
+
</div>
|
2051 |
+
</div>
|
2052 |
+
<div class="progress-metric">
|
2053 |
+
<div class="progress-label">
|
2054 |
+
<span>BBH</span>
|
2055 |
+
<span class="progress-value">56.17%</span>
|
2056 |
+
</div>
|
2057 |
+
<div class="progress-bar">
|
2058 |
+
<div class="progress-fill" style="width: 56.17%"></div>
|
2059 |
+
</div>
|
2060 |
+
</div>
|
2061 |
+
<div class="progress-metric">
|
2062 |
+
<div class="progress-label">
|
2063 |
+
<span>MATH</span>
|
2064 |
+
<span class="progress-value">46.68%</span>
|
2065 |
+
</div>
|
2066 |
+
<div class="progress-bar">
|
2067 |
+
<div class="progress-fill" style="width: 46.68%"></div>
|
2068 |
+
</div>
|
2069 |
+
</div>
|
2070 |
+
<div class="progress-metric">
|
2071 |
+
<div class="progress-label">
|
2072 |
+
<span>GPQA</span>
|
2073 |
+
<span class="progress-value">29.19%</span>
|
2074 |
+
</div>
|
2075 |
+
<div class="progress-bar">
|
2076 |
+
<div class="progress-fill" style="width: 29.19%"></div>
|
2077 |
+
</div>
|
2078 |
+
</div>
|
2079 |
+
<div class="progress-metric">
|
2080 |
+
<div class="progress-label">
|
2081 |
+
<span>MUSR</span>
|
2082 |
+
<span class="progress-value">20.19%</span>
|
2083 |
+
</div>
|
2084 |
+
<div class="progress-bar">
|
2085 |
+
<div class="progress-fill" style="width: 20.19%"></div>
|
2086 |
+
</div>
|
2087 |
+
</div>
|
2088 |
+
<div class="progress-metric">
|
2089 |
+
<div class="progress-label">
|
2090 |
+
<span>MMLU-Pro</span>
|
2091 |
+
<span class="progress-value">49.59%</span>
|
2092 |
+
</div>
|
2093 |
+
<div class="progress-bar">
|
2094 |
+
<div class="progress-fill" style="width: 49.59%"></div>
|
2095 |
+
</div>
|
2096 |
+
</div>
|
2097 |
+
</div>
|
2098 |
+
</div>-->
|
2099 |
+
<div class="section-container">
|
2100 |
+
<h2>Recommended Sampler Settings: <strong>By @Geechan</strong></h2>
|
2101 |
+
<div class="settings-grid">
|
2102 |
+
<div class="settings-card">
|
2103 |
+
<span class="corner-bl"></span>
|
2104 |
+
<span class="corner-br"></span>
|
2105 |
+
<div class="settings-header">
|
2106 |
+
<h3>Dynamic Temperature</h3>
|
2107 |
+
</div>
|
2108 |
+
<div class="settings-content">
|
2109 |
+
<div class="setting-item">
|
2110 |
+
<span class="setting-label">Min</span>
|
2111 |
+
<span class="setting-value">1.0</span>
|
2112 |
+
</div>
|
2113 |
+
<div class="setting-item">
|
2114 |
+
<span class="setting-label">Max</span>
|
2115 |
+
<span class="setting-value">1.3-1.35</span>
|
2116 |
+
</div>
|
2117 |
+
<div class="setting-item">
|
2118 |
+
<span class="setting-label">Exponent</span>
|
2119 |
+
<span class="setting-value">1.0</span>
|
2120 |
+
</div>
|
2121 |
+
</div>
|
2122 |
+
</div>
|
2123 |
+
</div>
|
2124 |
+
<div class="settings-card">
|
2125 |
+
<span class="corner-bl"></span>
|
2126 |
+
<span class="corner-br"></span>
|
2127 |
+
<div class="settings-header">
|
2128 |
+
<h3>Static Temperature</h3>
|
2129 |
+
</div>
|
2130 |
+
<div class="settings-content">
|
2131 |
+
<div class="setting-item highlight">
|
2132 |
+
<span class="setting-value">1.2</span>
|
2133 |
+
</div>
|
2134 |
+
</div>
|
2135 |
+
</div>
|
2136 |
+
<div class="settings-card">
|
2137 |
+
<span class="corner-bl"></span>
|
2138 |
+
<span class="corner-br"></span>
|
2139 |
+
<div class="settings-header">
|
2140 |
+
<h3>Min P</h3>
|
2141 |
+
</div>
|
2142 |
+
<div class="settings-content">
|
2143 |
+
<div class="setting-item highlight">
|
2144 |
+
<span class="setting-value">0.02</span>
|
2145 |
+
</div>
|
2146 |
+
</div>
|
2147 |
+
</div>
|
2148 |
+
<div class="settings-card">
|
2149 |
+
<span class="corner-bl"></span>
|
2150 |
+
<span class="corner-br"></span>
|
2151 |
+
<div class="settings-header">
|
2152 |
+
<h3>DRY Settings</h3>
|
2153 |
+
</div>
|
2154 |
+
<div class="settings-content">
|
2155 |
+
<div class="setting-item">
|
2156 |
+
<span class="setting-label">Multiplier</span>
|
2157 |
+
<span class="setting-value">0.8</span>
|
2158 |
+
</div>
|
2159 |
+
<div class="setting-item">
|
2160 |
+
<span class="setting-label">Base</span>
|
2161 |
+
<span class="setting-value">1.75</span>
|
2162 |
+
</div>
|
2163 |
+
<div class="setting-item">
|
2164 |
+
<span class="setting-label">Length</span>
|
2165 |
+
<span class="setting-value">4</span>
|
2166 |
+
</div>
|
2167 |
+
</div>
|
2168 |
+
</div>
|
2169 |
+
</div>
|
2170 |
+
</div>
|
2171 |
+
<div class="section-container">
|
2172 |
+
<h2>Recommended Templates & Prompts</h2>
|
2173 |
+
<div class="template-card">
|
2174 |
+
<span class="corner-bl"></span>
|
2175 |
+
<span class="corner-br"></span>
|
2176 |
+
<div class="template-item">
|
2177 |
+
<div class="template-content">
|
2178 |
+
<a href="https://huggingface.co/Konnect1221/Methception-Llamaception-SillyTavern-Preset" target="_blank" class="template-link">
|
2179 |
+
LLam@ception
|
2180 |
+
<span class="link-arrow">→</span>
|
2181 |
+
</a>
|
2182 |
+
<span class="template-author">by @.konnect</span>
|
2183 |
+
</div>
|
2184 |
+
</div>
|
2185 |
+
<div class="template-item">
|
2186 |
+
<div class="template-content">
|
2187 |
+
<a href="https://huggingface.co/Steelskull/L3.3-Damascus-R1/resolve/main/Leception-XML-v1.json" target="_blank" class="template-link">
|
2188 |
+
LeCeption
|
2189 |
+
<span class="link-arrow">→</span>
|
2190 |
+
</a>
|
2191 |
+
<span class="template-author">by @Steel</span> > XML version of Llam@ception 1.5.2 with stepped thinking added
|
2192 |
+
</div>
|
2193 |
+
</div>
|
2194 |
+
</div>
|
2195 |
+
</div>
|
2196 |
+
<div class="section-container">
|
2197 |
+
<h2>Quantized Versions</h2>
|
2198 |
+
<div class="quantized-container">
|
2199 |
+
<div class="quantized-section">
|
2200 |
+
<span class="corner-bl"></span>
|
2201 |
+
<span class="corner-br"></span>
|
2202 |
+
<h3>GGUF Quantizations</h3>
|
2203 |
+
<div class="quantized-items">
|
2204 |
+
<div class="quantized-item">
|
2205 |
+
<span class="author">bartowski</span>
|
2206 |
+
<a href="https://huggingface.co/bartowski/Steelskull_L3.3-Damascus-R1-GGUF" target="_blank">
|
2207 |
+
Combined-GGUF
|
2208 |
+
<span class="link-arrow">→</span>
|
2209 |
+
</a>
|
2210 |
+
</div>
|
2211 |
+
<div class="quantized-item">
|
2212 |
+
<span class="author">mradermacher</span>
|
2213 |
+
<div class="multi-links">
|
2214 |
+
<a href="https://huggingface.co/mradermacher/L3.3-Damascus-R1-GGUF" target="_blank">
|
2215 |
+
GGUF
|
2216 |
+
<span class="link-arrow">→</span>
|
2217 |
+
</a>
|
2218 |
+
<span class="separator">//</span>
|
2219 |
+
<a href="https://huggingface.co/mradermacher/L3.3-Damascus-R1-i1-GGUF" target="_blank">
|
2220 |
+
Imat-GGUF
|
2221 |
+
<span class="link-arrow">→</span>
|
2222 |
+
</a>
|
2223 |
+
</div>
|
2224 |
+
</div>
|
2225 |
+
<div class="quantized-item">
|
2226 |
+
<span class="author">Nexesenex</span>
|
2227 |
+
<a href="https://huggingface.co/Nexesenex/Steelskull_L3.3-Damascus-R1-bf16-iMat-CF-GGUF" target="_blank">
|
2228 |
+
Custom-Imat-GGUF
|
2229 |
+
<span class="link-arrow">→</span>
|
2230 |
+
</a>
|
2231 |
+
</div>
|
2232 |
+
</div>
|
2233 |
+
</div>
|
2234 |
+
<div class="quantized-section">
|
2235 |
+
<span class="corner-bl"></span>
|
2236 |
+
<span class="corner-br"></span>
|
2237 |
+
<h3>EXL2 Quantizations</h3>
|
2238 |
+
<div class="quantized-items">
|
2239 |
+
<div class="quantized-item">
|
2240 |
+
<span class="author">ReadyArt</span>
|
2241 |
+
<div class="multi-links">
|
2242 |
+
<a href="https://huggingface.co/ReadyArt/L3.3-Damascus-R1_EXl2_8.0bpw_H8" target="_blank">
|
2243 |
+
8.0BPW-EXL2
|
2244 |
+
<span class="link-arrow">→</span>
|
2245 |
+
</a>
|
2246 |
+
<span class="separator">//</span>
|
2247 |
+
<a href="https://huggingface.co/ReadyArt/L3.3-Damascus-R1_EXl2_6.65bpw_H8" target="_blank">
|
2248 |
+
6.65BPW-EXL2
|
2249 |
+
<span class="link-arrow">→</span>
|
2250 |
+
</a>
|
2251 |
+
</div>
|
2252 |
+
</div>
|
2253 |
+
<div class="quantized-item">
|
2254 |
+
<span class="author">Darkhn</span>
|
2255 |
+
<a href="https://huggingface.co/Darkhn/Steelskull-L3.3-Damascus-R1-6.0bpw-h8-exl2" target="_blank">
|
2256 |
+
6.0BPW-EXL2
|
2257 |
+
<span class="link-arrow">→</span>
|
2258 |
+
</a>
|
2259 |
+
<span class="separator">//</span>
|
2260 |
+
<a href="https://huggingface.co/Darkhn/L3.3-Damascus-R1-5.0bpw-h8-exl2" target="_blank">
|
2261 |
+
5.0BPW-EXL2
|
2262 |
+
<span class="link-arrow">→</span>
|
2263 |
+
</a>
|
2264 |
+
<span class="separator">//</span>
|
2265 |
+
<a href="https://huggingface.co/Darkhn/L3.3-Damascus-R1-4.0bpw-h8-exl2" target="_blank">
|
2266 |
+
4.0BPW-EXL2
|
2267 |
+
<span class="link-arrow">→</span>
|
2268 |
+
</a>
|
2269 |
+
</div>
|
2270 |
+
</div>
|
2271 |
+
</div>
|
2272 |
+
<div class="quantized-section">
|
2273 |
+
<span class="corner-bl"></span>
|
2274 |
+
<span class="corner-br"></span>
|
2275 |
+
<h3>FP8 Dynamic</h3>
|
2276 |
+
<div class="quantized-items">
|
2277 |
+
<div class="quantized-item">
|
2278 |
+
<span class="author">yeyaowei</span>
|
2279 |
+
<a href="https://huggingface.co/yeyaowei/L3.3-Damascus-R1-FP8-Dynamic" target="_blank">
|
2280 |
+
FP8-Dynamic
|
2281 |
+
<span class="link-arrow">→</span>
|
2282 |
+
</a>
|
2283 |
+
</div>
|
2284 |
+
</div>
|
2285 |
+
</div>
|
2286 |
+
</div>
|
2287 |
+
</div>
|
2288 |
+
<div class="support-section">
|
2289 |
+
<span class="corner-bl"></span>
|
2290 |
+
<span class="corner-br"></span>
|
2291 |
+
<h2>Support & Community:</h2>
|
2292 |
+
<div class="support-buttons">
|
2293 |
+
<a href="https://ko-fi.com/Y8Y0AO2XE" target="_blank" class="button">
|
2294 |
+
Support on Ko-fi
|
2295 |
+
</a>
|
2296 |
+
<a href="https://discord.gg/4tCngSm3qZ" target="_blank" class="button">
|
2297 |
+
Join Discord
|
2298 |
+
</a>
|
2299 |
+
</div>
|
2300 |
+
<div class="special-thanks">
|
2301 |
+
<h3>Special Thanks</h3>
|
2302 |
+
<ul class="thanks-list">
|
2303 |
+
<li><strong>@Geechan</strong> for feedback and sampler settings</li>
|
2304 |
+
<li><strong>@Konnect</strong> for their feedback and templates</li>
|
2305 |
+
<li><strong>@Kistara</strong> for their feedback and help with the model mascot design</li>
|
2306 |
+
<li><strong>@Thana Alt</strong> for their feedback and Quants</li>
|
2307 |
+
<li><strong>@Lightning_missile</strong> for their feedback</li>
|
2308 |
+
<li><strong>@Yemosvoto</strong> for the model name</li>
|
2309 |
+
<li><strong>The Arli community</strong> for feedback and testing</li>
|
2310 |
+
<li><strong>The BeaverAI community</strong> for feedback and testing</li>
|
2311 |
+
</ul>
|
2312 |
+
<p class="thanks-note">I wish I could add everyone but im pretty sure it would be as long as the card!</p>
|
2313 |
+
</div>
|
2314 |
+
</div>
|
2315 |
+
</div>
|
2316 |
+
</div>
|
2317 |
+
</body>
|
2318 |
+
</html>
|
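As a quick reference, the sampler values recommended in the card above can be gathered into a single settings object. The sketch below is illustrative only: the key names (dynatemp_min, dry_multiplier, and so on) follow common llama.cpp-style conventions and are assumptions rather than anything shipped in this repository, so map them onto whatever fields your frontend or backend actually exposes.

# Recommended Damascus-R1 samplers from the card, collected into one dict.
# Key names are assumptions (llama.cpp-style); adjust them for your backend.
damascus_r1_samplers = {
    # Option A: static temperature
    "temperature": 1.2,
    # Option B: dynamic temperature (use instead of the static value)
    "dynatemp_min": 1.0,
    "dynatemp_max": 1.3,        # card recommends 1.3-1.35
    "dynatemp_exponent": 1.0,
    # Min P
    "min_p": 0.02,
    # DRY repetition penalty
    "dry_multiplier": 0.8,
    "dry_base": 1.75,
    "dry_allowed_length": 4,
}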
config.json
ADDED
@@ -0,0 +1,51 @@
1 |
+
{
|
2 |
+
"_name_or_path": "LLM-Experiments/L3.1x3.3-Hydroblated-R1-70B-v3",
|
3 |
+
"architectures": [
|
4 |
+
"LlamaForCausalLM"
|
5 |
+
],
|
6 |
+
"attention_bias": false,
|
7 |
+
"attention_dropout": 0.0,
|
8 |
+
"bos_token_id": 128000,
|
9 |
+
"eos_token_id": [
|
10 |
+
128001,
|
11 |
+
128008,
|
12 |
+
128009
|
13 |
+
],
|
14 |
+
"head_dim": 128,
|
15 |
+
"hidden_act": "silu",
|
16 |
+
"hidden_size": 8192,
|
17 |
+
"initializer_range": 0.02,
|
18 |
+
"intermediate_size": 28672,
|
19 |
+
"max_position_embeddings": 131072,
|
20 |
+
"mlp_bias": false,
|
21 |
+
"model_type": "llama",
|
22 |
+
"num_attention_heads": 64,
|
23 |
+
"num_hidden_layers": 80,
|
24 |
+
"num_key_value_heads": 8,
|
25 |
+
"pretraining_tp": 1,
|
26 |
+
"rms_norm_eps": 1e-05,
|
27 |
+
"rope_scaling": {
|
28 |
+
"factor": 8.0,
|
29 |
+
"high_freq_factor": 4.0,
|
30 |
+
"low_freq_factor": 1.0,
|
31 |
+
"original_max_position_embeddings": 8192,
|
32 |
+
"rope_type": "llama3"
|
33 |
+
},
|
34 |
+
"rope_theta": 500000.0,
|
35 |
+
"tie_word_embeddings": false,
|
36 |
+
"torch_dtype": "bfloat16",
|
37 |
+
"transformers_version": "4.48.2",
|
38 |
+
"use_cache": true,
|
39 |
+
"vocab_size": 128256,
|
40 |
+
"quantization_config": {
|
41 |
+
"quant_method": "exl2",
|
42 |
+
"version": "0.2.7",
|
43 |
+
"bits": 6.5,
|
44 |
+
"head_bits": 8,
|
45 |
+
"calibration": {
|
46 |
+
"rows": 115,
|
47 |
+
"length": 2048,
|
48 |
+
"dataset": "(default)"
|
49 |
+
}
|
50 |
+
}
|
51 |
+
}
|
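Because this branch ships EXL2-quantized weights, the config.json above carries both the usual Llama 3.3 architecture fields and an exllamav2 quantization block. A minimal sketch for inspecting those fields after downloading the repo (the local path is an assumption):

# Minimal sketch: read the config.json shown above and print the fields
# most relevant to context length and quantization. The path is illustrative.
import json

with open("L3.3-Damascus-R1/config.json") as f:
    cfg = json.load(f)

print(cfg["max_position_embeddings"])      # 131072-token context window
print(cfg["rope_scaling"]["rope_type"])    # "llama3" RoPE scaling, factor 8.0
print(cfg["quantization_config"]["bits"])  # 6.5 bpw EXL2 quantization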
measurement.json
ADDED
The diff for this file is too large to render.
See raw diff
|
|
model.safetensors.index.json
ADDED
@@ -0,0 +1 @@
1 |
+
{"metadata": {"mergekit_version": "0.0.6", "total_size": 141107412992}, "weight_map": {"lm_head.weight": "model-00001-of-00030.safetensors", "model.embed_tokens.weight": "model-00001-of-00030.safetensors", "model.layers.0.input_layernorm.weight": "model-00001-of-00030.safetensors", "model.layers.0.mlp.down_proj.weight": "model-00001-of-00030.safetensors", "model.layers.0.mlp.gate_proj.weight": "model-00002-of-00030.safetensors", "model.layers.0.mlp.up_proj.weight": "model-00002-of-00030.safetensors", "model.layers.0.post_attention_layernorm.weight": "model-00002-of-00030.safetensors", "model.layers.0.self_attn.k_proj.weight": "model-00002-of-00030.safetensors", "model.layers.0.self_attn.o_proj.weight": "model-00002-of-00030.safetensors", "model.layers.0.self_attn.q_proj.weight": "model-00002-of-00030.safetensors", "model.layers.0.self_attn.v_proj.weight": "model-00002-of-00030.safetensors", "model.layers.1.input_layernorm.weight": "model-00002-of-00030.safetensors", "model.layers.1.mlp.down_proj.weight": "model-00002-of-00030.safetensors", "model.layers.1.mlp.gate_proj.weight": "model-00002-of-00030.safetensors", "model.layers.1.mlp.up_proj.weight": "model-00002-of-00030.safetensors", "model.layers.1.post_attention_layernorm.weight": "model-00002-of-00030.safetensors", "model.layers.1.self_attn.k_proj.weight": "model-00002-of-00030.safetensors", "model.layers.1.self_attn.o_proj.weight": "model-00002-of-00030.safetensors", "model.layers.1.self_attn.q_proj.weight": "model-00002-of-00030.safetensors", "model.layers.1.self_attn.v_proj.weight": "model-00002-of-00030.safetensors", "model.layers.10.input_layernorm.weight": "model-00002-of-00030.safetensors", "model.layers.10.mlp.down_proj.weight": "model-00002-of-00030.safetensors", "model.layers.10.mlp.gate_proj.weight": "model-00002-of-00030.safetensors", "model.layers.10.mlp.up_proj.weight": "model-00002-of-00030.safetensors", "model.layers.10.post_attention_layernorm.weight": "model-00002-of-00030.safetensors", "model.layers.10.self_attn.k_proj.weight": "model-00002-of-00030.safetensors", "model.layers.10.self_attn.o_proj.weight": "model-00002-of-00030.safetensors", "model.layers.10.self_attn.q_proj.weight": "model-00002-of-00030.safetensors", "model.layers.10.self_attn.v_proj.weight": "model-00002-of-00030.safetensors", "model.layers.11.input_layernorm.weight": "model-00002-of-00030.safetensors", "model.layers.11.mlp.down_proj.weight": "model-00003-of-00030.safetensors", "model.layers.11.mlp.gate_proj.weight": "model-00003-of-00030.safetensors", "model.layers.11.mlp.up_proj.weight": "model-00003-of-00030.safetensors", "model.layers.11.post_attention_layernorm.weight": "model-00003-of-00030.safetensors", "model.layers.11.self_attn.k_proj.weight": "model-00003-of-00030.safetensors", "model.layers.11.self_attn.o_proj.weight": "model-00003-of-00030.safetensors", "model.layers.11.self_attn.q_proj.weight": "model-00003-of-00030.safetensors", "model.layers.11.self_attn.v_proj.weight": "model-00003-of-00030.safetensors", "model.layers.12.input_layernorm.weight": "model-00003-of-00030.safetensors", "model.layers.12.mlp.down_proj.weight": "model-00003-of-00030.safetensors", "model.layers.12.mlp.gate_proj.weight": "model-00003-of-00030.safetensors", "model.layers.12.mlp.up_proj.weight": "model-00003-of-00030.safetensors", "model.layers.12.post_attention_layernorm.weight": "model-00003-of-00030.safetensors", "model.layers.12.self_attn.k_proj.weight": "model-00003-of-00030.safetensors", "model.layers.12.self_attn.o_proj.weight": 
"model-00003-of-00030.safetensors", "model.layers.12.self_attn.q_proj.weight": "model-00003-of-00030.safetensors", "model.layers.12.self_attn.v_proj.weight": "model-00003-of-00030.safetensors", "model.layers.13.input_layernorm.weight": "model-00003-of-00030.safetensors", "model.layers.13.mlp.down_proj.weight": "model-00003-of-00030.safetensors", "model.layers.13.mlp.gate_proj.weight": "model-00003-of-00030.safetensors", "model.layers.13.mlp.up_proj.weight": "model-00003-of-00030.safetensors", "model.layers.13.post_attention_layernorm.weight": "model-00003-of-00030.safetensors", "model.layers.13.self_attn.k_proj.weight": "model-00003-of-00030.safetensors", "model.layers.13.self_attn.o_proj.weight": "model-00003-of-00030.safetensors", "model.layers.13.self_attn.q_proj.weight": "model-00004-of-00030.safetensors", "model.layers.13.self_attn.v_proj.weight": "model-00004-of-00030.safetensors", "model.layers.14.input_layernorm.weight": "model-00004-of-00030.safetensors", "model.layers.14.mlp.down_proj.weight": "model-00004-of-00030.safetensors", "model.layers.14.mlp.gate_proj.weight": "model-00004-of-00030.safetensors", "model.layers.14.mlp.up_proj.weight": "model-00004-of-00030.safetensors", "model.layers.14.post_attention_layernorm.weight": "model-00004-of-00030.safetensors", "model.layers.14.self_attn.k_proj.weight": "model-00004-of-00030.safetensors", "model.layers.14.self_attn.o_proj.weight": "model-00004-of-00030.safetensors", "model.layers.14.self_attn.q_proj.weight": "model-00004-of-00030.safetensors", "model.layers.14.self_attn.v_proj.weight": "model-00004-of-00030.safetensors", "model.layers.15.input_layernorm.weight": "model-00004-of-00030.safetensors", "model.layers.15.mlp.down_proj.weight": "model-00004-of-00030.safetensors", "model.layers.15.mlp.gate_proj.weight": "model-00004-of-00030.safetensors", "model.layers.15.mlp.up_proj.weight": "model-00004-of-00030.safetensors", "model.layers.15.post_attention_layernorm.weight": "model-00004-of-00030.safetensors", "model.layers.15.self_attn.k_proj.weight": "model-00004-of-00030.safetensors", "model.layers.15.self_attn.o_proj.weight": "model-00004-of-00030.safetensors", "model.layers.15.self_attn.q_proj.weight": "model-00004-of-00030.safetensors", "model.layers.15.self_attn.v_proj.weight": "model-00004-of-00030.safetensors", "model.layers.16.input_layernorm.weight": "model-00004-of-00030.safetensors", "model.layers.16.mlp.down_proj.weight": "model-00004-of-00030.safetensors", "model.layers.16.mlp.gate_proj.weight": "model-00004-of-00030.safetensors", "model.layers.16.mlp.up_proj.weight": "model-00004-of-00030.safetensors", "model.layers.16.post_attention_layernorm.weight": "model-00004-of-00030.safetensors", "model.layers.16.self_attn.k_proj.weight": "model-00004-of-00030.safetensors", "model.layers.16.self_attn.o_proj.weight": "model-00005-of-00030.safetensors", "model.layers.16.self_attn.q_proj.weight": "model-00005-of-00030.safetensors", "model.layers.16.self_attn.v_proj.weight": "model-00005-of-00030.safetensors", "model.layers.17.input_layernorm.weight": "model-00005-of-00030.safetensors", "model.layers.17.mlp.down_proj.weight": "model-00005-of-00030.safetensors", "model.layers.17.mlp.gate_proj.weight": "model-00005-of-00030.safetensors", "model.layers.17.mlp.up_proj.weight": "model-00005-of-00030.safetensors", "model.layers.17.post_attention_layernorm.weight": "model-00005-of-00030.safetensors", "model.layers.17.self_attn.k_proj.weight": "model-00005-of-00030.safetensors", "model.layers.17.self_attn.o_proj.weight": 
"model-00005-of-00030.safetensors", "model.layers.17.self_attn.q_proj.weight": "model-00005-of-00030.safetensors", "model.layers.17.self_attn.v_proj.weight": "model-00005-of-00030.safetensors", "model.layers.18.input_layernorm.weight": "model-00005-of-00030.safetensors", "model.layers.18.mlp.down_proj.weight": "model-00005-of-00030.safetensors", "model.layers.18.mlp.gate_proj.weight": "model-00005-of-00030.safetensors", "model.layers.18.mlp.up_proj.weight": "model-00005-of-00030.safetensors", "model.layers.18.post_attention_layernorm.weight": "model-00005-of-00030.safetensors", "model.layers.18.self_attn.k_proj.weight": "model-00005-of-00030.safetensors", "model.layers.18.self_attn.o_proj.weight": "model-00005-of-00030.safetensors", "model.layers.18.self_attn.q_proj.weight": "model-00005-of-00030.safetensors", "model.layers.18.self_attn.v_proj.weight": "model-00005-of-00030.safetensors", "model.layers.19.input_layernorm.weight": "model-00005-of-00030.safetensors", "model.layers.19.mlp.down_proj.weight": "model-00005-of-00030.safetensors", "model.layers.19.mlp.gate_proj.weight": "model-00005-of-00030.safetensors", "model.layers.19.mlp.up_proj.weight": "model-00006-of-00030.safetensors", "model.layers.19.post_attention_layernorm.weight": "model-00006-of-00030.safetensors", "model.layers.19.self_attn.k_proj.weight": "model-00006-of-00030.safetensors", "model.layers.19.self_attn.o_proj.weight": "model-00006-of-00030.safetensors", "model.layers.19.self_attn.q_proj.weight": "model-00006-of-00030.safetensors", "model.layers.19.self_attn.v_proj.weight": "model-00006-of-00030.safetensors", "model.layers.2.input_layernorm.weight": "model-00006-of-00030.safetensors", "model.layers.2.mlp.down_proj.weight": "model-00006-of-00030.safetensors", "model.layers.2.mlp.gate_proj.weight": "model-00006-of-00030.safetensors", "model.layers.2.mlp.up_proj.weight": "model-00006-of-00030.safetensors", "model.layers.2.post_attention_layernorm.weight": "model-00006-of-00030.safetensors", "model.layers.2.self_attn.k_proj.weight": "model-00006-of-00030.safetensors", "model.layers.2.self_attn.o_proj.weight": "model-00006-of-00030.safetensors", "model.layers.2.self_attn.q_proj.weight": "model-00006-of-00030.safetensors", "model.layers.2.self_attn.v_proj.weight": "model-00006-of-00030.safetensors", "model.layers.20.input_layernorm.weight": "model-00006-of-00030.safetensors", "model.layers.20.mlp.down_proj.weight": "model-00006-of-00030.safetensors", "model.layers.20.mlp.gate_proj.weight": "model-00006-of-00030.safetensors", "model.layers.20.mlp.up_proj.weight": "model-00006-of-00030.safetensors", "model.layers.20.post_attention_layernorm.weight": "model-00006-of-00030.safetensors", "model.layers.20.self_attn.k_proj.weight": "model-00006-of-00030.safetensors", "model.layers.20.self_attn.o_proj.weight": "model-00006-of-00030.safetensors", "model.layers.20.self_attn.q_proj.weight": "model-00006-of-00030.safetensors", "model.layers.20.self_attn.v_proj.weight": "model-00006-of-00030.safetensors", "model.layers.21.input_layernorm.weight": "model-00006-of-00030.safetensors", "model.layers.21.mlp.down_proj.weight": "model-00006-of-00030.safetensors", "model.layers.21.mlp.gate_proj.weight": "model-00007-of-00030.safetensors", "model.layers.21.mlp.up_proj.weight": "model-00007-of-00030.safetensors", "model.layers.21.post_attention_layernorm.weight": "model-00007-of-00030.safetensors", "model.layers.21.self_attn.k_proj.weight": "model-00007-of-00030.safetensors", "model.layers.21.self_attn.o_proj.weight": 
"model-00007-of-00030.safetensors", "model.layers.21.self_attn.q_proj.weight": "model-00007-of-00030.safetensors", "model.layers.21.self_attn.v_proj.weight": "model-00007-of-00030.safetensors", "model.layers.22.input_layernorm.weight": "model-00007-of-00030.safetensors", "model.layers.22.mlp.down_proj.weight": "model-00007-of-00030.safetensors", "model.layers.22.mlp.gate_proj.weight": "model-00007-of-00030.safetensors", "model.layers.22.mlp.up_proj.weight": "model-00007-of-00030.safetensors", "model.layers.22.post_attention_layernorm.weight": "model-00007-of-00030.safetensors", "model.layers.22.self_attn.k_proj.weight": "model-00007-of-00030.safetensors", "model.layers.22.self_attn.o_proj.weight": "model-00007-of-00030.safetensors", "model.layers.22.self_attn.q_proj.weight": "model-00007-of-00030.safetensors", "model.layers.22.self_attn.v_proj.weight": "model-00007-of-00030.safetensors", "model.layers.23.input_layernorm.weight": "model-00007-of-00030.safetensors", "model.layers.23.mlp.down_proj.weight": "model-00007-of-00030.safetensors", "model.layers.23.mlp.gate_proj.weight": "model-00007-of-00030.safetensors", "model.layers.23.mlp.up_proj.weight": "model-00007-of-00030.safetensors", "model.layers.23.post_attention_layernorm.weight": "model-00007-of-00030.safetensors", "model.layers.23.self_attn.k_proj.weight": "model-00007-of-00030.safetensors", "model.layers.23.self_attn.o_proj.weight": "model-00007-of-00030.safetensors", "model.layers.23.self_attn.q_proj.weight": "model-00007-of-00030.safetensors", "model.layers.23.self_attn.v_proj.weight": "model-00007-of-00030.safetensors", "model.layers.24.input_layernorm.weight": "model-00007-of-00030.safetensors", "model.layers.24.mlp.down_proj.weight": "model-00008-of-00030.safetensors", "model.layers.24.mlp.gate_proj.weight": "model-00008-of-00030.safetensors", "model.layers.24.mlp.up_proj.weight": "model-00008-of-00030.safetensors", "model.layers.24.post_attention_layernorm.weight": "model-00008-of-00030.safetensors", "model.layers.24.self_attn.k_proj.weight": "model-00008-of-00030.safetensors", "model.layers.24.self_attn.o_proj.weight": "model-00008-of-00030.safetensors", "model.layers.24.self_attn.q_proj.weight": "model-00008-of-00030.safetensors", "model.layers.24.self_attn.v_proj.weight": "model-00008-of-00030.safetensors", "model.layers.25.input_layernorm.weight": "model-00008-of-00030.safetensors", "model.layers.25.mlp.down_proj.weight": "model-00008-of-00030.safetensors", "model.layers.25.mlp.gate_proj.weight": "model-00008-of-00030.safetensors", "model.layers.25.mlp.up_proj.weight": "model-00008-of-00030.safetensors", "model.layers.25.post_attention_layernorm.weight": "model-00008-of-00030.safetensors", "model.layers.25.self_attn.k_proj.weight": "model-00008-of-00030.safetensors", "model.layers.25.self_attn.o_proj.weight": "model-00008-of-00030.safetensors", "model.layers.25.self_attn.q_proj.weight": "model-00008-of-00030.safetensors", "model.layers.25.self_attn.v_proj.weight": "model-00008-of-00030.safetensors", "model.layers.26.input_layernorm.weight": "model-00008-of-00030.safetensors", "model.layers.26.mlp.down_proj.weight": "model-00008-of-00030.safetensors", "model.layers.26.mlp.gate_proj.weight": "model-00008-of-00030.safetensors", "model.layers.26.mlp.up_proj.weight": "model-00008-of-00030.safetensors", "model.layers.26.post_attention_layernorm.weight": "model-00008-of-00030.safetensors", "model.layers.26.self_attn.k_proj.weight": "model-00008-of-00030.safetensors", "model.layers.26.self_attn.o_proj.weight": 
"model-00008-of-00030.safetensors", "model.layers.26.self_attn.q_proj.weight": "model-00009-of-00030.safetensors", "model.layers.26.self_attn.v_proj.weight": "model-00009-of-00030.safetensors", "model.layers.27.input_layernorm.weight": "model-00009-of-00030.safetensors", "model.layers.27.mlp.down_proj.weight": "model-00009-of-00030.safetensors", "model.layers.27.mlp.gate_proj.weight": "model-00009-of-00030.safetensors", "model.layers.27.mlp.up_proj.weight": "model-00009-of-00030.safetensors", "model.layers.27.post_attention_layernorm.weight": "model-00009-of-00030.safetensors", "model.layers.27.self_attn.k_proj.weight": "model-00009-of-00030.safetensors", "model.layers.27.self_attn.o_proj.weight": "model-00009-of-00030.safetensors", "model.layers.27.self_attn.q_proj.weight": "model-00009-of-00030.safetensors", "model.layers.27.self_attn.v_proj.weight": "model-00009-of-00030.safetensors", "model.layers.28.input_layernorm.weight": "model-00009-of-00030.safetensors", "model.layers.28.mlp.down_proj.weight": "model-00009-of-00030.safetensors", "model.layers.28.mlp.gate_proj.weight": "model-00009-of-00030.safetensors", "model.layers.28.mlp.up_proj.weight": "model-00009-of-00030.safetensors", "model.layers.28.post_attention_layernorm.weight": "model-00009-of-00030.safetensors", "model.layers.28.self_attn.k_proj.weight": "model-00009-of-00030.safetensors", "model.layers.28.self_attn.o_proj.weight": "model-00009-of-00030.safetensors", "model.layers.28.self_attn.q_proj.weight": "model-00009-of-00030.safetensors", "model.layers.28.self_attn.v_proj.weight": "model-00009-of-00030.safetensors", "model.layers.29.input_layernorm.weight": "model-00009-of-00030.safetensors", "model.layers.29.mlp.down_proj.weight": "model-00009-of-00030.safetensors", "model.layers.29.mlp.gate_proj.weight": "model-00009-of-00030.safetensors", "model.layers.29.mlp.up_proj.weight": "model-00009-of-00030.safetensors", "model.layers.29.post_attention_layernorm.weight": "model-00009-of-00030.safetensors", "model.layers.29.self_attn.k_proj.weight": "model-00009-of-00030.safetensors", "model.layers.29.self_attn.o_proj.weight": "model-00010-of-00030.safetensors", "model.layers.29.self_attn.q_proj.weight": "model-00010-of-00030.safetensors", "model.layers.29.self_attn.v_proj.weight": "model-00010-of-00030.safetensors", "model.layers.3.input_layernorm.weight": "model-00010-of-00030.safetensors", "model.layers.3.mlp.down_proj.weight": "model-00010-of-00030.safetensors", "model.layers.3.mlp.gate_proj.weight": "model-00010-of-00030.safetensors", "model.layers.3.mlp.up_proj.weight": "model-00010-of-00030.safetensors", "model.layers.3.post_attention_layernorm.weight": "model-00010-of-00030.safetensors", "model.layers.3.self_attn.k_proj.weight": "model-00010-of-00030.safetensors", "model.layers.3.self_attn.o_proj.weight": "model-00010-of-00030.safetensors", "model.layers.3.self_attn.q_proj.weight": "model-00010-of-00030.safetensors", "model.layers.3.self_attn.v_proj.weight": "model-00010-of-00030.safetensors", "model.layers.30.input_layernorm.weight": "model-00010-of-00030.safetensors", "model.layers.30.mlp.down_proj.weight": "model-00010-of-00030.safetensors", "model.layers.30.mlp.gate_proj.weight": "model-00010-of-00030.safetensors", "model.layers.30.mlp.up_proj.weight": "model-00010-of-00030.safetensors", "model.layers.30.post_attention_layernorm.weight": "model-00010-of-00030.safetensors", "model.layers.30.self_attn.k_proj.weight": "model-00010-of-00030.safetensors", "model.layers.30.self_attn.o_proj.weight": 
"model-00010-of-00030.safetensors", "model.layers.30.self_attn.q_proj.weight": "model-00010-of-00030.safetensors", "model.layers.30.self_attn.v_proj.weight": "model-00010-of-00030.safetensors", "model.layers.31.input_layernorm.weight": "model-00010-of-00030.safetensors", "model.layers.31.mlp.down_proj.weight": "model-00010-of-00030.safetensors", "model.layers.31.mlp.gate_proj.weight": "model-00010-of-00030.safetensors", "model.layers.31.mlp.up_proj.weight": "model-00011-of-00030.safetensors", "model.layers.31.post_attention_layernorm.weight": "model-00011-of-00030.safetensors", "model.layers.31.self_attn.k_proj.weight": "model-00011-of-00030.safetensors", "model.layers.31.self_attn.o_proj.weight": "model-00011-of-00030.safetensors", "model.layers.31.self_attn.q_proj.weight": "model-00011-of-00030.safetensors", "model.layers.31.self_attn.v_proj.weight": "model-00011-of-00030.safetensors", "model.layers.32.input_layernorm.weight": "model-00011-of-00030.safetensors", "model.layers.32.mlp.down_proj.weight": "model-00011-of-00030.safetensors", "model.layers.32.mlp.gate_proj.weight": "model-00011-of-00030.safetensors", "model.layers.32.mlp.up_proj.weight": "model-00011-of-00030.safetensors", "model.layers.32.post_attention_layernorm.weight": "model-00011-of-00030.safetensors", "model.layers.32.self_attn.k_proj.weight": "model-00011-of-00030.safetensors", "model.layers.32.self_attn.o_proj.weight": "model-00011-of-00030.safetensors", "model.layers.32.self_attn.q_proj.weight": "model-00011-of-00030.safetensors", "model.layers.32.self_attn.v_proj.weight": "model-00011-of-00030.safetensors", "model.layers.33.input_layernorm.weight": "model-00011-of-00030.safetensors", "model.layers.33.mlp.down_proj.weight": "model-00011-of-00030.safetensors", "model.layers.33.mlp.gate_proj.weight": "model-00011-of-00030.safetensors", "model.layers.33.mlp.up_proj.weight": "model-00011-of-00030.safetensors", "model.layers.33.post_attention_layernorm.weight": "model-00011-of-00030.safetensors", "model.layers.33.self_attn.k_proj.weight": "model-00011-of-00030.safetensors", "model.layers.33.self_attn.o_proj.weight": "model-00011-of-00030.safetensors", "model.layers.33.self_attn.q_proj.weight": "model-00011-of-00030.safetensors", "model.layers.33.self_attn.v_proj.weight": "model-00011-of-00030.safetensors", "model.layers.34.input_layernorm.weight": "model-00011-of-00030.safetensors", "model.layers.34.mlp.down_proj.weight": "model-00011-of-00030.safetensors", "model.layers.34.mlp.gate_proj.weight": "model-00012-of-00030.safetensors", "model.layers.34.mlp.up_proj.weight": "model-00012-of-00030.safetensors", "model.layers.34.post_attention_layernorm.weight": "model-00012-of-00030.safetensors", "model.layers.34.self_attn.k_proj.weight": "model-00012-of-00030.safetensors", "model.layers.34.self_attn.o_proj.weight": "model-00012-of-00030.safetensors", "model.layers.34.self_attn.q_proj.weight": "model-00012-of-00030.safetensors", "model.layers.34.self_attn.v_proj.weight": "model-00012-of-00030.safetensors", "model.layers.35.input_layernorm.weight": "model-00012-of-00030.safetensors", "model.layers.35.mlp.down_proj.weight": "model-00012-of-00030.safetensors", "model.layers.35.mlp.gate_proj.weight": "model-00012-of-00030.safetensors", "model.layers.35.mlp.up_proj.weight": "model-00012-of-00030.safetensors", "model.layers.35.post_attention_layernorm.weight": "model-00012-of-00030.safetensors", "model.layers.35.self_attn.k_proj.weight": "model-00012-of-00030.safetensors", "model.layers.35.self_attn.o_proj.weight": 
"model-00012-of-00030.safetensors", "model.layers.35.self_attn.q_proj.weight": "model-00012-of-00030.safetensors", "model.layers.35.self_attn.v_proj.weight": "model-00012-of-00030.safetensors", "model.layers.36.input_layernorm.weight": "model-00012-of-00030.safetensors", "model.layers.36.mlp.down_proj.weight": "model-00012-of-00030.safetensors", "model.layers.36.mlp.gate_proj.weight": "model-00012-of-00030.safetensors", "model.layers.36.mlp.up_proj.weight": "model-00012-of-00030.safetensors", "model.layers.36.post_attention_layernorm.weight": "model-00012-of-00030.safetensors", "model.layers.36.self_attn.k_proj.weight": "model-00012-of-00030.safetensors", "model.layers.36.self_attn.o_proj.weight": "model-00012-of-00030.safetensors", "model.layers.36.self_attn.q_proj.weight": "model-00012-of-00030.safetensors", "model.layers.36.self_attn.v_proj.weight": "model-00012-of-00030.safetensors", "model.layers.37.input_layernorm.weight": "model-00012-of-00030.safetensors", "model.layers.37.mlp.down_proj.weight": "model-00013-of-00030.safetensors", "model.layers.37.mlp.gate_proj.weight": "model-00013-of-00030.safetensors", "model.layers.37.mlp.up_proj.weight": "model-00013-of-00030.safetensors", "model.layers.37.post_attention_layernorm.weight": "model-00013-of-00030.safetensors", "model.layers.37.self_attn.k_proj.weight": "model-00013-of-00030.safetensors", "model.layers.37.self_attn.o_proj.weight": "model-00013-of-00030.safetensors", "model.layers.37.self_attn.q_proj.weight": "model-00013-of-00030.safetensors", "model.layers.37.self_attn.v_proj.weight": "model-00013-of-00030.safetensors", "model.layers.38.input_layernorm.weight": "model-00013-of-00030.safetensors", "model.layers.38.mlp.down_proj.weight": "model-00013-of-00030.safetensors", "model.layers.38.mlp.gate_proj.weight": "model-00013-of-00030.safetensors", "model.layers.38.mlp.up_proj.weight": "model-00013-of-00030.safetensors", "model.layers.38.post_attention_layernorm.weight": "model-00013-of-00030.safetensors", "model.layers.38.self_attn.k_proj.weight": "model-00013-of-00030.safetensors", "model.layers.38.self_attn.o_proj.weight": "model-00013-of-00030.safetensors", "model.layers.38.self_attn.q_proj.weight": "model-00013-of-00030.safetensors", "model.layers.38.self_attn.v_proj.weight": "model-00013-of-00030.safetensors", "model.layers.39.input_layernorm.weight": "model-00013-of-00030.safetensors", "model.layers.39.mlp.down_proj.weight": "model-00013-of-00030.safetensors", "model.layers.39.mlp.gate_proj.weight": "model-00013-of-00030.safetensors", "model.layers.39.mlp.up_proj.weight": "model-00013-of-00030.safetensors", "model.layers.39.post_attention_layernorm.weight": "model-00013-of-00030.safetensors", "model.layers.39.self_attn.k_proj.weight": "model-00013-of-00030.safetensors", "model.layers.39.self_attn.o_proj.weight": "model-00013-of-00030.safetensors", "model.layers.39.self_attn.q_proj.weight": "model-00014-of-00030.safetensors", "model.layers.39.self_attn.v_proj.weight": "model-00014-of-00030.safetensors", "model.layers.4.input_layernorm.weight": "model-00014-of-00030.safetensors", "model.layers.4.mlp.down_proj.weight": "model-00014-of-00030.safetensors", "model.layers.4.mlp.gate_proj.weight": "model-00014-of-00030.safetensors", "model.layers.4.mlp.up_proj.weight": "model-00014-of-00030.safetensors", "model.layers.4.post_attention_layernorm.weight": "model-00014-of-00030.safetensors", "model.layers.4.self_attn.k_proj.weight": "model-00014-of-00030.safetensors", "model.layers.4.self_attn.o_proj.weight": 
"model-00014-of-00030.safetensors", "model.layers.4.self_attn.q_proj.weight": "model-00014-of-00030.safetensors", "model.layers.4.self_attn.v_proj.weight": "model-00014-of-00030.safetensors", "model.layers.40.input_layernorm.weight": "model-00014-of-00030.safetensors", "model.layers.40.mlp.down_proj.weight": "model-00014-of-00030.safetensors", "model.layers.40.mlp.gate_proj.weight": "model-00014-of-00030.safetensors", "model.layers.40.mlp.up_proj.weight": "model-00014-of-00030.safetensors", "model.layers.40.post_attention_layernorm.weight": "model-00014-of-00030.safetensors", "model.layers.40.self_attn.k_proj.weight": "model-00014-of-00030.safetensors", "model.layers.40.self_attn.o_proj.weight": "model-00014-of-00030.safetensors", "model.layers.40.self_attn.q_proj.weight": "model-00014-of-00030.safetensors", "model.layers.40.self_attn.v_proj.weight": "model-00014-of-00030.safetensors", "model.layers.41.input_layernorm.weight": "model-00014-of-00030.safetensors", "model.layers.41.mlp.down_proj.weight": "model-00014-of-00030.safetensors", "model.layers.41.mlp.gate_proj.weight": "model-00014-of-00030.safetensors", "model.layers.41.mlp.up_proj.weight": "model-00014-of-00030.safetensors", "model.layers.41.post_attention_layernorm.weight": "model-00014-of-00030.safetensors", "model.layers.41.self_attn.k_proj.weight": "model-00014-of-00030.safetensors", "model.layers.41.self_attn.o_proj.weight": "model-00015-of-00030.safetensors", "model.layers.41.self_attn.q_proj.weight": "model-00015-of-00030.safetensors", "model.layers.41.self_attn.v_proj.weight": "model-00015-of-00030.safetensors", "model.layers.42.input_layernorm.weight": "model-00015-of-00030.safetensors", "model.layers.42.mlp.down_proj.weight": "model-00015-of-00030.safetensors", "model.layers.42.mlp.gate_proj.weight": "model-00015-of-00030.safetensors", "model.layers.42.mlp.up_proj.weight": "model-00015-of-00030.safetensors", "model.layers.42.post_attention_layernorm.weight": "model-00015-of-00030.safetensors", "model.layers.42.self_attn.k_proj.weight": "model-00015-of-00030.safetensors", "model.layers.42.self_attn.o_proj.weight": "model-00015-of-00030.safetensors", "model.layers.42.self_attn.q_proj.weight": "model-00015-of-00030.safetensors", "model.layers.42.self_attn.v_proj.weight": "model-00015-of-00030.safetensors", "model.layers.43.input_layernorm.weight": "model-00015-of-00030.safetensors", "model.layers.43.mlp.down_proj.weight": "model-00015-of-00030.safetensors", "model.layers.43.mlp.gate_proj.weight": "model-00015-of-00030.safetensors", "model.layers.43.mlp.up_proj.weight": "model-00015-of-00030.safetensors", "model.layers.43.post_attention_layernorm.weight": "model-00015-of-00030.safetensors", "model.layers.43.self_attn.k_proj.weight": "model-00015-of-00030.safetensors", "model.layers.43.self_attn.o_proj.weight": "model-00015-of-00030.safetensors", "model.layers.43.self_attn.q_proj.weight": "model-00015-of-00030.safetensors", "model.layers.43.self_attn.v_proj.weight": "model-00015-of-00030.safetensors", "model.layers.44.input_layernorm.weight": "model-00015-of-00030.safetensors", "model.layers.44.mlp.down_proj.weight": "model-00015-of-00030.safetensors", "model.layers.44.mlp.gate_proj.weight": "model-00015-of-00030.safetensors", "model.layers.44.mlp.up_proj.weight": "model-00016-of-00030.safetensors", "model.layers.44.post_attention_layernorm.weight": "model-00016-of-00030.safetensors", "model.layers.44.self_attn.k_proj.weight": "model-00016-of-00030.safetensors", "model.layers.44.self_attn.o_proj.weight": 
"model-00016-of-00030.safetensors", "model.layers.44.self_attn.q_proj.weight": "model-00016-of-00030.safetensors", "model.layers.44.self_attn.v_proj.weight": "model-00016-of-00030.safetensors", "model.layers.45.input_layernorm.weight": "model-00016-of-00030.safetensors", "model.layers.45.mlp.down_proj.weight": "model-00016-of-00030.safetensors", "model.layers.45.mlp.gate_proj.weight": "model-00016-of-00030.safetensors", "model.layers.45.mlp.up_proj.weight": "model-00016-of-00030.safetensors", "model.layers.45.post_attention_layernorm.weight": "model-00016-of-00030.safetensors", "model.layers.45.self_attn.k_proj.weight": "model-00016-of-00030.safetensors", "model.layers.45.self_attn.o_proj.weight": "model-00016-of-00030.safetensors", "model.layers.45.self_attn.q_proj.weight": "model-00016-of-00030.safetensors", "model.layers.45.self_attn.v_proj.weight": "model-00016-of-00030.safetensors", "model.layers.46.input_layernorm.weight": "model-00016-of-00030.safetensors", "model.layers.46.mlp.down_proj.weight": "model-00016-of-00030.safetensors", "model.layers.46.mlp.gate_proj.weight": "model-00016-of-00030.safetensors", "model.layers.46.mlp.up_proj.weight": "model-00016-of-00030.safetensors", "model.layers.46.post_attention_layernorm.weight": "model-00016-of-00030.safetensors", "model.layers.46.self_attn.k_proj.weight": "model-00016-of-00030.safetensors", "model.layers.46.self_attn.o_proj.weight": "model-00016-of-00030.safetensors", "model.layers.46.self_attn.q_proj.weight": "model-00016-of-00030.safetensors", "model.layers.46.self_attn.v_proj.weight": "model-00016-of-00030.safetensors", "model.layers.47.input_layernorm.weight": "model-00016-of-00030.safetensors", "model.layers.47.mlp.down_proj.weight": "model-00016-of-00030.safetensors", "model.layers.47.mlp.gate_proj.weight": "model-00017-of-00030.safetensors", "model.layers.47.mlp.up_proj.weight": "model-00017-of-00030.safetensors", "model.layers.47.post_attention_layernorm.weight": "model-00017-of-00030.safetensors", "model.layers.47.self_attn.k_proj.weight": "model-00017-of-00030.safetensors", "model.layers.47.self_attn.o_proj.weight": "model-00017-of-00030.safetensors", "model.layers.47.self_attn.q_proj.weight": "model-00017-of-00030.safetensors", "model.layers.47.self_attn.v_proj.weight": "model-00017-of-00030.safetensors", "model.layers.48.input_layernorm.weight": "model-00017-of-00030.safetensors", "model.layers.48.mlp.down_proj.weight": "model-00017-of-00030.safetensors", "model.layers.48.mlp.gate_proj.weight": "model-00017-of-00030.safetensors", "model.layers.48.mlp.up_proj.weight": "model-00017-of-00030.safetensors", "model.layers.48.post_attention_layernorm.weight": "model-00017-of-00030.safetensors", "model.layers.48.self_attn.k_proj.weight": "model-00017-of-00030.safetensors", "model.layers.48.self_attn.o_proj.weight": "model-00017-of-00030.safetensors", "model.layers.48.self_attn.q_proj.weight": "model-00017-of-00030.safetensors", "model.layers.48.self_attn.v_proj.weight": "model-00017-of-00030.safetensors", "model.layers.49.input_layernorm.weight": "model-00017-of-00030.safetensors", "model.layers.49.mlp.down_proj.weight": "model-00017-of-00030.safetensors", "model.layers.49.mlp.gate_proj.weight": "model-00017-of-00030.safetensors", "model.layers.49.mlp.up_proj.weight": "model-00017-of-00030.safetensors", "model.layers.49.post_attention_layernorm.weight": "model-00017-of-00030.safetensors", "model.layers.49.self_attn.k_proj.weight": "model-00017-of-00030.safetensors", "model.layers.49.self_attn.o_proj.weight": 
"model-00017-of-00030.safetensors", "model.layers.49.self_attn.q_proj.weight": "model-00017-of-00030.safetensors", "model.layers.49.self_attn.v_proj.weight": "model-00017-of-00030.safetensors", "model.layers.5.input_layernorm.weight": "model-00017-of-00030.safetensors", "model.layers.5.mlp.down_proj.weight": "model-00018-of-00030.safetensors", "model.layers.5.mlp.gate_proj.weight": "model-00018-of-00030.safetensors", "model.layers.5.mlp.up_proj.weight": "model-00018-of-00030.safetensors", "model.layers.5.post_attention_layernorm.weight": "model-00018-of-00030.safetensors", "model.layers.5.self_attn.k_proj.weight": "model-00018-of-00030.safetensors", "model.layers.5.self_attn.o_proj.weight": "model-00018-of-00030.safetensors", "model.layers.5.self_attn.q_proj.weight": "model-00018-of-00030.safetensors", "model.layers.5.self_attn.v_proj.weight": "model-00018-of-00030.safetensors", "model.layers.50.input_layernorm.weight": "model-00018-of-00030.safetensors", "model.layers.50.mlp.down_proj.weight": "model-00018-of-00030.safetensors", "model.layers.50.mlp.gate_proj.weight": "model-00018-of-00030.safetensors", "model.layers.50.mlp.up_proj.weight": "model-00018-of-00030.safetensors", "model.layers.50.post_attention_layernorm.weight": "model-00018-of-00030.safetensors", "model.layers.50.self_attn.k_proj.weight": "model-00018-of-00030.safetensors", "model.layers.50.self_attn.o_proj.weight": "model-00018-of-00030.safetensors", "model.layers.50.self_attn.q_proj.weight": "model-00018-of-00030.safetensors", "model.layers.50.self_attn.v_proj.weight": "model-00018-of-00030.safetensors", "model.layers.51.input_layernorm.weight": "model-00018-of-00030.safetensors", "model.layers.51.mlp.down_proj.weight": "model-00018-of-00030.safetensors", "model.layers.51.mlp.gate_proj.weight": "model-00018-of-00030.safetensors", "model.layers.51.mlp.up_proj.weight": "model-00018-of-00030.safetensors", "model.layers.51.post_attention_layernorm.weight": "model-00018-of-00030.safetensors", "model.layers.51.self_attn.k_proj.weight": "model-00018-of-00030.safetensors", "model.layers.51.self_attn.o_proj.weight": "model-00018-of-00030.safetensors", "model.layers.51.self_attn.q_proj.weight": "model-00019-of-00030.safetensors", "model.layers.51.self_attn.v_proj.weight": "model-00019-of-00030.safetensors", "model.layers.52.input_layernorm.weight": "model-00019-of-00030.safetensors", "model.layers.52.mlp.down_proj.weight": "model-00019-of-00030.safetensors", "model.layers.52.mlp.gate_proj.weight": "model-00019-of-00030.safetensors", "model.layers.52.mlp.up_proj.weight": "model-00019-of-00030.safetensors", "model.layers.52.post_attention_layernorm.weight": "model-00019-of-00030.safetensors", "model.layers.52.self_attn.k_proj.weight": "model-00019-of-00030.safetensors", "model.layers.52.self_attn.o_proj.weight": "model-00019-of-00030.safetensors", "model.layers.52.self_attn.q_proj.weight": "model-00019-of-00030.safetensors", "model.layers.52.self_attn.v_proj.weight": "model-00019-of-00030.safetensors", "model.layers.53.input_layernorm.weight": "model-00019-of-00030.safetensors", "model.layers.53.mlp.down_proj.weight": "model-00019-of-00030.safetensors", "model.layers.53.mlp.gate_proj.weight": "model-00019-of-00030.safetensors", "model.layers.53.mlp.up_proj.weight": "model-00019-of-00030.safetensors", "model.layers.53.post_attention_layernorm.weight": "model-00019-of-00030.safetensors", "model.layers.53.self_attn.k_proj.weight": "model-00019-of-00030.safetensors", "model.layers.53.self_attn.o_proj.weight": 
"model-00019-of-00030.safetensors", "model.layers.53.self_attn.q_proj.weight": "model-00019-of-00030.safetensors", "model.layers.53.self_attn.v_proj.weight": "model-00019-of-00030.safetensors", "model.layers.54.input_layernorm.weight": "model-00019-of-00030.safetensors", "model.layers.54.mlp.down_proj.weight": "model-00019-of-00030.safetensors", "model.layers.54.mlp.gate_proj.weight": "model-00019-of-00030.safetensors", "model.layers.54.mlp.up_proj.weight": "model-00019-of-00030.safetensors", "model.layers.54.post_attention_layernorm.weight": "model-00019-of-00030.safetensors", "model.layers.54.self_attn.k_proj.weight": "model-00019-of-00030.safetensors", "model.layers.54.self_attn.o_proj.weight": "model-00020-of-00030.safetensors", "model.layers.54.self_attn.q_proj.weight": "model-00020-of-00030.safetensors", "model.layers.54.self_attn.v_proj.weight": "model-00020-of-00030.safetensors", "model.layers.55.input_layernorm.weight": "model-00020-of-00030.safetensors", "model.layers.55.mlp.down_proj.weight": "model-00020-of-00030.safetensors", "model.layers.55.mlp.gate_proj.weight": "model-00020-of-00030.safetensors", "model.layers.55.mlp.up_proj.weight": "model-00020-of-00030.safetensors", "model.layers.55.post_attention_layernorm.weight": "model-00020-of-00030.safetensors", "model.layers.55.self_attn.k_proj.weight": "model-00020-of-00030.safetensors", "model.layers.55.self_attn.o_proj.weight": "model-00020-of-00030.safetensors", "model.layers.55.self_attn.q_proj.weight": "model-00020-of-00030.safetensors", "model.layers.55.self_attn.v_proj.weight": "model-00020-of-00030.safetensors", "model.layers.56.input_layernorm.weight": "model-00020-of-00030.safetensors", "model.layers.56.mlp.down_proj.weight": "model-00020-of-00030.safetensors", "model.layers.56.mlp.gate_proj.weight": "model-00020-of-00030.safetensors", "model.layers.56.mlp.up_proj.weight": "model-00020-of-00030.safetensors", "model.layers.56.post_attention_layernorm.weight": "model-00020-of-00030.safetensors", "model.layers.56.self_attn.k_proj.weight": "model-00020-of-00030.safetensors", "model.layers.56.self_attn.o_proj.weight": "model-00020-of-00030.safetensors", "model.layers.56.self_attn.q_proj.weight": "model-00020-of-00030.safetensors", "model.layers.56.self_attn.v_proj.weight": "model-00020-of-00030.safetensors", "model.layers.57.input_layernorm.weight": "model-00020-of-00030.safetensors", "model.layers.57.mlp.down_proj.weight": "model-00020-of-00030.safetensors", "model.layers.57.mlp.gate_proj.weight": "model-00020-of-00030.safetensors", "model.layers.57.mlp.up_proj.weight": "model-00021-of-00030.safetensors", "model.layers.57.post_attention_layernorm.weight": "model-00021-of-00030.safetensors", "model.layers.57.self_attn.k_proj.weight": "model-00021-of-00030.safetensors", "model.layers.57.self_attn.o_proj.weight": "model-00021-of-00030.safetensors", "model.layers.57.self_attn.q_proj.weight": "model-00021-of-00030.safetensors", "model.layers.57.self_attn.v_proj.weight": "model-00021-of-00030.safetensors", "model.layers.58.input_layernorm.weight": "model-00021-of-00030.safetensors", "model.layers.58.mlp.down_proj.weight": "model-00021-of-00030.safetensors", "model.layers.58.mlp.gate_proj.weight": "model-00021-of-00030.safetensors", "model.layers.58.mlp.up_proj.weight": "model-00021-of-00030.safetensors", "model.layers.58.post_attention_layernorm.weight": "model-00021-of-00030.safetensors", "model.layers.58.self_attn.k_proj.weight": "model-00021-of-00030.safetensors", "model.layers.58.self_attn.o_proj.weight": 
"model-00021-of-00030.safetensors", "model.layers.58.self_attn.q_proj.weight": "model-00021-of-00030.safetensors", "model.layers.58.self_attn.v_proj.weight": "model-00021-of-00030.safetensors", "model.layers.59.input_layernorm.weight": "model-00021-of-00030.safetensors", "model.layers.59.mlp.down_proj.weight": "model-00021-of-00030.safetensors", "model.layers.59.mlp.gate_proj.weight": "model-00021-of-00030.safetensors", "model.layers.59.mlp.up_proj.weight": "model-00021-of-00030.safetensors", "model.layers.59.post_attention_layernorm.weight": "model-00021-of-00030.safetensors", "model.layers.59.self_attn.k_proj.weight": "model-00021-of-00030.safetensors", "model.layers.59.self_attn.o_proj.weight": "model-00021-of-00030.safetensors", "model.layers.59.self_attn.q_proj.weight": "model-00021-of-00030.safetensors", "model.layers.59.self_attn.v_proj.weight": "model-00021-of-00030.safetensors", "model.layers.6.input_layernorm.weight": "model-00021-of-00030.safetensors", "model.layers.6.mlp.down_proj.weight": "model-00021-of-00030.safetensors", "model.layers.6.mlp.gate_proj.weight": "model-00022-of-00030.safetensors", "model.layers.6.mlp.up_proj.weight": "model-00022-of-00030.safetensors", "model.layers.6.post_attention_layernorm.weight": "model-00022-of-00030.safetensors", "model.layers.6.self_attn.k_proj.weight": "model-00022-of-00030.safetensors", "model.layers.6.self_attn.o_proj.weight": "model-00022-of-00030.safetensors", "model.layers.6.self_attn.q_proj.weight": "model-00022-of-00030.safetensors", "model.layers.6.self_attn.v_proj.weight": "model-00022-of-00030.safetensors", "model.layers.60.input_layernorm.weight": "model-00022-of-00030.safetensors", "model.layers.60.mlp.down_proj.weight": "model-00022-of-00030.safetensors", "model.layers.60.mlp.gate_proj.weight": "model-00022-of-00030.safetensors", "model.layers.60.mlp.up_proj.weight": "model-00022-of-00030.safetensors", "model.layers.60.post_attention_layernorm.weight": "model-00022-of-00030.safetensors", "model.layers.60.self_attn.k_proj.weight": "model-00022-of-00030.safetensors", "model.layers.60.self_attn.o_proj.weight": "model-00022-of-00030.safetensors", "model.layers.60.self_attn.q_proj.weight": "model-00022-of-00030.safetensors", "model.layers.60.self_attn.v_proj.weight": "model-00022-of-00030.safetensors", "model.layers.61.input_layernorm.weight": "model-00022-of-00030.safetensors", "model.layers.61.mlp.down_proj.weight": "model-00022-of-00030.safetensors", "model.layers.61.mlp.gate_proj.weight": "model-00022-of-00030.safetensors", "model.layers.61.mlp.up_proj.weight": "model-00022-of-00030.safetensors", "model.layers.61.post_attention_layernorm.weight": "model-00022-of-00030.safetensors", "model.layers.61.self_attn.k_proj.weight": "model-00022-of-00030.safetensors", "model.layers.61.self_attn.o_proj.weight": "model-00022-of-00030.safetensors", "model.layers.61.self_attn.q_proj.weight": "model-00022-of-00030.safetensors", "model.layers.61.self_attn.v_proj.weight": "model-00022-of-00030.safetensors", "model.layers.62.input_layernorm.weight": "model-00022-of-00030.safetensors", "model.layers.62.mlp.down_proj.weight": "model-00023-of-00030.safetensors", "model.layers.62.mlp.gate_proj.weight": "model-00023-of-00030.safetensors", "model.layers.62.mlp.up_proj.weight": "model-00023-of-00030.safetensors", "model.layers.62.post_attention_layernorm.weight": "model-00023-of-00030.safetensors", "model.layers.62.self_attn.k_proj.weight": "model-00023-of-00030.safetensors", "model.layers.62.self_attn.o_proj.weight": 
"model-00023-of-00030.safetensors", "model.layers.62.self_attn.q_proj.weight": "model-00023-of-00030.safetensors", "model.layers.62.self_attn.v_proj.weight": "model-00023-of-00030.safetensors", "model.layers.63.input_layernorm.weight": "model-00023-of-00030.safetensors", "model.layers.63.mlp.down_proj.weight": "model-00023-of-00030.safetensors", "model.layers.63.mlp.gate_proj.weight": "model-00023-of-00030.safetensors", "model.layers.63.mlp.up_proj.weight": "model-00023-of-00030.safetensors", "model.layers.63.post_attention_layernorm.weight": "model-00023-of-00030.safetensors", "model.layers.63.self_attn.k_proj.weight": "model-00023-of-00030.safetensors", "model.layers.63.self_attn.o_proj.weight": "model-00023-of-00030.safetensors", "model.layers.63.self_attn.q_proj.weight": "model-00023-of-00030.safetensors", "model.layers.63.self_attn.v_proj.weight": "model-00023-of-00030.safetensors", "model.layers.64.input_layernorm.weight": "model-00023-of-00030.safetensors", "model.layers.64.mlp.down_proj.weight": "model-00023-of-00030.safetensors", "model.layers.64.mlp.gate_proj.weight": "model-00023-of-00030.safetensors", "model.layers.64.mlp.up_proj.weight": "model-00023-of-00030.safetensors", "model.layers.64.post_attention_layernorm.weight": "model-00023-of-00030.safetensors", "model.layers.64.self_attn.k_proj.weight": "model-00023-of-00030.safetensors", "model.layers.64.self_attn.o_proj.weight": "model-00023-of-00030.safetensors", "model.layers.64.self_attn.q_proj.weight": "model-00024-of-00030.safetensors", "model.layers.64.self_attn.v_proj.weight": "model-00024-of-00030.safetensors", "model.layers.65.input_layernorm.weight": "model-00024-of-00030.safetensors", "model.layers.65.mlp.down_proj.weight": "model-00024-of-00030.safetensors", "model.layers.65.mlp.gate_proj.weight": "model-00024-of-00030.safetensors", "model.layers.65.mlp.up_proj.weight": "model-00024-of-00030.safetensors", "model.layers.65.post_attention_layernorm.weight": "model-00024-of-00030.safetensors", "model.layers.65.self_attn.k_proj.weight": "model-00024-of-00030.safetensors", "model.layers.65.self_attn.o_proj.weight": "model-00024-of-00030.safetensors", "model.layers.65.self_attn.q_proj.weight": "model-00024-of-00030.safetensors", "model.layers.65.self_attn.v_proj.weight": "model-00024-of-00030.safetensors", "model.layers.66.input_layernorm.weight": "model-00024-of-00030.safetensors", "model.layers.66.mlp.down_proj.weight": "model-00024-of-00030.safetensors", "model.layers.66.mlp.gate_proj.weight": "model-00024-of-00030.safetensors", "model.layers.66.mlp.up_proj.weight": "model-00024-of-00030.safetensors", "model.layers.66.post_attention_layernorm.weight": "model-00024-of-00030.safetensors", "model.layers.66.self_attn.k_proj.weight": "model-00024-of-00030.safetensors", "model.layers.66.self_attn.o_proj.weight": "model-00024-of-00030.safetensors", "model.layers.66.self_attn.q_proj.weight": "model-00024-of-00030.safetensors", "model.layers.66.self_attn.v_proj.weight": "model-00024-of-00030.safetensors", "model.layers.67.input_layernorm.weight": "model-00024-of-00030.safetensors", "model.layers.67.mlp.down_proj.weight": "model-00024-of-00030.safetensors", "model.layers.67.mlp.gate_proj.weight": "model-00024-of-00030.safetensors", "model.layers.67.mlp.up_proj.weight": "model-00024-of-00030.safetensors", "model.layers.67.post_attention_layernorm.weight": "model-00024-of-00030.safetensors", "model.layers.67.self_attn.k_proj.weight": "model-00024-of-00030.safetensors", "model.layers.67.self_attn.o_proj.weight": 
"model-00025-of-00030.safetensors", "model.layers.67.self_attn.q_proj.weight": "model-00025-of-00030.safetensors", "model.layers.67.self_attn.v_proj.weight": "model-00025-of-00030.safetensors", "model.layers.68.input_layernorm.weight": "model-00025-of-00030.safetensors", "model.layers.68.mlp.down_proj.weight": "model-00025-of-00030.safetensors", "model.layers.68.mlp.gate_proj.weight": "model-00025-of-00030.safetensors", "model.layers.68.mlp.up_proj.weight": "model-00025-of-00030.safetensors", "model.layers.68.post_attention_layernorm.weight": "model-00025-of-00030.safetensors", "model.layers.68.self_attn.k_proj.weight": "model-00025-of-00030.safetensors", "model.layers.68.self_attn.o_proj.weight": "model-00025-of-00030.safetensors", "model.layers.68.self_attn.q_proj.weight": "model-00025-of-00030.safetensors", "model.layers.68.self_attn.v_proj.weight": "model-00025-of-00030.safetensors", "model.layers.69.input_layernorm.weight": "model-00025-of-00030.safetensors", "model.layers.69.mlp.down_proj.weight": "model-00025-of-00030.safetensors", "model.layers.69.mlp.gate_proj.weight": "model-00025-of-00030.safetensors", "model.layers.69.mlp.up_proj.weight": "model-00025-of-00030.safetensors", "model.layers.69.post_attention_layernorm.weight": "model-00025-of-00030.safetensors", "model.layers.69.self_attn.k_proj.weight": "model-00025-of-00030.safetensors", "model.layers.69.self_attn.o_proj.weight": "model-00025-of-00030.safetensors", "model.layers.69.self_attn.q_proj.weight": "model-00025-of-00030.safetensors", "model.layers.69.self_attn.v_proj.weight": "model-00025-of-00030.safetensors", "model.layers.7.input_layernorm.weight": "model-00025-of-00030.safetensors", "model.layers.7.mlp.down_proj.weight": "model-00025-of-00030.safetensors", "model.layers.7.mlp.gate_proj.weight": "model-00025-of-00030.safetensors", "model.layers.7.mlp.up_proj.weight": "model-00026-of-00030.safetensors", "model.layers.7.post_attention_layernorm.weight": "model-00026-of-00030.safetensors", "model.layers.7.self_attn.k_proj.weight": "model-00026-of-00030.safetensors", "model.layers.7.self_attn.o_proj.weight": "model-00026-of-00030.safetensors", "model.layers.7.self_attn.q_proj.weight": "model-00026-of-00030.safetensors", "model.layers.7.self_attn.v_proj.weight": "model-00026-of-00030.safetensors", "model.layers.70.input_layernorm.weight": "model-00026-of-00030.safetensors", "model.layers.70.mlp.down_proj.weight": "model-00026-of-00030.safetensors", "model.layers.70.mlp.gate_proj.weight": "model-00026-of-00030.safetensors", "model.layers.70.mlp.up_proj.weight": "model-00026-of-00030.safetensors", "model.layers.70.post_attention_layernorm.weight": "model-00026-of-00030.safetensors", "model.layers.70.self_attn.k_proj.weight": "model-00026-of-00030.safetensors", "model.layers.70.self_attn.o_proj.weight": "model-00026-of-00030.safetensors", "model.layers.70.self_attn.q_proj.weight": "model-00026-of-00030.safetensors", "model.layers.70.self_attn.v_proj.weight": "model-00026-of-00030.safetensors", "model.layers.71.input_layernorm.weight": "model-00026-of-00030.safetensors", "model.layers.71.mlp.down_proj.weight": "model-00026-of-00030.safetensors", "model.layers.71.mlp.gate_proj.weight": "model-00026-of-00030.safetensors", "model.layers.71.mlp.up_proj.weight": "model-00026-of-00030.safetensors", "model.layers.71.post_attention_layernorm.weight": "model-00026-of-00030.safetensors", "model.layers.71.self_attn.k_proj.weight": "model-00026-of-00030.safetensors", "model.layers.71.self_attn.o_proj.weight": 
"model-00026-of-00030.safetensors", "model.layers.71.self_attn.q_proj.weight": "model-00026-of-00030.safetensors", "model.layers.71.self_attn.v_proj.weight": "model-00026-of-00030.safetensors", "model.layers.72.input_layernorm.weight": "model-00026-of-00030.safetensors", "model.layers.72.mlp.down_proj.weight": "model-00026-of-00030.safetensors", "model.layers.72.mlp.gate_proj.weight": "model-00027-of-00030.safetensors", "model.layers.72.mlp.up_proj.weight": "model-00027-of-00030.safetensors", "model.layers.72.post_attention_layernorm.weight": "model-00027-of-00030.safetensors", "model.layers.72.self_attn.k_proj.weight": "model-00027-of-00030.safetensors", "model.layers.72.self_attn.o_proj.weight": "model-00027-of-00030.safetensors", "model.layers.72.self_attn.q_proj.weight": "model-00027-of-00030.safetensors", "model.layers.72.self_attn.v_proj.weight": "model-00027-of-00030.safetensors", "model.layers.73.input_layernorm.weight": "model-00027-of-00030.safetensors", "model.layers.73.mlp.down_proj.weight": "model-00027-of-00030.safetensors", "model.layers.73.mlp.gate_proj.weight": "model-00027-of-00030.safetensors", "model.layers.73.mlp.up_proj.weight": "model-00027-of-00030.safetensors", "model.layers.73.post_attention_layernorm.weight": "model-00027-of-00030.safetensors", "model.layers.73.self_attn.k_proj.weight": "model-00027-of-00030.safetensors", "model.layers.73.self_attn.o_proj.weight": "model-00027-of-00030.safetensors", "model.layers.73.self_attn.q_proj.weight": "model-00027-of-00030.safetensors", "model.layers.73.self_attn.v_proj.weight": "model-00027-of-00030.safetensors", "model.layers.74.input_layernorm.weight": "model-00027-of-00030.safetensors", "model.layers.74.mlp.down_proj.weight": "model-00027-of-00030.safetensors", "model.layers.74.mlp.gate_proj.weight": "model-00027-of-00030.safetensors", "model.layers.74.mlp.up_proj.weight": "model-00027-of-00030.safetensors", "model.layers.74.post_attention_layernorm.weight": "model-00027-of-00030.safetensors", "model.layers.74.self_attn.k_proj.weight": "model-00027-of-00030.safetensors", "model.layers.74.self_attn.o_proj.weight": "model-00027-of-00030.safetensors", "model.layers.74.self_attn.q_proj.weight": "model-00027-of-00030.safetensors", "model.layers.74.self_attn.v_proj.weight": "model-00027-of-00030.safetensors", "model.layers.75.input_layernorm.weight": "model-00027-of-00030.safetensors", "model.layers.75.mlp.down_proj.weight": "model-00028-of-00030.safetensors", "model.layers.75.mlp.gate_proj.weight": "model-00028-of-00030.safetensors", "model.layers.75.mlp.up_proj.weight": "model-00028-of-00030.safetensors", "model.layers.75.post_attention_layernorm.weight": "model-00028-of-00030.safetensors", "model.layers.75.self_attn.k_proj.weight": "model-00028-of-00030.safetensors", "model.layers.75.self_attn.o_proj.weight": "model-00028-of-00030.safetensors", "model.layers.75.self_attn.q_proj.weight": "model-00028-of-00030.safetensors", "model.layers.75.self_attn.v_proj.weight": "model-00028-of-00030.safetensors", "model.layers.76.input_layernorm.weight": "model-00028-of-00030.safetensors", "model.layers.76.mlp.down_proj.weight": "model-00028-of-00030.safetensors", "model.layers.76.mlp.gate_proj.weight": "model-00028-of-00030.safetensors", "model.layers.76.mlp.up_proj.weight": "model-00028-of-00030.safetensors", "model.layers.76.post_attention_layernorm.weight": "model-00028-of-00030.safetensors", "model.layers.76.self_attn.k_proj.weight": "model-00028-of-00030.safetensors", "model.layers.76.self_attn.o_proj.weight": 
"model-00028-of-00030.safetensors", "model.layers.76.self_attn.q_proj.weight": "model-00028-of-00030.safetensors", "model.layers.76.self_attn.v_proj.weight": "model-00028-of-00030.safetensors", "model.layers.77.input_layernorm.weight": "model-00028-of-00030.safetensors", "model.layers.77.mlp.down_proj.weight": "model-00028-of-00030.safetensors", "model.layers.77.mlp.gate_proj.weight": "model-00028-of-00030.safetensors", "model.layers.77.mlp.up_proj.weight": "model-00028-of-00030.safetensors", "model.layers.77.post_attention_layernorm.weight": "model-00028-of-00030.safetensors", "model.layers.77.self_attn.k_proj.weight": "model-00028-of-00030.safetensors", "model.layers.77.self_attn.o_proj.weight": "model-00028-of-00030.safetensors", "model.layers.77.self_attn.q_proj.weight": "model-00029-of-00030.safetensors", "model.layers.77.self_attn.v_proj.weight": "model-00029-of-00030.safetensors", "model.layers.78.input_layernorm.weight": "model-00029-of-00030.safetensors", "model.layers.78.mlp.down_proj.weight": "model-00029-of-00030.safetensors", "model.layers.78.mlp.gate_proj.weight": "model-00029-of-00030.safetensors", "model.layers.78.mlp.up_proj.weight": "model-00029-of-00030.safetensors", "model.layers.78.post_attention_layernorm.weight": "model-00029-of-00030.safetensors", "model.layers.78.self_attn.k_proj.weight": "model-00029-of-00030.safetensors", "model.layers.78.self_attn.o_proj.weight": "model-00029-of-00030.safetensors", "model.layers.78.self_attn.q_proj.weight": "model-00029-of-00030.safetensors", "model.layers.78.self_attn.v_proj.weight": "model-00029-of-00030.safetensors", "model.layers.79.input_layernorm.weight": "model-00029-of-00030.safetensors", "model.layers.79.mlp.down_proj.weight": "model-00029-of-00030.safetensors", "model.layers.79.mlp.gate_proj.weight": "model-00029-of-00030.safetensors", "model.layers.79.mlp.up_proj.weight": "model-00029-of-00030.safetensors", "model.layers.79.post_attention_layernorm.weight": "model-00029-of-00030.safetensors", "model.layers.79.self_attn.k_proj.weight": "model-00029-of-00030.safetensors", "model.layers.79.self_attn.o_proj.weight": "model-00029-of-00030.safetensors", "model.layers.79.self_attn.q_proj.weight": "model-00029-of-00030.safetensors", "model.layers.79.self_attn.v_proj.weight": "model-00029-of-00030.safetensors", "model.layers.8.input_layernorm.weight": "model-00029-of-00030.safetensors", "model.layers.8.mlp.down_proj.weight": "model-00029-of-00030.safetensors", "model.layers.8.mlp.gate_proj.weight": "model-00029-of-00030.safetensors", "model.layers.8.mlp.up_proj.weight": "model-00029-of-00030.safetensors", "model.layers.8.post_attention_layernorm.weight": "model-00029-of-00030.safetensors", "model.layers.8.self_attn.k_proj.weight": "model-00029-of-00030.safetensors", "model.layers.8.self_attn.o_proj.weight": "model-00030-of-00030.safetensors", "model.layers.8.self_attn.q_proj.weight": "model-00030-of-00030.safetensors", "model.layers.8.self_attn.v_proj.weight": "model-00030-of-00030.safetensors", "model.layers.9.input_layernorm.weight": "model-00030-of-00030.safetensors", "model.layers.9.mlp.down_proj.weight": "model-00030-of-00030.safetensors", "model.layers.9.mlp.gate_proj.weight": "model-00030-of-00030.safetensors", "model.layers.9.mlp.up_proj.weight": "model-00030-of-00030.safetensors", "model.layers.9.post_attention_layernorm.weight": "model-00030-of-00030.safetensors", "model.layers.9.self_attn.k_proj.weight": "model-00030-of-00030.safetensors", "model.layers.9.self_attn.o_proj.weight": "model-00030-of-00030.safetensors", 
"model.layers.9.self_attn.q_proj.weight": "model-00030-of-00030.safetensors", "model.layers.9.self_attn.v_proj.weight": "model-00030-of-00030.safetensors", "model.norm.weight": "model-00030-of-00030.safetensors"}}
output-00001-of-00007.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:364b05fc66689676fdb8a6a7cd1eaab25d489af09bc6bd3012083a0331d5f383
size 8486224624

output-00002-of-00007.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:0b8fe11a5f6a006eaaaf4e503896c1c32cf302146814e91152968e593dac5e64
size 8460013544

output-00003-of-00007.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:fcd250ec488c07cf92f0eaf854abd1962ff30d52f49c3d5798e0aa9f4c907f1a
size 8543613272

output-00004-of-00007.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:1b0608b4408013885350eb612715f1eb3d2fc7d0694ab9d81c115d3dcc716ae5
size 8562094524

output-00005-of-00007.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:8218c8accdd163a960175b887961e3a97b89f6f8c1fb63fcdcd7871b0b01e3ec
size 8496745148

output-00006-of-00007.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:8ee982b188a645d1155cf4dcd75d997ca3e2b47b7188156fcb525b83295db3bf
size 8587302180

output-00007-of-00007.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:1fce13d39453a138dd117cb13dd51e19dbca27088537a42e5cf3c43d543aacee
size 7556554256
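Each output-0000N-of-00007.safetensors entry above is a Git LFS pointer rather than the tensor data itself: the three recorded fields are the pointer spec version, the SHA-256 of the real file, and its size in bytes. A small sketch of checking a downloaded shard against its pointer; the digest and size are copied from the shard-1 pointer above, while the local path is an assumption:

```python
import hashlib
import os

# Values copied from the output-00001-of-00007.safetensors pointer above.
EXPECTED_SHA256 = "364b05fc66689676fdb8a6a7cd1eaab25d489af09bc6bd3012083a0331d5f383"
EXPECTED_SIZE = 8486224624
SHARD_PATH = "output-00001-of-00007.safetensors"  # assumed local download path

def sha256_of(path: str, chunk_size: int = 1 << 20) -> str:
    """Stream the file through SHA-256 so multi-GB shards fit in constant memory."""
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            digest.update(chunk)
    return digest.hexdigest()

assert os.path.getsize(SHARD_PATH) == EXPECTED_SIZE, "size mismatch"
assert sha256_of(SHARD_PATH) == EXPECTED_SHA256, "hash mismatch"
print("shard 1 matches its LFS pointer")
```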
special_tokens_map.json
ADDED
@@ -0,0 +1,23 @@
{
  "bos_token": {
    "content": "<|begin▁of▁sentence|>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "eos_token": {
    "content": "<|end▁of▁sentence|>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "pad_token": {
    "content": "<|end▁of▁sentence|>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  }
}
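special_tokens_map.json above assigns the sentence markers as BOS/EOS and reuses the EOS marker as the padding token. A minimal sketch of confirming how these entries surface once the tokenizer is loaded, assuming the `transformers` library and a local copy of the repository files (the "./" path is an assumption):

```python
from transformers import AutoTokenizer

# Assumed: tokenizer.json, tokenizer_config.json and special_tokens_map.json
# have been downloaded into this local directory.
tok = AutoTokenizer.from_pretrained("./")

print(tok.bos_token)  # <|begin▁of▁sentence|>
print(tok.eos_token)  # <|end▁of▁sentence|>
print(tok.pad_token)  # <|end▁of▁sentence|>  (padding reuses the EOS marker)
```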
tokenizer.json
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:d91915040cfac999d8c55f4b5bc6e67367c065e3a7a4e4b9438ce1f256addd86
size 17209530
tokenizer_config.json
ADDED
@@ -0,0 +1,2067 @@
1 |
+
{
|
2 |
+
"add_bos_token": true,
|
3 |
+
"add_eos_token": false,
|
4 |
+
"add_prefix_space": null,
|
5 |
+
"added_tokens_decoder": {
|
6 |
+
"128000": {
|
7 |
+
"content": "<|begin▁of▁sentence|>",
|
8 |
+
"lstrip": false,
|
9 |
+
"normalized": false,
|
10 |
+
"rstrip": false,
|
11 |
+
"single_word": false,
|
12 |
+
"special": true
|
13 |
+
},
|
14 |
+
"128001": {
|
15 |
+
"content": "<|end▁of▁sentence|>",
|
16 |
+
"lstrip": false,
|
17 |
+
"normalized": false,
|
18 |
+
"rstrip": false,
|
19 |
+
"single_word": false,
|
20 |
+
"special": true
|
21 |
+
},
|
22 |
+
"128002": {
|
23 |
+
"content": "<|reserved_special_token_0|>",
|
24 |
+
"lstrip": false,
|
25 |
+
"normalized": false,
|
26 |
+
"rstrip": false,
|
27 |
+
"single_word": false,
|
28 |
+
"special": true
|
29 |
+
},
|
30 |
+
"128003": {
|
31 |
+
"content": "<|reserved_special_token_1|>",
|
32 |
+
"lstrip": false,
|
33 |
+
"normalized": false,
|
34 |
+
"rstrip": false,
|
35 |
+
"single_word": false,
|
36 |
+
"special": true
|
37 |
+
},
|
38 |
+
"128004": {
|
39 |
+
"content": "<|finetune_right_pad_id|>",
|
40 |
+
"lstrip": false,
|
41 |
+
"normalized": false,
|
42 |
+
"rstrip": false,
|
43 |
+
"single_word": false,
|
44 |
+
"special": true
|
45 |
+
},
|
46 |
+
"128005": {
|
47 |
+
"content": "<|reserved_special_token_2|>",
|
48 |
+
"lstrip": false,
|
49 |
+
"normalized": false,
|
50 |
+
"rstrip": false,
|
51 |
+
"single_word": false,
|
52 |
+
"special": true
|
53 |
+
},
|
54 |
+
"128006": {
|
55 |
+
"content": "<|start_header_id|>",
|
56 |
+
"lstrip": false,
|
57 |
+
"normalized": false,
|
58 |
+
"rstrip": false,
|
59 |
+
"single_word": false,
|
60 |
+
"special": true
|
61 |
+
},
|
62 |
+
"128007": {
|
63 |
+
"content": "<|end_header_id|>",
|
64 |
+
"lstrip": false,
|
65 |
+
"normalized": false,
|
66 |
+
"rstrip": false,
|
67 |
+
"single_word": false,
|
68 |
+
"special": true
|
69 |
+
},
|
70 |
+
"128008": {
|
71 |
+
"content": "<|eom_id|>",
|
72 |
+
"lstrip": false,
|
73 |
+
"normalized": false,
|
74 |
+
"rstrip": false,
|
75 |
+
"single_word": false,
|
76 |
+
"special": true
|
77 |
+
},
|
78 |
+
"128009": {
|
79 |
+
"content": "<|eot_id|>",
|
80 |
+
"lstrip": false,
|
81 |
+
"normalized": false,
|
82 |
+
"rstrip": false,
|
83 |
+
"single_word": false,
|
84 |
+
"special": true
|
85 |
+
},
|
86 |
+
"128010": {
|
87 |
+
"content": "<|python_tag|>",
|
88 |
+
"lstrip": false,
|
89 |
+
"normalized": false,
|
90 |
+
"rstrip": false,
|
91 |
+
"single_word": false,
|
92 |
+
"special": true
|
93 |
+
},
|
94 |
+
"128011": {
|
95 |
+
"content": "<|User|>",
|
96 |
+
"lstrip": false,
|
97 |
+
"normalized": false,
|
98 |
+
"rstrip": false,
|
99 |
+
"single_word": false,
|
100 |
+
"special": false
|
101 |
+
},
|
102 |
+
"128012": {
|
103 |
+
"content": "<|Assistant|>",
|
104 |
+
"lstrip": false,
|
105 |
+
"normalized": false,
|
106 |
+
"rstrip": false,
|
107 |
+
"single_word": false,
|
108 |
+
"special": false
|
109 |
+
},
|
110 |
+
"128013": {
|
111 |
+
"content": "<think>",
|
112 |
+
"lstrip": false,
|
113 |
+
"normalized": false,
|
114 |
+
"rstrip": false,
|
115 |
+
"single_word": false,
|
116 |
+
"special": false
|
117 |
+
},
|
118 |
+
"128014": {
|
119 |
+
"content": "</think>",
|
120 |
+
"lstrip": false,
|
121 |
+
"normalized": false,
|
122 |
+
"rstrip": false,
|
123 |
+
"single_word": false,
|
124 |
+
"special": false
|
125 |
+
},
|
126 |
+
"128015": {
|
127 |
+
"content": "<|▁pad▁|>",
|
128 |
+
"lstrip": false,
|
129 |
+
"normalized": false,
|
130 |
+
"rstrip": false,
|
131 |
+
"single_word": false,
|
132 |
+
"special": true
|
133 |
+
},
|
134 |
+
"128016": {
|
135 |
+
"content": "<|reserved_special_token_8|>",
|
136 |
+
"lstrip": false,
|
137 |
+
"normalized": false,
|
138 |
+
"rstrip": false,
|
139 |
+
"single_word": false,
|
140 |
+
"special": true
|
141 |
+
},
|
142 |
+
"128017": {
|
143 |
+
"content": "<|reserved_special_token_9|>",
|
144 |
+
"lstrip": false,
|
145 |
+
"normalized": false,
|
146 |
+
"rstrip": false,
|
147 |
+
"single_word": false,
|
148 |
+
"special": true
|
149 |
+
},
|
150 |
+
"128018": {
|
151 |
+
"content": "<|reserved_special_token_10|>",
|
152 |
+
"lstrip": false,
|
153 |
+
"normalized": false,
|
154 |
+
"rstrip": false,
|
155 |
+
"single_word": false,
|
156 |
+
"special": true
|
157 |
+
},
|
158 |
+
"128019": {
|
159 |
+
"content": "<|reserved_special_token_11|>",
|
160 |
+
"lstrip": false,
|
161 |
+
"normalized": false,
|
162 |
+
"rstrip": false,
|
163 |
+
"single_word": false,
|
164 |
+
"special": true
|
165 |
+
},
|
166 |
+
"128020": {
|
167 |
+
"content": "<|reserved_special_token_12|>",
|
168 |
+
"lstrip": false,
|
169 |
+
"normalized": false,
|
170 |
+
"rstrip": false,
|
171 |
+
"single_word": false,
|
172 |
+
"special": true
|
173 |
+
},
|
174 |
+
"128021": {
|
175 |
+
"content": "<|reserved_special_token_13|>",
|
176 |
+
"lstrip": false,
|
177 |
+
"normalized": false,
|
178 |
+
"rstrip": false,
|
179 |
+
"single_word": false,
|
180 |
+
"special": true
|
181 |
+
},
|
182 |
+
"128022": {
|
183 |
+
"content": "<|reserved_special_token_14|>",
|
184 |
+
"lstrip": false,
|
185 |
+
"normalized": false,
|
186 |
+
"rstrip": false,
|
187 |
+
"single_word": false,
|
188 |
+
"special": true
|
189 |
+
},
|
190 |
+
"128023": {
|
191 |
+
"content": "<|reserved_special_token_15|>",
|
192 |
+
"lstrip": false,
|
193 |
+
"normalized": false,
|
194 |
+
"rstrip": false,
|
195 |
+
"single_word": false,
|
196 |
+
"special": true
|
197 |
+
},
|
198 |
+
"128024": {
|
199 |
+
"content": "<|reserved_special_token_16|>",
|
200 |
+
"lstrip": false,
|
201 |
+
"normalized": false,
|
202 |
+
"rstrip": false,
|
203 |
+
"single_word": false,
|
204 |
+
"special": true
|
205 |
+
},
|
206 |
+
"128025": {
|
207 |
+
"content": "<|reserved_special_token_17|>",
|
208 |
+
"lstrip": false,
|
209 |
+
"normalized": false,
|
210 |
+
"rstrip": false,
|
211 |
+
"single_word": false,
|
212 |
+
"special": true
|
213 |
+
},
|
214 |
+
"128026": {
|
215 |
+
"content": "<|reserved_special_token_18|>",
|
216 |
+
"lstrip": false,
|
217 |
+
"normalized": false,
|
218 |
+
"rstrip": false,
|
219 |
+
"single_word": false,
|
220 |
+
"special": true
|
221 |
+
},
|
222 |
+
"128027": {
|
223 |
+
"content": "<|reserved_special_token_19|>",
|
224 |
+
"lstrip": false,
|
225 |
+
"normalized": false,
|
226 |
+
"rstrip": false,
|
227 |
+
"single_word": false,
|
228 |
+
"special": true
|
229 |
+
},
|
230 |
+
"128028": {
|
231 |
+
"content": "<|reserved_special_token_20|>",
|
232 |
+
"lstrip": false,
|
233 |
+
"normalized": false,
|
234 |
+
"rstrip": false,
|
235 |
+
"single_word": false,
|
236 |
+
"special": true
|
237 |
+
},
|
238 |
+
"128029": {
|
239 |
+
"content": "<|reserved_special_token_21|>",
|
240 |
+
"lstrip": false,
|
241 |
+
"normalized": false,
|
242 |
+
"rstrip": false,
|
243 |
+
"single_word": false,
|
244 |
+
"special": true
|
245 |
+
},
|
246 |
+
"128030": {
|
247 |
+
"content": "<|reserved_special_token_22|>",
|
248 |
+
"lstrip": false,
|
249 |
+
"normalized": false,
|
250 |
+
"rstrip": false,
|
251 |
+
"single_word": false,
|
252 |
+
"special": true
|
253 |
+
},
|
254 |
+
"128031": {
|
255 |
+
"content": "<|reserved_special_token_23|>",
|
256 |
+
"lstrip": false,
|
257 |
+
"normalized": false,
|
258 |
+
"rstrip": false,
|
259 |
+
"single_word": false,
|
260 |
+
"special": true
|
261 |
+
},
|
262 |
+
"128032": {
|
263 |
+
"content": "<|reserved_special_token_24|>",
|
264 |
+
"lstrip": false,
|
265 |
+
"normalized": false,
|
266 |
+
"rstrip": false,
|
267 |
+
"single_word": false,
|
268 |
+
"special": true
|
269 |
+
},
|
270 |
+
"128033": {
|
271 |
+
"content": "<|reserved_special_token_25|>",
|
272 |
+
"lstrip": false,
|
273 |
+
"normalized": false,
|
274 |
+
"rstrip": false,
|
275 |
+
"single_word": false,
|
276 |
+
"special": true
|
277 |
+
},
|
278 |
+
"128034": {
|
279 |
+
"content": "<|reserved_special_token_26|>",
|
280 |
+
"lstrip": false,
|
281 |
+
"normalized": false,
|
282 |
+
"rstrip": false,
|
283 |
+
"single_word": false,
|
284 |
+
"special": true
|
285 |
+
},
|
286 |
+
"128035": {
|
287 |
+
"content": "<|reserved_special_token_27|>",
|
288 |
+
"lstrip": false,
|
289 |
+
"normalized": false,
|
290 |
+
"rstrip": false,
|
291 |
+
"single_word": false,
|
292 |
+
"special": true
|
293 |
+
},
|
294 |
+
"128036": {
|
295 |
+
"content": "<|reserved_special_token_28|>",
|
296 |
+
"lstrip": false,
|
297 |
+
"normalized": false,
|
298 |
+
"rstrip": false,
|
299 |
+
"single_word": false,
|
300 |
+
"special": true
|
301 |
+
},
|
302 |
+
"128037": {
|
303 |
+
"content": "<|reserved_special_token_29|>",
|
304 |
+
"lstrip": false,
|
305 |
+
"normalized": false,
|
306 |
+
"rstrip": false,
|
307 |
+
"single_word": false,
|
308 |
+
"special": true
|
309 |
+
},
|
310 |
+
"128038": {
|
311 |
+
"content": "<|reserved_special_token_30|>",
|
312 |
+
"lstrip": false,
|
313 |
+
"normalized": false,
|
314 |
+
"rstrip": false,
|
315 |
+
"single_word": false,
|
316 |
+
"special": true
|
317 |
+
},
|
318 |
+
"128039": {
|
319 |
+
"content": "<|reserved_special_token_31|>",
|
320 |
+
"lstrip": false,
|
321 |
+
"normalized": false,
|
322 |
+
"rstrip": false,
|
323 |
+
"single_word": false,
|
324 |
+
"special": true
|
325 |
+
},
|
326 |
+
"128040": {
|
327 |
+
"content": "<|reserved_special_token_32|>",
|
328 |
+
"lstrip": false,
|
329 |
+
"normalized": false,
|
330 |
+
"rstrip": false,
|
331 |
+
"single_word": false,
|
332 |
+
"special": true
|
333 |
+
},
|
334 |
+
"128041": {
|
335 |
+
"content": "<|reserved_special_token_33|>",
|
336 |
+
"lstrip": false,
|
337 |
+
"normalized": false,
|
338 |
+
"rstrip": false,
|
339 |
+
"single_word": false,
|
340 |
+
"special": true
|
341 |
+
},
|
342 |
+
"128042": {
|
343 |
+
"content": "<|reserved_special_token_34|>",
|
344 |
+
"lstrip": false,
|
345 |
+
"normalized": false,
|
346 |
+
"rstrip": false,
|
347 |
+
"single_word": false,
|
348 |
+
"special": true
|
349 |
+
},
|
350 |
+
"128043": {
|
351 |
+
"content": "<|reserved_special_token_35|>",
|
352 |
+
"lstrip": false,
|
353 |
+
"normalized": false,
|
354 |
+
"rstrip": false,
|
355 |
+
"single_word": false,
|
356 |
+
"special": true
|
357 |
+
},
|
358 |
+
"128044": {
|
359 |
+
"content": "<|reserved_special_token_36|>",
|
360 |
+
"lstrip": false,
|
361 |
+
"normalized": false,
|
362 |
+
"rstrip": false,
|
363 |
+
"single_word": false,
|
364 |
+
"special": true
|
365 |
+
},
|
366 |
+
"128045": {
|
367 |
+
"content": "<|reserved_special_token_37|>",
|
368 |
+
"lstrip": false,
|
369 |
+
"normalized": false,
|
370 |
+
"rstrip": false,
|
371 |
+
"single_word": false,
|
372 |
+
"special": true
|
373 |
+
},
|
374 |
+
"128046": {
|
375 |
+
"content": "<|reserved_special_token_38|>",
|
376 |
+
"lstrip": false,
|
377 |
+
"normalized": false,
|
378 |
+
"rstrip": false,
|
379 |
+
"single_word": false,
|
380 |
+
"special": true
|
381 |
+
},
|
382 |
+
"128047": {
|
383 |
+
"content": "<|reserved_special_token_39|>",
|
384 |
+
"lstrip": false,
|
385 |
+
"normalized": false,
|
386 |
+
"rstrip": false,
|
387 |
+
"single_word": false,
|
388 |
+
"special": true
|
389 |
+
},
|
390 |
+
"128048": {
|
391 |
+
"content": "<|reserved_special_token_40|>",
|
392 |
+
"lstrip": false,
|
393 |
+
"normalized": false,
|
394 |
+
"rstrip": false,
|
395 |
+
"single_word": false,
|
396 |
+
"special": true
|
397 |
+
},
|
398 |
+
"128049": {
|
399 |
+
"content": "<|reserved_special_token_41|>",
|
400 |
+
"lstrip": false,
|
401 |
+
"normalized": false,
|
402 |
+
"rstrip": false,
|
403 |
+
"single_word": false,
|
404 |
+
"special": true
|
405 |
+
},
|
406 |
+
"128050": {
|
407 |
+
"content": "<|reserved_special_token_42|>",
|
408 |
+
"lstrip": false,
|
409 |
+
"normalized": false,
|
410 |
+
"rstrip": false,
|
411 |
+
"single_word": false,
|
412 |
+
"special": true
|
413 |
+
},
|
414 |
+
"128051": {
|
415 |
+
"content": "<|reserved_special_token_43|>",
|
416 |
+
"lstrip": false,
|
417 |
+
"normalized": false,
|
418 |
+
"rstrip": false,
|
419 |
+
"single_word": false,
|
420 |
+
"special": true
|
421 |
+
},
|
422 |
+
"128052": {
|
423 |
+
"content": "<|reserved_special_token_44|>",
|
424 |
+
"lstrip": false,
|
425 |
+
"normalized": false,
|
426 |
+
"rstrip": false,
|
427 |
+
"single_word": false,
|
428 |
+
"special": true
|
429 |
+
},
|
430 |
+
"128053": {
|
431 |
+
"content": "<|reserved_special_token_45|>",
|
432 |
+
"lstrip": false,
|
433 |
+
"normalized": false,
|
434 |
+
"rstrip": false,
|
435 |
+
"single_word": false,
|
436 |
+
"special": true
|
437 |
+
},
|
438 |
+
"128054": {
|
439 |
+
"content": "<|reserved_special_token_46|>",
|
440 |
+
"lstrip": false,
|
441 |
+
"normalized": false,
|
442 |
+
"rstrip": false,
|
443 |
+
"single_word": false,
|
444 |
+
"special": true
|
445 |
+
},
|
446 |
+
"128055": {
|
447 |
+
"content": "<|reserved_special_token_47|>",
|
448 |
+
"lstrip": false,
|
449 |
+
"normalized": false,
|
450 |
+
"rstrip": false,
|
451 |
+
"single_word": false,
|
452 |
+
"special": true
|
453 |
+
},
|
454 |
+
"128056": {
|
455 |
+
"content": "<|reserved_special_token_48|>",
|
456 |
+
"lstrip": false,
|
457 |
+
"normalized": false,
|
458 |
+
"rstrip": false,
|
459 |
+
"single_word": false,
|
460 |
+
"special": true
|
461 |
+
},
|
462 |
+
"128057": {
|
463 |
+
"content": "<|reserved_special_token_49|>",
|
464 |
+
"lstrip": false,
|
465 |
+
"normalized": false,
|
466 |
+
"rstrip": false,
|
467 |
+
"single_word": false,
|
468 |
+
"special": true
|
469 |
+
},
|
470 |
+
"128058": {
|
471 |
+
"content": "<|reserved_special_token_50|>",
|
472 |
+
"lstrip": false,
|
473 |
+
"normalized": false,
|
474 |
+
"rstrip": false,
|
475 |
+
"single_word": false,
|
476 |
+
"special": true
|
477 |
+
},
|
478 |
+
"128059": {
|
479 |
+
"content": "<|reserved_special_token_51|>",
|
480 |
+
"lstrip": false,
|
481 |
+
"normalized": false,
|
482 |
+
"rstrip": false,
|
483 |
+
"single_word": false,
|
484 |
+
"special": true
|
485 |
+
},
|
486 |
+
"128060": {
|
487 |
+
"content": "<|reserved_special_token_52|>",
|
488 |
+
"lstrip": false,
|
489 |
+
"normalized": false,
|
490 |
+
"rstrip": false,
|
491 |
+
"single_word": false,
|
492 |
+
"special": true
|
493 |
+
},
|
494 |
+
"128061": {
|
495 |
+
"content": "<|reserved_special_token_53|>",
|
496 |
+
"lstrip": false,
|
497 |
+
"normalized": false,
|
498 |
+
"rstrip": false,
|
499 |
+
"single_word": false,
|
500 |
+
"special": true
|
501 |
+
},
|
502 |
+
"128062": {
|
503 |
+
"content": "<|reserved_special_token_54|>",
|
504 |
+
"lstrip": false,
|
505 |
+
"normalized": false,
|
506 |
+
"rstrip": false,
|
507 |
+
"single_word": false,
|
508 |
+
"special": true
|
509 |
+
},
|
510 |
+
"128063": {
|
511 |
+
"content": "<|reserved_special_token_55|>",
|
512 |
+
"lstrip": false,
|
513 |
+
"normalized": false,
|
514 |
+
"rstrip": false,
|
515 |
+
"single_word": false,
|
516 |
+
"special": true
|
517 |
+
},
|
518 |
+
"128064": {
|
519 |
+
"content": "<|reserved_special_token_56|>",
|
520 |
+
"lstrip": false,
|
521 |
+
"normalized": false,
|
522 |
+
"rstrip": false,
|
523 |
+
"single_word": false,
|
524 |
+
"special": true
|
525 |
+
},
|
526 |
+
"128065": {
|
527 |
+
"content": "<|reserved_special_token_57|>",
|
528 |
+
"lstrip": false,
|
529 |
+
"normalized": false,
|
530 |
+
"rstrip": false,
|
531 |
+
"single_word": false,
|
532 |
+
"special": true
|
533 |
+
},
|
534 |
+
"128066": {
|
535 |
+
"content": "<|reserved_special_token_58|>",
|
536 |
+
"lstrip": false,
|
537 |
+
"normalized": false,
|
538 |
+
"rstrip": false,
|
539 |
+
"single_word": false,
|
540 |
+
"special": true
|
541 |
+
},
|
542 |
+
"128067": {
|
543 |
+
"content": "<|reserved_special_token_59|>",
|
544 |
+
"lstrip": false,
|
545 |
+
"normalized": false,
|
546 |
+
"rstrip": false,
|
547 |
+
"single_word": false,
|
548 |
+
"special": true
|
549 |
+
},
|
550 |
+
"128068": {
|
551 |
+
"content": "<|reserved_special_token_60|>",
|
552 |
+
"lstrip": false,
|
553 |
+
"normalized": false,
|
554 |
+
"rstrip": false,
|
555 |
+
"single_word": false,
|
556 |
+
"special": true
|
557 |
+
},
|
558 |
+
"128069": {
|
559 |
+
"content": "<|reserved_special_token_61|>",
|
560 |
+
"lstrip": false,
|
561 |
+
"normalized": false,
|
562 |
+
"rstrip": false,
|
563 |
+
"single_word": false,
|
564 |
+
"special": true
|
565 |
+
},
|
566 |
+
"128070": {
|
567 |
+
"content": "<|reserved_special_token_62|>",
|
568 |
+
"lstrip": false,
|
569 |
+
"normalized": false,
|
570 |
+
"rstrip": false,
|
571 |
+
"single_word": false,
|
572 |
+
"special": true
|
573 |
+
},
|
574 |
+
"128071": {
|
575 |
+
"content": "<|reserved_special_token_63|>",
|
576 |
+
"lstrip": false,
|
577 |
+
"normalized": false,
|
578 |
+
"rstrip": false,
|
579 |
+
"single_word": false,
|
580 |
+
"special": true
|
581 |
+
},
|
582 |
+
"128072": {
|
583 |
+
"content": "<|reserved_special_token_64|>",
|
584 |
+
"lstrip": false,
|
585 |
+
"normalized": false,
|
586 |
+
"rstrip": false,
|
587 |
+
"single_word": false,
|
588 |
+
"special": true
|
589 |
+
},
|
590 |
+
"128073": {
|
591 |
+
"content": "<|reserved_special_token_65|>",
|
592 |
+
"lstrip": false,
|
593 |
+
"normalized": false,
|
594 |
+
"rstrip": false,
|
595 |
+
"single_word": false,
|
596 |
+
"special": true
|
597 |
+
},
|
598 |
+
"128074": {
|
599 |
+
"content": "<|reserved_special_token_66|>",
|
600 |
+
"lstrip": false,
|
601 |
+
"normalized": false,
|
602 |
+
"rstrip": false,
|
603 |
+
"single_word": false,
|
604 |
+
"special": true
|
605 |
+
},
|
606 |
+
"128075": {
|
607 |
+
"content": "<|reserved_special_token_67|>",
|
608 |
+
"lstrip": false,
|
609 |
+
"normalized": false,
|
610 |
+
"rstrip": false,
|
611 |
+
"single_word": false,
|
612 |
+
"special": true
|
613 |
+
},
|
614 |
+
"128076": {
|
615 |
+
"content": "<|reserved_special_token_68|>",
|
616 |
+
"lstrip": false,
|
617 |
+
"normalized": false,
|
618 |
+
"rstrip": false,
|
619 |
+
"single_word": false,
|
620 |
+
"special": true
|
621 |
+
},
|
622 |
+
"128077": {
|
623 |
+
"content": "<|reserved_special_token_69|>",
|
624 |
+
"lstrip": false,
|
625 |
+
"normalized": false,
|
626 |
+
"rstrip": false,
|
627 |
+
"single_word": false,
|
628 |
+
"special": true
|
629 |
+
},
|
630 |
+
"128078": {
|
631 |
+
"content": "<|reserved_special_token_70|>",
|
632 |
+
"lstrip": false,
|
633 |
+
"normalized": false,
|
634 |
+
"rstrip": false,
|
635 |
+
"single_word": false,
|
636 |
+
"special": true
|
637 |
+
},
|
638 |
+
"128079": {
|
639 |
+
"content": "<|reserved_special_token_71|>",
|
640 |
+
"lstrip": false,
|
641 |
+
"normalized": false,
|
642 |
+
"rstrip": false,
|
643 |
+
"single_word": false,
|
644 |
+
"special": true
|
645 |
+
},
|
646 |
+
"128080": {
|
647 |
+
"content": "<|reserved_special_token_72|>",
|
648 |
+
"lstrip": false,
|
649 |
+
"normalized": false,
|
650 |
+
"rstrip": false,
|
651 |
+
"single_word": false,
|
652 |
+
"special": true
|
653 |
+
},
|
654 |
+
"128081": {
|
655 |
+
"content": "<|reserved_special_token_73|>",
|
656 |
+
"lstrip": false,
|
657 |
+
"normalized": false,
|
658 |
+
"rstrip": false,
|
659 |
+
"single_word": false,
|
660 |
+
"special": true
|
661 |
+
},
|
662 |
+
"128082": {
|
663 |
+
"content": "<|reserved_special_token_74|>",
|
664 |
+
"lstrip": false,
|
665 |
+
"normalized": false,
|
666 |
+
"rstrip": false,
|
667 |
+
"single_word": false,
|
668 |
+
"special": true
|
669 |
+
},
|
670 |
+
"128083": {
|
671 |
+
"content": "<|reserved_special_token_75|>",
|
672 |
+
"lstrip": false,
|
673 |
+
"normalized": false,
|
674 |
+
"rstrip": false,
|
675 |
+
"single_word": false,
|
676 |
+
"special": true
|
677 |
+
},
|
678 |
+
"128084": {
|
679 |
+
"content": "<|reserved_special_token_76|>",
|
680 |
+
"lstrip": false,
|
681 |
+
"normalized": false,
|
682 |
+
"rstrip": false,
|
683 |
+
"single_word": false,
|
684 |
+
"special": true
|
685 |
+
},
|
686 |
+
"128085": {
|
687 |
+
"content": "<|reserved_special_token_77|>",
|
688 |
+
"lstrip": false,
|
689 |
+
"normalized": false,
|
690 |
+
"rstrip": false,
|
691 |
+
"single_word": false,
|
692 |
+
"special": true
|
693 |
+
},
|
694 |
+
"128086": {
|
695 |
+
"content": "<|reserved_special_token_78|>",
|
696 |
+
"lstrip": false,
|
697 |
+
"normalized": false,
|
698 |
+
"rstrip": false,
|
699 |
+
"single_word": false,
|
700 |
+
"special": true
|
701 |
+
},
|
702 |
+
"128087": {
|
703 |
+
"content": "<|reserved_special_token_79|>",
|
704 |
+
"lstrip": false,
|
705 |
+
"normalized": false,
|
706 |
+
"rstrip": false,
|
707 |
+
"single_word": false,
|
708 |
+
"special": true
|
709 |
+
},
|
710 |
+
"128088": {
|
711 |
+
"content": "<|reserved_special_token_80|>",
|
712 |
+
"lstrip": false,
|
713 |
+
"normalized": false,
|
714 |
+
"rstrip": false,
|
715 |
+
"single_word": false,
|
716 |
+
"special": true
|
717 |
+
},
|
718 |
+
"128089": {
|
719 |
+
"content": "<|reserved_special_token_81|>",
|
720 |
+
"lstrip": false,
|
721 |
+
"normalized": false,
|
722 |
+
"rstrip": false,
|
723 |
+
"single_word": false,
|
724 |
+
"special": true
|
725 |
+
},
|
726 |
+
"128090": {
|
727 |
+
"content": "<|reserved_special_token_82|>",
|
728 |
+
"lstrip": false,
|
729 |
+
"normalized": false,
|
730 |
+
"rstrip": false,
|
731 |
+
"single_word": false,
|
732 |
+
"special": true
|
733 |
+
},
|
734 |
+
"128091": {
|
735 |
+
"content": "<|reserved_special_token_83|>",
|
736 |
+
"lstrip": false,
|
737 |
+
"normalized": false,
|
738 |
+
"rstrip": false,
|
739 |
+
"single_word": false,
|
740 |
+
"special": true
|
741 |
+
},
|
742 |
+
"128092": {
|
743 |
+
"content": "<|reserved_special_token_84|>",
|
744 |
+
"lstrip": false,
|
745 |
+
"normalized": false,
|
746 |
+
"rstrip": false,
|
747 |
+
"single_word": false,
|
748 |
+
"special": true
|
749 |
+
},
|
750 |
+
"128093": {
|
751 |
+
"content": "<|reserved_special_token_85|>",
|
752 |
+
"lstrip": false,
|
753 |
+
"normalized": false,
|
754 |
+
"rstrip": false,
|
755 |
+
"single_word": false,
|
756 |
+
"special": true
|
757 |
+
},
|
758 |
+
"128094": {
|
759 |
+
"content": "<|reserved_special_token_86|>",
|
760 |
+
"lstrip": false,
|
761 |
+
"normalized": false,
|
762 |
+
"rstrip": false,
|
763 |
+
"single_word": false,
|
764 |
+
"special": true
|
765 |
+
},
|
766 |
+
"128095": {
|
767 |
+
"content": "<|reserved_special_token_87|>",
|
768 |
+
"lstrip": false,
|
769 |
+
"normalized": false,
|
770 |
+
"rstrip": false,
|
771 |
+
"single_word": false,
|
772 |
+
"special": true
|
773 |
+
},
|
774 |
+
"128096": {
|
775 |
+
"content": "<|reserved_special_token_88|>",
|
776 |
+
"lstrip": false,
|
777 |
+
"normalized": false,
|
778 |
+
"rstrip": false,
|
779 |
+
"single_word": false,
|
780 |
+
"special": true
|
781 |
+
},
|
782 |
+
"128097": {
|
783 |
+
"content": "<|reserved_special_token_89|>",
|
784 |
+
"lstrip": false,
|
785 |
+
"normalized": false,
|
786 |
+
"rstrip": false,
|
787 |
+
"single_word": false,
|
788 |
+
"special": true
|
789 |
+
},
|
790 |
+
"128098": {
|
791 |
+
"content": "<|reserved_special_token_90|>",
|
792 |
+
"lstrip": false,
|
793 |
+
"normalized": false,
|
794 |
+
"rstrip": false,
|
795 |
+
"single_word": false,
|
796 |
+
"special": true
|
797 |
+
},
|
798 |
+
"128099": {
|
799 |
+
"content": "<|reserved_special_token_91|>",
|
800 |
+
"lstrip": false,
|
801 |
+
"normalized": false,
|
802 |
+
"rstrip": false,
|
803 |
+
"single_word": false,
|
804 |
+
"special": true
|
805 |
+
},
|
806 |
+
"128100": {
|
807 |
+
"content": "<|reserved_special_token_92|>",
|
808 |
+
"lstrip": false,
|
809 |
+
"normalized": false,
|
810 |
+
"rstrip": false,
|
811 |
+
"single_word": false,
|
812 |
+
"special": true
|
813 |
+
},
|
814 |
+
"128101": {
|
815 |
+
"content": "<|reserved_special_token_93|>",
|
816 |
+
"lstrip": false,
|
817 |
+
"normalized": false,
|
818 |
+
"rstrip": false,
|
819 |
+
"single_word": false,
|
820 |
+
"special": true
|
821 |
+
},
|
822 |
+
"128102": {
|
823 |
+
"content": "<|reserved_special_token_94|>",
|
824 |
+
"lstrip": false,
|
825 |
+
"normalized": false,
|
826 |
+
"rstrip": false,
|
827 |
+
"single_word": false,
|
828 |
+
"special": true
|
829 |
+
},
|
830 |
+
"128103": {
|
831 |
+
"content": "<|reserved_special_token_95|>",
|
832 |
+
"lstrip": false,
|
833 |
+
"normalized": false,
|
834 |
+
"rstrip": false,
|
835 |
+
"single_word": false,
|
836 |
+
"special": true
|
837 |
+
},
|
838 |
+
"128104": {
|
839 |
+
"content": "<|reserved_special_token_96|>",
|
840 |
+
"lstrip": false,
|
841 |
+
"normalized": false,
|
842 |
+
"rstrip": false,
|
843 |
+
"single_word": false,
|
844 |
+
"special": true
|
845 |
+
},
|
846 |
+
"128105": {
|
847 |
+
"content": "<|reserved_special_token_97|>",
|
848 |
+
"lstrip": false,
|
849 |
+
"normalized": false,
|
850 |
+
"rstrip": false,
|
851 |
+
"single_word": false,
|
852 |
+
"special": true
|
853 |
+
},
|
854 |
+
"128106": {
|
855 |
+
"content": "<|reserved_special_token_98|>",
|
856 |
+
"lstrip": false,
|
857 |
+
"normalized": false,
|
858 |
+
"rstrip": false,
|
859 |
+
"single_word": false,
|
860 |
+
"special": true
|
861 |
+
},
|
862 |
+
"128107": {
|
863 |
+
"content": "<|reserved_special_token_99|>",
|
864 |
+
"lstrip": false,
|
865 |
+
"normalized": false,
|
866 |
+
"rstrip": false,
|
867 |
+
"single_word": false,
|
868 |
+
"special": true
|
869 |
+
},
|
870 |
+
"128108": {
|
871 |
+
"content": "<|reserved_special_token_100|>",
|
872 |
+
"lstrip": false,
|
873 |
+
"normalized": false,
|
874 |
+
"rstrip": false,
|
875 |
+
"single_word": false,
|
876 |
+
"special": true
|
877 |
+
},
|
878 |
+
"128109": {
|
879 |
+
"content": "<|reserved_special_token_101|>",
|
880 |
+
"lstrip": false,
|
881 |
+
"normalized": false,
|
882 |
+
"rstrip": false,
|
883 |
+
"single_word": false,
|
884 |
+
"special": true
|
885 |
+
},
|
886 |
+
"128110": {
|
887 |
+
"content": "<|reserved_special_token_102|>",
|
888 |
+
"lstrip": false,
|
889 |
+
"normalized": false,
|
890 |
+
"rstrip": false,
|
891 |
+
"single_word": false,
|
892 |
+
"special": true
|
893 |
+
},
|
894 |
+
"128111": {
|
895 |
+
"content": "<|reserved_special_token_103|>",
|
896 |
+
"lstrip": false,
|
897 |
+
"normalized": false,
|
898 |
+
"rstrip": false,
|
899 |
+
"single_word": false,
|
900 |
+
"special": true
|
901 |
+
},
|
902 |
+
"128112": {
|
903 |
+
"content": "<|reserved_special_token_104|>",
|
904 |
+
"lstrip": false,
|
905 |
+
"normalized": false,
|
906 |
+
"rstrip": false,
|
907 |
+
"single_word": false,
|
908 |
+
"special": true
|
909 |
+
},
|
910 |
+
"128113": {
|
911 |
+
"content": "<|reserved_special_token_105|>",
|
912 |
+
"lstrip": false,
|
913 |
+
"normalized": false,
|
914 |
+
"rstrip": false,
|
915 |
+
"single_word": false,
|
916 |
+
"special": true
|
917 |
+
},
|
918 |
+
"128114": {
|
919 |
+
"content": "<|reserved_special_token_106|>",
|
920 |
+
"lstrip": false,
|
921 |
+
"normalized": false,
|
922 |
+
"rstrip": false,
|
923 |
+
"single_word": false,
|
924 |
+
"special": true
|
925 |
+
},
|
926 |
+
"128115": {
|
927 |
+
"content": "<|reserved_special_token_107|>",
|
928 |
+
"lstrip": false,
|
929 |
+
"normalized": false,
|
930 |
+
"rstrip": false,
|
931 |
+
"single_word": false,
|
932 |
+
"special": true
|
933 |
+
},
|
934 |
+
"128116": {
|
935 |
+
"content": "<|reserved_special_token_108|>",
|
936 |
+
"lstrip": false,
|
937 |
+
"normalized": false,
|
938 |
+
"rstrip": false,
|
939 |
+
"single_word": false,
|
940 |
+
"special": true
|
941 |
+
},
|
942 |
+
"128117": {
|
943 |
+
"content": "<|reserved_special_token_109|>",
|
944 |
+
"lstrip": false,
|
945 |
+
"normalized": false,
|
946 |
+
"rstrip": false,
|
947 |
+
"single_word": false,
|
948 |
+
"special": true
|
949 |
+
},
|
950 |
+
"128118": {
|
951 |
+
"content": "<|reserved_special_token_110|>",
|
952 |
+
"lstrip": false,
|
953 |
+
"normalized": false,
|
954 |
+
"rstrip": false,
|
955 |
+
"single_word": false,
|
956 |
+
"special": true
|
957 |
+
},
|
958 |
+
"128119": {
|
959 |
+
"content": "<|reserved_special_token_111|>",
|
960 |
+
"lstrip": false,
|
961 |
+
"normalized": false,
|
962 |
+
"rstrip": false,
|
963 |
+
"single_word": false,
|
964 |
+
"special": true
|
965 |
+
},
|
966 |
+
"128120": {
|
967 |
+
"content": "<|reserved_special_token_112|>",
|
968 |
+
"lstrip": false,
|
969 |
+
"normalized": false,
|
970 |
+
"rstrip": false,
|
971 |
+
"single_word": false,
|
972 |
+
"special": true
|
973 |
+
},
|
974 |
+
"128121": {
|
975 |
+
"content": "<|reserved_special_token_113|>",
|
976 |
+
"lstrip": false,
|
977 |
+
"normalized": false,
|
978 |
+
"rstrip": false,
|
979 |
+
"single_word": false,
|
980 |
+
"special": true
|
981 |
+
},
|
982 |
+
"128122": {
|
983 |
+
"content": "<|reserved_special_token_114|>",
|
984 |
+
"lstrip": false,
|
985 |
+
"normalized": false,
|
986 |
+
"rstrip": false,
|
987 |
+
"single_word": false,
|
988 |
+
"special": true
|
989 |
+
},
|
990 |
+
"128123": {
|
991 |
+
"content": "<|reserved_special_token_115|>",
|
992 |
+
"lstrip": false,
|
993 |
+
"normalized": false,
|
994 |
+
"rstrip": false,
|
995 |
+
"single_word": false,
|
996 |
+
"special": true
|
997 |
+
},
|
998 |
+
"128124": {
|
999 |
+
"content": "<|reserved_special_token_116|>",
|
1000 |
+
"lstrip": false,
|
1001 |
+
"normalized": false,
|
1002 |
+
"rstrip": false,
|
1003 |
+
"single_word": false,
|
1004 |
+
"special": true
|
1005 |
+
},
|
1006 |
+
"128125": {
|
1007 |
+
"content": "<|reserved_special_token_117|>",
|
1008 |
+
"lstrip": false,
|
1009 |
+
"normalized": false,
|
1010 |
+
"rstrip": false,
|
1011 |
+
"single_word": false,
|
1012 |
+
"special": true
|
1013 |
+
},
|
1014 |
+
"128126": {
|
1015 |
+
"content": "<|reserved_special_token_118|>",
|
1016 |
+
"lstrip": false,
|
1017 |
+
"normalized": false,
|
1018 |
+
"rstrip": false,
|
1019 |
+
"single_word": false,
|
1020 |
+
"special": true
|
1021 |
+
},
|
1022 |
+
"128127": {
|
1023 |
+
"content": "<|reserved_special_token_119|>",
|
1024 |
+
"lstrip": false,
|
1025 |
+
"normalized": false,
|
1026 |
+
"rstrip": false,
|
1027 |
+
"single_word": false,
|
1028 |
+
"special": true
|
1029 |
+
},
|
1030 |
+
"128128": {
|
1031 |
+
"content": "<|reserved_special_token_120|>",
|
1032 |
+
"lstrip": false,
|
1033 |
+
"normalized": false,
|
1034 |
+
"rstrip": false,
|
1035 |
+
"single_word": false,
|
1036 |
+
"special": true
|
1037 |
+
},
|
1038 |
+
"128129": {
|
1039 |
+
"content": "<|reserved_special_token_121|>",
|
1040 |
+
"lstrip": false,
|
1041 |
+
"normalized": false,
|
1042 |
+
"rstrip": false,
|
1043 |
+
"single_word": false,
|
1044 |
+
"special": true
|
1045 |
+
},
|
1046 |
+
"128130": {
|
1047 |
+
"content": "<|reserved_special_token_122|>",
|
1048 |
+
"lstrip": false,
|
1049 |
+
"normalized": false,
|
1050 |
+
"rstrip": false,
|
1051 |
+
"single_word": false,
|
1052 |
+
"special": true
|
1053 |
+
},
|
1054 |
+
"128131": {
|
1055 |
+
"content": "<|reserved_special_token_123|>",
|
1056 |
+
"lstrip": false,
|
1057 |
+
"normalized": false,
|
1058 |
+
"rstrip": false,
|
1059 |
+
"single_word": false,
|
1060 |
+
"special": true
|
1061 |
+
},
|
1062 |
+
"128132": {
|
1063 |
+
"content": "<|reserved_special_token_124|>",
|
1064 |
+
"lstrip": false,
|
1065 |
+
"normalized": false,
|
1066 |
+
"rstrip": false,
|
1067 |
+
"single_word": false,
|
1068 |
+
"special": true
|
1069 |
+
},
|
1070 |
+
"128133": {
|
1071 |
+
"content": "<|reserved_special_token_125|>",
|
1072 |
+
"lstrip": false,
|
1073 |
+
"normalized": false,
|
1074 |
+
"rstrip": false,
|
1075 |
+
"single_word": false,
|
1076 |
+
"special": true
|
1077 |
+
},
|
1078 |
+
"128134": {
|
1079 |
+
"content": "<|reserved_special_token_126|>",
|
1080 |
+
"lstrip": false,
|
1081 |
+
"normalized": false,
|
1082 |
+
"rstrip": false,
|
1083 |
+
"single_word": false,
|
1084 |
+
"special": true
|
1085 |
+
},
|
1086 |
+
"128135": {
|
1087 |
+
"content": "<|reserved_special_token_127|>",
|
1088 |
+
"lstrip": false,
|
1089 |
+
"normalized": false,
|
1090 |
+
"rstrip": false,
|
1091 |
+
"single_word": false,
|
1092 |
+
"special": true
|
1093 |
+
},
|
1094 |
+
"128136": {
|
1095 |
+
"content": "<|reserved_special_token_128|>",
|
1096 |
+
"lstrip": false,
|
1097 |
+
"normalized": false,
|
1098 |
+
"rstrip": false,
|
1099 |
+
"single_word": false,
|
1100 |
+
"special": true
|
1101 |
+
},
|
1102 |
+
"128137": {
|
1103 |
+
"content": "<|reserved_special_token_129|>",
|
1104 |
+
"lstrip": false,
|
1105 |
+
"normalized": false,
|
1106 |
+
"rstrip": false,
|
1107 |
+
"single_word": false,
|
1108 |
+
"special": true
|
1109 |
+
},
|
1110 |
+
"128138": {
|
1111 |
+
"content": "<|reserved_special_token_130|>",
|
1112 |
+
"lstrip": false,
|
1113 |
+
"normalized": false,
|
1114 |
+
"rstrip": false,
|
1115 |
+
"single_word": false,
|
1116 |
+
"special": true
|
1117 |
+
},
|
1118 |
+
"128139": {
|
1119 |
+
"content": "<|reserved_special_token_131|>",
|
1120 |
+
"lstrip": false,
|
1121 |
+
"normalized": false,
|
1122 |
+
"rstrip": false,
|
1123 |
+
"single_word": false,
|
1124 |
+
"special": true
|
1125 |
+
},
|
1126 |
+
"128140": {
|
1127 |
+
"content": "<|reserved_special_token_132|>",
|
1128 |
+
"lstrip": false,
|
1129 |
+
"normalized": false,
|
1130 |
+
"rstrip": false,
|
1131 |
+
"single_word": false,
|
1132 |
+
"special": true
|
1133 |
+
},
|
1134 |
+
"128141": {
|
1135 |
+
"content": "<|reserved_special_token_133|>",
|
1136 |
+
"lstrip": false,
|
1137 |
+
"normalized": false,
|
1138 |
+
"rstrip": false,
|
1139 |
+
"single_word": false,
|
1140 |
+
"special": true
|
1141 |
+
},
|
1142 |
+
"128142": {
|
1143 |
+
"content": "<|reserved_special_token_134|>",
|
1144 |
+
"lstrip": false,
|
1145 |
+
"normalized": false,
|
1146 |
+
"rstrip": false,
|
1147 |
+
"single_word": false,
|
1148 |
+
"special": true
|
1149 |
+
},
|
1150 |
+
"128143": {
|
1151 |
+
"content": "<|reserved_special_token_135|>",
|
1152 |
+
"lstrip": false,
|
1153 |
+
"normalized": false,
|
1154 |
+
"rstrip": false,
|
1155 |
+
"single_word": false,
|
1156 |
+
"special": true
|
1157 |
+
},
|
1158 |
+
"128144": {
|
1159 |
+
"content": "<|reserved_special_token_136|>",
|
1160 |
+
"lstrip": false,
|
1161 |
+
"normalized": false,
|
1162 |
+
"rstrip": false,
|
1163 |
+
"single_word": false,
|
1164 |
+
"special": true
|
1165 |
+
},
|
1166 |
+
"128145": {
|
1167 |
+
"content": "<|reserved_special_token_137|>",
|
1168 |
+
"lstrip": false,
|
1169 |
+
"normalized": false,
|
1170 |
+
"rstrip": false,
|
1171 |
+
"single_word": false,
|
1172 |
+
"special": true
|
1173 |
+
},
|
1174 |
+
"128146": {
|
1175 |
+
"content": "<|reserved_special_token_138|>",
|
1176 |
+
"lstrip": false,
|
1177 |
+
"normalized": false,
|
1178 |
+
"rstrip": false,
|
1179 |
+
"single_word": false,
|
1180 |
+
"special": true
|
1181 |
+
},
|
1182 |
+
"128147": {
|
1183 |
+
"content": "<|reserved_special_token_139|>",
|
1184 |
+
"lstrip": false,
|
1185 |
+
"normalized": false,
|
1186 |
+
"rstrip": false,
|
1187 |
+
"single_word": false,
|
1188 |
+
"special": true
|
1189 |
+
},
|
1190 |
+
"128148": {
|
1191 |
+
"content": "<|reserved_special_token_140|>",
|
1192 |
+
"lstrip": false,
|
1193 |
+
"normalized": false,
|
1194 |
+
"rstrip": false,
|
1195 |
+
"single_word": false,
|
1196 |
+
"special": true
|
1197 |
+
},
|
1198 |
+
"128149": {
|
1199 |
+
"content": "<|reserved_special_token_141|>",
|
1200 |
+
"lstrip": false,
|
1201 |
+
"normalized": false,
|
1202 |
+
"rstrip": false,
|
1203 |
+
"single_word": false,
|
1204 |
+
"special": true
|
1205 |
+
},
|
1206 |
+
"128150": {
|
1207 |
+
"content": "<|reserved_special_token_142|>",
|
1208 |
+
"lstrip": false,
|
1209 |
+
"normalized": false,
|
1210 |
+
"rstrip": false,
|
1211 |
+
"single_word": false,
|
1212 |
+
"special": true
|
1213 |
+
},
|
1214 |
+
"128151": {
|
1215 |
+
"content": "<|reserved_special_token_143|>",
|
1216 |
+
"lstrip": false,
|
1217 |
+
"normalized": false,
|
1218 |
+
"rstrip": false,
|
1219 |
+
"single_word": false,
|
1220 |
+
"special": true
|
1221 |
+
},
|
1222 |
+
"128152": {
|
1223 |
+
"content": "<|reserved_special_token_144|>",
|
1224 |
+
"lstrip": false,
|
1225 |
+
"normalized": false,
|
1226 |
+
"rstrip": false,
|
1227 |
+
"single_word": false,
|
1228 |
+
"special": true
|
1229 |
+
},
|
1230 |
+
"128153": {
|
1231 |
+
"content": "<|reserved_special_token_145|>",
|
1232 |
+
"lstrip": false,
|
1233 |
+
"normalized": false,
|
1234 |
+
"rstrip": false,
|
1235 |
+
"single_word": false,
|
1236 |
+
"special": true
|
1237 |
+
},
|
1238 |
+
"128154": {
|
1239 |
+
"content": "<|reserved_special_token_146|>",
|
1240 |
+
"lstrip": false,
|
1241 |
+
"normalized": false,
|
1242 |
+
"rstrip": false,
|
1243 |
+
"single_word": false,
|
1244 |
+
"special": true
|
1245 |
+
},
|
1246 |
+
"128155": {
|
1247 |
+
"content": "<|reserved_special_token_147|>",
|
1248 |
+
"lstrip": false,
|
1249 |
+
"normalized": false,
|
1250 |
+
"rstrip": false,
|
1251 |
+
"single_word": false,
|
1252 |
+
"special": true
|
1253 |
+
},
|
1254 |
+
"128156": {
|
1255 |
+
"content": "<|reserved_special_token_148|>",
|
1256 |
+
"lstrip": false,
|
1257 |
+
"normalized": false,
|
1258 |
+
"rstrip": false,
|
1259 |
+
"single_word": false,
|
1260 |
+
"special": true
|
1261 |
+
},
|
1262 |
+
"128157": {
|
1263 |
+
"content": "<|reserved_special_token_149|>",
|
1264 |
+
"lstrip": false,
|
1265 |
+
"normalized": false,
|
1266 |
+
"rstrip": false,
|
1267 |
+
"single_word": false,
|
1268 |
+
"special": true
|
1269 |
+
},
|
1270 |
+
"128158": {
|
1271 |
+
"content": "<|reserved_special_token_150|>",
|
1272 |
+
"lstrip": false,
|
1273 |
+
"normalized": false,
|
1274 |
+
"rstrip": false,
|
1275 |
+
"single_word": false,
|
1276 |
+
"special": true
|
1277 |
+
},
|
1278 |
+
"128159": {
|
1279 |
+
"content": "<|reserved_special_token_151|>",
|
1280 |
+
"lstrip": false,
|
1281 |
+
"normalized": false,
|
1282 |
+
"rstrip": false,
|
1283 |
+
"single_word": false,
|
1284 |
+
"special": true
|
1285 |
+
},
|
1286 |
+
"128160": {
|
1287 |
+
"content": "<|reserved_special_token_152|>",
|
1288 |
+
"lstrip": false,
|
1289 |
+
"normalized": false,
|
1290 |
+
"rstrip": false,
|
1291 |
+
"single_word": false,
|
1292 |
+
"special": true
|
1293 |
+
},
|
1294 |
+
"128161": {
|
1295 |
+
"content": "<|reserved_special_token_153|>",
|
1296 |
+
"lstrip": false,
|
1297 |
+
"normalized": false,
|
1298 |
+
"rstrip": false,
|
1299 |
+
"single_word": false,
|
1300 |
+
"special": true
|
1301 |
+
},
|
1302 |
+
"128162": {
|
1303 |
+
"content": "<|reserved_special_token_154|>",
|
1304 |
+
"lstrip": false,
|
1305 |
+
"normalized": false,
|
1306 |
+
"rstrip": false,
|
1307 |
+
"single_word": false,
|
1308 |
+
"special": true
|
1309 |
+
},
|
1310 |
+
"128163": {
|
1311 |
+
"content": "<|reserved_special_token_155|>",
|
1312 |
+
"lstrip": false,
|
1313 |
+
"normalized": false,
|
1314 |
+
"rstrip": false,
|
1315 |
+
"single_word": false,
|
1316 |
+
"special": true
|
1317 |
+
},
|
1318 |
+
"128164": {
|
1319 |
+
"content": "<|reserved_special_token_156|>",
|
1320 |
+
"lstrip": false,
|
1321 |
+
"normalized": false,
|
1322 |
+
"rstrip": false,
|
1323 |
+
"single_word": false,
|
1324 |
+
"special": true
|
1325 |
+
},
|
1326 |
+
"128165": {
|
1327 |
+
"content": "<|reserved_special_token_157|>",
|
1328 |
+
"lstrip": false,
|
1329 |
+
"normalized": false,
|
1330 |
+
"rstrip": false,
|
1331 |
+
"single_word": false,
|
1332 |
+
"special": true
|
1333 |
+
},
|
1334 |
+
"128166": {
|
1335 |
+
"content": "<|reserved_special_token_158|>",
|
1336 |
+
"lstrip": false,
|
1337 |
+
"normalized": false,
|
1338 |
+
"rstrip": false,
|
1339 |
+
"single_word": false,
|
1340 |
+
"special": true
|
1341 |
+
},
|
1342 |
+
"128167": {
|
1343 |
+
"content": "<|reserved_special_token_159|>",
|
1344 |
+
"lstrip": false,
|
1345 |
+
"normalized": false,
|
1346 |
+
"rstrip": false,
|
1347 |
+
"single_word": false,
|
1348 |
+
"special": true
|
1349 |
+
},
|
1350 |
+
"128168": {
|
1351 |
+
"content": "<|reserved_special_token_160|>",
|
1352 |
+
"lstrip": false,
|
1353 |
+
"normalized": false,
|
1354 |
+
"rstrip": false,
|
1355 |
+
"single_word": false,
|
1356 |
+
"special": true
|
1357 |
+
},
|
1358 |
+
"128169": {
|
1359 |
+
"content": "<|reserved_special_token_161|>",
|
1360 |
+
"lstrip": false,
|
1361 |
+
"normalized": false,
|
1362 |
+
"rstrip": false,
|
1363 |
+
"single_word": false,
|
1364 |
+
"special": true
|
1365 |
+
},
|
1366 |
+
"128170": {
|
1367 |
+
"content": "<|reserved_special_token_162|>",
|
1368 |
+
"lstrip": false,
|
1369 |
+
"normalized": false,
|
1370 |
+
"rstrip": false,
|
1371 |
+
"single_word": false,
|
1372 |
+
"special": true
|
1373 |
+
},
|
1374 |
+
"128171": {
|
1375 |
+
"content": "<|reserved_special_token_163|>",
|
1376 |
+
"lstrip": false,
|
1377 |
+
"normalized": false,
|
1378 |
+
"rstrip": false,
|
1379 |
+
"single_word": false,
|
1380 |
+
"special": true
|
1381 |
+
},
|
1382 |
+
"128172": {
|
1383 |
+
"content": "<|reserved_special_token_164|>",
|
1384 |
+
"lstrip": false,
|
1385 |
+
"normalized": false,
|
1386 |
+
"rstrip": false,
|
1387 |
+
"single_word": false,
|
1388 |
+
"special": true
|
1389 |
+
},
|
1390 |
+
"128173": {
|
1391 |
+
"content": "<|reserved_special_token_165|>",
|
1392 |
+
"lstrip": false,
|
1393 |
+
"normalized": false,
|
1394 |
+
"rstrip": false,
|
1395 |
+
"single_word": false,
|
1396 |
+
"special": true
|
1397 |
+
},
|
1398 |
+
"128174": {
|
1399 |
+
"content": "<|reserved_special_token_166|>",
|
1400 |
+
"lstrip": false,
|
1401 |
+
"normalized": false,
|
1402 |
+
"rstrip": false,
|
1403 |
+
"single_word": false,
|
1404 |
+
"special": true
|
1405 |
+
},
|
1406 |
+
"128175": {
|
1407 |
+
"content": "<|reserved_special_token_167|>",
|
1408 |
+
"lstrip": false,
|
1409 |
+
"normalized": false,
|
1410 |
+
"rstrip": false,
|
1411 |
+
"single_word": false,
|
1412 |
+
"special": true
|
1413 |
+
},
|
1414 |
+
"128176": {
|
1415 |
+
"content": "<|reserved_special_token_168|>",
|
1416 |
+
"lstrip": false,
|
1417 |
+
"normalized": false,
|
1418 |
+
"rstrip": false,
|
1419 |
+
"single_word": false,
|
1420 |
+
"special": true
|
1421 |
+
},
|
1422 |
+
"128177": {
|
1423 |
+
"content": "<|reserved_special_token_169|>",
|
1424 |
+
"lstrip": false,
|
1425 |
+
"normalized": false,
|
1426 |
+
"rstrip": false,
|
1427 |
+
"single_word": false,
|
1428 |
+
"special": true
|
1429 |
+
},
|
1430 |
+
"128178": {
|
1431 |
+
"content": "<|reserved_special_token_170|>",
|
1432 |
+
"lstrip": false,
|
1433 |
+
"normalized": false,
|
1434 |
+
"rstrip": false,
|
1435 |
+
"single_word": false,
|
1436 |
+
"special": true
|
1437 |
+
},
|
1438 |
+
"128179": {
|
1439 |
+
"content": "<|reserved_special_token_171|>",
|
1440 |
+
"lstrip": false,
|
1441 |
+
"normalized": false,
|
1442 |
+
"rstrip": false,
|
1443 |
+
"single_word": false,
|
1444 |
+
"special": true
|
1445 |
+
},
|
1446 |
+
"128180": {
|
1447 |
+
"content": "<|reserved_special_token_172|>",
|
1448 |
+
"lstrip": false,
|
1449 |
+
"normalized": false,
|
1450 |
+
"rstrip": false,
|
1451 |
+
"single_word": false,
|
1452 |
+
"special": true
|
1453 |
+
},
|
1454 |
+
"128181": {
|
1455 |
+
"content": "<|reserved_special_token_173|>",
|
1456 |
+
"lstrip": false,
|
1457 |
+
"normalized": false,
|
1458 |
+
"rstrip": false,
|
1459 |
+
"single_word": false,
|
1460 |
+
"special": true
|
1461 |
+
},
|
1462 |
+
"128182": {
|
1463 |
+
"content": "<|reserved_special_token_174|>",
|
1464 |
+
"lstrip": false,
|
1465 |
+
"normalized": false,
|
1466 |
+
"rstrip": false,
|
1467 |
+
"single_word": false,
|
1468 |
+
"special": true
|
1469 |
+
},
|
1470 |
+
"128183": {
|
1471 |
+
"content": "<|reserved_special_token_175|>",
|
1472 |
+
"lstrip": false,
|
1473 |
+
"normalized": false,
|
1474 |
+
"rstrip": false,
|
1475 |
+
"single_word": false,
|
1476 |
+
"special": true
|
1477 |
+
},
|
1478 |
+
"128184": {
|
1479 |
+
"content": "<|reserved_special_token_176|>",
|
1480 |
+
"lstrip": false,
|
1481 |
+
"normalized": false,
|
1482 |
+
"rstrip": false,
|
1483 |
+
"single_word": false,
|
1484 |
+
"special": true
|
1485 |
+
},
|
1486 |
+
"128185": {
|
1487 |
+
"content": "<|reserved_special_token_177|>",
|
1488 |
+
"lstrip": false,
|
1489 |
+
"normalized": false,
|
1490 |
+
"rstrip": false,
|
1491 |
+
"single_word": false,
|
1492 |
+
"special": true
|
1493 |
+
},
|
1494 |
+
"128186": {
|
1495 |
+
"content": "<|reserved_special_token_178|>",
|
1496 |
+
"lstrip": false,
|
1497 |
+
"normalized": false,
|
1498 |
+
"rstrip": false,
|
1499 |
+
"single_word": false,
|
1500 |
+
"special": true
|
1501 |
+
},
|
1502 |
+
"128187": {
|
1503 |
+
"content": "<|reserved_special_token_179|>",
|
1504 |
+
"lstrip": false,
|
1505 |
+
"normalized": false,
|
1506 |
+
"rstrip": false,
|
1507 |
+
"single_word": false,
|
1508 |
+
"special": true
|
1509 |
+
},
|
1510 |
+
"128188": {
|
1511 |
+
"content": "<|reserved_special_token_180|>",
|
1512 |
+
"lstrip": false,
|
1513 |
+
"normalized": false,
|
1514 |
+
"rstrip": false,
|
1515 |
+
"single_word": false,
|
1516 |
+
"special": true
|
1517 |
+
},
|
1518 |
+
"128189": {
|
1519 |
+
"content": "<|reserved_special_token_181|>",
|
1520 |
+
"lstrip": false,
|
1521 |
+
"normalized": false,
|
1522 |
+
"rstrip": false,
|
1523 |
+
"single_word": false,
|
1524 |
+
"special": true
|
1525 |
+
},
|
1526 |
+
"128190": {
|
1527 |
+
"content": "<|reserved_special_token_182|>",
|
1528 |
+
"lstrip": false,
|
1529 |
+
"normalized": false,
|
1530 |
+
"rstrip": false,
|
1531 |
+
"single_word": false,
|
1532 |
+
"special": true
|
1533 |
+
},
|
1534 |
+
"128191": {
|
1535 |
+
"content": "<|reserved_special_token_183|>",
|
1536 |
+
"lstrip": false,
|
1537 |
+
"normalized": false,
|
1538 |
+
"rstrip": false,
|
1539 |
+
"single_word": false,
|
1540 |
+
"special": true
|
1541 |
+
},
|
1542 |
+
"128192": {
|
1543 |
+
"content": "<|reserved_special_token_184|>",
|
1544 |
+
"lstrip": false,
|
1545 |
+
"normalized": false,
|
1546 |
+
"rstrip": false,
|
1547 |
+
"single_word": false,
|
1548 |
+
"special": true
|
1549 |
+
},
|
1550 |
+
"128193": {
|
1551 |
+
"content": "<|reserved_special_token_185|>",
|
1552 |
+
"lstrip": false,
|
1553 |
+
"normalized": false,
|
1554 |
+
"rstrip": false,
|
1555 |
+
"single_word": false,
|
1556 |
+
"special": true
|
1557 |
+
},
|
1558 |
+
"128194": {
|
1559 |
+
"content": "<|reserved_special_token_186|>",
|
1560 |
+
"lstrip": false,
|
1561 |
+
"normalized": false,
|
1562 |
+
"rstrip": false,
|
1563 |
+
"single_word": false,
|
1564 |
+
"special": true
|
1565 |
+
},
|
1566 |
+
"128195": {
|
1567 |
+
"content": "<|reserved_special_token_187|>",
|
1568 |
+
"lstrip": false,
|
1569 |
+
"normalized": false,
|
1570 |
+
"rstrip": false,
|
1571 |
+
"single_word": false,
|
1572 |
+
"special": true
|
1573 |
+
},
|
1574 |
+
"128196": {
|
1575 |
+
"content": "<|reserved_special_token_188|>",
|
1576 |
+
"lstrip": false,
|
1577 |
+
"normalized": false,
|
1578 |
+
"rstrip": false,
|
1579 |
+
"single_word": false,
|
1580 |
+
"special": true
|
1581 |
+
},
|
1582 |
+
"128197": {
|
1583 |
+
"content": "<|reserved_special_token_189|>",
|
1584 |
+
"lstrip": false,
|
1585 |
+
"normalized": false,
|
1586 |
+
"rstrip": false,
|
1587 |
+
"single_word": false,
|
1588 |
+
"special": true
|
1589 |
+
},
|
1590 |
+
"128198": {
|
1591 |
+
"content": "<|reserved_special_token_190|>",
|
1592 |
+
"lstrip": false,
|
1593 |
+
"normalized": false,
|
1594 |
+
"rstrip": false,
|
1595 |
+
"single_word": false,
|
1596 |
+
"special": true
|
1597 |
+
},
|
1598 |
+
"128199": {
|
1599 |
+
"content": "<|reserved_special_token_191|>",
|
1600 |
+
"lstrip": false,
|
1601 |
+
"normalized": false,
|
1602 |
+
"rstrip": false,
|
1603 |
+
"single_word": false,
|
1604 |
+
"special": true
|
1605 |
+
},
|
1606 |
+
"128200": {
|
1607 |
+
"content": "<|reserved_special_token_192|>",
|
1608 |
+
"lstrip": false,
|
1609 |
+
"normalized": false,
|
1610 |
+
"rstrip": false,
|
1611 |
+
"single_word": false,
|
1612 |
+
"special": true
|
1613 |
+
},
|
1614 |
+
"128201": {
|
1615 |
+
"content": "<|reserved_special_token_193|>",
|
1616 |
+
"lstrip": false,
|
1617 |
+
"normalized": false,
|
1618 |
+
"rstrip": false,
|
1619 |
+
"single_word": false,
|
1620 |
+
"special": true
|
1621 |
+
},
|
1622 |
+
"128202": {
|
1623 |
+
"content": "<|reserved_special_token_194|>",
|
1624 |
+
"lstrip": false,
|
1625 |
+
"normalized": false,
|
1626 |
+
"rstrip": false,
|
1627 |
+
"single_word": false,
|
1628 |
+
"special": true
|
1629 |
+
},
|
1630 |
+
"128203": {
|
1631 |
+
"content": "<|reserved_special_token_195|>",
|
1632 |
+
"lstrip": false,
|
1633 |
+
"normalized": false,
|
1634 |
+
"rstrip": false,
|
1635 |
+
"single_word": false,
|
1636 |
+
"special": true
|
1637 |
+
},
|
1638 |
+
"128204": {
|
1639 |
+
"content": "<|reserved_special_token_196|>",
|
1640 |
+
"lstrip": false,
|
1641 |
+
"normalized": false,
|
1642 |
+
"rstrip": false,
|
1643 |
+
"single_word": false,
|
1644 |
+
"special": true
|
1645 |
+
},
|
1646 |
+
"128205": {
|
1647 |
+
"content": "<|reserved_special_token_197|>",
|
1648 |
+
"lstrip": false,
|
1649 |
+
"normalized": false,
|
1650 |
+
"rstrip": false,
|
1651 |
+
"single_word": false,
|
1652 |
+
"special": true
|
1653 |
+
},
|
1654 |
+
"128206": {
|
1655 |
+
"content": "<|reserved_special_token_198|>",
|
1656 |
+
"lstrip": false,
|
1657 |
+
"normalized": false,
|
1658 |
+
"rstrip": false,
|
1659 |
+
"single_word": false,
|
1660 |
+
"special": true
|
1661 |
+
},
|
1662 |
+
"128207": {
|
1663 |
+
"content": "<|reserved_special_token_199|>",
|
1664 |
+
"lstrip": false,
|
1665 |
+
"normalized": false,
|
1666 |
+
"rstrip": false,
|
1667 |
+
"single_word": false,
|
1668 |
+
"special": true
|
1669 |
+
},
|
1670 |
+
"128208": {
|
1671 |
+
"content": "<|reserved_special_token_200|>",
|
1672 |
+
"lstrip": false,
|
1673 |
+
"normalized": false,
|
1674 |
+
"rstrip": false,
|
1675 |
+
"single_word": false,
|
1676 |
+
"special": true
|
1677 |
+
},
|
1678 |
+
"128209": {
|
1679 |
+
"content": "<|reserved_special_token_201|>",
|
1680 |
+
"lstrip": false,
|
1681 |
+
"normalized": false,
|
1682 |
+
"rstrip": false,
|
1683 |
+
"single_word": false,
|
1684 |
+
"special": true
|
1685 |
+
},
|
1686 |
+
"128210": {
|
1687 |
+
"content": "<|reserved_special_token_202|>",
|
1688 |
+
"lstrip": false,
|
1689 |
+
"normalized": false,
|
1690 |
+
"rstrip": false,
|
1691 |
+
"single_word": false,
|
1692 |
+
"special": true
|
1693 |
+
},
|
1694 |
+
"128211": {
|
1695 |
+
"content": "<|reserved_special_token_203|>",
|
1696 |
+
"lstrip": false,
|
1697 |
+
"normalized": false,
|
1698 |
+
"rstrip": false,
|
1699 |
+
"single_word": false,
|
1700 |
+
"special": true
|
1701 |
+
},
|
1702 |
+
"128212": {
|
1703 |
+
"content": "<|reserved_special_token_204|>",
|
1704 |
+
"lstrip": false,
|
1705 |
+
"normalized": false,
|
1706 |
+
"rstrip": false,
|
1707 |
+
"single_word": false,
|
1708 |
+
"special": true
|
1709 |
+
},
|
1710 |
+
"128213": {
|
1711 |
+
"content": "<|reserved_special_token_205|>",
|
1712 |
+
"lstrip": false,
|
1713 |
+
"normalized": false,
|
1714 |
+
"rstrip": false,
|
1715 |
+
"single_word": false,
|
1716 |
+
"special": true
|
1717 |
+
},
|
1718 |
+
"128214": {
|
1719 |
+
"content": "<|reserved_special_token_206|>",
|
1720 |
+
"lstrip": false,
|
1721 |
+
"normalized": false,
|
1722 |
+
"rstrip": false,
|
1723 |
+
"single_word": false,
|
1724 |
+
"special": true
|
1725 |
+
},
|
1726 |
+
"128215": {
|
1727 |
+
"content": "<|reserved_special_token_207|>",
|
1728 |
+
"lstrip": false,
|
1729 |
+
"normalized": false,
|
1730 |
+
"rstrip": false,
|
1731 |
+
"single_word": false,
|
1732 |
+
"special": true
|
1733 |
+
},
|
1734 |
+
"128216": {
|
1735 |
+
"content": "<|reserved_special_token_208|>",
|
1736 |
+
"lstrip": false,
|
1737 |
+
"normalized": false,
|
1738 |
+
"rstrip": false,
|
1739 |
+
"single_word": false,
|
1740 |
+
"special": true
|
1741 |
+
},
|
1742 |
+
"128217": {
|
1743 |
+
"content": "<|reserved_special_token_209|>",
|
1744 |
+
"lstrip": false,
|
1745 |
+
"normalized": false,
|
1746 |
+
"rstrip": false,
|
1747 |
+
"single_word": false,
|
1748 |
+
"special": true
|
1749 |
+
},
|
1750 |
+
"128218": {
|
1751 |
+
"content": "<|reserved_special_token_210|>",
|
1752 |
+
"lstrip": false,
|
1753 |
+
"normalized": false,
|
1754 |
+
"rstrip": false,
|
1755 |
+
"single_word": false,
|
1756 |
+
"special": true
|
1757 |
+
},
|
1758 |
+
"128219": {
|
1759 |
+
"content": "<|reserved_special_token_211|>",
|
1760 |
+
"lstrip": false,
|
1761 |
+
"normalized": false,
|
1762 |
+
"rstrip": false,
|
1763 |
+
"single_word": false,
|
1764 |
+
"special": true
|
1765 |
+
},
|
1766 |
+
"128220": {
|
1767 |
+
"content": "<|reserved_special_token_212|>",
|
1768 |
+
"lstrip": false,
|
1769 |
+
"normalized": false,
|
1770 |
+
"rstrip": false,
|
1771 |
+
"single_word": false,
|
1772 |
+
"special": true
|
1773 |
+
},
|
1774 |
+
"128221": {
|
1775 |
+
"content": "<|reserved_special_token_213|>",
|
1776 |
+
"lstrip": false,
|
1777 |
+
"normalized": false,
|
1778 |
+
"rstrip": false,
|
1779 |
+
"single_word": false,
|
1780 |
+
"special": true
|
1781 |
+
},
|
1782 |
+
"128222": {
|
1783 |
+
"content": "<|reserved_special_token_214|>",
|
1784 |
+
"lstrip": false,
|
1785 |
+
"normalized": false,
|
1786 |
+
"rstrip": false,
|
1787 |
+
"single_word": false,
|
1788 |
+
"special": true
|
1789 |
+
},
|
1790 |
+
"128223": {
|
1791 |
+
"content": "<|reserved_special_token_215|>",
|
1792 |
+
"lstrip": false,
|
1793 |
+
"normalized": false,
|
1794 |
+
"rstrip": false,
|
1795 |
+
"single_word": false,
|
1796 |
+
"special": true
|
1797 |
+
},
|
1798 |
+
"128224": {
|
1799 |
+
"content": "<|reserved_special_token_216|>",
|
1800 |
+
"lstrip": false,
|
1801 |
+
"normalized": false,
|
1802 |
+
"rstrip": false,
|
1803 |
+
"single_word": false,
|
1804 |
+
"special": true
|
1805 |
+
},
|
1806 |
+
"128225": {
|
1807 |
+
"content": "<|reserved_special_token_217|>",
|
1808 |
+
"lstrip": false,
|
1809 |
+
"normalized": false,
|
1810 |
+
"rstrip": false,
|
1811 |
+
"single_word": false,
|
1812 |
+
"special": true
|
1813 |
+
},
|
1814 |
+
"128226": {
|
1815 |
+
"content": "<|reserved_special_token_218|>",
|
1816 |
+
"lstrip": false,
|
1817 |
+
"normalized": false,
|
1818 |
+
"rstrip": false,
|
1819 |
+
"single_word": false,
|
1820 |
+
"special": true
|
1821 |
+
},
|
1822 |
+
"128227": {
|
1823 |
+
"content": "<|reserved_special_token_219|>",
|
1824 |
+
"lstrip": false,
|
1825 |
+
"normalized": false,
|
1826 |
+
"rstrip": false,
|
1827 |
+
"single_word": false,
|
1828 |
+
"special": true
|
1829 |
+
},
|
1830 |
+
"128228": {
|
1831 |
+
"content": "<|reserved_special_token_220|>",
|
1832 |
+
"lstrip": false,
|
1833 |
+
"normalized": false,
|
1834 |
+
"rstrip": false,
|
1835 |
+
"single_word": false,
|
1836 |
+
"special": true
|
1837 |
+
},
|
1838 |
+
"128229": {
|
1839 |
+
"content": "<|reserved_special_token_221|>",
|
1840 |
+
"lstrip": false,
|
1841 |
+
"normalized": false,
|
1842 |
+
"rstrip": false,
|
1843 |
+
"single_word": false,
|
1844 |
+
"special": true
|
1845 |
+
},
|
1846 |
+
"128230": {
|
1847 |
+
"content": "<|reserved_special_token_222|>",
|
1848 |
+
"lstrip": false,
|
1849 |
+
"normalized": false,
|
1850 |
+
"rstrip": false,
|
1851 |
+
"single_word": false,
|
1852 |
+
"special": true
|
1853 |
+
},
|
1854 |
+
"128231": {
|
1855 |
+
"content": "<|reserved_special_token_223|>",
|
1856 |
+
"lstrip": false,
|
1857 |
+
"normalized": false,
|
1858 |
+
"rstrip": false,
|
1859 |
+
"single_word": false,
|
1860 |
+
"special": true
|
1861 |
+
},
|
1862 |
+
"128232": {
|
1863 |
+
"content": "<|reserved_special_token_224|>",
|
1864 |
+
"lstrip": false,
|
1865 |
+
"normalized": false,
|
1866 |
+
"rstrip": false,
|
1867 |
+
"single_word": false,
|
1868 |
+
"special": true
|
1869 |
+
},
|
1870 |
+
"128233": {
|
1871 |
+
"content": "<|reserved_special_token_225|>",
|
1872 |
+
"lstrip": false,
|
1873 |
+
"normalized": false,
|
1874 |
+
"rstrip": false,
|
1875 |
+
"single_word": false,
|
1876 |
+
"special": true
|
1877 |
+
},
|
1878 |
+
"128234": {
|
1879 |
+
"content": "<|reserved_special_token_226|>",
|
1880 |
+
"lstrip": false,
|
1881 |
+
"normalized": false,
|
1882 |
+
"rstrip": false,
|
1883 |
+
"single_word": false,
|
1884 |
+
"special": true
|
1885 |
+
},
|
1886 |
+
"128235": {
|
1887 |
+
"content": "<|reserved_special_token_227|>",
|
1888 |
+
"lstrip": false,
|
1889 |
+
"normalized": false,
|
1890 |
+
"rstrip": false,
|
1891 |
+
"single_word": false,
|
1892 |
+
"special": true
|
1893 |
+
},
|
1894 |
+
"128236": {
|
1895 |
+
"content": "<|reserved_special_token_228|>",
|
1896 |
+
"lstrip": false,
|
1897 |
+
"normalized": false,
|
1898 |
+
"rstrip": false,
|
1899 |
+
"single_word": false,
|
1900 |
+
"special": true
|
1901 |
+
},
|
1902 |
+
"128237": {
|
1903 |
+
"content": "<|reserved_special_token_229|>",
|
1904 |
+
"lstrip": false,
|
1905 |
+
"normalized": false,
|
1906 |
+
"rstrip": false,
|
1907 |
+
"single_word": false,
|
1908 |
+
"special": true
|
1909 |
+
},
|
1910 |
+
"128238": {
|
1911 |
+
"content": "<|reserved_special_token_230|>",
|
1912 |
+
"lstrip": false,
|
1913 |
+
"normalized": false,
|
1914 |
+
"rstrip": false,
|
1915 |
+
"single_word": false,
|
1916 |
+
"special": true
|
1917 |
+
},
|
1918 |
+
"128239": {
|
1919 |
+
"content": "<|reserved_special_token_231|>",
|
1920 |
+
"lstrip": false,
|
1921 |
+
"normalized": false,
|
1922 |
+
"rstrip": false,
|
1923 |
+
"single_word": false,
|
1924 |
+
"special": true
|
1925 |
+
},
|
1926 |
+
"128240": {
|
1927 |
+
"content": "<|reserved_special_token_232|>",
|
1928 |
+
"lstrip": false,
|
1929 |
+
"normalized": false,
|
1930 |
+
"rstrip": false,
|
1931 |
+
"single_word": false,
|
1932 |
+
"special": true
|
1933 |
+
},
|
1934 |
+
"128241": {
|
1935 |
+
"content": "<|reserved_special_token_233|>",
|
1936 |
+
"lstrip": false,
|
1937 |
+
"normalized": false,
|
1938 |
+
"rstrip": false,
|
1939 |
+
"single_word": false,
|
1940 |
+
"special": true
|
1941 |
+
},
|
1942 |
+
"128242": {
|
1943 |
+
"content": "<|reserved_special_token_234|>",
|
1944 |
+
"lstrip": false,
|
1945 |
+
"normalized": false,
|
1946 |
+
"rstrip": false,
|
1947 |
+
"single_word": false,
|
1948 |
+
"special": true
|
1949 |
+
},
|
1950 |
+
"128243": {
|
1951 |
+
"content": "<|reserved_special_token_235|>",
|
1952 |
+
"lstrip": false,
|
1953 |
+
"normalized": false,
|
1954 |
+
"rstrip": false,
|
1955 |
+
"single_word": false,
|
1956 |
+
"special": true
|
1957 |
+
},
|
1958 |
+
"128244": {
|
1959 |
+
"content": "<|reserved_special_token_236|>",
|
1960 |
+
"lstrip": false,
|
1961 |
+
"normalized": false,
|
1962 |
+
"rstrip": false,
|
1963 |
+
"single_word": false,
|
1964 |
+
"special": true
|
1965 |
+
},
|
1966 |
+
"128245": {
|
1967 |
+
"content": "<|reserved_special_token_237|>",
|
1968 |
+
"lstrip": false,
|
1969 |
+
"normalized": false,
|
1970 |
+
"rstrip": false,
|
1971 |
+
"single_word": false,
|
1972 |
+
"special": true
|
1973 |
+
},
|
1974 |
+
"128246": {
|
1975 |
+
"content": "<|reserved_special_token_238|>",
|
1976 |
+
"lstrip": false,
|
1977 |
+
"normalized": false,
|
1978 |
+
"rstrip": false,
|
1979 |
+
"single_word": false,
|
1980 |
+
"special": true
|
1981 |
+
},
|
1982 |
+
"128247": {
|
1983 |
+
"content": "<|reserved_special_token_239|>",
|
1984 |
+
"lstrip": false,
|
1985 |
+
"normalized": false,
|
1986 |
+
"rstrip": false,
|
1987 |
+
"single_word": false,
|
1988 |
+
"special": true
|
1989 |
+
},
|
1990 |
+
"128248": {
|
1991 |
+
"content": "<|reserved_special_token_240|>",
|
1992 |
+
"lstrip": false,
|
1993 |
+
"normalized": false,
|
1994 |
+
"rstrip": false,
|
1995 |
+
"single_word": false,
|
1996 |
+
"special": true
|
1997 |
+
},
|
1998 |
+
"128249": {
|
1999 |
+
"content": "<|reserved_special_token_241|>",
|
2000 |
+
"lstrip": false,
|
2001 |
+
"normalized": false,
|
2002 |
+
"rstrip": false,
|
2003 |
+
"single_word": false,
|
2004 |
+
"special": true
|
2005 |
+
},
|
2006 |
+
"128250": {
|
2007 |
+
"content": "<|reserved_special_token_242|>",
|
2008 |
+
"lstrip": false,
|
2009 |
+
"normalized": false,
|
2010 |
+
"rstrip": false,
|
2011 |
+
"single_word": false,
|
2012 |
+
"special": true
|
2013 |
+
},
|
2014 |
+
"128251": {
|
2015 |
+
"content": "<|reserved_special_token_243|>",
|
2016 |
+
"lstrip": false,
|
2017 |
+
"normalized": false,
|
2018 |
+
"rstrip": false,
|
2019 |
+
"single_word": false,
|
2020 |
+
"special": true
|
2021 |
+
},
|
2022 |
+
"128252": {
|
2023 |
+
"content": "<|reserved_special_token_244|>",
|
2024 |
+
"lstrip": false,
|
2025 |
+
"normalized": false,
|
2026 |
+
"rstrip": false,
|
2027 |
+
"single_word": false,
|
2028 |
+
"special": true
|
2029 |
+
},
|
2030 |
+
"128253": {
|
2031 |
+
"content": "<|reserved_special_token_245|>",
|
2032 |
+
"lstrip": false,
|
2033 |
+
"normalized": false,
|
2034 |
+
"rstrip": false,
|
2035 |
+
"single_word": false,
|
2036 |
+
"special": true
|
2037 |
+
},
|
2038 |
+
"128254": {
|
2039 |
+
"content": "<|reserved_special_token_246|>",
|
2040 |
+
"lstrip": false,
|
2041 |
+
"normalized": false,
|
2042 |
+
"rstrip": false,
|
2043 |
+
"single_word": false,
|
2044 |
+
"special": true
|
2045 |
+
},
|
2046 |
+
"128255": {
|
2047 |
+
"content": "<|reserved_special_token_247|>",
|
2048 |
+
"lstrip": false,
|
2049 |
+
"normalized": false,
|
2050 |
+
"rstrip": false,
|
2051 |
+
"single_word": false,
|
2052 |
+
"special": true
|
2053 |
+
}
|
2054 |
+
},
|
2055 |
+
"bos_token": "<|begin▁of▁sentence|>",
|
2056 |
+
"chat_template": "{% if not add_generation_prompt is defined %}{% set add_generation_prompt = false %}{% endif %}{% set ns = namespace(is_first=false, is_tool=false, is_output_first=true, system_prompt='') %}{%- for message in messages %}{%- if message['role'] == 'system' %}{% set ns.system_prompt = message['content'] %}{%- endif %}{%- endfor %}{{bos_token}}{{ns.system_prompt}}{%- for message in messages %}{%- if message['role'] == 'user' %}{%- set ns.is_tool = false -%}{{'<|User|>' + message['content']}}{%- endif %}{%- if message['role'] == 'assistant' and message['content'] is none %}{%- set ns.is_tool = false -%}{%- for tool in message['tool_calls']%}{%- if not ns.is_first %}{{'<|Assistant|><|tool▁calls▁begin|><|tool▁call▁begin|>' + tool['type'] + '<|tool▁sep|>' + tool['function']['name'] + '\\n' + '```json' + '\\n' + tool['function']['arguments'] + '\\n' + '```' + '<|tool▁call▁end|>'}}{%- set ns.is_first = true -%}{%- else %}{{'\\n' + '<|tool▁call▁begin|>' + tool['type'] + '<|tool▁sep|>' + tool['function']['name'] + '\\n' + '```json' + '\\n' + tool['function']['arguments'] + '\\n' + '```' + '<|tool▁call▁end|>'}}{{'<|tool▁calls▁end|><|end▁of▁sentence|>'}}{%- endif %}{%- endfor %}{%- endif %}{%- if message['role'] == 'assistant' and message['content'] is not none %}{%- if ns.is_tool %}{{'<|tool▁outputs▁end|>' + message['content'] + '<|end▁of▁sentence|>'}}{%- set ns.is_tool = false -%}{%- else %}{% set content = message['content'] %}{% if '</think>' in content %}{% set content = content.split('</think>')[-1] %}{% endif %}{{'<|Assistant|>' + content + '<|end▁of▁sentence|>'}}{%- endif %}{%- endif %}{%- if message['role'] == 'tool' %}{%- set ns.is_tool = true -%}{%- if ns.is_output_first %}{{'<|tool▁outputs▁begin|><|tool▁output▁begin|>' + message['content'] + '<|tool▁output▁end|>'}}{%- set ns.is_output_first = false %}{%- else %}{{'\\n<|tool▁output▁begin|>' + message['content'] + '<|tool▁output▁end|>'}}{%- endif %}{%- endif %}{%- endfor -%}{% if ns.is_tool %}{{'<|tool▁outputs▁end|>'}}{% endif %}{% if add_generation_prompt and not ns.is_tool %}{{'<|Assistant|>'}}{% endif %}",
|
2057 |
+
"clean_up_tokenization_spaces": false,
|
2058 |
+
"eos_token": "<|end▁of▁sentence|>",
|
2059 |
+
"extra_special_tokens": {},
|
2060 |
+
"legacy": true,
|
2061 |
+
"model_max_length": 16384,
|
2062 |
+
"pad_token": "<|end▁of▁sentence|>",
|
2063 |
+
"sp_model_kwargs": {},
|
2064 |
+
"tokenizer_class": "LlamaTokenizer",
|
2065 |
+
"unk_token": null,
|
2066 |
+
"use_default_system_prompt": false
|
2067 |
+
}
|
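For reference, a minimal usage sketch of the tokenizer described by this tokenizer_config.json once the repository is loaded with `transformers`. It is not part of the uploaded files; the repository id is a placeholder, and the expected values assume the token map and chat template shown above.

```python
# Minimal sketch, assuming the tokenizer_config.json above ships with the repo.
# "your-namespace/your-model" is a placeholder repository id, not the real one.
from transformers import AutoTokenizer

repo_id = "your-namespace/your-model"  # hypothetical; substitute the actual repo id
tokenizer = AutoTokenizer.from_pretrained(repo_id)

# The reserved tokens are registered via added_tokens_decoder, so they map to
# fixed ids and survive encode/decode round-trips as single tokens.
print(tokenizer.convert_tokens_to_ids("<|reserved_special_token_23|>"))  # expected: 128031

# The bundled chat_template wraps turns in <|User|> / <|Assistant|> markers,
# prepends the bos token and any system prompt, and strips a "</think>" prefix
# from earlier assistant messages before they are re-encoded.
messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "Hello!"},
]
prompt = tokenizer.apply_chat_template(
    messages, tokenize=False, add_generation_prompt=True
)
print(prompt)
```

With `add_generation_prompt=True`, the rendered string should end with the `<|Assistant|>` marker, leaving the model to continue from there.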