TurboPascal committed
Commit 7d92ff5 · 1 Parent(s): fffbd6a

Upload 6 files
config.json CHANGED
@@ -11,13 +11,13 @@
   "intermediate_size": 3840,
   "max_position_embeddings": 2048,
   "model_type": "llama",
-  "num_attention_heads": 32,
+  "num_attention_heads": 16,
   "num_hidden_layers": 24,
   "pad_token_id": 0,
   "rms_norm_eps": 1e-06,
   "tie_word_embeddings": false,
   "torch_dtype": "float16",
-  "transformers_version": "4.28.1",
+  "transformers_version": "4.29.2",
   "use_cache": true,
-  "vocab_size": 54000
+  "vocab_size": 46000
 }
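This config change halves num_attention_heads (32 → 16), shrinks the vocabulary from 54000 to 46000, and records the newer transformers version. A minimal sketch, not part of this commit, of how the updated fields could be inspected with transformers; the repo path is a hypothetical placeholder, since the repo id is not part of this diff:

```python
# Minimal sketch (not part of this commit): inspect the fields changed above.
# "path/to/repo" is a hypothetical placeholder for the model repo or local folder.
from transformers import AutoConfig

config = AutoConfig.from_pretrained("path/to/repo")
print(config.num_attention_heads)  # 16 after this commit (was 32)
print(config.vocab_size)           # 46000 after this commit (was 54000)
```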
generation_config.json CHANGED
@@ -3,5 +3,5 @@
   "bos_token_id": 1,
   "eos_token_id": 2,
   "pad_token_id": 0,
-  "transformers_version": "4.28.1"
+  "transformers_version": "4.29.2"
 }
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:f546c5f6e7ec5842911ad98bea023561e4b15d1f323c8211a130e109d0a127ff
-size 1634336217
+oid sha256:6af03d4d3e08f75b9990fe50882e6cf84dcfc31257c6169102e09e638a8e7534
+size 1585144892
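pytorch_model.bin is tracked with Git LFS, so only the pointer (sha256 oid and byte size) changes here; the new weights are roughly 49 MB smaller, consistent with the reduced vocab_size shrinking the embedding and output matrices. A minimal sketch, not part of this commit, for verifying a downloaded copy against the new pointer:

```python
# Minimal sketch (not part of this commit): verify a downloaded pytorch_model.bin
# against the new LFS pointer above (oid sha256 and size taken from this diff).
import hashlib
import os

path = "pytorch_model.bin"
expected_oid = "6af03d4d3e08f75b9990fe50882e6cf84dcfc31257c6169102e09e638a8e7534"
expected_size = 1585144892

digest = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        digest.update(chunk)

assert os.path.getsize(path) == expected_size, "size mismatch"
assert digest.hexdigest() == expected_oid, "sha256 mismatch"
```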
special_tokens_map.json CHANGED
@@ -1,5 +1,6 @@
 {
   "bos_token": "<s>",
   "eos_token": "</s>",
+  "pad_token": "[PAD]",
   "unk_token": "<unk>"
 }
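A "[PAD]" token is added to the special tokens map. A minimal sketch, not part of this commit, of how such a token is typically registered on a LLaMA tokenizer and mirrored in the model's embedding matrix; the repo path is a hypothetical placeholder:

```python
# Minimal sketch (not part of this commit): register a "[PAD]" token on a LLaMA
# tokenizer and keep the model embeddings in sync with the enlarged vocabulary.
# "path/to/repo" is a hypothetical placeholder for the model repo or local folder.
from transformers import LlamaForCausalLM, LlamaTokenizer

tokenizer = LlamaTokenizer.from_pretrained("path/to/repo")
model = LlamaForCausalLM.from_pretrained("path/to/repo")

num_added = tokenizer.add_special_tokens({"pad_token": "[PAD]"})
if num_added > 0:
    # The vocabulary grew, so the embedding matrix must grow with it.
    model.resize_token_embeddings(len(tokenizer))
```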
tokenizer.json CHANGED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json CHANGED
@@ -3,6 +3,7 @@
   "clean_up_tokenization_spaces": false,
   "eos_token": "</s>",
   "model_max_length": 1000000000000000019884624838656,
+  "pad_token": "[PAD]",
   "tokenizer_class": "LlamaTokenizer",
   "unk_token": "<unk>"
 }
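With pad_token also declared in tokenizer_config.json, batched encoding with padding works without further setup. A minimal sketch, not part of this commit; the repo path is again a placeholder:

```python
# Minimal sketch (not part of this commit): with "pad_token" declared in
# tokenizer_config.json, batch padding works out of the box.
# "path/to/repo" is a hypothetical placeholder for the model repo or local folder.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("path/to/repo")
batch = tokenizer(
    ["short prompt", "a somewhat longer prompt to pad against"],
    padding=True,
)
print(tokenizer.pad_token)                       # "[PAD]" after this commit
print([len(ids) for ids in batch["input_ids"]])  # both rows padded to equal length
```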