Starchik committed on
Commit
3c50e75
·
verified ·
1 Parent(s): 3cd9587

Update main.py

Browse files
Files changed (1) hide show
  1. main.py +17 -10
main.py CHANGED
@@ -1,13 +1,20 @@
1
- from huggingface_hub import snapshot_download
2
- from pathlib import Path
3
- import os
4
 
5
- # Убедитесь, что переменная окружения HF_TOKEN установлена
6
- token = os.getenv("HF_TOKEN")
7
- if not token:
8
- raise ValueError("Hugging Face token not found in environment variables")
9
 
10
- mistral_models_path = Path.home().joinpath('mistral_models', 'Mamba-Codestral-7B-v0.1')
11
- mistral_models_path.mkdir(parents=True, exist_ok=True)
12
 
13
- snapshot_download(repo_id="mistralai/Mamba-Codestral-7B-v0.1", allow_patterns=["params.json", "consolidated.safetensors", "tokenizer.model.v3"], local_dir=mistral_models_path)
 
 
 
 
 
 
 
 
 
 
"""Load Codestral-22B on CPU, sample a completion for a prompt, and print it."""
from transformers import AutoTokenizer, AutoModelForCausalLM

# Hub checkpoint identifier for the model we want to run.
checkpoint = "mistralai/Codestral-22B-v0.1"

# Fetch the causal-LM weights together with their matching tokenizer.
model = AutoModelForCausalLM.from_pretrained(checkpoint)
tokenizer = AutoTokenizer.from_pretrained(checkpoint)
# from_pretrained leaves the model on CPU by default, so no .to("cpu") is needed.

# Turn the prompt into a PyTorch tensor of token ids.
prompt = "Your input text here"
input_ids = tokenizer(prompt, return_tensors="pt").input_ids

# Sample up to 1000 new tokens conditioned on the prompt.
output_ids = model.generate(input_ids, max_new_tokens=1000, do_sample=True)

# Map the generated ids back to text, dropping special tokens, and show it.
decoded = tokenizer.decode(output_ids[0].tolist(), skip_special_tokens=True)
print(decoded)