Starchik committed
Commit b7b31d4 · verified · 1 parent: aabacb1

Update main.py

Files changed (1)
  1. main.py +19 -6
main.py CHANGED
@@ -1,9 +1,22 @@
- from mistral_inference.transformer import Transformer
- from mistral_inference.generate import generate
-
- model = Transformer.from_folder(mistral_models_path)
- out_tokens, _ = generate([tokens], model, max_tokens=64, temperature=0.0, eos_id=tokenizer.instruct_tokenizer.tokenizer.eos_id)
-
- result = tokenizer.decode(out_tokens[0])
-
- print(result)
+ import gradio as gr
+ from huggingface_hub import InferenceClient
+
+ # Create an inference client for the model
+ client = InferenceClient(model="mistralai/Mamba-Codestral-7B-v0.1")
+
+ def generate_code(prompt):
+     response = client.text_generation(prompt, max_new_tokens=512)
+     return response
+
+ # Build the Gradio interface
+ with gr.Blocks() as demo:
+     gr.Markdown("## Mamba-Codestral-7B Code Generator")
+     with gr.Row():
+         prompt_input = gr.Textbox(label="Enter a prompt", placeholder="For example: write a sorting function in Python")
+         submit_button = gr.Button("Generate code")
+     output = gr.Code(label="Result")
+
+     submit_button.click(generate_code, inputs=prompt_input, outputs=output)
+
+ # Launch the interface
+ demo.launch()
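Note: when the client in this file is pointed at the serverless Inference API, the call typically needs an access token. Below is a minimal standalone sketch of the same text_generation call with explicit authentication; the HF_TOKEN environment variable name and the example prompt are assumptions for illustration and are not part of this commit.

import os
from huggingface_hub import InferenceClient

# Same model and call as in main.py, but with a token read from the
# environment (HF_TOKEN is an assumed variable name).
client = InferenceClient(
    model="mistralai/Mamba-Codestral-7B-v0.1",
    token=os.environ.get("HF_TOKEN"),
)
print(client.text_generation("Write a sorting function in Python", max_new_tokens=128))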