akhaliq HF staff committed on
Commit
c44529f
·
1 Parent(s): 1d99642

add mistral codestral

Browse files
Files changed (5) hide show
  1. app.py +5 -3
  2. app_mistral.py +14 -19
  3. app_mistral_coder.py +23 -0
  4. pyproject.toml +1 -1
  5. requirements.txt +4 -2
app.py CHANGED
@@ -7,7 +7,6 @@ from app_fal import demo as demo_fal
7
  from app_fireworks import demo as demo_fireworks
8
  from app_huggingface import demo as demo_huggingface
9
  from app_meta import demo as demo_meta
10
- from app_mistral import demo as demo_mistral
11
  from app_nvidia import demo as demo_nvidia
12
  from app_omini import demo as demo_omini
13
  from app_paligemma import demo as demo_paligemma
@@ -34,10 +33,13 @@ from app_groq import demo as demo_groq
34
  from app_groq_coder import demo as demo_groq_coder
35
  from app_openai_coder import demo as demo_openai_coder
36
  from app_langchain import demo as demo_langchain
 
 
37
  from utils import get_app
38
 
39
  # Create mapping of providers to their demos
40
  PROVIDERS = {
 
41
  "Langchain Agent": demo_langchain,
42
  "Gemini Camera": demo_gemini_camera,
43
  "Gemini Coder": demo_gemini_coder,
@@ -45,6 +47,7 @@ PROVIDERS = {
45
  "OpenAI": demo_openai,
46
  "Gemini": demo_gemini,
47
  "Gemini Voice": demo_gemini_voice,
 
48
  "Groq Coder": demo_groq_coder,
49
  "Hyperbolic Coder": demo_hyperbolic_coder,
50
  "SmolAgents": demo_smolagents,
@@ -72,13 +75,12 @@ PROVIDERS = {
72
  "Allen AI": demo_allenai,
73
  "Perplexity": demo_perplexity,
74
  "Experimental": demo_experimental,
75
- "Mistral": demo_mistral,
76
  "NVIDIA": demo_nvidia,
77
  }
78
 
79
  demo = get_app(
80
  models=list(PROVIDERS.keys()),
81
- default_model="Langchain Agent",
82
  src=PROVIDERS,
83
  dropdown_label="Select Provider",
84
  )
 
7
  from app_fireworks import demo as demo_fireworks
8
  from app_huggingface import demo as demo_huggingface
9
  from app_meta import demo as demo_meta
 
10
  from app_nvidia import demo as demo_nvidia
11
  from app_omini import demo as demo_omini
12
  from app_paligemma import demo as demo_paligemma
 
33
  from app_groq_coder import demo as demo_groq_coder
34
  from app_openai_coder import demo as demo_openai_coder
35
  from app_langchain import demo as demo_langchain
36
+ from app_mistral_coder import demo as demo_mistral_coder
37
+ from app_mistral import demo as demo_mistral
38
  from utils import get_app
39
 
40
  # Create mapping of providers to their demos
41
  PROVIDERS = {
42
+ "Mistral Codestral": demo_mistral_coder,
43
  "Langchain Agent": demo_langchain,
44
  "Gemini Camera": demo_gemini_camera,
45
  "Gemini Coder": demo_gemini_coder,
 
47
  "OpenAI": demo_openai,
48
  "Gemini": demo_gemini,
49
  "Gemini Voice": demo_gemini_voice,
50
+ "Mistral": demo_mistral,
51
  "Groq Coder": demo_groq_coder,
52
  "Hyperbolic Coder": demo_hyperbolic_coder,
53
  "SmolAgents": demo_smolagents,
 
75
  "Allen AI": demo_allenai,
76
  "Perplexity": demo_perplexity,
77
  "Experimental": demo_experimental,
 
78
  "NVIDIA": demo_nvidia,
79
  }
80
 
81
  demo = get_app(
82
  models=list(PROVIDERS.keys()),
83
+ default_model="Mistral Codestral",
84
  src=PROVIDERS,
85
  dropdown_label="Select Provider",
86
  )
app_mistral.py CHANGED
@@ -1,26 +1,21 @@
1
- import os
2
 
3
- import mistral_gradio
4
 
5
- from utils import get_app
 
6
 
 
 
 
 
 
7
  demo = get_app(
8
- models=[
9
- "mistral-large-latest",
10
- "pixtral-large-latest",
11
- "ministral-3b-latest",
12
- "ministral-8b-latest",
13
- "mistral-small-latest",
14
- "codestral-latest",
15
- "mistral-embed",
16
- "mistral-moderation-latest",
17
- "pixtral-12b-2409",
18
- "open-mistral-nemo",
19
- "open-codestral-mamba",
20
- ],
21
- default_model="pixtral-large-latest",
22
- src=mistral_gradio.registry,
23
- accept_token=not os.getenv("MISTRAL_API_KEY"),
24
  )
25
 
26
  if __name__ == "__main__":
 
1
+ import ai_gradio
2
 
3
+ from utils_ai_gradio import get_app
4
 
5
+ # Get the mistral models but keep their full names for loading
6
+ MISTRAL_MODELS_FULL = [k for k in ai_gradio.registry.keys() if k.startswith("mistral:")]
7
 
8
+ # Create display names without the prefix
9
+ MISTRAL_MODELS_DISPLAY = [k.replace("mistral:", "") for k in MISTRAL_MODELS_FULL]
10
+
11
+
12
+ # Create and launch the interface using get_app utility
13
  demo = get_app(
14
+ models=MISTRAL_MODELS_FULL, # Use the full names with prefix
15
+ default_model=MISTRAL_MODELS_FULL[5],
16
+ dropdown_label="Select Mistral Model",
17
+ choices=MISTRAL_MODELS_DISPLAY, # Display names without prefix
18
+ fill_height=True,
 
 
 
 
 
 
 
 
 
 
 
19
  )
20
 
21
  if __name__ == "__main__":
app_mistral_coder.py ADDED
@@ -0,0 +1,23 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import ai_gradio

from utils_ai_gradio import get_app

# Get the mistral models but keep their full names for loading
MISTRAL_MODELS_FULL = [k for k in ai_gradio.registry.keys() if k.startswith("mistral:")]

# Create display names without the "mistral:" prefix for the dropdown
MISTRAL_MODELS_DISPLAY = [k.removeprefix("mistral:") for k in MISTRAL_MODELS_FULL]

# The original hard-coded index 5 as the default; guard against the registry
# shipping fewer than six mistral models so we fail over to the first entry
# instead of raising IndexError at import time.
_DEFAULT_INDEX = 5 if len(MISTRAL_MODELS_FULL) > 5 else 0

# Create and launch the coder interface using the shared get_app utility
demo2 = get_app(
    models=MISTRAL_MODELS_FULL,  # full names with prefix, used for loading
    default_model=MISTRAL_MODELS_FULL[_DEFAULT_INDEX],
    dropdown_label="Select Mistral Model",
    choices=MISTRAL_MODELS_DISPLAY,  # display names without prefix
    fill_height=True,
    coder=True,  # enable the coder-specific UI variant
)

if __name__ == "__main__":
    demo2.launch()
pyproject.toml CHANGED
@@ -38,7 +38,7 @@ dependencies = [
38
  "langchain>=0.3.14",
39
  "chromadb>=0.5.23",
40
  "openai>=1.55.0",
41
- "ai-gradio[crewai,deepseek,gemini,groq,hyperbolic,openai,smolagents,transformers, langchain]>=0.2.14",
42
  ]
43
 
44
  [tool.uv.sources]
 
38
  "langchain>=0.3.14",
39
  "chromadb>=0.5.23",
40
  "openai>=1.55.0",
41
+ "ai-gradio[crewai,deepseek,gemini,groq,hyperbolic,openai,smolagents,transformers, langchain, mistral]>=0.2.15",
42
  ]
43
 
44
  [tool.uv.sources]
requirements.txt CHANGED
@@ -2,7 +2,7 @@
2
  # uv pip compile pyproject.toml -o requirements.txt
3
  accelerate==1.2.1
4
  # via ai-gradio
5
- ai-gradio==0.2.14
6
  # via anychat (pyproject.toml)
7
  aiofiles==23.2.1
8
  # via gradio
@@ -586,7 +586,9 @@ mem0ai==0.1.41
586
  mistral-gradio @ git+https://github.com/AK391/mistral-gradio.git@dfef7dc871ea35100743a415fde8a57a30c49fcb
587
  # via anychat (pyproject.toml)
588
  mistralai==1.2.6
589
- # via mistral-gradio
 
 
590
  mmh3==5.0.1
591
  # via chromadb
592
  modelscope-studio==1.0.2
 
2
  # uv pip compile pyproject.toml -o requirements.txt
3
  accelerate==1.2.1
4
  # via ai-gradio
5
+ ai-gradio==0.2.15
6
  # via anychat (pyproject.toml)
7
  aiofiles==23.2.1
8
  # via gradio
 
586
  mistral-gradio @ git+https://github.com/AK391/mistral-gradio.git@dfef7dc871ea35100743a415fde8a57a30c49fcb
587
  # via anychat (pyproject.toml)
588
  mistralai==1.2.6
589
+ # via
590
+ # ai-gradio
591
+ # mistral-gradio
592
  mmh3==5.0.1
593
  # via chromadb
594
  modelscope-studio==1.0.2