import gradio as gr

import gemini_gradio
import openai_gradio
import anthropic_gradio
import sambanova_gradio
import xai_gradio
import hyperbolic_gradio
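# Multi-provider chat demo: each tab loads a hosted chat interface from the
# corresponding provider's gradio registry. accept_token=True is intended to
# let users paste their own API key for that provider in the UI.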
with gr.Blocks(fill_height=True) as demo:
    with gr.Tab("Gemini"):
        with gr.Row():
            gemini_model = gr.Dropdown(
                choices=[
                    'gemini-1.5-flash',
                    'gemini-1.5-flash-8b',
                    'gemini-1.5-pro',
                    'gemini-exp-1114'
                ],
                value='gemini-1.5-pro',
                label="Select Gemini Model",
                interactive=True
            )

        # Load the chat interface for the default Gemini model.
        gemini_interface = gr.load(
            name=gemini_model.value,
            src=gemini_gradio.registry,
            accept_token=True
        )

        # Rebuild the interface whenever a different Gemini model is selected.
        def update_gemini_model(new_model):
            return gr.load(
                name=new_model,
                src=gemini_gradio.registry,
                accept_token=True
            )

        gemini_model.change(
            fn=update_gemini_model,
            inputs=[gemini_model],
            outputs=[gemini_interface]
        )
with gr.Tab("ChatGPT"): |
|
with gr.Row(): |
|
model_choice = gr.Dropdown( |
|
choices=[ |
|
'gpt-4o', |
|
'gpt-4o-2024-08-06', |
|
'gpt-4o-2024-05-13', |
|
'chatgpt-4o-latest', |
|
'gpt-4o-mini', |
|
'gpt-4o-mini-2024-07-18', |
|
'o1-preview', |
|
'o1-preview-2024-09-12', |
|
'o1-mini', |
|
'o1-mini-2024-09-12', |
|
'gpt-4-turbo', |
|
'gpt-4-turbo-2024-04-09', |
|
'gpt-4-turbo-preview', |
|
'gpt-4-0125-preview', |
|
'gpt-4-1106-preview', |
|
'gpt-4', |
|
'gpt-4-0613' |
|
], |
|
value='gpt-4o', |
|
label="Select Model", |
|
interactive=True |
|
) |
|
|
|
chatgpt_interface = gr.load( |
|
name=model_choice.value, |
|
src=openai_gradio.registry, |
|
accept_token=True |
|
) |
|
|
|
def update_model(new_model): |
|
return gr.load( |
|
name=new_model, |
|
src=openai_gradio.registry, |
|
accept_token=True |
|
) |
|
|
|
model_choice.change( |
|
fn=update_model, |
|
inputs=[model_choice], |
|
outputs=[chatgpt_interface] |
|
) |
|
with gr.Tab("Claude"): |
|
with gr.Row(): |
|
claude_model = gr.Dropdown( |
|
choices=[ |
|
'claude-3-5-sonnet-20241022', |
|
'claude-3-5-haiku-20241022', |
|
'claude-3-opus-20240229', |
|
'claude-3-sonnet-20240229', |
|
'claude-3-haiku-20240307' |
|
], |
|
value='claude-3-5-sonnet-20241022', |
|
label="Select Model", |
|
interactive=True |
|
) |
|
|
|
claude_interface = gr.load( |
|
name=claude_model.value, |
|
src=anthropic_gradio.registry, |
|
accept_token=True |
|
) |
|
|
|
def update_claude_model(new_model): |
|
return gr.load( |
|
name=new_model, |
|
src=anthropic_gradio.registry, |
|
accept_token=True |
|
) |
|
|
|
claude_model.change( |
|
fn=update_claude_model, |
|
inputs=[claude_model], |
|
outputs=[claude_interface] |
|
) |
|
with gr.Tab("Meta Llama-3.2-90B-Vision-Instruct"): |
|
gr.load( |
|
name='Llama-3.2-90B-Vision-Instruct', |
|
src=sambanova_gradio.registry, |
|
accept_token=True, |
|
multimodal=True, |
|
description="Requires SambaNova API key" |
|
) |
|
with gr.Tab("Grok"): |
|
gr.load( |
|
name='grok-beta', |
|
src=xai_gradio.registry, |
|
accept_token=True |
|
) |
|
with gr.Tab("Qwen2.5 72B"): |
|
gr.load( |
|
name='Qwen/Qwen2.5-72B-Instruct', |
|
src=hyperbolic_gradio.registry, |
|
accept_token=True |
|
) |
|
|
|
|
|
demo.launch() |
|
|
|
|
|
|