Update app.py
app.py CHANGED
@@ -10,7 +10,7 @@ device = "cuda" if torch.cuda.is_available() else "cpu"
 model_repo_id = "tensorart/stable-diffusion-3.5-large-TurboX"
 
 if torch.cuda.is_available():
-    torch_dtype = torch.
+    torch_dtype = torch.float16
 else:
     torch_dtype = torch.float32
 
@@ -63,8 +63,8 @@ css = """
 
 with gr.Blocks(css=css) as demo:
     with gr.Column(elem_id="col-container"):
-        gr.Markdown(" # [Stable Diffusion 3.5 Large
-        gr.Markdown("
+        gr.Markdown(" # [TensorArt Stable Diffusion 3.5 Large TurboX)](https://huggingface.co/tensorart/stable-diffusion-3.5-large-TurboX)")
+        gr.Markdown("8-step distilled turbo model")
         with gr.Row():
             prompt = gr.Text(
                 label="Prompt",
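For context, a minimal sketch of how the dtype selected in the first hunk is typically consumed when the model is loaded with diffusers. The pipeline class and the loading code below are assumptions about the rest of app.py, which is not shown in this diff.

import torch
from diffusers import StableDiffusion3Pipeline  # assumed pipeline class; not part of this diff

model_repo_id = "tensorart/stable-diffusion-3.5-large-TurboX"

# As in this commit: half precision on GPU, full precision on CPU.
if torch.cuda.is_available():
    torch_dtype = torch.float16
else:
    torch_dtype = torch.float32

# The chosen dtype is passed to from_pretrained so the weights are loaded in that
# precision, and the pipeline is then moved to the available device.
pipe = StableDiffusion3Pipeline.from_pretrained(model_repo_id, torch_dtype=torch_dtype)
pipe = pipe.to("cuda" if torch.cuda.is_available() else "cpu")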