Update app.py
app.py CHANGED
@@ -14,11 +14,14 @@ device = "cuda" if torch.cuda.is_available() else "cpu"
 #torch._inductor.config.epilogue_fusion = False
 #torch._inductor.config.coordinate_descent_check_all_directions = True
 
-pipe = AuraFlowPipeline.from_pretrained(
+pipe_v1 = AuraFlowPipeline.from_pretrained(
     "fal/AuraFlow",
     torch_dtype=torch.float16
 ).to("cuda")
-
+pipe = AuraFlowPipeline.from_pretrained(
+    "fal/AuraFlow-v0.2",
+    torch_dtype=torch.float16
+).to("cuda")
 #pipe.transformer.to(memory_format=torch.channels_last)
 #pipe.transformer = torch.compile(pipe.transformer, mode="reduce-overhead", fullgraph=True)
 #pipe.transformer.to(memory_format=torch.channels_last)
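The hunk above keeps both AuraFlow checkpoints resident on the GPU and selects between them by name. A minimal sketch of the same idea using a version-keyed registry (the PIPELINES name is illustrative, not from this commit):

    import torch
    from diffusers import AuraFlowPipeline

    # Load each checkpoint once at process start; fp16 halves the memory
    # footprint, but two resident 6.8B-parameter pipelines still need a
    # large GPU.
    PIPELINES = {
        "0.1": AuraFlowPipeline.from_pretrained(
            "fal/AuraFlow", torch_dtype=torch.float16
        ).to("cuda"),
        "0.2": AuraFlowPipeline.from_pretrained(
            "fal/AuraFlow-v0.2", torch_dtype=torch.float16
        ).to("cuda"),
    }

A lookup like PIPELINES[model_version] can then replace per-version if/else dispatch.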
@@ -31,22 +34,32 @@ MAX_SEED = np.iinfo(np.int32).max
 MAX_IMAGE_SIZE = 1024
 
 @spaces.GPU
-def infer(prompt, negative_prompt="", seed=42, randomize_seed=False, width=1024, height=1024, guidance_scale=5.0, num_inference_steps=28, progress=gr.Progress(track_tqdm=True)):
-
+def infer(prompt, negative_prompt="", seed=42, randomize_seed=False, width=1024, height=1024, guidance_scale=5.0, num_inference_steps=28, progress=gr.Progress(track_tqdm=True), model_version="0.2"):
+
     if randomize_seed:
         seed = random.randint(0, MAX_SEED)
 
     generator = torch.Generator().manual_seed(seed)
-    image = pipe(
-        prompt = prompt,
-        negative_prompt = negative_prompt,
-        width=width,
-        height=height,
-        guidance_scale = guidance_scale,
-        num_inference_steps = num_inference_steps,
-        generator = generator
-    ).images[0]
-
+    if(model_version == "0.1"):
+        image = pipe_v1(
+            prompt = prompt,
+            negative_prompt = negative_prompt,
+            width=width,
+            height=height,
+            guidance_scale = guidance_scale,
+            num_inference_steps = num_inference_steps,
+            generator = generator
+        ).images[0]
+    else:
+        image = pipe(
+            prompt = prompt,
+            negative_prompt = negative_prompt,
+            width=width,
+            height=height,
+            guidance_scale = guidance_scale,
+            num_inference_steps = num_inference_steps,
+            generator = generator
+        ).images[0]
 
     return image, seed
 
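Under the same registry assumption sketched above, the if/else in infer collapses to a dictionary lookup. A hedged sketch, not the committed code:

    import random

    import numpy as np
    import torch

    MAX_SEED = np.iinfo(np.int32).max

    def infer(prompt, negative_prompt="", seed=42, randomize_seed=False,
              width=1024, height=1024, guidance_scale=5.0,
              num_inference_steps=28, model_version="0.2"):
        if randomize_seed:
            seed = random.randint(0, MAX_SEED)
        # A seeded generator makes the run reproducible; returning the seed
        # lets the UI show which value was used when randomize_seed is on.
        generator = torch.Generator().manual_seed(seed)
        image = PIPELINES[model_version](
            prompt=prompt,
            negative_prompt=negative_prompt,
            width=width,
            height=height,
            guidance_scale=guidance_scale,
            num_inference_steps=num_inference_steps,
            generator=generator,
        ).images[0]
        return image, seed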
@@ -74,7 +87,7 @@ with gr.Blocks(css=css) as demo:
     with gr.Column(elem_id="col-container"):
         gr.Markdown(f"""
         # AuraFlow 0.1
-        Demo of the [AuraFlow 0.1](https://huggingface.co/fal/AuraFlow) 6.8B parameters open source diffusion transformer model
+        Demo of the [AuraFlow 0.2](https://huggingface.co/fal/AuraFlow) 6.8B parameters open source diffusion transformer model
         [[blog](https://blog.fal.ai/auraflow/)] [[model](https://huggingface.co/fal/AuraFlow)] [[fal](https://fal.ai/models/fal-ai/aura-flow)]
         """)
 
@@ -94,6 +107,10 @@ with gr.Blocks(css=css) as demo:
 
         with gr.Accordion("Advanced Settings", open=False):
 
+            model_version = gr.Dropdown(
+                ["0.1", "0.2"], label="Model version", value="0.1"
+            )
+
             negative_prompt = gr.Text(
                 label="Negative prompt",
                 max_lines=1,
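Because the dropdown is wired into the event's inputs, its value="0.1" is what reaches infer on every UI-driven call; the signature default model_version="0.2" only applies when infer is invoked directly, for example:

    # Hypothetical direct call that bypasses the UI, so the signature
    # default model_version="0.2" applies instead of the dropdown's "0.1".
    image, used_seed = infer("a watercolor fox", randomize_seed=True)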
@@ -157,7 +174,7 @@ with gr.Blocks(css=css) as demo:
     gr.on(
         triggers=[run_button.click, prompt.submit, negative_prompt.submit],
         fn = infer,
-        inputs = [prompt, negative_prompt, seed, randomize_seed, width, height, guidance_scale, num_inference_steps],
+        inputs = [prompt, negative_prompt, seed, randomize_seed, width, height, guidance_scale, num_inference_steps, model_version],
         outputs = [result, seed]
     )
 
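One subtlety in this wiring: Gradio pairs the components in inputs with infer's parameters positionally, but it skips any parameter whose default is a gr.Progress instance and injects that itself. Appending model_version at the end of inputs therefore lines up with the new trailing parameter even though progress sits between num_inference_steps and model_version in the signature. A minimal standalone sketch of that injection behavior (not from the commit):

    import gradio as gr

    # `inputs` supplies only `text`; Gradio fills `progress` automatically
    # because its default is a gr.Progress instance.
    def echo(text, progress=gr.Progress(track_tqdm=True)):
        return text

    with gr.Blocks() as demo:
        box = gr.Textbox()
        out = gr.Textbox()
        box.submit(fn=echo, inputs=[box], outputs=[out])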