File size: 2,917 Bytes
6052413
 
 
 
7ce0964
6052413
 
 
1bc459c
6052413
1bc459c
9c6ac55
6052413
9c6ac55
 
1bc459c
 
 
 
6052413
 
 
 
 
 
 
 
 
 
 
2454249
6052413
1bc459c
 
6052413
67c8755
 
1bc459c
6052413
 
 
 
 
 
 
 
 
1bc459c
6052413
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
import gradio as gr
import torch
import spaces

from huggingface_hub import hf_hub_download
from diffusers import FluxControlPipeline, FluxTransformer2DModel

##########################################
#  One-time model setup (module level)   #
##########################################
# Base FLUX.1-dev pipeline with the community edit transformer swapped in,
# then moved onto the GPU for inference.
EDIT_TRANSFORMER_ID = "sayakpaul/FLUX.1-dev-edit-v0"
pipeline = FluxControlPipeline.from_pretrained(
    "black-forest-labs/FLUX.1-dev",
    transformer=FluxTransformer2DModel.from_pretrained(EDIT_TRANSFORMER_ID, torch_dtype=torch.bfloat16),
    torch_dtype=torch.bfloat16,
).to("cuda")

# Hyper-SD LoRA enables good results with only 8 inference steps;
# the 0.125 adapter weight follows the adapter's recommended setting.
lora_path = hf_hub_download("ByteDance/Hyper-SD", "Hyper-FLUX.1-dev-8steps-lora.safetensors")
pipeline.load_lora_weights(lora_path, adapter_name="hyper-sd")
pipeline.set_adapters(["hyper-sd"], adapter_weights=[0.125])

#####################################
#  The function for our Gradio app  #
#####################################
@spaces.GPU(duration=120)
def generate(prompt, input_image):
    """
    Edit `input_image` according to `prompt` with the Flux Control pipeline.

    Runs 8 denoising steps (enabled by the Hyper-SD LoRA loaded at startup)
    with a fixed seed for reproducibility, producing an output at the input
    image's own resolution. Returns the edited PIL image.
    """
    result = pipeline(
        prompt=prompt,
        control_image=input_image,
        # Keep the output the same size as the input.
        height=input_image.height,
        width=input_image.width,
        guidance_scale=30.,
        num_inference_steps=8,
        max_sequence_length=512,
        # Fixed seed so repeated runs on the same inputs match.
        generator=torch.manual_seed(0),
    )
    return result.images[0]


def launch_app():
    """
    Build the Gradio Blocks UI for the Flux Control editing demo.

    Wires a prompt textbox and an input image to `generate`, and exposes a
    couple of clickable examples. Returns the constructed (un-launched)
    `gr.Blocks` object; the caller is responsible for calling `.launch()`.
    """
    with gr.Blocks() as demo:
        # NOTE: the heading previously contained mojibake ("๐Ÿ–Œ๏ธ") — the
        # UTF-8 bytes of the paintbrush emoji mis-decoded; fixed to 🖌️.
        gr.Markdown(
            """
            # Flux Control Editing 🖌️

            This demo uses the [FLUX.1-dev](https://huggingface.co./black-forest-labs/FLUX.1-dev) 
            pipeline with an edit transformer from [Sayak Paul](https://huggingface.co./sayakpaul).
            
            **Acknowledgements**: 
            - [Sayak Paul](https://huggingface.co./sayakpaul) for open-sourcing FLUX.1-dev-edit-v0 
            - [black-forest-labs](https://huggingface.co./black-forest-labs) for FLUX.1-dev
            """
        )

        with gr.Row():
            prompt = gr.Textbox(
                label="Prompt",
                placeholder="e.g. 'Edit a certain thing in the image'"
            )
            input_image = gr.Image(
                label="Image",
                type="pil",  # `generate` relies on PIL attributes (.height/.width)
            )

        generate_button = gr.Button("Generate")
        output_image = gr.Image(label="Edited Image")

        # Connect button to function
        generate_button.click(
            fn=generate,
            inputs=[prompt, input_image],
            outputs=[output_image],
        )

        # Example files are resolved relative to the app's working directory.
        gr.Examples(
            examples=[
                ["Turn the color of the mushroom to gray", "mushroom.jpg"],
                ["Make the mushroom polka-dotted", "mushroom.jpg"],
            ],
            inputs=[prompt, input_image],
        )

    return demo


if __name__ == "__main__":
    # Build the UI and start the Gradio server.
    launch_app().launch()