Update app.py
app.py CHANGED
@@ -33,11 +33,12 @@ JS = """function () {
 }"""
 
 if torch.cuda.is_available():
-    pipe = FluxPipeline.from_pretrained(model, torch_dtype=torch.bfloat16)
+    pipe = FluxPipeline.from_pretrained(model, torch_dtype=torch.bfloat16)
     pipe.load_lora_weights(default_lora, weight_name = default_weight_name) # default load lora
     pipe.fuse_lora(lora_scale=0.9)
 
 
+
 def scrape_lora_link(url):
     try:
         # Send a GET request to the URL
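For context, a minimal sketch of the setup this hunk edits, assuming diffusers' FluxPipeline API. The model and LoRA identifiers below are placeholders (the real `model`, `default_lora`, and `default_weight_name` are defined elsewhere in app.py), and the `.to("cuda")` move is an assumption not shown in the hunk:

# A sketch of the GPU setup this hunk touches, assuming diffusers' FluxPipeline API.
import torch
from diffusers import FluxPipeline

model = "black-forest-labs/FLUX.1-dev"      # assumed base model id
default_lora = "user/some-flux-lora"        # hypothetical LoRA repository
default_weight_name = "lora.safetensors"    # hypothetical weight file name

if torch.cuda.is_available():
    # Load the base pipeline in bfloat16; moving it to the GPU is assumed here.
    pipe = FluxPipeline.from_pretrained(model, torch_dtype=torch.bfloat16).to("cuda")
    # Attach the default LoRA, then bake it into the base weights at scale 0.9, as in the diff.
    pipe.load_lora_weights(default_lora, weight_name=default_weight_name)
    pipe.fuse_lora(lora_scale=0.9)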
@@ -100,13 +101,13 @@ def generate_image(
     if seed == -1:
         seed = random.randint(0, MAX_SEED)
     seed = int(seed)
-    print(f'prompt:{prompt}')
 
     text = str(translator.translate(prompt, 'English')) + "," + lora_word
 
+    print(f"Prompt: {text}")
+
     generator = torch.Generator().manual_seed(seed)
 
-
     image = pipe(
         prompt=text,
         height=height,
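This hunk moves the debug print so it logs the translated prompt with the LoRA trigger word appended, rather than the raw user input. A minimal sketch of that generation path, assuming `pipe` is the fused FluxPipeline set up above, a translatepy-style Translator behind `translator`, and placeholder values for MAX_SEED and the sampling parameters the hunk cuts off:

import random
import torch
from translatepy import Translator  # assumed translation backend for `translator`

translator = Translator()
MAX_SEED = 2**32 - 1  # assumed bound; the real constant is defined elsewhere in app.py

def generate_image(prompt, lora_word="", height=1024, width=1024,
                   steps=28, cfg_scale=3.5, seed=-1):
    # Pick a random seed when the caller passes -1, then force it to an int, as in the hunk.
    if seed == -1:
        seed = random.randint(0, MAX_SEED)
    seed = int(seed)

    # Translate the prompt to English, append the LoRA trigger word,
    # and log the final text (the print this commit adds).
    text = str(translator.translate(prompt, 'English')) + "," + lora_word
    print(f"Prompt: {text}")

    generator = torch.Generator().manual_seed(seed)
    image = pipe(                      # `pipe` is the fused FluxPipeline set up above
        prompt=text,
        height=height,
        width=width,
        num_inference_steps=steps,     # parameter names past height= are assumptions;
        guidance_scale=cfg_scale,      # the hunk cuts the call off after height=height,
        generator=generator,
    ).images[0]
    return image, seed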