Spaces:
Sleeping
Sleeping
genevera
committed on
Commit
·
5919897
1
Parent(s):
1baaf3c
oh my god shut up about the safety checker already
Browse files
app.py
CHANGED
|
@@ -1,4 +1,9 @@
|
|
| 1 |
import torch
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 2 |
from diffusers.loaders import AttnProcsLayers
|
| 3 |
from transformers import CLIPTextModel, CLIPTokenizer
|
| 4 |
from modules.beats.BEATs import BEATs, BEATsConfig
|
|
@@ -21,9 +26,7 @@ from diffusers import (
|
|
| 21 |
KDPM2AncestralDiscreteScheduler,
|
| 22 |
KDPM2DiscreteScheduler,
|
| 23 |
)
|
| 24 |
-
|
| 25 |
-
import gradio as gr
|
| 26 |
-
from scipy import signal
|
| 27 |
|
| 28 |
class AudioTokenWrapper(torch.nn.Module):
|
| 29 |
"""Simple wrapper module for Stable Diffusion that holds all the models together"""
|
|
@@ -198,7 +201,6 @@ def greet(audio, steps=25, scheduler="ddpm"):
|
|
| 198 |
image = pipeline(prompt, num_inference_steps=steps, guidance_scale=8.5, generator=generator).images[0]
|
| 199 |
return image
|
| 200 |
|
| 201 |
-
|
| 202 |
lora = False
|
| 203 |
repo_id = "philz1337/reliberate"
|
| 204 |
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
|
|
|
|
| 1 |
import torch
|
| 2 |
+
import numpy as np
|
| 3 |
+
import gradio as gr
|
| 4 |
+
from scipy import signal
|
| 5 |
+
from diffusers.utils import logging
|
| 6 |
+
logging.set_verbosity_error()
|
| 7 |
from diffusers.loaders import AttnProcsLayers
|
| 8 |
from transformers import CLIPTextModel, CLIPTokenizer
|
| 9 |
from modules.beats.BEATs import BEATs, BEATsConfig
|
|
|
|
| 26 |
KDPM2AncestralDiscreteScheduler,
|
| 27 |
KDPM2DiscreteScheduler,
|
| 28 |
)
|
| 29 |
+
|
|
|
|
|
|
|
| 30 |
|
| 31 |
class AudioTokenWrapper(torch.nn.Module):
|
| 32 |
"""Simple wrapper module for Stable Diffusion that holds all the models together"""
|
|
|
|
| 201 |
image = pipeline(prompt, num_inference_steps=steps, guidance_scale=8.5, generator=generator).images[0]
|
| 202 |
return image
|
| 203 |
|
|
|
|
| 204 |
lora = False
|
| 205 |
repo_id = "philz1337/reliberate"
|
| 206 |
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
|