radames committed on
Commit 5f75dbb · 2 Parent(s): 1e81fdc 7cdc8db

Merge branch 'main' into text2igm

Files changed (3):
  1. README.md +1 -1
  2. app-img2img.py +3 -1
  3. app-txt2img.py +2 -1
README.md CHANGED
@@ -16,7 +16,7 @@ You need a webcam to run this demo. 🤗
 
 ## Running Locally
 
-You need CUDA and Python or a Mac with an M1/M2/M3 chip
+You need CUDA and Python 3.10 or a Mac with an M1/M2/M3 chip
 `TIMEOUT`: limit user session timeout
 `SAFETY_CHECKER`: disabled if you want NSFW filter off
 `MAX_QUEUE_SIZE`: limit number of users on current app instance
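The README documents three environment variables for running locally. A minimal sketch of how such settings are typically read at startup, assuming plain `os.environ` lookups with placeholder defaults (the apps' actual parsing and default values are not part of this diff):

```python
import os

# Defaults here are placeholders -- the apps may use different ones.
TIMEOUT = float(os.environ.get("TIMEOUT", 0))              # limit user session timeout (0 = no limit)
SAFETY_CHECKER = os.environ.get("SAFETY_CHECKER", None)    # e.g. "disabled" to turn the NSFW filter off
MAX_QUEUE_SIZE = int(os.environ.get("MAX_QUEUE_SIZE", 0))  # limit users on the current instance (0 = unlimited)

print(f"TIMEOUT: {TIMEOUT}")
print(f"SAFETY_CHECKER: {SAFETY_CHECKER}")
print(f"MAX_QUEUE_SIZE: {MAX_QUEUE_SIZE}")
```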
app-img2img.py CHANGED
@@ -31,7 +31,9 @@ HEIGHT = 512
 mps_available = hasattr(torch.backends, "mps") and torch.backends.mps.is_available()
 device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
 torch_device = device
-torch_dtype = torch.float16
+
+# change to torch.float16 to save GPU memory
+torch_dtype = torch.float32
 
 print(f"TIMEOUT: {TIMEOUT}")
 print(f"SAFETY_CHECKER: {SAFETY_CHECKER}")
app-txt2img.py CHANGED
@@ -31,7 +31,8 @@ HEIGHT = 512
 mps_available = hasattr(torch.backends, "mps") and torch.backends.mps.is_available()
 device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
 torch_device = device
-torch_dtype = torch.float16
+# change to torch.float16 to save GPU memory
+torch_dtype = torch.float32
 
 print(f"TIMEOUT: {TIMEOUT}")
 print(f"SAFETY_CHECKER: {SAFETY_CHECKER}")