hysts HF staff committed on
Commit
38666fe
1 Parent(s): 6f67f48
Files changed (1) hide show
  1. app.py +0 -2
app.py CHANGED
@@ -20,7 +20,6 @@ if not torch.cuda.is_available():
20
  MAX_NUM_FRAMES = int(os.getenv("MAX_NUM_FRAMES", "200"))
21
  DEFAULT_NUM_FRAMES = min(MAX_NUM_FRAMES, int(os.getenv("DEFAULT_NUM_FRAMES", "24")))
22
  MAX_SEED = np.iinfo(np.int32).max
23
- CACHE_EXAMPLES = torch.cuda.is_available() and os.getenv("CACHE_EXAMPLES") == "1"
24
 
25
  if torch.cuda.is_available():
26
  pipe = DiffusionPipeline.from_pretrained("cerspense/zeroscope_v2_576w", torch_dtype=torch.float16)
@@ -125,7 +124,6 @@ with gr.Blocks(css="style.css") as demo:
125
  inputs=inputs,
126
  outputs=result,
127
  fn=generate,
128
- cache_examples=CACHE_EXAMPLES,
129
  )
130
 
131
  gr.on(
 
20
  MAX_NUM_FRAMES = int(os.getenv("MAX_NUM_FRAMES", "200"))
21
  DEFAULT_NUM_FRAMES = min(MAX_NUM_FRAMES, int(os.getenv("DEFAULT_NUM_FRAMES", "24")))
22
  MAX_SEED = np.iinfo(np.int32).max
 
23
 
24
  if torch.cuda.is_available():
25
  pipe = DiffusionPipeline.from_pretrained("cerspense/zeroscope_v2_576w", torch_dtype=torch.float16)
 
124
  inputs=inputs,
125
  outputs=result,
126
  fn=generate,
 
127
  )
128
 
129
  gr.on(