sleeper371 committed on
Commit 20c0646 · 1 Parent(s): b5b7f54

remove prompt loading to GPU to fix zero GPU err

Files changed (2)
  1. core/bark/generate_audio.py +1 -1
  2. event_handlers.py +1 -1
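The change follows the usual Hugging Face ZeroGPU pattern: the prompt is loaded onto the CPU in the event handler, and only moved to CUDA inside the GPU-executed generation path, since under ZeroGPU a CUDA device is attached only while a GPU-decorated function is running. Below is a minimal sketch of that pattern; the @spaces.GPU decorator is the standard ZeroGPU API, but the function names and loading logic are illustrative assumptions, not this repo's code.

# Illustrative ZeroGPU pattern; `load_prompt_cpu` and `run_generation` are hypothetical names.
import spaces
import torch

def load_prompt_cpu(path: str) -> torch.Tensor:
    # Load the prompt in host memory; no GPU is attached at this point.
    return torch.load(path, map_location=torch.device("cpu"))

@spaces.GPU  # ZeroGPU attaches a CUDA device only while this function runs
def run_generation(prompt: torch.Tensor) -> torch.Tensor:
    # Move the CPU-resident prompt to the GPU now that CUDA is available.
    prompt = prompt.to("cuda")
    # ... run the model and return audio ...
    return prompt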
core/bark/generate_audio.py CHANGED
@@ -39,7 +39,7 @@ def generate_audio(
 
     """
     if prompt is not None:
-        semantic_prompt = prompt.semantic_prompt if prompt is not None else None
+        semantic_prompt = prompt.semantic_prompt.to("cuda") if prompt is not None else None
         # if len(semantic_prompt.shape) == 2:
         #     semantic_prompt = semantic_prompt[0, :]
         assert (
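The new line assumes CUDA is always available when generate_audio runs, which holds under ZeroGPU once the call sits inside a GPU-decorated function. For comparison, a device-agnostic variant of the same move would look like the sketch below; this is an illustrative alternative, not this commit's code.

# Hypothetical device-agnostic variant of the changed line (not this commit's code).
import torch

device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
semantic_prompt = prompt.semantic_prompt.to(device) if prompt is not None else None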
event_handlers.py CHANGED
@@ -353,7 +353,7 @@ def generate_batch_audio(
         gr.Info("Loading audio prompt...")
         prompt_path = os.path.join(PROMPT_DIR, selected_prompt_name)
         bark_prompt = BarkPrompt.load_prompt(
-            prompt_path, torch.device(inference_device)
+            prompt_path, torch.device("cpu")
         )
 
         generation_config = BarkGenerationConfig(
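Passing torch.device("cpu") here keeps the loaded prompt in host memory until generation actually runs on the GPU, which is what avoids touching CUDA outside the GPU-attached call. The sketch below is one plausible reading of how load_prompt could use the device argument; the real implementation may differ.

# Assumed shape of BarkPrompt.load_prompt; the actual implementation may differ.
import torch

class BarkPrompt:
    def __init__(self, semantic_prompt: torch.Tensor):
        self.semantic_prompt = semantic_prompt

    @classmethod
    def load_prompt(cls, path: str, device: torch.device) -> "BarkPrompt":
        # map_location controls where the tensors land; with "cpu" nothing
        # touches the GPU at load time, which is what this commit relies on.
        data = torch.load(path, map_location=device)
        return cls(semantic_prompt=data)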