DeF0017 committed on
Commit d86204b · verified · 1 Parent(s): 3daf5ab

Update app.py

Files changed (1):
  app.py +3 -0
app.py CHANGED
@@ -76,6 +76,7 @@ NUM_EPOCHS = 0
 LOAD_MODEL = True
 SAVE_MODEL = False
 CHECKPOINT_GEN_A = f"{os.getcwd()}genA.pth.tar"
+CHECKPOINT_GEN_B = f"{os.getcwd()}genB.pth.tar"
 
 transforms = A.Compose(
     [
@@ -99,10 +100,12 @@ def load_checkpoint(checkpoint_file, model, optimizer, lr):
     for param_group in optimizer.param_groups:
         param_group["lr"] = lr
 
+genB = Generator().to(DEVICE)
 genA = Generator().to(DEVICE)
 optim_gen = optim.Adam(list(genB.parameters()) + list(genA.parameters()), lr=LEARNING_RATE, betas=(0.5, 0.999))
 
 load_checkpoint(CHECKPOINT_GEN_A, genA, optim_gen, LEARNING_RATE)
+load_checkpoint(CHECKPOINT_GEN_B, genB, optim_gen, LEARNING_RATE)
 
 def postprocess_and_show(output):
     # Detach from GPU, move to CPU, and remove the batch dimension
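
For context: only the tail of load_checkpoint is visible in the second hunk. A minimal sketch of what such a PyTorch helper commonly looks like, assuming torch is imported, DEVICE is the constant defined earlier in app.py, and the checkpoint dict was saved under "state_dict" and "optimizer" keys (both key names are assumptions, not confirmed by this diff):

import torch

def load_checkpoint(checkpoint_file, model, optimizer, lr):
    # Load the serialized checkpoint onto the active device.
    checkpoint = torch.load(checkpoint_file, map_location=DEVICE)
    # Restore model and optimizer state (key names assumed).
    model.load_state_dict(checkpoint["state_dict"])
    optimizer.load_state_dict(checkpoint["optimizer"])
    # Reset the learning rate, since the restored optimizer state
    # carries the old lr (this loop is the part visible in the diff).
    for param_group in optimizer.param_groups:
        param_group["lr"] = lr

One caveat on the paths as written: f"{os.getcwd()}genA.pth.tar" concatenates without a path separator, since os.getcwd() does not end in one; if the checkpoints live in the working directory, os.path.join(os.getcwd(), "genA.pth.tar") would be the safer spelling.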