[fix] variational_loss is handled per *_step now, might rethink later
ctr26 committed Aug 15, 2024
1 parent 956a107 commit df4296c
Showing 1 changed file with 4 additions and 3 deletions.
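The commit title says the variational loss is now handled inside each *_step method rather than in one shared place. The hunk shown below only covers a small hook refactor, so as orientation here is a minimal, hypothetical sketch of that per-step pattern for a LightningModule. Every name in it (PerStepVariationalModule, a model returning a (recon_loss, kl_loss) pair, the beta weight) is an assumption for illustration, not code from bioimage_embed.

# Hypothetical sketch only: illustrates "variational loss handled per *_step",
# not the actual bioimage_embed implementation.
import pytorch_lightning as pl


class PerStepVariationalModule(pl.LightningModule):
    def __init__(self, model, beta=1.0):
        super().__init__()
        self.model = model  # assumed to return a (recon_loss, kl_loss) pair
        self.beta = beta    # weight on the variational (KL) term

    def training_step(self, batch, batch_idx):
        x, _ = batch
        recon_loss, kl_loss = self.model(x)
        # the variational term is added inside this step itself
        loss = recon_loss + self.beta * kl_loss
        self.log("train/variational_loss", kl_loss)
        self.log("train/loss", loss)
        return loss

    def validation_step(self, batch, batch_idx):
        x, _ = batch
        recon_loss, kl_loss = self.model(x)
        # and again, locally, in this step rather than in a shared loss hook
        loss = recon_loss + self.beta * kl_loss
        self.log("val/variational_loss", kl_loss)
        self.log("val/loss", loss)
        return loss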
bioimage_embed/lightning/torch.py: 7 changes (4 additions & 3 deletions)
@@ -130,17 +130,18 @@ def test_step(self, batch, batch_idx):
         )
         return model_output.loss
 
-    # Fangless function to be overloaded later
     def batch_to_xy(self, batch):
+        """
+        Fangless function to be overloaded later
+        """
         x, y = batch
         return x, y
 
     def eval_step(self, batch, batch_idx):
         """
         This function should be overloaded in the child class to implement the evaluation logic.
         """
-        model_output = self.predict_step(batch, batch_idx)
-        return model_output
+        return self.predict_step(batch, batch_idx)
 
     # def lr_scheduler_step(self, epoch, batch_idx, optimizer, optimizer_idx, second_order_closure=None):
     #     # Implement your own logic for updating the lr scheduler
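The docstrings in the hunk mark batch_to_xy and eval_step as hooks to be overloaded. Below is a minimal sketch of how a child class might do that, assuming a dataloader that yields dicts and a model output object exposing a .loss attribute; the Base class only mirrors the two hooks from the hunk and stands in for the class actually edited in bioimage_embed/lightning/torch.py, whose real name is not shown here.

# Hypothetical sketch of overloading the two hooks; all names are placeholders.
import pytorch_lightning as pl


class Base(pl.LightningModule):
    def batch_to_xy(self, batch):
        # pass-through default, as in the hunk above
        x, y = batch
        return x, y

    def eval_step(self, batch, batch_idx):
        # default evaluation just defers to the prediction pass
        return self.predict_step(batch, batch_idx)


class DictBatchModel(Base):
    def batch_to_xy(self, batch):
        # overloaded: the dataloader is assumed to yield dicts, not (x, y) tuples
        return batch["image"], batch["label"]

    def eval_step(self, batch, batch_idx):
        # overloaded: keep the prediction pass but log an extra metric
        model_output = self.predict_step(batch, batch_idx)
        self.log("eval/loss", model_output.loss)  # assumes the output exposes .loss
        return model_output

On the refactor itself: returning self.predict_step(batch, batch_idx) directly instead of binding it to a temporary variable is behaviour-preserving; it only removes the intermediate name.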
