
Commit ed6ea36

[fix] cleaning

ctr26 committed Oct 1, 2024
1 parent 927d9c9 commit ed6ea36
Showing 6 changed files with 5 additions and 494 deletions.
25 changes: 0 additions & 25 deletions bioimage_embed/datasets/__init__.py

This file was deleted.

102 changes: 0 additions & 102 deletions bioimage_embed/hydra.py

This file was deleted.

7 changes: 4 additions & 3 deletions bioimage_embed/lightning/pyro.py
@@ -7,6 +7,10 @@
 
 
 class LitAutoEncoderPyro(pl.LightningModule):
+    """
+    WIP Unsupported
+    """
+
     def __init__(self, model, batch_size=1, learning_rate=1e-3):
         super().__init__()
         # self.autoencoder = AutoEncoder(batch_size, 1)
@@ -59,6 +63,3 @@ def pyro_training_step(self, train_batch, batch_idx):
 
     def training_step(self, train_batch, batch_idx):
         return self.torch_training_step(train_batch, batch_idx)
-
-    def training_step(self, train_batch, batch_idx):
-        return self.pyro_training_step(train_batch, batch_idx)
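
Why the deleted duplicate mattered: in a Python class body, a repeated method name keeps only the later binding, so before this commit the second `training_step` (the Pyro variant) silently shadowed the torch variant defined just above it. A minimal illustration of that pitfall (hypothetical `Demo` class, not from the repo):

# Illustration only -- hypothetical class, not part of bioimage_embed.
class Demo:
    def step(self):
        return "torch path"

    def step(self):  # same name: silently replaces the definition above
        return "pyro path"

assert Demo().step() == "pyro path"  # only the later binding survives

After this commit, `training_step` unambiguously delegates to `torch_training_step`.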
12 changes: 0 additions & 12 deletions bioimage_embed/lightning/torch.py
@@ -143,16 +143,6 @@ def eval_step(self, batch, batch_idx):
         """
         return self.predict_step(batch, batch_idx)
 
-    # def lr_scheduler_step(self, epoch, batch_idx, optimizer, optimizer_idx, second_order_closure=None):
-    #     # Implement your own logic for updating the lr scheduler
-    #     # This method will be called at each training step
-    #     # Update the lr scheduler based on the provided arguments
-    #     # You can access the lr scheduler using `self.lr_schedulers()`
-
-    #     # Example:
-    #     for lr_scheduler in self.lr_schedulers():
-    #         lr_scheduler.step()
-
     def timm_optimizers(self, model):
         optimizer = optim.create_optimizer(self.args, model.parameters())
         lr_scheduler = scheduler.create_scheduler(self.args, optimizer)[0]
@@ -168,8 +158,6 @@ def timm_to_lightning(self, optimizer, lr_scheduler):
         }
 
     def configure_optimizers(self):
-        # optimizer = optim.create_optimizer(self.args, self.model.parameters())
-        # lr_scheduler = scheduler.create_scheduler(self.args, optimizer)[0]
         optimizer, lr_scheduler = self.timm_optimizers(self.model)
         return self.timm_to_lightning(optimizer, lr_scheduler)
 
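
For context on the pattern this file keeps: PyTorch Lightning's `configure_optimizers` may return a dict pairing an optimizer with an lr-scheduler config, and Lightning steps standard torch schedulers itself, which is consistent with deleting the commented-out manual `lr_scheduler_step` sketch above. A minimal, self-contained sketch of that contract (the model, optimizer, and hyperparameters here are assumptions, not taken from this repo; `timm_to_lightning` presumably returns a dict of this shape):

# Hedged sketch of Lightning's optimizer/scheduler return contract.
# The concrete model, optimizer, and scheduler are assumptions.
import torch
import pytorch_lightning as pl

class SketchModule(pl.LightningModule):
    def __init__(self):
        super().__init__()
        self.layer = torch.nn.Linear(4, 1)

    def training_step(self, batch, batch_idx):
        x, y = batch
        return torch.nn.functional.mse_loss(self.layer(x), y)

    def configure_optimizers(self):
        optimizer = torch.optim.AdamW(self.parameters(), lr=1e-3)
        lr_scheduler = torch.optim.lr_scheduler.StepLR(optimizer, step_size=10)
        # Lightning steps a standard torch scheduler automatically at the
        # chosen interval, so no manual lr_scheduler_step override is needed.
        return {
            "optimizer": optimizer,
            "lr_scheduler": {"scheduler": lr_scheduler, "interval": "epoch"},
        }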