Poptorch 3.1.0 supports dictionary inputs #16683

Closed · wants to merge 1 commit
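This PR deletes the `IPUModel` test helper and constructs `BoringModel` directly throughout the IPU accelerator tests. As a hedged reading of the title: the helper presumably existed because PopTorch releases before 3.1.0 could not handle dictionary-structured values when compiling a step, so the tests flattened every step to return the bare loss tensor. A minimal sketch of the before/after, assuming Lightning's standard `BoringModel` (whose `training_step` returns a `{"loss": loss}` dict); the import path is an assumption and varies across Lightning versions:

# A sketch, not part of the PR: the deleted workaround vs. the new usage.
from pytorch_lightning.demos.boring_classes import BoringModel  # assumed path


class IPUModel(BoringModel):
    # Pre-3.1.0 workaround: PopTorch could not trace dict-structured
    # step values, so each step returned the bare loss tensor instead.
    def training_step(self, batch, batch_idx):
        return self.step(batch)  # plain tensor instead of {"loss": ...}

    def validation_step(self, batch, batch_idx):
        return self.step(batch)

    def test_step(self, batch, batch_idx):
        return self.step(batch)


# With PopTorch >= 3.1.0 the dict-returning BoringModel compiles as-is,
# which is why every test below now builds BoringModel() directly:
model = BoringModel()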
41 changes: 15 additions & 26 deletions tests/tests_pytorch/accelerators/test_ipu.py
@@ -37,17 +37,6 @@
 import poptorch


-class IPUModel(BoringModel):
-    def training_step(self, batch, batch_idx):
-        return self.step(batch)
-
-    def validation_step(self, batch, batch_idx):
-        return self.step(batch)
-
-    def test_step(self, batch, batch_idx):
-        return self.step(batch)
-
-
 class IPUClassificationModel(ClassificationModel):
     def training_step(self, batch, batch_idx):
         x, y = batch
@@ -115,7 +104,7 @@ def test_no_warning_strategy(tmpdir):
 @RunIf(ipu=True)
 @pytest.mark.parametrize("devices", [1, 4])
 def test_all_stages(tmpdir, devices):
-    model = IPUModel()
+    model = BoringModel()
     trainer = Trainer(default_root_dir=tmpdir, fast_dev_run=True, accelerator="ipu", devices=devices)
     trainer.fit(model)
     trainer.validate(model)
@@ -126,7 +115,7 @@ def test_all_stages(tmpdir, devices):
 @RunIf(ipu=True)
 @pytest.mark.parametrize("devices", [1, 4])
 def test_inference_only(tmpdir, devices):
-    model = IPUModel()
+    model = BoringModel()

     trainer = Trainer(default_root_dir=tmpdir, fast_dev_run=True, accelerator="ipu", devices=devices)
     trainer.validate(model)
@@ -181,7 +170,7 @@ def setup(self, trainer: Trainer, pl_module: LightningModule, stage: str) -> None:
             assert trainer.precision == "16"
             raise SystemExit

-    model = IPUModel()
+    model = BoringModel()
     trainer = Trainer(
         default_root_dir=tmpdir, fast_dev_run=True, accelerator="ipu", devices=1, precision=16, callbacks=TestCallback()
     )
@@ -200,7 +189,7 @@ def on_train_start(self, trainer: Trainer, pl_module: LightningModule) -> None:
             assert param.dtype == torch.float16
             raise SystemExit

-    model = IPUModel()
+    model = BoringModel()
     model = model.half()
     trainer = Trainer(
         default_root_dir=tmpdir, fast_dev_run=True, accelerator="ipu", devices=1, precision=16, callbacks=TestCallback()
@@ -234,7 +223,7 @@ def on_train_start(self, trainer: Trainer, pl_module: LightningModule) -> None:
             assert poptorch_model._options.toDict()["device_iterations"] == 2
             raise SystemExit

-    model = IPUModel()
+    model = BoringModel()
     trainer = Trainer(
         default_root_dir=tmpdir,
         fast_dev_run=True,
@@ -260,7 +249,7 @@ def on_train_start(self, trainer: Trainer, pl_module: LightningModule) -> None:
             assert poptorch_model._options.Training.toDict()["gradient_accumulation"] == 2
             raise SystemExit

-    model = IPUModel()
+    model = BoringModel()
     trainer = Trainer(
         default_root_dir=tmpdir,
         fast_dev_run=True,
@@ -277,7 +266,7 @@ def on_train_start(self, trainer: Trainer, pl_module: LightningModule) -> None:
 def test_stages_correct(tmpdir):
     """Ensure all stages are traced correctly by asserting the output for each stage."""

-    class StageModel(IPUModel):
+    class StageModel(BoringModel):
         def training_step(self, batch, batch_idx):
             loss = super().training_step(batch, batch_idx)
             # tracing requires a loss value that depends on the model.
@@ -321,7 +310,7 @@ def on_predict_batch_end(self, trainer, pl_module, outputs, batch, batch_idx, dataloader_idx):

 @RunIf(ipu=True)
 def test_different_accumulate_grad_batches_fails(tmpdir):
-    model = IPUModel()
+    model = BoringModel()
     trainer = Trainer(default_root_dir=tmpdir, accelerator="ipu", devices=1, accumulate_grad_batches={1: 2})
     with pytest.raises(
         MisconfigurationException, match="IPUs currently does not support different `accumulate_grad_batches`"
@@ -331,7 +320,7 @@ def test_different_accumulate_grad_batches_fails(tmpdir):

 @RunIf(ipu=True)
 def test_clip_gradients_fails(tmpdir):
-    model = IPUModel()
+    model = BoringModel()
     trainer = Trainer(default_root_dir=tmpdir, accelerator="ipu", devices=1, gradient_clip_val=10)
     with pytest.raises(MisconfigurationException, match="IPUs currently do not support clipping gradients."):
         trainer.fit(model)
@@ -340,7 +329,7 @@ def test_clip_gradients_fails(tmpdir):
 @RunIf(ipu=True)
 def test_autoreport(tmpdir):
     """Ensure autoreport dumps to a file."""
-    model = IPUModel()
+    model = BoringModel()
     autoreport_path = os.path.join(tmpdir, "report/")
     trainer = Trainer(
         default_root_dir=tmpdir,
@@ -358,7 +347,7 @@ def test_manual_poptorch_dataloader(tmpdir):
 def test_manual_poptorch_dataloader(tmpdir):
     model_options = poptorch.Options()

-    class IPUTestModel(IPUModel):
+    class IPUTestModel(BoringModel):
         def train_dataloader(self):
             dataloader = super().train_dataloader()
             # save to instance to compare the reference later
@@ -389,7 +378,7 @@ def train_dataloader(self):
 @RunIf(ipu=True)
 def test_manual_poptorch_opts(tmpdir):
     """Ensure if the user passes manual poptorch Options, we run with the correct object."""
-    model = IPUModel()
+    model = BoringModel()
     inference_opts = poptorch.Options()
     training_opts = poptorch.Options()

@@ -418,7 +407,7 @@ def test_manual_poptorch_opts_custom(tmpdir):
     """Ensure if the user passes manual poptorch Options with custom parameters set, we respect them in our
     poptorch options and the dataloaders."""

-    model = IPUModel()
+    model = BoringModel()
     training_opts = poptorch.Options()
     training_opts.deviceIterations(8)
     training_opts.replicationFactor(2)
@@ -511,7 +500,7 @@ def test_replication_factor(tmpdir):
 def test_default_opts(tmpdir):
     """Ensure default opts are set correctly in the IPUStrategy."""

-    model = IPUModel()
+    model = BoringModel()

     trainer = Trainer(default_root_dir=tmpdir, accelerator="ipu", devices=1, fast_dev_run=True)
     trainer.fit(model)
@@ -529,7 +518,7 @@ def test_default_opts(tmpdir):
 def test_multi_optimizers_fails(tmpdir):
     """Ensure if there are multiple optimizers, we throw an exception."""

-    class TestModel(IPUModel):
+    class TestModel(BoringModel):
         def configure_optimizers(self):
             return [torch.optim.Adam(self.parameters()), torch.optim.Adam(self.parameters())]
