save_hyperparameters() self.backbone = MyResNet(num_classes) self.train_accuracy = torchmetrics.Accuracy() def forward(self, x): embedding = self.backbone(x) return embedding def training_step(self, batch, batch_idx): x, y, _ = batch y_hat = self.backbone(x.float()) loss = F....
[LightningModule.CHECKPOINT_HYPER_PARAMS_TYPE] = type(model.hparams) else: checkpoint[LightningModule.CHECKPOINT_HYPER_PARAMS_KEY] = dict(model.hparams) # give the model a chance to add a few things model.on_save_checkpoint(checkpoint) if self.trainer.datamodule is not None: self.trainer.datamodule.on_save_...
KeyError: 'Trying to restore training state but checkpoint contains only the model. This is probably due to `ModelCheckpoint.save_weights_only` being set to `True`.' If I just want to run eval on a `pl.Module`, should I avoid making a trainer?