Optimizer got an empty parameter list

Hello I’m a new user of pytorch and pytorch lightning and I’m facing the error mentioned in the title of the post : " ValueError: optimizer got an empty parameter list "

This is the code I’m using :

class MyClassifier(pl.LightningModule):
  def __init__(self, n_classes: int, steps_per_epoch= None, n_epochs=None):
    super().__init__()
    
    self.config = AutoConfig.from_pretrained(MODEL_NAME),
    self.model = AutoModel.from_pretrained(MODEL_NAME,return_dict=True,config=self.config),
    self.classifier = nn.Linear(self.config[0].hidden_size,n_classes),
    self.steps_per_epoch = steps_per_epoch,
    self.n_epochs = n_epochs,
    self.criterion = nn.BCELoss()

  def forward(self, input_ids, attention_mask, labels=None):
    output = self.model(input_ids, attention_mask = attention_mask)
    output = self.classifier(output.pooler_output)
    output = torch.sigmoid(output)

    loss = 0
    
    if labels is not None:
      loss = self.criterion(output,labels)
    return loss,output

  def training_step(self,batch, batch_ids):
    input_ids = batch['input_ids']
    attention_mask = batch['attention_mask']
    labels = batch['labels']
    loss,outputs =  self(input_ids,attention_mask,labels)
    self.log("train_loss", loss, prog_bar = True, logger=True)
    return {"loss":loss, "predictions":outputs,"labels":labels}


  def validation_step(self,batch, batch_ids):
    input_ids = batch['input_ids']
    attention_mask = batch['attention_mask']
    labels = batch['labels']
    loss,outputs =  self(input_ids,attention_mask,labels)
    self.log("val_loss", loss, prog_bar = True, logger=True)
    return loss

  def test_step(self,batch, batch_ids):
    input_ids = batch['input_ids']
    attention_mask = batch['attention_mask']
    labels = batch['labels']
    loss,outputs =  self(input_ids,attention_mask,labels)
    self.log("test_loss", loss, prog_bar = True, logger=True)
    return loss
  
  def training_epoch_end(self, outputs):
    labels = []
    predictions = []

    for output in outputs:
      for out_labels in output["labels"].detach().cpu():
        labels.append(out_labels)
      for out_predictions in output["predictions"].detach().cpu():
        predictions.append(out_predictions)
      labels = torch.stack(labels)
      predictions = torch.stack(predictions)

      for i,name in enumerate(LABELS_COLS):
        roc_score = auroc(predictions[:,i],labels[:,i])
        self.logger.experiment.add_scalar(f"{name}_roc_auc/Train",roc_score,self.current_epoch)

  def configure_optimizers(self):
    optimizer = AdamW(self.parameters(),lr=2e-5)
    warmup_steps = self.steps_per_epoch // 3 
    total_steps = self.steps_per_epoch * self.n_epochs - warmup_steps

    scheduler = get_linear_schedule_with_warmup(
        optimizer,
        warmup_steps,
        total_steps
    )
    return [optimizer], [scheduler]


model = MyClassifier(n_classes=len(LABELS_COLS), 
                           steps_per_epoch=len(train_df) // BATCH_SIZE,
                           n_epochs = N_EPOCHS)

trainer = pl.Trainer(max_epochs=N_EPOCHS, gpus=1, progress_bar_refresh_rate=30)

trainer.fit(model,data_module)

I figured that when I print list(model.parameters()) it’s empty but I don’t know why.

Can anyone assist me please ?

you have a comma (,) here at the end which converts it to tuple so nn.Module won’t consider it under self.parameters().