Accuracy is not improving and training epochs take a long time

Hello, I am a beginner trying out Lightning for the first time. I was working on a character-classification dataset which has around 71 classes. I was able to achieve 90% accuracy using Keras, with each epoch taking about 4 minutes. When I tried to implement it in Lightning, my accuracy is below 1% and a single epoch takes around 10 minutes. I know I must have done something wrong in my code but I am not able to figure it out. My code is as below:

First I created the datasets using ImageFolder and applied the transformations as:

# ImageNet-style preprocessing: resize to 224x224, convert to a tensor, and
# normalize with the standard ImageNet channel statistics that the pretrained
# DenseNet backbone expects.
tnfms = transforms.Compose([
    transforms.Resize((224, 224)),
    transforms.ToTensor(),
    transforms.Normalize(mean=[0.485, 0.456, 0.406],
                         std=[0.229, 0.224, 0.225]),
])

# One-folder-per-class datasets; the 'test' split doubles as validation data.
train_data = datasets.ImageFolder(path/'train', transform=tnfms)
val_data = datasets.ImageFolder(path/'test', transform=tnfms)

Then I used a Lightning class to build the model:

class Model(pl.LightningModule):
    """DenseNet-121 fine-tuned for 71-class character classification.

    Note: as pasted, the methods were not indented under the class, which is
    a syntax error; they are restored to class scope here.
    """

    def __init__(self):
        super().__init__()
        self.net = models.densenet121(pretrained=True)
        # Replace the ImageNet 1000-way head with a 71-class classifier.
        self.net.classifier = nn.Linear(in_features=1024, out_features=71, bias=True)

    def forward(self, x):
        """Run a batch of images through the backbone; returns raw logits."""
        return self.net(x)

    def configure_optimizers(self):
        # NOTE(review): lr=1e-3 is on the high side for fine-tuning a
        # pretrained backbone; consider ~1e-4 if training is unstable.
        return torch.optim.AdamW(self.parameters(), lr=0.001)

    def training_step(self, batch, batch_idx):
        images, labels = batch
        out = self(images)
        loss = F.cross_entropy(out, labels)
        tensorboard_logs = {'train_loss': loss}
        return {'loss': loss, 'log': tensorboard_logs}

    def validation_step(self, batch, batch_idx):
        images, labels = batch
        out = self(images)
        loss = F.cross_entropy(out, labels)
        # Predicted class = argmax over the class dimension.
        _, y_hat = torch.max(out, dim=1)
        # sklearn needs CPU tensors/arrays.
        val_acc = torch.tensor(accuracy_score(y_hat.cpu(), labels.cpu()))
        return {'val_loss': loss, 'val_acc': val_acc}

    def validation_epoch_end(self, outputs):
        avg_loss = torch.stack([x['val_loss'] for x in outputs]).mean()
        avg_val_acc = torch.stack([x['val_acc'] for x in outputs]).mean()
        tensorboard_logs = {'val_loss': avg_loss, 'avg_val_acc': avg_val_acc}
        return {'val_loss': avg_loss, 'progress_bar': tensorboard_logs}

    def train_dataloader(self):
        # BUG FIX: shuffle=True was missing. ImageFolder yields samples
        # grouped by class, so training without shuffling feeds whole classes
        # in order and cripples optimization — a likely cause of the <1%
        # accuracy reported above.
        return DataLoader(train_data, batch_size=16, shuffle=True, num_workers=4)

    def val_dataloader(self):
        # Validation order does not matter; no shuffling needed.
        return DataLoader(val_data, batch_size=16, num_workers=4)

Then I run the following program by initializing the trainer:

# Instantiate the LightningModule and fit it on a single GPU; the dataloaders
# are provided by the module's train_dataloader/val_dataloader hooks.
model = Model()
trainer = pl.Trainer(gpus=1)    
trainer.fit(model) 

I use sklearn’s accuracy_score() to calculate the accuracy. Please help me out.

This is a quick edit of your sample code according to the docs:

def __init__(self, ...):
    ...
    # Create the metric once as a module attribute so Lightning can move it
    # to the correct device and manage its state across the epoch.
    self.valid_acc = pl.metrics.Accuracy()

def training_step(self, batch, batch_idx):
    """Compute the cross-entropy loss for one training batch and log it."""
    images, labels = batch
    logits = self(images)
    loss = F.cross_entropy(logits, labels)
    # Log both the per-step value and the epoch-aggregated average.
    self.log('loss', loss, on_step=True, on_epoch=True)
    return loss

def validation_step(self, batch, batch_idx):
    """Compute validation loss, update the accuracy metric, and log both."""
    images, labels = batch
    out = self(images)
    loss = F.cross_entropy(out, labels)
    # Predicted class index = argmax over the class dimension.
    a, y_hat = torch.max(out, dim=1)
    # BUG FIX: was `self.valid_acc(y_hat, y)` — `y` is undefined here and
    # would raise a NameError; the targets are named `labels`.
    self.valid_acc(y_hat, labels)
    self.log('valid_loss', loss, on_step=True, on_epoch=True)
    # Logging the metric object lets Lightning compute/reset it per epoch.
    self.log('valid_acc', self.valid_acc, on_step=True, on_epoch=True)

def validation_epoch_end(self, outputs):
    """No manual aggregation needed.

    `self.log(..., on_epoch=True)` in `validation_step` already averages the
    loss and accuracy over the epoch.
    """
    # BUG FIX: the original body was only a comment, which is a SyntaxError;
    # an explicit no-op keeps the hook valid.
    pass