How to switch optimizers during training

Following the script below, it seems that the LBFGS optimizer is not updating the weights; in fact, the loss is not changing. I have no problem using the Adam and SGD optimizers in PyTorch Lightning, but I do not know how to use LBFGS correctly.

def configure_optimizers(self):
    optimizer = optim.LBFGS(self.parameters(), lr=0.01)
    return optimizer

def training_step(self, train_batch, batch_idx):
    x, t = train_batch
    # total loss is the sum of the three terms returned by problem_formulation
    lg, lb, li = self.problem_formulation(x, t, self.bndry)
    loss = lg + lb + li
    return {'loss': loss}

def backward(self, trainer, loss, optimizer, optimizer_idx):
    # retain the graph so backward can be called more than once per step
    loss.backward(retain_graph=True)

def optimizer_step(self, current_epoch, batch_nb, optimizer, optimizer_idx, second_order_closure,
                   on_tpu=False, using_native_amp=False, using_lbfgs=True):
    # pass the closure so LBFGS can re-evaluate the loss internally
    optimizer.step(second_order_closure)
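
For reference, my understanding is that in plain PyTorch, LBFGS requires a closure that re-evaluates the model and returns the loss, because the optimizer may call it several times per step. Below is a minimal sketch of that pattern; the model and the tensors x and y are hypothetical stand-ins, not part of my actual code:

import torch
from torch import nn, optim

# hypothetical model and data, for illustration only
model = nn.Linear(10, 1)
x, y = torch.randn(32, 10), torch.randn(32, 1)

optimizer = optim.LBFGS(model.parameters(), lr=0.01)

def closure():
    # LBFGS may invoke this several times per step, so it must
    # zero the gradients, recompute the loss, and backpropagate each time
    optimizer.zero_grad()
    loss = nn.functional.mse_loss(model(x), y)
    loss.backward()
    return loss

optimizer.step(closure)

I assumed that the second_order_closure argument in optimizer_step plays the same role in Lightning, which is why I pass it to optimizer.step above, but the loss still does not change.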