Hello,
Does anyone know this error? I’ve been checking the DataModule documentation, but so far this parameter doesn’t seem to be described anywhere.
Here is the error:
This is my code:
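(For reference, the snippet relies on the imports below; ImageTransform, getDir, and MRIDataset are helpers from my own project, and the module name is just a placeholder.)

import numpy as np
import pytorch_lightning as pl
from sklearn.model_selection import train_test_split
from torch.utils.data import DataLoader

# my own helpers; "mri_utils" is a placeholder module name
from mri_utils import ImageTransform, getDir, MRIDataset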
class MRIDatamodule(pl.LightningDataModule):
    def __init__(self, Paths, im_size=256, batch_size=75, factor=1):
        self.batch_size = batch_size
        self.transform = ImageTransform(img_size=im_size)
        self.factor = factor
        self.TrainDir, self.ValtDir, self.TestDir = self.preparedata(Paths)
        #self._log_hyperparams = None
        self.prepare_data_per_node = True
    def preparedata(self, Paths, balance=True):
        DataObject = getDir(Paths)
        df = DataObject.df
        if balance:
            # randomly drop D3 rows so the remaining D3 count matches D1 + D2
            drop_indices = np.random.choice(df[df["Group"] == "D3"].index, DataObject.D3 - (DataObject.D1 + DataObject.D2), replace=False)
            df_subset = df.drop(drop_indices).reset_index(drop=True)
            print("subset is {}".format(df_subset.shape))
        else:
            df_subset = df.copy()
        # 80/10/10 split into train / validation / test frames
        TrainDir, tmp = train_test_split(df_subset, test_size=0.20, shuffle=True)
        TestDir, ValtDir = train_test_split(tmp, test_size=0.50, shuffle=True)
        TrainDir = TrainDir.reset_index(drop=True)
        ValtDir = ValtDir.reset_index(drop=True)
        TestDir = TestDir.reset_index(drop=True)
        print("Train is {}".format(TrainDir.shape))
        return TrainDir, ValtDir, TestDir
    def prepare_data(self):
        """
        Empty prepare_data method left in intentionally.
        https://pytorch-lightning.readthedocs.io/en/latest/data/datamodule.html#prepare-data
        """
        pass
    def setup(self, stage=None):
        if stage == "fit" or stage is None:
            self.Train_dataset = MRIDataset(self.TrainDir, self.transform, factor=self.factor)
            self.Val_dataset = MRIDataset(self.ValtDir, self.transform, factor=self.factor)
        if stage == "test":
            self.Test_dataset = MRIDataset(self.TestDir, self.transform, phase="test")
    def train_dataloader(self):
        return DataLoader(self.Train_dataset, shuffle=True, batch_size=self.batch_size)

    def val_dataloader(self):
        return DataLoader(self.Val_dataset, batch_size=self.batch_size)

    def test_dataloader(self):
        return DataLoader(self.Test_dataset, batch_size=self.batch_size)
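In case it helps, this is roughly how I use the datamodule. It is a simplified sketch: MyModel stands in for my actual LightningModule, and the path and trainer settings are placeholders, not my real setup.

# simplified usage sketch; MyModel and the path are placeholders
dm = MRIDatamodule(Paths="/path/to/mri/data", im_size=256, batch_size=75)
model = MyModel()
trainer = pl.Trainer(max_epochs=10)
trainer.fit(model, datamodule=dm)
trainer.test(model, datamodule=dm)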