Skip to content

Commit ad953fe

Browse files
committed
fix: force use of the multiprocess dataloader for wds
1 parent d684489 commit ad953fe

1 file changed

Lines changed: 0 additions & 5 deletions

File tree

seqchromloader/loader.py

Lines changed: 0 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -57,7 +57,6 @@ def initialize(self):
5757
pass
5858

5959
def __iter__(self):
60-
worker_info = torch.utils.data.get_worker_info()
6160
pipeline = [
6261
wds.SimpleShardList(self.wds),
6362
split_by_node(self.rank, self.world_size),
@@ -69,10 +68,6 @@ def __iter__(self):
6968
target="target.npy",
7069
label="label.npy")
7170
]
72-
if worker_info is None:
73-
logging.info("Worker info not found, won't split dataset across subprocesses, are you using custom dataloader?")
74-
logging.info("Ignore the message if you are not using multiprocessing on data loading")
75-
del pipeline[2]
7671

7772
# transform
7873
if self.transforms is not None:

0 commit comments

Comments (0)