Skip to content

Commit db8c864

Browse files
committed
[Enhance] add prefetch_factor to DataloaderConfig
1 parent 6a4f3f5 commit db8c864

File tree

1 file changed

+2
-0
lines changed

1 file changed

+2
-0
lines changed

xtuner/v1/datasets/config.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -287,6 +287,7 @@ class DataloaderConfig(BaseDataloaderConfig):
287287
int, Parameter(help="pack extra buffer size when pack_level is expand_soft model")
288288
] = 100
289289
num_workers: Annotated[int, Parameter(help="dataloader num workers")] = 0
290+
prefetch_factor: Annotated[int | None, Parameter(help="number of batches loaded in advance by each worker")] = None
290291
pad_token_id: Annotated[int | None, Parameter(help="padding token id")] = None
291292
tokenizer_hash: Annotated[str | None, Parameter(help="tokenizer hash")] = None
292293

@@ -431,6 +432,7 @@ def build(
431432
dataset,
432433
batch_size=micro_batch_size,
433434
num_workers=self.num_workers,
435+
prefetch_factor=self.prefetch_factor,
434436
# Ensure to round up or drop last based on the `global_batch_size`,
435437
# if you want to replace a custom sampler.
436438
sampler=sampler,

0 commit comments

Comments (0)