add use_shared_memory params for AIstudio demo

LDOUBLEV 2021-01-11 15:57:28 +08:00
parent ccfc754488
commit 825a28244c
3 changed files with 9 additions and 4 deletions

View File

@@ -102,6 +102,7 @@ Train:
     drop_last: False
     batch_size_per_card: 16
     num_workers: 8
+    use_shared_memory: False
 Eval:
   dataset:
@@ -128,4 +129,5 @@ Eval:
     shuffle: False
     drop_last: False
     batch_size_per_card: 1 # must be 1
-    num_workers: 8
+    num_workers: 8
+    use_shared_memory: False

View File

@@ -76,6 +76,7 @@ Train:
     batch_size_per_card: 256
     drop_last: True
     num_workers: 8
+    use_shared_memory: False
 Eval:
   dataset:
@@ -96,3 +97,4 @@ Eval:
     drop_last: False
     batch_size_per_card: 256
     num_workers: 4
+    use_shared_memory: False
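
Both config files receive the same addition: a use_shared_memory switch inside the loader: section, next to num_workers. The sketch below shows how a loader section shaped like the ones above could be parsed and the new key read with a safe default; the inline YAML and the Eval/loader nesting are illustrative assumptions, not the actual PaddleOCR config-loading code.

import yaml  # PyYAML, used only to illustrate reading the new key

# Hypothetical loader section shaped like the diffs above (not a real PaddleOCR file).
cfg_text = """
Eval:
  loader:
    shuffle: False
    drop_last: False
    batch_size_per_card: 1
    num_workers: 8
    use_shared_memory: False
"""
config = yaml.safe_load(cfg_text)
loader_config = config['Eval']['loader']

# Missing key -> default to True, matching the if/else added in build_dataloader below.
use_shared_memory = loader_config.get('use_shared_memory', True)
print(use_shared_memory)  # False here, because the demo configs switch it off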

View File

@@ -66,8 +66,10 @@ def build_dataloader(config, mode, device, logger):
     batch_size = loader_config['batch_size_per_card']
     drop_last = loader_config['drop_last']
     num_workers = loader_config['num_workers']
-    use_shared_memory = False
+    if 'use_shared_memory' in loader_config.keys():
+        use_shared_memory = loader_config['use_shared_memory']
+    else:
+        use_shared_memory = True
     if mode == "Train":
         #Distribute data to multiple cards
         batch_sampler = DistributedBatchSampler(
@@ -75,7 +77,6 @@ def build_dataloader(config, mode, device, logger):
             batch_size=batch_size,
             shuffle=False,
             drop_last=drop_last)
-        use_shared_memory = True
     else:
         #Distribute data to single card
         batch_sampler = BatchSampler(
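
Net effect of this change: use_shared_memory used to be hard-coded to False and flipped to True only for the Train loader; it is now read from the loader config for both Train and Eval, defaulting to True when the key is absent. The flag is presumably passed on to paddle.io.DataLoader, which does accept a use_shared_memory argument (default True); that call site is outside this hunk, so the wiring below is a hedged sketch, and dict.get is simply the compact equivalent of the added if/else.

import numpy as np
from paddle.io import DataLoader, Dataset

class ToyDataset(Dataset):
    # Minimal stand-in dataset so the sketch runs on its own.
    def __getitem__(self, idx):
        return np.array([idx], dtype='float32')
    def __len__(self):
        return 8

loader_config = {'batch_size_per_card': 4, 'num_workers': 0}  # hypothetical loader section
# Compact equivalent of the added if/else: fall back to True when the key is missing.
use_shared_memory = loader_config.get('use_shared_memory', True)

data_loader = DataLoader(
    ToyDataset(),
    batch_size=loader_config['batch_size_per_card'],
    num_workers=loader_config['num_workers'],  # shared memory only matters when workers > 0
    use_shared_memory=use_shared_memory)

for batch in data_loader:
    pass  # one pass over the toy data, just to show the loader is usable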