Merge pull request #1707 from LDOUBLEV/trt_cpp
add use_shared_memory param for the AIStudio demo
commit e1b932968d
@@ -102,6 +102,7 @@ Train:
     drop_last: False
     batch_size_per_card: 16
     num_workers: 8
+    use_shared_memory: False

 Eval:
   dataset:
@@ -129,3 +130,4 @@ Eval:
     drop_last: False
     batch_size_per_card: 1 # must be 1
     num_workers: 8
+    use_shared_memory: False
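For context, use_shared_memory is the flag that build_dataloader (patched below) forwards to paddle.io.DataLoader, where it defaults to True and controls whether worker subprocesses hand batches back through shared memory (/dev/shm). Containerized environments such as AIStudio often mount a small /dev/shm, so the demo configs switch it off in both files touched here. A minimal sketch of where the new YAML key lands; ToyDataset is a hypothetical stand-in, not part of this PR:

# Sketch only: shows how the new loader key maps onto paddle.io.DataLoader.
# ToyDataset is a stand-in for the real OCR dataset, not repo code.
import paddle
from paddle.io import DataLoader, Dataset

class ToyDataset(Dataset):
    def __getitem__(self, idx):
        # Fake image tensor plus its index as a label.
        return paddle.randn([3, 32, 100]), paddle.to_tensor([idx])

    def __len__(self):
        return 64

loader = DataLoader(
    ToyDataset(),
    batch_size=16,            # batch_size_per_card in the YAML
    num_workers=8,            # num_workers in the YAML
    use_shared_memory=False)  # the key added above

With use_shared_memory=True and num_workers > 0, workers stage batches in /dev/shm; if that mount is smaller than the in-flight batches, loading fails with out-of-shared-memory errors, which is the failure mode these configs avoid.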
@@ -76,6 +76,7 @@ Train:
     batch_size_per_card: 256
     drop_last: True
     num_workers: 8
+    use_shared_memory: False

 Eval:
   dataset:
@@ -96,3 +97,4 @@ Eval:
     drop_last: False
     batch_size_per_card: 256
     num_workers: 4
+    use_shared_memory: False
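These loader sections only take effect because build_dataloader reads them at runtime. A rough sketch of that plumbing, assuming PyYAML and a loader: block under each mode as in PaddleOCR configs; the file path is a placeholder, not one of the files in this diff:

# Sketch: read a config shaped like the files patched above and pull
# out one mode's loader settings. Path and layout are assumptions.
import yaml

with open('configs/some_config.yml') as f:  # placeholder path
    config = yaml.safe_load(f)

loader_config = config['Train']['loader']   # assumed config layout
print(loader_config['batch_size_per_card']) # 256 for the second file
# The new key, read with the same default the code change uses:
print(loader_config.get('use_shared_memory', True))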
@@ -66,8 +66,10 @@ def build_dataloader(config, mode, device, logger):
     batch_size = loader_config['batch_size_per_card']
     drop_last = loader_config['drop_last']
     num_workers = loader_config['num_workers']
-
-    use_shared_memory = False
+    if 'use_shared_memory' in loader_config.keys():
+        use_shared_memory = loader_config['use_shared_memory']
+    else:
+        use_shared_memory = True
     if mode == "Train":
         #Distribute data to multiple cards
         batch_sampler = DistributedBatchSampler(
@@ -75,7 +77,6 @@ def build_dataloader(config, mode, device, logger):
             batch_size=batch_size,
             shuffle=False,
             drop_last=drop_last)
-        use_shared_memory = True
     else:
         #Distribute data to single card
         batch_sampler = BatchSampler(
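Net effect of the two hunks: use_shared_memory used to be hard-coded (False by default, flipped to True in Train mode), and is now read from the loader config with a default of True when the key is absent. A condensed, hypothetical reconstruction of the patched function follows; dataset construction is elided and the DataLoader keyword set is an assumption, not verbatim repo code:

# Condensed sketch of the patched logic; simplified, not copied from the repo.
from paddle.io import BatchSampler, DataLoader, DistributedBatchSampler

def build_dataloader(config, mode, device, logger):
    loader_config = config[mode]['loader']  # assumed config layout
    dataset = ...  # placeholder: the real function builds the dataset from config

    batch_size = loader_config['batch_size_per_card']
    drop_last = loader_config['drop_last']
    num_workers = loader_config['num_workers']
    # New behavior: honor the YAML key, defaulting to True when absent.
    # Equivalent to the if/else added in the diff.
    use_shared_memory = loader_config.get('use_shared_memory', True)

    if mode == "Train":
        # Distribute data to multiple cards
        batch_sampler = DistributedBatchSampler(
            dataset=dataset,
            batch_size=batch_size,
            shuffle=False,
            drop_last=drop_last)
    else:
        # Distribute data to single card
        batch_sampler = BatchSampler(
            dataset=dataset,
            batch_size=batch_size,
            shuffle=False,
            drop_last=drop_last)

    return DataLoader(
        dataset=dataset,
        batch_sampler=batch_sampler,
        places=device,
        num_workers=num_workers,
        return_list=True,
        use_shared_memory=use_shared_memory)

One consequence worth noting: before this change, Eval loaders always ran with shared memory off; afterwards both modes default to True unless the config says otherwise, which is why every config touched above sets use_shared_memory: False explicitly.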