Merge pull request #1707 from LDOUBLEV/trt_cpp

add use_shared_memory params for AIstudio demo
Double_V 2021-01-11 16:10:34 +08:00 committed by GitHub
commit e1b932968d
3 changed files with 9 additions and 4 deletions

View File

@@ -102,6 +102,7 @@ Train:
     drop_last: False
     batch_size_per_card: 16
     num_workers: 8
+    use_shared_memory: False
 
 Eval:
   dataset:
@@ -129,3 +130,4 @@ Eval:
     drop_last: False
     batch_size_per_card: 1 # must be 1
     num_workers: 8
+    use_shared_memory: False

View File

@@ -76,6 +76,7 @@ Train:
     batch_size_per_card: 256
     drop_last: True
     num_workers: 8
+    use_shared_memory: False
 
 Eval:
   dataset:
@@ -96,3 +97,4 @@ Eval:
     drop_last: False
     batch_size_per_card: 256
     num_workers: 4
+    use_shared_memory: False
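Both config files above pin the new loader key to False, presumably to accommodate the AI Studio environment named in the commit message. As a minimal sketch of how the key behaves once the YAML is parsed (the dict literal below is hypothetical, mirroring the Eval hunk just above; the .get default matches the Python change in the next file):

# Hypothetical parsed Eval loader block from the YAML above.
loader_config = {
    'drop_last': False,
    'batch_size_per_card': 256,
    'num_workers': 4,
    'use_shared_memory': False,  # the key this commit adds
}

# Same default rule as the build_dataloader change below:
# a config that omits the key keeps shared memory enabled.
use_shared_memory = loader_config.get('use_shared_memory', True)
print(use_shared_memory)  # False here; True if the key were absent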

View File

@@ -66,8 +66,10 @@ def build_dataloader(config, mode, device, logger):
     batch_size = loader_config['batch_size_per_card']
     drop_last = loader_config['drop_last']
     num_workers = loader_config['num_workers']
-    use_shared_memory = False
+    if 'use_shared_memory' in loader_config.keys():
+        use_shared_memory = loader_config['use_shared_memory']
+    else:
+        use_shared_memory = True
     if mode == "Train":
         #Distribute data to multiple cards
         batch_sampler = DistributedBatchSampler(
@@ -75,7 +77,6 @@ def build_dataloader(config, mode, device, logger):
             batch_size=batch_size,
             shuffle=False,
             drop_last=drop_last)
-        use_shared_memory = True
     else:
         #Distribute data to single card
         batch_sampler = BatchSampler(
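The flag computed above is ultimately forwarded to Paddle's dataloader, which exposes a parameter of the same name. A minimal, self-contained sketch of that hand-off, assuming Paddle 2.x's paddle.io.DataLoader; the toy dataset, batch size, and dict literal are placeholders, not part of the commit:

import numpy as np
from paddle.io import DataLoader, Dataset

class ToyDataset(Dataset):
    # Stand-in for the dataset built earlier in build_dataloader
    # (that part of the function is outside the hunks shown above).
    def __len__(self):
        return 8

    def __getitem__(self, idx):
        return np.array([idx], dtype='float32')

loader_config = {'num_workers': 0}  # hypothetical parsed loader block
use_shared_memory = loader_config.get('use_shared_memory', True)

data_loader = DataLoader(
    ToyDataset(),
    batch_size=4,
    num_workers=loader_config['num_workers'],
    # paddle.io.DataLoader defaults use_shared_memory to True,
    # matching the else branch added in this commit.
    use_shared_memory=use_shared_memory)

for batch in data_loader:
    print(batch)

Setting use_shared_memory: False in a config makes worker-to-main-process transfer skip the shared-memory fast path, trading some throughput for robustness on hosts where /dev/shm is small.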