Merge pull request #1707 from LDOUBLEV/trt_cpp
add use_shared_memory params for AIstudio demo
Commit e1b932968d

@@ -102,6 +102,7 @@ Train:
     drop_last: False
     batch_size_per_card: 16
     num_workers: 8
+    use_shared_memory: False

 Eval:
   dataset:
@@ -129,3 +130,4 @@ Eval:
     drop_last: False
     batch_size_per_card: 1 # must be 1
     num_workers: 8
+    use_shared_memory: False

@@ -76,6 +76,7 @@ Train:
     batch_size_per_card: 256
     drop_last: True
     num_workers: 8
+    use_shared_memory: False

 Eval:
   dataset:
@@ -96,3 +97,4 @@ Eval:
     drop_last: False
     batch_size_per_card: 256
     num_workers: 4
+    use_shared_memory: False
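
The four YAML hunks above add the same optional key, use_shared_memory: False, under the loader: section of the Train and Eval dataloaders in two config files (the filenames are not shown in this extract). Presumably this targets environments like AI Studio notebooks, where a small /dev/shm can break shared-memory dataloader workers. The key is a plain boolean; when it is absent the loader keeps shared memory enabled, as the Python hunks below show. A minimal sketch of reading the flag from such a config — the helper name and config path here are hypothetical, not part of this PR:

    # Minimal sketch, not part of this PR: pull the new loader flag out of a
    # PaddleOCR-style YAML config. The helper name and config path are
    # hypothetical; the real default handling lives in build_dataloader below.
    import yaml

    def read_use_shared_memory(config_path, mode="Train"):
        with open(config_path, "r", encoding="utf-8") as f:
            config = yaml.safe_load(f)
        loader_config = config[mode]["loader"]
        # A missing key keeps the old behaviour: shared memory stays enabled.
        return loader_config.get("use_shared_memory", True)

    # Example: a config patched as above would return False here.
    # print(read_use_shared_memory("configs/det/det_mv3_db.yml"))
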

@@ -66,8 +66,10 @@ def build_dataloader(config, mode, device, logger):
     batch_size = loader_config['batch_size_per_card']
     drop_last = loader_config['drop_last']
     num_workers = loader_config['num_workers']
-    use_shared_memory = False
+    if 'use_shared_memory' in loader_config.keys():
+        use_shared_memory = loader_config['use_shared_memory']
+    else:
+        use_shared_memory = True
     if mode == "Train":
         #Distribute data to multiple cards
         batch_sampler = DistributedBatchSampler(
@@ -75,7 +77,6 @@ def build_dataloader(config, mode, device, logger):
             batch_size=batch_size,
             shuffle=False,
             drop_last=drop_last)
-        use_shared_memory = True
     else:
         #Distribute data to single card
         batch_sampler = BatchSampler(
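
After this change, build_dataloader honours the per-loader flag instead of hard-coding shared memory on for training and off otherwise. A condensed sketch of the resulting behaviour follows; the paddle.io.DataLoader call is outside the hunks above and is an assumption here, and dict.get stands in for the explicit if/else purely for brevity:

    # Condensed sketch of build_dataloader after this patch. Only the
    # use_shared_memory handling comes from the diff above; the final
    # DataLoader call is assumed from the surrounding (unshown) code.
    from paddle.io import BatchSampler, DataLoader, DistributedBatchSampler

    def build_dataloader_sketch(dataset, loader_config, mode, device):
        batch_size = loader_config['batch_size_per_card']
        drop_last = loader_config['drop_last']
        num_workers = loader_config['num_workers']
        # New: the flag is optional and defaults to True, so configs that do
        # not set use_shared_memory behave exactly as before the patch.
        use_shared_memory = loader_config.get('use_shared_memory', True)

        if mode == "Train":
            # Distribute data to multiple cards
            batch_sampler = DistributedBatchSampler(
                dataset=dataset,
                batch_size=batch_size,
                shuffle=False,
                drop_last=drop_last)
        else:
            # Distribute data to single card
            batch_sampler = BatchSampler(
                dataset=dataset,
                batch_size=batch_size,
                shuffle=False,
                drop_last=drop_last)

        return DataLoader(
            dataset=dataset,
            batch_sampler=batch_sampler,
            places=device,
            num_workers=num_workers,
            return_list=True,
            use_shared_memory=use_shared_memory)
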