Delete the save_inference_mode function, because the develop branch of Paddle now supports exporting the CRNN model

This commit is contained in:
WenmuZhou 2020-11-28 22:56:10 +08:00
parent 606a387354
commit ef0880b9ae
2 changed files with 0 additions and 17 deletions

View File

@ -333,22 +333,6 @@ def eval(model, valid_dataloader, post_process_class, eval_class):
return metirc
def save_inference_mode(model, config, logger):
if dist.get_rank() == 0:
model.eval()
print('infer')
save_path = '{}/infer/{}'.format(config['Global']['save_model_dir'],
config['Architecture']['model_type'])
if config['Architecture']['model_type'] == 'rec':
input_shape = [None, 3, 32, None]
jit_model = paddle.jit.to_static(
model, input_spec=[paddle.static.InputSpec(input_shape)])
paddle.jit.save(jit_model, save_path)
logger.info('inference model save to {}'.format(save_path))
model.train()
def preprocess():
FLAGS = ArgsParser().parse_args()
config = load_config(FLAGS.config)

View File

@ -89,7 +89,6 @@ def main(config, device, logger, vdl_writer):
program.train(config, train_dataloader, valid_dataloader, device, model,
loss_class, optimizer, lr_scheduler, post_process_class,
eval_class, pre_best_model_dict, logger, vdl_writer)
program.save_inference_mode(model, config, logger)
def test_reader(config, device, logger):