replace rec dataset
parent 0b3aa57569
commit 8fda0c4fd2

@@ -49,4 +49,13 @@ inference:tools/infer/predict_det.py
 --save_log_path:null
 --benchmark:True
 null:null
-
+===========================deploy_params===========================
+trans_model:-m paddle_serving_client.convert
+--dirname:./inference/ch_ppocr_mobile_v2.0_det_infer/
+--model_filename:inference.pdmodel
+--params_filename:inference.pdiparams
+--serving_server:./deploy/pdserving/ppocr_det_mobile_2.0_serving/
+--serving_client:./deploy/pdserving/ppocr_det_mobile_2.0_client/
+serving_dir:./deploy/pdserving
+web_service:web_service_det.py &>log.txt &
+pipline:pipeline_http_client.py --image_dir=../../doc/imgs
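The deploy_params block above simply enumerates the arguments of Paddle Serving's model-conversion tool plus the server and client scripts to run. As a rough illustration only (the interpreter name here is an assumption; test.sh substitutes its own ${python}), the first six entries stitch together into a conversion command along these lines:

    # hypothetical expansion of the deploy_params entries above
    python -m paddle_serving_client.convert \
        --dirname ./inference/ch_ppocr_mobile_v2.0_det_infer/ \
        --model_filename inference.pdmodel \
        --params_filename inference.pdiparams \
        --serving_server ./deploy/pdserving/ppocr_det_mobile_2.0_serving/ \
        --serving_client ./deploy/pdserving/ppocr_det_mobile_2.0_client/

The serving_dir, web_service and pipline entries then tell the test driver where to cd, which server script to launch in the background (the &>log.txt & suffix comes from the config line itself), and which HTTP client to run against it.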
@@ -9,7 +9,7 @@ Global.save_model_dir:./output/
 Train.loader.batch_size_per_card:lite_train_infer=128|whole_train_infer=128
 Global.pretrained_model:null
 train_model_name:latest
-train_infer_img_dir:./train_data/ic15_data/train
+train_infer_img_dir:./train_data/ic15_data/test
 null:null
 ##
 trainer:norm_train|pact_train
@@ -74,3 +74,13 @@ else
 fi
 fi
+
+# prepare serving env
+python_name=$(func_parser_value "${lines[2]}")
+${python_name} -m pip install paddle-serving-server-gpu==0.6.1.post101
+${python_name} -m pip install paddle_serving_client==0.6.1
+${python_name} -m pip install paddle-serving-app==0.6.1
+wget -nc -P ./inference https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_det_infer.tar
+wget -nc -P ./inference https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_rec_infer.tar
+cd ./inference && tar xf ch_ppocr_mobile_v2.0_det_infer.tar && tar xf ch_ppocr_mobile_v2.0_rec_infer.tar
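If the prepare step is suspected to have failed, a minimal manual check (illustrative only, assuming it is run from the repository root after prepare.sh finishes) could be:

    # not part of prepare.sh; just a quick sanity check of the serving environment
    ${python_name} -m pip show paddle-serving-app paddle_serving_client   # were the wheels installed?
    ls ./inference/ch_ppocr_mobile_v2.0_det_infer ./inference/ch_ppocr_mobile_v2.0_rec_infer   # were the tarballs extracted?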
@@ -144,6 +144,22 @@ benchmark_key=$(func_parser_key "${lines[49]}")
 benchmark_value=$(func_parser_value "${lines[49]}")
 infer_key1=$(func_parser_key "${lines[50]}")
 infer_value1=$(func_parser_value "${lines[50]}")
+# parse serving params
+trans_model_py=$(func_parser_value "${lines[52]}")
+infer_model_dir_key=$(func_parser_key "${lines[53]}")
+infer_model_dir_value=$(func_parser_value "${lines[53]}")
+model_filename_key=$(func_parser_key "${lines[54]}")
+model_filename_value=$(func_parser_value "${lines[54]}")
+params_filename_key=$(func_parser_key "${lines[55]}")
+params_filename_value=$(func_parser_value "${lines[55]}")
+serving_server_key=$(func_parser_key "${lines[56]}")
+serving_server_value=$(func_parser_value "${lines[56]}")
+serving_client_key=$(func_parser_key "${lines[57]}")
+serving_client_value=$(func_parser_value "${lines[57]}")
+serving_dir_value=$(func_parser_value "${lines[58]}")
+web_service_py=$(func_parser_value "${lines[59]}")
+pipline_py=$(func_parser_value "${lines[60]}")

 LOG_PATH="./tests/output"
 mkdir -p ${LOG_PATH}
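The new parser block relies on func_parser_key / func_parser_value, which are defined earlier in test.sh and are not shown in this diff. Given the key:value layout of the params file, they presumably amount to splitting a config line at the first ':', roughly like this sketch (an assumption, not the actual definitions):

    # assumed behaviour of the helpers used above
    function func_parser_key() {
        IFS=":" read -r key _ <<< "$1"   # text before the first ':'
        echo "${key}"
    }
    function func_parser_value() {
        echo "${1#*:}"                   # everything after the first ':'
    }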
@@ -250,6 +266,23 @@ if [ ${MODE} = "infer" ]; then
 is_quant=${infer_quant_flag[Count]}
 func_inference "${python}" "${inference_py}" "${save_infer_dir}" "${LOG_PATH}" "${infer_img_dir}" ${is_quant}
 Count=$(($Count + 1))
+# run serving
+set_dirname=$(func_set_params "${infer_model_dir_key}" "${infer_model_dir_value}")
+set_model_filename=$(func_set_params "${model_filename_key}" "${model_filename_value}")
+set_params_filename=$(func_set_params "${params_filename_key}" "${params_filename_value}")
+set_serving_server=$(func_set_params "${serving_server_key}" "${serving_server_value}")
+set_serving_client=$(func_set_params "${serving_client_key}" "${serving_client_value}")
+trans_model_cmd="${python} ${trans_model_py} ${set_dirname} ${set_model_filename} ${set_params_filename} ${set_serving_server} ${set_serving_client}"
+eval $trans_model_cmd
+cd ${serving_dir_value}
+echo $PWD
+web_service_cmd="${python} ${web_service_py}"
+echo $web_service_cmd
+eval $web_service_cmd
+pipline_cmd="${python} ${pipline_py}"
+echo $pipline_cmd
+eval $pipline_cmd

 done

 else
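func_set_params is likewise defined elsewhere in test.sh; judging from how its output is spliced into trans_model_cmd, it presumably emits a "key=value" fragment and collapses null entries to a blank, along the lines of this sketch (an assumption, not the real definition):

    # assumed behaviour of func_set_params as used in the serving block above
    function func_set_params() {
        local key=$1 value=$2
        if [ "${key}" = "null" ] || [ "${value}" = "null" ] || [ -z "${value}" ]; then
            echo " "
        else
            echo "${key}=${value}"
        fi
    }

With the deploy_params values from the first hunk, trans_model_cmd therefore expands to roughly the paddle_serving_client.convert call sketched earlier (with --key=value spelling), after which web_service_det.py is launched and pipeline_http_client.py exercises it over HTTP.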
@@ -363,3 +396,4 @@ else
 done # done with: for autocast in ${autocast_list[*]}; do
 done # done with: for gpu in ${gpu_list[*]}; do
 fi # end if [ ${MODE} = "infer" ]; then