fix commit

parent 5fc33c1213
commit bbd7665c47
@@ -30,7 +30,7 @@ sudo nvidia-docker run --name ppocr -v $PWD:/paddle --shm-size=64G --network=hos
 sudo docker container exec -it ppocr /bin/bash
 ```
 
-**2. Install PaddlePaddle v2.0**
+**2. Install PaddlePaddle 2.0**
 
 ```
 pip3 install --upgrade pip
@@ -6,7 +6,7 @@ The inference model (the model saved by `paddle.jit.save`) is generally a solidi
 The model saved during the training process is the checkpoints model, which saves the parameters of the model and is mostly used to resume training.
 
 Compared with the checkpoints model, the inference model will additionally save the structural information of the model. Therefore, it is easier to deploy because the model structure and model parameters are already solidified in the inference model file, and is suitable for integration with actual systems.
-For more details, please refer to the document [Classification Framework](https://github.com/PaddlePaddle/PaddleClas/blob/master/docs/zh_CN/extension/paddle_inference.md).
+For more details, please refer to the document [Classification Framework](https://github.com/PaddlePaddle/PaddleClas/blob/release%2F2.0/docs/zh_CN/extension/paddle_mobile_inference.md).
 
 Next, we first introduce how to convert a trained model into an inference model, and then we will introduce text detection, text recognition, angle class, and the concatenation of them based on inference model.
 
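As a side note on the export step described in the hunk above, a minimal sketch of producing an inference model with `paddle.jit.save` is shown below; the tiny layer and output path are made up for illustration and are not part of this commit.

```
import paddle
from paddle.static import InputSpec

# A real model would be restored from checkpoints; a small layer stands in here.
model = paddle.nn.Linear(32, 10)
model.eval()

# paddle.jit.save writes both the graph structure (*.pdmodel) and the
# parameters (*.pdiparams), which is what makes the inference model
# self-contained and easy to deploy.
paddle.jit.save(
    model,
    path="./inference/example",
    input_spec=[InputSpec(shape=[None, 32], dtype="float32")])
```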
@@ -33,7 +33,7 @@ You can also visit [DockerHub](https://hub.docker.com/r/paddlepaddle/paddle/tags
 sudo docker container exec -it ppocr /bin/bash
 ```
 
-**2. Install PaddlePaddle v2.0**
+**2. Install PaddlePaddle 2.0**
 
 ```
 pip3 install --upgrade pip
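After the install step covered by the two doc hunks above, Paddle's built-in self-check is a quick way to confirm that PaddlePaddle 2.0 is working; this snippet is only an illustration, not part of the diff.

```
import paddle

# Print the installed version and run the framework's sanity check,
# which also exercises the GPU build when one is available.
print(paddle.__version__)
paddle.utils.run_check()
```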
@@ -64,7 +64,7 @@ class TextDetector(object):
             postprocess_params["box_thresh"] = args.det_db_box_thresh
             postprocess_params["max_candidates"] = 1000
             postprocess_params["unclip_ratio"] = args.det_db_unclip_ratio
-            postprocess_params["use_dilation"] = args.use_dilation
+            postprocess_params["use_dilation"] = True
         elif self.det_algorithm == "EAST":
             postprocess_params['name'] = 'EASTPostProcess'
             postprocess_params["score_thresh"] = args.det_east_score_thresh
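For context on the `use_dilation` flag touched above: in DB post-processing, dilation expands the binarized text probability map before boxes are fitted, which helps merge fragmented text regions. A rough sketch of that operation is below, assuming a small all-ones kernel; the exact kernel and threshold used by DBPostProcess may differ.

```
import cv2
import numpy as np

# Toy probability map from the detection head, binarized at a threshold.
prob_map = np.random.rand(32, 32).astype(np.float32)
mask = (prob_map > 0.3).astype(np.uint8)

# Dilation grows each foreground region slightly so that nearby
# fragments of the same text line merge into one connected component.
kernel = np.ones((2, 2), dtype=np.uint8)  # assumed kernel size
dilated = cv2.dilate(mask, kernel)
print(mask.sum(), dilated.sum())
```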
@@ -124,6 +124,7 @@ def create_predictor(args, mode, logger):
             # cache 10 different shapes for mkldnn to avoid memory leak
             config.set_mkldnn_cache_capacity(10)
             config.enable_mkldnn()
             # TODO LDOUBLEV: fix mkldnn bug when bach_size > 1
             #config.set_mkldnn_op({'conv2d', 'depthwise_conv2d', 'pool2d', 'batch_norm'})
+            args.rec_batch_num = 1
 
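For reference, the surrounding `create_predictor` code builds a CPU config along these lines when MKL-DNN is enabled; the sketch below is simplified, uses placeholder model paths, and is not the actual function from `tools/infer/utility.py`.

```
from paddle.inference import Config, create_predictor

# Placeholder paths; the real code derives them from args and mode.
config = Config("inference/det/inference.pdmodel",
                "inference/det/inference.pdiparams")

config.disable_gpu()
config.set_cpu_math_library_num_threads(6)
config.enable_mkldnn()
# Cache only a few input shapes so MKL-DNN does not keep growing
# its internal buffers for every new shape it sees.
config.set_mkldnn_cache_capacity(10)

predictor = create_predictor(config)
```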