ONNX mode: release the model to reduce memory usage
parent e8e6bf8f59
commit 8ac4684607
@@ -30,7 +30,8 @@ def _load_model(position, model_path, model_type, use_legacy, use_lite, use_onnx
     # torch.save(model.state_dict(), model_path.replace(".ckpt", "_nobn.ckpt"))
     model.eval()
     model_path = model_path + '.onnx'
-    if use_onnx and not os.path.exists(model_path):
+    if use_onnx:
+        if not os.path.exists(model_path):
             onnx_params = model.get_onnx_params(torch.device('cpu'))
             torch.onnx.export(
                 model,
@@ -43,6 +44,7 @@ def _load_model(position, model_path, model_type, use_legacy, use_lite, use_onnx
                 output_names=onnx_params['output_names'],
                 dynamic_axes=onnx_params['dynamic_axes']
             )
+        return None
 
     return model
 
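For context, a minimal caller-side sketch of why returning None is safe in ONNX mode, assuming the surrounding code runs inference through ONNX Runtime once the exported file exists (the helper name `_load_onnx_session` and the CPU provider choice are illustrative, not part of this commit):

    import onnxruntime as ort

    def _load_onnx_session(model_path):
        # model_path already carries the '.onnx' suffix, matching the
        # `model_path = model_path + '.onnx'` line in the diff above.
        # Inference goes through this session, so the PyTorch module that
        # _load_model would otherwise return is no longer needed.
        return ort.InferenceSession(model_path, providers=['CPUExecutionProvider'])

With inference served by the ONNX Runtime session, returning None from _load_model lets the PyTorch weights be garbage-collected, which is what lowers memory usage.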