From 9f8eb0847e1a7d044494e7271d38167092925fb4 Mon Sep 17 00:00:00 2001
From: Wang Xin
Date: Mon, 11 Mar 2024 05:50:30 +0000
Subject: [PATCH] =?UTF-8?q?=E3=80=90PPSCI=20Export&Infer=20No.6=E3=80=91ro?=
 =?UTF-8?q?ssler?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 docs/zh/examples/rossler.md            | 18 ++++++
 examples/rossler/conf/transformer.yaml | 18 ++++++
 examples/rossler/train_transformer.py  | 80 +++++++++++++++++++++++++-
 3 files changed, 115 insertions(+), 1 deletion(-)

diff --git a/docs/zh/examples/rossler.md b/docs/zh/examples/rossler.md
index 8ab5bd342..0e873c3a8 100644
--- a/docs/zh/examples/rossler.md
+++ b/docs/zh/examples/rossler.md
@@ -28,6 +28,24 @@
     python train_transformer.py mode=eval EVAL.pretrained_model_path=https://paddle-org.bj.bcebos.com/paddlescience/models/rossler/rossler_transformer_pretrained.pdparams EMBEDDING_MODEL_PATH=https://paddle-org.bj.bcebos.com/paddlescience/models/rossler/rossler_pretrained.pdparams
     ```
 
+=== "模型导出命令"
+
+    ``` sh
+    python train_transformer.py mode=export
+    ```
+
+=== "模型推理命令"
+
+    ``` sh
+    # linux
+    wget -nc https://paddle-org.bj.bcebos.com/paddlescience/datasets/transformer_physx/rossler_training.hdf5 -P ./datasets/
+    wget -nc https://paddle-org.bj.bcebos.com/paddlescience/datasets/transformer_physx/rossler_valid.hdf5 -P ./datasets/
+    # windows
+    # curl https://paddle-org.bj.bcebos.com/paddlescience/datasets/transformer_physx/rossler_training.hdf5 --output ./datasets/rossler_training.hdf5
+    # curl https://paddle-org.bj.bcebos.com/paddlescience/datasets/transformer_physx/rossler_valid.hdf5 --output ./datasets/rossler_valid.hdf5
+    python train_transformer.py mode=infer EMBEDDING_MODEL_PATH=https://paddle-org.bj.bcebos.com/paddlescience/models/rossler/rossler_pretrained.pdparams
+    ```
+
 | 模型 | MSE |
 | :-- | :-- |
 | [rossler_transformer_pretrained.pdparams](https://paddle-org.bj.bcebos.com/paddlescience/models/rossler/rossler_transformer_pretrained.pdparams) | 0.022 |
diff --git a/examples/rossler/conf/transformer.yaml b/examples/rossler/conf/transformer.yaml
index 6b5ea9002..0dab2ec05 100644
--- a/examples/rossler/conf/transformer.yaml
+++ b/examples/rossler/conf/transformer.yaml
@@ -24,6 +24,7 @@ hydra:
 mode: train # running mode: train/eval
 seed: 42
 output_dir: ${hydra:run.dir}
+log_freq: 20
 TRAIN_BLOCK_SIZE: 32
 VALID_BLOCK_SIZE: 256
 TRAIN_FILE_PATH: ./datasets/rossler_training.hdf5
@@ -63,3 +64,20 @@ TRAIN:
 EVAL:
   batch_size: 16
   pretrained_model_path: null
+
+INFER:
+  pretrained_model_path: https://paddle-org.bj.bcebos.com/paddlescience/models/rossler/rossler_transformer_pretrained.pdparams
+  export_path: ./inference/rossler_transformer
+  pdmodel_path: ${INFER.export_path}.pdmodel
+  pdpiparams_path: ${INFER.export_path}.pdiparams
+  device: gpu
+  engine: native
+  precision: fp32
+  onnx_path: ${INFER.export_path}.onnx
+  ir_optim: false
+  min_subgraph_size: 10
+  gpu_mem: 4000
+  gpu_id: 0
+  max_batch_size: 64
+  num_cpu_threads: 4
+  batch_size: 16
diff --git a/examples/rossler/train_transformer.py b/examples/rossler/train_transformer.py
index 4398c8376..bb295b5f5 100644
--- a/examples/rossler/train_transformer.py
+++ b/examples/rossler/train_transformer.py
@@ -244,14 +244,92 @@ def evaluate(cfg: DictConfig):
     solver.visualize()
 
 
+def export(cfg: DictConfig):
+    # set model
+    model = ppsci.arch.PhysformerGPT2(**cfg.MODEL)
+
+    # initialize solver
+    solver = ppsci.solver.Solver(
+        model,
+        pretrained_model_path=cfg.INFER.pretrained_model_path,
+    )
+    # export model
+    from paddle.static import InputSpec
+
+    input_spec = [
+        {
+            key: InputSpec([None, 256, 32], "float32", name=key)
+            for key in model.input_keys
+        },
+    ]
+
+    solver.export(input_spec, cfg.INFER.export_path)
+
+
+def inference(cfg: DictConfig):
+    from deploy.python_infer import pinn_predictor
+
+    predictor = pinn_predictor.PINNPredictor(cfg)
+
+    embedding_model = build_embedding_model(cfg.EMBEDDING_MODEL_PATH)
+    output_transform = OutputTransform(embedding_model)
+    dataset_cfg = {
+        "name": "RosslerDataset",
+        "file_path": cfg.VALID_FILE_PATH,
+        "input_keys": cfg.MODEL.input_keys,
+        "label_keys": cfg.MODEL.output_keys,
+        "block_size": cfg.VALID_BLOCK_SIZE,
+        "stride": 1024,
+        "embedding_model": embedding_model,
+    }
+
+    dataset = ppsci.data.dataset.build_dataset(dataset_cfg)
+
+    input_dict = {
+        "embeds": dataset.embedding_data[: cfg.VIS_DATA_NUMS, :-1, :],
+    }
+
+    output_dict = predictor.predict(
+        {key: input_dict[key] for key in cfg.MODEL.input_keys}, cfg.INFER.batch_size
+    )
+
+    # map predictor outputs back to cfg.MODEL.output_keys
+    output_dict = {
+        store_key: paddle.to_tensor(output_dict[infer_key])
+        for store_key, infer_key in zip(cfg.MODEL.output_keys, output_dict.keys())
+    }
+
+    input_dict = {
+        "states": dataset.data[: cfg.VIS_DATA_NUMS, 1:, :],
+    }
+
+    output_dict = {
+        "pred_states": output_transform(output_dict).numpy(),
+    }
+
+    data_dict = {**input_dict, **output_dict}
+    for i in range(cfg.VIS_DATA_NUMS):
+        ppsci.visualize.save_plot_from_3d_dict(
+            f"./rossler_transformer_pred_{i}",
+            {key: value[i] for key, value in data_dict.items()},
+            ("states", "pred_states"),
+        )
+
+
 @hydra.main(version_base=None, config_path="./conf", config_name="transformer.yaml")
 def main(cfg: DictConfig):
     if cfg.mode == "train":
         train(cfg)
     elif cfg.mode == "eval":
         evaluate(cfg)
+    elif cfg.mode == "export":
+        export(cfg)
+    elif cfg.mode == "infer":
+        inference(cfg)
     else:
-        raise ValueError(f"cfg.mode should in ['train', 'eval'], but got '{cfg.mode}'")
+        raise ValueError(
+            f"cfg.mode should be in ['train', 'eval', 'export', 'infer'], but got '{cfg.mode}'"
+        )
 
 
 if __name__ == "__main__":