Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
69 changes: 69 additions & 0 deletions jointContribution/IJACA_2024/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,69 @@
# IJACA_Code
Paddle implementations of the top-three solutions in each track of the IJACA 2024 competition.

Only inference code is provided for now.

## Dataset
Please refer to the .ipynb files in each directory to download the data and set the corresponding parameters.

## Checkpoint
Download checkpoints:
``` sh
cd PaddleScience/jointContribution/IJACA_2024
# linux
wget -nc https://paddle-org.bj.bcebos.com/paddlescience/models/contrib/IJACA_2024_ckpts.tar.gz
# windows
# curl -o IJACA_2024_ckpts.tar.gz https://paddle-org.bj.bcebos.com/paddlescience/models/contrib/IJACA_2024_ckpts.tar.gz
```

Unzip the checkpoints and move them to the corresponding directory:
``` sh
tar -xvzf IJACA_2024_ckpts.tar.gz

# aminos
mkdir -p ./aminos/Logger/states/
mv ./ckpts/aminos/90.pdparams ./aminos/Logger/states/90.pdparams

# tenfeng
mkdir -p ./tenfeng/results/
mv ./ckpts/tenfeng/checkpoint.pdparams ./tenfeng/results/checkpoint.pdparams

# leejt
mv ./ckpts/leejt/model.pdparams ./leejt/model.pdparams

# bju
mv ./ckpts/bju/geom/ckpt ./bju/geom/
mv ./ckpts/bju/pretrained_checkpoint.pdparams ./bju/pretrained_checkpoint.pdparams

# zhongzaicanyu
# No pretrained checkpoint yet.
```

## Inference
First enter the corresponding directory. For example "aminos":
``` sh
cd aminos
```

Install requirements:
``` sh
pip install -r requirements.txt
```

Run Inference:
``` py
### aminos
python infer.py --dataset_dir "./Datasets" --load_index="90"

### tenfeng
python infer.py --epochs 69 --milestones 40 50 60 65 68 --gpu_id 0 --depth 5 --hidden_dim 256 --num_slices 32 --batch_size 4 --loss_type 'rl2' --submit --log_dir "./results" --training_data_dir "./Dataset/train_track_B_e" --testing_data_dir "./Dataset/Testset_track_B_e"

### leejt
python infer.py

### bju
python infer.py --train_data_dir "./Dataset/Trainset_track_B" --test_data_dir "./Dataset/Testset_track_B/Inference" --info_dir "./Dataset/Testset_track_B/Auxiliary" --ulip_ckpt "./geom/ckpt/checkpoint_pointbert.pdparams"

### zhongzaicanyu
python infer.py # not work yet.
```
Empty file.
121 changes: 121 additions & 0 deletions jointContribution/IJACA_2024/aminos/Extract_mesh/merge_h5.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,121 @@
import argparse
import os

import h5py
import numpy as np
import paddle


def load_ds_trackA_info(file_path, key_list):
    """Collect track-A physics info (bounds and normalization stats) from *file_path*.

    Returns a dict with the numerically sorted key list, the watertight
    global bounds, and float32 paddle tensors holding the voxel, pressure,
    and position+normal mean/std statistics.
    """
    root = file_path

    def _float32_tensor(filename):
        # Load a whitespace-separated text table and wrap it as a float32 tensor.
        return paddle.to_tensor(data=np.loadtxt(root + "/" + filename)).to("float32")

    # Sort keys by their integer value, then normalize them back to strings.
    sorted_keys = [str(k) for k in sorted(int(key) for key in key_list)]
    pos_stats = np.loadtxt(root + "/pos_mean_std.txt")
    normal_stats = np.loadtxt(root + "/normal_mean_std.txt")
    return {
        "key_list": sorted_keys,
        "bounds": np.loadtxt(root + "/watertight_global_bounds.txt"),
        "voxel_mean_std": _float32_tensor("voxel_mean_std.txt"),
        "pressure_mean_std": _float32_tensor("train_pressure_min_std.txt"),
        # Position and normal stats are stored in separate files; fuse them
        # along the last axis into a single (mean, std) table.
        "PN_mean_std": paddle.to_tensor(
            data=np.concatenate([pos_stats, normal_stats], axis=-1)
        ).to("float32"),
    }


def load_ds_trackB_info(file_path, key_list):
    """Collect track-B physics info (bounds and normalization stats) from *file_path*.

    Returns a dict with the numerically sorted key list, the global bounds,
    and float32 paddle tensors of the voxel, pressure, and position+normal
    mean/std statistics.
    """
    root = file_path

    def _float32_tensor(filename):
        # Load a whitespace-separated text table and wrap it as a float32 tensor.
        return paddle.to_tensor(data=np.loadtxt(root + "/" + filename)).to("float32")

    # Sort keys by their integer value, then normalize them back to strings.
    sorted_keys = [str(k) for k in sorted(int(key) for key in key_list)]
    pna_stats = _float32_tensor("PosNormalArea_mean_std.txt")
    return {
        "key_list": sorted_keys,
        "bounds": np.loadtxt(root + "/global_bounds.txt"),
        "voxel_mean_std": _float32_tensor("voxel_mean_std.txt"),
        "pressure_mean_std": _float32_tensor("train_pressure_mean_std.txt"),
        # Position+normal stats are the first six columns of the
        # position/normal/area table (area column dropped).
        "PN_mean_std": pna_stats[:, :6],
    }


def load_extra_info(file_path, key_list, track_type="A"):
    """Load physics info for a dataset, dispatching on *track_type*.

    ``"A"`` selects the track-A loader; any other value selects track B.
    """
    loader = load_ds_trackA_info if track_type == "A" else load_ds_trackB_info
    return loader(file_path, key_list)


def add_physics_info_to_group(group, physics_info):
    """Store every entry of *physics_info* as its own dataset inside *group*."""
    for name, payload in physics_info.items():
        group.create_dataset(name, data=payload)


def merge_h5_files(fileA_path, fileB_path, merged_file_path):
    """Merge two track HDF5 files into one output file.

    Every top-level group of file A is copied under the key ``"A_" + key``
    and every group of file B under ``"B_" + key``; each copied group is
    then augmented with its track's physics info (normalization stats,
    bounds, key list), looked up from the directory containing the source
    file.
    """
    with h5py.File(fileA_path, "r") as src_a, h5py.File(
        fileB_path, "r"
    ) as src_b, h5py.File(merged_file_path, "w") as dst:
        # Load both tracks' physics info up front, keyed by source file.
        sources = [
            (src_a, "A_", load_extra_info(os.path.dirname(fileA_path), list(src_a.keys()), track_type="A")),
            (src_b, "B_", load_extra_info(os.path.dirname(fileB_path), list(src_b.keys()), track_type="B")),
        ]
        for src, prefix, physics_info in sources:
            for key in src.keys():
                target = prefix + key
                dst.copy(src[key], target)
                add_physics_info_to_group(dst[target], physics_info)


if __name__ == "__main__":
    # CLI entry point: merge the track-A and track-B .h5 files into the
    # path given by --C_dir (the output file, created/overwritten).
    parser = argparse.ArgumentParser(
        description="Merge track-A and track-B HDF5 datasets into a single file."
    )
    parser.add_argument(
        "--A_dir",
        default="/home/xiaoli/project/3D-ShapeNet-car/src/Dataset/converted_dataset/trackA/test.h5",
        type=str,
        help="Path to the track-A input .h5 file.",
    )
    parser.add_argument(
        "--B_dir",
        default="/home/xiaoli/project/3D-ShapeNet-car/src/Dataset/converted_dataset/trackB/test.h5",
        type=str,
        help="Path to the track-B input .h5 file.",
    )
    parser.add_argument(
        "--C_dir",
        default="/home/xiaoli/project/3D-ShapeNet-car/src/Dataset/converted_dataset/trackC/k1.h5",
        type=str,
        help="Path of the merged output .h5 file (will be overwritten).",
    )
    params = parser.parse_args()
    merge_h5_files(params.A_dir, params.B_dir, params.C_dir)
    print("done")
Loading