Support skeleton (#155)

* pose

* pose

* pose

* pose

* your commit message

* pose

* pose

* Delete train1.sh

* pretreatment

* configs

* pose

* reference

* Update gaittr.py

* naming

* naming

* Update transform.py

* update for datasets

* update README

* update name and README

* update

* Update transform.py
This commit is contained in:
Dongyang Jin
2023-09-27 16:20:00 +08:00
committed by GitHub
parent 853bb1821d
commit 2c29afadf3
41 changed files with 4251 additions and 12 deletions
+14 -2
View File
@@ -35,15 +35,27 @@ python datasets/OUMVLP/extractor.py --input_path Path_of_OUMVLP-base --output_pa
......
......
```
Step3 : To rearrange directory of OUMVLP dataset, turning to id-type-view structure, Run
Step3-1 : To rearrange directory of OUMVLP dataset(for silhouette), turning to id-type-view structure, Run
```
python datasets/OUMVLP/rearrange_OUMVLP.py --input_path Path_of_OUMVLP-raw --output_path Path_of_OUMVLP-rearranged
```
Step3-2 : To rearrange directory of OUMVLP dataset(for pose), turning to id-type-view structure, Run
```
python datasets/OUMVLP/rearrange_OUMVLP_pose.py --input_path Path_of_OUMVLP-pose --output_path Path_of_OUMVLP-pose-rearranged
```
Step4: Transforming images to pickle file, run
Step4-1: Transforming images to pickle file, run
```
python datasets/pretreatment.py --input_path Path_of_OUMVLP-rearranged --output_path Path_of_OUMVLP-pkl
```
Step4-2: Transforming pose txts to pickle file, run
```
python datasets/pretreatment.py --input_path Path_of_OUMVLP-pose-rearranged --output_path Path_of_OUMVLP-pose-pkl --pose --dataset OUMVLP
```
Step5: To generate the 17-keypoint pose format from the 18-keypoint format, run
```
python datasets/OUMVLP/rearrange_OUMVLP_pose.py --input_path Path_of_OUMVLP-pose18 --output_path Path_of_OUMVLP-pose17
```
- Processed
```
+85
View File
@@ -0,0 +1,85 @@
import pickle
from tqdm import tqdm
from pathlib import Path
import os
import os.path as osp
import argparse
import logging
'''
gernerate the 17 Number of Pose Points Format from 18 Number of Pose Points
OUMVLP 17
# keypoints = {
# 0: "nose",
# 1: "left_eye",
# 2: "right_eye",
# 3: "left_ear",
# 4: "right_ear",
# 5: "left_shoulder",
# 6: "right_shoulder",
# 7: "left_elbow",
# 8: "right_elbow",
# 9: "left_wrist",
# 10: "right_wrist",
# 11: "left_hip",
# 12: "right_hip",
# 13: "left_knee",
# 14: "right_knee",
# 15: "left_ankle",
# 16: "right_ankle"
# }
OUMVLP 18
mask=[0,15,14,17,16,5,2,6,3,7,4,11,8,12,9,13,10]
# keypoints = {
# 0: "nose",
# 1: "neck",
# 2: "Rshoulder",
# 3: "Relbow",
# 4: "Rwrist",
# 5: "Lshoudler",
# 6: "Lelbow",
# 7: "Lwrist",
# 8: "Rhip",
# 9: "Rknee",
# 10: "Rankle",
# 11: "Lhip",
# 12: "Lknee",
# 13: "Lankle",
# 14: "Reye",
# 15: "Leye",
# 16: "Rear",
# 17: "Lear"
# }
'''
def ToOUMVLP17(input_path: Path, output_path: Path):
    """Convert OUMVLP 18-keypoint pose pickles to the 17-keypoint format.

    Walks ``input_path`` laid out as ``subject/seq/view`` directories, each
    holding a ``<view>.pkl`` with pose data of shape [T, 18, 3], selects and
    reorders joints via ``mask`` (the 18-format "neck", index 1, is dropped),
    and writes [T, 17, 3] pickles under the same layout in ``output_path``.

    Args:
        input_path: Root of the rearranged 18-keypoint dataset.
        output_path: Root for the generated 17-keypoint dataset.
    """
    # mask[i] = index in the 18-keypoint layout feeding 17-keypoint slot i.
    mask = [0, 15, 14, 17, 16, 5, 2, 6, 3, 7, 4, 11, 8, 12, 9, 13, 10]
    TOTAL_SUBJECTS = 10307
    progress = tqdm(total=TOTAL_SUBJECTS)
    for subject in input_path.iterdir():
        for seq in subject.iterdir():
            for view in seq.iterdir():
                src = view / f"{view.name}.pkl"
                dst = output_path / subject.name / seq.name / view.name
                dst.mkdir(parents=True, exist_ok=True)
                with open(src, 'rb') as f:
                    srcdata = pickle.load(f)  # [T, 18, 3]
                data = srcdata[..., mask, :].copy()  # [T, 17, 3]
                # Context manager closes the handle; the original leaked it
                # via pickle.dump(data, open(pkl_path, 'wb')).
                with open(dst / f"{view.name}.pkl", 'wb') as f:
                    pickle.dump(data, f)
        # NOTE(review): bar total counts subjects, so advance once per subject.
        progress.update(1)
    progress.close()
if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='OpenGait dataset pretreatment module.')
    parser.add_argument('-i', '--input_path', default='', type=str, help='Root path of raw dataset.')
    parser.add_argument('-o', '--output_path', default='', type=str, help='Output path of pickled dataset.')
    parser.add_argument('-l', '--log_to_file', default='./pretreatment.log', type=str, help='Log file path. Default: ./pretreatment.log')
    args = parser.parse_args()
    # Route log records to the requested file; the original parsed
    # --log_to_file but never configured logging, so 'Begin'/'Done'
    # messages were silently dropped.
    logging.basicConfig(filename=args.log_to_file, level=logging.INFO,
                        format='%(asctime)s - %(levelname)s - %(message)s')
    logging.info('Begin')
    ToOUMVLP17(input_path=Path(args.input_path), output_path=Path(args.output_path))
    logging.info('Done')
+44
View File
@@ -0,0 +1,44 @@
import argparse
import os
import shutil
from pathlib import Path
from typing import Tuple
from tqdm import tqdm
# OUMVLP contains this many subject folders; used only to size the progress bar.
TOTAL_SUBJECTS = 10307


def sanitize(name: str) -> Tuple[str, str]:
    """Split a '<view>_<seq>' folder name into ``(view, seq)``.

    The original returned ``name.split('_')`` — a *list*, contradicting the
    declared ``Tuple[str, str]`` return type, and silently yielding the wrong
    arity for malformed names. Unpacking here returns a real 2-tuple and
    raises ValueError early on names without exactly one underscore.
    """
    view, seq = name.split('_')
    return view, seq
def rearrange(input_path: Path, output_path: Path) -> None:
    """Relink OUMVLP pose jsons from ``<subject>/<view>_<seq>/`` into the
    id-type-view layout ``<subject>/<seq>/<view>/`` under ``output_path``.

    Files are symlinked rather than copied; links that already exist are
    skipped, so the script can be re-run safely.

    Args:
        input_path: Root of the raw pose dataset.
        output_path: Root for the rearranged dataset (created if missing).
    """
    os.makedirs(output_path, exist_ok=True)
    progress = tqdm(total=TOTAL_SUBJECTS)
    for folder in input_path.iterdir():
        subject = folder.name
        for sid in folder.iterdir():
            view, seq = sanitize(sid.name)
            src = os.path.join(input_path, subject, sid.name)
            dst = os.path.join(output_path, subject, seq, view)
            os.makedirs(dst, exist_ok=True)
            for subfile in os.listdir(src):
                if not subfile.endswith('.json'):
                    continue
                link = os.path.join(dst, subfile)
                # lexists is O(1) per file; the original rescanned the whole
                # destination with os.listdir for every file (O(n^2)), and
                # lexists also detects broken symlinks, avoiding
                # FileExistsError on re-runs.
                if not os.path.lexists(link):
                    os.symlink(os.path.join(src, subfile), link)
        progress.update(1)
    progress.close()
if __name__ == '__main__':
    # Command-line entry point: parse the source/destination roots and run
    # the rearrangement.
    cli = argparse.ArgumentParser(description='OUMVLP rearrange tool')
    cli.add_argument('-i', '--input_path', required=True, type=str,
                     help='Root path of raw dataset.')
    cli.add_argument('-o', '--output_path', default='OUMVLP_rearranged', type=str,
                     help='Root path for output.')
    opts = cli.parse_args()
    src_root = Path(opts.input_path).resolve()
    dst_root = Path(opts.output_path).resolve()
    rearrange(src_root, dst_root)