Add code for canonical space setup
isarandi committed Dec 12, 2024
1 parent 3e2d785 commit 8f68f5d
Showing 34 changed files with 840 additions and 76 deletions.
File renamed without changes.
File renamed without changes.
@@ -5,7 +5,7 @@
from dataclasses import dataclass
from functools import partial
from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union
-from nlf.pytorch.util import get_config
+from nlf.pt.util import get_config
import torch
from torch import Tensor, nn
from torchvision.models._api import Weights, WeightsEnum
File renamed without changes.
File renamed without changes.
@@ -8,13 +8,13 @@
import tensorflow as tf

from simplepyutils import logger
-import nlf.pytorch.backbones.efficientnet as effnet_pytorch
-import nlf.pytorch.models.nlf_model as pt_nlf_model
-import nlf.pytorch.models.field as pt_field
+import nlf.pt.backbones.efficientnet as effnet_pytorch
+import nlf.pt.models.nlf_model as pt_nlf_model
+import nlf.pt.models.field as pt_field
import nlf.tf.backbones.builder as tf_backbones_builder
from nlf.tf import init as tf_init, tfu
import nlf.tf.model.field as tf_field
-from nlf.pytorch.util import get_config
+from nlf.pt.util import get_config

FLAGS = argparse.Namespace()

2 changes: 1 addition & 1 deletion nlf/pytorch/init.py → nlf/pt/init.py
@@ -18,7 +18,7 @@
import matplotlib.pyplot as plt
import simplepyutils as spu
from simplepyutils import FLAGS, logger
-from nlf.pytorch import util
+from nlf.pt import util
from posepile.paths import DATA_ROOT


File renamed without changes.
2 changes: 1 addition & 1 deletion nlf/pytorch/models/field.py → nlf/pt/models/field.py
@@ -3,7 +3,7 @@
import torch.nn as nn

from nlf.paths import PROJDIR
-from nlf.pytorch.util import get_config
+from nlf.pt.util import get_config


def build_field():
@@ -7,9 +7,9 @@
import torch.nn.functional as F

from nlf.paths import PROJDIR
-from nlf.pytorch import ptu, ptu3d
-from nlf.pytorch.models import util as model_util
-from nlf.pytorch.util import get_config
+from nlf.pt import ptu, ptu3d
+from nlf.pt.models import util as model_util
+from nlf.pt.util import get_config


class NLFModel(nn.Module):
File renamed without changes.
File renamed without changes.
@@ -8,27 +8,25 @@
import torch
import torchvision.transforms.functional

-import nlf.pytorch.backbones.efficientnet as effnet_pytorch
-import nlf.pytorch.models.field as pt_field
-import nlf.pytorch.models.nlf_model as pt_nlf_model
+import nlf.pt.backbones.efficientnet as effnet_pytorch
+import nlf.pt.models.field as pt_field
+import nlf.pt.models.nlf_model as pt_nlf_model
from nlf.paths import DATA_ROOT, PROJDIR
-from nlf.pytorch import ptu, ptu3d
-from nlf.pytorch.multiperson import person_detector, plausibility_check as plausib, warping
-from nlf.pytorch.util import get_config
+from nlf.pt import ptu, ptu3d
+from nlf.pt.multiperson import person_detector, plausibility_check as plausib, warping
+from nlf.pt.util import get_config
import simplepyutils as spu

# Dummy value which will mean that the intrinsic_matrix are unknown
UNKNOWN_INTRINSIC_MATRIX = ((-1, -1, -1), (-1, -1, -1), (-1, -1, -1))
DEFAULT_EXTRINSIC_MATRIX = ((1, 0, 0, 0), (0, 1, 0, 0), (0, 0, 1, 0), (0, 0, 0, 1))
DEFAULT_DISTORTION = (0, 0, 0, 0, 0)
DEFAULT_WORLD_UP = (0, -1, 0)
crop__ = None
-import smplfitter
-import simplepyutils as spu



def main():
    skeleton_infos = spu.load_pickle(f"{DATA_ROOT}/skeleton_conversion/skeleton_types_huge8.pkl")

    cano_joints = np.load(f'{PROJDIR}/canonical_joints/smpl.npy').astype(np.float32)
    cano_verts = np.load(f'{PROJDIR}/canonical_verts/smpl.npy').astype(np.float32)
    cano_both = np.concatenate([cano_joints, cano_verts], axis=0)
File renamed without changes.
File renamed without changes.
File renamed without changes.
@@ -1,5 +1,5 @@
import torch
-from nlf.pytorch import ptu3d
+from nlf.pt import ptu3d
import numpy as np
from typing import List, Tuple

File renamed without changes.
2 changes: 1 addition & 1 deletion nlf/pytorch/ptu3d.py → nlf/pt/ptu3d.py
@@ -1,7 +1,7 @@
import numpy as np
import torch

-from nlf.pytorch import ptu
+from nlf.pt import ptu
from typing import Optional, Tuple, List


File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
16 changes: 0 additions & 16 deletions nlf/pytorch/config/config.yaml

This file was deleted.

14 changes: 0 additions & 14 deletions nlf/pytorch/config/config_l.yaml

This file was deleted.

23 changes: 0 additions & 23 deletions nlf/pytorch/config/config_s_256.yaml

This file was deleted.

Empty file added nlf/tf/canonical/__init__.py
Empty file.
111 changes: 111 additions & 0 deletions nlf/tf/canonical/canonical_skel.py
@@ -0,0 +1,111 @@
# This is for generating the initial canonical positions of the
# skeleton/keypoint-based datasets. These positions will be changed
# during training, but we need some starting point.
# We use the regressor that we learned in smpl2skel_pseudo.py on the
# canonical vertices, which gives us the body points.
# Additionally, since we also add Halpe and COCO-WholeBody in the 2D part,
# we also need the canonical points corresponding to Halpe and COCO-WholeBody's
# face and hand keypoints.
# For these keypoints we will use the corresponding SMPL-X points.
# We just need to set up the mapping of indices.

import numpy as np
import posepile.datasets2d as ds2d
import simplepyutils as spu
from posepile.merging.merged_dataset3d import merge_joint_infos

from nlf.paths import DATA_ROOT, PROJDIR


def main():
    joint_info_huge8 = spu.load_pickle(f'{DATA_ROOT}/skeleton_conversion/huge8_joint_info.pkl')

    # Generated by nlf/tf/canonical/smpl2skel_pseudo.py:
    smpl2huge = np.load(f'{PROJDIR}/smpl2huge8.npy')

    cano_smpl = np.load(f'{PROJDIR}/canonical_vertices_smpl.npy')
    canonical_locs_init = smpl2huge.T @ cano_smpl
    canonical_locs_init[356] = canonical_locs_init[519]
    canonical_locs_init[360] = canonical_locs_init[513]

    dataset_2d = ds2d.Pose2DDatasetBarecat(
        f'{DATA_ROOT}/anno_barecat/anno_2d.barecat',
        images_path=f'{DATA_ROOT}/posepile_33ds/images.barecat')
    joint_info2d = dataset_2d.joint_info

    joint_info_huge8_2 = merge_joint_infos(
        [[joint_info_huge8, ''], [joint_info2d, '']])
    spu.dump_pickle(joint_info_huge8_2, f'{DATA_ROOT}/skeleton_conversion/huge8_2_joint_info.pkl')

    cano_smplx_144 = np.load(f'{PROJDIR}/canonical_joints_smplx_n_144.npy')

    c = np.full((joint_info_huge8_2.n_joints, 3), np.nan)
    c[:joint_info_huge8.n_joints] = canonical_locs_init

    j = joint_info_huge8_2.ids

    ## HALPE
    # face
    s = joint_info_huge8_2.ids.rface1_halpe
    c[s:s + 17] = cano_smplx_144[127:144]
    c[s + 27:s + 61] = cano_smplx_144[86:120]
    c[s + 61:s + 68] = cano_smplx_144[126:119:-1]

    # eyebrows
    c[s + 17] = cano_smplx_144[77]
    c[s + 18] = cano_smplx_144[78] * 3 / 4 + cano_smplx_144[77] * 1 / 4
    c[s + 19] = cano_smplx_144[78] * 1 / 2 + cano_smplx_144[79] * 1 / 2
    c[s + 20] = cano_smplx_144[79] * 3 / 4 + cano_smplx_144[80] * 1 / 4
    c[s + 21] = cano_smplx_144[80]

    c[s + 22] = cano_smplx_144[81]
    c[s + 23] = cano_smplx_144[82] * 3 / 4 + cano_smplx_144[81] * 1 / 4
    c[s + 24] = cano_smplx_144[82] * 1 / 2 + cano_smplx_144[83] * 1 / 2
    c[s + 25] = cano_smplx_144[83] * 3 / 4 + cano_smplx_144[84] * 1 / 4
    c[s + 26] = cano_smplx_144[84]

    # body
    s = joint_info_huge8_2.ids.nose_halpe
    c[s:s + 17] = cano_smplx_144[[55, 57, 56, 59, 58, 16, 17, 18, 19, 20, 21, 1, 2, 4, 5, 7, 8]]
    c[s + 18:s + 20] = cano_smplx_144[[12, 0]]
    c[s + 17] = canonical_locs_init[j.htop_3dhp]

    # hands
    c[s + 115] = cano_smplx_144[52] * 2 - cano_smplx_144[53]
    c[s + 116:s + 120] = cano_smplx_144[[52, 53, 54, 71]]
    c[s + 120:s + 124] = cano_smplx_144[[40, 41, 42, 72]]
    c[s + 124:s + 128] = cano_smplx_144[[43, 44, 45, 73]]
    c[s + 128:s + 132] = cano_smplx_144[[49, 50, 51, 74]]
    c[s + 132:s + 136] = cano_smplx_144[[46, 47, 48, 75]]

    c[s + 94] = cano_smplx_144[37] * 2 - cano_smplx_144[38]
    c[s + 95:s + 99] = cano_smplx_144[[37, 38, 39, 66]]
    c[s + 99:s + 103] = cano_smplx_144[[25, 26, 27, 67]]
    c[s + 103:s + 107] = cano_smplx_144[[28, 29, 30, 68]]
    c[s + 107:s + 111] = cano_smplx_144[[34, 35, 36, 69]]
    c[s + 111:s + 115] = cano_smplx_144[[31, 32, 33, 70]]

    # feet
    c[s + 20:s + 26] = cano_smplx_144[[60, 63, 61, 64, 62, 65]]

    c[j.head_posetrack] = c[j.neck_halpe]
    c[j.head_aic] = c[j.htop_halpe]
    c[j.head_mpii] = c[j.htop_halpe]

    for i, n in enumerate(joint_info_huge8_2.names):
        if any(n.endswith(x) for x in 'coco posetrack jrdb aic'.split()) and np.isnan(c[i, 0]):
            c[i] = c[j[n.split('_')[0] + '_halpe']]
        elif n.endswith('mpii') and np.isnan(c[i, 0]):
            c[i] = c[j[n.split('_')[0] + '_3dhp']]

    i_left_joints = [i for i, n in enumerate(joint_info_huge8_2.names) if n[0] == 'l']
    i_right_joints = [
        joint_info_huge8_2.ids['r' + joint_info_huge8_2.names[i][1:]] for i in i_left_joints]
    i_center_joints = [i for i in range(joint_info_huge8_2.n_joints) if
                       i not in i_left_joints and i not in i_right_joints]
    symm_init = (c + c[joint_info_huge8_2.mirror_mapping] * [-1, 1, 1]) / 2
    symm_init[i_center_joints, 0] = 0
    np.save(f'{PROJDIR}/canonical_loc_symmetric_init_866.npy', symm_init)


if __name__ == '__main__':
    main()
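
The sketch below is not part of the commit; it isolates the two numerical steps of canonical_skel.py so they can be read without the dataset-specific index bookkeeping: applying a vertex-to-keypoint regressor to the canonical SMPL vertices, and making the result left/right symmetric by averaging each point with its x-mirrored counterpart. The array sizes, the random matrix standing in for smpl2huge8.npy, and the mirror_mapping values are made up for illustration.

import numpy as np

rng = np.random.default_rng(0)

# Stand-in for smpl2huge8.npy: a (num_vertices, num_points) matrix whose columns
# are convex weights over the mesh vertices. Its transpose times the canonical
# vertices gives one canonical 3D location per skeleton point, analogous to
# canonical_locs_init = smpl2huge.T @ cano_smpl in the script.
num_verts, num_points = 100, 4                  # hypothetical sizes
regressor = rng.random((num_verts, num_points)).astype(np.float32)
regressor /= regressor.sum(axis=0, keepdims=True)
cano_verts = rng.standard_normal((num_verts, 3)).astype(np.float32)
cano_points = regressor.T @ cano_verts          # shape (num_points, 3)

# Symmetrization: mirror_mapping[i] is the index of the left/right mirror partner
# of point i (in this toy setup, midline points map to themselves). Averaging each
# point with its x-flipped partner makes the initialization exactly symmetric, and
# midline points are snapped onto the x = 0 plane, as in the last lines of the script.
mirror_mapping = np.array([0, 2, 1, 3])         # hypothetical: points 1 and 2 are a pair
symm = (cano_points + cano_points[mirror_mapping] * np.array([-1, 1, 1])) / 2
i_center = [i for i in range(num_points) if mirror_mapping[i] == i]
symm[i_center, 0] = 0

In the script itself, the mirror pairs come from joint_info_huge8_2 and the midline joints are identified by joint-name prefixes rather than self-mapping indices, but the arithmetic is the same.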