11
11
import numpy as np
12
12
import tempfile
13
13
import pandas as pd
14
+ import torch
15
+ import smplx
14
16
15
17
16
18
class CAESAR (data .Dataset ):
@@ -335,5 +337,184 @@ def __getitem__(self, index):
335
337
336
338
return return_dict
337
339
340
+ def __len__ (self ):
341
+ return self .dataset_size
342
+
343
+
344
class FourDHumanOutfit(data.Dataset):
    """4DHumanOutfit dataset of dressed-human scan sequences.

    Expecting the following structure:
      - SMPL params: parameters_path/subj_name/subj_name-clothing_type-action_name/param.pt
      - scans:       dataset_path/subj_name/subj_name-clothing_type-action_name/*/scan.obj
      - landmarks:   landmarks_path/subj_name/subj_name-clothing_type-action_name/
                         landmarks_{transferred_landmarks_name}.pt
    """

    def __init__(self,
                 dataset_path: str,
                 parameters_path: str = None,
                 landmarks_path: str = None,
                 sequence_list: List[str] = None,
                 # pelvis_normalization: bool = False,
                 # use_landmarks: str = "SMPL_INDEX_LANDAMRKS_REVISED",
                 transferred_landmarks_name: str = "simple",
                 body_model_path: str = "/SMPL-Fitting/data/body_models",
                 num_betas: int = 8,
                 **kwargs):
        """
        Loading the 4DHumanOutfit dataset.

        :param dataset_path: (str) path to FourDHumanOutfit scans
        :param parameters_path: (str) path to the directory where the already
            fitted SMPL parameters are stored; when None, no parameters
            (poses/shapes/trans/genders) and no SMPL fits are produced
        :param landmarks_path: (str) path to the transferred landmarks;
            when None, no landmarks are loaded
        :param sequence_list: (List[str]) list of "subj-clothing-action"
            sequences to load; when None, all sequences are loaded
        :param transferred_landmarks_name: (str) suffix of the landmark file
        :param body_model_path: (str) root directory of the SMPL body models
        :param num_betas: (int) number of SMPL shape coefficients
        """
        # NOTE: subjects flo, i and luc are hidden (held out) and not listed.
        all_male_subjects = ["ben", "bob", "jon", "leo", "mat",
                             "pat", "ray", "ted", "tom"]
        all_female_subjects = ["ada", "bea", "deb", "gia", "joy",
                               "kim", "mia", "sue", "zoe"]
        all_subjects_names = all_male_subjects + all_female_subjects

        # map subject name -> gender string used to pick the body model
        all_genders = (["male"] * len(all_male_subjects) +
                       ["female"] * len(all_female_subjects))
        self.gender_mapper = dict(zip(all_subjects_names, all_genders))

        if sequence_list is not None:
            use_subjects = [seq.split("-")[0] for seq in sequence_list]
        else:
            use_subjects = all_subjects_names

        self.load_parameters = parameters_path is not None
        self.load_landmarks = landmarks_path is not None

        self.scan_paths = []
        self.subject_names = []
        self.action_names = []
        self.sequence_names = []
        self.poses = []
        self.shapes = []
        self.trans = []
        self.genders = []
        self.landmarks = []

        for subj_name in all_subjects_names:
            if subj_name not in use_subjects:
                continue
            # enumerate sequences from the fitted-parameter tree when
            # available, otherwise from the raw-scan tree
            root = parameters_path if self.load_parameters else dataset_path
            all_subj_action_paths = glob(
                os.path.join(root, subj_name, f"{subj_name}-*"))

            for subj_action_path in all_subj_action_paths:
                action_name = os.path.basename(subj_action_path).split("-")[-1]
                clothing_name = os.path.basename(subj_action_path).split("-")[-2]
                sequence_name = f"{subj_name}-{clothing_name}-{action_name}"

                if sequence_list is not None and sequence_name not in sequence_list:
                    continue

                # scans, sorted so frame order matches the stored parameters
                all_seq_scan_paths = sorted(glob(os.path.join(
                    dataset_path, subj_name, sequence_name, "*/*.obj")))

                # landmarks: one file per sequence, indexed per frame below.
                # BUGFIX: previously a missing landmark file silently reused
                # the landmarks of the preceding sequence (stale variable).
                all_seq_landmarks = None
                if self.load_landmarks:
                    all_seq_lm_path = os.path.join(
                        landmarks_path, subj_name, sequence_name,
                        f"landmarks_{transferred_landmarks_name}.pt")
                    if os.path.exists(all_seq_lm_path):
                        all_seq_landmarks = torch.load(
                            all_seq_lm_path,
                            map_location=torch.device("cpu"))

                if self.load_parameters:
                    # fitted SMPL parameters for the whole sequence
                    all_seq_poses = torch.load(
                        os.path.join(subj_action_path, "poses.pt"),
                        map_location=torch.device("cpu")).detach().cpu()
                    all_seq_shapes = torch.load(
                        os.path.join(subj_action_path, "betas.pt"),
                        map_location=torch.device("cpu")).detach().cpu()
                    all_seq_trans = torch.load(
                        os.path.join(subj_action_path, "trans.pt"),
                        map_location=torch.device("cpu")).detach().cpu()
                    all_seq_gender = self.gender_mapper[subj_name]

                N_frames = len(all_seq_scan_paths)  # all_seq_poses.shape[0]

                for frame_ind in range(N_frames):
                    self.scan_paths.append(all_seq_scan_paths[frame_ind])
                    self.subject_names.append(subj_name)
                    self.action_names.append(action_name)
                    self.sequence_names.append(sequence_name)
                    if self.load_parameters:
                        self.poses.append(all_seq_poses[frame_ind])
                        self.shapes.append(all_seq_shapes[frame_ind])
                        self.trans.append(all_seq_trans[frame_ind])
                        self.genders.append(all_seq_gender)
                    if self.load_landmarks:
                        # keep list aligned with scan_paths even when the
                        # landmark file was absent for this sequence
                        self.landmarks.append(
                            all_seq_landmarks[frame_ind]
                            if all_seq_landmarks is not None else None)

        self.dataset_size = len(self.scan_paths)

        # SMPL body models used to re-pose the fitted parameters
        self.bms = {
            gender: smplx.create(body_model_path,
                                 model_type="SMPL",
                                 gender=gender,
                                 num_betas=num_betas,
                                 use_face_contour=False,
                                 ext="pkl")
            for gender in ("MALE", "FEMALE", "NEUTRAL")
        }

    def __getitem__(self, index):
        """Return one scan frame together with its (optional) SMPL fit.

        :return (dict): keys name, sequence_name, vertices, faces, landmarks,
            pose, shape, trans, gender, fit; entries whose source path was
            not provided at construction time are None.
        """
        # load scan
        scan_path = self.scan_paths[index]
        sequence_name = self.sequence_names[index]  # scan_path.split("/")[-2]
        scan_name = os.path.basename(scan_path).split(".obj")[0]
        scan = o3d.io.read_triangle_mesh(scan_path)
        scan_vertices = torch.from_numpy(np.asarray(scan.vertices))
        scan_faces = torch.from_numpy(np.asarray(scan.triangles))
        # point-cloud scans come back with no triangles
        scan_faces = scan_faces if scan_faces.shape[0] > 0 else None

        # BUGFIX: landmarks were gated on load_parameters instead of
        # load_landmarks, and when no parameters were loaded the names
        # scan_gender/fit stayed unbound while the return dict indexed the
        # empty poses/shapes/trans/genders lists (NameError/IndexError).
        scan_landmarks = self.landmarks[index] if self.load_landmarks else None

        # create fitting
        fit = None
        if self.load_parameters:
            scan_gender = self.genders[index].upper()
            fit_pose = self.poses[index].unsqueeze(0)
            fit_shape = self.shapes[index].unsqueeze(0)
            fit_trans = self.trans[index].unsqueeze(0)
            # smplx applies transl itself, so no extra "fit + fit_trans"
            fit = self.bms[scan_gender](body_pose=fit_pose[:, 3:],
                                        betas=fit_shape,
                                        global_orient=fit_pose[:, :3],
                                        transl=fit_trans,
                                        pose2rot=True
                                        ).vertices[0].detach().cpu()

        return {"name": f"{sequence_name}-{scan_name}",
                "sequence_name": sequence_name,
                "vertices": scan_vertices,
                "faces": scan_faces,
                "landmarks": scan_landmarks,
                "pose": self.poses[index] if self.load_parameters else None,
                "shape": self.shapes[index] if self.load_parameters else None,
                "trans": self.trans[index] if self.load_parameters else None,
                "gender": self.genders[index] if self.load_parameters else None,
                "fit": fit}

    def __len__(self):
        """Number of scan frames across all loaded sequences."""
        return self.dataset_size
0 commit comments