'''
wrnchAI.py -- a pure Python interface to wrnchAI
'''
from collections import namedtuple
import json
import platform
import _wrnchAI
__version__ = _wrnchAI.__version__
# this tag is required for the sphinx docs
with_openvino = _wrnchAI.with_openvino #:
def license_check():
return _wrnchAI.license_check()
def license_check_path(license_path=''):
'''
The `license_path` argument must either be '.', to search the current
directory, or the path to the license. If it points directly to a
file, the file is probed for a valid license. If it points to a
directory, then all files ending with '.lic' are probed for a valid
license. If not provided, the default license search path is the user
home directory.
'''
return _wrnchAI.license_check_path(license_path)
def license_check_string(license_string=''):
'''
The `license_string` argument can be the license key, the license file contents,
or empty. If empty, the default license path will be searched.
'''
return _wrnchAI.license_check_string(license_string)
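# Illustrative sketch (not part of the module): a client script would typically
# verify the license before constructing an estimator. The license path below
# is hypothetical, and a zero return code is assumed to indicate success.
#
#     import wrnchAI
#     code = wrnchAI.license_check_path('/path/to/licenses')
#     if code != 0:
#         raise RuntimeError(wrnchAI.returncode_describe(code))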
def returncode_describe(returncode):
return _wrnchAI.returncode_describe(returncode)
def num_available_gpu():
return _wrnchAI.num_available_gpu()
class PoseParams(object):
'''
"Configure-time" parameters of a pose estimator. These parameters may only
be set once on a PoseEstimator and may not be changed dynamically.
'''
def __init__(self, impl=None):
if impl is None:
self.impl = _wrnchAI.PoseParams()
else:
self.impl = impl
@property
def bone_sensitivity(self):
return self.impl.bone_sensitivity
@bone_sensitivity.setter
def bone_sensitivity(self, sensitivity):
self.impl.bone_sensitivity = sensitivity
@property
def joint_sensitivity(self):
return self.impl.joint_sensitivity
@joint_sensitivity.setter
def joint_sensitivity(self, sensitivity):
self.impl.joint_sensitivity = sensitivity
@property
def net_precision_2d(self):
return self.impl.net_precision_2d
@net_precision_2d.setter
def net_precision_2d(self, precision):
self.impl.net_precision_2d = precision
@property
def net_precision_3d(self):
return self.impl.net_precision_3d
@net_precision_3d.setter
def net_precision_3d(self, precision):
self.impl.net_precision_3d = precision
@property
def enable_tracking(self):
return self.impl.enable_tracking
@property
def tracker_kind(self):
return self.impl.tracker_kind
@tracker_kind.setter
def tracker_kind(self, tracker_kind):
self.impl.tracker_kind = tracker_kind
@enable_tracking.setter
def enable_tracking(self, true_or_false):
'''
Enable or disable the (stateful) tracking module on a pose estimator
(tracking is enabled by default).
The tracker attempts to assign consistent ids to persons across frames by
looking at the positions and "appearance" of persons (ids are accessed through
the `id` property on pose types, e.g., `Pose2d.id`). The tracker works best
with stable lighting conditions, BGR (color) frames, and accurate, complete
detections. To improve detections, a higher 2d net resolution may be needed.
To obtain more complete detections, it may help to set a high
`min_valid_joints` (for example, 12 or more) on the PoseParams used to
configure the PoseEstimator; note that the default `min_valid_joints` used by
PoseEstimators is currently 3. The tracker comes with a nontrivial runtime
cost -- users not interested in tracking should disable it with
`pose_params.enable_tracking = False`.
:param true_or_false: bool. `False` disables tracking,
`True` enables tracking (the default).
'''
self.impl.enable_tracking = true_or_false
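# Illustrative sketch (not part of the module): configuring tracking as
# described in the docstring above. Disable the tracker when ids are not
# needed, or raise `min_valid_joints` to favour complete detections.
#
#     params = wrnchAI.PoseParams()
#     params.enable_tracking = False      # skip the tracker's runtime cost
#     # ... or, when tracking is wanted, prefer complete detections:
#     # params.min_valid_joints = 12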
@property
def preferred_net_width(self):
return self.impl.preferred_net_width
@preferred_net_width.setter
def preferred_net_width(self, net_width):
self.impl.preferred_net_width = net_width
@property
def preferred_net_height(self):
return self.impl.preferred_net_height
@preferred_net_height.setter
def preferred_net_height(self, net_height):
self.impl.preferred_net_height = net_height
@property
def preferred_net_width_3d(self):
return self.impl.preferred_net_width_3d
@preferred_net_width_3d.setter
def preferred_net_width_3d(self, net_width_3d):
self.impl.preferred_net_width_3d = net_width_3d
@property
def preferred_net_height_3d(self):
return self.impl.preferred_net_height_3d
@preferred_net_height_3d.setter
def preferred_net_height_3d(self, net_height_3d):
self.impl.preferred_net_height_3d = net_height_3d
@property
def smoothing_betaX(self):
return self.impl.smoothing_betaX
@smoothing_betaX.setter
def smoothing_betaX(self, beta_x):
self.impl.smoothing_betaX = beta_x
@property
def smoothing_betaY(self):
return self.impl.smoothing_betaY
@smoothing_betaY.setter
def smoothing_betaY(self, beta_y):
self.impl.smoothing_betaY = beta_y
@property
def smoothing_betaZ(self):
return self.impl.smoothing_betaZ
@smoothing_betaZ.setter
def smoothing_betaZ(self, beta_z):
self.impl.smoothing_betaZ = beta_z
@property
def smoothing_cutoff_freq_velocity(self):
return self.impl.smoothing_cutoff_freq_velocity
@smoothing_cutoff_freq_velocity.setter
def smoothing_cutoff_freq_velocity(self, velocity):
self.impl.smoothing_cutoff_freq_velocity = velocity
@property
def smoothing_min_cutoff_freq_position(self):
return self.impl.smoothing_min_cutoff_freq_position
@smoothing_min_cutoff_freq_position.setter
def smoothing_min_cutoff_freq_position(self, position):
self.impl.smoothing_min_cutoff_freq_position = position
@property
def min_valid_joints(self):
return self.impl.min_valid_joints
@min_valid_joints.setter
def min_valid_joints(self, num_joints):
self.impl.min_valid_joints = num_joints
class IKParams(object):
'''
Parameters for PoseIK IK-solvers.
'''
def __init__(self):
self.impl = _wrnchAI.IKParams()
@property
def trans_reach(self):
return self.impl.trans_reach
@trans_reach.setter
def trans_reach(self, reach):
self.impl.trans_reach = reach
@property
def rot_reach(self):
return self.impl.rot_reach
@rot_reach.setter
def rot_reach(self, reach):
self.impl.rot_reach = reach
@property
def pull(self):
return self.impl.pull
@pull.setter
def pull(self, pull):
self.impl.pull = pull
@property
def resist(self):
return self.impl.resist
@resist.setter
def resist(self, resist):
self.impl.resist = resist
@property
def max_angular_velocity(self):
return self.impl.max_angular_velocity
@max_angular_velocity.setter
def max_angular_velocity(self, velocity):
self.impl.max_angular_velocity = velocity
@property
def fps(self):
return self.impl.fps
@fps.setter
def fps(self, fps):
self.impl.fps = fps
@property
def joint_visibility_thresh(self):
return self.impl.joint_visibility_thresh
@joint_visibility_thresh.setter
def joint_visibility_thresh(self, thresh):
self.impl.joint_visibility_thresh = thresh
class PoseEstimatorOptions(object):
'''
Options used by `PoseEstimator.process_frame`. Options select which
features to run on calls to process_frame (e.g., whether to estimate
hands or not).
'''
def __init__(self, impl=None):
if impl is None:
impl = _wrnchAI.PoseEstimatorOptions()
self.impl = impl
@property
def estimate_mask(self):
return self.impl.estimate_mask
@estimate_mask.setter
def estimate_mask(self, true_or_false):
self.impl.estimate_mask = true_or_false
@property
def estimate_3d(self):
return self.impl.estimate_3d
@estimate_3d.setter
def estimate_3d(self, true_or_false):
self.impl.estimate_3d = true_or_false
@property
def estimate_hands(self):
return self.impl.estimate_hands
@estimate_hands.setter
def estimate_hands(self, true_or_false):
self.impl.estimate_hands = true_or_false
@property
def estimate_all_handboxes(self):
return self.impl.estimate_all_handboxes
@estimate_all_handboxes.setter
def estimate_all_handboxes(self, true_or_false):
self.impl.estimate_all_handboxes = true_or_false
@property
def estimate_heads(self):
return self.impl.estimate_heads
@estimate_heads.setter
def estimate_heads(self, true_or_false):
self.impl.estimate_heads = true_or_false
@property
def estimate_face_poses(self):
return self.impl.estimate_face_poses
@estimate_face_poses.setter
def estimate_face_poses(self, true_or_false):
self.impl.estimate_face_poses = true_or_false
@property
def estimate_single(self):
return self.impl.estimate_single
@estimate_single.setter
def estimate_single(self, true_or_false):
self.impl.estimate_single = true_or_false
@property
def use_ik(self):
return self.impl.use_ik
@use_ik.setter
def use_ik(self, true_or_false):
self.impl.use_ik = true_or_false
@property
def main_person_id_mode(self):
return self.impl.main_person_id_mode
@main_person_id_mode.setter
def main_person_id_mode(self, mode):
self.impl.main_person_id_mode = mode
@property
def enable_joint_smoothing(self):
return self.impl.enable_joint_smoothing
@enable_joint_smoothing.setter
def enable_joint_smoothing(self, true_or_false):
self.impl.enable_joint_smoothing = true_or_false
@property
def enable_head_smoothing(self):
return self.impl.enable_head_smoothing
@enable_head_smoothing.setter
def enable_head_smoothing(self, true_or_false):
self.impl.enable_head_smoothing = true_or_false
@property
def rotation_multiple_of_90(self):
return self.impl.rotation_multiple_of_90
@rotation_multiple_of_90.setter
def rotation_multiple_of_90(self, multiple):
self.impl.rotation_multiple_of_90 = multiple
class PoseEstimator(object):
'''
Class for estimating 2d, 3d, hand, head, and face poses.
'''
def __init__(self,
models_path=None,
model_name_2d='',
license_path=None,
license_string=None,
params=None,
gpu_id=0,
output_format=None,
impl=None):
'''
Initialize a pose estimator
:param models_path: str or None. full path to the models directory
:param model_name_2d: str. must be the full name of the 2D pose model,
e.g., wrnch_pose2d_seg_v4.2.enc (or empty)
:param license_path: str. must either be '.', to search the current
directory, or the path to the license. If it points directly to a
file, the file is probed for a valid license. If it points to a
directory, then all files ending with '.lic' are probed for a
valid license. If not provided, the default license search path
is the user home directory.
:param license_string: str or None. the license key or license file
contents. If not provided, a license file is searched for on
`license_path`.
:param params: PoseParams. The parameters which set "configure time"
data on the pose estimator.
:param gpu_id: int. The GPU id on which the pose estimator will run.
:param output_format: JointDefinition. The joint format in which the
pose estimator returns poses.
:param impl: _wrnchAI.PoseEstimator. Not normally used by clients of
wrnchAI.
'''
def impl_or_none(obj):
return obj.impl if obj is not None else None
if impl is None:
impl = _wrnchAI.PoseEstimator(
models_path=models_path,
model_name_2d=model_name_2d,
license_path=license_path if license_path else '',
license_string=license_string if license_string else '',
params=impl_or_none(params),
gpu_id=gpu_id,
output_format=impl_or_none(output_format))
self.impl = impl
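# Illustrative sketch (not part of the module): constructing an estimator from
# a client script. The models path below is hypothetical; `params` is optional.
#
#     import wrnchAI
#     params = wrnchAI.PoseParams()
#     params.enable_tracking = False
#     estimator = wrnchAI.PoseEstimator(models_path='/path/to/wrnch/models',
#                                       params=params,
#                                       gpu_id=0)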
def initialize_3d(self, models_path, ik_params=None):
'''
Initialize the 3d pose estimation capabilities of a pose estimator.
:param models_path: str. Full path to the models directory.
:param ik_params: IKParams or None. Optional IK parameters to set
on the 3d estimation subsystem.
'''
if ik_params is None:
self.impl.initialize_3d(models_path)
else:
self.impl.initialize_3d(ik_params, models_path)
def initialize_hands2d(self, models_path):
'''
Initialize the hand pose estimation capabilities of a pose estimator.
:param models_path: str. Full path to the models directory.
'''
self.impl.initialize_hands2d(models_path)
def initialize_head(self,
models_directory,
beta_x=0.1,
beta_y=0.1,
min_freq_cutff_position=0.1,
freq_cutoff_velocity=0.2):
'''
Initialize the head pose estimation capabilities of a pose estimator.
:param models_directory: str. Full path to the models directory.
:param beta_x, beta_y, min_freq_cutff_position, freq_cutoff_velocity:
float. Smoothing filter parameters applied to the estimated head poses.
'''
self.impl.initialize_head(
models_directory=models_directory,
beta_x=beta_x,
beta_y=beta_y,
min_freq_cutff_position=min_freq_cutff_position,
freq_cutoff_velocity=freq_cutoff_velocity)
def process_frame(self, bgr_array, options):
'''
The `bgr_array` argument should be a numpy array representing an image
in openCV 3-channel format, i.e., a row major sequence of bgr triples
[b1,g1,r1,b2,g2,r2, ... ] intensity values in the range [0,255]
'''
return self.impl.process_frame(bgr_array, options.impl)
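# Illustrative sketch (not part of the module): feeding OpenCV frames to
# `process_frame`. Assumes `cv2` is installed and `estimator` was constructed
# as sketched above; cv2.imread/VideoCapture return row-major BGR uint8 arrays.
#
#     import cv2
#     options = wrnchAI.PoseEstimatorOptions()
#     frame = cv2.imread('image.jpg')           # BGR, uint8
#     estimator.process_frame(frame, options)
#     poses = estimator.humans_2d()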
def process_frame_gray(self, gray_array, options):
'''
The `gray_array` argument should be a numpy array representing an image
in openCV 1-channel format, i.e., a row major sequence of intensity
values in the range [0,255]
'''
return self.impl.process_frame_gray(gray_array, options.impl)
def has_IK(self):
return self.impl.has_IK()
def humans_2d(self):
'''
Get the Pose2d's last estimated by this PoseEstimator.
These poses should be interpreted with respect to the
`human_2d_output_format` JointDefinition.
:returns: list of Pose2d
'''
return pose_2d_list(self.impl.humans_2d())
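# Illustrative sketch (not part of the module): reading joint positions out of
# the poses returned by `humans_2d`, using the estimator's 2d output format.
# The joint name 'HEAD' is taken from the `get_joint_index` docstring example.
#
#     joint_def = estimator.human_2d_output_format()
#     head_idx = joint_def.get_joint_index('HEAD')
#     for pose in estimator.humans_2d():
#         joints = pose.joints()                 # flattened x,y pairs
#         x, y = joints[2 * head_idx], joints[2 * head_idx + 1]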
def left_hands(self):
'''
Get the poses of left hands last estimated by this PoseEstimator.
These poses should be interpreted with respect to the
`hand_output_format` JointDefinition.
:returns: list of Pose2d
'''
return pose_2d_list(self.impl.left_hands())
def right_hands(self):
'''
Get the poses of right hands last estimated by this PoseEstimator.
These poses should be interpreted with respect to the
`hand_output_format` JointDefinition.
:returns: list of Pose2d
'''
return pose_2d_list(self.impl.right_hands())
def heads(self):
'''
Get the head poses last estimated by this PoseEstimator.
:returns: list of PoseHead
'''
return [PoseHead(head) for head in self.impl.heads()]
def faces(self):
'''
Get the face poses last estimated by this PoseEstimator.
:returns: list of PoseFace
'''
return [PoseFace(face) for face in self.impl.faces()]
def humans_3d(self):
'''
Get the Pose3d's last estimated by this PoseEstimator.
These poses should be interpreted with respect to the
`human_3d_output_format` JointDefinition.
:returns: list of Pose3d
'''
return pose_3d_list(self.impl.humans_3d())
def raw_humans_3d(self):
'''
Get the "raw" Pose3d's last estimated by this PoseEstimator.
These poses should be interpreted with respect to the
`human_3d_output_format_raw` JointDefinition.
:returns: list of Pose3d
'''
return pose_3d_list(self.impl.raw_humans_3d())
def mask(self):
'''
The mask is stored as four 8-bit single channel images
whose values represent the confidence that a human is present in that
pixel. The four generated masks are stored consecutively as follows:
(0) body; (1) right hand; (2) left hand; and (3) both hands.
'''
return self.impl.mask()
def hand_boxes(self):
def translate(native_handbox_pair):
return HandboxPair(
left=_translate_box_2d(native_handbox_pair.left()),
right=_translate_box_2d(native_handbox_pair.right()),
id=native_handbox_pair.id)
return [translate(box_pair) for box_pair in self.impl.hand_boxes()]
def set_hand_segmenter(self, segmenter_type):
'''
Set the hand segmenter type on a pose estimator.
:param segmenter_type: HandSegmenterType.
'''
self.impl.set_hand_segmenter(segmenter_type)
def hand_segmenter(self):
return self.impl.hand_segmenter()
def input_width(self):
return self.impl.input_width()
def input_height(self):
return self.impl.input_height()
def params(self):
return self.impl.params()
def get_IK_property(self, prop, solver_id):
'''
:param prop: IKProperty.
:param solver_id: int. Should be between 0 and `get_num_IK_solvers() - 1`.
:returns: float.
'''
return self.impl.get_IK_property(prop, solver_id)
def set_IK_property(self, prop, value, solver_id):
'''
:param prop: IKProperty
:param value: float
:param solver_id: int
'''
self.impl.set_IK_property(prop, value, solver_id)
def tpose_3d(self):
'''
Get the "T-Pose" used by the 3d solver.
:returns: Pose3d.
'''
return Pose3d(self.impl.tpose_3d())
def get_num_IK_solvers(self):
return self.impl.get_num_IK_solvers()
def human_2d_output_format(self):
'''
Return the JointDefinition with which 2d poses returned by
`self.humans_2d` are to be interpreted.
:returns: JointDefinition
'''
return JointDefinition(self.impl.human_2d_output_format())
def human_3d_output_format(self):
'''
Return the JointDefinition with which 3d poses returned by
`self.humans_3d` are to be interpreted.
:returns: JointDefinition
'''
return JointDefinition(self.impl.human_3d_output_format())
def human_3d_output_format_raw(self):
'''
Return the JointDefinition with which 3d poses returned by
`self.raw_humans_3d` are to be interpreted.
:returns: JointDefinition
'''
return JointDefinition(self.impl.human_3d_output_format_raw())
def hand_output_format(self):
'''
Return the JointDefinition with which the hand poses returned by
`self.left_hands` and `self.right_hands` are to be interpreted.
:returns: JointDefinition
'''
return JointDefinition(self.impl.hand_output_format())
def pose_params(self):
'''
Get the pose params which were used when configuring this
PoseEstimator.
:returns: PoseParams
'''
return PoseParams(self.impl.pose_params())
def serialize(self):
'''
Serialize a pose estimator to memory as a string.
Such a string can be written to disk (using binary mode) and re-read
later using the `PoseEstimator.deserialize` method to greatly speed up
initialization times.
:returns: str
'''
return self.impl.serialize()
@classmethod
def deserialize(cls, string, device_id=0, license_string='', license_path=''):
'''
Deserialize a serialized estimator string into a fresh PoseEstimator.
Such a string may be read from disk (using binary mode) from a previous
call to `serialize`.
:param string: str. the serialized data.
:param device_id: int. the GPU device id on which the new pose
estimator will run.
:param license_string: str. the license key or license file contents (the
default is empty, which implies to search for a license file in license_path)
:param license_path: str. the license file path (the default is empty, which
implies the license path is the user home directory)
:returns: PoseEstimator
'''
return cls(impl=_wrnchAI.PoseEstimator.deserialize(string, device_id, license_string, license_path))
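# Illustrative sketch (not part of the module): caching an initialized
# estimator on disk to speed up later start-ups. The file name is hypothetical;
# per the docstrings above, the serialized blob is written and read in binary
# mode.
#
#     with open('estimator.bin', 'wb') as f:
#         f.write(estimator.serialize())
#     ...
#     with open('estimator.bin', 'rb') as f:
#         estimator = wrnchAI.PoseEstimator.deserialize(f.read(), device_id=0)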
def results_to_json(self):
'''
Return the current results held in the estimator, decoded from JSON into
native Python objects.
'''
return json.loads(self.impl.results_to_json().decode('utf-8'))
def clone(self):
'''
Clone a pose estimator: this is a quick way to create an identical copy
of a given pose estimator -- much faster than constructing a new estimator
from the model files.
:returns: a fresh pose estimator, identical to `self`.
'''
return PoseEstimator(impl=self.impl.clone())
def reset(self):
'''
Reset a pose estimator.
'''
self.impl.reset()
@property
def tracker_kind(self):
return self.impl.tracker_kind
@property
def is_3d_initialized(self):
return self.impl.is_3d_initialized
@property
def is_head_initialized(self):
return self.impl.is_head_initialized
@property
def are_hands_initialized(self):
return self.impl.are_hands_initialized
@property
def supports_mask_estimation(self):
return self.impl.supports_mask_estimation
class Pose2d(object):
'''
A Pose2d holds joint positions and scores computed by a PoseEstimator.
By itself, a Pose2d cannot be interpreted -- one needs a JointDefinition to
be able to make sense of the floats inside `joints` and `scores`.
'''
def __init__(self, impl):
self.impl = impl
@property
def is_main(self):
return self.impl.is_main
@property
def id(self):
return self.impl.id
@id.setter
def id(self, new_id):
self.impl.id = new_id
def joints(self):
'''
:returns: 1d numpy array of float. This is a flattened array of
the x,y pairs of joint positions inside the pose. Negative values
represent "invisible" joints.
'''
return self.impl.joints()
def bounding_box(self):
return _translate_box_2d(self.impl.bounding_box())
def scores(self):
'''
:returns: 1d numpy array of float giving the score of each joint
in the pose.
'''
return self.impl.scores()
def score(self):
'''
:returns: float. The clustering score of the pose, if clustered.
'''
return self.impl.score()
def pose_2d_list(pose_2ds):
return [Pose2d(pose_2d) for pose_2d in pose_2ds]
class Pose3d(object):
'''
A Pose3d holds joint positions, scores, and rotations computed by a
PoseEstimator or a PoseIK solver.
By itself, a Pose3d cannot be interpreted -- one needs a JointDefinition to
be able to make sense of the floats inside `joints`, `scores`, and
`rotations`.
'''
def __init__(self, impl):
self.impl = impl
@property
def id(self):
return self.impl.id
def positions(self):
'''
:returns: 1d numpy array of float. This is a flattened array of the
x,y,z triples inside the pose. Negative or NaN values represent
"invisible" joints.
'''
return self.impl.positions()
def rotations(self):
'''
:returns: 1d numpy array of float. This is a flattened array of the
4-tuple quaternions giving the local rotations of joints.
'''
return self.impl.rotations()
def scale_hint(self):
return self.impl.scale_hint()
def pose_3d_list(pose_3ds):
return [Pose3d(pose_3d) for pose_3d in pose_3ds]
class PoseHead(object):
def __init__(self, _pose_head):
self.impl = _pose_head
@property
def id(self):
return self.impl.id
@property
def estimation_success(self):
return self.impl.estimation_success
@property
def bounding_box(self):
return _translate_box_2d(self.impl.bounding_box())
def head_rotation(self):
return self.impl.head_rotation()
def head_rotation_euler(self):
return self.impl.head_rotation_euler()
def head_rotation_quat(self):
return self.impl.head_rotation_quat()
class PoseFace(object):
def __init__(self, _pose_face):
self.impl = _pose_face
@property
def id(self):
return self.impl.id
def landmarks(self):
return self.impl.landmarks()
def arrow(self):
return self.impl.arrow()
class JointDefinition(object):
'''
JointDefinition describes a graph, with vertices being "joints"
and edges being "bones". Joints are defined by name, from the
`joint_names` function. Bones are defined by a list of pairs of
integers (from the `bone_pairs` function). Each integer in a bone pair
is an index into the `joint_names` list, so each pair represents a bone
connecting the corresponding pair of joints.
'''
def __init__(self, _format):
self.impl = _format
def joint_names(self):
'''
:returns: list of str. a list of the joint names in this
JointDefinition
'''
return self.impl.joint_names()
@property
def num_joints(self):
return self.impl.num_joints
def bone_pairs(self):
'''
:returns: list of pairs of int. Each pair gives the indices of the two
joints connected by a bone in this JointDefinition.
'''
return self.impl.bone_pairs()
def name(self):
return self.impl.name()
def get_joint_index(self, joint_name):
'''
Given a Pose object (e.g., Pose2d or Pose3d) in this JointDefinition,
get the index in the joints (or scores, or rotations) array giving
the value of the joint with this name. If joint_name is not in this
definition, an exception is raised.
For example, if `p` is a Pose2d in JointDefinition joint_def,
then to find the joint values for joint named "HEAD", we can do:
joint_index = joint_def.get_joint_index("HEAD")
x = p.joints()[2 * joint_index]
y = p.joints()[2 * joint_index + 1]
'''
return self.impl.get_joint_index(joint_name)
def print_joint_definition(self):
return self.impl.print_joint_definition()
class JointDefinitionRegistry(object):
'''
A registry of all available JointDefinitions inside wrnchAI.
'''
def __init__(self):
self.impl = _wrnchAI.JointDefinitionRegistry()
@staticmethod
def get(name):
return JointDefinition(_wrnchAI.JointDefinitionRegistry.get(name))
@staticmethod
def available_definitions():
return _wrnchAI.JointDefinitionRegistry.available_definitions()
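# Illustrative sketch (not part of the module): listing the joint formats
# shipped with wrnchAI and fetching one by name. This assumes
# `available_definitions()` reports definition names accepted by `get`.
#
#     names = wrnchAI.JointDefinitionRegistry.available_definitions()
#     joint_def = wrnchAI.JointDefinitionRegistry.get(names[0])
#     print(joint_def.joint_names())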
class PoseIK(object):
'''
Inverse Kinematics solver.
'''
def __init__(self,
input_format,
params,
initial_pose=None):
if initial_pose is not None:
self.impl = _wrnchAI.PoseIK(input_format=input_format.impl,
params=params.impl,
initial_pose=self._extract_pose_impl(
initial_pose))
else:
self.impl = _wrnchAI.PoseIK(input_format=input_format.impl,
params=params.impl)
@staticmethod
def _extract_pose_impl(pose):
if isinstance(pose, Pose3d):
return pose.impl
return pose
def set_params(self, params):
self.impl.set_params(params)
def get_params(self):
return self.impl.get_params()
def solve(self, pose, visibilities):
return Pose3d(self.impl.solve(self._extract_pose_impl(pose),
visibilities))
def reset(self, initial_pose=None):
if initial_pose is None:
self.impl.reset()
else:
self.impl.reset(self._extract_pose_impl(initial_pose))
def get_ik_property(self, prop):
return self.impl.get_ik_property(prop)
def set_ik_property(self, prop, value):
self.impl.set_ik_property(prop, value)
@property
def get_output_format(self):
return JointDefinition(self.impl.get_output_format)
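# Illustrative sketch (not part of the module): running the IK solver on raw 3d
# poses. Using the raw 3d output format as the solver input format and passing
# per-joint visibilities as a list of floats are assumptions; consult the
# wrnchAI documentation for the exact format expected by `solve`.
#
#     ik_params = wrnchAI.IKParams()
#     input_format = estimator.human_3d_output_format_raw()
#     solver = wrnchAI.PoseIK(input_format, ik_params)
#     for raw_pose in estimator.raw_humans_3d():
#         visibilities = [1.0] * input_format.num_joints   # assumed: all visible
#         ik_pose = solver.solve(raw_pose, visibilities)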
Sensitivity = _wrnchAI.Sensitivity
MainPersonID = _wrnchAI.MainPersonID
NetPrecision = _wrnchAI.NetPrecision
HandSegmenterType = _wrnchAI.HandSegmenterType
IKProperty = _wrnchAI.IKProperty
TrackerKind = _wrnchAI.TrackerKind
Box2d = namedtuple('Box2d', ['min_x', 'min_y', 'width', 'height'])
Box3d = namedtuple(
'Box3d', ['min_x', 'min_y', 'min_z', 'width', 'height', 'depth'])