Skip to content

Commit 31ef4d7

Browse files
committed
small changes to person association
1 parent 938f2f2 commit 31ef4d7

File tree

10 files changed

+76
-69
lines changed

10 files changed

+76
-69
lines changed

Pose2Sim/S00_Demo_Session/Config.toml

+25-24
Original file line numberDiff line numberDiff line change
@@ -18,7 +18,8 @@
1818

1919

2020
[project]
21-
# multi_person = false # true for trials with multiple participants. If false, only the main person in scene is analyzed.
21+
# multi_person = false # true for trials with multiple participants. If false, only the main person in scene is analyzed (and it runs much faster).
22+
# nb_persons_to_detect = 2 # checked only if multi_person is selected
2223
frame_rate = 60 # fps
2324
frame_range = [] # For example [10,300], or [] for all frames
2425
## N.B.: If you want a time range instead, use frame_range = time_range * frame_rate
@@ -118,13 +119,13 @@ weights_kpt = [1,1] # Pris en compte uniquement si on a plusieurs keypoints
118119
tracked_keypoint = 'Neck' # If the neck is not detected by the pose_model, check skeleton.py
119120
# and choose a stable point for tracking the person of interest (e.g., 'right_shoulder' with BLAZEPOSE)
120121
reproj_error_threshold_association = 20 # px
121-
likelihood_threshold_association = 0.05
122+
likelihood_threshold_association = 0.3
122123

123124

124125
[triangulation]
125126
reorder_trc = false # only checked if multi_person analysis
126127
reproj_error_threshold_triangulation = 15 # px
127-
likelihood_threshold_triangulation= 0.05
128+
likelihood_threshold_triangulation= 0.3
128129
min_cameras_for_triangulation = 2
129130
interpolation = 'cubic' #linear, slinear, quadratic, cubic, or none
130131
# 'none' if you don't want to interpolate missing points
@@ -187,65 +188,65 @@ opensim_bin_path = 'C:\OpenSim 4.4\bin'
187188
name = "CHip"
188189
id = "None"
189190
[[pose.CUSTOM.children]]
190-
id = 12
191191
name = "RHip"
192+
id = 12
192193
[[pose.CUSTOM.children.children]]
193-
id = 14
194194
name = "RKnee"
195+
id = 14
195196
[[pose.CUSTOM.children.children.children]]
196-
id = 16
197197
name = "RAnkle"
198+
id = 16
198199
[[pose.CUSTOM.children.children.children.children]]
199-
id = 22
200200
name = "RBigToe"
201+
id = 22
201202
[[pose.CUSTOM.children.children.children.children.children]]
202-
id = 23
203203
name = "RSmallToe"
204+
id = 23
204205
[[pose.CUSTOM.children.children.children.children]]
205-
id = 24
206206
name = "RHeel"
207+
id = 24
207208
[[pose.CUSTOM.children]]
208-
id = 11
209209
name = "LHip"
210+
id = 11
210211
[[pose.CUSTOM.children.children]]
211-
id = 13
212212
name = "LKnee"
213+
id = 13
213214
[[pose.CUSTOM.children.children.children]]
214-
id = 15
215215
name = "LAnkle"
216+
id = 15
216217
[[pose.CUSTOM.children.children.children.children]]
217-
id = 19
218218
name = "LBigToe"
219+
id = 19
219220
[[pose.CUSTOM.children.children.children.children.children]]
220-
id = 20
221221
name = "LSmallToe"
222+
id = 20
222223
[[pose.CUSTOM.children.children.children.children]]
223-
id = 21
224224
name = "LHeel"
225+
id = 21
225226
[[pose.CUSTOM.children]]
226-
id = 17
227227
name = "Neck"
228+
id = 17
228229
[[pose.CUSTOM.children.children]]
229-
id = 18
230230
name = "Head"
231+
id = 18
231232
[[pose.CUSTOM.children.children.children]]
232-
id = 0
233233
name = "Nose"
234+
id = 0
234235
[[pose.CUSTOM.children.children]]
235-
id = 6
236236
name = "RShoulder"
237+
id = 6
237238
[[pose.CUSTOM.children.children.children]]
238-
id = 8
239239
name = "RElbow"
240+
id = 8
240241
[[pose.CUSTOM.children.children.children.children]]
241-
id = 10
242242
name = "RWrist"
243+
id = 10
243244
[[pose.CUSTOM.children.children]]
244-
id = 5
245245
name = "LShoulder"
246+
id = 5
246247
[[pose.CUSTOM.children.children.children]]
247-
id = 7
248248
name = "LElbow"
249+
id = 7
249250
[[pose.CUSTOM.children.children.children.children]]
250-
id = 9
251251
name = "LWrist"
252+
id = 9

Pose2Sim/S00_Demo_Session/S00_P00_SingleParticipant/Config.toml

+2-1
Original file line numberDiff line numberDiff line change
@@ -18,7 +18,8 @@
1818

1919

2020
# [project]
21-
# multi_person = false # true for trials with multiple participants. If false, only the main person in scene is analyzed.
21+
# multi_person = false # true for trials with multiple participants. If false, only the main person in scene is analyzed (and it runs much faster).
22+
# nb_persons_to_detect = 2 # checked only if multi_person is selected
2223
# frame_rate = 60 # FPS
2324
# frame_range = [] # For example [10,300], or [] for all frames
2425
## N.B.: If you want a time range instead, use frame_range = time_range * frame_rate

Pose2Sim/S00_Demo_Session/S00_P00_SingleParticipant/S00_P00_T00_StaticTrial/Config.toml

+2-1
Original file line numberDiff line numberDiff line change
@@ -18,7 +18,8 @@
1818

1919

2020
# [project]
21-
# multi_person = false # true for trials with multiple participants. If false, only the main person in scene is analyzed.
21+
# multi_person = false # true for trials with multiple participants. If false, only the main person in scene is analyzed (and it runs much faster).
22+
# nb_persons_to_detect = 2 # checked only if multi_person is selected
2223
# frame_rate = 60 # FPS
2324
# frame_range = [] # For example [10,300], or [] for all frames
2425
## N.B.: If you want a time range instead, use frame_range = time_range * frame_rate

Pose2Sim/S00_Demo_Session/S00_P00_SingleParticipant/S00_P00_T01_BalancingTrial/Config.toml

+2-1
Original file line numberDiff line numberDiff line change
@@ -18,7 +18,8 @@
1818

1919

2020
# [project]
21-
# multi_person = false # true for trials with multiple participants. If false, only the main person in scene is analyzed.
21+
# multi_person = false # true for trials with multiple participants. If false, only the main person in scene is analyzed (and it runs much faster).
22+
# nb_persons_to_detect = 2 # checked only if multi_person is selected
2223
# frame_rate = 60 # FPS
2324
# frame_range = [] # For example [10,300], or [] for all frames
2425
## N.B.: If you want a time range instead, use frame_range = time_range * frame_rate

Pose2Sim/S00_Demo_Session/S00_P01_MultiParticipants/Config.toml

+2-1
Original file line numberDiff line numberDiff line change
@@ -18,7 +18,8 @@
1818

1919

2020
# [project]
21-
# multi_person = false # true for trials with multiple participants. If false, only the main person in scene is analyzed.
21+
# multi_person = false # true for trials with multiple participants. If false, only the main person in scene is analyzed (and it runs much faster).
22+
# nb_persons_to_detect = 2 # checked only if multi_person is selected
2223
# frame_rate = 60 # FPS
2324
# frame_range = [] # For example [10,300], or [] for all frames
2425
## N.B.: If you want a time range instead, use frame_range = time_range * frame_rate

Pose2Sim/S00_Demo_Session/S00_P01_MultiParticipants/S00_P01_T00_StaticTrialParticipant1/Config.toml

+2-1
Original file line numberDiff line numberDiff line change
@@ -18,7 +18,8 @@
1818

1919

2020
# [project]
21-
# multi_person = false # true for trials with multiple participants. If false, only the main person in scene is analyzed.
21+
# multi_person = false # true for trials with multiple participants. If false, only the main person in scene is analyzed (and it runs much faster).
22+
# nb_persons_to_detect = 2 # checked only if multi_person is selected
2223
# frame_rate = 60 # FPS
2324
# frame_range = [] # For example [10,300], or [] for all frames
2425
## N.B.: If you want a time range instead, use frame_range = time_range * frame_rate

Pose2Sim/S00_Demo_Session/S00_P01_MultiParticipants/S00_P01_T01_StaticTrialParticipant2/Config.toml

+2-1
Original file line numberDiff line numberDiff line change
@@ -18,7 +18,8 @@
1818

1919

2020
# [project]
21-
# multi_person = false # true for trials with multiple participants. If false, only the main person in scene is analyzed.
21+
# multi_person = false # true for trials with multiple participants. If false, only the main person in scene is analyzed (and it runs much faster).
22+
# nb_persons_to_detect = 2 # checked only if multi_person is selected
2223
# frame_rate = 60 # FPS
2324
# frame_range = [] # For example [10,300], or [] for all frames
2425
## N.B.: If you want a time range instead, use frame_range = time_range * frame_rate

Pose2Sim/S00_Demo_Session/S00_P01_MultiParticipants/S00_P01_T02_Participants1-2/Config.toml

+2-1
Original file line numberDiff line numberDiff line change
@@ -18,7 +18,8 @@
1818

1919

2020
[project]
21-
multi_person = true # true for trials with multiple participants. If false, only the main person in scene is analyzed.
21+
multi_person = true # true for trials with multiple participants. If false, only the main person in scene is analyzed (and it runs much faster).
22+
nb_persons_to_detect = 2 # checked only if multi_person is selected
2223
# frame_rate = 60 # FPS
2324
# frame_range = [] # For example [10,300], or [] for all frames
2425
## N.B.: If you want a time range instead, use frame_range = time_range * frame_rate

Pose2Sim/markerAugmentation.py

+7-5
Original file line numberDiff line numberDiff line change
@@ -62,10 +62,8 @@ def get_midhip_data(trc_file):
6262

6363

6464
def augmentTRC(config_dict):
65-
6665
# get parameters from Config.toml
6766
project_dir = config_dict.get('project').get('project_dir')
68-
session_dir = os.path.realpath(os.path.join(project_dir, '..', '..'))
6967
pathInputTRCFile = os.path.realpath(os.path.join(project_dir, 'pose-3d'))
7068
pathOutputTRCFile = os.path.realpath(os.path.join(project_dir, 'pose-3d'))
7169
pose_model = config_dict.get('pose').get('pose_model')
@@ -81,9 +79,6 @@ def augmentTRC(config_dict):
8179
augmenter_model = 'v0.3'
8280
offset = True
8381

84-
if pose_model not in ['BODY_25', 'BODY_25B']:
85-
raise ValueError('Marker augmentation is only supported with OpenPose BODY_25 and BODY_25B models.')
86-
8782
# Apply all trc files
8883
trc_files = [f for f in glob.glob(os.path.join(pathInputTRCFile, '*.trc')) if 'filt' in f and '_LSTM' not in f]
8984
if len(trc_files) == 0:
@@ -119,6 +114,13 @@ def augmentTRC(config_dict):
119114
except:
120115
raise ValueError('Cannot read TRC file. You may need to enable interpolation in Config.toml while triangulating.')
121116

117+
# Verify that all feature markers are present in the TRC file.
118+
feature_markers_joined = set(feature_markers_all[0]+feature_markers_all[1])
119+
trc_markers = set(trc_file.marker_names)
120+
missing_markers = list(feature_markers_joined - trc_markers)
121+
if len(missing_markers) > 0:
122+
raise ValueError(f'Marker augmentation requires {missing_markers} markers and they are not present in the TRC file.')
123+
122124
# Loop over augmenter types to handle separate augmenters for lower and
123125
# upper bodies.
124126
outputs_all = {}

Pose2Sim/personAssociation.py

+30-33
Original file line numberDiff line numberDiff line change
@@ -117,7 +117,7 @@ def min_with_single_indices(L, T):
117117
return minL, argminL, T_minL
118118

119119

120-
def sort_people(Q_kpt_old, Q_kpt, nb_persons_to_detect):
120+
def sort_people(Q_kpt_old, Q_kpt):
121121
'''
122122
Associate persons across frames
123123
Persons' indices are sometimes swapped when changing frame
@@ -205,6 +205,7 @@ def best_persons_and_cameras_combination(config, json_files_framef, personsIDs_c
205205
'''
206206

207207
multi_person = config.get('project').get('multi_person')
208+
nb_persons_to_detect = config.get('project').get('nb_persons_to_detect')
208209
error_threshold_tracking = config.get('personAssociation').get('reproj_error_threshold_association')
209210
likelihood_threshold = config.get('personAssociation').get('likelihood_threshold_association')
210211
min_cameras_for_triangulation = config.get('triangulation').get('min_cameras_for_triangulation')
@@ -213,7 +214,6 @@ def best_persons_and_cameras_combination(config, json_files_framef, personsIDs_c
213214
n_cams = len(json_files_framef)
214215
error_min = np.inf
215216
nb_cams_off = 0 # cameras will be taken-off until the reprojection error is under threshold
216-
217217
errors_below_thresh = []
218218
comb_errors_below_thresh = []
219219
Q_kpt = []
@@ -295,39 +295,39 @@ def best_persons_and_cameras_combination(config, json_files_framef, personsIDs_c
295295
Q_kpt = [Q_comb[np.argmin(error_comb)]]
296296
if errors_below_thresh[0] < error_threshold_tracking:
297297
break
298-
299-
# print('\n', personsIDs_combinations)
300-
# print(errors_below_thresh)
301-
# print(comb_errors_below_thresh)
302-
# print(Q_kpt)
298+
303299
if multi_person:
304-
# sort combinations by error magnitude
305-
errors_below_thresh_sorted = sorted(errors_below_thresh)
306-
sorted_idx = np.array([errors_below_thresh.index(e) for e in errors_below_thresh_sorted])
307-
comb_errors_below_thresh = np.array(comb_errors_below_thresh)[sorted_idx]
308-
Q_kpt = np.array(Q_kpt)[sorted_idx]
309-
# remove combinations with indices used several times for the same person
310-
comb_errors_below_thresh = [c.tolist() for c in comb_errors_below_thresh]
311-
comb = comb_errors_below_thresh.copy()
312-
comb_ok = np.array([comb[0]])
313-
for i, c1 in enumerate(comb):
314-
idx_ok = np.array([not(common_items_in_list(c1, c2)) for c2 in comb[1:]])
315-
try:
316-
comb = np.array(comb[1:])[idx_ok]
317-
comb_ok = np.concatenate((comb_ok, [comb[0]]))
318-
except:
319-
break
320-
sorted_pruned_idx = [i for i, x in enumerate(comb_errors_below_thresh) for c in comb_ok if np.array_equal(x,c,equal_nan=True)]
321-
errors_below_thresh = np.array(errors_below_thresh_sorted)[sorted_pruned_idx].tolist()
322-
comb_errors_below_thresh = np.array(comb_errors_below_thresh)[sorted_pruned_idx].tolist()
323-
Q_kpt = Q_kpt[sorted_pruned_idx].tolist()
300+
if len(errors_below_thresh)>0:
301+
# sort combinations by error magnitude
302+
errors_below_thresh_sorted = sorted(errors_below_thresh)
303+
sorted_idx = np.array([errors_below_thresh.index(e) for e in errors_below_thresh_sorted])
304+
comb_errors_below_thresh = np.array(comb_errors_below_thresh)[sorted_idx]
305+
Q_kpt = np.array(Q_kpt)[sorted_idx]
306+
# remove combinations with indices used several times for the same person
307+
comb_errors_below_thresh = [c.tolist() for c in comb_errors_below_thresh]
308+
comb = comb_errors_below_thresh.copy()
309+
comb_ok = np.array([comb[0]])
310+
for i, c1 in enumerate(comb):
311+
idx_ok = np.array([not(common_items_in_list(c1, c2)) for c2 in comb[1:]])
312+
try:
313+
comb = np.array(comb[1:])[idx_ok]
314+
comb_ok = np.concatenate((comb_ok, [comb[0]]))
315+
except:
316+
break
317+
sorted_pruned_idx = [i for i, x in enumerate(comb_errors_below_thresh) for c in comb_ok if np.array_equal(x,c,equal_nan=True)]
318+
errors_below_thresh = np.array(errors_below_thresh_sorted)[sorted_pruned_idx].tolist()
319+
comb_errors_below_thresh = np.array(comb_errors_below_thresh)[sorted_pruned_idx].tolist()
320+
Q_kpt = Q_kpt[sorted_pruned_idx].tolist()
324321

325322
# Remove indices already used for a person
326323
personsIDs_combinations = np.array([personsIDs_combinations[i] for i in range(len(personsIDs_combinations))
327324
if not np.array(
328325
[personsIDs_combinations[i,j]==comb[j] for comb in comb_errors_below_thresh for j in range(len(comb))]
329326
).any()])
330-
if len(personsIDs_combinations) < len(errors_below_thresh):
327+
if len(errors_below_thresh) >= len(personsIDs_combinations) or len(errors_below_thresh) >= nb_persons_to_detect:
328+
errors_below_thresh = errors_below_thresh[:nb_persons_to_detect]
329+
comb_errors_below_thresh = comb_errors_below_thresh[:nb_persons_to_detect]
330+
Q_kpt = Q_kpt[:nb_persons_to_detect]
331331
break
332332

333333
nb_cams_off += 1
@@ -450,8 +450,6 @@ def track_2d_all(config):
450450
json_tracked_files = [[os.path.join(poseTracked_dir, j_dir, j_file) for j_file in json_files_names[j]] for j, j_dir in enumerate(json_dirs_names)]
451451

452452
# person's tracking
453-
json_files_flatten = [item for sublist in json_files for item in sublist]
454-
nb_persons_to_detect = max([len(json.load(open(json_fname))['people']) for json_fname in json_files_flatten])
455453
f_range = [[min([len(j) for j in json_files])] if frame_range==[] else frame_range][0]
456454
n_cams = len(json_dirs_names)
457455
error_min_tot, cameras_off_tot = [], []
@@ -462,7 +460,7 @@ def track_2d_all(config):
462460
Found {len(P)} cameras in the calibration file,\
463461
and {n_cams} cameras based on the number of pose folders.')
464462

465-
Q_kpt = [np.array([0., 0., 0., 1.])] * nb_persons_to_detect
463+
Q_kpt = [np.array([0., 0., 0., 1.])]
466464
for f in tqdm(range(*f_range)):
467465
# print(f'\nFrame {f}:')
468466
json_files_f = [json_files[c][f] for c in range(n_cams)]
@@ -476,8 +474,7 @@ def track_2d_all(config):
476474
errors_below_thresh, comb_errors_below_thresh, Q_kpt = best_persons_and_cameras_combination(config, json_files_f, personsIDs_comb, P, tracked_keypoint_id, calib_params)
477475

478476
# reID persons across frames by checking the distance from one frame to another
479-
nb_persons_to_detect_frame = max([len(Q_kpt_old), len(Q_kpt)])
480-
Q_kpt, personsIDs_sorted = sort_people(Q_kpt_old, Q_kpt, nb_persons_to_detect_frame)
477+
Q_kpt, personsIDs_sorted = sort_people(Q_kpt_old, Q_kpt)
481478
errors_below_thresh = np.array(errors_below_thresh)[personsIDs_sorted]
482479
comb_errors_below_thresh = np.array(comb_errors_below_thresh)[personsIDs_sorted]
483480

0 commit comments

Comments
 (0)