Coverage for src/serums/distances.py: 0%

186 statements  


1"""Functions for calculating distances.""" 

2 

3import numpy as np 

4import numpy.linalg as la 

5from scipy.optimize import linear_sum_assignment 

6 

7import warnings 

8 

9from serums.enums import SingleObjectDistance 

10 

11 

12def calculate_hellinger(x, y, cov_x, cov_y): 

13 r"""Calculates the hellinger distance between two Gaussian distributions. 

14 

15 Notes 

16 ----- 

17 It is at most 1, and for Gaussian distributions it takes the form 

18 

19 .. math:: 

20 d_H(f,g) &= 1 - \sqrt{\frac{\sqrt{\det{\left[\Sigma_x \Sigma_y\right]}}} 

21 {\det{\left[0.5\Sigma\right]}}} \exp{\epsilon} \\ 

22 \epsilon &= \frac{1}{4}(x - y)^T\Sigma^{-1}(x - y) \\ 

23 \Sigma &= \Sigma_x + \Sigma_y 

24 

25 Parameters 

26 ---------- 

27 x : N x 1 numpy array 

28 first distribution location parameter. 

29 y : N x 1 numpy array 

30 Second distribution location parameter. 

31 cov_x : N x N numpy array 

32 First distribution covariance. 

33 cov_y : N x N numpy array 

34 Second distribution covariance. 

35 

36 Returns 

37 ------- 

38 float 

39 Hellinger distance. 

40 
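
    Examples
    --------
    A minimal illustrative sketch with synthetic inputs (the values below are
    chosen only for demonstration; they are not from the package's tests):

    >>> import numpy as np
    >>> x = np.zeros((2, 1))
    >>> y = np.array([[1.0], [0.0]])
    >>> dist = calculate_hellinger(x, y, np.eye(2), np.eye(2))
    >>> round(dist.item(), 4)
    0.1175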

    """
    cov = cov_x + cov_y
    diff = (x - y).reshape((cov.shape[0], 1))
    epsilon = -0.25 * diff.T @ la.inv(cov) @ diff

    return 1 - np.sqrt(np.sqrt(la.det(cov_x @ cov_y)) / la.det(0.5 * cov)) * np.exp(
        epsilon
    )


def calculate_mahalanobis(x, y, cov):
    r"""Calculates the Mahalanobis distance between a point and a distribution.

    Notes
    -----
    Uses the form

    .. math::
        d(x, y) = \sqrt{(x-y)^T\Sigma_y^{-1}(x-y)}

    Parameters
    ----------
    x : N x 1 numpy array
        Point.
    y : N x 1 numpy array
        Distribution location parameter.
    cov : N x N numpy array
        Distribution covariance.

    Returns
    -------
    float
        Mahalanobis distance.
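
    Examples
    --------
    A minimal illustrative sketch with synthetic inputs (values chosen for
    demonstration only):

    >>> import numpy as np
    >>> x = np.array([[2.0], [0.0]])
    >>> y = np.zeros((2, 1))
    >>> dist = calculate_mahalanobis(x, y, np.eye(2))
    >>> round(dist.item(), 4)
    2.0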

    """
    diff = (x - y).reshape((cov.shape[0], 1))
    # use the inverse covariance, per the formula in the docstring
    return np.sqrt(diff.T @ la.inv(cov) @ diff)


def calculate_ospa(
    est_mat,
    true_mat,
    c,
    p,
    use_empty=True,
    core_method=None,
    true_cov_mat=None,
    est_cov_mat=None,
):
    """Calculates the OSPA distance between the estimates and truth at all timesteps.

    Notes
    -----
    This calculates the Optimal SubPattern Assignment metric for the
    extracted states and the supplied truth point distributions. The
    calculation is based on
    :cite:`Schuhmacher2008_AConsistentMetricforPerformanceEvaluationofMultiObjectFilters`
    with much of the math defined in
    :cite:`Schuhmacher2008_ANewMetricbetweenDistributionsofPointProcesses`.
    A value is calculated for each timestep available in the data. This can
    use different distance metrics as the core distance. The default follows
    the main paper where the Euclidean distance is used. Other options
    include the Hellinger distance
    (see :cite:`Nagappa2011_IncorporatingTrackUncertaintyintotheOSPAMetric`),
    or the Mahalanobis distance.

    Parameters
    ----------
    est_mat : S x T x N numpy array
        Numpy array of state dimension by number of timesteps by number of
        objects. For times/objects which do not exist, use a value of np.nan
        for all state dimensions (i.e. if object 1 at timestep 2 does not
        exist then :code:`est_mat[:, 2, 1] = np.nan * np.ones(state_dim)`).
        This corresponds to the estimated states.
    true_mat : S x T x N numpy array
        Numpy array of state dimension by number of timesteps by number of
        objects. For times/objects which do not exist, use a value of np.nan
        for all state dimensions (i.e. if object 1 at timestep 2 does not
        exist then :code:`true_mat[:, 2, 1] = np.nan * np.ones(state_dim)`).
        This corresponds to the true states.
    c : float
        Distance cutoff for considering a point properly assigned. This
        influences how cardinality errors are penalized. For :math:`p = 1`
        it is the penalty given to a false point estimate.
    p : int
        The power of the distance term. Higher values penalize outliers
        more.
    use_empty : bool, Optional
        Flag indicating if empty values should be set to 0 or nan. The default
        of True is fine for most cases.
    core_method : :class:`.enums.SingleObjectDistance`, Optional
        The main distance measure to use for the localization component.
        The default value of None implies :attr:`.enums.SingleObjectDistance.EUCLIDEAN`.
    true_cov_mat : S x S x T x N numpy array, Optional
        Numpy array of state dimension by state dimension by number of
        timesteps by number of objects. For times/objects which do not exist,
        use a value of np.nan for all state dimensions (i.e. if object 1 at
        timestep 2 does not exist then
        :code:`true_cov_mat[:, :, 2, 1] = np.nan * np.ones((state_dim, state_dim))`).
        This corresponds to the true states; the object order must be
        consistent with the truth matrix, and it is only needed for the core
        method :attr:`.enums.SingleObjectDistance.HELLINGER`. The default
        value is None.
    est_cov_mat : S x S x T x N numpy array, Optional
        Numpy array of state dimension by state dimension by number of
        timesteps by number of objects. For times/objects which do not exist,
        use a value of np.nan for all state dimensions (i.e. if object 1 at
        timestep 2 does not exist then
        :code:`est_cov_mat[:, :, 2, 1] = np.nan * np.ones((state_dim, state_dim))`).
        This corresponds to the estimated states; the object order must be
        consistent with the estimate matrix, and it is only needed for the
        core methods :attr:`.enums.SingleObjectDistance.MAHALANOBIS` and
        :attr:`.enums.SingleObjectDistance.HELLINGER`. The default value is
        None.

    Returns
    -------
    ospa : numpy array
        OSPA values at each timestep.
    localization : numpy array
        Localization component of the OSPA value at each timestep.
    cardinality : numpy array
        Cardinality component of the OSPA value at each timestep.
    core_method : :class:`.enums.SingleObjectDistance`
        Method used as the core distance statistic.
    c : float
        Maximum distance value used.
    p : int
        Power of the distance term used.
    distances : Ne x Nt x T numpy array
        Numpy array of distances; rows are estimated objects, columns are truth.
    e_exists : Ne x T numpy array
        Bools indicating if the estimated object exists at that time.
    t_exists : Nt x T numpy array
        Bools indicating if the true object exists at that time.
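
    Examples
    --------
    A minimal illustrative sketch (synthetic state histories built here only
    to show the expected array layout; two true objects, two estimates, one
    estimate missing at the final timestep):

    >>> import numpy as np
    >>> true_mat = np.zeros((2, 3, 2))
    >>> true_mat[:, :, 1] += 10.0
    >>> est_mat = true_mat + 0.5
    >>> est_mat[:, 2, 1] = np.nan
    >>> ospa, loc, card = calculate_ospa(est_mat, true_mat, 5.0, 1)[:3]
    >>> ospa.shape
    (3,)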

    """
    # error checking on optional input arguments
    if core_method is None:
        core_method = SingleObjectDistance.EUCLIDEAN

    elif core_method is SingleObjectDistance.MAHALANOBIS and est_cov_mat is None:
        msg = (
            "Must give estimated covariances to calculate {:s} OSPA. Using {:s} instead"
        )
        warnings.warn(msg.format(core_method, SingleObjectDistance.EUCLIDEAN))
        core_method = SingleObjectDistance.EUCLIDEAN

    elif core_method is SingleObjectDistance.HELLINGER and (
        true_cov_mat is None or est_cov_mat is None
    ):
        msg = (
            "Must give true and estimated covariances to calculate {:s} OSPA. "
            "Using {:s} instead"
        )
        warnings.warn(msg.format(core_method, SingleObjectDistance.EUCLIDEAN))
        core_method = SingleObjectDistance.EUCLIDEAN

    if core_method is SingleObjectDistance.HELLINGER:
        c = np.min([1, c]).item()

    # setup data structures
    t_exists = np.logical_not(np.isnan(true_mat[0, :, :])).T
    e_exists = np.logical_not(np.isnan(est_mat[0, :, :])).T

    # compute distance for all permutations
    num_timesteps = true_mat.shape[1]
    nt_objs = true_mat.shape[2]
    ne_objs = est_mat.shape[2]
    distances = np.nan * np.ones((ne_objs, nt_objs, num_timesteps))
    comb = np.array(
        np.meshgrid(np.arange(ne_objs, dtype=int), np.arange(nt_objs, dtype=int))
    ).T.reshape(-1, 2)
    e_inds = comb[:, 0]
    t_inds = comb[:, 1]
    shape = (ne_objs, nt_objs)

    localization = np.nan * np.ones(num_timesteps)
    cardinality = np.nan * np.ones(num_timesteps)

    for tt in range(num_timesteps):
        # use proper core method
        if core_method is SingleObjectDistance.EUCLIDEAN:
            distances[:, :, tt] = np.sqrt(
                np.sum((true_mat[:, tt, t_inds] - est_mat[:, tt, e_inds]) ** 2, axis=0)
            ).reshape(shape)

        elif core_method is SingleObjectDistance.MANHATTAN:
            distances[:, :, tt] = np.sum(
                np.abs(true_mat[:, tt, t_inds] - est_mat[:, tt, e_inds]), axis=0
            ).reshape(shape)

        elif core_method is SingleObjectDistance.HELLINGER:
            for row, col in zip(e_inds, t_inds):
                if not (e_exists[row, tt] and t_exists[col, tt]):
                    continue

                distances[row, col, tt] = calculate_hellinger(
                    est_mat[:, tt, row],
                    true_mat[:, tt, col],
                    est_cov_mat[:, :, tt, row],
                    true_cov_mat[:, :, tt, col],
                )

        elif core_method is SingleObjectDistance.MAHALANOBIS:
            for row, col in zip(e_inds, t_inds):
                if not (e_exists[row, tt] and t_exists[col, tt]):
                    continue

                distances[row, col, tt] = calculate_mahalanobis(
                    est_mat[:, tt, row],
                    true_mat[:, tt, col],
                    est_cov_mat[:, :, tt, row],
                )

        else:
            warnings.warn(
                "Single-object distance {} is not implemented. SKIPPING".format(
                    core_method
                )
            )
            core_method = None
            break

        # check for mismatch
        one_exist = np.logical_xor(e_exists[:, [tt]], t_exists[:, [tt]].T)
        empty = np.logical_and(
            np.logical_not(e_exists[:, [tt]]), np.logical_not(t_exists[:, [tt]]).T
        )

        distances[one_exist, tt] = c
        if use_empty:
            distances[empty, tt] = 0
        else:
            distances[empty, tt] = np.nan

        distances[:, :, tt] = np.minimum(distances[:, :, tt], c)

        m = np.sum(e_exists[:, tt])
        n = np.sum(t_exists[:, tt])
        if n.astype(int) == 0 and m.astype(int) == 0:
            localization[tt] = 0
            cardinality[tt] = 0
            continue

        if n.astype(int) == 0 or m.astype(int) == 0:
            localization[tt] = 0
            cardinality[tt] = c
            continue

        cont_sub = distances[e_exists[:, tt], :, tt][:, t_exists[:, tt]] ** p
        row_ind, col_ind = linear_sum_assignment(cont_sub)
        cost = cont_sub[row_ind, col_ind].sum()

        inv_max_card = 1.0 / np.max([n, m])
        card_diff = np.abs(n - m)
        inv_p = 1.0 / p
        c_p = c**p
        localization[tt] = (inv_max_card * cost) ** inv_p
        cardinality[tt] = (inv_max_card * c_p * card_diff) ** inv_p

    ospa = localization + cardinality

    return (
        ospa,
        localization,
        cardinality,
        core_method,
        c,
        p,
        distances,
        e_exists,
        t_exists,
    )


def calculate_ospa2(
    est_mat,
    true_mat,
    c,
    p,
    win_len,
    core_method=SingleObjectDistance.MANHATTAN,
    true_cov_mat=None,
    est_cov_mat=None,
):
    """Calculates the OSPA(2) distance between the estimates and truth at all timesteps.

    Notes
    -----
    This calculates the OSPA-on-OSPA, or OSPA(2), metric as defined by
    :cite:`Beard2017_OSPA2UsingtheOSPAMetrictoEvaluateMultiTargetTrackingPerformance`
    and further explained in :cite:`Beard2020_ASolutionforLargeScaleMultiObjectTracking`.
    It can be thought of as the time-averaged per-track error between the true
    and estimated tracks. The inner OSPA calculation can use any suitable
    single-object distance metric from :func:`.calculate_ospa`.

    Parameters
    ----------
    est_mat : S x T x N numpy array
        Numpy array of state dimension by number of timesteps by number of
        objects. For times/objects which do not exist, use a value of np.nan
        for all state dimensions (i.e. if object 1 at timestep 2 does not
        exist then :code:`est_mat[:, 2, 1] = np.nan * np.ones(state_dim)`).
        This corresponds to the estimated states.
    true_mat : S x T x N numpy array
        Numpy array of state dimension by number of timesteps by number of
        objects. For times/objects which do not exist, use a value of np.nan
        for all state dimensions (i.e. if object 1 at timestep 2 does not
        exist then :code:`true_mat[:, 2, 1] = np.nan * np.ones(state_dim)`).
        This corresponds to the true states.
    c : float
        Distance cutoff for considering a point properly assigned. This
        influences how cardinality errors are penalized. For :math:`p = 1`
        it is the penalty given to a false point estimate.
    p : int
        The power of the distance term. Higher values penalize outliers
        more.
    win_len : int
        Number of timesteps to average the OSPA over.
    core_method : :class:`.enums.SingleObjectDistance`, Optional
        The main distance measure to use for the localization component of the
        inner OSPA calculation. The default value is
        :attr:`.enums.SingleObjectDistance.MANHATTAN`; a value of None implies
        :attr:`.enums.SingleObjectDistance.EUCLIDEAN`.
    true_cov_mat : S x S x T x N numpy array, Optional
        Numpy array of state dimension by state dimension by number of
        timesteps by number of objects. For times/objects which do not exist,
        use a value of np.nan for all state dimensions (i.e. if object 1 at
        timestep 2 does not exist then
        :code:`true_cov_mat[:, :, 2, 1] = np.nan * np.ones((state_dim, state_dim))`).
        This corresponds to the true states; the object order must be
        consistent with the truth matrix, and it is only needed for the core
        method :attr:`.enums.SingleObjectDistance.HELLINGER`. The default
        value is None.
    est_cov_mat : S x S x T x N numpy array, Optional
        Numpy array of state dimension by state dimension by number of
        timesteps by number of objects. For times/objects which do not exist,
        use a value of np.nan for all state dimensions (i.e. if object 1 at
        timestep 2 does not exist then
        :code:`est_cov_mat[:, :, 2, 1] = np.nan * np.ones((state_dim, state_dim))`).
        This corresponds to the estimated states; the object order must be
        consistent with the estimate matrix, and it is only needed for the
        core methods :attr:`.enums.SingleObjectDistance.MAHALANOBIS` and
        :attr:`.enums.SingleObjectDistance.HELLINGER`. The default value is
        None.

    Returns
    -------
    ospa2 : numpy array
        OSPA(2) values at each timestep.
    localization : numpy array
        Localization component of the OSPA(2) value at each timestep.
    cardinality : numpy array
        Cardinality component of the OSPA(2) value at each timestep.
    core_method : :class:`.enums.SingleObjectDistance`
        Method used as the core distance statistic.
    c : float
        Maximum distance value used.
    p : int
        Power of the distance term used.
    win_len : int
        Window length used.
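
    Examples
    --------
    A minimal illustrative sketch (synthetic tracks and a two-timestep
    averaging window; the values are for demonstration only):

    >>> import numpy as np
    >>> true_mat = np.zeros((2, 4, 2))
    >>> true_mat[:, :, 1] += 10.0
    >>> est_mat = true_mat + 0.5
    >>> out = calculate_ospa2(est_mat, true_mat, 5.0, 1, 2)
    >>> ospa2, loc, card = out[0], out[1], out[2]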

    """
    # Note p is redundant here so set = 1
    (core_method, c, _, distances, e_exists, t_exists) = calculate_ospa(
        est_mat,
        true_mat,
        c,
        1,
        use_empty=False,
        core_method=core_method,
        true_cov_mat=true_cov_mat,
        est_cov_mat=est_cov_mat,
    )[3:9]

    num_timesteps = distances.shape[2]
    inv_p = 1.0 / p
    c_p = c**p

    localization = np.nan * np.ones(num_timesteps)
    cardinality = np.nan * np.ones(num_timesteps)

    for tt in range(num_timesteps):
        win_idx = np.array(
            [ii for ii in range(max(tt - win_len + 1, 0), tt + 1)], dtype=int
        )

        # find matrix of time averaged OSPA between tracks
        with warnings.catch_warnings():
            warnings.filterwarnings(action="ignore", message="Mean of empty slice")
            track_dist = np.nanmean(distances[:, :, win_idx], axis=2)

        track_dist[np.isnan(track_dist)] = 0

        valid_rows = np.any(e_exists[:, win_idx], axis=1)
        valid_cols = np.any(t_exists[:, win_idx], axis=1)
        m = np.sum(valid_rows)
        n = np.sum(valid_cols)

        if n.astype(int) <= 0 and m.astype(int) <= 0:
            localization[tt] = 0
            cardinality[tt] = 0
            continue

        if n.astype(int) <= 0 or m.astype(int) <= 0:
            cost = 0
        else:
            inds = np.logical_and(
                valid_rows.reshape((valid_rows.size, 1)),
                valid_cols.reshape((1, valid_cols.size)),
            )
            track_dist = (track_dist[inds] ** p).reshape((m.astype(int), n.astype(int)))
            row_ind, col_ind = linear_sum_assignment(track_dist)
            cost = track_dist[row_ind, col_ind].sum()

        max_nm = np.max([n, m])
        localization[tt] = (cost / max_nm) ** inv_p
        cardinality[tt] = (c_p * np.abs(m - n) / max_nm) ** inv_p

    ospa2 = localization + cardinality

    return ospa2, localization, cardinality, core_method, c, p, win_len


# TODO: Rewrite function as needed for GOSPA calculation, need to include parameter a
def calculate_gospa(
    est_mat,
    true_mat,
    c,
    p,
    a,
    use_empty=True,
    core_method=None,
    true_cov_mat=None,
    est_cov_mat=None,
):
    """Calculates the GOSPA distance between the estimates and truth at all timesteps.

    Notes
    -----
    This calculates the Generalized Optimal SubPattern Assignment metric for
    the extracted states and the supplied truth point distributions. The
    calculation is based on
    :cite:`Rahmathullah2017_GeneralizedOptimalSubPatternAssignmentMetric`.

    Parameters
    ----------
    est_mat : S x T x N numpy array
        Numpy array of state dimension by number of timesteps by number of
        objects. For times/objects which do not exist, use a value of np.nan
        for all state dimensions (i.e. if object 1 at timestep 2 does not
        exist then :code:`est_mat[:, 2, 1] = np.nan * np.ones(state_dim)`).
        This corresponds to the estimated states.
    true_mat : S x T x N numpy array
        Numpy array of state dimension by number of timesteps by number of
        objects. For times/objects which do not exist, use a value of np.nan
        for all state dimensions (i.e. if object 1 at timestep 2 does not
        exist then :code:`true_mat[:, 2, 1] = np.nan * np.ones(state_dim)`).
        This corresponds to the true states.
    c : float
        Distance cutoff for considering a point properly assigned. This
        influences how cardinality errors are penalized. For :math:`p = 1`
        it is the penalty given to a false point estimate.
    p : int
        The power of the distance term. Higher values penalize outliers
        more.
    a : int
        The normalization factor of the distance term. It appropriately
        penalizes missed or false detections of tracks rather than
        normalizing by the total maximum cardinality.
    use_empty : bool, Optional
        Flag indicating if empty values should be set to 0 or nan. The default
        of True is fine for most cases.
    core_method : :class:`.enums.SingleObjectDistance`, Optional
        The main distance measure to use for the localization component.
        The default value of None implies :attr:`.enums.SingleObjectDistance.EUCLIDEAN`.
    true_cov_mat : S x S x T x N numpy array, Optional
        Numpy array of state dimension by state dimension by number of
        timesteps by number of objects. For times/objects which do not exist,
        use a value of np.nan for all state dimensions (i.e. if object 1 at
        timestep 2 does not exist then
        :code:`true_cov_mat[:, :, 2, 1] = np.nan * np.ones((state_dim, state_dim))`).
        This corresponds to the true states; the object order must be
        consistent with the truth matrix, and it is only needed for the core
        method :attr:`.enums.SingleObjectDistance.HELLINGER`. The default
        value is None.
    est_cov_mat : S x S x T x N numpy array, Optional
        Numpy array of state dimension by state dimension by number of
        timesteps by number of objects. For times/objects which do not exist,
        use a value of np.nan for all state dimensions (i.e. if object 1 at
        timestep 2 does not exist then
        :code:`est_cov_mat[:, :, 2, 1] = np.nan * np.ones((state_dim, state_dim))`).
        This corresponds to the estimated states; the object order must be
        consistent with the estimate matrix, and it is only needed for the
        core methods :attr:`.enums.SingleObjectDistance.MAHALANOBIS` and
        :attr:`.enums.SingleObjectDistance.HELLINGER`. The default value is
        None.

    Returns
    -------
    ospa : numpy array
        GOSPA values at each timestep.
    localization : numpy array
        Localization component of the GOSPA value at each timestep.
    cardinality : numpy array
        Cardinality component of the GOSPA value at each timestep.
    core_method : :class:`.enums.SingleObjectDistance`
        Method used as the core distance statistic.
    c : float
        Maximum distance value used.
    p : int
        Power of the distance term used.
    a : int
        Normalization factor used.
    distances : Ne x Nt x T numpy array
        Numpy array of distances; rows are estimated objects, columns are truth.
    e_exists : Ne x T numpy array
        Bools indicating if the estimated object exists at that time.
    t_exists : Nt x T numpy array
        Bools indicating if the true object exists at that time.
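
    Examples
    --------
    A minimal illustrative call sketch (synthetic inputs; the normalization
    factor ``a`` is set to 2 here purely for illustration, and the function is
    still marked as a work in progress above, so treat results as provisional):

    >>> import numpy as np
    >>> true_mat = np.zeros((2, 3, 2))
    >>> true_mat[:, :, 1] += 10.0
    >>> est_mat = true_mat + 0.5
    >>> gospa, loc, card = calculate_gospa(est_mat, true_mat, 5.0, 1, 2)[:3]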

    """
    # error checking on optional input arguments
    if core_method is None:
        core_method = SingleObjectDistance.EUCLIDEAN

    elif core_method is SingleObjectDistance.MAHALANOBIS and est_cov_mat is None:
        msg = (
            "Must give estimated covariances to calculate {:s} GOSPA. Using {:s} instead"
        )
        warnings.warn(msg.format(core_method, SingleObjectDistance.EUCLIDEAN))
        core_method = SingleObjectDistance.EUCLIDEAN

    elif core_method is SingleObjectDistance.HELLINGER and (
        true_cov_mat is None or est_cov_mat is None
    ):
        msg = (
            "Must give true and estimated covariances to calculate {:s} GOSPA. "
            "Using {:s} instead"
        )
        warnings.warn(msg.format(core_method, SingleObjectDistance.EUCLIDEAN))
        core_method = SingleObjectDistance.EUCLIDEAN

    if core_method is SingleObjectDistance.HELLINGER:
        c = np.min([1, c]).item()

    # setup data structures
    t_exists = np.logical_not(np.isnan(true_mat[0, :, :])).T
    e_exists = np.logical_not(np.isnan(est_mat[0, :, :])).T

    # compute distance for all permutations
    num_timesteps = true_mat.shape[1]
    nt_objs = true_mat.shape[2]
    ne_objs = est_mat.shape[2]
    distances = np.nan * np.ones((ne_objs, nt_objs, num_timesteps))
    comb = np.array(
        np.meshgrid(np.arange(ne_objs, dtype=int), np.arange(nt_objs, dtype=int))
    ).T.reshape(-1, 2)
    e_inds = comb[:, 0]
    t_inds = comb[:, 1]
    shape = (ne_objs, nt_objs)

    localization = np.nan * np.ones(num_timesteps)
    cardinality = np.nan * np.ones(num_timesteps)

    for tt in range(num_timesteps):
        # use proper core method
        if core_method is SingleObjectDistance.EUCLIDEAN:
            distances[:, :, tt] = np.sqrt(
                np.sum((true_mat[:, tt, t_inds] - est_mat[:, tt, e_inds]) ** 2, axis=0)
            ).reshape(shape)

        elif core_method is SingleObjectDistance.MANHATTAN:
            distances[:, :, tt] = np.sum(
                np.abs(true_mat[:, tt, t_inds] - est_mat[:, tt, e_inds]), axis=0
            ).reshape(shape)

        elif core_method is SingleObjectDistance.HELLINGER:
            for row, col in zip(e_inds, t_inds):
                if not (e_exists[row, tt] and t_exists[col, tt]):
                    continue

                distances[row, col, tt] = calculate_hellinger(
                    est_mat[:, tt, row],
                    true_mat[:, tt, col],
                    est_cov_mat[:, :, tt, row],
                    true_cov_mat[:, :, tt, col],
                )

        elif core_method is SingleObjectDistance.MAHALANOBIS:
            for row, col in zip(e_inds, t_inds):
                if not (e_exists[row, tt] and t_exists[col, tt]):
                    continue

                distances[row, col, tt] = calculate_mahalanobis(
                    est_mat[:, tt, row],
                    true_mat[:, tt, col],
                    est_cov_mat[:, :, tt, row],
                )

        else:
            warnings.warn(
                "Single-object distance {} is not implemented. SKIPPING".format(
                    core_method
                )
            )
            core_method = None
            break

        # check for mismatch
        one_exist = np.logical_xor(e_exists[:, [tt]], t_exists[:, [tt]].T)
        empty = np.logical_and(
            np.logical_not(e_exists[:, [tt]]), np.logical_not(t_exists[:, [tt]]).T
        )

        distances[one_exist, tt] = c
        if use_empty:
            distances[empty, tt] = 0
        else:
            distances[empty, tt] = np.nan

        distances[:, :, tt] = np.minimum(distances[:, :, tt], c)

        m = np.sum(e_exists[:, tt])
        n = np.sum(t_exists[:, tt])
        if n.astype(int) == 0 and m.astype(int) == 0:
            localization[tt] = 0
            cardinality[tt] = 0
            continue

        if n.astype(int) == 0 or m.astype(int) == 0:
            localization[tt] = 0
            cardinality[tt] = c
            continue

        cont_sub = distances[e_exists[:, tt], :, tt][:, t_exists[:, tt]] ** p
        row_ind, col_ind = linear_sum_assignment(cont_sub)
        cost = cont_sub[row_ind, col_ind].sum()

        card_diff = np.abs(n - m)
        inv_p = 1.0 / p
        c_p = c**p
        localization[tt] = cost**inv_p
        cardinality[tt] = ((c_p / a) * card_diff) ** inv_p

    ospa = localization + cardinality

    return (
        ospa,
        localization,
        cardinality,
        core_method,
        c,
        p,
        a,
        distances,
        e_exists,
        t_exists,
    )