Coverage for sarvey/utils.py: 67%

285 statements  

coverage.py v7.6.0, created at 2024-10-17 12:36 +0000

1#!/usr/bin/env python 

2 

3# SARvey - A multitemporal InSAR time series tool for the derivation of displacements. 

4# 

5# Copyright (C) 2021-2024 Andreas Piter (IPI Hannover, piter@ipi.uni-hannover.de) 

6# 

7# This software was developed together with FERN.Lab (fernlab@gfz-potsdam.de) in the context 

8# of the SAR4Infra project with funds of the German Federal Ministry for Digital and 

9# Transport and contributions from Landesamt fuer Vermessung und Geoinformation 

10# Schleswig-Holstein and Landesbetrieb Strassenbau und Verkehr Schleswig-Holstein. 

11# 

12# This program is free software: you can redistribute it and/or modify it under 

13# the terms of the GNU General Public License as published by the Free Software 

14# Foundation, either version 3 of the License, or (at your option) any later 

15# version. 

16# 

17# Important: This package uses PyMaxFlow. The core of the PyMaxFlow library is the C++

18# implementation by Vladimir Kolmogorov. It is also licensed under the GPL, but it REQUIRES that you 

19# cite [BOYKOV04] (see LICENSE) in any resulting publication if you use this code for research purposes. 

20# This requirement extends to SARvey. 

21# 

22# This program is distributed in the hope that it will be useful, but WITHOUT 

23# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS 

24# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more 

25# details. 

26# 

27# You should have received a copy of the GNU Lesser General Public License along 

28# with this program. If not, see <https://www.gnu.org/licenses/>. 

29 

30"""Utils module for SARvey.""" 

31import multiprocessing 

32import time 

33from os.path import exists, join 

34 

35import numpy as np 

36from scipy.sparse.linalg import lsqr 

37from typing import Union 

38from logging import Logger 

39 

40from mintpy.utils import ptime 

41 

42from sarvey.objects import Points, NetworkParameter, Network, BaseStack, AmplitudeImage 

43from sarvey.ifg_network import IfgNetwork 

44 

45 

46def convertBboxToBlock(*, bbox: tuple): 

47 """ConvertBboxToBlock. read box and write2hdf5_block block have different definitions.""" 

48 block = None 

49 if len(bbox) == 4: 

50 block = (bbox[1], bbox[3], bbox[0], bbox[2]) 

51 if len(bbox) == 6: 

52 block = (bbox[2], bbox[5], bbox[1], bbox[4], bbox[0], bbox[3]) 

53 return block 
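# Illustrative example (not part of the original module): a 2D read box given
# as (x0, y0, x1, y1) is reordered to the (y0, y1, x0, x1) layout expected by
# write2hdf5_block, e.g. convertBboxToBlock(bbox=(10, 20, 110, 220)) returns
# (20, 220, 10, 110).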

54 

55 

56def invertIfgNetwork(*, phase: np.ndarray, num_points: int, ifg_net_obj: IfgNetwork, num_cores: int, ref_idx: int, 

57 logger: Logger): 

58 """Wrap the ifg network inversion running in parallel. 

59 

60 Parameters 

61 ---------- 

62 phase: np.ndarray 

63 interferometric phases of the points. 

64 num_points: int 

65 number of points. 

66 ifg_net_obj: IfgNetwork 

67 instance of class IfgNetwork. 

68 num_cores: int 

69 number of cores to use for multiprocessing. 

70 ref_idx: int 

71 index of temporal reference date for interferogram network inversion. 

72 logger: Logger 

73 logging handler 

74 

75 Returns 

76 ------- 

77 phase_ts: np.ndarray 

78 inverted phase time series of the points. 

79 """ 

80 msg = "#" * 10 

81 msg += " INVERT IFG NETWORK " 

82 msg += "#" * 10 

83 logger.info(msg=msg) 

84 

85 start_time = time.time() 

86 design_mat = ifg_net_obj.getDesignMatrix() 

87 

88 if num_cores == 1: 

89 args = (np.arange(num_points), num_points, phase, design_mat, ifg_net_obj.num_images, ref_idx) 

90 idx_range, phase_ts = launchInvertIfgNetwork(parameters=args) 

91 else: 

92 # use only 10 percent of the cores, because scipy.sparse.linalg.lsqr is already running in parallel 

93 num_cores = max(1, int(np.floor(num_cores / 10)))  # keep at least one worker process

94 logger.info(msg="start parallel processing with {} cores.".format(num_cores)) 

95 pool = multiprocessing.Pool(processes=num_cores) 

96 

97 phase_ts = np.zeros((num_points, ifg_net_obj.num_images), dtype=np.float32) 

98 

99 num_cores = num_points if num_cores > num_points else num_cores # avoid using more cores than there are points

100 idx = splitDatasetForParallelProcessing(num_samples=num_points, num_cores=num_cores) 

101 args = [( 

102 idx_range, 

103 idx_range.shape[0], 

104 phase[idx_range, :], 

105 design_mat, 

106 ifg_net_obj.num_images, 

107 ref_idx) for idx_range in idx] 

108 

109 results = pool.map(func=launchInvertIfgNetwork, iterable=args) 

110 

111 # retrieve results 

112 for i, phase_i in results: 

113 phase_ts[i, :] = phase_i 

114 

115 m, s = divmod(time.time() - start_time, 60) 

116 logger.debug(msg='time used: {:02.0f} mins {:02.1f} secs.'.format(m, s)) 

117 return phase_ts 

118 

119 

120def launchInvertIfgNetwork(parameters: tuple): 

121 """Launch the inversion of the interferogram network in parallel. 

122 

123 Parameters 

124 ---------- 

125 parameters: tuple 

126 parameters for inversion 

127 

128 Tuple contains: 

129 idx_range: np.ndarray 

130 range of point indices to be processed 

131 num_points: int 

132 number of points 

133 phase: np.ndarray 

134 interferometric phases of the points 

135 design_mat: np.ndarray 

136 design matrix 

137 num_images: int 

138 number of images 

139 ref_idx: int 

140 index of temporal reference date for interferogram network inversion 

141 

142 Returns 

143 ------- 

144 idx_range: np.ndarray 

145 range of indices of the points processed 

146 phase_ts: np.ndarray 

147 inverted phase time series 

148 """ 

149 # Unpack the parameters 

150 (idx_range, num_points, phase, design_mat, num_images, ref_idx) = parameters 

151 

152 design_mat = np.delete(arr=design_mat, obj=ref_idx, axis=1) # remove reference date 

153 idx = np.ones((num_images,), dtype=np.bool_) 

154 idx[ref_idx] = False 

155 phase_ts = np.zeros((num_points, num_images), dtype=np.float32) 

156 

157 prog_bar = ptime.progressBar(maxValue=num_points) 

158 for i in range(num_points): 

159 phase_ts[i, idx] = lsqr(design_mat, phase[i, :])[0] 

160 prog_bar.update(value=i + 1, every=np.ceil(num_points / 100), 

161 suffix='{}/{} points'.format(i + 1, num_points)) 

162 

163 return idx_range, phase_ts 
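# Model solved per point above (sketch): with the reference-date column removed
# from the design matrix A (shape: num_ifgs x (num_images - 1)), lsqr finds the
# phase time series x that best explains the interferometric phases b, i.e.
# A @ x ~= b, with the phase at the reference date implicitly fixed to zero.
# For a hypothetical single-reference network with images (t0, t1, t2) and
# interferograms (t0-t1, t0-t2), A could look like (sign convention assumed;
# the actual matrix comes from IfgNetwork.getDesignMatrix()):
#   [[-1,  1,  0],
#    [-1,  0,  1]]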

164 

165 

166def predictPhase(*, obj: [NetworkParameter, Points], vel: np.ndarray = None, demerr: np.ndarray = None, 

167 ifg_space: bool = True, logger: Logger): 

168 """Predicts the phase time series based on the estimated parameters DEM error and mean velocity. 

169 

170 Can be used for both arc phase and point phase. Wrapper function for 'predictPhaseCore(...)'.

171 

172 Parameters 

173 ---------- 

174 obj: Union[NetworkParameter, Points] 

175 object of either 'NetworkParameter' or 'Points'. If an instance of 'Points' is given, 'vel' and 'demerr'

176 also need to be specified. 

177 vel: np.ndarray 

178 velocity for each sample (default: None) 

179 demerr: np.ndarray 

180 dem error for each sample (default: None). 

181 ifg_space: bool 

182 set to True if the phase shall be predicted in interferogram space. If False, phase will be predicted 

183 in acquisition space. (default: True) 

184 logger: Logger 

185 Logging handler. 

186 

187 Returns 

188 ------- 

189 pred_phase_demerr: np.ndarray 

190 predicted phase from DEM error 

191 pred_phase_vel: np.ndarray 

192 predicted phase from velocity 

193 

194 Raises 

195 ------ 

196 ValueError 

197 vel or demerr is none 

198 TypeError 

199 obj is of the wrong type 

200 """ 

201 if isinstance(obj, Points): 

202 if (vel is None) or (demerr is None): 

203 logger.error(msg="Both 'vel' and 'demerr' are needed if 'obj' is instance of class 'points'!") 

204 raise ValueError 

205 pred_phase_demerr, pred_phase_vel = predictPhaseCore( 

206 ifg_net_obj=obj.ifg_net_obj, 

207 wavelength=obj.wavelength, 

208 vel=vel, 

209 demerr=demerr, 

210 slant_range=obj.slant_range, 

211 loc_inc=obj.loc_inc, 

212 ifg_space=ifg_space 

213 ) 

214 elif isinstance(obj, NetworkParameter): 

215 pred_phase_demerr, pred_phase_vel = predictPhaseCore( 

216 ifg_net_obj=obj.ifg_net_obj, 

217 wavelength=obj.wavelength, 

218 vel=obj.vel, 

219 demerr=obj.demerr, 

220 slant_range=obj.slant_range, 

221 loc_inc=obj.loc_inc, 

222 ifg_space=ifg_space 

223 ) 

224 else: 

225 logger.error(msg="'obj' must be instance of 'points' or 'networkParameter'!") 

226 raise TypeError 

227 return pred_phase_demerr, pred_phase_vel 
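# Usage sketch (hypothetical variables; 'point_obj' is assumed to be a Points
# instance with 'vel' and 'demerr' estimated beforehand, e.g. via
# estimateParameters):
#   pred_demerr, pred_vel = predictPhase(obj=point_obj, vel=vel, demerr=demerr,
#                                        ifg_space=True, logger=logger)
#   residual_phase = point_obj.phase - pred_demerr - pred_vel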

228 

229 

230def predictPhaseCore(*, ifg_net_obj: IfgNetwork, wavelength: float, vel: np.ndarray, 

231 demerr: np.ndarray, slant_range: np.ndarray, loc_inc: np.ndarray, ifg_space: bool = True): 

232 """Predicts the phase time series based on the estimated parameters DEM error and mean velocity. 

233 

234 Can be used for both arc phase and point phase.

235 

236 Parameters 

237 ---------- 

238 ifg_net_obj: IfgNetwork 

239 instance of class IfgNetwork

240 wavelength: float 

241 wavelength in [m] 

242 vel: np.ndarray 

243 velocity for each sample 

244 demerr: np.ndarray 

245 dem error for each sample 

246 slant_range: np.ndarray 

247 slant range distance for each sample 

248 loc_inc: np.ndarray 

249 local incidence angle for each sample 

250 ifg_space: bool 

251 set to True if the phase shall be predicted in interferogram space. If False, phase will be 

252 predicted in acquisition space. (default: True) 

253 

254 Returns 

255 ------- 

256 pred_phase_demerr: np.ndarray 

257 predicted phase from DEM error 

258 pred_phase_vel: np.ndarray 

259 predicted phase from velocity 

260 """ 

261 factor = 4 * np.pi / wavelength 

262 

263 if ifg_space: 

264 tbase = ifg_net_obj.tbase_ifg 

265 pbase = ifg_net_obj.pbase_ifg 

266 else: 

267 tbase = ifg_net_obj.tbase 

268 pbase = ifg_net_obj.pbase 

269 

270 # compute phase due to DEM error 

271 pred_phase_demerr = factor * pbase[:, np.newaxis] / (slant_range * np.sin(loc_inc))[np.newaxis, :] * demerr 

272 

273 # compute phase due to velocity 

274 pred_phase_vel = factor * tbase[:, np.newaxis] * vel 

275 

276 return pred_phase_demerr.T, pred_phase_vel.T 
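# Underlying model (sketch): with wavelength lambda, perpendicular baseline
# b_perp, slant range R, local incidence angle theta, temporal baseline t,
# DEM error dh and mean velocity v, the two terms computed above are
#   phi_demerr = (4 * pi / lambda) * b_perp / (R * sin(theta)) * dh
#   phi_vel    = (4 * pi / lambda) * t * v
# evaluated per sample and per interferogram (ifg_space=True) or per
# acquisition (ifg_space=False).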

277 

278 

279def predictPhaseSingle(*, demerr: float, vel: float, slant_range: float, loc_inc: float, 

280 ifg_net_obj: IfgNetwork, wavelength: float, only_vel: bool = False, ifg_space: bool = True): 

281 """Predict the phase time series for only one point based on the estimated parameters DEM error and mean velocity. 

282 

283 Can be used for both arc phase and point phase.

284 

285 Parameters 

286 ---------- 

287 demerr: float 

288 DEM error (scalar) 

289 vel: float 

290 mean velocity (scalar) 

291 slant_range: float 

292 slant range distance in [m] (scalar) 

293 loc_inc: float 

294 local incidence angle in [rad] (scalar) 

295 ifg_net_obj: IfgNetwork 

296 object of class IfgNetwork 

297 wavelength: float 

298 radar wavelength in [m] 

299 only_vel: bool 

300 set to True if only the mean velocity shall be predicted (default: False) 

301 ifg_space: bool 

302 set to True if the phase shall be predicted in interferogram space. If False, phase will be predicted in 

303 acquisition space. (default: True) 

304 

305 Returns 

306 ------- 

307 pred_phase: np.ndarray 

308 predicted phase 

309 """ 

310 factor = 4 * np.pi / wavelength 

311 

312 if ifg_space: 

313 tbase = ifg_net_obj.tbase_ifg 

314 pbase = ifg_net_obj.pbase_ifg 

315 num_time = ifg_net_obj.num_ifgs 

316 else: 

317 tbase = ifg_net_obj.tbase 

318 pbase = ifg_net_obj.pbase 

319 num_time = ifg_net_obj.num_images 

320 

321 if only_vel: 

322 a = np.zeros((num_time, 1)) 

323 else: 

324 a = np.zeros((num_time, 2)) 

325 a[:, 0] = factor * tbase 

326 

327 if only_vel: 

328 pred_phase = np.matmul(a, np.array([vel])).reshape((-1,)) 

329 else: 

330 a[:, 1] = factor * pbase / (slant_range * np.sin(loc_inc)) 

331 pred_phase = np.matmul(a, np.array([vel, demerr])).reshape((-1,)) 

332 

333 return pred_phase 

334 

335 

336def estimateParameters(*, obj: Union[Points, Network], estimate_ref_atmo: bool = True, ifg_space: bool = True): 

337 """Estimate the parameters either per point or per arc. 

338 

339 Parameters are velocity and DEM error (or additionally reference APS). 

340 

341 Parameters 

342 ---------- 

343 obj: Union[Points, Network] 

344 object of either 'Network', 'NetworkParameter', 'Points', or 'PointsParameters'

345 estimate_ref_atmo: bool 

346 set to True if the APS of the reference date shall be estimated. Corresponds to the offset of the linear

347 motion model (default: True).

348 ifg_space: bool 

349 set to True if the phase shall be predicted in interferogram space. If False, phase will be 

350 predicted in acquisition space. (default: True) 

351 

352 Returns 

353 ------- 

354 vel: np.ndarray 

355 velocity for each point 

356 demerr: np.ndarray 

357 dem error for each point 

358 ref_atmo: np.ndarray

359 reference APS for each point (None if 'estimate_ref_atmo' is False)

360 coherence: np.ndarray

361 temporal coherence for each point, computed from the residuals

362 omega: np.ndarray

363 sum of squared residuals for each point

v_hat: np.ndarray

residuals of the fit for each point

364 """ 

365 num = obj.phase.shape[0] # either number of points or number of arcs 

366 

367 if ifg_space: 

368 tbase = obj.ifg_net_obj.tbase_ifg 

369 pbase = obj.ifg_net_obj.pbase_ifg 

370 num_time = obj.ifg_net_obj.num_ifgs 

371 else: 

372 tbase = obj.ifg_net_obj.tbase 

373 pbase = obj.ifg_net_obj.pbase 

374 num_time = obj.ifg_net_obj.num_images 

375 

376 vel = np.zeros((num,), dtype=np.float32) 

377 demerr = np.zeros((num,), dtype=np.float32) 

378 omega = np.zeros((num,), dtype=np.float32) 

379 coherence = np.zeros((num,), dtype=np.float32) 

380 v_hat = np.zeros((num, num_time), dtype=np.float32) 

381 

382 ref_atmo = None 

383 if estimate_ref_atmo: 

384 ref_atmo = np.zeros((num,), dtype=np.float32) 

385 a = np.zeros((num_time, 3), dtype=np.float32) 

386 a[:, 2] = 4 * np.pi / obj.wavelength # atmospheric delay at reference acquisition 

387 else: 

388 a = np.zeros((num_time, 2)) 

389 

390 a[:, 1] = 4 * np.pi / obj.wavelength * tbase # velocity 

391 

392 for p in range(obj.num_points): 

393 obv_vec = obj.phase[p, :] 

394 a[:, 0] = 4 * np.pi / obj.wavelength * pbase / (obj.slant_range[p] * np.sin(obj.loc_inc[p])) # demerr 

395 

396 x_hat, omega[p] = np.linalg.lstsq(a, obv_vec, rcond=None)[0:2] 

397 demerr[p] = x_hat[0] 

398 vel[p] = x_hat[1] 

399 if estimate_ref_atmo: 

400 ref_atmo[p] = x_hat[2] 

401 v_hat[p, :] = obv_vec - np.matmul(a, x_hat) 

402 coherence[p] = np.abs(np.mean(np.exp(1j * v_hat[p, :]))) 

403 

404 if not estimate_ref_atmo: 

405 ref_atmo = None 

406 

407 return vel, demerr, ref_atmo, coherence, omega, v_hat 
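# Design matrix used above (sketch): per point p, each row of 'a' corresponds
# to one interferogram (or acquisition) with
#   a[:, 0] = (4*pi/lambda) * pbase / (slant_range[p] * sin(loc_inc[p]))  # DEM error
#   a[:, 1] = (4*pi/lambda) * tbase                                       # velocity
#   a[:, 2] = (4*pi/lambda)                                               # reference APS (optional)
# so that x_hat = [demerr, vel(, ref_atmo)] and the temporal coherence is
# |mean(exp(1j * residuals))|.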

408 

409 

410def splitImageIntoBoxesRngAz(*, length: int, width: int, num_box_az: int, num_box_rng: int): 

411 """Split the image into several boxes. 

412 

413 (adapted from mintpy.ifgram_inversion.split2boxes) 

414 

415 Parameters 

416 ---------- 

417 num_box_rng: int 

418 Number of boxes in range direction 

419 num_box_az: int

420 Number of boxes in azimuth direction 

421 length: int 

422 length of the image 

423 width: int 

424 width of the image 

425 

426 Returns 

427 ------- 

428 box_list: list 

429 list of tuples of 4 int (x0, y0, x1, y1), i.e. (xmin, ymin, xmax, ymax)

430 num_box: int 

431 number of boxes 

432 """ 

433 y_step = int(np.rint((length / num_box_rng) / 10) * 10) 

434 x_step = int(np.rint((width / num_box_az) / 10) * 10) 

435 

436 box_list = [] 

437 y0 = 0 

438 y1 = 0 

439 while y1 != length: 

440 x0 = 0 

441 x1 = 0 

442 # y1 = min([length, y0 + y_step]) 

443 if y0 + y_step + int(np.rint(y_step / 2)) > length: 

444 y1 = length 

445 else: 

446 y1 = y0 + y_step 

447 while x1 != width: 

448 if x0 + x_step + int(np.rint(x_step / 2)) > width: 

449 x1 = width 

450 else: 

451 x1 = x0 + x_step 

452 # x1 = min([width, x0 + x_step]) 

453 box = (x0, y0, x1, y1) 

454 box_list.append(box) 

455 x0 = x1 

456 y0 = y1 

457 

458 num_box = len(box_list) 

459 return box_list, num_box 
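# Worked example (sketch): length=1000, width=1500, num_box_rng=2, num_box_az=3
# gives step sizes of 500 x 500 pixels and six boxes (x0, y0, x1, y1):
#   (0, 0, 500, 500), (500, 0, 1000, 500), (1000, 0, 1500, 500),
#   (0, 500, 500, 1000), (500, 500, 1000, 1000), (1000, 500, 1500, 1000)
# Boxes at the right/bottom border can be up to ~50 % larger than the step
# size, because a new box is only started if more than half a step remains.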

460 

461 

462def preparePatches(*, num_patches: int, width: int, length: int, logger: Logger): 

463 """Create patches to subset the image stack for parallel processing to reduce memory usage. 

464 

465 Parameters 

466 ---------- 

467 num_patches: int 

468 number of patches to split the image into 

469 width: int 

470 width of the image 

471 length: int 

472 length of the image 

473 logger: Logger 

474 logging handler 

475 

476 Returns 

477 ------- 

478 box_list: list 

479 tuples with the radar coordinates of the boxes 

480 num_patches: int 

481 number of actual patches created by the function 

482 """ 

483 patch_size_lut = { 

484 1: (1, 1), 

485 2: (1, 2), 

486 3: (1, 3), 

487 4: (2, 2), 

488 6: (2, 3), 

489 8: (2, 4), 

490 10: (2, 5), 

491 12: (3, 4), 

492 15: (3, 5), 

493 28: (4, 7), 

494 } 

495 if num_patches == 1: 

496 box_list = [(0, 0, width, length)]

497 num_patches = 1 

498 else: 

499 num_patches = num_patches 

500 if num_patches > max(patch_size_lut.keys()): 

501 num_patches = max(patch_size_lut.keys()) 

502 logger.info(msg=f"Number of patches is higher than expected. Reduce to {num_patches} boxes.") 

503 else: 

504 while not (num_patches in patch_size_lut.keys()): 

505 num_patches += 1 

506 box_list, num_patches = splitImageIntoBoxesRngAz(length=length, 

507 width=width, 

508 num_box_az=patch_size_lut[num_patches][1], 

509 num_box_rng=patch_size_lut[num_patches][0]) 

510 logger.info(msg=f"Process {num_patches} patches " + 

511 f"({patch_size_lut[num_patches][1]} x {patch_size_lut[num_patches][0]}).") 

512 return box_list, num_patches 
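# Example (sketch, hypothetical arguments): a requested num_patches=5 is not in
# the lookup table and is rounded up to 6, i.e. 2 boxes in range x 3 boxes in
# azimuth; anything above 28 falls back to the 4 x 7 layout.
#   box_list, num_patches = preparePatches(num_patches=5, width=1500,
#                                          length=1000, logger=logger)
#   # num_patches == 6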

513 

514 

515def splitDatasetForParallelProcessing(*, num_samples: int, num_cores: int): 

516 """Split the dataset into chunks of similar size for processing them in parallel. 

517 

518 Parameters 

519 ---------- 

520 num_samples: int 

521 number of samples to be split 

522 num_cores: int 

523 number of cores to split among 

524 

525 Returns 

526 ------- 

527 idx: list 

528 list of sample ranges for each core 

529 """ 

530 rest = np.mod(num_samples, num_cores) 

531 avg_num_samples_per_core = int((num_samples - rest) / num_cores) 

532 num_samples_per_core = np.zeros((num_cores,), dtype=np.int64)  # int64 to avoid overflow for large datasets

533 num_samples_per_core[:] = avg_num_samples_per_core 

534 c = rest 

535 i = 0 

536 while c > 0: 

537 num_samples_per_core[i] += 1 

538 i += 1 

539 c -= 1 

540 

541 idx = list() 

542 cur_idx = 0 

543 for i in range(num_cores): 

544 idx.append([cur_idx, cur_idx + num_samples_per_core[i]]) 

545 cur_idx += num_samples_per_core[i] 

546 

547 idx = [np.arange(s, e) for s, e in idx] 

548 return idx 
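# Example (sketch): the remainder is distributed to the first chunks, e.g.
#   splitDatasetForParallelProcessing(num_samples=10, num_cores=3)
#   # -> [array([0, 1, 2, 3]), array([4, 5, 6]), array([7, 8, 9])]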

549 

550 

551def createSpatialGrid(*, coord_utm_img: np.ndarray, length: int, width: int, grid_size: int): 

552 """Create a spatial grid over the image. 

553 

554 Parameters 

555 ---------- 

556 coord_utm_img: np.ndarray 

557 coordinates of all image pixels in UTM 

558 length: int 

559 number of pixels in length of the image 

560 width: int 

561 number of pixels in width of the image 

562 grid_size: int 

563 size of the grid in [m] 

564 

565 Returns 

566 ------- 

567 box_list: list 

568 of tuples with the radar coordinates of the boxes 

569 num_box: int 

570 actual number of boxes created by the function 

571 """ 

572 p0 = coord_utm_img[:, 0, 0] 

573 p1 = coord_utm_img[:, 0, -1] 

574 p2 = coord_utm_img[:, -1, 0] 

575 dist_width = np.linalg.norm(p0 - p1) 

576 dist_length = np.linalg.norm(p0 - p2) 

577 num_box_az = int(np.round(dist_width / grid_size)) 

578 num_box_rng = int(np.round(dist_length / grid_size)) 

579 

580 # split image into different parts 

581 box_list, num_box = splitImageIntoBoxesRngAz(length=length, width=width, 

582 num_box_az=num_box_az, num_box_rng=num_box_rng) 

583 

584 return box_list, num_box 

585 

586 

587def selectBestPointsInGrid(*, box_list: list, quality: np.ndarray, sel_min: bool = True): 

588 """Select the best point inside a grid. 

589 

590 If several pixels fulfil the criteria, the first one is selected.

591 

592 Parameters 

593 ---------- 

594 box_list: list 

595 of tuples with the radar coordinates of the boxes 

596 quality: np.ndarray 

597 quality of the pixels 

598 sel_min: bool 

599 set to True if the minimum value shall be selected (default: True) 

600 

601 Returns 

602 ------- 

603 cand_mask_sparse: np.ndarray 

604 boolean mask of the selected pixels 

605 """ 

606 cand_mask_sparse = np.zeros_like(quality).astype(np.bool_) 

607 

608 for box in box_list: 

609 qual_box = quality[box[1]:box[3], box[0]:box[2]] 

610 if sel_min: 

611 idx_box = np.where(np.min(qual_box) == qual_box) 

612 if np.min(qual_box) == np.inf: # no finite minimum value exists in this box

613 continue 

614 else: # max 

615 idx_box = np.where(np.max(qual_box) == qual_box) 

616 

617 if idx_box[0].shape[0] > 1: # more than one index might be found, due to quality(PS) = 1 in MiaplPy 

618 idx_box_tmp = [idx_box[0][0], idx_box[1][0]] 

619 idx_box = idx_box_tmp 

620 idx_img = (idx_box[0] + box[1], idx_box[1] + box[0]) 

621 cand_mask_sparse[idx_img] = True 

622 return cand_mask_sparse 
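# Usage sketch (hypothetical variables): combined with createSpatialGrid, this
# thins a dense candidate set to at most one point per grid cell, keeping the
# pixel with the lowest quality value (e.g. phase noise) when sel_min=True:
#   box_list, _ = createSpatialGrid(coord_utm_img=coord_utm_img, length=length,
#                                   width=width, grid_size=200)
#   cand_mask_sparse = selectBestPointsInGrid(box_list=box_list, quality=quality,
#                                             sel_min=True)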

623 

624 

625def spatiotemporalConsistency(*, coord_utm: np.ndarray, phase: np.ndarray, wavelength: float, min_dist: int = 15, 

626 max_dist: float = np.inf, knn: int = 50): 

627 """Spatiotemporal consistency proposed by Hanssen et al. (2008) and implemented in DePSI (van Leijen, 2014). 

628 

629 Parameters 

630 ---------- 

631 coord_utm: np.ndarray 

632 UTM coordinates of the points 

633 phase: np.ndarray 

634 phase time series of the points 

635 wavelength: float 

636 radar wavelength in [m] 

637 min_dist: int 

638 minimum distance to other points in [m] (default: 15) 

639 max_dist: float 

640 maximum distance to other points in [m] (default: np.inf) 

641 knn: int 

642 number of nearest neighbors to consider (default: 50) 

643 

644 Returns 

645 ------- 

646 stc: np.ndarray 

647 spatiotemporal consistency of the points 

648 """ 

649 from scipy.spatial import KDTree 

650 

651 num_samples, num_time = phase.shape 

652 tree = KDTree(data=coord_utm) 

653 

654 stc = np.zeros((num_samples,), np.float64) 

655 

656 for p in range(num_samples): 

657 dist, idx = tree.query([coord_utm[p, 0], coord_utm[p, 1]], k=knn) 

658 mask = (dist < max_dist) & (dist > min_dist) & (dist != 0) 

659 rho = list() 

660 for i in idx[mask]: 

661 diff = (phase[i, :-1] - phase[p, :-1]) - (phase[i, 1:] - phase[p, 1:]) 

662 rho.append(wavelength / (4 * np.pi) * np.sqrt((1 / (num_time - 1) * np.sum(diff ** 2)))) 

663 if not rho: 

664 stc[p] = np.nan 

665 else: 

666 stc[p] = np.min(rho) 

667 return stc 
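# Definition implemented above (sketch): for point p and a neighbouring point i
# within the distance limits, the double difference between consecutive epochs
#   d_k = (phi_i,k - phi_p,k) - (phi_i,k+1 - phi_p,k+1)
# is converted to metres and reduced to an RMS value,
#   rho_i = lambda / (4*pi) * sqrt( sum_k d_k^2 / (num_time - 1) )
# and the spatiotemporal consistency of p is the minimum rho_i over all
# neighbours (NaN if no neighbour falls inside the distance limits).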

668 

669 

670def temporalAutoCorrelation(*, residuals: np.ndarray, lag: int): 

671 """Compute the temporal autocorrelation for given time lag from the residuals. 

672 

673 Parameters 

674 ---------- 

675 residuals: np.ndarray 

676 residual phase time series (dim: num_points x num_time_steps) 

677 lag: int 

678 time lag used for computing the correlation 

679 

680 Returns 

681 ------- 

682 auto_corr: np.ndarray 

683 auto-correlation of each point (dim: num_points x lag) 

684 """ 

685 num_points = residuals.shape[0] 

686 auto_corr = np.zeros((num_points, lag)) 

687 for lag_num in range(1, lag + 1): 

688 for p in range(num_points): 

689 auto_corr[p, lag_num - 1] = abs(np.corrcoef( 

690 np.array([residuals[p, :-lag_num], residuals[p, lag_num:]]))[0][1]) 

691 return auto_corr 
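# Example (sketch): for lag=2, auto_corr[p, 0] and auto_corr[p, 1] contain the
# absolute Pearson correlation of residuals[p] with itself shifted by one and
# two epochs, respectively; values close to 0 indicate noise-like residuals.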

692 

693 

694def readPhasePatchwise(*, stack_obj: BaseStack, dataset_name: str, num_patches: int, cand_mask: np.ndarray, 

695 point_id_img: np.ndarray, 

696 logger: Logger): 

697 """Read the phase from a file in a patchwise manner to reduce memory usage. 

698 

699 Parameters 

700 ---------- 

701 stack_obj: BaseStack 

702 instance of class BaseStack 

703 dataset_name: str 

704 name of the dataset to read (e.g. 'ifgs' or 'phase') 

705 num_patches: int 

706 number of patches to split the image into 

707 cand_mask: np.ndarray 

708 boolean mask of the selected pixels 

709 point_id_img: np.ndarray 

710 image with point IDs for each pixel 

711 logger: Logger 

712 logging handler 

713 

714 Returns 

715 ------- 

716 phase_points: np.ndarray 

717 phase time series of the selected pixels 

718 """ 

719 if dataset_name == "ifgs": 

720 length, width, num_images = stack_obj.getShape(dataset_name=dataset_name) 

721 elif dataset_name == "phase": # result from miaplpy 

722 num_images, length, width = stack_obj.getShape(dataset_name=dataset_name) 

723 else: 

724 logger.error(f"Reading '{dataset_name}' is not supported.") 

725 raise NotImplementedError 

726 

727 if num_patches == 1: 

728 phase_img = stack_obj.read(dataset_name=dataset_name) 

729 if dataset_name == "phase": # result from miaplpy 

730 phase_img = np.moveaxis(phase_img, 0, -1) 

731 phase_points = phase_img[cand_mask, :] 

732 else: 

733 phase_points = np.angle(phase_img[cand_mask, :]) 

734 else: 

735 box_list, num_patches = preparePatches(num_patches=num_patches, 

736 width=width, 

737 length=length, 

738 logger=logger) 

739 num_points = cand_mask[cand_mask].shape[0] 

740 phase_points = np.zeros((num_points, num_images), dtype=np.float32) 

741 start_idx = 0 

742 point_id_order = list() 

743 for idx in range(num_patches): 

744 bbox = box_list[idx] 

745 if dataset_name == "phase": # result from miaplpy 

746 # slcStack has different order: starts with num_images. Adjust bbox (x0, y0, z0, x1, y1, z1) 

747 # read whole slcStack and subset to time span outside this function. 

748 box = (bbox[1], 0, bbox[0], bbox[3], num_images, bbox[2]) 

749 phase_img = stack_obj.read(dataset_name=dataset_name, box=box, print_msg=False) 

750 phase_img = np.moveaxis(phase_img, 0, -1) 

751 else: 

752 phase_img = stack_obj.read(dataset_name=dataset_name, box=bbox, print_msg=False) 

753 cur_cand_mask = cand_mask[bbox[1]:bbox[3], bbox[0]:bbox[2]] 

754 

755 # extract the wrapped phase for the selected pixels in the patch 

756 cur_num_points = cur_cand_mask[cur_cand_mask].shape[0] 

757 stop_idx = start_idx + cur_num_points 

758 if dataset_name == "phase": 

759 phase_points[start_idx:stop_idx, :] = phase_img[cur_cand_mask, :] # miaplpy results are phases 

760 else: 

761 phase_points[start_idx:stop_idx, :] = np.angle(phase_img[cur_cand_mask, :]) 

762 start_idx = stop_idx 

763 

764 # store order of IDs to sort the points after loading all ifgs 

765 cur_point_id = point_id_img[bbox[1]:bbox[3], bbox[0]:bbox[2]] 

766 cur_point_id = cur_point_id[cur_cand_mask] 

767 point_id_order.append(cur_point_id) 

768 logger.info(msg="\r\033[KPatches read:\t {}/{}".format(idx + 1, num_patches)) 

769 # reorder points to fit to the same structure for all datasets 

770 idx = np.argsort(np.hstack(point_id_order)) 

771 phase_points = phase_points[idx, :] 

772 

773 return phase_points 

774 

775 

776def detectValidAreas(*, bmap_obj: AmplitudeImage, logger: Logger): 

777 """Detect valid areas based on amplitude image. 

778 

779 Parameters 

780 ---------- 

781 bmap_obj: AmplitudeImage 

782 instance of class AmplitudeImage 

783 logger: Logger 

784 logging handler 

785 

786 Returns 

787 ------- 

788 mask_valid_area: np.ndarray 

789 boolean mask of the valid areas 

790 """ 

791 bmap_obj.open() 

792 mask_valid_area = (10 ** (bmap_obj.background_map / 10)) > 0 

793 num_invalid = mask_valid_area[~mask_valid_area].shape[0] 

794 if num_invalid > 0: 

795 logger.info(msg=f"Number of invalid pixels found in image: {num_invalid}") 

796 return mask_valid_area 
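# Note (sketch): the background map is stored in dB, so 10 ** (dB / 10) only
# becomes zero where the amplitude is exactly zero (dB = -inf), e.g. outside
# the valid data area; those pixels are masked as invalid.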

797 

798 

799def setReferenceToPeakOfHistogram(*, phase: np.ndarray, vel: np.ndarray, num_bins: int = 100): 

800 """Set reference phase value to peak of the velocity histogram. 

801 

802 It assumes that zero velocity (i.e. stable areas) occurs most frequently.

803 

804 Parameters 

805 ---------- 

806 phase: np.ndarray 

807 phase time series of the points 

808 vel: np.ndarray 

809 velocity of the points 

810 num_bins: int 

811 number of bins for the histogram (default: 100) 

812 

813 Returns 

814 ------- 

815 phase: np.ndarray 

816 phase time series adjusted by the new reference phase 

817 """ 

818 if phase.shape[0] < 40: # the method will not give meaningful results if too few points are available

819 num_bins = 10 

820 

821 # find most frequent velocity 

822 hist, bin_edges = np.histogram(vel, bins=num_bins, density=True) 

823 max_idx = np.argmax(hist) 

824 

825 # find a set of points which have the most frequent velocity 

826 mask = (vel >= bin_edges[max_idx]) & (vel < bin_edges[max_idx + 1]) 

827 

828 # determine reference phase from mean of the phase time series of the selected points 

829 ref_phase = np.mean(phase[mask, :], axis=0) 

830 

831 # adjust the phases by the reference phase

832 phase -= ref_phase 

833 

834 return phase 
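# Usage sketch (hypothetical variables): after estimating 'vel' for all points,
#   phase = setReferenceToPeakOfHistogram(phase=phase, vel=vel, num_bins=100)
# subtracts the mean phase time series of the points in the most frequent
# velocity bin (assumed to be stable ground) from all points.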

835 

836 

837def checkIfRequiredFilesExist(*, path_to_files: str, required_files: list, logger: Logger): 

838 """ 

839 Check if all required files exist from previous processing steps. 

840 

841 Parameters 

842 ---------- 

843 path_to_files: str 

844 path to the files 

845 required_files: list 

846 list of required files which are all checked 

847 logger: Logger 

848 logging handler 

849 

850 Raises 

851 ------ 

852 FileNotFoundError 

853 if a required file is missing 

854 """ 

855 # loop over all required files and check if they exist, if not: raise error 

856 for file in required_files: 

857 if not exists(join(path_to_files, file)): 

858 logger.error(f"File from previous step(s) is missing: {file}.") 

859 raise FileNotFoundError
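# Usage sketch (file names are hypothetical examples):
#   checkIfRequiredFilesExist(path_to_files="outputs",
#                             required_files=["coordinates_utm.h5", "ifg_stack.h5"],
#                             logger=logger)
# raises FileNotFoundError as soon as one of the listed files is missing.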