Coverage for sarvey/processing.py: 87%

514 statements  

coverage.py v7.6.0, created at 2024-10-17 12:36 +0000

1#!/usr/bin/env python 

2 

3# SARvey - A multitemporal InSAR time series tool for the derivation of displacements. 

4# 

5# Copyright (C) 2021-2024 Andreas Piter (IPI Hannover, piter@ipi.uni-hannover.de) 

6# 

7# This software was developed together with FERN.Lab (fernlab@gfz-potsdam.de) in the context 

8# of the SAR4Infra project with funds of the German Federal Ministry for Digital and 

9# Transport and contributions from Landesamt fuer Vermessung und Geoinformation 

10# Schleswig-Holstein and Landesbetrieb Strassenbau und Verkehr Schleswig-Holstein. 

11# 

12# This program is free software: you can redistribute it and/or modify it under 

13# the terms of the GNU General Public License as published by the Free Software 

14# Foundation, either version 3 of the License, or (at your option) any later 

15# version. 

16# 

17# Important: This package uses PyMaxFlow. The core of the PyMaxFlow library is the C++ 

18# implementation by Vladimir Kolmogorov. It is also licensed under the GPL, but it REQUIRES that you 

19# cite [BOYKOV04] (see LICENSE) in any resulting publication if you use this code for research purposes. 

20# This requirement extends to SARvey. 

21# 

22# This program is distributed in the hope that it will be useful, but WITHOUT 

23# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS 

24# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more 

25# details. 

26# 

27# You should have received a copy of the GNU General Public License along 

28# with this program. If not, see <https://www.gnu.org/licenses/>. 

29 

30"""Processing module for SARvey.""" 

31from os.path import join, exists 

32import matplotlib.pyplot as plt 

33from matplotlib import colormaps 

34import numpy as np 

35from logging import Logger 

36 

37from miaplpy.objects.slcStack import slcStack 

38from mintpy.utils import readfile 

39from mintpy.utils.plot import auto_flip_direction 

40 

41from sarvey import viewer 

42from sarvey.densification import densifyNetwork 

43from sarvey.filtering import estimateAtmosphericPhaseScreen, simpleInterpolation 

44from sarvey.ifg_network import (DelaunayNetwork, SmallBaselineYearlyNetwork, SmallTemporalBaselinesNetwork, 

45 SmallBaselineNetwork, StarNetwork) 

46from sarvey.objects import Network, Points, AmplitudeImage, CoordinatesUTM, NetworkParameter, BaseStack 

47from sarvey.unwrapping import spatialParameterIntegration, \ 

48 parameterBasedNoisyPointRemoval, temporalUnwrapping, spatialUnwrapping, removeGrossOutliers 

49from sarvey.preparation import createArcsBetweenPoints, selectPixels, createTimeMaskFromDates 

50import sarvey.utils as ut 

51from sarvey.coherence import computeIfgsAndTemporalCoherence 

52from sarvey.triangulation import PointNetworkTriangulation 

53from sarvey.config import Config 

54 

55 

56class Processing: 

57 """Processing.""" 

58 

59 def __init__(self, path: str, config: Config, logger: Logger): 

60 """Init.""" 

61 self.path = path 

62 self.config = config 

63 self.logger = logger 

64 

65 def runPreparation(self): 

66 """RunPreparation.""" 

67 log = self.logger 

68 

69 msg = "#" * 10 

70 msg += " PREPARE PROCESSING: LOAD INPUT " 

71 msg += "#" * 10 

72 log.info(msg=msg) 

73 

74 # load slc data 

75 slc_stack_obj = slcStack(join(self.config.general.input_path, "slcStack.h5")) 

76 slc_stack_obj.open() 

77 

78 if "ORBIT_DIRECTION" in slc_stack_obj.metadata: 

79 log.info(msg="Orbit direction: {}".format(slc_stack_obj.metadata["ORBIT_DIRECTION"])) 

80 else: 

81 log.warning(msg="No orbit direction found in metadata. Add 'ORBIT_DIRECTION' to metadata of 'slcStack.h5'" 

82 "and run again!") 

83 raise AttributeError("No orbit direction found in metadata.") 

84 

85 time_mask, num_slc, date_list = createTimeMaskFromDates( 

86 start_date=self.config.preparation.start_date, 

87 stop_date=self.config.preparation.end_date, 

88 date_list=slc_stack_obj.dateList, 

89 logger=log 

90 ) 

91 log.info(msg=f"Start date: {date_list[0]}") 

92 log.info(msg=f"Stop date: {date_list[-1]}") 

93 log.info(msg=f"Number of SLC: {num_slc}") 

94 

95 msg = "#" * 10 

96 msg += " DESIGN IFG NETWORK " 

97 msg += "#" * 10 

98 log.info(msg=msg) 

99 

100 ifg_net_obj = None 

101 if self.config.preparation.ifg_network_type == "star": 

102 ifg_net_obj = StarNetwork() 

103 ifg_net_obj.configure( 

104 pbase=slc_stack_obj.pbase[time_mask], 

105 tbase=slc_stack_obj.tbase[time_mask], 

106 ref_idx=int(np.floor(num_slc/2)), 

107 dates=date_list 

108 ) 

109 log.info(msg="Star ifg network") 

110 elif self.config.preparation.ifg_network_type == "sb": 

111 ifg_net_obj = SmallBaselineNetwork() 

112 ifg_net_obj.configure(pbase=slc_stack_obj.pbase[time_mask], 

113 tbase=slc_stack_obj.tbase[time_mask], 

114 num_link=self.config.preparation.num_ifgs, 

115 max_tbase=self.config.preparation.max_tbase, 

116 dates=date_list) 

117 log.info(msg="Small baseline network") 

118 elif self.config.preparation.ifg_network_type == "stb": 

119 ifg_net_obj = SmallTemporalBaselinesNetwork() 

120 ifg_net_obj.configure( 

121 pbase=slc_stack_obj.pbase[time_mask], 

122 tbase=slc_stack_obj.tbase[time_mask], 

123 num_link=self.config.preparation.num_ifgs, 

124 dates=date_list 

125 ) 

126 log.info(msg="Small temporal baseline network") 

127 elif self.config.preparation.ifg_network_type == "stb_year": 

128 ifg_net_obj = SmallBaselineYearlyNetwork() 

129 ifg_net_obj.configure( 

130 pbase=slc_stack_obj.pbase[time_mask], 

131 tbase=slc_stack_obj.tbase[time_mask], 

132 num_link=self.config.preparation.num_ifgs, 

133 dates=date_list 

134 ) 

135 log.info(msg="Small temporal baseline and yearly ifg network") 

136 elif self.config.preparation.ifg_network_type == "delaunay": 

137 ifg_net_obj = DelaunayNetwork() 

138 ifg_net_obj.configure( 

139 pbase=slc_stack_obj.pbase[time_mask], 

140 tbase=slc_stack_obj.tbase[time_mask], 

141 dates=date_list 

142 ) 

143 log.info(msg="Delaunay ifg network") 

144 

145 ifg_net_obj.writeToFile(path=join(self.path, "ifg_network.h5"), logger=log) 

146 log.info(msg=f"temporal baselines: {np.unique(np.round(np.abs(ifg_net_obj.tbase_ifg) * 365.25).astype(int))}") 

147 

148 fig = ifg_net_obj.plot() 

149 fig.savefig(join(self.path, "pic", "step_0_interferogram_network.png"), dpi=300) 

150 plt.close(fig) 

151 

152 msg = "#" * 10 

153 msg += f" GENERATE STACK OF {ifg_net_obj.num_ifgs} INTERFEROGRAMS & ESTIMATE TEMPORAL COHERENCE " 

154 msg += "#" * 10 

155 log.info(msg=msg) 

156 
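# split the image into patches so that the interferograms and the temporal coherence can be computed patch-wise with limited memory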

157 box_list, num_patches = ut.preparePatches(num_patches=self.config.general.num_patches, 

158 width=slc_stack_obj.width, 

159 length=slc_stack_obj.length, 

160 logger=log) 

161 

162 # create placeholder in result file for datasets which are stored patch-wise 

163 dshape = (slc_stack_obj.length, slc_stack_obj.width, ifg_net_obj.num_ifgs) 

164 ifg_stack_obj = BaseStack(file=join(self.path, "ifg_stack.h5"), logger=log) 

165 ifg_stack_obj.prepareDataset(dataset_name="ifgs", dshape=dshape, dtype=np.csingle, 

166 metadata=slc_stack_obj.metadata, mode='w', chunks=(30, 30, ifg_net_obj.num_ifgs)) 

167 

168 # create placeholder in result file for datasets which are stored patch-wise 

169 temp_coh_obj = BaseStack(file=join(self.path, "temporal_coherence.h5"), logger=log) 

170 dshape = (slc_stack_obj.length, slc_stack_obj.width) 

171 temp_coh_obj.prepareDataset(dataset_name="temp_coh", metadata=slc_stack_obj.metadata, 

172 dshape=dshape, dtype=np.float32, mode="w", chunks=True) 

173 

174 mean_amp_img = computeIfgsAndTemporalCoherence( 

175 path_temp_coh=join(self.path, "temporal_coherence.h5"), 

176 path_ifgs=join(self.path, "ifg_stack.h5"), 

177 path_slc=join(self.config.general.input_path, "slcStack.h5"), 

178 ifg_array=np.array(ifg_net_obj.ifg_list), 

179 time_mask=time_mask, 

180 wdw_size=self.config.preparation.filter_window_size, 

181 num_boxes=num_patches, 

182 box_list=box_list, 

183 num_cores=self.config.general.num_cores, 

184 logger=log 

185 ) 

186 

187 # store auxiliary datasets for faster access during processing 

188 if not exists(join(self.path, "coordinates_utm.h5")): 

189 coord_utm_obj = CoordinatesUTM(file_path=join(self.path, "coordinates_utm.h5"), logger=self.logger) 

190 coord_utm_obj.prepare(input_path=join(self.config.general.input_path, "geometryRadar.h5")) 

191 del coord_utm_obj 

192 

193 if not exists(join(self.path, "background_map.h5")): 

194 bmap_obj = AmplitudeImage(file_path=join(self.path, "background_map.h5")) 

195 bmap_obj.prepare(slc_stack_obj=slc_stack_obj, img=mean_amp_img, logger=self.logger) 

196 ax = bmap_obj.plot(logger=self.logger) 

197 img = ax.get_images()[0] 

198 cbar = plt.colorbar(img, pad=0.03, shrink=0.5) 

199 cbar.ax.set_visible(False) 

200 plt.tight_layout() 

201 plt.gcf().savefig(join(self.path, "pic", "step_0_amplitude_image.png"), dpi=300) 

202 plt.close(plt.gcf()) 

203 del bmap_obj 

204 del mean_amp_img 

205 

206 temp_coh = temp_coh_obj.read(dataset_name="temp_coh") 

207 

208 fig = plt.figure(figsize=(15, 5)) 

209 ax = fig.add_subplot() 

210 im = ax.imshow(temp_coh, cmap=colormaps["gray"], vmin=0, vmax=1) 

211 auto_flip_direction(slc_stack_obj.metadata, ax=ax, print_msg=True) 

212 ax.set_xlabel("Range") 

213 ax.set_ylabel("Azimuth") 

214 plt.colorbar(im, pad=0.03, shrink=0.5) 

215 plt.title("Temporal coherence") 

216 plt.tight_layout() 

217 fig.savefig(join(self.path, "pic", "step_0_temporal_phase_coherence.png"), dpi=300) 

218 plt.close(fig) 

219 

220 def runConsistencyCheck(self): 

221 """RunConsistencyCheck.""" 

222 # 0) select candidates for first-order points 

223 ifg_stack_obj = BaseStack(file=join(self.path, "ifg_stack.h5"), logger=self.logger) 

224 length, width, num_ifgs = ifg_stack_obj.getShape(dataset_name="ifgs") 

225 

226 cand_mask1 = selectPixels( 

227 path=self.path, selection_method="temp_coh", thrsh=self.config.consistency_check.coherence_p1, 

228 grid_size=self.config.consistency_check.grid_size, bool_plot=True, logger=self.logger 

229 ) 

230 

231 bmap_obj = AmplitudeImage(file_path=join(self.path, "background_map.h5")) 

232 mask_valid_area = ut.detectValidAreas(bmap_obj=bmap_obj, logger=self.logger) 

233 

234 if self.config.consistency_check.mask_p1_file is not None: 

235 path_mask_aoi = join(self.config.consistency_check.mask_p1_file) 

236 self.logger.info(msg="load mask for area of interest from: {}.".format(path_mask_aoi)) 

237 mask_aoi = readfile.read(path_mask_aoi, datasetName='mask')[0].astype(np.bool_) 

238 mask_valid_area &= mask_aoi 

239 else: 

240 self.logger.info(msg="No mask for area of interest given.") 

241 

242 cand_mask1 &= mask_valid_area 

243 

244 fig = plt.figure(figsize=(15, 5)) 

245 ax = fig.add_subplot() 

246 ax.imshow(mask_valid_area, cmap=colormaps["gray"], alpha=0.5, zorder=10, vmin=0, vmax=1) 

247 bmap_obj.plot(ax=ax, logger=self.logger) 

248 coord_xy = np.array(np.where(cand_mask1)).transpose() 

249 val = np.ones_like(cand_mask1) 

250 sc = ax.scatter(coord_xy[:, 1], coord_xy[:, 0], c=val[cand_mask1], s=0.5, cmap=plt.get_cmap("autumn_r"), 

251 vmin=1, vmax=2) # set min, max to ensure that points are yellow 

252 cbar = plt.colorbar(sc, pad=0.03, shrink=0.5) 

253 cbar.ax.set_visible(False) # make size of axis consistent with all others 

254 plt.tight_layout() 

255 plt.title("Mask for first order point set") 

256 fig.savefig(join(self.path, "pic", "step_1_mask_p1.png"), dpi=300) 

257 plt.close(fig) 

258 

259 if cand_mask1[cand_mask1].shape[0] == 0: 

260 self.logger.error("No points selected for first-order points. Modify the coherence threshold.") 

261 raise ValueError("No first-order points selected. Modify the coherence threshold.") 

262 

263 # create unique point_id throughout the image to make it possible to mix first-order and second-order points 

264 # in the densification step. point_id is ordered so that it matches data[mask].ravel() when loading the data. 

265 point_id_img = np.arange(0, length * width).reshape((length, width)) 

266 

267 point_obj = Points(file_path=join(self.path, "p1_ifg_wr.h5"), logger=self.logger) 

268 point_id1 = point_id_img[cand_mask1] 

269 

270 point_obj.prepare( 

271 point_id=point_id1, 

272 coord_xy=coord_xy, 

273 input_path=self.config.general.input_path 

274 ) 

275 

276 point_obj.phase = ut.readPhasePatchwise(stack_obj=ifg_stack_obj, dataset_name="ifgs", 

277 num_patches=self.config.general.num_patches, cand_mask=cand_mask1, 

278 point_id_img=point_id_img, logger=self.logger) 

279 

280 point_obj.writeToFile() 

281 del ifg_stack_obj, cand_mask1 

282 

283 # 1) create spatial network 

284 arcs = createArcsBetweenPoints(point_obj=point_obj, 

285 knn=self.config.consistency_check.num_nearest_neighbours, 

286 max_arc_length=self.config.consistency_check.max_arc_length, 

287 logger=self.logger) 

288 net_obj = Network(file_path=join(self.path, "point_network.h5"), logger=self.logger) 

289 net_obj.computeArcObservations( 

290 point_obj=point_obj, 

291 arcs=arcs 

292 ) 

293 net_obj.writeToFile() 

294 net_obj.open(input_path=self.config.general.input_path) # to retrieve external data 

295 
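# 2) temporal unwrapping of each arc: estimate the DEM error difference, velocity difference and coherence (gamma) per arc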

296 demerr, vel, gamma = temporalUnwrapping(ifg_net_obj=point_obj.ifg_net_obj, 

297 net_obj=net_obj, 

298 wavelength=point_obj.wavelength, 

299 velocity_bound=self.config.consistency_check.velocity_bound, 

300 demerr_bound=self.config.consistency_check.dem_error_bound, 

301 num_samples=self.config.consistency_check.num_optimization_samples, 

302 num_cores=self.config.general.num_cores, 

303 logger=self.logger) 

304 

305 net_par_obj = NetworkParameter(file_path=join(self.path, "point_network_parameter.h5"), 

306 logger=self.logger) 

307 net_par_obj.prepare( 

308 net_obj=net_obj, 

309 demerr=demerr, 

310 vel=vel, 

311 gamma=gamma 

312 ) 

313 net_par_obj.writeToFile() 

314 

315 # 3) spatial unwrapping of the arc network and removal of outliers (arcs and points) 

316 bmap_obj = AmplitudeImage(file_path=join(self.path, "background_map.h5")) 

317 thrsh_visualisation = 0.7 

318 

319 try: 

320 ax = bmap_obj.plot(logger=self.logger) 

321 arc_mask = net_par_obj.gamma.reshape(-1) <= thrsh_visualisation 

322 ax, cbar = viewer.plotColoredPointNetwork(x=point_obj.coord_xy[:, 1], y=point_obj.coord_xy[:, 0], 

323 arcs=net_par_obj.arcs[arc_mask, :], 

324 val=net_par_obj.gamma[arc_mask], 

325 ax=ax, linewidth=1, cmap_name="autumn", clim=(0, 1)) 

326 ax.set_title("Coherence from temporal unwrapping\n" 

327 r"(only arcs with $\gamma \leq$ {} " 

328 "shown)\nBefore outlier removal".format(thrsh_visualisation)) 

329 fig = ax.get_figure() 

330 plt.tight_layout() 

331 fig.savefig(join(self.path, "pic", "step_1_arc_coherence.png"), dpi=300) 

332 except BaseException as e: 

333 self.logger.exception(msg="NOT POSSIBLE TO PLOT SPATIAL NETWORK OF POINTS. {}".format(e)) 

334 

335 net_par_obj, point_id, coord_xy, design_mat = removeGrossOutliers( 

336 net_obj=net_par_obj, 

337 point_id=point_obj.point_id, 

338 coord_xy=point_obj.coord_xy, 

339 min_num_arc=self.config.consistency_check.min_num_arc, 

340 quality_thrsh=self.config.consistency_check.arc_unwrapping_coherence, 

341 logger=self.logger 

342 ) 

343 

344 try: 

345 ax = bmap_obj.plot(logger=self.logger) 

346 arc_mask = net_par_obj.gamma.reshape(-1) <= thrsh_visualisation 

347 ax, cbar = viewer.plotColoredPointNetwork(x=coord_xy[:, 1], y=coord_xy[:, 0], 

348 arcs=net_par_obj.arcs[arc_mask, :], 

349 val=net_par_obj.gamma[arc_mask], 

350 ax=ax, linewidth=1, cmap_name="autumn", clim=(0, 1)) 

351 ax.set_title("Coherence from temporal unwrapping\n" 

352 r"(only arcs with $\gamma \leq$ {} " 

353 "shown)\nAfter outlier removal".format(thrsh_visualisation)) 

354 fig = ax.get_figure() 

355 plt.tight_layout() 

356 fig.savefig(join(self.path, "pic", "step_1_arc_coherence_reduced.png"), dpi=300) 

357 except BaseException as e: 

358 self.logger.exception(msg="NOT POSSIBLE TO PLOT SPATIAL NETWORK OF POINTS. {}".format(e)) 

359 

360 spatial_ref_id, point_id, net_par_obj = parameterBasedNoisyPointRemoval( 

361 net_par_obj=net_par_obj, 

362 point_id=point_id, 

363 coord_xy=coord_xy, 

364 design_mat=design_mat, 

365 bmap_obj=bmap_obj, 

366 bool_plot=True, 

367 logger=self.logger 

368 ) 

369 

370 net_par_obj.writeToFile() # arcs were removed. obj still needed in next step. 

371 point_obj.removePoints(keep_id=point_id, input_path=self.config.general.input_path) 

372 point_obj.writeToFile() 

373 

374 def runUnwrappingTimeAndSpace(self): 

375 """RunTemporalAndSpatialUnwrapping.""" 

376 net_par_obj = NetworkParameter(file_path=join(self.path, "point_network_parameter.h5"), 

377 logger=self.logger) 

378 net_par_obj.open(input_path=self.config.general.input_path) 

379 

380 point_obj = Points(file_path=join(self.path, "p1_ifg_unw.h5"), logger=self.logger) 

381 point_obj.open( 

382 other_file_path=join(self.path, "p1_ifg_wr.h5"), 

383 input_path=self.config.general.input_path 

384 ) 

385 

386 # reference point can be set arbitrarily, because outliers are removed. 

387 spatial_ref_idx = 0 

388 

389 bmap_obj = AmplitudeImage(file_path=join(self.path, "background_map.h5")) 

390 

391 self.logger.info(msg="Integrate parameters from arcs to points.") 

392 self.logger.info(msg="Integrate DEM correction.") 

393 demerr = spatialParameterIntegration(val_arcs=net_par_obj.demerr, 

394 arcs=net_par_obj.arcs, 

395 coord_xy=point_obj.coord_xy, 

396 weights=net_par_obj.gamma, 

397 spatial_ref_idx=spatial_ref_idx, logger=self.logger) 

398 

399 # demerr = spatialParameterIntegrationIterative(val_arcs=net_par_obj.demerr, all_arcs=net_par_obj.arcs, 

400 # coord_xy=point_obj.coord_xy, all_weights=net_par_obj.gamma, 

401 # spatial_ref_idx=spatial_ref_idx, 

402 # res_tol=5.0, 

403 # max_rm_fraction=0.001) 

404 fig = viewer.plotScatter(value=-demerr, coord=point_obj.coord_xy, 

405 ttl="Parameter integration: DEM correction in [m]", 

406 bmap_obj=bmap_obj, s=3.5, cmap="jet_r", symmetric=True, logger=self.logger)[0] 

407 fig.savefig(join(self.path, "pic", "step_2_estimation_dem_correction.png"), dpi=300) 

408 plt.close(fig) 

409 

410 self.logger.info(msg="Integrate mean velocity.") 

411 vel = spatialParameterIntegration(val_arcs=net_par_obj.vel, 

412 arcs=net_par_obj.arcs, 

413 coord_xy=point_obj.coord_xy, 

414 weights=net_par_obj.gamma, 

415 spatial_ref_idx=spatial_ref_idx, logger=self.logger) 

416 

417 # vel = spatialParameterIntegrationIterative(val_arcs=net_par_obj.vel, all_arcs=net_par_obj.arcs, 

418 # coord_xy=point_obj.coord_xy, 

419 # all_weights=net_par_obj.gamma, 

420 # spatial_ref_idx=spatial_ref_idx, 

421 # res_tol=1.0, 

422 # max_rm_fraction=0.001) 

423 fig = viewer.plotScatter(value=-vel, coord=point_obj.coord_xy, 

424 ttl="Parameter integration: mean velocity in [m / year]", 

425 bmap_obj=bmap_obj, s=3.5, cmap="jet_r", symmetric=True, logger=self.logger)[0] 

426 fig.savefig(join(self.path, "pic", "step_2_estimation_velocity.png"), dpi=300) 

427 plt.close(fig) 

428 

429 self.logger.info(msg="Remove phase contributions from mean velocity" 

430 " and DEM correction from wrapped phase of points.") 

431 pred_phase_demerr, pred_phase_vel = ut.predictPhase( 

432 obj=point_obj, vel=vel, demerr=demerr, 

433 ifg_space=True, logger=self.logger 

434 ) 

435 pred_phase = pred_phase_demerr + pred_phase_vel 

436 
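# subtract the predicted phase from the wrapped phase (wrapped difference), so that only the residual phase has to be unwrapped spatially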

437 wr_phase = point_obj.phase 

438 wr_res_phase = np.angle(np.exp(1j * wr_phase) * np.conjugate(np.exp(1j * pred_phase))) 

439 

440 if self.config.unwrapping.use_arcs_from_temporal_unwrapping: 

441 arcs = net_par_obj.arcs # use this to avoid unreliable connections. Takes a bit longer. 

442 else: 

443 triang_obj = PointNetworkTriangulation(coord_xy=point_obj.coord_xy, coord_utmxy=point_obj.coord_utm, 

444 logger=self.logger) 

445 triang_obj.triangulateGlobal() 

446 arcs = triang_obj.getArcsFromAdjMat() 

447 

448 unw_res_phase = spatialUnwrapping(num_ifgs=point_obj.ifg_net_obj.num_ifgs, 

449 num_points=point_obj.num_points, 

450 phase=wr_res_phase, 

451 method=self.config.general.spatial_unwrapping_method, 

452 edges=arcs, 

453 num_cores=self.config.general.num_cores, logger=self.logger) 

454 

455 # use same reference point for spatial integration and Puma unwrapping before recombining phases 

456 unw_res_phase = unw_res_phase - unw_res_phase[spatial_ref_idx, :] 

457 

458 self.logger.info(msg="Add phase contributions from mean velocity and DEM correction back to " 

459 "spatially unwrapped residual phase.") 

460 unw_phase = unw_res_phase + pred_phase 

461 # unw_phase = unw_res_phase # debug: don't add phase back. 

462 

463 # adjust reference to peak of histogram 

464 point_obj.phase = unw_phase 

465 vel = ut.estimateParameters(obj=point_obj, ifg_space=True)[0] 

466 point_obj.phase = ut.setReferenceToPeakOfHistogram(phase=unw_phase, vel=vel, num_bins=300) 

467 

468 point_obj.writeToFile() 

469 
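# invert the interferogram network to obtain the phase time series relative to the acquisition at ref_idx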

470 phase_ts = ut.invertIfgNetwork( 

471 phase=unw_phase, 

472 num_points=point_obj.num_points, 

473 ifg_net_obj=point_obj.ifg_net_obj, 

474 num_cores=1, # self.config.general.num_cores, 

475 ref_idx=0, 

476 logger=self.logger 

477 ) 

478 point_obj = Points(file_path=join(self.path, "p1_ts.h5"), logger=self.logger) 

479 point_obj.open( 

480 other_file_path=join(self.path, "p1_ifg_unw.h5"), 

481 input_path=self.config.general.input_path 

482 ) 

483 point_obj.phase = phase_ts 

484 point_obj.writeToFile() 

485 

486 def runUnwrappingSpace(self): 

487 """RunSpatialUnwrapping.""" 

488 point_obj = Points(file_path=join(self.path, "p1_ifg_unw.h5"), logger=self.logger) 

489 point_obj.open( 

490 other_file_path=join(self.path, "p1_ifg_wr.h5"), 

491 input_path=self.config.general.input_path 

492 ) 

493 

494 if self.config.unwrapping.use_arcs_from_temporal_unwrapping: 

495 net_par_obj = NetworkParameter(file_path=join(self.path, "point_network_parameter.h5"), 

496 logger=self.logger) 

497 net_par_obj.open(input_path=self.config.general.input_path) 

498 arcs = net_par_obj.arcs # use this to avoid unreliable connections. Takes a bit longer. 

499 else: 

500 # re-triangulate with delaunay to make PUMA faster 

501 triang_obj = PointNetworkTriangulation(coord_xy=point_obj.coord_xy, coord_utmxy=point_obj.coord_utm, 

502 logger=self.logger) 

503 triang_obj.triangulateGlobal() 

504 arcs = triang_obj.getArcsFromAdjMat() 

505 

506 bmap_obj = AmplitudeImage(file_path=join(self.path, "background_map.h5")) 

507 ax = bmap_obj.plot(logger=self.logger) 

508 ax, cbar = viewer.plotColoredPointNetwork(x=point_obj.coord_xy[:, 1], 

509 y=point_obj.coord_xy[:, 0], 

510 arcs=arcs, 

511 val=np.zeros(arcs.shape[0], dtype=np.float32), 

512 ax=ax, linewidth=0.5, cmap_name="hot", clim=(0, 1)) 

513 cbar.ax.set_visible(False) 

514 ax.set_xlabel("Range") 

515 ax.set_ylabel("Azimuth") 

516 ax.set_title("Unwrapping Network") 

517 plt.tight_layout() 

518 plt.gcf().savefig(join(self.path, "pic", "step_2_unwrapping_network_p1.png"), dpi=300) 

519 plt.close(plt.gcf()) 

520 
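# spatially unwrap the wrapped interferogram phase of the first-order points along the network arcs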

521 unw_phase = spatialUnwrapping(num_ifgs=point_obj.ifg_net_obj.num_ifgs, 

522 num_points=point_obj.num_points, 

523 phase=point_obj.phase, 

524 method=self.config.general.spatial_unwrapping_method, 

525 edges=arcs, 

526 num_cores=self.config.general.num_cores, logger=self.logger) 

527 

528 # adjust reference to peak of histogram 

529 point_obj.phase = unw_phase 

530 vel = ut.estimateParameters(obj=point_obj, ifg_space=True)[0] 

531 point_obj.phase = ut.setReferenceToPeakOfHistogram(phase=unw_phase, vel=vel, num_bins=300) 

532 

533 point_obj.writeToFile() 

534 del point_obj 

535 

536 point_obj = Points(file_path=join(self.path, "p1_ts.h5"), logger=self.logger) 

537 point_obj.open( 

538 other_file_path=join(self.path, "p1_ifg_wr.h5"), 

539 input_path=self.config.general.input_path 

540 ) 

541 

542 # for sbas the ifg network needs to be inverted to get the phase time series 

543 phase_ts = ut.invertIfgNetwork(phase=unw_phase, num_points=point_obj.num_points, 

544 ifg_net_obj=point_obj.ifg_net_obj, 

545 num_cores=1, # self.config.general.num_cores, 

546 ref_idx=0, 

547 logger=self.logger) 

548 

549 point_obj.phase = phase_ts 

550 point_obj.writeToFile() 

551 

552 def runFiltering(self): 

553 """RunFiltering.""" 

554 coh_value = int(self.config.filtering.coherence_p2 * 100) 

555 

556 # create output file which contains filtered phase time series 

557 point1_obj = Points(file_path=join(self.path, "p1_ts_filt.h5"), logger=self.logger) 

558 point1_obj.open( 

559 other_file_path=join(self.path, "p1_ts.h5"), 

560 input_path=self.config.general.input_path 

561 ) 

562 p1_mask = point1_obj.createMask() # used later for selecting psCand2 when a spatial mask AOI is given. 

563 

564 # select only pixels which have low phase noise and are well distributed 

565 mask = point1_obj.createMask() 

566 

567 bmap_obj = AmplitudeImage(file_path=join(self.path, "background_map.h5")) 

568 

569 # temporal auto-correlation 

570 auto_corr_img = np.zeros_like(mask, np.float64) 

571 

572 vel, demerr, _, _, _, residuals = ut.estimateParameters(obj=point1_obj, ifg_space=False) 

573 

574 if self.config.filtering.use_moving_points: 

575 auto_corr = ut.temporalAutoCorrelation(residuals=residuals, lag=1).reshape(-1) 

576 else: 

577 # remove the DEM error, but not the velocity, before estimating the temporal autocorrelation 

578 pred_phase_demerr = ut.predictPhase( 

579 obj=point1_obj, vel=vel, demerr=demerr, ifg_space=False, logger=self.logger)[0] 

580 phase_wo_demerr = point1_obj.phase - pred_phase_demerr 

581 auto_corr = ut.temporalAutoCorrelation(residuals=phase_wo_demerr, lag=1).reshape(-1) 

582 

583 auto_corr_img[mask] = auto_corr 

584 auto_corr_img[~mask] = np.inf 

585 

586 fig = viewer.plotScatter(value=auto_corr, coord=point1_obj.coord_xy, bmap_obj=bmap_obj, 

587 ttl="Temporal autocorrelation", unit="[ ]", s=3.5, cmap="autumn_r", 

588 vmin=0, vmax=1, logger=self.logger)[0] 

589 fig.savefig(join(self.path, "pic", "step_3_temporal_autocorrelation.png"), dpi=300) 

590 plt.close(fig) 

591 

592 # create grid 

593 coord_utm_obj = CoordinatesUTM(file_path=join(self.path, "coordinates_utm.h5"), logger=self.logger) 

594 coord_utm_obj.open() 

595 

596 # remove points based on threshold 

597 mask_thrsh = auto_corr_img <= self.config.filtering.max_temporal_autocorrelation 

598 auto_corr_img[~mask_thrsh] = np.inf 

599 
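# overlay a regular spatial grid and keep the point with the lowest temporal autocorrelation in each cell as stable point for APS estimation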

600 box_list, num_box = ut.createSpatialGrid(coord_utm_img=coord_utm_obj.coord_utm, length=point1_obj.length, 

601 width=point1_obj.width, 

602 grid_size=self.config.filtering.grid_size) 

603 

604 cand_mask_sparse = ut.selectBestPointsInGrid(box_list=box_list, quality=auto_corr_img, sel_min=True) 

605 

606 num_p1_points_for_filtering = cand_mask_sparse[cand_mask_sparse].shape[0] 

607 if num_p1_points_for_filtering < 10: 

608 self.logger.warning(msg=f"Only {num_p1_points_for_filtering} points for APS filtering selected. Filtering " 

609 f"results are probably not reliable. You can e.g. increase 'max_auto_corr' or try " 

610 f"to increase the number of first-order points during step 1 and 2.") 

611 

612 point_id_img = np.arange(0, point1_obj.length * point1_obj.width).reshape( 

613 (point1_obj.length, point1_obj.width)) 

614 keep_id = point_id_img[np.where(cand_mask_sparse)] 

615 point1_obj.removePoints(keep_id=keep_id, input_path=self.config.general.input_path) 

616 point1_obj.writeToFile() # to be able to load aps1 from this file having the same set of points 

617 

618 # store plot for quality control during processing 

619 fig, ax = viewer.plotScatter(value=auto_corr_img[cand_mask_sparse], coord=point1_obj.coord_xy, 

620 bmap_obj=bmap_obj, ttl="Selected pixels for APS estimation", 

621 unit="Auto-correlation\n[ ]", s=5, cmap="autumn_r", vmin=0, vmax=1, 

622 logger=self.logger)[:2] 

623 viewer.plotGridFromBoxList(box_list=box_list, ax=ax, edgecolor="k", linewidth=0.2) 

624 fig.savefig(join(self.path, "pic", "step_3_stable_points.png"), dpi=300) 

625 plt.close(fig) 

626 

627 if self.config.filtering.use_moving_points: 

628 # recompute the residuals, because now there are fewer points in the obj 

629 phase_for_aps_filtering = ut.estimateParameters(obj=point1_obj, ifg_space=False)[-1] 

630 else: 

631 phase_for_aps_filtering = point1_obj.phase 

632 

633 # create output which contains only the atmospheric phase screen (no parameters) 

634 aps1_obj = Points(file_path=join(self.path, "p1_aps.h5"), logger=self.logger) 

635 aps1_obj.open( 

636 other_file_path=join(self.path, "p1_ts_filt.h5"), 

637 input_path=self.config.general.input_path 

638 ) 

639 

640 # select second-order points 

641 cand_mask2 = selectPixels( 

642 path=self.path, selection_method="temp_coh", 

643 thrsh=self.config.filtering.coherence_p2, 

644 grid_size=None, bool_plot=True, 

645 logger=self.logger 

646 ) # first-order points are included in second-order points 

647 

648 if self.config.phase_linking.use_phase_linking_results: 

649 # read PL results 

650 pl_coh = readfile.read(join(self.config.phase_linking.inverted_path, "phase_series.h5"), 

651 datasetName='temporalCoherence')[0] 

652 pl_coh = pl_coh[1, :, :] 

653 siblings = readfile.read(join(self.config.phase_linking.inverted_path, "phase_series.h5"), 

654 datasetName='shp')[0] 

655 

656 if self.config.phase_linking.use_ps: 

657 mask_ps = readfile.read(self.config.phase_linking.mask_ps_file, 

658 datasetName='mask')[0].astype(np.bool_) 

659 cand_mask_pl = (pl_coh > self.config.filtering.coherence_p2) | mask_ps 

660 else: 

661 cand_mask_pl = (pl_coh > self.config.filtering.coherence_p2) 

662 # remove ps candidates, because the ps detection strategy in miaplpy seems to be biased. 

663 cand_mask_pl[siblings <= self.config.phase_linking.num_siblings] = False 

664 

665 if self.config.phase_linking.mask_phase_linking_file is not None: 

666 path_mask_pl_aoi = join(self.config.phase_linking.mask_phase_linking_file) 

667 self.logger.info(msg="load mask for area of interest from: {}.".format(path_mask_pl_aoi)) 

668 mask_pl_aoi = readfile.read(path_mask_pl_aoi, datasetName='mask')[0].astype(np.bool_) 

669 

670 fig = plt.figure(figsize=(15, 5)) 

671 ax = fig.add_subplot() 

672 ax.imshow(mask_pl_aoi, cmap=colormaps["gray"], alpha=0.5, zorder=10, vmin=0, vmax=1) 

673 bmap_obj.plot(ax=ax, logger=self.logger) 

674 coord_xy = np.array(np.where(cand_mask_pl)).transpose() 

675 val = np.ones_like(cand_mask_pl) 

676 sc = ax.scatter(coord_xy[:, 1], coord_xy[:, 0], c=val[cand_mask_pl], s=0.5, 

677 cmap=plt.get_cmap("autumn_r"), 

678 vmin=1, vmax=2) # set min, max to ensure that points are yellow 

679 cbar = plt.colorbar(sc, pad=0.03, shrink=0.5) 

680 cbar.ax.set_visible(False) # make size of axis consistent with all others 

681 plt.tight_layout() 

682 plt.title("Mask for phase linking points") 

683 fig.savefig(join(self.path, "pic", "step_3_mask_p2_coh{}_phase_linking.png".format(coh_value)), dpi=300) 

684 plt.close(fig) 

685 

686 # mask points after plotting, so that available coherent points are visible in figure 

687 cand_mask_pl[~mask_pl_aoi] = False 

688 

689 # combine phase linking coherence with TPC cand_mask2 

690 cand_mask2 = cand_mask2 | cand_mask_pl 

691 

692 mask_valid_area = ut.detectValidAreas(bmap_obj=bmap_obj, logger=self.logger) 

693 

694 if self.config.filtering.mask_p2_file is not None: 

695 path_mask_aoi = join(self.config.filtering.mask_p2_file) 

696 self.logger.info(msg="load mask for area of interest from: {}.".format(path_mask_aoi)) 

697 mask_aoi = readfile.read(path_mask_aoi, datasetName='mask')[0].astype(np.bool_) 

698 mask_valid_area &= mask_aoi 

699 # todo: add unstable points from p1 for densification 

700 else: 

701 self.logger.info(msg="No mask for area of interest given.") 

702 

703 cand_mask2[p1_mask] = True # add all selected first-order points to avoid spatial gaps in 2D unwrapping 

704 # cand_mask2[cand_mask_sparse] = True # add only stable points from the first-order points 

705 

706 cand_mask2 &= mask_valid_area 

707 

708 fig = plt.figure(figsize=(15, 5)) 

709 ax = fig.add_subplot() 

710 ax.imshow(mask_valid_area, cmap=colormaps["gray"], alpha=0.5, zorder=10, vmin=0, vmax=1) 

711 bmap_obj.plot(ax=ax, logger=self.logger) 

712 coord_xy = np.array(np.where(cand_mask2)).transpose() 

713 val = np.ones_like(cand_mask2) 

714 sc = ax.scatter(coord_xy[:, 1], coord_xy[:, 0], c=val[cand_mask2], s=0.5, cmap=plt.get_cmap("autumn_r"), 

715 vmin=1, vmax=2) # set min, max to ensure that points are yellow 

716 cbar = plt.colorbar(sc, pad=0.03, shrink=0.5) 

717 cbar.ax.set_visible(False) # make size of axis consistent with all others 

718 plt.tight_layout() 

719 plt.title("Mask for dense point set") 

720 fig.savefig(join(self.path, "pic", "step_3_mask_p2_coh{}.png".format(coh_value)), dpi=300) 

721 plt.close(fig) 

722 

723 point2_obj = Points(file_path=join(self.path, "p2_coh{}_ifg_wr.h5".format(coh_value)), logger=self.logger) 

724 coord_xy = np.array(np.where(cand_mask2)).transpose() 

725 point_id2 = point_id_img[cand_mask2] 

726 point2_obj.prepare( 

727 point_id=point_id2, 

728 coord_xy=coord_xy, 

729 input_path=self.config.general.input_path 

730 ) 

731 

732 ifg_stack_obj = BaseStack(file=join(self.path, "ifg_stack.h5"), logger=self.logger) 

733 

734 point2_obj.phase = ut.readPhasePatchwise(stack_obj=ifg_stack_obj, dataset_name="ifgs", 

735 num_patches=self.config.general.num_patches, cand_mask=cand_mask2, 

736 point_id_img=point_id_img, logger=self.logger) 

737 

738 if self.config.phase_linking.use_phase_linking_results: 

739 self.logger.info(msg="read phase from MiaplPy results...") 

740 phase_linking_obj = BaseStack( 

741 file=join(self.config.phase_linking.inverted_path, "phase_series.h5"), 

742 logger=self.logger 

743 ) 

744 

745 pl_phase = ut.readPhasePatchwise( 

746 stack_obj=phase_linking_obj, dataset_name="phase", 

747 num_patches=self.config.general.num_patches, 

748 cand_mask=cand_mask2, 

749 point_id_img=point_id_img, logger=self.logger 

750 ) 

751 

752 # subset to time span 

753 slc_stack_obj = slcStack(join(self.config.general.input_path, "slcStack.h5")) 

754 slc_stack_obj.open() 

755 time_mask = createTimeMaskFromDates( 

756 start_date=self.config.preparation.start_date, 

757 stop_date=self.config.preparation.end_date, 

758 date_list=slc_stack_obj.dateList, 

759 logger=self.logger 

760 )[0] 

761 pl_phase = pl_phase[:, time_mask] 

762 
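# recombine the phase-linking time series phase into interferograms that match the ifg network of the points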

763 pl_ifgs = np.zeros((point2_obj.num_points, point2_obj.ifg_net_obj.num_ifgs), dtype=np.float32) 

764 

765 c = 0 

766 for i, j in np.asarray(point1_obj.ifg_net_obj.ifg_list): 

767 pl_ifgs[:, c] = np.angle(np.exp(1j * pl_phase[:, i]) * np.conjugate(np.exp(1j * pl_phase[:, j]))) 

768 c += 1 

769 

770 # change only phase for good phase linking pixels and keep original phase for good tpc pixels 

771 mask_pl = cand_mask_pl[cand_mask2] 

772 point2_obj.phase[mask_pl] = pl_ifgs[mask_pl] 

773 

774 point2_obj.writeToFile() 

775 del point2_obj, ifg_stack_obj 

776 

777 aps2_obj = Points(file_path=join(self.path, "p2_coh{}_aps.h5".format(coh_value)), logger=self.logger) 

778 aps2_obj.open( 

779 other_file_path=join(self.path, "p2_coh{}_ifg_wr.h5".format(coh_value)), 

780 input_path=self.config.general.input_path 

781 ) 

782 

783 if self.config.filtering.apply_aps_filtering: 

784 # spatial filtering of points with linear motion only (no non-linear motion) 

785 if self.config.filtering.interpolation_method == "kriging": 

786 aps1_phase, aps2_phase = estimateAtmosphericPhaseScreen( 

787 residuals=phase_for_aps_filtering, 

788 coord_utm1=point1_obj.coord_utm, 

789 coord_utm2=aps2_obj.coord_utm, 

790 num_cores=self.config.general.num_cores, 

791 bool_plot=False, 

792 logger=self.logger 

793 ) 

794 else: 

795 aps1_phase, aps2_phase = simpleInterpolation( 

796 residuals=phase_for_aps_filtering, 

797 coord_utm1=point1_obj.coord_utm, 

798 coord_utm2=aps2_obj.coord_utm, 

799 interp_method=self.config.filtering.interpolation_method 

800 ) 

801 else: 

802 msg = "#" * 10 

803 msg += " SKIP ATMOSPHERIC FILTERING! " 

804 msg += "#" * 10 

805 self.logger.info(msg=msg) 

806 num_points1 = phase_for_aps_filtering.shape[0] 

807 num_points2 = aps2_obj.coord_utm.shape[0] 

808 num_time = phase_for_aps_filtering.shape[1] 

809 aps1_phase = np.zeros((num_points1, num_time), dtype=np.float32) 

810 aps2_phase = np.zeros((num_points2, num_time), dtype=np.float32) 

811 

812 point1_obj.phase -= aps1_phase 

813 point1_obj.writeToFile() 

814 

815 aps1_obj.phase = aps1_phase 

816 aps2_obj.phase = aps2_phase 

817 aps1_obj.writeToFile() 

818 aps2_obj.writeToFile() 

819 

820 def runDensificationTimeAndSpace(self): 

821 """RunDensificationTimeAndSpace.""" 

822 coh_value = int(self.config.filtering.coherence_p2 * 100) 

823 

824 point2_obj = Points(file_path=join(self.path, "p2_coh{}_ifg_unw.h5".format(coh_value)), logger=self.logger) 

825 point2_obj.open( 

826 other_file_path=join(self.path, "p2_coh{}_ifg_wr.h5".format(coh_value)), 

827 input_path=self.config.general.input_path 

828 ) # wrapped phase 

829 

830 # estimate parameters from unwrapped phase 

831 point1_obj = Points(file_path=join(self.path, "p1_ifg_unw.h5"), logger=self.logger) 

832 point1_obj.open(input_path=self.config.general.input_path) 

833 vel_p1, demerr_p1 = ut.estimateParameters(obj=point1_obj, ifg_space=True)[:2] 

834 

835 # load wrapped phase to remove known components for unwrapping p2 points 

836 point1_obj = Points(file_path=join(self.path, "p1_ifg_wr.h5"), logger=self.logger) # wrapped phase! 

837 point1_obj.open(input_path=self.config.general.input_path) 

838 

839 aps1_obj = Points(file_path=join(self.path, "p1_aps.h5"), logger=self.logger) 

840 aps1_obj.open(input_path=self.config.general.input_path) 

841 

842 aps2_obj = Points(file_path=join(self.path, "p2_coh{}_aps.h5".format(coh_value)), logger=self.logger) 

843 aps2_obj.open(input_path=self.config.general.input_path) 

844 

845 if self.config.filtering.mask_p2_file is None: 

846 """ 

847 overview of points contained in the *_obj 

848 (unstable p1 means: p1 which were not used in atmospheric filtering) 

849 p2: p2 - inconsistent p2 + unstable p1 + stable p1 --> p2: p2 

850 aps2: p2 + unstable p1 + stable p1 --> aps2: p2 

851 p1: stable p1 + unstable p1 --> p1: stable p1 + unstable p1 

852 aps1: stable p1 --> aps1: stable p1 + unstable p1 

853 """ 

854 # find unstable p1 in p2 (and in aps2) 

855 point_id_img = np.arange(0, point1_obj.length * point1_obj.width).reshape( 

856 (point1_obj.length, point1_obj.width)) 

857 p1_mask = point1_obj.createMask() 

858 aps1_mask = aps1_obj.createMask() 

859 mask_unstable_p1 = p1_mask & (~aps1_mask) 

860 unstable_p1_id = point_id_img[np.where(mask_unstable_p1)] 

861 

862 mask_unstable_p1_in_p2 = np.ones((aps2_obj.num_points,), dtype=np.bool_) 

863 for p in aps2_obj.point_id: 

864 if p not in unstable_p1_id: 

865 mask_unstable_p1_in_p2[aps2_obj.point_id == p] = False 

866 

867 # add unstable p1 from aps2 to aps1 

868 aps1_obj.addPointsFromObj( 

869 new_point_id=aps2_obj.point_id[mask_unstable_p1_in_p2], 

870 new_coord_xy=aps2_obj.coord_xy[mask_unstable_p1_in_p2, :], 

871 new_phase=aps2_obj.phase[mask_unstable_p1_in_p2, :], 

872 new_num_points=mask_unstable_p1_in_p2[mask_unstable_p1_in_p2].shape[0], 

873 input_path=self.config.general.input_path 

874 ) 

875 

876 # remove unstable p1 from p2 and aps2. thereby remove inconsistent p2 from aps2. 

877 p2_mask = point2_obj.createMask() 

878 mask_only_p2 = p2_mask & (~p1_mask) 

879 keep_id = point_id_img[np.where(mask_only_p2)] 

880 point2_obj.removePoints(keep_id=keep_id, input_path=self.config.general.input_path) 

881 aps2_obj.removePoints(keep_id=keep_id, input_path=self.config.general.input_path) 

882 

883 else: 

884 """ 

885 if spatial mask is applied: 

886 p2: p2 - inconsistent p2 (?) + p1 (coincidentally?) --> p2: p2 

887 aps2: p2 + p1 (coincidentally?) --> aps2: p2 

888 p1: stable p1 + unstable p1 --> p1: stable p1 (+ unstable p1) 

889 aps1: stable p1 --> aps1: stable p1 (+ unstable p1) 

890 """ 

891 use_all_p1 = False # todo: add to config 

892 if use_all_p1: 

893 raise NotImplementedError("Use all p1 is not implemented.") 

894 else: 

895 # also remove the corresponding values from the estimated parameters 

896 mask = np.ones((point1_obj.num_points,), dtype=np.bool_) 

897 for p in point1_obj.point_id: 

898 if p not in aps1_obj.point_id: 

899 mask[point1_obj.point_id == p] = False 

900 

901 vel_p1 = vel_p1[mask] 

902 demerr_p1 = demerr_p1[mask] 

903 

904 # remove unstable p1 from p1 

905 point1_obj.removePoints( 

906 keep_id=aps1_obj.point_id, 

907 input_path=self.config.general.input_path 

908 ) 

909 

910 # remove p2 which are coincidentally equal to p1 

911 point_id_img = np.arange(0, point1_obj.length * point1_obj.width).reshape( 

912 (point1_obj.length, point1_obj.width)) 

913 p1_mask = point1_obj.createMask() 

914 p2_mask = point2_obj.createMask() 

915 mask_p2 = ~(p1_mask & p2_mask) & p2_mask 

916 p2_id = point_id_img[np.where(mask_p2)] 

917 point2_obj.removePoints(keep_id=p2_id, input_path=self.config.general.input_path) 

918 aps2_obj.removePoints(keep_id=p2_id, input_path=self.config.general.input_path) 

919 

920 # return to ifg-space 

921 a_ifg = point2_obj.ifg_net_obj.getDesignMatrix() 

922 aps1_ifg_phase = np.matmul(a_ifg, aps1_obj.phase.T).T 

923 aps2_ifg_phase = np.matmul(a_ifg, aps2_obj.phase.T).T 

924 

925 # correct for APS 

926 point2_obj.phase = np.angle(np.exp(1j * point2_obj.phase) * np.conjugate(np.exp(1j * aps2_ifg_phase))) 

927 point1_obj.phase = np.angle(np.exp(1j * point1_obj.phase) * np.conjugate(np.exp(1j * aps1_ifg_phase))) 

928 
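# densify: estimate DEM error, velocity and temporal coherence for each second-order point by connecting it to nearby first-order points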

929 demerr, vel, gamma = densifyNetwork( 

930 point1_obj=point1_obj, 

931 vel_p1=vel_p1, 

932 demerr_p1=demerr_p1, 

933 point2_obj=point2_obj, 

934 num_conn_p1=self.config.densification.num_connections_to_p1, 

935 max_dist_p1=self.config.densification.max_distance_to_p1, 

936 velocity_bound=self.config.densification.velocity_bound, 

937 demerr_bound=self.config.densification.dem_error_bound, 

938 num_samples=self.config.densification.num_optimization_samples, 

939 num_cores=self.config.general.num_cores, 

940 logger=self.logger 

941 ) # returns parameters of both first- and second-order points 

942 

943 # store combined set of first and second-order points 

944 point2_obj.addPointsFromObj( 

945 new_point_id=point1_obj.point_id, 

946 new_coord_xy=point1_obj.coord_xy, 

947 new_phase=point1_obj.phase, 

948 new_num_points=point1_obj.num_points, 

949 input_path=self.config.general.input_path 

950 ) 

951 

952 bmap_obj = AmplitudeImage(file_path=join(self.path, "background_map.h5")) 

953 fig = viewer.plotScatter(value=gamma, coord=point2_obj.coord_xy, bmap_obj=bmap_obj, 

954 ttl="Coherence from temporal unwrapping\nBefore outlier removal", s=3.5, cmap="autumn", 

955 vmin=0, vmax=1, logger=self.logger)[0] 

956 fig.savefig(join(self.path, "pic", "step_4_temporal_unwrapping_p2_coh{}.png".format(coh_value)), dpi=300) 

957 plt.close(fig) 

958 

959 mask_gamma = gamma >= self.config.densification.arc_unwrapping_coherence 

960 self.logger.info(msg=f"Reduce the dense point set by {mask_gamma[~mask_gamma].shape[0]} points,") 

961 self.logger.info(msg=f"due to coherence from temporal unwrapping < " 

962 f"{self.config.densification.arc_unwrapping_coherence}") 

963 point2_obj.removePoints(mask=mask_gamma, keep_id=[], input_path=self.config.general.input_path) 

964 

965 fig = plt.figure(figsize=(15, 5)) 

966 axs = fig.subplots(1, 2) 

967 axs[0].hist(-vel[mask_gamma] * 100, bins=200) 

968 axs[0].set_ylabel('Absolute frequency') 

969 axs[0].set_xlabel('Mean velocity [cm / year]') 

970 

971 axs[1].hist(-demerr[mask_gamma], bins=200) 

972 axs[1].set_ylabel('Absolute frequency') 

973 axs[1].set_xlabel('DEM error [m]') 

974 fig.savefig(join(self.path, "pic", "step_4_consistency_parameters_p2_coh{}.png".format(coh_value)), 

975 dpi=300) 

976 plt.close(fig) 

977 

978 fig = viewer.plotScatter(value=gamma[mask_gamma], coord=point2_obj.coord_xy, bmap_obj=bmap_obj, 

979 ttl="Coherence from temporal unwrapping\nAfter outlier removal", s=3.5, cmap="autumn", 

980 vmin=0, vmax=1, logger=self.logger)[0] 

981 fig.savefig(join(self.path, "pic", "step_4_temporal_unwrapping_p2_coh{}_reduced.png".format(coh_value)), 

982 dpi=300) 

983 plt.close(fig) 

984 

985 fig = viewer.plotScatter(value=-vel[mask_gamma], coord=point2_obj.coord_xy, 

986 ttl="Mean velocity in [m / year]", 

987 bmap_obj=bmap_obj, s=3.5, cmap="jet_r", symmetric=True, logger=self.logger)[0] 

988 fig.savefig(join(self.path, "pic", "step_4_estimation_velocity_p2_coh{}.png".format(coh_value)), dpi=300) 

989 plt.close(fig) 

990 

991 fig = viewer.plotScatter(value=-demerr[mask_gamma], coord=point2_obj.coord_xy, ttl="DEM error in [m]", 

992 bmap_obj=bmap_obj, s=3.5, cmap="jet_r", symmetric=True, logger=self.logger)[0] 

993 fig.savefig(join(self.path, "pic", "step_4_estimation_dem_correction_p2_coh{}.png".format(coh_value)), dpi=300) 

994 plt.close(fig) 

995 

996 self.logger.info(msg="Remove phase contributions from mean velocity " 

997 "and DEM correction from wrapped phase of points.") 

998 pred_phase_demerr, pred_phase_vel = ut.predictPhase( 

999 obj=point2_obj, 

1000 vel=vel[mask_gamma], 

1001 demerr=demerr[mask_gamma], 

1002 ifg_space=True, 

1003 logger=self.logger 

1004 ) 

1005 pred_phase = pred_phase_demerr + pred_phase_vel 

1006 

1007 wr_phase = point2_obj.phase 

1008 wr_res_phase = np.angle(np.exp(1j * wr_phase) * np.conjugate(np.exp(1j * pred_phase))) 

1009 

1010 triang_obj = PointNetworkTriangulation(coord_xy=point2_obj.coord_xy, coord_utmxy=None, logger=self.logger) 

1011 triang_obj.triangulateGlobal() 

1012 arcs = triang_obj.getArcsFromAdjMat() 

1013 

1014 unw_res_phase = spatialUnwrapping(num_ifgs=point2_obj.ifg_net_obj.num_ifgs, 

1015 num_points=point2_obj.num_points, 

1016 phase=wr_res_phase, 

1017 method=self.config.general.spatial_unwrapping_method, 

1018 edges=arcs, 

1019 num_cores=self.config.general.num_cores, logger=self.logger) 

1020 

1021 self.logger.info(msg="Add phase contributions from mean velocity " 

1022 "and DEM correction back to spatially unwrapped residual phase.") 

1023 unw_phase = unw_res_phase + pred_phase 

1024 

1025 point2_obj.phase = unw_phase 

1026 vel = ut.estimateParameters(obj=point2_obj, ifg_space=True)[0] 

1027 point2_obj.phase = ut.setReferenceToPeakOfHistogram(phase=unw_phase, vel=vel, num_bins=300) 

1028 

1029 point2_obj.writeToFile() 

1030 

1031 phase_ts = ut.invertIfgNetwork( 

1032 phase=unw_phase, 

1033 num_points=point2_obj.num_points, 

1034 ifg_net_obj=point2_obj.ifg_net_obj, 

1035 num_cores=1, # self.config.general.num_cores, 

1036 ref_idx=0, 

1037 logger=self.logger) 

1038 

1039 point_obj = Points(file_path=join(self.path, "p2_coh{}_ts.h5".format(coh_value)), logger=self.logger) 

1040 point_obj.open( 

1041 other_file_path=join(self.path, "p2_coh{}_ifg_unw.h5".format(coh_value)), 

1042 input_path=self.config.general.input_path 

1043 ) 

1044 point_obj.phase = phase_ts 

1045 

1046 point_obj.writeToFile() 

1047 

1048 def runDensificationSpace(self): 

1049 """RunDensification.""" 

1050 coh_value = int(self.config.filtering.coherence_p2 * 100) 

1051 

1052 point_obj = Points(file_path=join(self.path, "p2_coh{}_ifg_unw.h5".format(coh_value)), logger=self.logger) 

1053 point_obj.open( 

1054 other_file_path=join(self.path, "p2_coh{}_ifg_wr.h5".format(coh_value)), 

1055 input_path=self.config.general.input_path 

1056 ) # open wr phase 

1057 

1058 aps2_obj = Points(file_path=join(self.path, "p2_coh{}_aps.h5".format(coh_value)), logger=self.logger) 

1059 aps2_obj.open(input_path=self.config.general.input_path) 

1060 

1061 # return to ifg-space 

1062 a_ifg = point_obj.ifg_net_obj.getDesignMatrix() 

1063 aps2_ifg_phase = np.matmul(a_ifg, aps2_obj.phase.T).T 

1064 

1065 # correct for APS 

1066 point_obj.phase = np.angle(np.exp(1j * point_obj.phase) * np.conjugate(np.exp(1j * aps2_ifg_phase))) 

1067 

1068 triang_obj = PointNetworkTriangulation(coord_xy=point_obj.coord_xy, coord_utmxy=None, logger=self.logger) 

1069 triang_obj.triangulateGlobal() # if coord_utm is not given, only global delaunay and knn can be calculated 

1070 arcs = triang_obj.getArcsFromAdjMat() 

1071 

1072 unw_phase = spatialUnwrapping(num_ifgs=point_obj.ifg_net_obj.num_ifgs, 

1073 num_points=point_obj.num_points, 

1074 phase=point_obj.phase, 

1075 method=self.config.general.spatial_unwrapping_method, 

1076 edges=arcs, 

1077 num_cores=self.config.general.num_cores, logger=self.logger) 

1078 

1079 # adjust reference to peak of histogram 

1080 point_obj.phase = unw_phase 

1081 vel = ut.estimateParameters(obj=point_obj, ifg_space=True)[0] 

1082 point_obj.phase = ut.setReferenceToPeakOfHistogram(phase=unw_phase, vel=vel, num_bins=300) 

1083 

1084 point_obj.writeToFile() 

1085 del point_obj 

1086 

1087 point_obj = Points(file_path=join(self.path, "p2_coh{}_ts.h5".format(coh_value)), logger=self.logger) 

1088 point_obj.open( 

1089 other_file_path=join(self.path, "p2_coh{}_ifg_wr.h5".format(coh_value)), 

1090 input_path=self.config.general.input_path 

1091 ) 

1092 
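# invert the interferogram network to obtain the phase time series of the dense point set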

1093 phase_ts = ut.invertIfgNetwork(phase=unw_phase, num_points=point_obj.num_points, 

1094 ifg_net_obj=point_obj.ifg_net_obj, 

1095 num_cores=1, # self.config.general.num_cores, 

1096 ref_idx=0, 

1097 logger=self.logger) 

1098 

1099 point_obj.phase = phase_ts 

1100 

1101 point_obj.writeToFile()
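
A minimal usage sketch (not part of the covered module), assuming a valid Config instance and an existing output directory with a "pic" subfolder; the names run_all_steps and output_path are hypothetical, and the actual SARvey workflow is normally driven by the sarvey command-line interface rather than by calling Processing directly.

from logging import getLogger

from sarvey.config import Config
from sarvey.processing import Processing


def run_all_steps(config: Config, output_path: str = "outputs"):
    """Chain the processing steps shown above (illustrative sketch only)."""
    logger = getLogger(__name__)
    proc = Processing(path=output_path, config=config, logger=logger)
    proc.runPreparation()                # step 0: ifg network, ifg stack, temporal coherence
    proc.runConsistencyCheck()           # step 1: first-order points, arc network, outlier removal
    proc.runUnwrappingTimeAndSpace()     # step 2: unwrap p1 (alternatively runUnwrappingSpace())
    proc.runFiltering()                  # step 3: atmospheric phase screen estimation
    proc.runDensificationTimeAndSpace()  # step 4: densify to p2 (alternatively runDensificationSpace())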