Coverage for sarvey/densification.py: 96%

82 statements  

coverage.py v7.6.0, created at 2024-10-17 12:36 +0000

#!/usr/bin/env python

# SARvey - A multitemporal InSAR time series tool for the derivation of displacements.
#
# Copyright (C) 2021-2024 Andreas Piter (IPI Hannover, piter@ipi.uni-hannover.de)
#
# This software was developed together with FERN.Lab (fernlab@gfz-potsdam.de) in the context
# of the SAR4Infra project with funds of the German Federal Ministry for Digital and
# Transport and contributions from Landesamt fuer Vermessung und Geoinformation
# Schleswig-Holstein and Landesbetrieb Strassenbau und Verkehr Schleswig-Holstein.
#
# This program is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# Important: This package uses PyMaxFlow. The core of the PyMaxflow library is the C++
# implementation by Vladimir Kolmogorov. It is also licensed under the GPL, but it REQUIRES that you
# cite [BOYKOV04] (see LICENSE) in any resulting publication if you use this code for research purposes.
# This requirement extends to SARvey.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License along
# with this program. If not, see <https://www.gnu.org/licenses/>.

"""Densification module for SARvey."""
import time
import multiprocessing
import numpy as np
from scipy.spatial import KDTree
from logging import Logger

from mintpy.utils import ptime

from sarvey.unwrapping import oneDimSearchTemporalCoherence
from sarvey.objects import Points
import sarvey.utils as ut


def densificationInitializer(tree_p1: KDTree, point2_obj: Points, demod_phase1: np.ndarray):
    """DensificationInitializer.

    Sets values to global variables for parallel processing.

    Parameters
    ----------
    tree_p1 : KDTree
        KDTree of the first-order network
    point2_obj : Points
        Points object with second-order points
    demod_phase1 : np.ndarray
        demodulated phase of the first-order network
    """

    global global_tree_p1
    global global_point2_obj
    global global_demod_phase1

    global_tree_p1 = tree_p1
    global_point2_obj = point2_obj
    global_demod_phase1 = demod_phase1


def launchDensifyNetworkConsistencyCheck(args: tuple):
    """LaunchDensifyNetworkConsistencyCheck.

    Launches the densification of the network with second-order points inside parallel processing.

    Parameters
    ----------
    args : tuple
        Tuple with the following parameters:

        idx_range : np.ndarray
            Array with the indices of the second-order points
        num_points : int
            Number of second-order points
        num_conn_p1 : int
            Number of nearest points in the first-order network
        max_dist_p1 : float
            Maximum allowed distance to the nearest points in the first-order network
        velocity_bound : float
            Bound for the velocity estimate in temporal unwrapping
        demerr_bound : float
            Bound for the DEM error estimate in temporal unwrapping
        num_samples : int
            Number of samples for the search of the optimal parameters

    Returns
    -------
    idx_range : np.ndarray
        Array with the indices of the second-order points
    demerr_p2 : np.ndarray
        DEM error array of the second-order points
    vel_p2 : np.ndarray
        Velocity array of the second-order points
    gamma_p2 : np.ndarray
        Estimated temporal coherence array of the second-order points resulting from temporal unwrapping
    """

    (idx_range, num_points, num_conn_p1, max_dist_p1, velocity_bound, demerr_bound, num_samples) = args

    counter = 0
    prog_bar = ptime.progressBar(maxValue=num_points)

    # initialize output
    demerr_p2 = np.zeros((num_points,), dtype=np.float32)
    vel_p2 = np.zeros((num_points,), dtype=np.float32)
    gamma_p2 = np.zeros((num_points,), dtype=np.float32)

    design_mat = np.zeros((global_point2_obj.ifg_net_obj.num_ifgs, 2), dtype=np.float32)

    demerr_range = np.linspace(-demerr_bound, demerr_bound, num_samples)
    vel_range = np.linspace(-velocity_bound, velocity_bound, num_samples)

    factor = 4 * np.pi / global_point2_obj.wavelength
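    # demerr_range and vel_range hold the candidate parameter values evaluated in the coherence
    # search; factor converts a line-of-sight range change in metres to interferometric phase in
    # radians (two-way path, hence 4 * pi / wavelength).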

    for idx in range(num_points):
        p2 = idx_range[idx]
        # nearest points in p1
        dist, nearest_p1 = global_tree_p1.query([global_point2_obj.coord_utm[p2, 0],
                                                 global_point2_obj.coord_utm[p2, 1]], k=num_conn_p1)
        mask = (dist < max_dist_p1) & (dist != 0)
        mask[:3] = True  # ensure that at least the three closest points are always used
        nearest_p1 = nearest_p1[mask]

        # compute arc observations to nearest points
        arc_phase_p1 = np.angle(np.exp(1j * global_point2_obj.phase[p2, :]) *
                                np.conjugate(np.exp(1j * global_demod_phase1[nearest_p1, :])))
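        # each row of arc_phase_p1 is the wrapped phase of one arc (second-order point minus
        # demodulated first-order neighbour) over all interferograms; the arcs are modelled as
        # design_mat @ [dem_error, velocity] with the design matrix built below.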

        design_mat[:, 0] = (factor * global_point2_obj.ifg_net_obj.pbase_ifg
                            / (global_point2_obj.slant_range[p2] * np.sin(global_point2_obj.loc_inc[p2])))
        design_mat[:, 1] = factor * global_point2_obj.ifg_net_obj.tbase_ifg

        demerr_p2[idx], vel_p2[idx], gamma_p2[idx] = oneDimSearchTemporalCoherence(
            demerr_range=demerr_range,
            vel_range=vel_range,
            obs_phase=arc_phase_p1,
            design_mat=design_mat
        )

        prog_bar.update(counter + 1, every=np.int16(200),
                        suffix='{}/{} points'.format(counter + 1, num_points))
        counter += 1

    return idx_range, demerr_p2, vel_p2, gamma_p2


def densifyNetwork(*, point1_obj: Points, vel_p1: np.ndarray, demerr_p1: np.ndarray, point2_obj: Points,
                   num_conn_p1: int, max_dist_p1: float, velocity_bound: float, demerr_bound: float,
                   num_samples: int, num_cores: int = 1, logger: Logger):
154 """DensifyNetwork. 

155 

156 Densifies the network with second-order points by connecting the second-order points to the closest points in the 

157 first-order network. 

158 

159 Parameters 

160 ---------- 

161 point1_obj : Points 

162 Points object with first-order points 

163 vel_p1 : np.ndarray 

164 Velocity array of the first-order points 

165 demerr_p1 : np.ndarray 

166 DEM error array of the first-order points 

167 point2_obj : Points 

168 Points object with second-order points 

169 num_conn_p1 : int 

170 Number of nearest points in the first-order network 

171 max_dist_p1 : float 

172 Maximum allowed distance to the nearest points in the first-order network 

173 velocity_bound : float 

174 Bound for the velocity estimate in temporal unwrapping 

175 demerr_bound : float 

176 Bound for the DEM error estimate in temporal unwrapping 

177 num_samples : int 

178 Number of samples for the search of the optimal parameters 

179 num_cores : int 

180 Number of cores for parallel processing (default: 1) 

181 logger : Logger 

182 Logger object 

183 

184 Returns 

185 ------- 

186 demerr_p2 : np.ndarray 

187 DEM error array of the second-order points 

188 vel_p2 : np.ndarray 

189 Velocity array of the second-order points 

190 gamma_p2 : np.ndarray 

191 Estimated temporal coherence array of the second-order points resulting from temporal unwrapping 

192 """ 

    msg = "#" * 10
    msg += " DENSIFICATION WITH SECOND-ORDER POINTS "
    msg += "#" * 10
    logger.info(msg=msg)
    start_time = time.time()

    # find the closest points from first-order network
    tree_p1 = KDTree(data=point1_obj.coord_utm)

    # remove parameters from wrapped phase
    pred_phase_demerr, pred_phase_vel = ut.predictPhase(
        obj=point1_obj,
        vel=vel_p1, demerr=demerr_p1,
        ifg_space=True, logger=logger
    )
    pred_phase = pred_phase_demerr + pred_phase_vel
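    # pred_phase is the modelled first-order phase (DEM error plus velocity contribution) in
    # interferogram space; subtracting it below leaves only the residual phase at the first-order points.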

    # Note: for small baselines it makes no difference whether the phase difference is re-wrapped or
    # not. However, for long baselines (like in the star network) it does make a difference. Leijen
    # (2014) does not re-wrap the arc double differences so that the ambiguities can be tested.
    # Kampes (2006) does re-wrap, but tests based on the estimated parameters, so it makes no
    # difference in his case. Not re-wrapping can be a starting point for triangle-based temporal
    # unwrapping.
    # demod_phase1 = np.angle(np.exp(1j * point1_obj.phase) * np.conjugate(np.exp(1j * pred_phase)))  # re-wrapping
    demod_phase1 = point1_obj.phase - pred_phase  # not re-wrapping

    # arguments for densificationInitializer, used to set up each worker process
    init_args = (tree_p1, point2_obj, demod_phase1)

    if num_cores == 1:
        densificationInitializer(tree_p1=tree_p1, point2_obj=point2_obj, demod_phase1=demod_phase1)
        args = (np.arange(point2_obj.num_points), point2_obj.num_points, num_conn_p1, max_dist_p1,
                velocity_bound, demerr_bound, num_samples)
        idx_range, demerr_p2, vel_p2, gamma_p2 = launchDensifyNetworkConsistencyCheck(args)
    else:
        with multiprocessing.Pool(num_cores, initializer=densificationInitializer, initargs=init_args) as pool:
            logger.info(msg="start parallel processing with {} cores.".format(num_cores))
            num_cores = point2_obj.num_points if num_cores > point2_obj.num_points else num_cores
            # avoids having fewer samples than cores
            idx = ut.splitDatasetForParallelProcessing(num_samples=point2_obj.num_points, num_cores=num_cores)
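            # idx now holds one batch of second-order point indices per core; one argument tuple
            # is built per batch below.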

            args = [(
                idx_range,
                idx_range.shape[0],
                num_conn_p1,
                max_dist_p1,
                velocity_bound,
                demerr_bound,
                num_samples
            ) for idx_range in idx]

            results = pool.map_async(launchDensifyNetworkConsistencyCheck, args, chunksize=1)
            while True:
                time.sleep(5)
                if results.ready():
                    results = results.get()
                    break
        # explicitly closing and joining the pool is needed to make coverage work with
        # multiprocessing (adopted from the arosics package).
        pool.close()
        pool.join()

        demerr_p2 = np.zeros((point2_obj.num_points,), dtype=np.float32)
        vel_p2 = np.zeros((point2_obj.num_points,), dtype=np.float32)
        gamma_p2 = np.zeros((point2_obj.num_points,), dtype=np.float32)

        # retrieve results
        for i, demerr_i, vel_i, gamma_i in results:
            demerr_p2[i] = demerr_i
            vel_p2[i] = vel_i
            gamma_p2[i] = gamma_i

    m, s = divmod(time.time() - start_time, 60)
    logger.debug(msg='time used: {:02.0f} mins {:02.1f} secs.\n'.format(m, s))

    # combine p1 and p2 parameters and bring them in correct order using point_id
    sort_idx = np.argsort(np.append(point1_obj.point_id, point2_obj.point_id))
    demerr_p2 = np.append(demerr_p1, demerr_p2)
    vel_p2 = np.append(vel_p1, vel_p2)
    gamma_p2 = np.append(np.ones_like(point1_obj.point_id), gamma_p2)  # add gamma=1 for p1 pixels

    demerr_p2 = demerr_p2[sort_idx]
    vel_p2 = vel_p2[sort_idx]
    gamma_p2 = gamma_p2[sort_idx]
    return demerr_p2, vel_p2, gamma_p2
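
# Usage sketch (illustrative only, not part of the module): densifyNetwork is typically called after
# the first-order network has been unwrapped and its parameters estimated. Assuming Points objects
# `p1` and `p2` and first-order estimates `vel_p1` / `demerr_p1` are available, a call could look like:
#
#     demerr, vel, gamma = densifyNetwork(
#         point1_obj=p1, vel_p1=vel_p1, demerr_p1=demerr_p1, point2_obj=p2,
#         num_conn_p1=10, max_dist_p1=500.0, velocity_bound=0.1, demerr_bound=100.0,
#         num_samples=100, num_cores=4, logger=logger,
#     )
#
# The numeric values are placeholders; suitable bounds, distances and sample counts depend on the dataset.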