Coverage for sarvey/sarvey_export.py: 0%
134 statements
« prev ^ index » next coverage.py v7.6.0, created at 2024-10-17 12:36 +0000
« prev ^ index » next coverage.py v7.6.0, created at 2024-10-17 12:36 +0000
1#!/usr/bin/env python
3# SARvey - A multitemporal InSAR time series tool for the derivation of displacements.
4#
5# Copyright (C) 2021-2024 Andreas Piter (IPI Hannover, piter@ipi.uni-hannover.de)
6#
7# This software was developed together with FERN.Lab (fernlab@gfz-potsdam.de) in the context
8# of the SAR4Infra project with funds of the German Federal Ministry for Digital and
9# Transport and contributions from Landesamt fuer Vermessung und Geoinformation
10# Schleswig-Holstein and Landesbetrieb Strassenbau und Verkehr Schleswig-Holstein.
11#
12# This program is free software: you can redistribute it and/or modify it under
13# the terms of the GNU General Public License as published by the Free Software
14# Foundation, either version 3 of the License, or (at your option) any later
15# version.
16#
17# Important: This package uses PyMaxFlow. The core of PyMaxflows library is the C++
18# implementation by Vladimir Kolmogorov. It is also licensed under the GPL, but it REQUIRES that you
19# cite [BOYKOV04] (see LICENSE) in any resulting publication if you use this code for research purposes.
20# This requirement extends to SARvey.
21#
22# This program is distributed in the hope that it will be useful, but WITHOUT
23# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
24# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
25# details.
26#
27# You should have received a copy of the GNU Lesser General Public License along
28# with this program. If not, see <https://www.gnu.org/licenses/>.
30"""Console script for exporting data from SARvey format to GIS formats."""
31import argparse
32import logging
33from logging import Logger
34import sys
35import time
36import warnings
37import os
38from os.path import join, dirname, basename
39import numpy as np
40import pandas as pd
41import geopandas as gpd
42from pyproj import CRS
43from pyproj.aoi import AreaOfInterest
44from pyproj.database import query_utm_crs_info
45from shapely import Point
46from shapely.errors import ShapelyDeprecationWarning
48from sarvey.config import loadConfiguration
49from sarvey.console import showLogoSARvey
50from sarvey.objects import Points
51import sarvey.utils as ut
52from sarvey.geolocation import calculateGeolocationCorrection
55warnings.filterwarnings("ignore", category=ShapelyDeprecationWarning)
def exportDataToGisFormat(*, file_path: str, output_path: str, input_path: str,
                          correct_geolocation: bool = False, no_timeseries: bool = False, logger: Logger):
    """Export data to GIS format (shp or gpkg).

    Reads a SARvey Points file, estimates velocity/DEM error per point, converts the
    phase time series to displacements in [mm], and writes the result as a
    GeoDataFrame in the local UTM CRS.

    Parameters
    ----------
    file_path: str
        Path to the input file.
    output_path: str
        Path for writing output file.
    input_path: str
        Path to slcStack.h5 and geometryRadar.h5.
    correct_geolocation: bool
        Correct geolocation or not
    no_timeseries: bool
        Export time series data or not
    logger: Logger
        Logger handle.
    """
    point_obj = Points(file_path=file_path, logger=logger)
    point_obj.open(input_path=input_path)

    # todo: add corrected height to output
    # todo: add option to mask the output to e.g. linear infrastructures or other AOI

    vel, demerr, _, coherence, omega, _ = ut.estimateParameters(obj=point_obj, ifg_space=False)

    stc = ut.spatiotemporalConsistency(coord_utm=point_obj.coord_utm, phase=point_obj.phase,
                                       wavelength=point_obj.wavelength)

    point_obj.phase *= point_obj.wavelength / (4 * np.pi)  # convert phase from [rad] to [m]

    # extract displacement: remove the DEM-error-induced phase contribution per point
    defo_ts = np.zeros_like(point_obj.phase, dtype=np.float32)
    for i in range(point_obj.num_points):
        phase_topo = (point_obj.ifg_net_obj.pbase / (point_obj.slant_range[i] * np.sin(point_obj.loc_inc[i])) *
                      demerr[i])
        defo_ts[i, :] = point_obj.phase[i, :] - phase_topo

    # transform into millimetres (fix: the previous comment wrongly said "meters")
    defo_ts *= 1000  # in [mm]

    # derive the UTM EPSG code from the bounding box of the point coordinates
    utm_crs_list = query_utm_crs_info(
        datum_name="WGS 84",
        area_of_interest=AreaOfInterest(
            west_lon_degree=point_obj.coord_lalo[:, 1].min(),
            south_lat_degree=point_obj.coord_lalo[:, 0].min(),
            east_lon_degree=point_obj.coord_lalo[:, 1].max(),
            north_lat_degree=point_obj.coord_lalo[:, 0].max(),
        ),
        contains=True
    )
    if len(utm_crs_list) == 0:
        # fix: with contains=True the query can come back empty (e.g. AOI spanning
        # several UTM zones); fail with a clear message instead of an IndexError.
        logger.error(msg="No single UTM zone contains the area of interest.")
        raise ValueError("No single UTM zone contains the area of interest.")
    utm_epsg = utm_crs_list[0].code

    dates = ["D{}".format(date).replace("-", "") for date in point_obj.ifg_net_obj.dates]
    dates = dates[:point_obj.phase.shape[1]]  # remove dates which were not processed

    if no_timeseries:
        df_points = pd.DataFrame({})
    else:
        df_points = pd.DataFrame({date: () for date in dates})

    if correct_geolocation:
        logger.info("Calculate geolocation correction.")
        coord_correction = calculateGeolocationCorrection(path_geom=input_path,
                                                          point_obj=point_obj,
                                                          demerr=demerr,
                                                          logger=logger)
        coord_correction_norm = np.linalg.norm(coord_correction, axis=1)
        max_error_index = np.argmax(coord_correction_norm)
        logger.info(f"Maximum geolocation correction: {coord_correction_norm[max_error_index]:.1f} m "
                    f"corresponding to {demerr[max_error_index]:.1f} m DEM correction")
    else:
        coord_correction = 0
        logger.info("geolocation correction skipped.")

    # fix: use '+' instead of in-place '+=' on the aliased array, so that
    # point_obj.coord_utm is not silently mutated as a side effect of the export.
    coord_utm = point_obj.coord_utm + coord_correction
    df_points['coord'] = coord_utm.tolist()
    df_points['coord'] = df_points['coord'].apply(Point)
    df_points.insert(0, 'point_id', point_obj.point_id.tolist())
    df_points.insert(1, 'velocity', vel * 1000)  # in [mm]
    df_points.insert(2, 'coherence', coherence)
    df_points.insert(3, 'omega', omega)
    df_points.insert(4, 'st_consistency', stc * 1000)  # in [mm]
    df_points.insert(5, 'dem_error', demerr)  # in [m]
    df_points.insert(6, 'dem', point_obj.height)  # in [m]

    # truncate column names; shapefile attribute names are limited to 10 characters
    df_points.columns = [col[:10] for col in df_points.columns]

    if not no_timeseries:
        for i, date in enumerate(dates):
            df_points[date] = defo_ts[:, i]

    gdf_points = gpd.GeoDataFrame(df_points, geometry='coord')
    gdf_points = gdf_points.set_crs(CRS.from_epsg(utm_epsg))
    logger.info(msg="write to file.")
    gdf_points.to_file(output_path)
def createParser():
    """Build and return the argument parser for the sarvey_export console script."""
    epilog_text = """EXAMPLE:
        sarvey_export outputs/p2_coh50_ts.h5 -o outputs/shp/p2_coh50.shp # export time series to shapefile
        sarvey_export outputs/p2_coh50_ts.h5 -o outputs/shp/p2_coh50.gpkg # export time series to geopackage
        sarvey_export outputs/p2_coh90_ts.h5 -o outputs/shp/p2_coh90.shp -g # apply geolocation correction
        sarvey_export outputs/p2_coh90_ts.h5 -o outputs/shp/p2_coh90.shp -g -t # skip time series data
        """
    parser = argparse.ArgumentParser(
        description="Export InSAR time series results from '.h5' to GIS data formats.",
        formatter_class=argparse.RawTextHelpFormatter,
        epilog=epilog_text,
    )

    # positional: input file
    parser.add_argument('file_path', type=str, help='Path to input file.')

    # optional arguments
    parser.add_argument("-o", "--output_path", type=str, dest="output_path", default="",
                        help="Path to output file. If empty, the name of the input file will be used.")
    parser.add_argument("-l", "--log_dir", type=str, required=False, metavar="FILE", dest="log_dir",
                        default="logfiles/", help="Logfile directory (default: 'logfiles/')")
    parser.add_argument('-w', '--workdir', default=None, dest="workdir",
                        help='Working directory (default: current directory).')

    # boolean flags
    parser.add_argument('-g', '--correct_geo', default=False, action="store_true", dest="correct_geolocation",
                        help='Correct Geolocation (default: False).')
    parser.add_argument('-t', '--no-time-series', default=False, action="store_true", dest="no_timeseries",
                        help='Do not export time series (default: False).')

    return parser
def main(iargs=None):
    """Run the sarvey_export console script.

    Parses command line arguments, sets up console and file logging, locates the
    configuration file belonging to the input data, derives the output path/format
    and delegates the export to exportDataToGisFormat.

    :param iargs: list of command line arguments (default: None, i.e. use sys.argv).
    """
    parser = createParser()
    args = parser.parse_args(iargs)

    # set up logging: console handler first, file handler after log dir is known
    log_format = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s')
    logger = logging.getLogger(__name__)

    console_handler = logging.StreamHandler(sys.stdout)
    console_handler.setFormatter(log_format)
    logger.addHandler(console_handler)
    logging_level = logging.getLevelName("INFO")
    logger.setLevel(logging_level)

    if args.workdir is None:
        args.workdir = os.path.abspath(os.path.curdir)
    else:
        logger.info(msg="Working directory: {}".format(args.workdir))

    args.log_dir = join(args.workdir, args.log_dir)
    current_datetime = time.strftime("%Y-%m-%d-%H-%M-%S", time.localtime())
    log_filename = f"sarvey_export_{current_datetime}.log"

    if not os.path.exists(args.log_dir):
        os.mkdir(args.log_dir)
    file_handler = logging.FileHandler(filename=join(args.log_dir, log_filename))
    file_handler.setFormatter(log_format)
    logger.addHandler(file_handler)

    showLogoSARvey(logger=logger, step="Export results")

    # read config file to retrieve location of inputs
    config_file_path = os.path.abspath(join(args.workdir, dirname(args.file_path), "config.json"))

    if not os.path.exists(config_file_path):
        # check if any config file is available in upper directory (backward compatibility)
        parent_dir = join(dirname(config_file_path), "..")
        # fix: os.listdir() returns bare names; resolve them against parent_dir instead of
        # the current working directory, otherwise the isfile() check (and abspath) are
        # evaluated against cwd and the fallback search can never find the config files.
        files = np.array([os.path.abspath(join(parent_dir, f)) for f in os.listdir(parent_dir)
                          if os.path.isfile(join(parent_dir, f))])
        potential_configs = np.array([(basename(f).split(".")[-1] == "json") and ("config" in basename(f))
                                      for f in files])
        # fix: any() also works on an empty array, whereas boolean indexing of an empty
        # (float-dtype) np.array([]) raises an IndexError before the intended error message.
        if not potential_configs.any():
            raise FileNotFoundError(f"Backup configuration file not found: {config_file_path}!")
        else:
            logger.warning(msg=f"Backup configuration file not found: {config_file_path}!")
            logger.warning(msg=f"Other configuration files automatically detected: {files[potential_configs]}!")
            logger.warning(msg=f"Automatically selected configuration file: {files[potential_configs][0]}!")
            config_file_path = files[potential_configs][0]

    config = loadConfiguration(path=config_file_path)

    # create output directory and derive output path/format from the arguments
    if args.output_path == "":
        # no output path given: reuse the input file name, default to shapefile format
        output_dir = args.workdir
        output_fname = basename(args.file_path).split(".")[-2]
        output_format = "shp"
        args.output_path = join(output_dir, output_fname + "." + output_format)
    else:
        output_dir = join(args.workdir, dirname(args.output_path))
        output_fname = basename(args.output_path)
        name_splitted = output_fname.split(".")
        if len(name_splitted) == 1:
            args.output_path = join(output_dir, output_fname + ".shp")  # use shapefile as default format
        elif len(name_splitted) == 2:
            output_format = name_splitted[-1]  # use specified format
            if (output_format != "shp") and (output_format != "gpkg"):
                logger.error(msg=f"Output format not supported: {output_format}!")
                raise ValueError
            logger.info(msg=f"Detected output format: {output_format}.")
            args.output_path = join(output_dir, output_fname)
        else:
            logger.error(msg=f"Output format was not recognized! {output_fname}")
            raise ValueError

    logger.info(msg=f"Output file: {args.output_path}")

    # specify geolocation status
    logger.info(msg=f"Correct geolocation: {args.correct_geolocation}")

    # specify time series flag
    logger.info(msg=f"Export time series data: {not args.no_timeseries}")

    if not os.path.exists(output_dir):
        os.mkdir(output_dir)

    exportDataToGisFormat(file_path=args.file_path, output_path=args.output_path,
                          input_path=config.general.input_path,
                          correct_geolocation=args.correct_geolocation, no_timeseries=args.no_timeseries,
                          logger=logger)
# script entry point: run main() with sys.argv when executed directly
if __name__ == '__main__':
    main()