Coverage for sarvey/sarvey_mti.py: 65%
143 statements
« prev ^ index » next coverage.py v7.6.0, created at 2024-10-17 12:36 +0000
1#!/usr/bin/env python
3# SARvey - A multitemporal InSAR time series tool for the derivation of displacements.
4#
5# Copyright (C) 2021-2024 Andreas Piter (IPI Hannover, piter@ipi.uni-hannover.de)
6#
7# This software was developed together with FERN.Lab (fernlab@gfz-potsdam.de) in the context
8# of the SAR4Infra project with funds of the German Federal Ministry for Digital and
9# Transport and contributions from Landesamt fuer Vermessung und Geoinformation
10# Schleswig-Holstein and Landesbetrieb Strassenbau und Verkehr Schleswig-Holstein.
11#
12# This program is free software: you can redistribute it and/or modify it under
13# the terms of the GNU General Public License as published by the Free Software
14# Foundation, either version 3 of the License, or (at your option) any later
15# version.
16#
17# Important: This package uses PyMaxFlow. The core of PyMaxflows library is the C++
18# implementation by Vladimir Kolmogorov. It is also licensed under the GPL, but it REQUIRES that you
19# cite [BOYKOV04] (see LICENSE) in any resulting publication if you use this code for research purposes.
20# This requirement extends to SARvey.
21#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
26#
# You should have received a copy of the GNU General Public License along
# with this program. If not, see <https://www.gnu.org/licenses/>.
30"""MTI module for SARvey."""
32import argparse
33import os
34import shutil
35from os.path import join
37import json5
38import matplotlib
39import sys
40import logging
41import time
42from logging import Logger
43from pydantic.schema import schema
45from sarvey.console import printStep, printCurrentConfig, showLogoSARvey
46from sarvey.processing import Processing
47from sarvey.config import Config, loadConfiguration
48from sarvey.utils import checkIfRequiredFilesExist
# Select the interactive Qt backend for matplotlib figures. On systems without
# a Qt binding (e.g. headless servers) the import fails; the error is printed
# and matplotlib keeps its default backend so processing can still run.
try:
    matplotlib.use('QtAgg')
except ImportError as e:
    print(e)
# Usage examples rendered verbatim in the argparse epilog (RawTextHelpFormatter).
EXAMPLE = """Example:
    sarvey -f config.json 0 0 -g       # create default config file with the name config.json and exit
    sarvey -f config.json 0 0          # run only preparation step
    sarvey -f config.json 0 4          # run all processing steps

    sarvey -f config.json 0 0 -p       # print explanation of the configuration parameters to console
"""

# Mapping from the CLI step index ('start'/'stop' arguments) to the
# human-readable step name printed by printStep().
STEP_DICT = {
    0: "PREPARATION",
    1: "CONSISTENCY CHECK",
    2: "UNWRAPPING",
    3: "FILTERING",
    4: "DENSIFICATION",
}
def run(*, config: Config, args: argparse.Namespace, logger: Logger):
    """Run the specified processing steps.

    Executes the workflow steps selected by ``args.start`` .. ``args.stop``
    (inclusive). Before each step (except step 0) the files produced by the
    preceding steps are checked for existence, so individual steps can be
    re-run on an existing output directory.

    Parameters
    ----------
    config: Config
        object of configuration class.
    args: argparse.Namespace
        command line input arguments
    logger: Logger
        Logging handler.
    """
    showLogoSARvey(logger=logger, step="MTInSAR")

    # inclusive range of step indices to execute
    steps = range(args.start, args.stop + 1)

    # defaults are used by printCurrentConfig to highlight deviating settings
    config_default_dict = generateTemplateFromConfigModel()

    proc_obj = Processing(path=config.general.output_path, config=config, logger=logger)

    printCurrentConfig(config_section=config.general.dict(),
                       config_section_default=config_default_dict["general"],
                       logger=logger)

    if config.phase_linking.use_phase_linking_results:
        printCurrentConfig(config_section=config.phase_linking.dict(),
                           config_section_default=config_default_dict["phase_linking"],
                           logger=logger)

    if 0 in steps:
        printStep(step=0, step_dict=STEP_DICT, logger=logger)
        printCurrentConfig(config_section=config.preparation.dict(),
                           config_section_default=config_default_dict["preparation"],
                           logger=logger)
        proc_obj.runPreparation()
    # products of step 0; the list is extended cumulatively after each step so
    # that every later step can verify all prerequisites at once
    required_files = ["background_map.h5", "coordinates_utm.h5", "ifg_network.h5", "ifg_stack.h5",
                      "temporal_coherence.h5"]

    if 1 in steps:
        checkIfRequiredFilesExist(
            path_to_files=config.general.output_path,
            required_files=required_files,
            logger=logger
        )
        printStep(step=1, step_dict=STEP_DICT, logger=logger)
        printCurrentConfig(config_section=config.consistency_check.dict(),
                           config_section_default=config_default_dict["consistency_check"],
                           logger=logger)
        proc_obj.runConsistencyCheck()
    required_files.extend(["point_network.h5", "point_network_parameter.h5", "p1_ifg_wr.h5"])

    if 2 in steps:
        checkIfRequiredFilesExist(
            path_to_files=config.general.output_path,
            required_files=required_files,
            logger=logger
        )
        printStep(step=2, step_dict=STEP_DICT, logger=logger)
        printCurrentConfig(config_section=config.unwrapping.dict(),
                           config_section_default=config_default_dict["unwrapping"],
                           logger=logger)
        # unwrap either jointly in time and space or in space only,
        # depending on the user's choice in the general section
        if proc_obj.config.general.apply_temporal_unwrapping:
            proc_obj.runUnwrappingTimeAndSpace()
        else:
            proc_obj.runUnwrappingSpace()
    required_files.extend(["p1_ifg_unw.h5", "p1_ts.h5"])

    if 3 in steps:
        checkIfRequiredFilesExist(
            path_to_files=config.general.output_path,
            required_files=required_files,
            logger=logger
        )
        printStep(step=3, step_dict=STEP_DICT, logger=logger)
        printCurrentConfig(config_section=config.filtering.dict(),
                           config_section_default=config_default_dict["filtering"],
                           logger=logger)
        proc_obj.runFiltering()
    # step-3 product file names encode the second-order coherence threshold in percent
    coh_value = int(config.filtering.coherence_p2 * 100)
    required_files.extend(["p1_aps.h5", f"p2_coh{coh_value}_ifg_wr.h5", f"p2_coh{coh_value}_aps.h5"])

    if 4 in steps:
        checkIfRequiredFilesExist(
            path_to_files=config.general.output_path,
            required_files=required_files,
            logger=logger
        )
        printStep(step=4, step_dict=STEP_DICT, logger=logger)
        printCurrentConfig(config_section=config.densification.dict(),
                           config_section_default=config_default_dict["densification"],
                           logger=logger)
        if proc_obj.config.general.apply_temporal_unwrapping:
            proc_obj.runDensificationTimeAndSpace()
        else:
            proc_obj.runDensificationSpace()

    logger.info(msg="SARvey MTI finished normally.")
    # close log-file to avoid problems with deleting the files
    if logger.hasHandlers():
        for handler in logger.handlers[:]:
            logger.removeHandler(handler)
            handler.flush()
            handler.close()
def generateTemplateFromConfigModel():
    """Derive a default-configuration dictionary from the pydantic Config model.

    Walks the JSON schema of ``Config`` and collects, for every configuration
    section, a mapping of parameter name to its default value (``None`` when
    the model declares no default). Section keys are the attribute names used
    in ``Config``, not the section class names.

    Returns
    -------
    dict
        Nested dictionary ``{section_name: {parameter: default}}``.
    """
    template = dict()
    for section_name, section_schema in schema([Config])['definitions'].items():
        section_props = section_schema["properties"]
        if section_name == "Config":
            # Re-key the sections collected so far from their class titles to
            # the attribute names declared in Config; the "Config" entry
            # itself is not added to the template.
            for attr_name, attr_schema in section_props.items():
                template[attr_name] = template.pop(attr_schema["title"])
        else:
            template[section_name] = {
                param_name: param_schema.get("default")
                for param_name, param_schema in section_props.items()
            }

    return template
def createParser():
    """Create the command line argument parser for the MTInSAR workflow.

    Returns
    -------
    argparse.ArgumentParser
        Parser with positional 'start'/'stop' processing-step indices and
        options for the configuration file handling.
    """
    # NOTE(review): the description says "spatial unwrapping" for step 2, but
    # run() may perform temporal+spatial unwrapping depending on
    # general.apply_temporal_unwrapping — consider rewording to just "unwrapping".
    parser = argparse.ArgumentParser(
        description='Multitemporal InSAR processing workflow\n\n' +
                    'Run the following steps:\n' +
                    '0 - preparation\n' +
                    '1 - consistency check\n' +
                    '2 - spatial unwrapping\n' +
                    '3 - filtering\n' +
                    '4 - densification',
        formatter_class=argparse.RawTextHelpFormatter,
        epilog=EXAMPLE)

    # use an ordered list (not a set) so the choices appear deterministically
    # in help and error messages
    parser.add_argument('start', choices=[0, 1, 2, 3, 4], type=int,
                        help='Start of processing')

    parser.add_argument('stop', choices=[0, 1, 2, 3, 4], type=int,
                        help='Stop of processing')

    parser.add_argument("-f", "--filepath", type=str, required=True, metavar="FILE",
                        help="Path to the config.json file.")

    parser.add_argument("-g", "--generate_config", action="store_true", default=False, dest="generate_config",
                        help="Write default configuration to file specified by '-f'.")

    parser.add_argument("-p", "--print_config_explanation", action="store_true", default=False,
                        dest="print_config_explanation",
                        help="Prints exhaustive explanations about configuration to console.")

    parser.add_argument('-w', '--workdir', default=None, dest="workdir",
                        help='Working directory (default: current directory).')

    return parser
def main(iargs=None):
    """Run the SARvey MTI command line entry point.

    Parses arguments, sets up console and file logging, optionally writes or
    explains the default configuration, resolves all configured paths relative
    to the working directory, prepares the output directories and finally
    dispatches to :func:`run`.

    :param iargs: command line arguments; ``None`` uses ``sys.argv[1:]``.
    """
    parser = createParser()
    args = parser.parse_args(iargs)

    # initiate logger
    logging_level = logging.getLevelName('DEBUG')  # set a default value before until level is read from config

    log_format = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s')
    logger = logging.getLogger(__name__)

    console_handler = logging.StreamHandler(sys.stdout)
    console_handler.setFormatter(log_format)
    logger.addHandler(console_handler)
    logger.setLevel(logging_level)

    # '-g': write the default configuration template and exit without processing
    if args.generate_config:
        logger.info(msg=f"Write default config to file: {args.filepath}.")
        default_config_dict = generateTemplateFromConfigModel()
        with open(args.filepath, "w") as f:
            f.write(json5.dumps(default_config_dict, indent=4))
        return 0

    # '-p': dump the full pydantic schema (parameter explanations) and exit
    if args.print_config_explanation:
        top_level_schema = schema([Config])
        print(json5.dumps(top_level_schema, indent=2))
        return 0

    if args.stop < args.start:
        logger.error(msg="Choose Start <= Stop!")
        raise ValueError

    if args.workdir is None:
        args.workdir = os.path.abspath(os.path.curdir)
    else:
        logger.info(msg="Working directory: {}".format(args.workdir))

    config_file_path = os.path.abspath(join(args.workdir, args.filepath))

    config = loadConfiguration(path=config_file_path)

    # add a timestamped file handler so every invocation gets its own log file
    current_datetime = time.strftime("%Y-%m-%d-%H-%M-%S", time.localtime())
    log_filename = f"sarvey_{current_datetime}.log"
    logpath = config.general.logfile_path
    if not os.path.exists(logpath):
        os.mkdir(logpath)
    file_handler = logging.FileHandler(filename=join(logpath, log_filename))
    file_handler.setFormatter(log_format)
    file_logging_level = logging.getLevelName("DEBUG")
    file_handler.setLevel(file_logging_level)
    logger.addHandler(file_handler)

    # the file handler always logs DEBUG; only the console level follows the config
    logging_level = logging.getLevelName(config.general.logging_level)
    console_handler.setLevel(logging_level)

    # resolve all configured paths relative to the working directory
    config.general.output_path = os.path.abspath(join(args.workdir, config.general.output_path))
    if config.consistency_check.mask_p1_file is not None:
        config.consistency_check.mask_p1_file = os.path.abspath(
            join(args.workdir, config.consistency_check.mask_p1_file))
    if config.filtering.mask_p2_file is not None:
        config.filtering.mask_p2_file = os.path.abspath(
            join(args.workdir, config.filtering.mask_p2_file))

    # create all necessary directories
    if not os.path.exists(config.general.output_path):
        os.mkdir(config.general.output_path)
    if not os.path.exists(join(config.general.output_path, "pic")):
        os.mkdir(join(config.general.output_path, "pic"))

    # copy config file to output directory to ensure that there is always a backup config file with latest parameters
    shutil.copy2(src=config_file_path, dst=join(config.general.output_path, "config.json"))

    run(config=config, args=args, logger=logger)
# Script entry point: run the CLI when executed directly.
if __name__ == '__main__':
    main()