diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index af6dc3d..80d7ef7 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -12,7 +12,5 @@ jobs: permissions: {} with: release_prefix: HyP3 mintpy - release_branch: main - develop_branch: develop secrets: - USER_TOKEN: ${{ secrets.USERNAME_FOR_GITHUB_ACTIONS_PAK }} + USER_TOKEN: ${{ secrets.TOOLS_BOT_PAK }} diff --git a/.github/workflows/tag-version.yml b/.github/workflows/tag-version.yml index 7fa52c0..e03a1e6 100644 --- a/.github/workflows/tag-version.yml +++ b/.github/workflows/tag-version.yml @@ -11,8 +11,5 @@ jobs: # https://github.com/ASFHyP3/actions#reusable-bump-versionyml uses: ASFHyP3/actions/.github/workflows/reusable-bump-version.yml@v0.20.0 permissions: {} - with: - user: username_for_github_actions - email: email_for@github_actions.com secrets: - USER_TOKEN: ${{ secrets.USERNAME_FOR_GITHUB_ACTIONS_PAK }} + USER_TOKEN: ${{ secrets.TOOLS_BOT_PAK }} diff --git a/CHANGELOG.md b/CHANGELOG.md index 6ebfd12..1e9828a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,6 +6,11 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project adheres to [PEP 440](https://www.python.org/dev/peps/pep-0440/) and uses [Semantic Versioning](https://semver.org/spec/v2.0.0.html). +## [1.0.0] + +### Added +- Initial workflow to run MintPy with basic settings. + ## [0.1.0] ### Added diff --git a/README.md b/README.md index ec03d25..1a5fed4 100644 --- a/README.md +++ b/README.md @@ -1,3 +1,56 @@ -# HyP3 mintpy +# HyP3 MintPy HyP3 plugin for MintPy processing. 
+ +## Usage +The `hyp3_mintpy` command line tool can be run using the following structure: +```bash +python -m hyp3_mintpy \ + --job-name Okmok_44 \ + --min-coherence 0.1 +``` +Where: + +* `--job-name` is the multiburst project name in HyP3 +* `--min-coherence` is the minimum coherence for the timeseries inversion + +> [!IMPORTANT] +> Earthdata credentials are necessary to access HyP3 data. See the Credentials section for more information. + +### Credentials + +Generally, credentials are provided via environment variables, but some may be provided by command-line arguments or via a `.netrc` file. + +For Earthdata login, you can provide credentials by exporting environment variables: +``` +export EARTHDATA_USERNAME=your-edl-username +export EARTHDATA_PASSWORD=your-edl-password +``` +or via your [`~/.netrc` file](https://everything.curl.dev/usingcurl/netrc) which should contain lines like these two: +``` +machine urs.earthdata.nasa.gov login your-edl-username password your-edl-password +``` + +## Developer Setup +1. Ensure that conda is installed on your system (we recommend using [mambaforge](https://github.com/conda-forge/miniforge#mambaforge) to reduce setup times). +2. Download a local version of the `hyp3-mintpy` repository (`git clone https://github.com/ASFHyP3/hyp3-mintpy.git`) +3. In the base directory for this project call `mamba env create -f environment.yml` to create your Python environment, then activate it (`mamba activate hyp3-mintpy`) +4. Finally, install a development version of the package (`python -m pip install -e .`) + +To run all commands in sequence use: +```bash +git clone https://github.com/ASFHyP3/hyp3-mintpy.git +cd hyp3-mintpy +mamba env create -f environment.yml +mamba activate hyp3-mintpy +python -m pip install -e . +``` + +## Contributing +Contributions to the HyP3 mintpy plugin are welcome! If you would like to contribute, please submit a pull request on the GitHub repository. + +## Contact Us +Want to talk about HyP3 mintpy? 
We would love to hear from you! + +Found a bug? Want to request a feature? +[open an issue](https://github.com/ASFHyP3/hyp3-mintpy/issues/new) diff --git a/environment.yml b/environment.yml index d9ea44a..242f6db 100644 --- a/environment.yml +++ b/environment.yml @@ -11,7 +11,5 @@ dependencies: - pytest-console-scripts - pytest-cov # For running + - geopandas - hyp3lib>=3,<4 + - hyp3_sdk + - mintpy + - rasterio # TODO: insert conda-forge dependencies as list here - pip: - -r requirements-static.txt diff --git a/pyproject.toml b/pyproject.toml index d9748e9..8d012f8 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -23,7 +23,7 @@ classifiers=[ "Programming Language :: Python :: 3.13", ] dependencies = [ - "hyp3lib>=3,<4", + "hyp3lib>=3,<5", # TODO: insert Python dependencies as list here ] dynamic = ["version"] diff --git a/requirements-static.txt b/requirements-static.txt index 47e0ccd..98faa83 100644 --- a/requirements-static.txt +++ b/requirements-static.txt @@ -1,2 +1,3 @@ -ruff==0.13.2 +ruff==0.14.2 mypy==1.18.2 +opensarlab_lib diff --git a/src/hyp3_mintpy/__main__.py b/src/hyp3_mintpy/__main__.py index 2d94848..01d6a41 100644 --- a/src/hyp3_mintpy/__main__.py +++ b/src/hyp3_mintpy/__main__.py @@ -1,9 +1,13 @@ """mintpy processing for HyP3.""" import logging +import os +import warnings from argparse import ArgumentParser +from pathlib import Path from hyp3lib.aws import upload_file_to_s3 +from hyp3lib.fetch import write_credentials_to_netrc_file from hyp3_mintpy.process import process_mintpy @@ -15,7 +19,10 @@ def main() -> None: parser.add_argument('--bucket-prefix', default='', help='Add a bucket prefix to product(s)') # TODO: Your arguments here - parser.add_argument('--greeting', default='Hello world!', help='Write this greeting to a product file') + parser.add_argument('--job-name', help='The name of the HyP3 job', required=True) + parser.add_argument( + '--min-coherence', default=0.01, type=float, help='The minimum coherence to process', 
required=False + ) args = parser.parse_args() @@ -23,9 +30,18 @@ def main() -> None: format='%(asctime)s - %(levelname)s - %(message)s', datefmt='%m/%d/%Y %I:%M:%S %p', level=logging.INFO ) - product_file = process_mintpy( - greeting=args.greeting, - ) + username = os.getenv('EARTHDATA_USERNAME') + password = os.getenv('EARTHDATA_PASSWORD') + if username and password: + write_credentials_to_netrc_file(username, password, append=False) + + if not (Path.home() / '.netrc').exists(): + warnings.warn( + 'Earthdata credentials must be present as environment variables, or in your netrc.', + UserWarning, + ) + + product_file = process_mintpy(job_name=args.job_name, min_coherence=args.min_coherence) if args.bucket: upload_file_to_s3(product_file, args.bucket, args.bucket_prefix) diff --git a/src/hyp3_mintpy/process.py b/src/hyp3_mintpy/process.py index 4bde930..ba2a08a 100644 --- a/src/hyp3_mintpy/process.py +++ b/src/hyp3_mintpy/process.py @@ -1,19 +1,258 @@ """mintpy processing.""" import logging +import os +import shutil +import subprocess from pathlib import Path +import geopandas as gpd +import hyp3_sdk as sdk +import opensarlab_lib as osl +import shapely.wkt +from osgeo import gdal +from tqdm.auto import tqdm + +import hyp3_mintpy +from hyp3_mintpy import util + log = logging.getLogger(__name__) -def process_mintpy(greeting: str = 'Hello world!') -> Path: +def rename_products(folder: str) -> None: + """Rename downloaded products to make them compatible with MintPy. + + Args: + folder: Path for the folder that has the downloaded products. 
+ """ + cwd = Path.cwd() + os.chdir(folder) + folders = list(Path('./').glob('*')) + folders = [fol for fol in folders if Path(fol).is_dir()] + for fol in folders: + new = True + if str(fol).count('_') > 7: + new = False + os.chdir(str(fol)) + fs = list(Path('./').glob('*')) + txts = [t for t in fs if '.txt' in str(t) and 'README' not in str(t)] + ar = txts[0].open() + lines = ar.readlines() + ar.close() + burst = lines[0].split('_')[1] + '_' + lines[0].split('_')[2] + for f in fs: + name = f.name + if new: + newname = 'S1_' + burst + '_' + '_'.join([n for n in name.split('_')[3:]]) + else: + newname = 'S1_' + burst + '_' + '_'.join([n for n in name.split('_')[10:]]) + print(newname) + if '.txt' in newname and 'README' not in newname: + foldername = newname.split('.')[0] + subprocess.call('mv ' + name + ' ' + newname, shell=True) + os.chdir(cwd) + os.chdir(folder) + subprocess.call('mv ' + fol.name + ' ' + foldername, shell=True) + os.chdir(cwd) + + +def download_pairs(job_name: str, folder: str | None = None) -> None: + """Downloads HyP3 products and renames files to meet MintPy standards. + + Args: + job_name: Name of the HyP3 project. + folder: Folder name that will contain the downloaded products. If None it will create a folder with the project name. + """ + hyp3 = sdk.HyP3() + jobs = hyp3.find_jobs(name=job_name) + + if folder is None: + folder = job_name + if not Path(folder).is_dir(): + Path.mkdir(Path(folder)) + + file_list = jobs.download_files(Path(folder)) + for z in file_list: + shutil.unpack_archive(str(z), folder) + z.unlink() + + rename_products(folder) + + +def set_same_epsg(gdf: gpd.GeoDataFrame) -> gpd.GeoDataFrame: + """Checks if the EPSG is the same to all files if not it reprojects them. + + Args: + gdf: Geopandas dataframe with all the tiff files. + + Returns: + Geopandas dataframe with reprojected files. 
+ """ + proj_count = gdf['EPSG'].value_counts() + predominant_epsg = proj_count.idxmax() + print(f'reprojecting to predominant EPSG: {predominant_epsg}') + tiff_path = gdf['tiff_path'].tolist() + for _, row in gdf.loc[gdf['EPSG'] != predominant_epsg].iterrows(): + pth = row['tiff_path'] + no_data_val = util.get_no_data_val(pth) + res = util.get_res(pth) + + temp = pth.parent / f'temp_{pth.stem}.tif' + pth.rename(temp) + src_epsg = row['EPSG'] + + warp_options = { + 'dstSRS': f'EPSG:{predominant_epsg}', + 'srcSRS': f'EPSG:{src_epsg}', + 'targetAlignedPixels': True, + 'xRes': res, + 'yRes': res, + 'dstNodata': no_data_val, + } + gdal.Warp(str(pth), str(temp), **warp_options) + temp.unlink() + + gdf = gpd.GeoDataFrame( + { + 'tiff_path': tiff_path, + 'EPSG': [util.get_epsg(p) for p in tiff_path], + 'geometry': [util.get_geotiff_bbox(p) for p in tiff_path], + } + ) + + return gdf + + +def check_extent(gdf: gpd.GeoDataFrame, common_extents: list) -> None: + """Checks the geometries in the GeoDataFrame are within the common_extents. + + Args: + gdf: Geopandas dataframe with all the tiff files. + common_extents: List with the common extent coordinates. + """ + wkt = ( + f'POLYGON(({common_extents[0]} {common_extents[1]}, {common_extents[2]} {common_extents[1]}, {common_extents[2]} ' + f'{common_extents[3]}, {common_extents[0]} {common_extents[3]}, {common_extents[0]} {common_extents[1]}))' + ) + + wkt_shapely_geom = shapely.wkt.loads(wkt) + if not util.check_within_bounds(wkt_shapely_geom, gdf): + print('WKT exceeds bounds of at least one dataset') + raise Exception('Error determining area of common coverage') + + +def set_same_frame(folder: str, wgs84: bool = False) -> None: + """Checks the coordinate system for all the files in the folder and reprojects them if necessary. + + Args: + folder: Path to the folder that has the HyP3 products. + wgs84: If True reprojects all the files to WGS84 system. 
+ """ + data_path = Path(folder) + dem = sorted(list(data_path.glob('*/*dem*.tif'))) + lv_phi = sorted(list(data_path.glob('*/*lv_phi*.tif'))) + lv_theta = sorted(list(data_path.glob('*/*lv_theta*.tif'))) + water_mask = sorted(list(data_path.glob('*/*_water_mask*.tif'))) + unw = sorted(list(data_path.glob('*/*_unw_phase*.tif'))) + corr = sorted(list(data_path.glob('*/*_corr*.tif'))) + conn_comp = sorted(list(data_path.glob('*/*_conncomp*.tif'))) + tiff_path = dem + lv_phi + lv_theta + water_mask + unw + corr + conn_comp + + gdf = gpd.GeoDataFrame( + { + 'tiff_path': tiff_path, + 'EPSG': [util.get_epsg(p) for p in tiff_path], + 'geometry': [util.get_geotiff_bbox(p) for p in tiff_path], + } + ) + + # check for multiple projections and project to the predominant EPSG + if gdf['EPSG'].nunique() > 1: + gdf = set_same_epsg(gdf) + + # check the file extent is within the common extent + common_extents = osl.get_common_coverage_extents(unw) + check_extent(gdf, common_extents) + + # reprojects all files to the common extent + for pth in tqdm(gdf['tiff_path']): + print(f'Subsetting: {pth}') + temp_pth = pth.parent / f'subset_{pth.name}' + gdal.Translate( + destName=str(temp_pth), + srcDS=str(pth), + projWin=[common_extents[0], common_extents[3], common_extents[2], common_extents[1]], + ) + pth.unlink() + temp_pth.rename(pth) + + # reprojects all files to WGS84 if necessary + if wgs84: + for pth in tqdm(gdf['tiff_path']): + print(f'Converting {pth} to WGS84') + gdal.Warp(str(pth), str(pth), dstSRS='EPSG:4326') + + +def write_cfg(job_name: str, min_coherence: str) -> None: + """Creates a basic config file from a template. + + Args: + job_name: Name of the HyP3 project. + min_coherence: Minimum coherence for timeseries processing. 
+ """ + cfg_folder = Path(hyp3_mintpy.__file__).parent / 'schemas' + + with Path(f'{cfg_folder}/config.txt').open() as cfg: + lines = cfg.readlines() + + abspath = Path(job_name).resolve() + Path(f'{job_name}/MintPy').mkdir(parents=True) + with Path(f'{job_name}/MintPy/{job_name}.txt').open('w') as cfg: + for line in lines: + newstring = '' + if 'folder' in line: + newstring += line.replace('folder', str(abspath)) + elif 'min_coherence' in line: + newstring += line.replace('min_coherence', min_coherence) + else: + newstring = line + cfg.write(newstring) + + +def run_mintpy(job_name: str) -> Path: + """Calls mintpy and prepares a zip file with the outputs. + + Args: + job_name: Name of the HyP3 project. + + Returns: + Path for the output zip file. + """ + subprocess.call(f'smallbaselineApp.py {job_name}/MintPy/{job_name}.txt --work-dir {job_name}/MintPy', shell=True) + subprocess.call(f'mv {job_name}/MintPy/*.h5 {job_name}/', shell=True) + subprocess.call(f'mv {job_name}/MintPy/inputs/geometry*.h5 {job_name}/', shell=True) + subprocess.call(f'mv {job_name}/MintPy/*.txt {job_name}/', shell=True) + subprocess.call(f'rm -rf {job_name}/MintPy {job_name}/S1_* {job_name}/shape_*', shell=True) + output_zip = shutil.make_archive(base_name=job_name, format='zip', base_dir=job_name) + + return Path(output_zip) + + +def process_mintpy(job_name: str, min_coherence: float) -> Path: """Create a greeting product. Args: - greeting: Write this greeting to a product file (Default: "Hello world!" ) + job_name: Name of the HyP3 project. + min_coherence: Minimum coherence for timeseries processing. + + Returns: + Path for the output zip file. 
""" - log.debug(f'Greeting: {greeting}') - product_file = Path('greeting.txt') - product_file.write_text(greeting) + download_pairs(job_name) + set_same_frame(job_name, wgs84=True) + + write_cfg(job_name, str(min_coherence)) + + product_file = run_mintpy(job_name) return product_file diff --git a/src/hyp3_mintpy/schemas/config.txt b/src/hyp3_mintpy/schemas/config.txt new file mode 100644 index 0000000..cb55141 --- /dev/null +++ b/src/hyp3_mintpy/schemas/config.txt @@ -0,0 +1,13 @@ +mintpy.load.processor = hyp3 +##---------geometry datasets: +mintpy.load.demFile = folder/*/*_dem*.tif +mintpy.load.incAngleFile = folder/*/*_lv_theta*.tif +mintpy.load.azAngleFile = folder/*/*_lv_phi*.tif +mintpy.load.waterMaskFile = folder/*/*_water_mask*.tif +##---------interferogram datasets: +mintpy.load.unwFile = folder/*/*_unw_phase*.tif +mintpy.load.corFile = folder/*/*_corr*.tif +mintpy.network.minCoherence = min_coherence +mintpy.networkInversion.minTempCoh = min_coherence +mintpy.reference.minCoherence = min_coherence +mintpy.troposphericDelay.method = no diff --git a/src/hyp3_mintpy/util.py b/src/hyp3_mintpy/util.py new file mode 100644 index 0000000..8c05ec0 --- /dev/null +++ b/src/hyp3_mintpy/util.py @@ -0,0 +1,282 @@ +"""util functions.""" + +import os +import re +from collections import Counter +from datetime import datetime +from pathlib import Path + +import geopandas as gpd +import numpy as np +import rasterio +import shapely.wkt +from mintpy.utils import readfile +from osgeo import gdal, ogr, osr +from pyproj import Transformer +from shapely.geometry import Polygon +from shapely.geometry.base import BaseGeometry +from shapely.ops import transform + + +gdal.UseExceptions() + + +def get_projection(img_path: Path | str) -> str | None: + """Gets image projection. + + Args: + img_path: a string or posix path to a product in a UTM projection. 
+ + Returns: + the projection (as a string) or None if none found + """ + img_path = str(img_path) + try: + info = gdal.Info(img_path, format='json')['coordinateSystem']['wkt'] + except KeyError: + return None + except TypeError: + raise FileNotFoundError + + regex = r'ID\["EPSG",[0-9]{4,5}\]\]$' + results = re.search(regex, info) + if results: + return results.group(0).split(',')[1][:-2] + else: + return None + + +def get_projections(tiff_paths: list[Path | str]) -> dict: + """Takes: List of string or posix paths to geotiffs. + + Returns: Dictionary key: epsg, value: number of tiffs in that epsg. + """ + epsgs = [] + for p in tiff_paths: + epsgs.append(get_projection(p)) + + epsgs_dic = dict(Counter(epsgs)) + return epsgs_dic + + +def get_res(tiff: Path) -> float: + """Takes: path to a GeoTiff. + + Returns: The GeoTiff's resolution + """ + f = gdal.Open(str(tiff)) + return f.GetGeoTransform()[1] + + +def get_no_data_val(pth: os.PathLike) -> None | float | int: + """Takes: path to a GeoTiff. + + Returns: The GeoTiff's no-data value. + """ + f = gdal.Open(str(pth)) + if f.GetRasterBand(1).DataType > 5: + no_data_val = f.GetRasterBand(1).GetNoDataValue() + return np.nan if no_data_val is None else f.GetRasterBand(1).GetNoDataValue() + else: + return 0 + + +def get_mintpy_vmin_vmax( + dataset_path: os.PathLike, mask_path: os.PathLike | None = None, bottom_percentile: float = 0.0 +) -> tuple[float, float]: + """Gets minimum and maximum values for velocity file. + + Takes: + dataset_path: path to a MintPy hdf5 dataset + mask_path: path to a MintPy hdf5 dataset containing a coherence mask (such as 'maskTempCoh.h5') + bottom_percentile: lower end of the percentile you would like to use for vmin, vmax + The upper end of the percentile will be symetrical with the passed lower end. + Passing 0.05 as the bottom_percentile will result in 1.0 - 0.05 = 0.95 being used for the high end + + Returns: vmin, vmax values covering the data (or masked data), centered at zero. 
+ """ + data, _ = readfile.read(dataset_path) + + if mask_path: + mask, _ = readfile.read(mask_path) + data *= mask + + vel_min = np.nanpercentile(data, bottom_percentile) * 100 + vel_max = np.nanpercentile(data, 1.0 - bottom_percentile) * 100 + + vmin = -np.nanmax([np.abs(vel_min), np.abs(vel_max)]) + vmax = np.nanmax([np.abs(vel_min), np.abs(vel_max)]) + return (vmin, vmax) + + +def get_recent_mintpy_config_path() -> os.PathLike | None: + """Gets the path for config file. + + Returns the mintpy config path saved in .recent_mintpy_config + if it exists else None + """ + recent_mintpy_path = Path.cwd() / '.recent_mintpy_config' + + try: + with recent_mintpy_path.open() as f: + line = f.readline() + if Path(line).exists(): + return Path(line) + except FileNotFoundError: + return None + except: + raise + return None # Found a recent path but it no longer exists + + +def write_recent_mintpy_config_path(pth: str | os.PathLike) -> None: + """Writes a config file for mintpy. + + Takes: A string path or posix path to a GeoTiff. + """ + recent_mintpy_path = Path.cwd() / '.recent_mintpy_config' + with recent_mintpy_path.open('w+') as f: + f.write(str(pth)) + + +def get_epsg(geotiff_path: str | os.PathLike) -> str: + """Takes: A string path or posix path to a GeoTiff. + + Returns: The string EPSG of the Geotiff. + """ + ds = gdal.Open(str(geotiff_path)) + proj = ds.GetProjection() + srs = osr.SpatialReference() + srs.ImportFromWkt(proj) + srs.AutoIdentifyEPSG() + return srs.GetAuthorityCode(None) + + +def get_geotiff_bbox(geotiff_path: str | os.PathLike, dst_epsg: str | None = None) -> Polygon: + """Gets bbox for geotiff file. + + Takes: + geotiff_path: path to a GeoTiff. + dst_epsg: optional EPSG for reprojection. + + Returns: The GeoTiffs bounding box as a shapely.geometry.Polygon. 
+ """ + with rasterio.open(geotiff_path) as dataset: + bounds = dataset.bounds + min_x, min_y = (bounds.left, bounds.bottom) + max_x, max_y = (bounds.right, bounds.top) + + if dst_epsg: + srs_crs = dataset.crs + transformer = Transformer.from_crs(srs_crs, f'EPSG:{str(dst_epsg)}', always_xy=True) + min_x, min_y = transformer.transform(bounds.left, bounds.bottom) + max_x, max_y = transformer.transform(bounds.right, bounds.top) + + return Polygon([(min_x, min_y), (max_x, min_y), (max_x, max_y), (min_x, max_y), (min_x, min_y)]) + + +def possible_wgs84_wkt(wkt: str) -> bool: + """If WKT Polygon falls within the range of valid WGS84 coords, prompts user to indicate whether the WKT is WGS84 or UTM. + + Takes: Well-Known-Text polygon string. + + Returns: True if the WKT could be WGS84 else False. + """ + lon_regex = r'(?:\(|,)(-?\d{1,6}\.?\d{0,6})' + lat_regex = r'(?<=\s)-?\d{,6}.?\d{,6}' + + lon_results = re.findall(lon_regex, wkt) + lon_results = [float(n) for n in lon_results] + lat_results = re.findall(lat_regex, wkt) + lat_results = [float(n) for n in lat_results] + if ( + -180.0 <= np.min(lon_results) + and np.max(lon_results) <= 180.0 + and -90.0 <= np.min(lat_results) + and np.max(lat_results) <= 90.0 + ): + while True: + print('Detected possible WGS84 (lat/lon) coordinates') + wgs84 = input('Are these lat/lon coordinates? (y or n)') + if wgs84 in ['y', 'n']: + do_wgs84 = True if wgs84 == 'y' else False + return do_wgs84 + else: + return False + + +def project_wkt_polygon(wkt_polygon: str, source_epsg: int | str, target_epsg: int | str) -> str: + """Projects polygon into target EPSG. 
+ + Takes: + wkt_polygon: A Well-Known-Text POLYGON string + source_epsg: wkt_polygon's EPSG + target_epsg: the target EPSG for projection + + Returns: A Well-Known-Text string in the target EPSG + """ + polygon = shapely.wkt.loads(wkt_polygon) + transformer = Transformer.from_crs(f'EPSG:{source_epsg}', f'EPSG:{target_epsg}', always_xy=True) + transformed_polygon = transform(transformer.transform, polygon) + return transformed_polygon.wkt + + +def get_valid_wkt() -> tuple[str, BaseGeometry]: + """Prompts user for WKT. + + Returns: WKT string, Shapely Polygon from WKT + """ + while True: + try: + wkt = input( + 'Please enter your WKT (e.g. "POLYGON((-148.4241 64.6077,-146.9478 64.6077,-146.9478 65.1052,-148.4241 65.1052,-148.4241 64.6077))": ' + ) + + shapely_geom = shapely.wkt.loads(wkt) + + if not gpd.GeoSeries([shapely_geom]).is_valid[0]: + print('Invalid geometry detected. Please enter a valid WKT.') + continue + + return wkt, shapely_geom + except Exception as e: + print(f'Error: {e}. Please enter a valid WKT.') + + +def check_within_bounds(wkt_shapely_geom: BaseGeometry, gdf: gpd.GeoDataFrame) -> bool: + """Checks if elements in the geopandas dataframe are within the AOI. + + wkt_shapely_geom: A shapely Polygon describing a subset AOI + gdf: a geopandas.GeoDataFrame containing geometries for each dataset to subset to wkt_shapely_geom + + returns: True if wkt_shapely_geom is contained within all geometries in the GeoDataFrame, else False + """ + return all(wkt_shapely_geom.within(geom) for geom in gdf['geometry']) + + +def save_shapefile( + ogr_geom: ogr.Geometry, + epsg: str | int, + dst_path: str | os.PathLike | None = Path.cwd() / f'shape_{datetime.strftime(datetime.now(), "%Y%m%dT%H%M%S")}.shp', +) -> None: + """Writes a shapefile from an ogr geometry in a given projection. 
+ + ogr_geom: An ogr geometry + epsg: the EPSG projection to apply to the shapefile + dst_path: (optional) shapefile destination path + """ + epsg = int(epsg) + driver = ogr.GetDriverByName('Esri Shapefile') + ds = driver.CreateDataSource(str(dst_path)) + srs = osr.SpatialReference() + srs.ImportFromEPSG(epsg) + layer = ds.CreateLayer('', srs, ogr.wkbPolygon) + defn = layer.GetLayerDefn() + + feat = ogr.Feature(defn) + feat.SetGeometry(ogr_geom) + + layer.CreateFeature(feat) + feat = None + + ds = layer = feat = None diff --git a/tests/conftest.py b/tests/conftest.py index e69de29..b3aaf31 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -0,0 +1,9 @@ +from pathlib import Path + +import pytest + + +@pytest.fixture +def test_data_directory(): + here = Path(__file__).parent + return here / 'data' diff --git a/tests/data/test_unw_phase.tif b/tests/data/test_unw_phase.tif new file mode 100644 index 0000000..6863a8b Binary files /dev/null and b/tests/data/test_unw_phase.tif differ diff --git a/tests/data/test_water_mask.tif b/tests/data/test_water_mask.tif new file mode 100644 index 0000000..c2f26c6 Binary files /dev/null and b/tests/data/test_water_mask.tif differ diff --git a/tests/data/testplane_water_mask.tif b/tests/data/testplane_water_mask.tif new file mode 100644 index 0000000..14a16b1 Binary files /dev/null and b/tests/data/testplane_water_mask.tif differ diff --git a/tests/test_process.py b/tests/test_process.py new file mode 100644 index 0000000..6baa2fc --- /dev/null +++ b/tests/test_process.py @@ -0,0 +1,115 @@ +import subprocess +from pathlib import Path + +import geopandas as gpd +import opensarlab_lib as osl +import pytest + +from hyp3_mintpy import util +from hyp3_mintpy.process import check_extent, rename_products, set_same_epsg, set_same_frame, write_cfg + + +def test_rename_products_new(): + Path('test/S1_000-000000s0n00-000000s0n00-000000s0n00_IW_00000000_00000000_VV_INT80_0000').mkdir(parents=True) + with Path( + 
'test/S1_000-000000s0n00-000000s0n00-000000s0n00_IW_00000000_00000000_VV_INT80_0000/S1_000-000000s0n00-000000s0n00-000000s0n00_IW_00000000_00000000_VV_INT80_0000.txt' + ).open('w') as test: + test.write('S1_000000_IW1_00000000T000000_VV_AAAA-BURST') + + rename_products('test') + folder = Path('test/S1_000000_IW1_00000000_00000000_VV_INT80_0000') + txt = folder / 'S1_000000_IW1_00000000_00000000_VV_INT80_0000.txt' + + assert folder.is_dir() + assert txt.exists() + + subprocess.call('rm -rf test', shell=True) + + +def test_rename_products_old(): + Path('test/S1A_000_W000_0_N00_0_E000_0_N00_0_00000000_00000000_VV_INT80_0000').mkdir(parents=True) + with Path( + 'test/S1A_000_W000_0_N00_0_E000_0_N00_0_00000000_00000000_VV_INT80_0000/S1A_000_W000_0_N00_0_E000_0_N00_0_00000000_00000000_VV_INT80_0000.txt' + ).open('w') as test: + test.write('S1_000000_IW1_00000000T000000_VV_AAAA-BURST') + + rename_products('test') + folder = Path('test/S1_000000_IW1_00000000_00000000_VV_INT80_0000') + txt = folder / 'S1_000000_IW1_00000000_00000000_VV_INT80_0000.txt' + + assert folder.is_dir() + assert txt.exists() + + subprocess.call('rm -rf test', shell=True) + + +def test_set_same_epsg(test_data_directory): + tiff_path = list(test_data_directory.glob('*.tif')) + gdf = gpd.GeoDataFrame( + { + 'tiff_path': tiff_path, + 'EPSG': [util.get_epsg(p) for p in tiff_path], + 'geometry': [util.get_geotiff_bbox(p) for p in tiff_path], + } + ) + gdf = set_same_epsg(gdf) + assert gdf['EPSG'].nunique() == 1 + + +def test_check_extent(test_data_directory): + tiff_path = list(test_data_directory.glob('test_*.tif')) + gdf = gpd.GeoDataFrame( + { + 'tiff_path': tiff_path, + 'EPSG': [util.get_epsg(p) for p in tiff_path], + 'geometry': [util.get_geotiff_bbox(p) for p in tiff_path], + } + ) + with pytest.raises(Exception, match='Error determining area of common coverage'): + check_extent(gdf, [0, 0, 1, 1]) + + assert check_extent(gdf, [670000.0, 5900000.0, 840000.0, 5950000.0]) is None # type: ignore + + 
+def test_set_same_frame(test_data_directory): + data = test_data_directory + + Path('test/test').mkdir(parents=True) + sdata = str(data) + test = Path('test/test') + stest = str(test) + subprocess.call(f'cp {sdata}/test_*.tif {stest}/', shell=True) + + set_same_frame('test', wgs84=True) + + extent_unw = osl.get_common_coverage_extents([test / 'test_unw_phase.tif']) + extent_mask = osl.get_common_coverage_extents([test / 'test_water_mask.tif']) + + assert extent_unw == extent_mask + + epsg_unw = util.get_epsg(str(test / 'test_unw_phase.tif')) + epsg_mask = util.get_epsg(str(test / 'test_water_mask.tif')) + + assert epsg_unw == epsg_mask + + subprocess.call('rm -rf test', shell=True) + + +def test_write_cfg(): + job_name = 'test_job' + min_coherence = '0.5' + write_cfg(job_name, min_coherence) + + assert Path(f'{job_name}/MintPy/{job_name}.txt').exists() + + with Path(f'{job_name}/MintPy/{job_name}.txt').open() as cfg: + lines = cfg.readlines() + + minCoh = 0.0 + for line in lines: + if 'minCoherence' in line: + minCoh = float(line.split('=')[1]) + + assert minCoh == float(min_coherence) + + subprocess.call(f'rm -rf {job_name}', shell=True)