add function for check-ups and clean-up
mgovorcin committed Dec 23, 2023
1 parent c84913f commit d44a18f
Showing 2 changed files with 59 additions and 8 deletions.
44 changes: 39 additions & 5 deletions tools/ARIAtools/contrib/ARIA_product.py
@@ -46,9 +46,9 @@ def __init__(self, work_dir: str, gunw_dir: str = None):
         self.work_dir = work_directory
         self.product_dir = products_dir
         self.aoi = None
-        self.user_json = str(work_directory / 'user_bbox.json')
+        self.user_json = str(self.aria_dir / 'user_bbox.json')
         product_json_file = 'prods_TOTbbox_metadatalyr.json'
-        self.product_json = str(work_directory / product_json_file)
+        self.product_json = str(self.aria_dir / product_json_file)

         # Dataframe
         self.df = None  # all
@@ -149,23 +149,28 @@ def filter_min_aoi_coverage(self, min_coverage_thresh=70, verbose=True):
             print(f' Number of rejected pairs: {gdf_rejected.shape[0]}')
         return gdf_selected, gdf_rejected

-    def save_aria_bbox(self):
+    def save_aria_bbox(self, overwrite=False):
         if self.dataframe_fin is None:
             raise ValueError('Final dataframe does not exist!')
         unioned_gdf = get_unioned_df(self.dataframe_fin)

+        if overwrite:
+            print('Overwrite aoi json files!')
+            user_json = Path(self.user_json)
+            product_json = Path(self.product_json)
+            if user_json.exists(): user_json.unlink()
+            if product_json.exists(): product_json.unlink()
+
         # Get the min common area
         bbox_shp = intersection_all(unioned_gdf.geometry)
         # Write a new GeoJSON file
         geojson_dict = dict(index=0, geometry='Polygon')
         with fiona.open(self.user_json, 'w', 'GeoJSON', geojson_dict) as c:
             ## If there are multiple geometries, put the "for" loop here
             c.write({
                 'geometry': mapping(bbox_shp),
             })

         with fiona.open(self.product_json, 'w', 'GeoJSON', geojson_dict) as c:
             ## If there are multiple geometries, put the "for" loop here
             c.write({
                 'geometry': mapping(unioned_gdf.unary_union),
             })
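
A minimal standalone sketch of the fiona write pattern used in save_aria_bbox above. The bounds and output name here are hypothetical, and the sketch follows fiona's conventional schema with both 'geometry' and 'properties' keys rather than the geojson_dict used in the hunk:

    import fiona
    from shapely.geometry import box, mapping

    # Hypothetical AOI bounds and output filename, for illustration only.
    schema = {'geometry': 'Polygon', 'properties': {'id': 'int'}}
    bbox_shp = box(-118.0, 34.0, -117.0, 35.0)

    with fiona.open('user_bbox.json', 'w', driver='GeoJSON', schema=schema) as c:
        c.write({'geometry': mapping(bbox_shp), 'properties': {'id': 0}})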
@@ -324,6 +329,9 @@ def export_layers(self, layer='unwrappedPhase',
         stack_stats.to_csv(str(self.aria_dir / 'stack_stats.csv'))

     def prepare_stack(self):
+
+        self.check_exported_products('unwrappedPhase')
+        self.check_exported_products('connectedComponents')
         ref_dlist = generate_stack(self, 'unwrappedPhase',
                                    'unwrapStack',
                                    bperp_file='stack_stats.csv',
@@ -349,6 +357,7 @@ def prepare_stack(self):
         for layer in layers:
             print('')
             if layer in ARIA_STACK_OUTFILES.keys():
+                self.check_exported_products(layer)
                 generate_stack(self,
                                layer,
                                ARIA_STACK_OUTFILES[layer],
@@ -379,3 +388,28 @@ def load_pickle(self, fname):
         pickle = Path(fname)
         file = open(str(pickle), 'r')
         self = pickle.load(file)
+
+    def check_exported_products(self, layer):
+        # Ensure the correct number of layers:
+        int_list = (Path(self.aria_dir) / layer)
+        int_list = list(int_list.glob('[0-9]*[0-9].vrt'))
+        product_list = [p['pair_name'][0] for p in self.products[0]]
+        flag = np.array([ip.name.split('.')[0] in product_list for ip in int_list])
+
+        # remove
+        for file in np.array(int_list)[~flag]:
+            file.unlink()
+
+    def clean_aria_directories(self):
+        import shutil
+        dir_remove_list = ['azimuthAngle', 'connectedComponents',
+                           'incidenceAngle', 'coherence', 'DEM',
+                           'mask', 'unwrappedPhase', 'stack']
+        for rdir in dir_remove_list:
+            print(f'Removing {rdir}')
+            shutil.rmtree(Path(self.aria_dir) / rdir)
+
+        for dfile in ['stack_stats.csv', 'user_bbox.json',
+                      'prods_TOTbbox_metadatalyr.json']:
+            print(f'Removing {dfile}')
+            (Path(self.aria_dir) / dfile).unlink()
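
The pre-existing load_pickle shown as context in the last hunk cannot run as written: it rebinds the name pickle to a Path (shadowing the pickle module), opens the file in text mode, and rebinding self has no effect outside the method. A minimal corrected sketch, assuming the object was saved with pickle.dump:

    import pickle
    from pathlib import Path

    def load_pickle(self, fname):
        # Pickle files must be opened in binary mode; copy the loaded object's
        # state onto this instance, since rebinding `self` would be a no-op.
        with open(Path(fname), 'rb') as file:
            self.__dict__.update(pickle.load(file).__dict__)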
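A hedged usage sketch of the two new helpers; the class name and constructor call below are hypothetical, since only the __init__ signature is visible in this diff:

    # Hypothetical instantiation of the product class defined in this file.
    product = AriaProduct(work_dir='./work', gunw_dir='./products')

    # Before stacking: delete exported VRTs whose pair name does not match
    # any loaded product, so generate_stack sees a consistent set of layers.
    product.check_exported_products('unwrappedPhase')

    # After the stacks are built: remove exported layer directories,
    # stack_stats.csv, and the bbox JSON files.
    product.clean_aria_directories()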
23 changes: 20 additions & 3 deletions tools/ARIAtools/contrib/product/utils.py
@@ -12,6 +12,13 @@
 import warnings
 from datetime import datetime as dt
 from shapely.geometry import box
+from pathlib import Path
+
+def extract_version(path:str):
+    fname = Path(path).name.split('.')[0]
+    version = fname.split('-')[-1]
+    version_ix = np.int16(version.split('v')[1].split('_')[0])
+    return version_ix

 def get_duplicates(df, threshold=80):
     def _check_duplicates(df, date12, threshold=90):
@@ -25,12 +32,22 @@ def _check_duplicates(df, date12, threshold=90):
             intersect = g12.iloc[ix+1].geometry.intersection(g12.iloc[ix].geometry)
             overlap1 = (intersect.area / g12.iloc[ix+1].geometry.area) * 100
             overlap2 = (intersect.area / g12.iloc[ix].geometry.area) * 100
-            if (overlap1 > threshold) or (overlap2 > threshold):
-                if g12.iloc[ix+1].geometry.area > g12.iloc[ix].geometry.area:
-                    remove_list.append(g12.PATH.iloc[ix])
-                else:
-                    remove_list.append(g12.PATH.iloc[ix+1])
+
+            v_ix1 = extract_version(g12.iloc[ix+1].PATH)
+            v_ix2 = extract_version(g12.iloc[ix].PATH)
+
+            if v_ix1 == v_ix2:
+                if (overlap1 > threshold) or (overlap2 > threshold):
+                    if g12.iloc[ix+1].geometry.area > g12.iloc[ix].geometry.area:
+                        remove_list.append(g12.PATH.iloc[ix])
+                    else:
+                        remove_list.append(g12.PATH.iloc[ix+1])
+            else:
+                if v_ix1 > v_ix2:
+                    remove_list.append(g12.PATH.iloc[ix])
+                else:
+                    remove_list.append(g12.PATH.iloc[ix+1])

         return remove_list

     # Loop through acquisition dates to check for duplicates
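
The new branch keeps the higher-version product outright when the versions differ, and falls back to the original area-based rule only when they match. A toy check of the overlap computation, with hypothetical footprints (shapely assumed installed):

    from shapely.geometry import box

    a = box(0, 0, 10, 10)   # hypothetical footprint of the ix+1 product
    b = box(0, 0, 10, 9)    # hypothetical footprint of the ix product
    overlap1 = (a.intersection(b).area / a.area) * 100   # 90.0
    overlap2 = (a.intersection(b).area / b.area) * 100   # 100.0
    # Equal versions: overlap2 exceeds the threshold, so the area rule fires
    # and b, the smaller footprint, is dropped. Unequal versions: the
    # lower-version product is dropped regardless of overlap.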
