I was having trouble reading the HDF5 file "inputs/geometryGeo.h5" in MATLAB because the "lzf" compression filter is not registered there:

    H5Z_pipeline required filter 'lzf' is not registered

Apparently the compression for the geometry files was hard-wired to "lzf" in load_data.py. To work around this, I changed two lines in load_data.py (marked with "KF" comments below) so that the geometry files use the user-specified compression, and then set

    mintpy.load.compression = gzip
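
For background: "lzf" is a filter that ships with h5py but is usually not registered in a stock HDF5 build such as the one bundled with MATLAB, whereas "gzip" maps to the standard DEFLATE filter that MATLAB's h5read() can decode. A minimal standalone sketch of the difference (file and dataset names here are made up for illustration, not MintPy outputs):

    import h5py
    import numpy as np

    data = np.random.rand(100, 100).astype(np.float32)

    # 'lzf' is registered by h5py itself; readers without the plugin (e.g. MATLAB)
    # fail with "H5Z_pipeline required filter 'lzf' is not registered".
    with h5py.File('demo_lzf.h5', 'w') as f:
        f.create_dataset('height', data=data, compression='lzf')

    # 'gzip' uses the DEFLATE filter built into every HDF5 library,
    # so MATLAB's h5read('demo_gzip.h5', '/height') works out of the box.
    with h5py.File('demo_gzip.h5', 'w') as f:
        f.create_dataset('height', data=data, compression='gzip', compression_opts=4)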
#################################################################
def load_data(inps):
    """load data into HDF5 files."""

    ## 0. read input
    start_time = time.time()
    iDict = read_inps2dict(inps)

    ## 1. prepare metadata
    prepare_metadata(iDict)
    extraDict = get_extra_metadata(iDict)

    # skip data writing as it is included in prep_aria/nisar
    if iDict['processor'] in ['aria', 'nisar']:
        return

    ## 2. search & write data files
    print('-'*50)
    print('updateMode : {}'.format(iDict['updateMode']))
    print('compression: {}'.format(iDict['compression']))
    print('multilook x/ystep: {}/{}'.format(iDict['xstep'], iDict['ystep']))
    print('multilook method : {}'.format(iDict['method']))
    kwargs = dict(updateMode=iDict['updateMode'], xstep=iDict['xstep'], ystep=iDict['ystep'])

    # read subset info [need the metadata from above]
    iDict = read_subset_box(iDict)

    # geometry in geo / radar coordinates
    geom_dset_name2template_key = {
        **GEOM_DSET_NAME2TEMPLATE_KEY,
        **IFG_DSET_NAME2TEMPLATE_KEY,
        **OFF_DSET_NAME2TEMPLATE_KEY,
    }
    geom_geo_obj, geom_radar_obj = read_inps_dict2geometry_dict_object(iDict, geom_dset_name2template_key)
    geom_geo_file = os.path.abspath('./inputs/geometryGeo.h5')
    geom_radar_file = os.path.abspath('./inputs/geometryRadar.h5')

    if run_or_skip(geom_geo_file, geom_geo_obj, iDict['box4geo'], **kwargs) == 'run':
        geom_geo_obj.write2hdf5(
            outputFile=geom_geo_file,
            access_mode='w',
            box=iDict['box4geo'],
            xstep=iDict['xstep'],
            ystep=iDict['ystep'],
            compression=iDict['compression'])  # KF: use user-specified compression (was hard-coded compression='lzf')

    if run_or_skip(geom_radar_file, geom_radar_obj, iDict['box'], **kwargs) == 'run':
        geom_radar_obj.write2hdf5(
            outputFile=geom_radar_file,
            access_mode='w',
            box=iDict['box'],
            xstep=iDict['xstep'],
            ystep=iDict['ystep'],
            extra_metadata=extraDict,
            compression=iDict['compression'])  # KF: use user-specified compression (was hard-coded compression='lzf')

    # observations: ifgram, ion or offset
    # loop over obs stacks
    stack_ds_name2tmpl_key_list = [
        IFG_DSET_NAME2TEMPLATE_KEY,
        ION_DSET_NAME2TEMPLATE_KEY,
        OFF_DSET_NAME2TEMPLATE_KEY,
    ]
    stack_files = ['ifgramStack.h5', 'ionStack.h5', 'offsetStack.h5']
    stack_files = [os.path.abspath(os.path.join('./inputs', x)) for x in stack_files]

    for ds_name2tmpl_opt, stack_file in zip(stack_ds_name2tmpl_key_list, stack_files):
        # initiate dict objects
        stack_obj = read_inps_dict2ifgram_stack_dict_object(iDict, ds_name2tmpl_opt)

        # use geom_obj as size reference while loading ionosphere
        geom_obj = None
        if os.path.basename(stack_file).startswith('ion'):
            geom_obj = geom_geo_obj if iDict['geocoded'] else geom_radar_obj

        # write dict objects to HDF5 files
        if run_or_skip(stack_file, stack_obj, iDict['box'], geom_obj=geom_obj, **kwargs) == 'run':
            stack_obj.write2hdf5(
                outputFile=stack_file,
                access_mode='w',
                box=iDict['box'],
                xstep=iDict['xstep'],
                ystep=iDict['ystep'],
                mli_method=iDict['method'],
                compression=iDict['compression'],
                extra_metadata=extraDict,
                geom_obj=geom_obj)

    # used time
    m, s = divmod(time.time() - start_time, 60)
    print(f'time used: {m:02.0f} mins {s:02.1f} secs.\n')
    return
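
To verify the workaround, the compression filter of the regenerated geometry file can be inspected with h5py (a quick standalone check, not part of MintPy; the dataset names depend on what was loaded):

    import h5py

    # print the compression filter of each top-level dataset in the geometry file
    with h5py.File('inputs/geometryGeo.h5', 'r') as f:
        for name, obj in f.items():
            if isinstance(obj, h5py.Dataset):
                print(f'{name:20s} compression = {obj.compression}')

Alternatively, h5disp('inputs/geometryGeo.h5') in MATLAB lists the filters per dataset, and h5read should now succeed once every dataset reports gzip instead of lzf.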