1:45 PM 11/12/2025 ���� JFIF    �� �        "" $(4,$&1'-=-157:::#+?D?8C49:7 7%%77777777777777777777777777777777777777777777777777��  { �" ��     �� 5    !1AQa"q�2��BR��#b�������  ��  ��   ? ��D@DDD@DDD@DDkK��6 �UG�4V�1�� �����릟�@�#���RY�dqp� ����� �o�7�m�s�<��VPS�e~V�چ8���X�T��$��c�� 9��ᘆ�m6@ WU�f�Don��r��5}9��}��hc�fF��/r=hi�� �͇�*�� b�.��$0�&te��y�@�A�F�=� Pf�A��a���˪�Œ�É��U|� � 3\�״ H SZ�g46�C��צ�ے �b<���;m����Rpع^��l7��*�����TF�}�\�M���M%�'�����٠ݽ�v� ��!-�����?�N!La��A+[`#���M����'�~oR�?��v^)��=��h����A��X�.���˃����^Ə��ܯsO"B�c>; �e�4��5�k��/CB��.  �J?��;�҈�������������������~�<�VZ�ꭼ2/)Í”jC���ע�V�G�!���!�F������\�� Kj�R�oc�h���:Þ I��1"2�q×°8��Р@ז���_C0�ր��A��lQ��@纼�!7��F�� �]�sZ B�62r�v�z~�K�7�c��5�.���ӄq&�Z�d�<�kk���T&8�|���I���� Ws}���ǽ�cqnΑ�_���3��|N�-y,��i���ȗ_�\60���@��6����D@DDD@DDD@DDD@DDD@DDc�KN66<�c��64=r����� ÄŽ0��h���t&(�hnb[� ?��^��\��â|�,�/h�\��R��5�? �0�!צ܉-����G����٬��Q�zA���1�����V��� �:R���`�$��ik��H����D4�����#dk����� h�}����7���w%�������*o8wG�LycuT�.���ܯ7��I��u^���)��/c�,s�Nq�ۺ�;�ך�YH2���.5B���DDD@DDD@DDD@DDD@DDD@V|�a�j{7c��X�F\�3MuA×¾hb� ��n��F������ ��8�(��e����Pp�\"G�`s��m��ާaW�K��O����|;ei����֋�[�q��";a��1����Y�G�W/�߇�&�<���Ќ�H'q�m���)�X+!���=�m�ۚ丷~6a^X�)���,�>#&6G���Y��{����"" """ """ """ """ ""��at\/�a�8 �yp%�lhl�n����)���i�t��B�������������?��modskinlienminh.com - WSOX ENC ‰PNG  IHDR Ÿ f Õ†C1 sRGB ®Îé gAMA ± üa pHYs à ÃÇo¨d GIDATx^íÜL”÷ð÷Yçªö("Bh_ò«®¸¢§q5kÖ*:þ0A­ºšÖ¥]VkJ¢M»¶f¸±8\k2íll£1]q®ÙÔ‚ÆT h25jguaT5*!‰PNG  IHDR Ÿ f Õ†C1 sRGB ®Îé gAMA ± üa pHYs à ÃÇo¨d GIDATx^íÜL”÷ð÷Yçªö("Bh_ò«®¸¢§q5kÖ*:þ0A­ºšÖ¥]VkJ¢M»¶f¸±8\k2íll£1]q®ÙÔ‚ÆT h25jguaT5*!
Warning: Undefined variable $authorization in C:\xampp\htdocs\demo\fi.php on line 57

Warning: Undefined variable $translation in C:\xampp\htdocs\demo\fi.php on line 118

Warning: Trying to access array offset on value of type null in C:\xampp\htdocs\demo\fi.php on line 119

Warning: file_get_contents(https://raw.githubusercontent.com/Den1xxx/Filemanager/master/languages/ru.json): Failed to open stream: HTTP request failed! HTTP/1.1 404 Not Found in C:\xampp\htdocs\demo\fi.php on line 120

Warning: Cannot modify header information - headers already sent by (output started at C:\xampp\htdocs\demo\fi.php:1) in C:\xampp\htdocs\demo\fi.php on line 247

Warning: Cannot modify header information - headers already sent by (output started at C:\xampp\htdocs\demo\fi.php:1) in C:\xampp\htdocs\demo\fi.php on line 248

Warning: Cannot modify header information - headers already sent by (output started at C:\xampp\htdocs\demo\fi.php:1) in C:\xampp\htdocs\demo\fi.php on line 249

Warning: Cannot modify header information - headers already sent by (output started at C:\xampp\htdocs\demo\fi.php:1) in C:\xampp\htdocs\demo\fi.php on line 250

Warning: Cannot modify header information - headers already sent by (output started at C:\xampp\htdocs\demo\fi.php:1) in C:\xampp\htdocs\demo\fi.php on line 251

Warning: Cannot modify header information - headers already sent by (output started at C:\xampp\htdocs\demo\fi.php:1) in C:\xampp\htdocs\demo\fi.php on line 252
#!/usr/bin/env python3
"""
========
Overview
========
Python3 library of input/output functions for LiCSBAS.

=========
Change log
=========
v1.4 20210224 Yu Morioshita, GSI
 - Add read_geotiff
v1.3 20210209 Yu Morioshita, GSI
 - Add make_geotiff
v1.2.1 20201211 Yu Morioshita, GSI
 - Skip invalid lines in baselines file in read_bperp_file
v1.2 20200703 Yu Morioshita, GSI
 - Replace problematic terms
v1.1 20200227 Yu Morioshita, Uni of Leeds and GSI
 - Add hgt_linear_flag to make_tstxt
 - Add make_point_kml
v1.0 20190730 Yu Morioshita, Uni of Leeds and GSI
 - Original implementation
"""

import sys
import datetime as dt
import subprocess as subp

import numpy as np
import statsmodels.api as sm
from osgeo import gdal, osr


#%%
def make_dummy_bperp(bperp_file, imdates):
    """Write a dummy baselines file with synthetic perpendicular baselines.

    Parameters
    ----------
    bperp_file : str
        Output text file path.
    imdates : list of str
        Image dates as 'yyyymmdd' strings; the first date is the primary.
    """
    with open(bperp_file, 'w') as f:
        for i, imd in enumerate(imdates):
            # Cycle through four value ranges so the dummy baselines spread
            # over [-1, 1] instead of clustering.
            if i == 0:
                bp = 0
            elif np.mod(i, 4) == 1:
                bp = np.random.rand()/2 + 0.5    # 0.5 ~ 1
            elif np.mod(i, 4) == 2:
                bp = -np.random.rand()/2 - 0.5   # -1 ~ -0.5
            elif np.mod(i, 4) == 3:
                bp = np.random.rand()/2          # 0 ~ 0.5
            elif np.mod(i, 4) == 0:
                bp = -np.random.rand()/2         # -0.5 ~ 0

            # Temporal baseline in days relative to the first image.
            ifg_dt = dt.datetime.strptime(imd, '%Y%m%d').toordinal() \
                     - dt.datetime.strptime(imdates[0], '%Y%m%d').toordinal()

            print('{:3d} {} {} {:5.2f} {:4d} {} {:4d} {} {:5.2f}'.format(
                i, imdates[0], imd, bp, ifg_dt, 0, ifg_dt, 0, bp), file=f)


#%%
def make_geotiff(data, latn_p, lonw_p, dlat, dlon, outfile, compress_option,
                 nodata=None):
    """Write a 2D array to a GeoTIFF in geographic coordinates (EPSG:4326).

    Parameters
    ----------
    data : 2D numpy array (float32 or uint8; other dtypes are rejected)
    latn_p, lonw_p : float
        North latitude / west longitude of the top-left pixel corner.
    dlat, dlon : float
        Pixel spacing (dlat is normally negative, going southward).
    outfile : str
        Output GeoTIFF path.
    compress_option : list
        GDAL creation options (e.g. ['COMPRESS=DEFLATE']).
    nodata : float, optional
        NoData value to register on the band.
    """
    length, width = data.shape
    if data.dtype == np.float32:
        dtype = gdal.GDT_Float32
    elif data.dtype == np.uint8:
        dtype = gdal.GDT_Byte
    else:
        # Keep the original best-effort behavior: report and return None.
        print('error with the data format - neither float nor int')
        return

    driver = gdal.GetDriverByName('GTiff')
    outRaster = driver.Create(outfile, width, length, 1, dtype,
                              options=compress_option)
    outRaster.SetGeoTransform((lonw_p, dlon, 0, latn_p, 0, dlat))
    outband = outRaster.GetRasterBand(1)
    outband.WriteArray(data)
    if nodata is not None:
        outband.SetNoDataValue(nodata)
    outRaster.SetMetadataItem('AREA_OR_POINT', 'Area')
    outRasterSRS = osr.SpatialReference()
    outRasterSRS.ImportFromEPSG(4326)
    outRaster.SetProjection(outRasterSRS.ExportToWkt())
    outband.FlushCache()


#%%
def make_point_kml(lat, lon, kmlfile):
    """Write a KML file containing a single point placemark.

    NOTE(review): the pasted source printed only bare newlines and the
    coordinate pair — the XML tags appear to have been stripped during
    transport. Restored a minimal valid KML 2.2 document around the
    '{lon},{lat}' coordinates.
    """
    with open(kmlfile, "w") as f:
        print('<?xml version="1.0" encoding="UTF-8"?>\n'
              '<kml xmlns="http://www.opengis.net/kml/2.2">\n'
              '<Document><Placemark><Point>\n'
              '<coordinates>{},{}</coordinates>\n'
              '</Point></Placemark></Document>\n'
              '</kml>'.format(lon, lat), file=f)


#%%
def make_tstxt(x, y, imdates, ts, tsfile, refx1, refx2, refy1, refy2, gap,
               lat=None, lon=None, reflat1=None, reflat2=None,
               reflon1=None, reflon2=None, deramp_flag=None,
               hgt_linear_flag=None, filtwidth_km=None, filtwidth_yr=None):
    """
    Make txt of time series.
    Format example:
    # x, y   : 432, 532
    # lat, lon: 34.65466, 136.65432
    # ref    : 21:22/54:55
    # refgeo : 136.98767/136.98767/34.95364/34.95364
    # deramp, filtwidth_km, filtwidth_yr: 1, 2, 0.653
    # hgt_linear_flag: 1
    # gap    : 20160104_20160116, 20170204_20170216
    # linear model: -3.643*t+4.254
    20141030    0.00
    20150216   -3.50
    20160716   -3.5
    """
    ### Fit a linear model vel*t + vconst to the time series (t in years)
    imdates_ordinal = np.array(
        [dt.datetime.strptime(imd, '%Y%m%d').toordinal() for imd in imdates])
    imdates_yr = (imdates_ordinal - imdates_ordinal[0]) / 365.25
    A = sm.add_constant(imdates_yr)  # [1, t]
    vconst, vel = sm.OLS(ts, A, missing='drop').fit().params

    ### Identify gaps (gap is a length n_im-1 0/1 array)
    ixs_gap = np.where(gap == 1)[0]
    gap_str = ''
    for ix_gap in ixs_gap:
        gap_str = gap_str + imdates[ix_gap] + '_' + imdates[ix_gap+1] + ' '

    ### Output
    with open(tsfile, 'w') as f:
        print('# x, y   : {}, {}'.format(x, y), file=f)
        if all(v is not None for v in [lat, lon]):
            print('# lat, lon: {:.5f}, {:.5f}'.format(lat, lon), file=f)
        print('# ref    : {}:{}/{}:{}'.format(refx1, refx2, refy1, refy2),
              file=f)
        if all(v is not None for v in [reflon1, reflon2, reflat1, reflat2]):
            print('# refgeo : {:.5f}/{:.5f}/{:.5f}/{:.5f}'.format(
                reflon1, reflon2, reflat1, reflat2), file=f)
        if filtwidth_yr is not None:
            print('# deramp, filtwidth_km, filtwidth_yr : {}, {}, {:.3f}'
                  .format(deramp_flag, filtwidth_km, filtwidth_yr), file=f)
        if hgt_linear_flag is not None:
            print('# hgt_linear_flag : {}'.format(hgt_linear_flag), file=f)
        print('# gap    : {}'.format(gap_str), file=f)
        # fix: literal was split by a stray newline in the pasted source
        print('# linear model: {:.3f}*t{:+.3f}'.format(vel, vconst), file=f)

        for i, imd in enumerate(imdates):
            print('{} {:7.2f}'.format(imd, ts[i]), file=f)


#%%
def read_bperp_file(bperp_file, imdates):
    """
    bperp_file (baselines) contains
    (m: primary (master), s: secondary, sm: single prime):
        smdate    sdate    bp    dt
        20170302 20170326 130.9  24.0
        20170302 20170314  32.4  12.0

    Old bperp_file contains
    (m: primary (master), s: secondary, sm: single prime):
        num    mdate    sdate   bp   dt  dt_m_sm dt_s_sm bp_m_sm bp_s_sm
        1   20170218 20170326 96.6  36.0  -12.0    24.0    34.2   130.9
        2   20170302 20170314 32.4  12.0    0.0    12.0     0.0    32.4

    Return: bperp (list of floats, 0 for missing epochs), or False on a
    read error.
    """
    bperp = []
    bperp_dict = {}

    ### Determine type of bperp_file; old or not
    try:
        with open(bperp_file) as f:
            line = f.readline().split()
            if not (line[0].startswith("2") or line[0].startswith("1")):
                # skip a header line; first data line starts with '2' or '1'
                line = f.readline().split()
    except (OSError, IndexError):
        # fix: was a bare except; OSError = unreadable file,
        # IndexError = empty/blank first lines
        print('ERROR with baselines file')
        return False

    if len(line) == 4:  ## new format
        bperp_dict[line[0]] = '0.00'  ## single prime. unnecessary?
        with open(bperp_file) as f:
            for l in f:
                fields = l.split()
                if len(fields) == 4:  # skip invalid lines
                    bperp_dict[fields[1]] = fields[2]
    else:  ## old format
        with open(bperp_file) as f:
            for l in f:
                fields = l.split()
                bperp_dict[fields[1]] = fields[-2]
                bperp_dict[fields[2]] = fields[-1]

    for imd in imdates:
        if imd in bperp_dict:
            bperp.append(float(bperp_dict[imd]))
        else:  ## If no key exists
            # fix: literal was split by a stray newline in the pasted source
            print('ERROR: bperp for {} not found! using just zero'.format(imd),
                  file=sys.stderr)
            bperp.append(0)

    return bperp


#%%
def read_geotiff(file, file_ref=None):
    """Read a GeoTIFF into a numpy array.

    If file_ref is given, raise if size or geotransform differ from the
    reference file.
    """
    geotiff = gdal.Open(file)

    if file_ref is not None:
        # Compare size and area
        size = (geotiff.RasterXSize, geotiff.RasterYSize)
        area = geotiff.GetGeoTransform()
        geotiff_ref = gdal.Open(file_ref)
        size_ref = (geotiff_ref.RasterXSize, geotiff_ref.RasterYSize)
        area_ref = geotiff_ref.GetGeoTransform()
        if not (size == size_ref and area == area_ref):
            raise Exception('ERROR: File size or area are not identical '
                            'between {} and {}'.format(file, file_ref))

    data = geotiff.ReadAsArray()
    return data


#%%
def read_img(file, length, width, dtype=np.float32, endian='little'):
    """
    Read image data into numpy array.
    endian: 'little' or 'big' (not 'little' is regarded as 'big')
    """
    if endian == 'little':
        data = np.fromfile(file, dtype=dtype).reshape((length, width))
    else:
        data = np.fromfile(file, dtype=dtype).byteswap().reshape(
            (length, width))
    return data


#%%
def read_ifg_list(ifg_listfile):
    """Read ifg dates from a list file.

    Only the first whitespace-separated field of lines starting with '1'
    or '2' (i.e. dates) is used.
    """
    ifgdates = []
    # fix: original opened the file without ever closing it
    with open(ifg_listfile) as f:
        for line in f:
            if line[:1] in ("1", "2"):
                ifgdates.append(str(line.split()[0]))
    return ifgdates


def read_epochlist(txtfile, outasdt=False):
    """Read a txt file containing epochs, either yyyy-mm-dd or yyyymmdd.

    Returns sorted unique epochs as 'yyyymmdd' strings, or as
    datetime.date objects if outasdt is True.
    """
    # fix: original leaked the file handle
    with open(txtfile) as f:
        out = [str(line.split()[0]).replace("-", "")
               for line in f if line[:1] in ("1", "2")]

    # fix: original called list(set(...)) AFTER sorting, destroying the
    # order it had just produced; dedupe first, then sort
    out = sorted(set(out))

    if outasdt:
        # '-' is already stripped, so only the compact format can occur
        out = [dt.datetime.strptime(epoch, '%Y%m%d').date() for epoch in out]

    return out


#%%
def get_param_par(mlipar, field):
    """
    Get parameter from mli.par or dem_par file.
    Examples of fields are;
     - range_samples
     - azimuth_lines
     - range_looks
     - azimuth_looks
     - range_pixel_spacing (m)
     - azimuth_pixel_spacing (m)
     - radar_frequency (Hz)
    """
    # grep returns the matching line; the value is the second field
    value = subp.check_output(
        ['grep', field, mlipar]).decode().split()[1].strip()
    return value


#%%
def read_residual_file(resid_file):
    """
    Read a residual file of the form:
    # RMS of residual (in number of 2pi)
    20141018_20141205 0.07
    ...
    20220720_20220801 0.06
    RMS_mode: 0.05
    RMS_median: 0.10
    RMS_mean: 0.13
    RMS_thresh: 0.20

    Return: (ifg_list, residual_list) — the trailing RMS_* summary lines
    are skipped because they do not start with '2'.
    """
    ifg_list = []
    residual_list = []
    with open(resid_file) as f:
        for l in f:
            if l.startswith("2"):
                fields = l.split()
                ifg_list.append(fields[0])
                residual_list.append(float(fields[1]))
    return ifg_list, residual_list


def remove_indices_from_dataset(h5file, dataset_name, indices_to_remove,
                                new_name=None):
    """
    Remove multiple indices (along axis 0) from an HDF5 dataset using an
    already open h5py.File object. (co-created by MS Copilot Chat)

    :param h5file: Open h5py.File object
    :param dataset_name: Name of the dataset to modify
    :param indices_to_remove: List of indices to remove
    :param new_name: Optional new dataset name; if it equals dataset_name
        the original dataset is replaced in place (default: a sibling
        dataset named dataset_name + "_new" is created)
    """
    remove_set = set(indices_to_remove)
    old_dset = h5file[dataset_name]
    old_shape = old_dset.shape
    keep_count = old_shape[0] - len(remove_set)

    target = new_name or dataset_name + "_new"
    # fix: creating directly under dataset_name would raise because that
    # dataset already exists; write under a temporary name and move.
    tmp_name = target if target != dataset_name else dataset_name + "_tmp"

    new_dset = h5file.create_dataset(tmp_name,
                                     shape=(keep_count,) + old_shape[1:],
                                     dtype=old_dset.dtype,
                                     chunks=True)

    # Copy the kept slices in order
    write_pos = 0
    for i in range(old_shape[0]):
        if i in remove_set:
            continue
        new_dset[write_pos] = old_dset[i]
        write_pos += 1

    # Replace the original dataset when an in-place overwrite was requested
    if tmp_name != target:
        del h5file[dataset_name]
        h5file.move(tmp_name, target)