diff --git a/HLS_SuPER/HLS_PER.py b/HLS_SuPER/HLS_PER.py
index 580f84a..5eed2a6 100644
--- a/HLS_SuPER/HLS_PER.py
+++ b/HLS_SuPER/HLS_PER.py
@@ -52,9 +52,11 @@ def open_hls(
     Some HLS Landsat scenes have the metadata in the wrong location.
     """
     # Open using rioxarray
-    da = rxr.open_rasterio(url, chunks=chunk_size, mask_and_scale=False).squeeze(
+    da_org = rxr.open_rasterio(url, chunks=chunk_size, mask_and_scale=False).squeeze(
         "band", drop=True
     )
+    # (Add) 复制源数据进行后续操作, 以便最后复制源数据属性信息
+    da = da_org.copy()
 
     # (Add) 读取波段名称
     split_asset = url.split("/")[-1].split(".")
@@ -81,6 +83,14 @@ def open_hls(
         else:
             da = da * 0.0001
         # Remove Scale Factor After Scaling - Prevents Double Scaling
+        # (Add) 缩放计算后会丢源属性和坐标系, 需要复制源数据坐标系与属性
+        # 先根据是否裁剪情况判断需要复制的坐标系
+        if clip:
+            da.rio.write_crs(da_org.rio.crs, inplace=True)
+        else:
+            da.rio.write_crs(roi.crs, inplace=True)
+        # 再复制源数据属性
+        da.attrs = da_org.attrs.copy()
         da.attrs["scale_factor"] = 1.0
     # Add Scale Factor to Attributes Manually - This will overwrite/add if the data is missing.
 
@@ -90,7 +100,8 @@ def open_hls(
             da.attrs["scale_factor"] = 0.01
         else:
             da.attrs["scale_factor"] = 0.0001
-
+    # 销毁源数据
+    del da_org
     return da
 
 
@@ -175,10 +186,10 @@ def process_granule(
         quality_output_name = create_output_name(quality_url, band_dict)
         quality_output_file = f"{output_dir}/{quality_output_name}"
         # Check if quality asset is already processed
+        # Open Quality Layer
+        qa_da = open_hls(quality_url, roi, clip, scale, chunk_size)
         if not os.path.isfile(quality_output_file):
             # Write Output
-            # Open Quality Layer
-            qa_da = open_hls(quality_url, roi, clip, scale, chunk_size)
             qa_da.rio.to_raster(raster_path=quality_output_file, driver="COG")
         else:
             logging.info(
diff --git a/HLS_SuPER/HLS_Su.py b/HLS_SuPER/HLS_Su.py
index 02f63f5..63c54dc 100644
--- a/HLS_SuPER/HLS_Su.py
+++ b/HLS_SuPER/HLS_Su.py
@@ -32,7 +32,7 @@ def hls_search(
 
     # (Add) 根据瓦片ID过滤影像
     if tile_id:
-        results = hls_tileid_filter(results, tile_id)
+        results = tileid_filter(results, tile_id)
 
     # Filter by cloud cover
     if cloud_cover:
@@ -52,9 +52,11 @@ def hls_search(
     return selected_results_urls
 
 
-def hls_tileid_filter(results, tile_id):
+def tileid_filter(results, tile_id):
     """
     (Add) 基于给定的瓦片ID过滤earthaccess检索的数据结果
+
+    实测可过滤数据集: HLS.L30, HLS.S30, MCD43A4
     """
 
     tile_ids = []
diff --git a/HLS_SuPER/__init__.py b/HLS_SuPER/__init__.py
new file mode 100644
index 0000000..e69de29