From 017714b2543c33cfe58ca250ab09f43612a8f45f Mon Sep 17 00:00:00 2001
From: xhong
Date: Sat, 11 Jan 2025 22:30:41 +0800
Subject: [PATCH] feat: optimize HLS processing logic to avoid losing data
 attributes
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 HLS_SuPER/HLS_PER.py  | 19 +++++++++++++++----
 HLS_SuPER/HLS_Su.py   |  6 ++++--
 HLS_SuPER/__init__.py |  0
 3 files changed, 19 insertions(+), 6 deletions(-)
 create mode 100644 HLS_SuPER/__init__.py

diff --git a/HLS_SuPER/HLS_PER.py b/HLS_SuPER/HLS_PER.py
index 580f84a..5eed2a6 100644
--- a/HLS_SuPER/HLS_PER.py
+++ b/HLS_SuPER/HLS_PER.py
@@ -52,9 +52,11 @@ def open_hls(
     Some HLS Landsat scenes have the metadata in the wrong location.
     """
     # Open using rioxarray
-    da = rxr.open_rasterio(url, chunks=chunk_size, mask_and_scale=False).squeeze(
+    da_org = rxr.open_rasterio(url, chunks=chunk_size, mask_and_scale=False).squeeze(
         "band", drop=True
     )
+    # (Add) Work on a copy of the source data so the source attributes can be restored at the end
+    da = da_org.copy()
 
     # (Add) Read the band name
     split_asset = url.split("/")[-1].split(".")
@@ -81,6 +83,14 @@
         else:
             da = da * 0.0001
             # Remove Scale Factor After Scaling - Prevents Double Scaling
+        # (Add) Scaling drops the source attributes and CRS, so copy them back from the source data
+        # First decide which CRS to restore, depending on whether the data was clipped
+        if clip:
+            da.rio.write_crs(da_org.rio.crs, inplace=True)
+        else:
+            da.rio.write_crs(roi.crs, inplace=True)
+        # Then copy the source attributes
+        da.attrs = da_org.attrs.copy()
         da.attrs["scale_factor"] = 1.0
 
     # Add Scale Factor to Attributes Manually - This will overwrite/add if the data is missing.
@@ -90,7 +100,8 @@
             da.attrs["scale_factor"] = 0.01
         else:
             da.attrs["scale_factor"] = 0.0001
-
+    # (Add) Drop the source copy to free memory
+    del da_org
     return da
 
 
@@ -175,10 +186,10 @@ def process_granule(
                 quality_output_name = create_output_name(quality_url, band_dict)
                 quality_output_file = f"{output_dir}/{quality_output_name}"
                 # Check if quality asset is already processed
+                # Open Quality Layer
+                qa_da = open_hls(quality_url, roi, clip, scale, chunk_size)
                 if not os.path.isfile(quality_output_file):
                     # Write Output
-                    # Open Quality Layer
-                    qa_da = open_hls(quality_url, roi, clip, scale, chunk_size)
                     qa_da.rio.to_raster(raster_path=quality_output_file, driver="COG")
                 else:
                     logging.info(
diff --git a/HLS_SuPER/HLS_Su.py b/HLS_SuPER/HLS_Su.py
index 02f63f5..63c54dc 100644
--- a/HLS_SuPER/HLS_Su.py
+++ b/HLS_SuPER/HLS_Su.py
@@ -32,7 +32,7 @@
 
     # (Add) Filter granules by tile ID
     if tile_id:
-        results = hls_tileid_filter(results, tile_id)
+        results = tileid_filter(results, tile_id)
 
     # Filter by cloud cover
     if cloud_cover:
@@ -52,9 +52,11 @@
     return selected_results_urls
 
 
-def hls_tileid_filter(results, tile_id):
+def tileid_filter(results, tile_id):
     """
     (Add) Filter earthaccess search results by the given tile ID
+
+    Datasets verified to work with this filter: HLS.L30, HLS.S30, MCD43A4
     """
 
     tile_ids = []
diff --git a/HLS_SuPER/__init__.py b/HLS_SuPER/__init__.py
new file mode 100644
index 0000000..e69de29
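
Note on the attribute handling in open_hls(): by default xarray drops attrs
during arithmetic, which is why the patch scales a copy (da) and copies the
attributes back from the untouched source (da_org). A minimal sketch of the
underlying behavior, using plain xarray with a made-up attribute rather than
HLS data:

    import numpy as np
    import xarray as xr

    da = xr.DataArray(np.ones((2, 2)), attrs={"scale_factor": 0.0001})
    print((da * 0.0001).attrs)  # {} -- arithmetic drops the attributes

    # Possible alternative to the manual copy/restore used in this patch:
    with xr.set_options(keep_attrs=True):
        scaled = da * 0.0001
    print(scaled.attrs)  # {'scale_factor': 0.0001} -- preserved

xr.set_options(keep_attrs=True) would carry the attributes through the
scaling step, but the code would still need to overwrite scale_factor
afterwards, and the explicit rio.write_crs() call also restores the CRS, so
the copy/restore approach taken here is a reasonable choice.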
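
On the tileid_filter() rename in HLS_Su.py: the function body falls outside
this diff's context, so for illustration only, here is a hypothetical sketch
of how filtering earthaccess results by tile ID can work. The name
tileid_filter_sketch, the tile ID "T13TGF", and the UMM dict access are
assumptions for the example, not the patched code:

    import earthaccess

    def tileid_filter_sketch(results, tile_id):
        # Assumes each result is a dict-like earthaccess DataGranule whose
        # GranuleUR embeds the tile ID, e.g. "HLS.L30.T13TGF.2021183T173013.v2.0"
        return [g for g in results if f".{tile_id}." in g["umm"]["GranuleUR"]]

    earthaccess.login()
    results = earthaccess.search_data(
        short_name="HLSL30", temporal=("2024-06-01", "2024-06-30")
    )
    filtered = tileid_filter_sketch(results, "T13TGF")

Matching the tile ID with "." delimiters on both sides keeps the filter from
hitting substrings elsewhere in the granule name, which fits the docstring's
claim that the filter works for HLS.L30, HLS.S30, and MCD43A4 granules.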