Compare commits: v1.0...master

8 commits, 14 changed files with 798 additions and 559 deletions

.gitignore (vendored): 2 additions

@@ -9,3 +9,5 @@ data/
*.tif
*.tiff
+ *.ipynb


@@ -22,7 +22,7 @@ Step2: Process DEM data
-------------------------------------------------------------------------------
Authors: Hong Xie
- Last Updated: 2025-07-03
+ Last Updated: 2025-08-05
===============================================================================
"""
@@ -39,7 +39,7 @@ import geopandas as gpd
import numpy as np
from rioxarray import open_rasterio
- sys.path.append("D:/NASA_EarthData_Script")
+ sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from utils.common_utils import setup_dask_environment, clip_image, mosaic_images
from HLS_SuPER.HLS_Su import earthdata_search
@@ -183,14 +183,14 @@ def process_granule(
            dem.rio.write_crs("EPSG:4326", inplace=True)
            dem.attrs["scale_factor"] = 1
            dem_raster_list.append(dem)
-       if len(dem_raster_list) > 1:
+       if len(dem_raster_list) >= 1:
            if name == "slope" or name == "aspect":
                dem_mosaiced = mosaic_images(dem_raster_list, nodata=-9999)
            else:
                dem_mosaiced = mosaic_images(dem_raster_list, nodata=-32768)
            if roi is not None and clip:
-               dem_mosaiced = clip_image(dem_mosaiced, roi)
+               dem_mosaiced = clip_image(dem_mosaiced, roi, clip_by_box=True)
            dem_mosaiced.rio.to_raster(output_file, driver="COG", compress="DEFLATE")
    except Exception as e:
        logging.error(f"Error processing files in {name}: {e}")
        return False
@@ -211,13 +211,12 @@ def main(region: list, asset_name: list, tile_id: str):
    unzip_dir = os.path.join(download_dir, "UNZIP")
    output_dir = os.path.join(output_root_dir, "TIF", tile_id)
    os.makedirs(unzip_dir, exist_ok=True)
+   os.makedirs(output_dir, exist_ok=True)
    results_urls_file = f"{output_root_dir}\\NASADEM_{tile_id}_results_urls.json"
-   if not os.path.isfile(results_urls_file):
+   # 默认覆盖上一次检索记录
    results_urls = earthdata_search(asset_name, roi=bbox)
    with open(results_urls_file, "w") as f:
        json.dump(results_urls, f)
-   else:
-       results_urls = json.load(open(results_urls_file))
    # 构造待解压的文件列表
    zip_file_list = [os.path.join(download_dir, os.path.basename(result[0])) for result in results_urls]
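For reference, the portable `sys.path` idiom adopted above resolves the repository root from the script's own location, so `utils` and `HLS_SuPER` import without a hard-coded drive path. A minimal standalone sketch (the layout named in the comment is an assumption, not taken from the diff):

```python
import os
import sys

# Assume this file lives one level below the repository root, e.g. <repo>/DATA_SuPER/script.py.
# Two dirname() calls walk up from the script file to <repo>, which is appended to sys.path
# so that "utils.*" and "HLS_SuPER.*" can be imported from any working directory.
repo_root = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.append(repo_root)
```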


@@ -28,7 +28,7 @@ import logging
import earthaccess
from xarray import open_dataset
- sys.path.append("D:/NASA_EarthData_Script")
+ sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from utils.common_utils import setup_dask_environment
from HLS_SuPER.HLS_Su import earthdata_search


@@ -6,7 +6,7 @@ For example, MCD43A3, MCD43A4, MOD11A1.
-------------------------------------------------------------------------------
Authors: Hong Xie
- Last Updated: 2025-07-15
+ Last Updated: 2025-09-11
===============================================================================
"""
@@ -20,9 +20,14 @@ import rioxarray as rxr
import dask.distributed
import geopandas as gpd
- sys.path.append("D:/NASA_EarthData_Script")
+ sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
- from utils.common_utils import clip_image, reproject_image, setup_dask_environment
+ from utils.common_utils import (
+     clip_image,
+     reproject_image,
+     setup_dask_environment,
+     setup_logging,
+ )
from HLS_SuPER.HLS_Su import earthdata_search
@@ -117,7 +122,15 @@ def open_modis(file_path, prod_name):
        raise ValueError(f"Unknown MODIS product: {prod_name}.")

- def process_modis(download_file, prod_name, roi, clip=True, scale=True, target_crs=None, output_file=None):
+ def process_modis(
+     download_file,
+     prod_name,
+     roi,
+     clip=True,
+     scale=True,
+     target_crs=None,
+     output_file=None,
+ ):
    """
    MODIS 数据进行预处理, 包括裁剪, 重投影和缩放.
    """
@@ -127,13 +140,14 @@ def process_modis(download_file, prod_name, roi, clip=True, scale=True, target_c
    if roi is not None and clip:
        modis = clip_image(modis, roi)
-   if target_crs is not None:
+   if target_crs is not None and modis is not None:
        modis = reproject_image(modis, target_crs)
+       # 重投影后再裁剪一次
+       if roi is not None and clip:
+           modis = clip_image(modis, roi)
-   # 重投影后再裁剪一次
-   if roi is not None and clip:
-       modis = clip_image(modis, roi)
+   if modis.isnull().all():
+       logging.error(f"Processing {download_file}. Roi area all pixels are nodata.")
    if scale:
        # 缩放计算后会丢源属性和坐标系, 需要先备份源数据属性信息
        org_attrs = modis.attrs
@@ -165,14 +179,9 @@ def process_granule(
    clip,
    scale,
    output_dir,
-   target_crs="EPSG:4326",
    tile_id=None,
+   target_crs="EPSG:4326",
):
-   logging.basicConfig(
-       level=logging.INFO,
-       format="%(levelname)s:%(asctime)s ||| %(message)s",
-       handlers=[logging.StreamHandler(sys.stdout)],
-   )
    download_hdf_name = os.path.basename(granule_urls[0])
    # 获取名称与日期
@@ -191,7 +200,7 @@ def process_granule(
        out_tif_name = f"MODIS.{prod_name}.{tile_id}.{date}.NBRDF.tif"
    else:
        out_tif_name = download_hdf_name.replace(".hdf", ".tif")
-   # 除 MCD43A4 需用于光谱指数计算外, MOD11A1 日间温度与 MCD43A4 反照率无需再按日期归档
+   # 除 MCD43A4 需用于光谱指数计算外, MOD11A1 日间温度与 MCD43A3 反照率无需再按日期归档
    if prod_name == "MOD11A1" or prod_name == "MCD43A3":
        output_path = os.path.join(output_dir, "TIF")
    else:
@@ -199,25 +208,29 @@ def process_granule(
    os.makedirs(output_path, exist_ok=True)
    output_file = os.path.join(output_path, out_tif_name)
-   if not os.path.isfile(output_file):
-       # Step1: 下载 HDF 文件
-       if not os.path.isfile(download_file):
-           try:
-               earthaccess.download(granule_urls, download_path)
-           except Exception as e:
-               logging.error(f"Error downloading {download_file}: {e}")
-               return
-       else:
-           logging.warning(f"{download_file} already exists. Skipping.")
-       # Step2: 处理 HDF 文件
-       try:
-           process_modis(download_file, prod_name, roi, clip, scale, target_crs, output_file)
+   # Step1: 下载 HDF 文件
+   if not os.path.isfile(download_file):
+       try:
+           earthaccess.download(granule_urls, download_path)
+       except Exception as e:
+           logging.error(f"Error downloading {download_file}: {e}")
+           return
+   else:
+       logging.warning(f"{download_file} already exists. Skipping.")
+   # Step2: 处理 HDF 文件
+   if not os.path.isfile(output_file):
+       try:
+           process_modis(
+               download_file, prod_name, roi, clip, scale, target_crs, output_file
+           )
+           logging.info(f"Processed {output_file} successfully.")
        except Exception as e:
            os.remove(download_file)
            logging.info(f"Removed corrupted file {download_file}. Retrying download.")
-           process_granule(granule_urls, roi, clip, scale, output_dir, target_crs, tile_id)
-       logging.info(f"Processed {output_file} successfully.")
+           process_granule(
+               granule_urls, roi, clip, scale, output_dir, target_crs, tile_id
+           )
    else:
        logging.warning(f"{output_file} already exists. Skipping.")
@@ -229,6 +242,7 @@ def main(
    years: list,
    dates: tuple[str, str],
    tile_id: str,
+   target_crs: str,
    output_root_dir: str,
):
    bbox = tuple(list(region.total_bounds))
@@ -255,20 +269,9 @@ def main(
    with open(results_urls_file, "w") as f:
        json.dump(results_urls, f)
-   # 配置日志, 首次配置生效, 后续嵌套配置无效
-   logging.basicConfig(
-       level=logging.INFO,  # 级别为INFO及以上的日志会被记录
-       format="%(levelname)s:%(asctime)s ||| %(message)s",
-       handlers=[
-           logging.StreamHandler(sys.stdout),  # 输出到控制台
-           logging.FileHandler(
-               f"{output_dir}\\{asset_name}_{tile_id}_SuPER.log"
-           ),  # 输出到日志文件
-       ],
-   )
    client = dask.distributed.Client(timeout=60, memory_limit="8GB")
    client.run(setup_dask_environment)
+   client.run(setup_logging)
    all_start_time = time.time()
    for year in years:
        year_results_dir = os.path.join(output_dir, year)
@@ -276,6 +279,11 @@ def main(
            year_results_dir, f"{asset_name}_{modis_tile_id}_{year}_results_urls.json"
        )
        year_results = json.load(open(year_results_file))
+       # 配置主进程日志
+       logs_file = os.path.join(
+           year_results_dir, f"{asset_name}_{tile_id}_{year}_SuPER.log"
+       )
+       setup_logging(logs_file)
        client.scatter(year_results)
        start_time = time.time()
@@ -287,14 +295,21 @@ def main(
                clip=True,
                scale=True,
                output_dir=year_results_dir,
-               target_crs="EPSG:32649",
                tile_id=tile_id,
+               target_crs=target_crs,
            )
            for granule_url in year_results
        ]
        dask.compute(*tasks)
        total_time = time.time() - start_time
+       # Dask任务结束后读取dask_worker.txt日志文件内容, 并注入到logs_file中
+       with open(logs_file, "a") as f:
+           with open("dask_worker.log", "r") as f2:
+               f.write(f2.read())
+       # 随后清空dask_worker.txt文件
+       with open("dask_worker.log", "w") as f:
+           f.write("")
        logging.info(
            f"{year} MODIS {asset_name} Downloading complete and proccessed. Total time: {total_time} seconds"
        )
@@ -303,6 +318,9 @@ def main(
    logging.info(
        f"All MODIS {asset_name} Downloading complete and proccessed. Total time: {all_total_time} seconds"
    )
+   # 最后删除dask_worker.log文件
+   os.remove("dask_worker.log")
+   return

if __name__ == "__main__":
@@ -310,12 +328,22 @@ if __name__ == "__main__":
    # region = gpd.read_file("./data/vectors/wuling_guanqu_polygon.geojson")
    tile_id = "49REL"
    region = gpd.read_file(f"./data/vectors/{tile_id}.geojson")
-   # asset_name = "MOD11A1"
+   target_crs = "EPSG:32649"
+   asset_name = "MOD11A1"
    # asset_name = "MCD43A3"
-   asset_name = "MCD43A4"
+   # asset_name = "MCD43A4"
    modis_tile_id = "h27v06"
    # 示例文件名称: MCD43A4.A2024001.h27v05.061.2024010140610.hdf
-   years = ["2024", "2023", "2022"]
+   years = ["2024"]
    dates = ("03-01", "10-31")
    output_root_dir = ".\\data\\MODIS\\"
-   main(region, asset_name, modis_tile_id, years, dates, tile_id, output_root_dir)
+   main(
+       region,
+       asset_name,
+       modis_tile_id,
+       years,
+       dates,
+       tile_id,
+       target_crs,
+       output_root_dir,
+   )
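For context, the `tasks` list consumed by `dask.compute(*tasks)` above follows the standard delayed-task fan-out, one task per granule. A self-contained sketch of that pattern with a stub in place of the real `process_granule` (names and values here are placeholders, not the repository's exact call):

```python
import dask
import dask.distributed


def process_granule(granule_urls, roi=None, clip=True, scale=True,
                    output_dir=".", tile_id=None, target_crs="EPSG:4326"):
    """Stand-in for the real function above; it only echoes its inputs."""
    return granule_urls, tile_id


if __name__ == "__main__":
    # Local cluster mirroring the Client(timeout=60, memory_limit="8GB") call in the script.
    client = dask.distributed.Client(timeout=60, memory_limit="8GB")
    year_results = [["https://example.com/granule_1.hdf"],
                    ["https://example.com/granule_2.hdf"]]
    tasks = [
        dask.delayed(process_granule)(
            granule_url, clip=True, scale=True,
            output_dir="./data/MODIS", tile_id="49REL", target_crs="EPSG:32649",
        )
        for granule_url in year_results
    ]
    results = dask.compute(*tasks)  # runs all delayed calls in parallel on the cluster
    print(results)
    client.close()
```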


@@ -48,7 +48,7 @@ import numpy as np
import xarray as xr
from rioxarray import open_rasterio
- sys.path.append("D:/NASA_EarthData_Script")
+ sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from utils.common_utils import setup_dask_environment, clip_image, mosaic_images
from HLS_SuPER.HLS_Su import earthdata_search


@@ -28,7 +28,7 @@ import h5py
from osgeo import gdal
import xarray as xr
- sys.path.append("D:/NASA_EarthData_Script")
+ sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from utils.common_params import EASE2_GRID_PARAMS, EPSG
from utils.common_utils import (


@@ -55,7 +55,7 @@ import logging
import time
from datetime import datetime, timedelta
- sys.path.append("D:\NASA_EarthData_Script")
+ sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
class getInsituData:


@@ -5,7 +5,7 @@ HLS Subsetting, Processing, and Exporting Reformatted Data Prep Script
Authors: Cole Krehbiel, Mahsa Jami, and Erik Bolch
Contact: lpdaac@usgs.gov
Editor: Hong Xie
- Last Updated: 2025-01-15
+ Last Updated: 2025-09-05
===============================================================================
"""
@@ -24,11 +24,12 @@ import json
import earthaccess
from shapely.geometry import box
+ from shapely.geometry.polygon import orient
import geopandas as gpd
from datetime import datetime as dt
import dask.distributed
- sys.path.append("D:/NASA_EarthData_Script")
+ sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from utils.common_utils import setup_dask_environment
from HLS_Su import hls_search
@@ -169,7 +170,15 @@ def parse_arguments():
    return parser.parse_args()

+ def ensure_ccw(geom):
+     """
+     Ensure the exterior ring of the polygon is counterclockwise.
+     """
+     if geom.exterior.is_ccw:
+         return geom  # Already counterclockwise
+     else:
+         return orient(geom, sign=1.0)  # Make it counterclockwise

def format_roi(roi):
    """
    Determines if submitted ROI is a file or bbox coordinates.
@@ -190,22 +199,20 @@ def format_roi(roi):
            logging.info(
                "Multiple polygons detected. Creating single geometry of external coordinates."
            )
-           single_geometry = roi.unary_union.convex_hull
+           single_geometry = roi.union_all().convex_hull
            roi = gpd.GeoDataFrame(geometry=[single_geometry], crs=roi.crs)
        # Check if ROI is in Geographic CRS, if not, convert to it
-       if roi.crs.is_geographic:
-           # List Vertices in correct order for search
-           # (Add) 使用外包矩形坐标作为检索使用的坐标
-           minx, miny, maxx, maxy = roi.total_bounds
-           bounding_box = box(minx, miny, maxx, maxy)
-           vertices_list = list(bounding_box.exterior.coords)
-       else:
-           roi_geographic = roi.to_crs("EPSG:4326")
+       if not roi.crs.is_geographic:
+           roi = roi.to_crs("EPSG:4326")
            logging.info(
                "Note: ROI submitted is being converted to Geographic CRS (EPSG:4326)"
            )
-           vertices_list = list(roi_geographic.geometry[0].exterior.coords)
+       roi['geometry'] = roi['geometry'].apply(ensure_ccw)
+       # List Vertices in correct order for search
+       # (Add) 使用外包矩形坐标简化提交检索时使用的坐标
+       minx, miny, maxx, maxy = roi.total_bounds
+       bounding_box = box(minx, miny, maxx, maxy)
+       vertices_list = list(bounding_box.exterior.coords)
    except (FileNotFoundError, ValueError):
        sys.exit(
            f"The GeoJSON/shapefile is either not valid or could not be found.\nPlease double check the name and provide the absolute path to the file or make sure that it is located in {os.getcwd()}"
@@ -217,6 +224,7 @@ def format_roi(roi):
        # Convert bbox to a geodataframe for clipping
        roi = gpd.GeoDataFrame(geometry=[box(*bbox)], crs="EPSG:4326")
+       roi['geometry'] = roi['geometry'].apply(ensure_ccw)
        vertices_list = list(roi.geometry[0].exterior.coords)
@@ -248,6 +256,8 @@ def format_tile_id(tile_id):
    """
    (Add) 格式化tile_id参数
    """
+   if tile_id is None:
+       return None
    tile_id = tile_id.strip("'").strip('"')
    return str(tile_id)
@@ -481,6 +491,7 @@ def main():
    # Defaults to the current directory
    output_dir = os.getcwd() + os.sep
+   os.makedirs(output_dir, exist_ok=True)
    logging.info(f"Output directory set to: {output_dir}")
    # Format/Validate Dates
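The `ensure_ccw` helper and bounding-box vertex list introduced above can be exercised on their own; a small self-contained sketch (the coordinates are made up for illustration):

```python
import geopandas as gpd
from shapely.geometry import box
from shapely.geometry.polygon import orient


def ensure_ccw(geom):
    """Return the polygon with a counterclockwise exterior ring, as in the diff above."""
    return geom if geom.exterior.is_ccw else orient(geom, sign=1.0)


# Hypothetical ROI: a lon/lat rectangle roughly over central Hubei.
roi = gpd.GeoDataFrame(geometry=[box(112.98, 30.53, 114.32, 31.64)], crs="EPSG:4326")
roi["geometry"] = roi["geometry"].apply(ensure_ccw)

# Simplify the search geometry to the ROI's bounding box and list its (CCW) vertices,
# which is the form the script submits when searching.
minx, miny, maxx, maxy = roi.total_bounds
vertices_list = list(box(minx, miny, maxx, maxy).exterior.coords)
print(vertices_list)
```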


@@ -2,32 +2,34 @@
## 仓库说明

1. EARTHDATA 说明

   - NASA 计划于 2026 年底将公开数据全部集成进 EARTHDATA 中
   - HLS 数据集NASA 多部门将 Landsat8/9 与 Sentinel-2A/B 统一协调至 30m划分为
     - L30Landsat8/9包含其独有的 2 个热红外波段,并已处理为表面亮温 ℃)
     - S30Sentinel-2A/B包含其独有的 4 个红边波段)
   - 对于核心六波段Blue/Green/Red/NIR/SWIR1/SWIR2可直接合并使用合并后湖北地区单格网观测频率约为 3-4d

2. 使用说明

   - 本仓库基于官方原始脚本修复了实际使用中存在的 bug并添加了更多可控制参数
   - 爬取时不要使用魔法工具
   - 实测单格网全年影像爬取+预处理约 1h
   - 单用户每秒限制 100 次请求

## 1 安装基础环境

### 1.1 miniforge

- miniforge 是结合 conda 与 mamba 的最小化包,比 Anaconda 和 Miniconda 更快更轻量,并且配置命令与原 conda 基本一致,支持直接使用 mamba 命令。
- 简而言之环境配置效率上miniforge > Mambaforge (202407 已废弃) > Miniconda + Mamba > Miniconda > Anaconda
- 官方仓库地址https://github.com/conda-forge/miniforge
- 官方下载地址https://conda-forge.org/download/

### 1.2 配置环境变量

- 为了在控制台中直接使用 conda 命令,需要将安装的相关目录配置到 Path 环境变量中。

```
D:\program\miniforge3
@@ -37,14 +39,14 @@ D:\program\miniforge3\Library\bin
### 1.3 配置权限

- 详细配置与 miniconda 相同图文教程地址https://gis-xh.github.io/my-note/python/01conda/Win11-Miniconda-install/
- Windows 环境下,需要设置虚拟环境文件夹的访问权限为所有用户可访问,否则会出现无法读取虚拟环境文件的问题
- 具体地:
  - 设置`D:\program\miniforge3\env`目录为所有用户可访问,具体操作为:右键点击文件夹 -> 属性 -> 安全 -> 编辑 -> 添加 -> 添加所有用户 -> 全选 -> 应用 -> 确定

### 1.4 配置镜像源

- 生成下载源文件的配置文件 (若已经安装过 Anaconda/Miniconda则无需执行此步骤)

```sh
conda config --set show_channel_urls yes
@@ -57,7 +59,7 @@ envs_dirs:
  - D:\program\miniforge3\envs
  - 其他路径地址(可选,创建虚拟环境时将会按照顺序查找)
channels:
-   - defaults
+   - conda-forge
show_channel_urls: true
channel_alias: https://mirrors.tuna.tsinghua.edu.cn/anaconda
default_channels:
@@ -75,6 +77,12 @@ custom_channels:
  simpleitk: https://mirrors.tuna.tsinghua.edu.cn/anaconda/cloud
```

+ - 清理镜像缓存
+ ```sh
+ conda clean -i
+ ```

### 1.5 初始化 conda

- 打开控制台,初始化 PowerShell 与 CMD
@@ -89,20 +97,33 @@ conda init cmd.exe
## 2 运行环境配置

### 2.1 使用 mamba 创建并激活虚拟环境

- - 克隆虚拟环境 (Windows环境下推荐)
+ - 克隆虚拟环境 (完整复刻运行环境所有依赖)
```sh
- mamba env create -f setup/lpdaac_windows.yml
+ mamba env create -f setup/lpdaac.yml
```
+ - 克隆虚拟环境 (复刻主要依赖环境-部分依赖可能会更新)
+ ```sh
+ mamba env create -f setup/environment.yml
+ ```
- 激活虚拟环境
```sh
- mamba activate lpdaac_windows
+ mamba activate lpdaac
```
+ - 导出当前虚拟环境
+ ```sh
+ mamba env export > setup/lpdaac.yml
+ ```
## 3 设计思路

### 3.1 数据组织
@@ -146,11 +167,10 @@ data/
### 3.2 NASA Earthdata 账户准备

- 参考自 NASA 官网示例 Demohttps://github.com/nasa/LPDAAC-Data-Resources/blob/main/setup/setup_instructions_python.md
- 首次运行爬取命令时,需要输入用户名和密码,用户名和密码可以在 [Earthdata](https://urs.earthdata.nasa.gov/) 注册获取。
- 需要注意的是,密码中最好不要出现 `@/#/$/%` 等符号,爬取时可能会出错。
- 单个用户每秒限制最多 100 次请求参考自https://forum.earthdata.nasa.gov/viewtopic.php?t=3734
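- 也可以直接在 Python 中完成首次登录并持久化凭据(示意代码,非仓库脚本的一部分):

```python
import earthaccess

# 交互式输入 Earthdata 用户名和密码; persist=True 会把凭据写入 ~/.netrc, 供后续爬取脚本复用
# (仅作示意, 并非仓库脚本的一部分)
auth = earthaccess.login(persist=True)
print(auth.authenticated)
```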
## 4 使用示例
@@ -160,7 +180,7 @@ data/
- `-roi`:感兴趣区,需要按照 **左下右上** 的逆时针顺序设置点坐标,同时还支持 `shp` 或 `geojson/json` 格式文件
- `-clip`:是否对影像进行裁剪,默认 `False`
- `-tile`HLS 影像瓦片 ID例如 `T49RGQ`
- `-dir`:输出目录,必须是已存在的目录
- `-start`:开始时间,格式为 `YYYY-MM-DD`
- `-end`:结束时间,格式为 `YYYY-MM-DD`
@@ -172,19 +192,19 @@ data/
#### 4.1.2 爬取云端数据并在内存中进行预处理示例

- 爬取 L30 与 S30 的核心光谱波段:仅按照感兴趣区,瓦片 ID起止时间以及产品名称筛选影像不进行云量筛选影像对影像进行去云掩膜处理

```sh
python .\\HLS_SuPER\\HLS_SuPER.py -roi '112.9834,30.5286,114.32373,31.64448' -tile T49RGQ -dir .\\data\\HLS\\ALL -start 2024-01-01 -end 2024-01-31 -prod both -bands BLUE,GREEN,RED,NIR1,SWIR1,SWIR2,Fmask -scale True
```

- 爬取 L30 的所有波段:按照感兴趣区,瓦片 ID起止时间以及产品名称筛选影像过滤云量小于 70% 的影像,对影像进行去云掩膜处理

```sh
python .\\HLS_SuPER\\HLS_SuPER.py -roi '112.9834,30.5286,114.32373,31.64448' -tile T49RGQ -dir .\\data\\HLS\\L30\\subset -start 2024-01-01 -end 2024-01-31 -prod HLSL30 -bands COASTAL-AEROSOL,BLUE,GREEN,RED,NIR1,SWIR1,SWIR2,CIRRUS,TIR1,TIR2,Fmask -cc 70 -scale True
```

- 仅爬取 L30 的热红外波段:仅按照感兴趣区,瓦片 ID起止时间以及产品名称筛选影像不进行云量筛选影像对影像进行去云掩膜处理

```sh
python .\\HLS_SuPER\\HLS_SuPER.py -roi .\\data\\vectors\\49REL.geojson -tile T49REL -dir .\\data\\HLS\\2024 -start 2024-06-01 -end 2024-09-01 -prod HLSL30 -bands TIR1,TIR2
```
@@ -198,4 +218,4 @@ python .\\HLS_SuPER\\HLS_SuPER.py -roi .\\data\\vectors\\49REL.geojson -tile T49
### 4.2 其他数据

v1.0: 直接运行 `DATA_SuPER/` 目录下所需数据对应的 \*.py 文件即可.

setup/environment.yml (new file, 29 lines)

@@ -0,0 +1,29 @@
name: lpdaac
channels:
- conda-forge
dependencies:
- dask
- earthaccess
- fiona
- gdal
- geopandas
- geoviews
- h5netcdf
- h5py
- harmony-py
- hvplot
- jupyter
- jupyter_bokeh
- jupyterlab
- libgdal-hdf4
- odc-stac
- pyresample
- pystac-client
- python=3.12
- rasterio
- ray-default
- rioxarray
- scikit-image
- seaborn
- spectral
- selenium
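After building the environment from this file, a quick import check can confirm that the core stack resolved (a minimal script, not part of the repository):

```python
# Run inside the activated "lpdaac" environment; it only prints installed versions.
from importlib.metadata import version

for pkg in ["earthaccess", "geopandas", "rioxarray", "dask", "rasterio"]:
    print(f"{pkg}: {version(pkg)}")
```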

setup/lpdaac.yml (new file, 437 lines)

@@ -0,0 +1,437 @@
name: lpdaac
channels:
- conda-forge
dependencies:
- _libavif_api=1.3.0=h57928b3_0
- _openmp_mutex=4.5=2_gnu
- affine=2.4.0=pyhd8ed1ab_1
- aiobotocore=2.23.2=pyhe01879c_1
- aiohappyeyeballs=2.6.1=pyhd8ed1ab_0
- aiohttp=3.12.15=py312h05f76fc_0
- aiohttp-cors=0.8.1=pyhd8ed1ab_0
- aioitertools=0.12.0=pyhd8ed1ab_1
- aiosignal=1.4.0=pyhd8ed1ab_0
- annotated-types=0.7.0=pyhd8ed1ab_1
- anyio=4.10.0=pyhe01879c_0
- aom=3.9.1=he0c23c2_0
- argon2-cffi=25.1.0=pyhd8ed1ab_0
- argon2-cffi-bindings=25.1.0=py312he06e257_0
- arrow=1.3.0=pyhd8ed1ab_1
- asciitree=0.3.3=py_2
- asttokens=3.0.0=pyhd8ed1ab_1
- async-lru=2.0.5=pyh29332c3_0
- attrs=25.3.0=pyh71513ae_0
- aws-c-auth=0.9.0=h467f71e_16
- aws-c-cal=0.9.2=hef2a5b8_1
- aws-c-common=0.12.4=hfd05255_0
- aws-c-compression=0.3.1=ha8a2810_6
- aws-c-event-stream=0.5.5=h16d2062_1
- aws-c-http=0.10.2=h909f643_3
- aws-c-io=0.21.0=h20b9e97_1
- aws-c-mqtt=0.13.1=h8a47558_4
- aws-c-s3=0.8.3=hcc9d52c_1
- aws-c-sdkutils=0.2.4=ha8a2810_1
- aws-checksums=0.2.7=ha8a2810_2
- aws-crt-cpp=0.32.10=h16ee0b7_3
- aws-sdk-cpp=1.11.510=h41ea3a3_14
- babel=2.17.0=pyhd8ed1ab_0
- beautifulsoup4=4.13.4=pyha770c72_0
- bleach=6.2.0=pyh29332c3_4
- bleach-with-css=6.2.0=h82add2a_4
- blosc=1.21.6=hfd34d9b_1
- bokeh=3.7.3=pyhd8ed1ab_0
- botocore=1.39.8=pyge310_1234567_0
- bounded-pool-executor=0.0.3=pyhd8ed1ab_0
- branca=0.8.1=pyhd8ed1ab_0
- brotli=1.1.0=h2466b09_3
- brotli-bin=1.1.0=h2466b09_3
- brotli-python=1.1.0=py312h275cf98_3
- bzip2=1.0.8=h2466b09_7
- c-ares=1.34.5=h2466b09_0
- c-blosc2=2.19.1=h3cf07e4_0
- ca-certificates=2025.8.3=h4c7d964_0
- cached-property=1.5.2=hd8ed1ab_1
- cached_property=1.5.2=pyha770c72_1
- cachetools=5.5.2=pyhd8ed1ab_0
- cartopy=0.25.0=py312hc128f0a_0
- certifi=2025.8.3=pyhd8ed1ab_0
- cffi=1.17.1=py312h4389bb4_0
- cftime=1.6.4=py312h1a27103_1
- charls=2.4.2=h1537add_0
- charset-normalizer=3.4.3=pyhd8ed1ab_0
- click=8.2.1=pyh7428d3b_0
- click-plugins=1.1.1.2=pyhd8ed1ab_0
- cligj=0.7.2=pyhd8ed1ab_2
- cloudpickle=3.1.1=pyhd8ed1ab_0
- colorama=0.4.6=pyhd8ed1ab_1
- colorcet=3.1.0=pyhd8ed1ab_1
- colorful=0.5.6=pyhd8ed1ab_0
- comm=0.2.3=pyhe01879c_0
- configobj=5.0.9=pyhd8ed1ab_1
- contourpy=1.3.3=py312hf90b1b7_1
- cpython=3.12.11=py312hd8ed1ab_0
- cryptography=45.0.6=py312h84d000f_0
- curlify=2.2.1=pyh44b312d_0
- cycler=0.12.1=pyhd8ed1ab_1
- cytoolz=1.0.1=py312h4389bb4_0
- dask=2025.7.0=pyhe01879c_0
- dask-core=2025.7.0=pyhe01879c_1
- datashader=0.18.2=pyhd8ed1ab_0
- dav1d=1.2.1=hcfcfb64_0
- debugpy=1.8.16=py312ha1a9051_0
- decorator=5.2.1=pyhd8ed1ab_0
- defusedxml=0.7.1=pyhd8ed1ab_0
- deprecated=1.2.18=pyhd8ed1ab_0
- distlib=0.4.0=pyhd8ed1ab_0
- distributed=2025.7.0=pyhe01879c_0
- donfig=0.8.1.post1=pyhd8ed1ab_1
- earthaccess=0.14.0=pyhd8ed1ab_0
- exceptiongroup=1.3.0=pyhd8ed1ab_0
- executing=2.2.0=pyhd8ed1ab_0
- fasteners=0.19=pyhd8ed1ab_1
- filelock=3.18.0=pyhd8ed1ab_0
- fiona=1.10.1=py312h6e88f47_3
- folium=0.20.0=pyhd8ed1ab_0
- fonttools=4.59.0=py312h05f76fc_0
- fqdn=1.5.1=pyhd8ed1ab_1
- freetype=2.13.3=h57928b3_1
- freexl=2.0.0=hf297d47_2
- frozenlist=1.7.0=py312hfdf67e6_0
- fsspec=2025.7.0=pyhd8ed1ab_0
- gdal=3.10.3=py312h07de9ea_12
- geopandas=1.1.1=pyhd8ed1ab_0
- geopandas-base=1.1.1=pyha770c72_0
- geos=3.13.1=h9ea8674_0
- geotiff=1.7.4=h86c3423_2
- geoviews=1.14.0=hd8ed1ab_0
- geoviews-core=1.14.0=pyha770c72_0
- giflib=5.2.2=h64bf75a_0
- google-api-core=2.25.1=pyhd8ed1ab_0
- google-auth=2.40.3=pyhd8ed1ab_0
- googleapis-common-protos=1.70.0=pyhd8ed1ab_0
- grpcio=1.71.0=py312h18946f6_1
- h11=0.16.0=pyhd8ed1ab_0
- h2=4.2.0=pyhd8ed1ab_0
- h5netcdf=1.6.4=pyhd8ed1ab_0
- h5py=3.14.0=nompi_py312h6cc2a29_100
- harmony-py=1.2.0=pyhd8ed1ab_0
- hdf4=4.2.15=h5557f11_7
- hdf5=1.14.6=nompi_he30205f_103
- holoviews=1.21.0=pyhd8ed1ab_0
- hpack=4.1.0=pyhd8ed1ab_0
- httpcore=1.0.9=pyh29332c3_0
- httpx=0.28.1=pyhd8ed1ab_0
- hvplot=0.12.0=pyhd8ed1ab_0
- hyperframe=6.1.0=pyhd8ed1ab_0
- idna=3.10=pyhd8ed1ab_1
- imagecodecs=2025.8.2=py312h2f18ccd_0
- imageio=2.37.0=pyhfb79c49_0
- importlib-metadata=8.7.0=pyhe01879c_1
- importlib-resources=6.5.2=pyhd8ed1ab_0
- importlib_resources=6.5.2=pyhd8ed1ab_0
- ipykernel=6.30.1=pyh3521513_0
- ipython=9.4.0=pyh6be1c34_0
- ipython_pygments_lexers=1.1.1=pyhd8ed1ab_0
- ipywidgets=8.1.7=pyhd8ed1ab_0
- isoduration=20.11.0=pyhd8ed1ab_1
- jedi=0.19.2=pyhd8ed1ab_1
- jinja2=3.1.6=pyhd8ed1ab_0
- jmespath=1.0.1=pyhd8ed1ab_1
- joblib=1.5.1=pyhd8ed1ab_0
- json5=0.12.0=pyhd8ed1ab_0
- jsonpointer=3.0.0=py312h2e8e312_1
- jsonschema=4.25.0=pyhe01879c_0
- jsonschema-specifications=2025.4.1=pyh29332c3_0
- jsonschema-with-format-nongpl=4.25.0=he01879c_0
- jupyter=1.1.1=pyhd8ed1ab_1
- jupyter-lsp=2.2.6=pyhe01879c_0
- jupyter_bokeh=4.0.5=pyhd8ed1ab_1
- jupyter_client=8.6.3=pyhd8ed1ab_1
- jupyter_console=6.6.3=pyhd8ed1ab_1
- jupyter_core=5.8.1=pyh5737063_0
- jupyter_events=0.12.0=pyh29332c3_0
- jupyter_server=2.16.0=pyhe01879c_0
- jupyter_server_terminals=0.5.3=pyhd8ed1ab_1
- jupyterlab=4.4.5=pyhd8ed1ab_0
- jupyterlab_pygments=0.3.0=pyhd8ed1ab_2
- jupyterlab_server=2.27.3=pyhd8ed1ab_1
- jupyterlab_widgets=3.0.15=pyhd8ed1ab_0
- jxrlib=1.1=hcfcfb64_3
- kerchunk=0.2.7=pyhd8ed1ab_0
- kiwisolver=1.4.8=py312hf90b1b7_1
- krb5=1.21.3=hdf4eb48_0
- lark=1.2.2=pyhd8ed1ab_1
- lazy-loader=0.4=pyhd8ed1ab_2
- lcms2=2.17=hbcf6048_0
- lerc=4.0.0=h6470a55_1
- libabseil=20250127.1=cxx17_h4eb7d71_0
- libaec=1.1.4=h20038f6_0
- libarchive=3.8.1=gpl_h1ca5a36_100
- libarrow=20.0.0=h7ea4809_8_cuda
- libarrow-acero=20.0.0=h7d8d6a5_8_cuda
- libarrow-dataset=20.0.0=h7d8d6a5_8_cuda
- libarrow-substrait=20.0.0=hb76e781_8_cuda
- libavif16=1.3.0=hf2698fe_0
- libblas=3.9.0=34_h5709861_mkl
- libbrotlicommon=1.1.0=h2466b09_3
- libbrotlidec=1.1.0=h2466b09_3
- libbrotlienc=1.1.0=h2466b09_3
- libcblas=3.9.0=34_h2a3cdd5_mkl
- libcrc32c=1.1.2=h0e60522_0
- libcurl=8.14.1=h88aaa65_0
- libdeflate=1.24=h76ddb4d_0
- libevent=2.1.12=h3671451_1
- libexpat=2.7.1=hac47afa_0
- libffi=3.4.6=h537db12_1
- libfreetype=2.13.3=h57928b3_1
- libfreetype6=2.13.3=h0b5ce68_1
- libgcc=15.1.0=h1383e82_4
- libgdal-core=3.10.3=h228a343_12
- libgdal-hdf4=3.10.3=ha47b6c4_12
- libgomp=15.1.0=h1383e82_4
- libgoogle-cloud=2.36.0=hf249c01_1
- libgoogle-cloud-storage=2.36.0=he5eb982_1
- libgrpc=1.71.0=h8c3449c_1
- libhwloc=2.12.1=default_h88281d1_1000
- libhwy=1.2.0=h1d1702c_0
- libiconv=1.18=h135ad9c_1
- libjpeg-turbo=3.1.0=h2466b09_0
- libjxl=0.11.1=ha161b08_2
- libkml=1.3.0=h538826c_1021
- liblapack=3.9.0=34_hf9ab0e9_mkl
- liblzma=5.8.1=h2466b09_2
- libnetcdf=4.9.2=nompi_ha45073a_118
- libparquet=20.0.0=ha850022_8_cuda
- libpng=1.6.50=h7351971_1
- libprotobuf=5.29.3=hd33f5f0_2
- libre2-11=2025.06.26=habfad5f_0
- librttopo=1.1.0=hbfc9ebc_18
- libsodium=1.0.20=hc70643c_0
- libspatialite=5.1.0=h378fb81_14
- libsqlite=3.50.4=hf5d6505_0
- libssh2=1.11.1=h9aa295b_0
- libthrift=0.21.0=hbe90ef8_0
- libtiff=4.7.0=h05922d8_5
- libutf8proc=2.10.0=hff4702e_0
- libwebp-base=1.6.0=h4d5522a_0
- libwinpthread=12.0.0.r4.gg4f2fc60ca=h57928b3_9
- libxcb=1.17.0=h0e4246c_0
- libxml2=2.13.8=h741aa76_1
- libzip=1.11.2=h3135430_0
- libzlib=1.3.1=h2466b09_2
- libzopfli=1.0.3=h0e60522_0
- linkify-it-py=2.0.3=pyhd8ed1ab_1
- llvm-openmp=20.1.8=hfa2b4ca_1
- llvmlite=0.44.0=py312h1f7db74_1
- locket=1.0.0=pyhd8ed1ab_0
- lz4=4.4.4=py312h032eceb_0
- lz4-c=1.10.0=h2466b09_1
- lzo=2.10=h6a83c73_1002
- mapclassify=2.10.0=pyhd8ed1ab_1
- markdown=3.8.2=pyhd8ed1ab_0
- markdown-it-py=3.0.0=pyhd8ed1ab_1
- markupsafe=3.0.2=py312h31fea79_1
- matplotlib-base=3.10.5=py312h0ebf65c_0
- matplotlib-inline=0.1.7=pyhd8ed1ab_1
- mdit-py-plugins=0.4.2=pyhd8ed1ab_1
- mdurl=0.1.2=pyhd8ed1ab_1
- minizip=4.0.10=h9fa1bad_0
- mistune=3.1.3=pyh29332c3_0
- mkl=2024.2.2=h57928b3_16
- msgpack-python=1.1.1=py312hd5eb7cc_0
- multidict=6.6.3=py312h05f76fc_0
- multimethod=2.0=pyhd8ed1ab_0
- multipledispatch=0.6.0=pyhd8ed1ab_1
- munkres=1.1.4=pyhd8ed1ab_1
- narwhals=2.0.1=pyhe01879c_0
- nbclient=0.10.2=pyhd8ed1ab_0
- nbconvert-core=7.16.6=pyh29332c3_0
- nbformat=5.10.4=pyhd8ed1ab_1
- nest-asyncio=1.6.0=pyhd8ed1ab_1
- netcdf4=1.7.2=nompi_py312hf8617a8_102
- networkx=3.5=pyhe01879c_0
- notebook=7.4.5=pyhd8ed1ab_0
- notebook-shim=0.2.4=pyhd8ed1ab_1
- numba=0.61.2=py312hdcac391_1
- numcodecs=0.15.1=py312h72972c8_0
- numpy=2.2.6=py312h3150e54_0
- odc-geo=0.5.0rc1=pyhd8ed1ab_0
- odc-loader=0.5.1=pyhd8ed1ab_0
- odc-stac=0.4.0=pyhd8ed1ab_0
- opencensus=0.11.3=pyhd8ed1ab_1
- opencensus-context=0.1.3=py312h2e8e312_3
- openjpeg=2.5.3=h24db6dd_1
- openssl=3.5.2=h725018a_0
- opentelemetry-api=1.36.0=pyhd8ed1ab_0
- opentelemetry-exporter-prometheus=0.48b0=pyhd8ed1ab_0
- opentelemetry-proto=1.36.0=pyhd8ed1ab_0
- opentelemetry-sdk=1.36.0=pyhd8ed1ab_0
- opentelemetry-semantic-conventions=0.57b0=pyh3cfb1c2_0
- orc=2.1.2=h35764e3_0
- outcome=1.3.0.post0=pyhd8ed1ab_1
- overrides=7.7.0=pyhd8ed1ab_1
- packaging=25.0=pyh29332c3_1
- pandas=2.3.1=py312hc128f0a_0
- pandocfilters=1.5.0=pyhd8ed1ab_0
- panel=1.7.5=pyhd8ed1ab_0
- param=2.2.1=pyhd8ed1ab_0
- parso=0.8.4=pyhd8ed1ab_1
- partd=1.4.2=pyhd8ed1ab_0
- patsy=1.0.1=pyhd8ed1ab_1
- pcre2=10.45=h99c9b8b_0
- pickleshare=0.7.5=pyhd8ed1ab_1004
- pillow=11.3.0=py312hfb502af_0
- pip=25.2=pyh8b19718_0
- platformdirs=4.3.8=pyhe01879c_0
- pockets=0.9.1=pyhd8ed1ab_1
- pqdm=0.2.0=pyhd8ed1ab_1
- progressbar2=4.2.0=pyhd8ed1ab_0
- proj=9.6.2=h7990399_1
- prometheus_client=0.22.1=pyhd8ed1ab_0
- prompt-toolkit=3.0.51=pyha770c72_0
- prompt_toolkit=3.0.51=hd8ed1ab_0
- propcache=0.3.1=py312h31fea79_0
- proto-plus=1.26.1=pyhd8ed1ab_0
- protobuf=5.29.3=py312h275cf98_0
- psutil=7.0.0=py312h4389bb4_0
- pthread-stubs=0.4=h0e40799_1002
- pure_eval=0.2.3=pyhd8ed1ab_1
- py-spy=0.4.1=h77a83cd_0
- pyarrow=20.0.0=py312h2e8e312_0
- pyarrow-core=20.0.0=py312h607bf26_0_cuda
- pyasn1=0.6.1=pyhd8ed1ab_2
- pyasn1-modules=0.4.2=pyhd8ed1ab_0
- pycparser=2.22=pyh29332c3_1
- pyct=0.5.0=pyhd8ed1ab_1
- pydantic=2.11.7=pyh3cfb1c2_0
- pydantic-core=2.33.2=py312h8422cdd_0
- pygments=2.19.2=pyhd8ed1ab_0
- pykdtree=1.4.3=py312h196c9fc_0
- pyogrio=0.11.0=py312h6e88f47_0
- pyopenssl=25.1.0=pyhd8ed1ab_0
- pyparsing=3.2.3=pyhe01879c_2
- pyproj=3.7.1=py312h5ea471a_1
- pyresample=1.34.2=py312h275cf98_0
- pyshp=3.0.0=pyhd8ed1ab_0
- pysocks=1.7.1=pyh09c184e_7
- pystac=1.13.0=pyhd8ed1ab_0
- pystac-client=0.9.0=pyhd8ed1ab_0
- python=3.12.11=h3f84c4b_0_cpython
- python-cmr=0.13.0=pyhff2d567_1
- python-dateutil=2.9.0.post0=pyhe01879c_2
- python-dotenv=0.20.0=pyhd8ed1ab_0
- python-fastjsonschema=2.21.1=pyhd8ed1ab_0
- python-json-logger=2.0.7=pyhd8ed1ab_0
- python-tzdata=2025.2=pyhd8ed1ab_0
- python-utils=3.9.1=pyhff2d567_1
- python_abi=3.12=8_cp312
- pytz=2025.2=pyhd8ed1ab_0
- pyu2f=0.1.5=pyhd8ed1ab_1
- pyviz_comms=3.0.6=pyhd8ed1ab_0
- pywavelets=1.9.0=py312h196c9fc_0
- pywin32=311=py312h829343e_0
- pywinpty=2.0.15=py312h275cf98_0
- pyyaml=6.0.2=py312h31fea79_2
- pyzmq=27.0.1=py312h5b324a9_0
- qhull=2020.2=hc790b64_5
- rasterio=1.4.3=py312ha172ac9_1
- rav1e=0.7.1=ha073cba_3
- ray-core=2.47.1=py312h4ed40c6_0
- ray-default=2.47.1=py312h7553cdf_0
- re2=2025.06.26=h3dd2b4f_0
- referencing=0.36.2=pyh29332c3_0
- requests=2.32.4=pyhd8ed1ab_0
- rfc3339-validator=0.1.4=pyhd8ed1ab_1
- rfc3986-validator=0.1.1=pyh9f0ad1d_0
- rfc3987-syntax=1.1.0=pyhe01879c_1
- rioxarray=0.19.0=pyhd8ed1ab_0
- rpds-py=0.27.0=py312hdabe01f_0
- rsa=4.9.1=pyhd8ed1ab_0
- s3fs=2025.7.0=pyhd8ed1ab_0
- scikit-image=0.25.2=py312h72972c8_1
- scikit-learn=1.7.1=py312h91ac024_0
- scipy=1.16.0=py312h1416ca1_0
- seaborn=0.13.2=hd8ed1ab_3
- seaborn-base=0.13.2=pyhd8ed1ab_3
- selenium=4.34.2=pyhe01879c_0
- selenium-manager=4.34.0=h18a1a76_0
- send2trash=1.8.3=pyh5737063_1
- setproctitle=1.3.6=py312h4389bb4_0
- setuptools=80.9.0=pyhff2d567_0
- shapely=2.0.7=py312h3f81574_1
- six=1.17.0=pyhe01879c_1
- smart_open=7.3.0.post1=pyhe01879c_0
- snappy=1.2.2=h7fa0ca8_0
- sniffio=1.3.1=pyhd8ed1ab_1
- snuggs=1.4.7=pyhd8ed1ab_2
- sortedcontainers=2.4.0=pyhd8ed1ab_1
- soupsieve=2.7=pyhd8ed1ab_0
- spectral=0.24=pyhd8ed1ab_0
- sphinxcontrib-napoleon=0.7=pyhd8ed1ab_1
- sqlite=3.50.4=hdb435a2_0
- stack_data=0.6.3=pyhd8ed1ab_1
- statsmodels=0.14.5=py312h196c9fc_0
- svt-av1=3.0.2=he0c23c2_0
- tbb=2021.13.0=h18a62a1_2
- tblib=3.1.0=pyhd8ed1ab_0
- terminado=0.18.1=pyh5737063_0
- threadpoolctl=3.6.0=pyhecae5ae_0
- tifffile=2025.6.11=pyhd8ed1ab_0
- tinycss2=1.4.0=pyhd8ed1ab_0
- tinynetrc=1.3.1=pyhd8ed1ab_0
- tk=8.6.13=h2c6b04d_2
- tomli=2.2.1=pyhe01879c_2
- toolz=1.0.0=pyhd8ed1ab_1
- tornado=6.5.2=py312he06e257_0
- tqdm=4.67.1=pyhd8ed1ab_1
- traitlets=5.14.3=pyhd8ed1ab_1
- trio=0.30.0=py312h2e8e312_0
- trio-websocket=0.12.2=pyh29332c3_0
- types-python-dateutil=2.9.0.20250809=pyhd8ed1ab_0
- typing-extensions=4.14.1=h4440ef1_0
- typing-inspection=0.4.1=pyhd8ed1ab_0
- typing_extensions=4.14.1=pyhe01879c_0
- typing_utils=0.1.0=pyhd8ed1ab_1
- tzdata=2025b=h78e105d_0
- uc-micro-py=1.0.3=pyhd8ed1ab_1
- ucrt=10.0.22621.0=h57928b3_1
- ujson=5.10.0=py312h275cf98_1
- unicodedata2=16.0.0=py312h4389bb4_0
- universal_pathlib=0.2.6=pyhd8ed1ab_1
- uri-template=1.3.0=pyhd8ed1ab_1
- uriparser=0.9.8=h5a68840_0
- urllib3=2.5.0=pyhd8ed1ab_0
- vc=14.3=h41ae7f8_31
- vc14_runtime=14.44.35208=h818238b_31
- vcomp14=14.44.35208=h818238b_31
- virtualenv=20.33.1=pyhd8ed1ab_0
- virtualizarr=1.2.0=pyhd8ed1ab_0
- vs2015_runtime=14.44.35208=h38c0c73_31
- wcwidth=0.2.13=pyhd8ed1ab_1
- webcolors=24.11.1=pyhd8ed1ab_0
- webencodings=0.5.1=pyhd8ed1ab_3
- websocket-client=1.8.0=pyhd8ed1ab_1
- wheel=0.45.1=pyhd8ed1ab_1
- widgetsnbextension=4.0.14=pyhd8ed1ab_0
- win_inet_pton=1.1.0=pyh7428d3b_8
- winpty=0.4.3=4
- wrapt=1.17.2=py312h4389bb4_0
- wsproto=1.2.0=pyhd8ed1ab_1
- xarray=2025.7.1=pyhd8ed1ab_0
- xerces-c=3.2.5=he0c23c2_2
- xorg-libxau=1.0.12=h0e40799_0
- xorg-libxdmcp=1.1.5=h0e40799_0
- xyzservices=2025.4.0=pyhd8ed1ab_0
- yaml=0.2.5=h6a83c73_3
- yarl=1.20.1=py312h31fea79_0
- zarr=2.18.7=pyhd8ed1ab_0
- zeromq=4.3.5=ha9f60a1_7
- zfp=1.0.1=he0c23c2_2
- zict=3.0.0=pyhd8ed1ab_1
- zipp=3.23.0=pyhd8ed1ab_0
- zlib=1.3.1=h2466b09_2
- zlib-ng=2.2.5=h1608b31_0
- zstandard=0.23.0=py312h4389bb4_2
- zstd=1.5.7=hbeecb71_2
- pip:
- beautifulsoup4==4.12.3


@ -1,426 +0,0 @@
name: lpdaac_windows
channels:
- defaults
- conda-forge
dependencies:
- affine=2.4.0=pyhd8ed1ab_0
- aiobotocore=2.7.0=pyhd8ed1ab_1
- aiofiles=22.1.0=pyhd8ed1ab_0
- aiohttp=3.8.6=py310h8d17308_1
- aiohttp-cors=0.7.0=py_0
- aioitertools=0.11.0=pyhd8ed1ab_0
- aiosignal=1.3.1=pyhd8ed1ab_0
- aiosqlite=0.19.0=pyhd8ed1ab_0
- ansicon=1.89.0=py310h5588dad_7
- anyio=4.0.0=pyhd8ed1ab_0
- argon2-cffi=23.1.0=pyhd8ed1ab_0
- argon2-cffi-bindings=21.2.0=py310h8d17308_4
- arrow=1.3.0=pyhd8ed1ab_0
- asciitree=0.3.3=py_2
- asttokens=2.4.1=pyhd8ed1ab_0
- async-timeout=4.0.3=pyhd8ed1ab_0
- aws-c-auth=0.7.3=h0127223_1
- aws-c-cal=0.6.1=hfb91821_1
- aws-c-common=0.9.0=hcfcfb64_0
- aws-c-compression=0.2.17=h04c9df6_2
- aws-c-event-stream=0.3.1=h495bb32_4
- aws-c-http=0.7.11=hf013885_4
- aws-c-io=0.13.32=he824701_1
- aws-c-mqtt=0.9.3=h64f41f2_1
- aws-c-s3=0.3.14=hb8b96c7_1
- aws-c-sdkutils=0.1.12=h04c9df6_1
- aws-checksums=0.1.17=h04c9df6_1
- aws-crt-cpp=0.21.0=hf1ed06d_5
- aws-sdk-cpp=1.10.57=heb7cc7f_19
- babel=2.13.1=pyhd8ed1ab_0
- backports=1.0=pyhd8ed1ab_3
- backports.functools_lru_cache=1.6.5=pyhd8ed1ab_0
- beautifulsoup4=4.12.2=pyha770c72_0
- bleach=6.1.0=pyhd8ed1ab_0
- blessed=1.19.1=pyh95a074a_2
- blosc=1.21.5=hdccc3a2_0
- bokeh=3.3.0=pyhd8ed1ab_0
- boto3=1.28.64=pyhd8ed1ab_0
- botocore=1.31.64=pyhd8ed1ab_0
- bounded-pool-executor=0.0.3=pyhd8ed1ab_0
- branca=0.7.0=pyhd8ed1ab_1
- brotli=1.0.9=hcfcfb64_9
- brotli-bin=1.0.9=hcfcfb64_9
- brotli-python=1.0.9=py310h00ffb61_9
- bzip2=1.0.8=hcfcfb64_5
- c-ares=1.21.0=hcfcfb64_0
- ca-certificates=2025.1.31=h56e8100_0
- cached-property=1.5.2=hd8ed1ab_1
- cached_property=1.5.2=pyha770c72_1
- cachetools=5.3.2=pyhd8ed1ab_0
- cairo=1.18.0=h1fef639_0
- cartopy=0.22.0=py310hecd3228_1
- certifi=2025.1.31=pyhd8ed1ab_0
- cffi=1.16.0=py310h8d17308_0
- cfitsio=4.3.0=h9b0cee5_0
- cftime=1.6.3=py310h3e78b6c_0
- charset-normalizer=3.3.2=pyhd8ed1ab_0
- click=8.1.7=win_pyh7428d3b_0
- click-plugins=1.1.1=py_0
- cligj=0.7.2=pyhd8ed1ab_1
- cloudpickle=3.0.0=pyhd8ed1ab_0
- colorama=0.4.6=pyhd8ed1ab_0
- colorcet=3.0.1=pyhd8ed1ab_0
- colorful=0.5.4=pyhd8ed1ab_0
- comm=0.1.4=pyhd8ed1ab_0
- configobj=5.0.8=pyhd8ed1ab_0
- contourpy=1.2.0=py310h232114e_0
- cryptography=41.0.5=py310h6e82f81_0
- cycler=0.12.1=pyhd8ed1ab_0
- cytoolz=0.12.2=py310h8d17308_1
- dask=2023.10.1=pyhd8ed1ab_0
- dask-core=2023.10.1=pyhd8ed1ab_0
- datashader=0.16.0=pyhd8ed1ab_0
- debugpy=1.8.0=py310h00ffb61_1
- decorator=5.1.1=pyhd8ed1ab_0
- defusedxml=0.7.1=pyhd8ed1ab_0
- distlib=0.3.7=pyhd8ed1ab_0
- distributed=2023.10.1=pyhd8ed1ab_0
- entrypoints=0.4=pyhd8ed1ab_0
- exceptiongroup=1.1.3=pyhd8ed1ab_0
- executing=2.0.1=pyhd8ed1ab_0
- expat=2.5.0=h63175ca_1
- fasteners=0.17.3=pyhd8ed1ab_0
- filelock=3.13.1=pyhd8ed1ab_0
- fiona=1.9.5=py310h65cc672_0
- folium=0.15.0=pyhd8ed1ab_0
- font-ttf-dejavu-sans-mono=2.37=hab24e00_0
- font-ttf-inconsolata=3.000=h77eed37_0
- font-ttf-source-code-pro=2.038=h77eed37_0
- font-ttf-ubuntu=0.83=hab24e00_0
- fontconfig=2.14.2=hbde0cde_0
- fonts-conda-ecosystem=1=0
- fonts-conda-forge=1=0
- fonttools=4.44.0=py310h8d17308_0
- fqdn=1.5.1=pyhd8ed1ab_0
- freetype=2.12.1=hdaf720e_2
- freexl=2.0.0=h8276f4a_0
- frozenlist=1.4.0=py310h8d17308_1
- fsspec=2023.10.0=pyhca7485f_0
- gdal=3.7.3=py310haa9213b_2
- geopandas=0.14.0=pyhd8ed1ab_1
- geopandas-base=0.14.0=pyha770c72_1
- geos=3.12.0=h1537add_0
- geotiff=1.7.1=hcf4a93f_14
- geoviews=1.11.0=pyhd8ed1ab_0
- geoviews-core=1.11.0=pyha770c72_0
- gettext=0.21.1=h5728263_0
- gitdb=4.0.11=pyhd8ed1ab_0
- gitpython=3.1.40=pyhd8ed1ab_0
- google-api-core=2.13.0=pyhd8ed1ab_0
- google-auth=2.23.4=pyhca7485f_0
- googleapis-common-protos=1.61.0=pyhd8ed1ab_0
- gpustat=1.1.1=pyhd8ed1ab_0
- grpcio=1.54.3=py310h8020be6_0
- h5netcdf=1.3.0=pyhd8ed1ab_0
- h5py=3.10.0=nompi_py310h20f5850_100
- hdf4=4.2.15=h5557f11_7
- hdf5=1.14.2=nompi_h73e8ff5_100
- holoviews=1.18.1=pyhd8ed1ab_0
- hvplot=0.9.0=pyhd8ed1ab_0
- icu=73.2=h63175ca_0
- idna=3.4=pyhd8ed1ab_0
- imagecodecs-lite=2019.12.3=py310h3e78b6c_7
- imageio=2.31.5=pyh8c1a49c_0
- importlib-metadata=6.8.0=pyha770c72_0
- importlib_metadata=6.8.0=hd8ed1ab_0
- intel-openmp=2023.2.0=h57928b3_50497
- ipykernel=6.26.0=pyha63f2e9_0
- ipython=8.17.2=pyh5737063_0
- ipython_genutils=0.2.0=py_1
- ipywidgets=8.1.1=pyhd8ed1ab_0
- isoduration=20.11.0=pyhd8ed1ab_0
- jedi=0.19.1=pyhd8ed1ab_0
- jinja2=3.1.2=pyhd8ed1ab_1
- jinxed=1.2.0=pyh95a074a_0
- jmespath=1.0.1=pyhd8ed1ab_0
- joblib=1.3.2=pyhd8ed1ab_0
- json5=0.9.14=pyhd8ed1ab_0
- jsonpointer=2.4=py310h5588dad_3
- jsonschema=4.19.2=pyhd8ed1ab_0
- jsonschema-specifications=2023.7.1=pyhd8ed1ab_0
- jsonschema-with-format-nongpl=4.19.2=pyhd8ed1ab_0
- jupyter=1.0.0=pyhd8ed1ab_10
- jupyter-resource-usage=1.0.1=pyhd8ed1ab_0
- jupyter-server-mathjax=0.2.6=pyh5bfe37b_1
- jupyter_bokeh=3.0.7=pyhd8ed1ab_0
- jupyter_client=7.4.9=pyhd8ed1ab_0
- jupyter_console=6.6.3=pyhd8ed1ab_0
- jupyter_core=5.5.0=py310h5588dad_0
- jupyter_events=0.9.0=pyhd8ed1ab_0
- jupyter_server=2.10.0=pyhd8ed1ab_0
- jupyter_server_fileid=0.9.0=pyhd8ed1ab_0
- jupyter_server_terminals=0.4.4=pyhd8ed1ab_1
- jupyter_server_ydoc=0.8.0=pyhd8ed1ab_0
- jupyter_ydoc=0.2.4=pyhd8ed1ab_0
- jupyterlab=3.6.6=pyhd8ed1ab_0
- jupyterlab-geojson=3.4.0=pyhd8ed1ab_0
- jupyterlab-git=0.44.0=pyhd8ed1ab_0
- jupyterlab_pygments=0.2.2=pyhd8ed1ab_0
- jupyterlab_server=2.25.1=pyhd8ed1ab_0
- jupyterlab_widgets=3.0.9=pyhd8ed1ab_0
- kealib=1.5.2=ha10e780_1
- kiwisolver=1.4.5=py310h232114e_1
- krb5=1.21.2=heb0366b_0
- lazy_loader=0.3=pyhd8ed1ab_0
- lcms2=2.15=h67d730c_3
- lerc=4.0.0=h63175ca_0
- libabseil=20230125.3=cxx17_h63175ca_0
- libaec=1.1.2=h63175ca_1
- libarchive=3.7.2=h6f8411a_0
- libarrow=12.0.1=he3e0f11_8_cpu
- libblas=3.9.0=19_win64_mkl
- libboost-headers=1.82.0=h57928b3_6
- libbrotlicommon=1.0.9=hcfcfb64_9
- libbrotlidec=1.0.9=hcfcfb64_9
- libbrotlienc=1.0.9=hcfcfb64_9
- libcblas=3.9.0=19_win64_mkl
- libcrc32c=1.1.2=h0e60522_0
- libcurl=8.4.0=hd5e4a3a_0
- libdeflate=1.19=hcfcfb64_0
- libevent=2.1.12=h3671451_1
- libexpat=2.5.0=h63175ca_1
- libffi=3.4.2=h8ffe710_5
- libgdal=3.7.3=h3217549_2
- libglib=2.78.1=he8f3873_0
- libgoogle-cloud=2.12.0=h00b2bdc_1
- libgrpc=1.54.3=ha177ca7_0
- libhwloc=2.9.3=default_haede6df_1009
- libiconv=1.17=h8ffe710_0
- libjpeg-turbo=3.0.0=hcfcfb64_1
- libkml=1.3.0=haf3e7a6_1018
- liblapack=3.9.0=19_win64_mkl
- libnetcdf=4.9.2=nompi_h8284064_112
- libpng=1.6.39=h19919ed_0
- libpq=16.3=hab9416b_0
- libprotobuf=3.21.12=h12be248_2
- librttopo=1.1.0=h92c5fdb_14
- libsodium=1.0.18=h8d14728_1
- libspatialindex=1.9.3=h39d44d4_4
- libspatialite=5.1.0=hbf340bc_1
- libsqlite=3.44.0=hcfcfb64_0
- libssh2=1.11.0=h7dfc565_0
- libthrift=0.18.1=h06f6336_2
- libtiff=4.6.0=h6e2ebb7_2
- libutf8proc=2.8.0=h82a8f57_0
- libwebp-base=1.3.2=hcfcfb64_0
- libxcb=1.15=hcd874cb_0
- libxml2=2.12.7=h283a6d9_1
- libzip=1.10.1=h1d365fa_3
- libzlib=1.2.13=hcfcfb64_5
- linkify-it-py=2.0.0=pyhd8ed1ab_0
- llvmlite=0.41.1=py310hb84602e_0
- locket=1.0.0=pyhd8ed1ab_0
- lz4=4.3.2=py310hbbb2075_1
- lz4-c=1.9.4=hcfcfb64_0
- lzo=2.10=he774522_1000
- m2w64-gcc-libgfortran=5.3.0=6
- m2w64-gcc-libs=5.3.0=7
- m2w64-gcc-libs-core=5.3.0=7
- m2w64-gmp=6.1.0=2
- m2w64-libwinpthread-git=5.0.0.4634.697f757=2
- mapclassify=2.6.1=pyhd8ed1ab_0
- markdown=3.5.1=pyhd8ed1ab_0
- markdown-it-py=3.0.0=pyhd8ed1ab_0
- markupsafe=2.1.3=py310h8d17308_1
- matplotlib-base=3.8.1=py310hc9baf74_0
- matplotlib-inline=0.1.6=pyhd8ed1ab_0
- mdit-py-plugins=0.4.0=pyhd8ed1ab_0
- mdurl=0.1.0=pyhd8ed1ab_0
- minizip=4.0.2=h5bed578_0
- mistune=3.0.2=pyhd8ed1ab_0
- mkl=2023.2.0=h6a75c08_50496
- msgpack-python=1.0.6=py310h232114e_0
- msys2-conda-epoch=20160418=1
- multidict=6.0.4=py310h8d17308_1
- multimethod=1.9.1=pyhd8ed1ab_0
- multipledispatch=0.6.0=py_0
- munch=4.0.0=pyhd8ed1ab_0
- munkres=1.1.4=pyh9f0ad1d_0
- nbclassic=1.0.0=pyhb4ecaf3_1
- nbclient=0.8.0=pyhd8ed1ab_0
- nbconvert=7.11.0=pyhd8ed1ab_0
- nbconvert-core=7.11.0=pyhd8ed1ab_0
- nbconvert-pandoc=7.11.0=pyhd8ed1ab_0
- nbdime=3.2.1=pyhd8ed1ab_0
- nbformat=5.9.2=pyhd8ed1ab_0
- nest-asyncio=1.5.8=pyhd8ed1ab_0
- netcdf4=1.6.5=nompi_py310h6477780_100
- networkx=3.2.1=pyhd8ed1ab_0
- notebook=6.5.6=pyha770c72_0
- notebook-shim=0.2.3=pyhd8ed1ab_0
- numba=0.58.1=py310h9ccaf4f_0
- numcodecs=0.12.1=py310h00ffb61_0
- numpy=1.26.0=py310hf667824_0
- nvidia-ml-py=12.535.133=pyhd8ed1ab_0
- opencensus=0.11.3=pyhd8ed1ab_0
- opencensus-context=0.1.3=py310h5588dad_2
- openjpeg=2.5.0=h3d672ee_3
- openssl=3.4.1=ha4e3fda_0
- orc=1.9.0=hada7b9e_1
- overrides=7.4.0=pyhd8ed1ab_0
- packaging=23.2=pyhd8ed1ab_0
- pandas=2.1.2=py310hecd3228_0
- pandoc=3.1.3=h57928b3_0
- pandocfilters=1.5.0=pyhd8ed1ab_0
- panel=1.3.1=pyhd8ed1ab_0
- param=2.0.1=pyhca7485f_0
- parso=0.8.3=pyhd8ed1ab_0
- partd=1.4.1=pyhd8ed1ab_0
- pcre2=10.40=h17e33f8_0
- pexpect=4.8.0=pyh1a96a4e_2
- pickleshare=0.7.5=py_1003
- pillow=10.1.0=py310h1e6a543_0
- pip=23.3.1=pyhd8ed1ab_0
- pixman=0.42.2=h63175ca_0
- pkgutil-resolve-name=1.3.10=pyhd8ed1ab_1
- platformdirs=3.11.0=pyhd8ed1ab_0
- poppler=23.10.0=hc2f3c52_0
- poppler-data=0.4.12=hd8ed1ab_0
- postgresql=16.3=h7f155c9_0
- pqdm=0.2.0=pyhd8ed1ab_0
- proj=9.3.0=he13c7e8_2
- prometheus_client=0.18.0=pyhd8ed1ab_0
- prompt-toolkit=3.0.39=pyha770c72_0
- prompt_toolkit=3.0.39=hd8ed1ab_0
- protobuf=4.21.12=py310h00ffb61_0
- psutil=5.9.5=py310h8d17308_1
- pthread-stubs=0.4=hcd874cb_1001
- pthreads-win32=2.9.1=hfa6e2cd_3
- ptyprocess=0.7.0=pyhd3deb0d_0
- pure_eval=0.2.2=pyhd8ed1ab_0
- py-spy=0.3.14=h975169c_0
- pyarrow=12.0.1=py310hd1a9178_8_cpu
- pyasn1=0.5.0=pyhd8ed1ab_0
- pyasn1-modules=0.3.0=pyhd8ed1ab_0
- pycparser=2.21=pyhd8ed1ab_0
- pyct=0.4.6=py_0
- pyct-core=0.4.6=py_0
- pydantic=1.10.13=py310h8d17308_1
- pygments=2.16.1=pyhd8ed1ab_0
- pykdtree=1.3.9=py310h3e78b6c_1
- pyopenssl=23.3.0=pyhd8ed1ab_0
- pyparsing=3.1.1=pyhd8ed1ab_0
- pyproj=3.6.1=py310hebb2149_4
- pyresample=1.27.1=py310hecd3228_2
- pyshp=2.3.1=pyhd8ed1ab_0
- pysocks=1.7.1=pyh0701188_6
- pystac=1.9.0=pyhd8ed1ab_0
- pystac-client=0.7.5=pyhd8ed1ab_0
- python=3.10.13=h4de0772_0_cpython
- python-dateutil=2.8.2=pyhd8ed1ab_0
- python-fastjsonschema=2.18.1=pyhd8ed1ab_0
- python-json-logger=2.0.7=pyhd8ed1ab_0
- python-tzdata=2023.3=pyhd8ed1ab_0
- python_abi=3.10=4_cp310
- pytz=2023.3.post1=pyhd8ed1ab_0
- pyu2f=0.1.5=pyhd8ed1ab_0
- pyviz_comms=2.3.2=pyhd8ed1ab_0
- pywavelets=1.4.1=py310h3e78b6c_1
- pywin32=306=py310h00ffb61_2
- pywinpty=2.0.12=py310h00ffb61_0
- pyyaml=6.0.1=py310h8d17308_1
- pyzmq=24.0.1=py310hcd737a0_1
- qtconsole-base=5.5.0=pyha770c72_0
- qtpy=2.4.1=pyhd8ed1ab_0
- rasterio=1.3.9=py310h4d3659c_0
- ray-core=2.7.1=py310h139b6d1_0
- ray-default=2.7.1=py310h5588dad_0
- re2=2023.03.02=hd4eee63_0
- referencing=0.30.2=pyhd8ed1ab_0
- requests=2.31.0=pyhd8ed1ab_0
- rfc3339-validator=0.1.4=pyhd8ed1ab_0
- rfc3986-validator=0.1.1=pyh9f0ad1d_0
- rioxarray=0.15.0=pyhd8ed1ab_0
- rpds-py=0.12.0=py310h87d50f1_0
- rsa=4.9=pyhd8ed1ab_0
- rtree=1.1.0=py310h1cbd46b_0
- s3fs=2023.10.0=pyhd8ed1ab_0
- s3transfer=0.7.0=pyhd8ed1ab_0
- scikit-image=0.20.0=py310h1c4a608_1
- scikit-learn=1.3.2=py310hfd2573f_1
- scipy=1.11.3=py310hf667824_1
- send2trash=1.8.2=pyh08f2357_0
- setproctitle=1.3.3=py310h8d17308_0
- setuptools=68.2.2=pyhd8ed1ab_0
- shapely=2.0.2=py310h839b4a8_0
- six=1.16.0=pyh6c4a22f_0
- smart_open=6.4.0=pyhd8ed1ab_0
- smmap=5.0.0=pyhd8ed1ab_0
- snappy=1.1.10=hfb803bf_0
- sniffio=1.3.0=pyhd8ed1ab_0
- snuggs=1.4.7=py_0
- sortedcontainers=2.4.0=pyhd8ed1ab_0
- soupsieve=2.5=pyhd8ed1ab_1
- spectral=0.23.1=pyh1a96a4e_0
- sqlite=3.44.0=hcfcfb64_0
- stack_data=0.6.2=pyhd8ed1ab_0
- tbb=2021.10.0=h91493d7_2
- tblib=2.0.0=pyhd8ed1ab_0
- terminado=0.17.0=pyh08f2357_0
- threadpoolctl=3.2.0=pyha21a80b_0
- tifffile=2020.6.3=py_0
- tiledb=2.16.3=h1ffc264_3
- tinycss2=1.2.1=pyhd8ed1ab_0
- tinynetrc=1.3.1=pyhd8ed1ab_0
- tk=8.6.13=h5226925_1
- tomli=2.0.1=pyhd8ed1ab_0
- toolz=0.12.0=pyhd8ed1ab_0
- tornado=6.3.3=py310h8d17308_1
- tqdm=4.66.1=pyhd8ed1ab_0
- traitlets=5.13.0=pyhd8ed1ab_0
- types-python-dateutil=2.8.19.14=pyhd8ed1ab_0
- typing_utils=0.1.0=pyhd8ed1ab_0
- tzdata=2023c=h71feb2d_0
- uc-micro-py=1.0.1=pyhd8ed1ab_0
- ucrt=10.0.22621.0=h57928b3_0
- unicodedata2=15.1.0=py310h8d17308_0
- uri-template=1.3.0=pyhd8ed1ab_0
- uriparser=0.9.7=h1537add_1
- urllib3=1.26.18=pyhd8ed1ab_0
- vc=14.3=h64f974e_17
- vc14_runtime=14.36.32532=hdcecf7f_17
- virtualenv=20.21.0=pyhd8ed1ab_0
- vs2015_runtime=14.36.32532=h05e6639_17
- wcwidth=0.2.9=pyhd8ed1ab_0
- webcolors=1.13=pyhd8ed1ab_0
- webencodings=0.5.1=pyhd8ed1ab_2
- wheel=0.41.3=pyhd8ed1ab_0
- widgetsnbextension=4.0.9=pyhd8ed1ab_0
- win_inet_pton=1.1.0=pyhd8ed1ab_6
- winpty=0.4.3=4
- wrapt=1.16.0=py310h8d17308_0
- xarray=2023.10.1=pyhd8ed1ab_0
- xerces-c=3.2.4=h63175ca_3
- xorg-libxau=1.0.11=hcd874cb_0
- xorg-libxdmcp=1.1.3=hcd874cb_0
- xyzservices=2023.10.1=pyhd8ed1ab_0
- xz=5.2.6=h8d14728_0
- y-py=0.5.5=py310h87d50f1_2
- yaml=0.2.5=h8ffe710_2
- yarl=1.9.2=py310h8d17308_1
- ypy-websocket=0.8.2=pyhd8ed1ab_0
- zarr=2.16.1=pyhd8ed1ab_0
- zeromq=4.3.4=h0e60522_1
- zict=3.0.0=pyhd8ed1ab_0
- zipp=3.17.0=pyhd8ed1ab_0
- zlib=1.2.13=hcfcfb64_5
- zstd=1.5.5=h12be248_0
- pip:
- attrs==25.3.0
- earthaccess==0.14.0
- h11==0.14.0
- importlib-resources==6.5.2
- outcome==1.3.0.post0
- python-cmr==0.13.0
- selenium==4.30.0
- trio==0.29.0
- trio-websocket==0.12.2
- typing-extensions==4.12.2
- websocket-client==1.8.0
- wsproto==1.2.0


@@ -5,7 +5,7 @@
-------------------------------------------------------------------------------
Authors: Hong Xie
- Last Updated: 2025-07-07
+ Last Updated: 2025-09-11
===============================================================================
"""
@@ -256,10 +256,11 @@ def setup_logging(log_file: str = "dask_worker.log"):
    Parameters
    ----------
    log_file : str, optional
        日志文件路径, by default "dask_worker.log"
    """
    logging.basicConfig(
        level=logging.INFO,
        format="%(levelname)s:%(asctime)s ||| %(message)s",
@@ -269,6 +270,7 @@ def setup_logging(log_file: str = "dask_worker.log"):
        ],
    )

def load_band_as_arr(org_tif_path, band_num=1):
    """
    读取波段数据
@@ -410,15 +412,26 @@ def create_quality_mask(quality_data, bit_nums: list = [0, 1, 2, 3, 4, 5]):
def clip_image(
-     image: xr.DataArray | xr.Dataset, roi: gpd.GeoDataFrame, clip_by_box=True
- ):
+     image: xr.DataArray | xr.Dataset, roi: gpd.GeoDataFrame = None, clip_by_box=True
+ ) -> xr.DataArray | xr.Dataset | None:
    """
    Clip Image data to ROI.

-   args:
-       image (xarray.DataArray | xarray.Dataset): 通过 rioxarray.open_rasterio 加载的图像数据.
-       roi (gpd.GeoDataFrame): 感兴趣区数据.
-       clip_by_box (bool): 是否使用 bbox 进行裁剪, 默认为 True.
+   Parameters
+   ----------
+   image : xarray.DataArray | xarray.Dataset
+       通过 rioxarray.open_rasterio 加载的图像数据.
+   roi : gpd.GeoDataFrame, optional
+       感兴趣区数据.
+   clip_by_box : bool, optional
+       是否使用 bbox 进行裁剪, 默认为 True.
+
+   Returns
+   -------
+   xarray.DataArray | xarray.Dataset | None
+       裁剪后的图像数据. 若裁剪后数据全为无效值, 则返回 None.
    """

    if roi is None:
@@ -443,15 +456,25 @@ def clip_image(
    return image_cliped

- def clip_roi_image(file_path: str, grid: gpd.GeoDataFrame) -> xr.DataArray | None:
+ def clip_roi_image(
+     file_path: str, grid: gpd.GeoDataFrame = None
+ ) -> xr.DataArray | None:
    """
    按研究区范围裁剪影像

-   args:
-       file_path (str): 待裁剪影像路径
-       grid (gpd.GeoDataFrame): 格网范围
-   return:
-       raster_cliped (xr.DataArray): 裁剪后的影像
+   Parameters
+   ----------
+   file_path : str
+       待裁剪影像路径
+   grid : gpd.GeoDataFrame, optional
+       格网范围, 默认为 None.
+
+   Returns
+   -------
+   raster_cliped : xr.DataArray
+       裁剪后的影像
    """
    raster = open_rasterio(file_path)
    try:
@@ -487,15 +510,27 @@ def reproject_image(
    target_crs: str = None,
    target_shape: tuple = None,
    target_image: xr.DataArray = None,
- ):
+ ) -> xr.DataArray:
    """
    Reproject Image data to target CRS or target data.

-   args:
-       image (xarray.DataArray): 通过 rioxarray.open_rasterio 加载的图像数据.
-       target_crs (str): Target CRS, eg. EPSG:4326.
-       target_shape (tuple): Target shape, eg. (1000, 1000).
-       target_image (xarray.DataArray): Target image, eg. rioxarray.open_rasterio 加载的图像数据.
+   Parameters
+   ----------
+   image : xarray.DataArray
+       通过 rioxarray.open_rasterio 加载的图像数据.
+   target_crs : str, optional
+       Target CRS, eg. EPSG:4326.
+   target_shape : tuple, optional
+       Target shape, eg. (1000, 1000).
+   target_image : xarray.DataArray, optional
+       Target image, eg. rioxarray.open_rasterio 加载的图像数据.
+
+   Returns
+   -------
+   xarray.DataArray
+       重投影后的图像数据.
    """
    if target_image is not None:
        # 使用 target_image 进行重投影匹配
@@ -506,9 +541,9 @@ def reproject_image(
            target_image.shape[1] == image.shape[1]
            and target_image.shape[2] == image.shape[2]
        ):
-           # 若判断为降尺度/等尺度, 则直接使用 cubic_spline 重采样投影到目标影像
+           # 若判断为降尺度/等尺度, 则直接使用 cubic 双三次插值重采样投影到目标影像
            image_reprojed = image.rio.reproject_match(
-               target_image, resampling=Resampling.cubic_spline
+               target_image, resampling=Resampling.cubic
            )
        else:
            # print("target_image shape is not match with image shape", image.shape, "to", target_image.shape)
@@ -520,7 +555,7 @@ def reproject_image(
        # 使用 target_crs 进行重投影
        reproject_kwargs = {
            "dst_crs": target_crs,
-           "resampling": Resampling.cubic_spline,
+           "resampling": Resampling.cubic,
        }
        if target_shape is not None:
            reproject_kwargs["shape"] = target_shape
@@ -563,7 +598,7 @@ def mosaic_images(
        tif_list,
        nodata=nodata,
        method=method,
-       resampling=Resampling.cubic_spline,
+       resampling=Resampling.cubic,
    )
    # 将结果重新构建为 xarray 数据集
    # 单张SAR影像直接读取 transform: 233400.0 30.0 0.0 3463020.0 0.0 -30.0
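The `cubic_spline` to `cubic` switch above applies to `reproject`, `reproject_match`, and the mosaicking call alike; a minimal, self-contained illustration of the updated call pattern on synthetic data (not repository code):

```python
import numpy as np
import rioxarray  # noqa: F401  (registers the .rio accessor)
import xarray as xr
from rasterio.enums import Resampling

# Synthetic single-band raster in EPSG:4326 over a small lon/lat window.
data = np.random.rand(1, 100, 100).astype("float32")
da = xr.DataArray(
    data,
    dims=("band", "y", "x"),
    coords={
        "band": [1],
        "y": np.linspace(31.6, 30.5, 100),
        "x": np.linspace(113.0, 114.3, 100),
    },
)
da = da.rio.write_crs("EPSG:4326")

# Reproject to UTM zone 49N with bicubic resampling, as the updated helpers now do.
da_utm = da.rio.reproject("EPSG:32649", resampling=Resampling.cubic)
print(da_utm.rio.crs, da_utm.shape)
```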

utils/sr2rgb.py (new file, 104 lines)

@@ -0,0 +1,104 @@
"""
COG 格式的 Red, Green, Blue 单波段地表反射率图像合成为 RGB 图像
1. 对比度线性拉伸至 0-255;
2. 合并波段;
3. 保存为 uint8 格式 RGB 图像;
"""
import os
import xarray as xr
from rioxarray import open_rasterio
import numpy as np
def sr2rgb(red_path: str, green_path: str, blue_path: str, output_path: str) -> None:
"""
将红绿蓝三个单波段地表反射率图像合成为RGB图像
参数:
red_path (str): 红色波段文件路径
green_path (str): 绿色波段文件路径
blue_path (str): 蓝色波段文件路径
output_path (str): 输出RGB图像路径
"""
# 检查文件是否存在
for path in [red_path, green_path, blue_path]:
if not os.path.exists(path):
raise FileNotFoundError(f"文件不存在: {path}")
# 读取三个波段数据
red_band = open_rasterio(red_path, masked=True).squeeze(dim="band", drop=True)
green_band = open_rasterio(green_path, masked=True).squeeze(dim="band", drop=True)
blue_band = open_rasterio(blue_path, masked=True).squeeze(dim="band", drop=True)
# 暂存元数据
y_coords = red_band.y
x_coords = red_band.x
crs = red_band.rio.crs
transform = red_band.rio.transform()
def stretch_band(band):
"""
线性拉伸到0-255范围
"""
# 处理NaN值与负值
band_no_nan = np.where(np.isnan(band), 0, band)
band_no_nan = np.where(band_no_nan < 0, 0, band_no_nan)
band_min = np.min(band_no_nan)
band_max = np.max(band_no_nan)
# 避免除零错误
if band_max == band_min:
stretched = np.zeros_like(band_no_nan, dtype=np.uint8)
else:
stretched = ((band_no_nan - band_min) / (band_max - band_min) * 255).astype(
np.uint8
)
return stretched
red_stretched = stretch_band(red_band.values)
green_stretched = stretch_band(green_band.values)
blue_stretched = stretch_band(blue_band.values)
# 合并三个波段为RGB图像
rgb_array = xr.DataArray(
np.dstack((red_stretched, green_stretched, blue_stretched)),
dims=("y", "x", "band"),
coords={"band": [1, 2, 3], "y": y_coords, "x": x_coords},
)
# 转置维度顺序以符合rioxarray要求
rgb_array = rgb_array.transpose("band", "y", "x")
# 写入元数据
rgb_array.rio.write_crs(crs, inplace=True)
rgb_array.rio.write_transform(transform, inplace=True)
rgb_array.rio.write_nodata(0, inplace=True)
# 保存为TIFF文件
rgb_array.rio.to_raster(output_path, dtype="uint8")
print(f"RGB图像已保存到: {output_path}")
return
if __name__ == "__main__":
# tif_dir = "D:\\NASA_EarthData_Script\\data\\HLS\\2024\\2024012"
# red_path = os.path.join(
# tif_dir, "HLS.S30.T49RGP.2024012T031101.v2.0.RED.subset.tif"
# )
# green_path = os.path.join(
# tif_dir, "HLS.S30.T49RGP.2024012T031101.v2.0.GREEN.subset.tif"
# )
# blue_path = os.path.join(
# tif_dir, "HLS.S30.T49RGP.2024012T031101.v2.0.BLUE.subset.tif"
# )
# output_path = os.path.join(tif_dir, "HLS.S30.T49RGP.2024012T031101.v2.0.RGB.tif")
tif_dir = "D:\\NASA_EarthData_Script\\data\\HLS\\2025\\2025011"
red_path = os.path.join(
tif_dir, "HLS.S30.T49RGP.2025011T031009.v2.0.RED.subset.tif"
)
green_path = os.path.join(
tif_dir, "HLS.S30.T49RGP.2025011T031009.v2.0.GREEN.subset.tif"
)
blue_path = os.path.join(
tif_dir, "HLS.S30.T49RGP.2025011T031009.v2.0.BLUE.subset.tif"
)
output_path = os.path.join(tif_dir, "HLS.S30.T49RGP.2025011T031009.v2.0.RGB.tif")
sr2rgb(red_path, green_path, blue_path, output_path)