# hima8_pv/main.py
# Standard library
import os
import glob
import datetime as dt

# Third-party / project modules
import Download_AOD as Daod  # FTP download helpers for Himawari-8 data
# import AOD_NetCDF_to_GeoTIFF as trans  # NetCDF -> GeoTIFF conversion (currently unused)
from read_par_data import trans2csv
from logzero import logger
today = dt.date(2022, 8, 1)
2022-11-09 10:16:06 +08:00
print(today)
_yearNum = today.year
_monNum = today.month
_dayNum = today.day
_yearStr = ""
_monStr = ""
_dayStr = ""
2022-11-24 10:35:58 +08:00
_hourStr = [f"0{x}" if x < 10 else str(x) for x in range(24)]
2022-11-09 10:16:06 +08:00
if __name__ == "__main__":
    # Connect to the JAXA P-Tree FTP server.
    # YesdayNum is needed by the daily-data branch (yesterday's file).
    ftp = Daod.myFTP(host='ftp.ptree.jaxa.jp', YesdayNum=_dayNum - 1)
    # Account credentials (register your own and replace these).
    # NOTE(review): credentials are hard-coded in source — consider moving
    # them to environment variables or a config file.
    ftp.Login('jh_zhao_asagi.waseda.jp', 'SP+wari8')

    # Files are downloaded from ftp_filePath on the server into dst_filePath.
    dst_filePath_root = './data'
    dst_filePath = dst_filePath_root + "/" + dt.datetime.strftime(today, '%Y-%m-%d')
    if not os.path.exists(dst_filePath):
        os.makedirs(dst_filePath)

    # Remote path layout (from the original notes):
    #   hourly & daily data prefix:  /pub/himawari/L3/ARP/031  (2020-22)
    #   10-minute data prefix:       /pub/himawari/L2/ARP/030  (2020-22)
    #   daily example:     /pub/himawari/L3/ARP/031/202008/daily/
    #   hourly example:    /pub/himawari/L3/ARP/031/202008/19/
    #   10-minute example: /pub/himawari/L2/ARP/030/202210/10/

    logger.info("请选择要下载的数据:")
    _choice = int(input("1.AOD小时数据当天所有 2.AOD日均数据昨天 3.PAR分钟数据当天所有\n"))
    # Download_Path: raw downloaded files; Analysis_Path: processed (GeoTIFF) output.
    Download_Path = ""
    Analysis_Path = ""

    if _choice == 1:
        # Hourly AOD data for the whole current day.
        _yearStr, _monStr, _dayStr = Daod.getDateStr(_yearNum, _monNum, _dayNum)
        ftp_filePath = "/pub/himawari/L3/ARP/031" + "/" + _yearStr + _monStr + "/" + _dayStr + "/"
        Download_Path = dst_filePath + "/AOD_Hourly_Download"
        if not os.path.exists(Download_Path):
            os.makedirs(Download_Path)
        # Remove leftover *.temp files from an interrupted previous run.
        Daod.deleteFile(Download_Path, suf='.temp')
        Analysis_Path = dst_filePath + "/AOD_Hourly_Analysis"
        if not os.path.exists(Analysis_Path):
            os.makedirs(Analysis_Path)
        ftp.DownLoadFileTree(Download_Path, ftp_filePath, _choice)
    elif _choice == 2:
        # Daily-mean AOD data for yesterday.
        _yearNum, _monNum, _dayNum = Daod.getYesterday(_yearNum, _monNum, _dayNum)
        _yearStr, _monStr, _dayStr = Daod.getDateStr(_yearNum, _monNum, _dayNum)
        # NOTE(review): this branch uses .../030 while the hourly branch and the
        # path notes above use .../031 for L3 data — confirm which is correct.
        ftp_filePath = "/pub/himawari/L3/ARP/030" + "/" + _yearStr + _monStr + "/" + "daily" + "/"
        Download_Path = dst_filePath + "/AOD_Daily_Download"
        if not os.path.exists(Download_Path):
            os.makedirs(Download_Path)
        # Remove leftover *.temp files from an interrupted previous run.
        Daod.deleteFile(Download_Path, suf='.temp')
        Analysis_Path = dst_filePath + "/AOD_Daily_Analysis"
        if not os.path.exists(Analysis_Path):
            os.makedirs(Analysis_Path)
        ftp.DownLoadFileTree(Download_Path, ftp_filePath, _choice)
    elif _choice == 3:
        # 10-minute PAR data, one day at a time over a date range.
        # NOTE(review): `today` starts at 2022-08-01 (set at the top of the
        # file), which is already past 2022-07-23, so this loop body never
        # executes as written — confirm the intended end date.
        while today <= dt.date(2022, 7, 23):
            _yearNum = today.year
            _monNum = today.month
            _dayNum = today.day
            # Zero-padded hours "00".."23" for this day's hourly directories.
            _hourStr = [f"{x:02d}" for x in range(24)]
            _yearStr, _monStr, _dayStr = Daod.getDateStr(_yearNum, _monNum, _dayNum)
            ftp_filePath = "/pub/himawari/L2/PAR/020" + "/" + _yearStr + _monStr + "/" + _dayStr + "/"
            dst_filePath = dst_filePath_root + "/" + dt.datetime.strftime(today, '%Y-%m-%d')
            Download_Path = dst_filePath + "/PAR_Minutes_Download"
            if not os.path.exists(Download_Path):
                os.makedirs(Download_Path)
            # Remove leftover *.temp files from an interrupted previous run.
            Daod.deleteFile(Download_Path, suf='.temp')
            for hour in _hourStr:
                logger.info(f"{ftp_filePath}{hour}/")
                ftp.DownLoadFileTree(Download_Path, f"{ftp_filePath}{hour}/", _choice)
            # Collect the day's NetCDF files and convert them to one CSV.
            # Fix: use os.path.join instead of a hard-coded "\\" separator so
            # the glob also matches on non-Windows systems.
            data_list = glob.glob(os.path.join(Download_Path, "*.nc"))
            logger.info(data_list)
            try:
                date_df = trans2csv(data_list)
                # Fix: Download_Path is already relative ("./data/..."), so join
                # onto it directly instead of prefixing "./" a second time.
                date_df.to_csv(
                    os.path.join(Download_Path, f'{_yearStr}-{_monStr}-{_dayStr}PAR.csv'),
                    encoding='utf-8-sig',
                    index=False,
                )
                # Remove the raw .nc files once the CSV has been written.
                Daod.deleteFile(Download_Path, '.nc')
            except Exception as e:
                # Best-effort: log the failure and continue with the next day.
                logger.error(e)
                logger.error(Download_Path)
            today = today + dt.timedelta(days=1)
    else:
        logger.error("选择错误!")

    # Downloads finished; close the FTP connection.
    ftp.close()
    logger.info("下载完成!")

    # Post-processing (NetCDF -> GeoTIFF) kept for reference:
    # data_list = glob.glob(Download_Path + "\\*.nc")
    # for i in range(len(data_list)):
    #     data = data_list[i]
    #     trans.NC_to_tiffs(data, Analysis_Path)
    #     print(data + "-----转tif成功")
    print("----转换结束----")