update code
This commit is contained in:
parent
8fd634888a
commit
ffdebde177
|
@ -0,0 +1,57 @@
|
|||
# 模块导入
|
||||
import numpy as np
|
||||
import netCDF4 as nc
|
||||
from osgeo import gdal, osr, ogr
|
||||
import os
|
||||
import glob
|
||||
|
||||
|
||||
def NC_to_tiffs(data, Output_folder):
    """Convert one Himawari L3 NetCDF file to a WGS 84 GeoTIFF.

    Parameters
    ----------
    data : str
        Path to the input ``.nc`` file. It must contain "longitude",
        "latitude" and "SWR" variables (both the daily and the hourly
        products use the variable name "SWR").
    Output_folder : str
        Directory the output ``.tif`` is written into. The output name
        is built from the 2nd and 3rd underscore-separated fields of
        the input file name (date and time stamp).
    """
    nc_data_obj = nc.Dataset(data)
    try:
        Lon = nc_data_obj.variables["longitude"][:]
        Lat = nc_data_obj.variables["latitude"][:]

        # Reading applies the scale factor automatically, but NoData
        # cells come back as -32768; map them all to 0 with a single
        # vectorized mask instead of a 2401x2401 Python loop.
        AOD_arr = np.asarray(nc_data_obj.variables["SWR"])
        AOD_arr[AOD_arr == -32768] = 0.0

        # Geographic extent of the image.
        LonMin, LatMax, LonMax, LatMin = Lon.min(), Lat.max(), Lon.max(), Lat.min()

        # Resolution (the grids are nominally 2401 x 2401).
        N_Lat = len(Lat)
        N_Lon = len(Lon)
        Lon_Res = (LonMax - LonMin) / (float(N_Lon) - 1)
        Lat_Res = (LatMax - LatMin) / (float(N_Lat) - 1)

        # Build the output path portably (the original hard-coded "\\").
        driver = gdal.GetDriverByName("GTiff")
        TIFF_name = os.path.basename(data)
        parts = TIFF_name.split("_")
        out_tif_name = os.path.join(Output_folder, parts[1] + "_" + parts[2] + ".tif")
        out_tif = driver.Create(out_tif_name, N_Lon, N_Lat, 1, gdal.GDT_Float32)

        # Geo-transform for the display extent. The latitude resolution
        # MUST be negative: row index increases southward.
        geotransform = (LonMin, Lon_Res, 0, LatMax, 0, -Lat_Res)
        out_tif.SetGeoTransform(geotransform)

        # Output coordinate system: "WGS 84", AUTHORITY["EPSG","4326"].
        srs = osr.SpatialReference()
        srs.ImportFromEPSG(4326)
        out_tif.SetProjection(srs.ExportToWkt())

        # Write band 1 (in memory) and flush to disk.
        out_tif.GetRasterBand(1).WriteArray(AOD_arr)
        out_tif.FlushCache()
        out_tif = None  # GDAL requires dropping the reference to close the file
    finally:
        # Always release the NetCDF handle, even if GDAL raises.
        nc_data_obj.close()
|
|
@ -0,0 +1,168 @@
|
|||
import os
|
||||
import ftplib
|
||||
import time
|
||||
|
||||
|
||||
# Convert an integer date to the strings required by the FTP path layout.
def getDateStr(yearNum, monNum, dayNum):
    """Format a date as FTP-path strings.

    Parameters
    ----------
    yearNum, monNum, dayNum : int
        Calendar year, month and day.

    Returns
    -------
    tuple[str, str, str]
        (4-digit year, zero-padded 2-digit month, zero-padded 2-digit day).
    """
    # Format specifiers replace the original manual "0"-prefixing.
    return str(yearNum), f"{monNum:02d}", f"{dayNum:02d}"
|
||||
|
||||
|
||||
# Compute the date of the previous day.
def getYesterday(year, month, day):
    """Return (year, month, day) of the day before the given date.

    Delegates to :mod:`datetime`, which handles month boundaries and
    the full Gregorian leap-year rules (including the /100 and /400
    century cases) — replacing ~80 lines of hand-rolled calendar logic.

    Raises
    ------
    ValueError
        If (year, month, day) is not a valid calendar date.
    """
    import datetime  # local import: this module otherwise only needs os/ftplib/time
    yesterday = datetime.date(year, month, day) - datetime.timedelta(days=1)
    return yesterday.year, yesterday.month, yesterday.day
|
||||
|
||||
|
||||
# Test a file name against one or more extensions.
def suffix(file, *suffixName):
    """Return True if *file* ends with any of the given suffixes.

    ``str.endswith`` accepts a tuple of candidates directly, replacing
    the original ``True in map(...)`` construction. With no suffixes
    given it returns False, matching the original behavior.
    """
    return file.endswith(suffixName)
|
||||
|
||||
|
||||
# Remove leftover ".temp" files (partial downloads) from a directory.
def deleteFile(fileDir):
    """Delete every file with a '.temp' extension directly under *fileDir*."""
    targetDir = fileDir
    # Collect the stale temp files first, then remove them.
    leftovers = [name for name in os.listdir(targetDir) if suffix(name, '.temp')]
    for name in leftovers:
        os.remove(os.path.join(targetDir, name))
|
||||
|
||||
|
||||
class myFTP:
    # NOTE(review): this FTP handle is a *class* attribute, so every
    # myFTP instance shares the same connection object — confirm that
    # only one instance is ever created (the driver script creates one).
    ftp = ftplib.FTP()

    # Connect to the FTP server. host is an IP/hostname; port defaults to 21.
    def __init__(self, host, port=21, YesdayNum=1):
        self.ftp.connect(host, port)
        # Day-of-month used by DownLoadFileTree(choice=2) to select
        # yesterday's daily file.
        self._dayNum = YesdayNum

    # Log in with the given user name and password.
    def Login(self, user, password):
        self.ftp.login(user, password)
        print(self.ftp.welcome)  # show the server's login banner

    # Download a single file. LocalFile is the local path/name,
    # RemoteFile the remote path/name. Always returns True.
    def DownLoadFile(self, LocalFile, RemoteFile):
        bufSize = 102400  # transfer block size in bytes

        file_handler = open(LocalFile, 'wb')
        print(file_handler)

        # Retrieve the remote file in binary mode, streaming each block
        # into the local file.
        self.ftp.retrbinary('RETR ' + RemoteFile, file_handler.write, bufSize)
        self.ftp.set_debuglevel(0)
        file_handler.close()
        return True

    # Download all matching files in a remote directory. LocalDir is the
    # local destination, RemoteDir the FTP directory, and choice selects
    # the product type (1 = hourly AOD, 2 = daily AOD, 3 = 10-minute PAR).
    def DownLoadFileTree(self, LocalDir, RemoteDir, choice):
        # print("remoteDir:", RemoteDir)
        # Create the local directory if it does not exist yet.
        if not os.path.exists(LocalDir):
            os.makedirs(LocalDir)

        # List every file name in the remote directory
        # (order is not guaranteed by the server).
        self.ftp.cwd(RemoteDir)
        RemoteNames = self.ftp.nlst()
        RemoteNames.reverse()

        # print("RemoteNames:", RemoteNames)
        for file in RemoteNames:
            # Download to a ".temp" name first and rename to the final
            # name only when complete, so a download interrupted on a
            # previous run is never mistaken for a finished file.
            Local = os.path.join(LocalDir, file[0:-3] + ".temp")
            LocalNew = os.path.join(LocalDir, file)

            '''
            下载小时文件,只下载UTC时间1时至10时(北京时间9时至18时)的文件
            下载的文件必须是nc格式
            若已经存在,则跳过下载
            '''
            # Hourly file name example:
            #   H08_20200819_0700_1HARP030_FLDK.02401_02401.nc
            if choice == 1:
                # file[13:15] is presumably the UTC hour field of the
                # name above — TODO confirm the selected range matches
                # the intent described in the string block.
                if (int(file[13:15]) >= 0 and int(file[13:15]) <= 12) or (int(file[13:15]) >= 21):
                    if not os.path.exists(LocalNew):
                        print("Downloading the file of %s" % file)
                        self.DownLoadFile(Local, file)
                        os.rename(Local, LocalNew)
                        print("The download of the file of %s has finished\n" % file)
                    elif os.path.exists(LocalNew):
                        print("The file of %s has already existed!\n" % file)
                else:
                    pass

            # Daily file name example:
            #   H08_20200802_0000_1DARP030_FLDK.02401_02401.nc
            elif choice == 2:
                # file[10:12] is presumably the day-of-month field;
                # only yesterday's file (self._dayNum) is taken.
                if int(file[10:12]) == self._dayNum and not os.path.exists(LocalNew):
                    print("Downloading the file of %s" % file)
                    self.DownLoadFile(Local, file)
                    os.rename(Local, LocalNew)
                    print("The download of the file of %s has finished\n" % file)
                elif int(file[10:12]) == self._dayNum and os.path.exists(LocalNew):
                    print("The file of %s has already existed!" % file)

            elif choice == 3:
                # Minute-level data: the hour is fixed by the remote
                # directory itself, so no hour filtering is needed here;
                # only full-disk 02401x02401 grids are taken.
                if not os.path.exists(LocalNew) and '02401_02401' in file:
                    print("Downloading the file of %s" % file)
                    self.DownLoadFile(Local, file)
                    os.rename(Local, LocalNew)
                    print("The download of the file of %s has finished\n" % file)
                elif os.path.exists(LocalNew):
                    print("The file of %s has already existed!\n" % file)
                else:
                    pass
        self.ftp.cwd("..")
        return

    # Close the FTP connection.
    def close(self):
        self.ftp.quit()
|
File diff suppressed because one or more lines are too long
|
@ -0,0 +1,116 @@
|
|||
import os
|
||||
import Download_AOD as Daod
|
||||
import datetime as dt
|
||||
|
||||
# Start date of the download window (inclusive).
today = dt.date(2022, 8, 1)
print(today)
_yearNum = today.year
_monNum = today.month
_dayNum = today.day
_yearStr = ""
_monStr = ""
_dayStr = ""
# Two-digit hour strings "00".."22".
# NOTE(review): range(23) stops at 22, so hour 23 is never listed —
# confirm this is intentional.
_hourStr = [f"0{x}"if x < 10 else str(x) for x in range(23)]

if __name__ == "__main__":
    # Connect to the FTP server. YesdayNum (yesterday's day-of-month)
    # is needed when downloading the daily product (choice 2).
    ftp = Daod.myFTP(host='ftp.ptree.jaxa.jp', YesdayNum=_dayNum - 1)

    # Log in with user name and password (register your own and replace).
    # NOTE(review): hard-coded credentials — move them to environment
    # variables or a config file before sharing this script.
    ftp.Login('jh_zhao_asagi.waseda.jp', 'SP+wari8')

    # Files are downloaded from ftp_filePath into dst_filePath,
    # one sub-directory per calendar day.
    dst_filePath = './data'
    dst_filePath = dst_filePath + "/" + dt.datetime.strftime(today, '%Y-%m-%d')
    if not os.path.exists(dst_filePath):
        os.makedirs(dst_filePath)

    '''
    下载小时数据和日数据时,前置路径都是:/pub/himawari/L3/ARP/031(20-22年)
    下载每10分钟数据时,前置路径是:/pub/himawari/L2/ARP/030(20-22年)
    下载日数据时,示例路径:/pub/himawari/L3/ARP/031/202008/daily/
    下载小时数据时,示例路径:/pub/himawari/L3/ARP/031/202008/19/
    下载10分钟数据时,示例路径:/pub/himawari/L2/ARP/030/202210/10/
    '''
    print("请选择要下载的数据:")
    _choice = int(input("1.AOD小时数据(当天所有) 2.AOD日均数据(昨天) 3.PAR分钟数据(当天所有)\n"))

    # Download_Path holds the raw downloaded data.
    Download_Path = ""
    # Analysis_Path holds the processed (TIFF-converted) output.
    Analysis_Path = ""

    # Choice 1: hourly AOD data (all of the current day).
    if _choice == 1:
        _yearStr, _monStr, _dayStr = Daod.getDateStr(_yearNum, _monNum, _dayNum)
        ftp_filePath = "/pub/himawari/L3/ARP/031" + "/" + _yearStr + _monStr + "/" + _dayStr + "/"

        Download_Path = dst_filePath + "/AOD_Hourly_Download"
        if not os.path.exists(Download_Path):
            os.makedirs(Download_Path)
        Daod.deleteFile(Download_Path)  # drop .temp leftovers from an interrupted run

        Analysis_Path = dst_filePath + "/AOD_Hourly_Analysis"
        if not os.path.exists(Analysis_Path):
            os.makedirs(Analysis_Path)

        ftp.DownLoadFileTree(Download_Path, ftp_filePath, _choice)

    # Choice 2: daily mean AOD (yesterday's file).
    elif _choice == 2:
        _yearNum, _monNum, _dayNum = Daod.getYesterday(_yearNum, _monNum, _dayNum)
        _yearStr, _monStr, _dayStr = Daod.getDateStr(_yearNum, _monNum, _dayNum)
        ftp_filePath = "/pub/himawari/L3/ARP/030" + "/" + _yearStr + _monStr + "/" + "daily" + "/"

        Download_Path = dst_filePath + "/AOD_Daily_Download"
        if not os.path.exists(Download_Path):
            os.makedirs(Download_Path)
        Daod.deleteFile(Download_Path)  # drop .temp leftovers from an interrupted run

        Analysis_Path = dst_filePath + "/AOD_Daily_Analysis"
        if not os.path.exists(Analysis_Path):
            os.makedirs(Analysis_Path)

        ftp.DownLoadFileTree(Download_Path, ftp_filePath, _choice)

    # Choice 3: 10-minute PAR data, day by day from `today` up to now.
    elif _choice == 3:
        while today <= dt.date.today():
            _yearNum = today.year
            _monNum = today.month
            _dayNum = today.day
            _yearStr = ""
            _monStr = ""
            _dayStr = ""
            _hourStr = [f"0{x}" if x < 10 else str(x) for x in range(23)]
            _yearStr, _monStr, _dayStr = Daod.getDateStr(_yearNum, _monNum, _dayNum)
            ftp_filePath = "/pub/himawari/L2/PAR/020" + "/" + _yearStr + _monStr + "/" + _dayStr + "/"
            Download_Path = dst_filePath + "/PAR_Minutes_Download"
            if not os.path.exists(Download_Path):
                os.makedirs(Download_Path)
            Daod.deleteFile(Download_Path)  # drop .temp leftovers from an interrupted run
            # Minute data is grouped by hour on the server; fetch each
            # hour directory in turn.
            for hour in _hourStr:
                print(f"{ftp_filePath}{hour}/")
                ftp.DownLoadFileTree(Download_Path, f"{ftp_filePath}{hour}/", _choice)
            # Analysis_Path = dst_filePath + "/PAR_Minutes_Analysis"
            # if not os.path.exists(Analysis_Path):
            #     os.makedirs(Analysis_Path)
            today = today + dt.timedelta(days=1)
    else:
        print("选择错误!")

    # Downloading is finished; close the FTP session.
    ftp.close()
    print("下载完成!")

    # Post-processing starts here: collect all .nc files...
    # data_list = glob.glob(Download_Path + "\\*.nc")

    # ...and convert each one to GeoTIFF.
    # for i in range(len(data_list)):
    #     data = data_list[i]
    #     trans.NC_to_tiffs(data, Analysis_Path)
    #     print(data + "-----转tif成功")

    print("----转换结束----")
|
|
@ -0,0 +1,318 @@
|
|||
{
|
||||
"cells": [
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 1,
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"import numpy as np\n",
|
||||
"import netCDF4 as nc"
|
||||
],
|
||||
"metadata": {
|
||||
"collapsed": false,
|
||||
"pycharm": {
|
||||
"name": "#%%\n"
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 10,
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from osgeo import gdal, osr, ogr"
|
||||
],
|
||||
"metadata": {
|
||||
"collapsed": false,
|
||||
"pycharm": {
|
||||
"name": "#%%\n"
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 3,
|
||||
"outputs": [
|
||||
{
|
||||
"data": {
|
||||
"text/plain": "<class 'netCDF4._netCDF4.Dataset'>\nroot group (NETCDF4 data model, file format HDF5):\n title: Himawari-08 AHI equal latitude-longitude map data\n id: H08_20221107_1800_RFL020_FLDK.02401_02401.nc\n date_created: 2022-11-07T18:25:18Z\n pixel_number: 2401\n line_number: 2401\n upper_left_latitude: 60.0\n upper_left_longitude: 80.0\n grid_interval: 0.05\n band_number: 6\n algorithm_version: 0201\n Ancillary meteorological data: JMA forcast\n Ancillary ozone data: JMA objective analysis\n BRDF correction: on (Morel and Maritorena 2001)\n dimensions(sizes): latitude(2401), longitude(2401), band(6), time(1), geometry(17)\n variables(dimensions): float32 latitude(latitude), float32 longitude(longitude), int32 band_id(band), float64 start_time(time), float64 end_time(time), float64 geometry_parameters(geometry), int16 TAOT_02(latitude, longitude), int16 TAAE(latitude, longitude), int16 PAR(latitude, longitude), int16 SWR(latitude, longitude), int16 UVA(latitude, longitude), int16 UVB(latitude, longitude), uint8 QA_flag(latitude, longitude)\n groups: "
|
||||
},
|
||||
"execution_count": 3,
|
||||
"metadata": {},
|
||||
"output_type": "execute_result"
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"data = r\"D:\\Datasets\\Himawari\\pub\\L2_PAR\\20221107\\18\\H08_20221107_1800_RFL020_FLDK.02401_02401.nc\"\n",
|
||||
"nc_data = nc.Dataset(data)\n",
|
||||
"nc_data"
|
||||
],
|
||||
"metadata": {
|
||||
"collapsed": false,
|
||||
"pycharm": {
|
||||
"name": "#%%\n"
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 4,
|
||||
"outputs": [
|
||||
{
|
||||
"data": {
|
||||
"text/plain": "['latitude',\n 'longitude',\n 'band_id',\n 'start_time',\n 'end_time',\n 'geometry_parameters',\n 'TAOT_02',\n 'TAAE',\n 'PAR',\n 'SWR',\n 'UVA',\n 'UVB',\n 'QA_flag']"
|
||||
},
|
||||
"execution_count": 4,
|
||||
"metadata": {},
|
||||
"output_type": "execute_result"
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"list(nc_data.variables.keys())"
|
||||
],
|
||||
"metadata": {
|
||||
"collapsed": false,
|
||||
"pycharm": {
|
||||
"name": "#%%\n"
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 5,
|
||||
"outputs": [
|
||||
{
|
||||
"data": {
|
||||
"text/plain": "<class 'netCDF4._netCDF4.Variable'>\nint16 PAR(latitude, longitude)\n long_name: Photosynthetically active radiation\n units: umol/m^2/s\n scale_factor: 0.1\n add_offset: 0.0\n valid_min: 0\n valid_max: 25000\n missing_value: -32768\nunlimited dimensions: \ncurrent shape = (2401, 2401)\nfilling on, default _FillValue of -32767 used"
|
||||
},
|
||||
"execution_count": 5,
|
||||
"metadata": {},
|
||||
"output_type": "execute_result"
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"nc_data['PAR']"
|
||||
],
|
||||
"metadata": {
|
||||
"collapsed": false,
|
||||
"pycharm": {
|
||||
"name": "#%%\n"
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 6,
|
||||
"outputs": [
|
||||
{
|
||||
"data": {
|
||||
"text/plain": "<class 'netCDF4._netCDF4.Variable'>\nfloat32 latitude(latitude)\n long_name: latitude\n units: degrees_north\nunlimited dimensions: \ncurrent shape = (2401,)\nfilling on, default _FillValue of 9.969209968386869e+36 used"
|
||||
},
|
||||
"execution_count": 6,
|
||||
"metadata": {},
|
||||
"output_type": "execute_result"
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"nc_data['latitude']"
|
||||
],
|
||||
"metadata": {
|
||||
"collapsed": false,
|
||||
"pycharm": {
|
||||
"name": "#%%\n"
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 7,
|
||||
"outputs": [
|
||||
{
|
||||
"data": {
|
||||
"text/plain": "array([[ 0. , 0. , 0. , ..., 0. , 0. ,\n 0. ],\n [ 0. , 0. , 0. , ..., 0. , 0. ,\n 0. ],\n [ 0. , 0. , 0. , ..., 0. , 0. ,\n 0. ],\n ...,\n [ 0. , 0. , 0. , ..., 181.6 , 139.7 ,\n 144.40001],\n [ 0. , 0. , 0. , ..., 201.6 , 318.6 ,\n 169.7 ],\n [ 0. , 0. , 0. , ..., 240.8 , 338.9 ,\n 340.1 ]], dtype=float32)"
|
||||
},
|
||||
"execution_count": 7,
|
||||
"metadata": {},
|
||||
"output_type": "execute_result"
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"par = np.asarray(nc_data['PAR'][:])\n",
|
||||
"par"
|
||||
],
|
||||
"metadata": {
|
||||
"collapsed": false,
|
||||
"pycharm": {
|
||||
"name": "#%%\n"
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 14,
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"import pandas as pd"
|
||||
],
|
||||
"metadata": {
|
||||
"collapsed": false,
|
||||
"pycharm": {
|
||||
"name": "#%%\n"
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 42,
|
||||
"outputs": [
|
||||
{
|
||||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"2401 2401\n"
|
||||
]
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"lat = list(map(lambda x: round(x, 2), np.asarray(nc_data['latitude'][:])))\n",
|
||||
"lon = list(map(lambda x: round(x, 2), np.asarray(nc_data['longitude'][:])))\n",
|
||||
"print(len(lat), len(lon))\n",
|
||||
"latMin, latMax, lonMin, lonMax = min(lat), max(lat), min(lon), max(lon)"
|
||||
],
|
||||
"metadata": {
|
||||
"collapsed": false,
|
||||
"pycharm": {
|
||||
"name": "#%%\n"
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 9,
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# 分辨率\n",
|
||||
"lat_Res = (latMax - latMin) / (lat.shape[0]-1)\n",
|
||||
"lon_Res = (lonMax - lonMin) / (lon.shape[0]-1)"
|
||||
],
|
||||
"metadata": {
|
||||
"collapsed": false,
|
||||
"pycharm": {
|
||||
"name": "#%%\n"
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 53,
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"cols = [str(x) for x in lat]\n",
|
||||
"rows = [str(x) for x in lon]"
|
||||
],
|
||||
"metadata": {
|
||||
"collapsed": false,
|
||||
"pycharm": {
|
||||
"name": "#%%\n"
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 54,
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"par_df = pd.DataFrame.from_records(par)\n",
|
||||
"par_df.columns = cols\n",
|
||||
"par_df.index = rows"
|
||||
],
|
||||
"metadata": {
|
||||
"collapsed": false,
|
||||
"pycharm": {
|
||||
"name": "#%%\n"
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 58,
|
||||
"outputs": [
|
||||
{
|
||||
"data": {
|
||||
"text/plain": " 60.0 59.95 59.9 59.85 59.8 59.75 59.7 59.65 59.6 59.55 ... \\\n199.8 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 ... \n199.85 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 ... \n199.9 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 ... \n199.95 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 ... \n200.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 ... \n\n -59.55 -59.6 -59.65 -59.7 -59.75 \\\n199.8 160.699997 160.900009 155.400009 155.500000 142.100006 \n199.85 153.800003 174.500000 147.300003 139.400009 139.600006 \n199.9 164.199997 166.800003 151.000000 153.800003 153.900009 \n199.95 152.900009 159.800003 184.300003 164.000000 164.199997 \n200.0 149.199997 148.400009 148.600006 152.300003 152.800003 \n\n -59.8 -59.85 -59.9 -59.95 -60.0 \n199.8 143.199997 143.699997 138.100006 138.300003 139.600006 \n199.85 144.199997 144.199997 160.199997 142.199997 143.699997 \n199.9 169.000000 169.300003 181.600006 139.699997 144.400009 \n199.95 167.100006 167.600006 201.600006 318.600006 169.699997 \n200.0 159.699997 240.699997 240.800003 338.899994 340.100006 \n\n[5 rows x 2401 columns]",
|
||||
"text/html": "<div>\n<style scoped>\n .dataframe tbody tr th:only-of-type {\n vertical-align: middle;\n }\n\n .dataframe tbody tr th {\n vertical-align: top;\n }\n\n .dataframe thead th {\n text-align: right;\n }\n</style>\n<table border=\"1\" class=\"dataframe\">\n <thead>\n <tr style=\"text-align: right;\">\n <th></th>\n <th>60.0</th>\n <th>59.95</th>\n <th>59.9</th>\n <th>59.85</th>\n <th>59.8</th>\n <th>59.75</th>\n <th>59.7</th>\n <th>59.65</th>\n <th>59.6</th>\n <th>59.55</th>\n <th>...</th>\n <th>-59.55</th>\n <th>-59.6</th>\n <th>-59.65</th>\n <th>-59.7</th>\n <th>-59.75</th>\n <th>-59.8</th>\n <th>-59.85</th>\n <th>-59.9</th>\n <th>-59.95</th>\n <th>-60.0</th>\n </tr>\n </thead>\n <tbody>\n <tr>\n <th>199.8</th>\n <td>0.0</td>\n <td>0.0</td>\n <td>0.0</td>\n <td>0.0</td>\n <td>0.0</td>\n <td>0.0</td>\n <td>0.0</td>\n <td>0.0</td>\n <td>0.0</td>\n <td>0.0</td>\n <td>...</td>\n <td>160.699997</td>\n <td>160.900009</td>\n <td>155.400009</td>\n <td>155.500000</td>\n <td>142.100006</td>\n <td>143.199997</td>\n <td>143.699997</td>\n <td>138.100006</td>\n <td>138.300003</td>\n <td>139.600006</td>\n </tr>\n <tr>\n <th>199.85</th>\n <td>0.0</td>\n <td>0.0</td>\n <td>0.0</td>\n <td>0.0</td>\n <td>0.0</td>\n <td>0.0</td>\n <td>0.0</td>\n <td>0.0</td>\n <td>0.0</td>\n <td>0.0</td>\n <td>...</td>\n <td>153.800003</td>\n <td>174.500000</td>\n <td>147.300003</td>\n <td>139.400009</td>\n <td>139.600006</td>\n <td>144.199997</td>\n <td>144.199997</td>\n <td>160.199997</td>\n <td>142.199997</td>\n <td>143.699997</td>\n </tr>\n <tr>\n <th>199.9</th>\n <td>0.0</td>\n <td>0.0</td>\n <td>0.0</td>\n <td>0.0</td>\n <td>0.0</td>\n <td>0.0</td>\n <td>0.0</td>\n <td>0.0</td>\n <td>0.0</td>\n <td>0.0</td>\n <td>...</td>\n <td>164.199997</td>\n <td>166.800003</td>\n <td>151.000000</td>\n <td>153.800003</td>\n <td>153.900009</td>\n <td>169.000000</td>\n <td>169.300003</td>\n <td>181.600006</td>\n <td>139.699997</td>\n <td>144.400009</td>\n </tr>\n <tr>\n <th>199.95</th>\n 
<td>0.0</td>\n <td>0.0</td>\n <td>0.0</td>\n <td>0.0</td>\n <td>0.0</td>\n <td>0.0</td>\n <td>0.0</td>\n <td>0.0</td>\n <td>0.0</td>\n <td>0.0</td>\n <td>...</td>\n <td>152.900009</td>\n <td>159.800003</td>\n <td>184.300003</td>\n <td>164.000000</td>\n <td>164.199997</td>\n <td>167.100006</td>\n <td>167.600006</td>\n <td>201.600006</td>\n <td>318.600006</td>\n <td>169.699997</td>\n </tr>\n <tr>\n <th>200.0</th>\n <td>0.0</td>\n <td>0.0</td>\n <td>0.0</td>\n <td>0.0</td>\n <td>0.0</td>\n <td>0.0</td>\n <td>0.0</td>\n <td>0.0</td>\n <td>0.0</td>\n <td>0.0</td>\n <td>...</td>\n <td>149.199997</td>\n <td>148.400009</td>\n <td>148.600006</td>\n <td>152.300003</td>\n <td>152.800003</td>\n <td>159.699997</td>\n <td>240.699997</td>\n <td>240.800003</td>\n <td>338.899994</td>\n <td>340.100006</td>\n </tr>\n </tbody>\n</table>\n<p>5 rows × 2401 columns</p>\n</div>"
|
||||
},
|
||||
"execution_count": 58,
|
||||
"metadata": {},
|
||||
"output_type": "execute_result"
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"par_df.tail()"
|
||||
],
|
||||
"metadata": {
|
||||
"collapsed": false,
|
||||
"pycharm": {
|
||||
"name": "#%%\n"
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 68,
|
||||
"outputs": [
|
||||
{
|
||||
"data": {
|
||||
"text/plain": " 60.0 59.95 59.9 59.85 59.8 59.75 59.7 59.65 59.6 59.55 ... \\\n120.85 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 ... \n\n -59.55 -59.6 -59.65 -59.7 -59.75 -59.8 -59.85 -59.9 \\\n120.85 12.5 27.4 53.100002 57.200001 11.1 11.900001 15.6 90.0 \n\n -59.95 -60.0 \n120.85 91.599998 94.900002 \n\n[1 rows x 2401 columns]",
|
||||
"text/html": "<div>\n<style scoped>\n .dataframe tbody tr th:only-of-type {\n vertical-align: middle;\n }\n\n .dataframe tbody tr th {\n vertical-align: top;\n }\n\n .dataframe thead th {\n text-align: right;\n }\n</style>\n<table border=\"1\" class=\"dataframe\">\n <thead>\n <tr style=\"text-align: right;\">\n <th></th>\n <th>60.0</th>\n <th>59.95</th>\n <th>59.9</th>\n <th>59.85</th>\n <th>59.8</th>\n <th>59.75</th>\n <th>59.7</th>\n <th>59.65</th>\n <th>59.6</th>\n <th>59.55</th>\n <th>...</th>\n <th>-59.55</th>\n <th>-59.6</th>\n <th>-59.65</th>\n <th>-59.7</th>\n <th>-59.75</th>\n <th>-59.8</th>\n <th>-59.85</th>\n <th>-59.9</th>\n <th>-59.95</th>\n <th>-60.0</th>\n </tr>\n </thead>\n <tbody>\n <tr>\n <th>120.85</th>\n <td>0.0</td>\n <td>0.0</td>\n <td>0.0</td>\n <td>0.0</td>\n <td>0.0</td>\n <td>0.0</td>\n <td>0.0</td>\n <td>0.0</td>\n <td>0.0</td>\n <td>0.0</td>\n <td>...</td>\n <td>12.5</td>\n <td>27.4</td>\n <td>53.100002</td>\n <td>57.200001</td>\n <td>11.1</td>\n <td>11.900001</td>\n <td>15.6</td>\n <td>90.0</td>\n <td>91.599998</td>\n <td>94.900002</td>\n </tr>\n </tbody>\n</table>\n<p>1 rows × 2401 columns</p>\n</div>"
|
||||
},
|
||||
"execution_count": 68,
|
||||
"metadata": {},
|
||||
"output_type": "execute_result"
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"par_df[par_df.index=='120.85']"
|
||||
],
|
||||
"metadata": {
|
||||
"collapsed": false,
|
||||
"pycharm": {
|
||||
"name": "#%%\n"
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"outputs": [],
|
||||
"source": [],
|
||||
"metadata": {
|
||||
"collapsed": false,
|
||||
"pycharm": {
|
||||
"name": "#%%\n"
|
||||
}
|
||||
}
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"kernelspec": {
|
||||
"display_name": "Python 3",
|
||||
"language": "python",
|
||||
"name": "python3"
|
||||
},
|
||||
"language_info": {
|
||||
"codemirror_mode": {
|
||||
"name": "ipython",
|
||||
"version": 2
|
||||
},
|
||||
"file_extension": ".py",
|
||||
"mimetype": "text/x-python",
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython2",
|
||||
"version": "2.7.6"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 0
|
||||
}
|
Loading…
Reference in New Issue