diff --git a/.github/workflows/mdtf_tests.yml b/.github/workflows/mdtf_tests.yml
index 2acf9f55a..47574c4fe 100644
--- a/.github/workflows/mdtf_tests.yml
+++ b/.github/workflows/mdtf_tests.yml
@@ -108,12 +108,12 @@ jobs:
# attempt FTP data fetch
# allow 20 min for transfer before timeout; Github actions allows 6 hours for individual
# jobs, but we don't want to max out resources that are shared by the NOAA-GFDL repos.
- curl --verbose --ipv4 --connect-timeout 8 --max-time 1200 --retry 128 --ftp-ssl --disable-epsv --ftp-pasv -u "anonymous:anonymous" ftp://ftp.gfdl.noaa.gov/perm/oar.gfdl.mdtf/convective_transition_diag_obs_data.tar --output convective_transition_diag_obs_data.tar
- curl --verbose --ipv4 --connect-timeout 8 --max-time 1200 --retry 128 --ftp-ssl --disable-epsv --ftp-pasv -u "anonymous:anonymous" ftp://ftp.gfdl.noaa.gov/perm/oar.gfdl.mdtf/EOF_500hPa_obs_data.tar --output EOF_500hPa_obs_data.tar
- curl --verbose --ipv4 --connect-timeout 8 --max-time 1200 --retry 128 --ftp-ssl --disable-epsv --ftp-pasv -u "anonymous:anonymous" ftp://ftp.gfdl.noaa.gov/perm/oar.gfdl.mdtf/Wheeler_Kiladis_obs_data.tar --output Wheeler_Kiladis_obs_data.tar
- curl --verbose --ipv4 --connect-timeout 8 --max-time 1200 --retry 128 --ftp-ssl --disable-epsv --ftp-pasv -u "anonymous:anonymous" ftp://ftp.gfdl.noaa.gov/perm/oar.gfdl.mdtf/MJO_teleconnection_obs_data.tar --output MJO_teleconnection_obs_data.tar
- curl --verbose --ipv4 --connect-timeout 8 --max-time 1200 --retry 128 --ftp-ssl --disable-epsv --ftp-pasv -u "anonymous:anonymous" ftp://ftp.gfdl.noaa.gov/perm/oar.gfdl.mdtf/MJO_suite_obs_data.tar --output MJO_suite_obs_data.tar
- curl --verbose --ipv4 --connect-timeout 8 --max-time 1200 --retry 128 --ftp-ssl --disable-epsv --ftp-pasv -u "anonymous:anonymous" ftp://ftp.gfdl.noaa.gov/perm/oar.gfdl.mdtf/precip_diurnal_cycle_obs_data.tar --output precip_diurnal_cycle_obs_data.tar
+ curl --verbose --ipv4 --connect-timeout 8 --max-time 1200 --retry 128 --ftp-ssl --ftp-pasv -u "anonymous:anonymous" ftp://ftp.gfdl.noaa.gov/perm/oar.gfdl.mdtf/convective_transition_diag_obs_data.tar --output convective_transition_diag_obs_data.tar
+ curl --verbose --ipv4 --connect-timeout 8 --max-time 1200 --retry 128 --ftp-ssl --ftp-pasv -u "anonymous:anonymous" ftp://ftp.gfdl.noaa.gov/perm/oar.gfdl.mdtf/EOF_500hPa_obs_data.tar --output EOF_500hPa_obs_data.tar
+ curl --verbose --ipv4 --connect-timeout 8 --max-time 1200 --retry 128 --ftp-ssl --ftp-pasv -u "anonymous:anonymous" ftp://ftp.gfdl.noaa.gov/perm/oar.gfdl.mdtf/Wheeler_Kiladis_obs_data.tar --output Wheeler_Kiladis_obs_data.tar
+ curl --verbose --ipv4 --connect-timeout 8 --max-time 1200 --retry 128 --ftp-ssl --ftp-pasv -u "anonymous:anonymous" ftp://ftp.gfdl.noaa.gov/perm/oar.gfdl.mdtf/MJO_teleconnection_obs_data.tar --output MJO_teleconnection_obs_data.tar
+ curl --verbose --ipv4 --connect-timeout 8 --max-time 1200 --retry 128 --ftp-ssl --ftp-pasv -u "anonymous:anonymous" ftp://ftp.gfdl.noaa.gov/perm/oar.gfdl.mdtf/MJO_suite_obs_data.tar --output MJO_suite_obs_data.tar
+ curl --verbose --ipv4 --connect-timeout 8 --max-time 1200 --retry 128 --ftp-ssl --ftp-pasv -u "anonymous:anonymous" ftp://ftp.gfdl.noaa.gov/perm/oar.gfdl.mdtf/precip_diurnal_cycle_obs_data.tar --output precip_diurnal_cycle_obs_data.tar
## make input data directories
mkdir -p inputdata/obs_data
echo "Untarring test files"
@@ -149,7 +149,7 @@ jobs:
# attempt FTP data fetch
# allow 20 min for transfer before timeout; Github actions allows 6 hours for individual
# jobs, but we don't want to max out resources that are shared by the NOAA-GFDL repos.
- curl --verbose --ipv4 --connect-timeout 8 --max-time 1200 --retry 128 --ftp-ssl --disable-epsv --ftp-pasv -u "anonymous:anonymous" ftp://ftp.gfdl.noaa.gov/perm/oar.gfdl.mdtf/MJO_prop_amp_obs_data.tar --output MJO_prop_amp_obs_data.tar
+ curl --verbose --ipv4 --connect-timeout 8 --max-time 1200 --retry 128 --ftp-ssl --ftp-pasv -u "anonymous:anonymous" ftp://ftp.gfdl.noaa.gov/perm/oar.gfdl.mdtf/MJO_prop_amp_obs_data.tar --output MJO_prop_amp_obs_data.tar
echo "Untarring set 2 test files"
tar -xvf MJO_prop_amp_obs_data.tar
# clean up tarballs
@@ -173,10 +173,10 @@ jobs:
# attempt FTP data fetch
# allow 20 min for transfer before timeout; Github actions allows 6 hours for individual
# jobs, but we don't want to max out resources that are shared by the NOAA-GFDL repos.
- curl --verbose --ipv4 --connect-timeout 8 --max-time 1200 --retry 128 --ftp-ssl --disable-epsv --ftp-pasv -u "anonymous:anonymous" ftp://ftp.gfdl.noaa.gov/perm/oar.gfdl.mdtf/temp_extremes_distshape_obs_data.tar --output temp_extremes_distshape_obs_data.tar
- curl --verbose --ipv4 --connect-timeout 8 --max-time 1200 --retry 128 --ftp-ssl --disable-epsv --ftp-pasv -u "anonymous:anonymous" ftp://ftp.gfdl.noaa.gov/perm/oar.gfdl.mdtf/tropical_pacific_sea_level_obs_data.tar.gz --output tropical_pacific_sea_level_obs_data.tar.gz
- curl --verbose --ipv4 --connect-timeout 8 --max-time 1200 --retry 128 --ftp-ssl --disable-epsv --ftp-pasv -u "anonymous:anonymous" ftp://ftp.gfdl.noaa.gov/perm/oar.gfdl.mdtf/mixed_layer_depth_obs_data.tar --output mixed_layer_depth_obs_data.tar
- curl --verbose --ipv4 --connect-timeout 8 --max-time 1200 --retry 128 --ftp-ssl --disable-epsv --ftp-pasv -u "anonymous:anonymous" ftp://ftp.gfdl.noaa.gov/perm/oar.gfdl.mdtf/ocn_surf_flux_diag_obs_data.tar --output ocn_surf_flux_diag_obs_data.tar
+ curl --verbose --ipv4 --connect-timeout 8 --max-time 1200 --retry 128 --ftp-ssl --ftp-pasv -u "anonymous:anonymous" ftp://ftp.gfdl.noaa.gov/perm/oar.gfdl.mdtf/temp_extremes_distshape_obs_data.tar --output temp_extremes_distshape_obs_data.tar
+ curl --verbose --ipv4 --connect-timeout 8 --max-time 1200 --retry 128 --ftp-ssl --ftp-pasv -u "anonymous:anonymous" ftp://ftp.gfdl.noaa.gov/perm/oar.gfdl.mdtf/tropical_pacific_sea_level_obs_data.tar.gz --output tropical_pacific_sea_level_obs_data.tar.gz
+ curl --verbose --ipv4 --connect-timeout 8 --max-time 1200 --retry 128 --ftp-ssl --ftp-pasv -u "anonymous:anonymous" ftp://ftp.gfdl.noaa.gov/perm/oar.gfdl.mdtf/mixed_layer_depth_obs_data.tar --output mixed_layer_depth_obs_data.tar
+ curl --verbose --ipv4 --connect-timeout 8 --max-time 1200 --retry 128 --ftp-ssl --ftp-pasv -u "anonymous:anonymous" ftp://ftp.gfdl.noaa.gov/perm/oar.gfdl.mdtf/ocn_surf_flux_diag_obs_data.tar --output ocn_surf_flux_diag_obs_data.tar
echo "Untarring set 3 test files"
tar -xvf temp_extremes_distshape_obs_data.tar
tar -zxvf tropical_pacific_sea_level_obs_data.tar.gz
diff --git a/diagnostics/MJO_suite/settings.jsonc b/diagnostics/MJO_suite/settings.jsonc
index aa6397413..c11ed2374 100644
--- a/diagnostics/MJO_suite/settings.jsonc
+++ b/diagnostics/MJO_suite/settings.jsonc
@@ -65,4 +65,4 @@
"scalar_coordinates": {"lev": 850}
}
}
-}
\ No newline at end of file
+}
diff --git a/diagnostics/etc_composites/MDTF_Documentation_etc_composites.pdf b/diagnostics/etc_composites/MDTF_Documentation_etc_composites.pdf
new file mode 100644
index 000000000..4774331f6
Binary files /dev/null and b/diagnostics/etc_composites/MDTF_Documentation_etc_composites.pdf differ
diff --git a/diagnostics/etc_composites/etc_composites.html b/diagnostics/etc_composites/etc_composites.html
new file mode 100644
index 000000000..5ea6f4f7c
--- /dev/null
+++ b/diagnostics/etc_composites/etc_composites.html
@@ -0,0 +1,197 @@
+
+
+
+
+
+
ETC Composites
+ETC Composite Analysis
+
+The Extratropical cyclones are identified with the Modeling, Analysis
+and Prediction (MAP) Climatology of Midlatitude
+Storminess (MCMS) algorithm (Bauer et al. 2016)
+which uses 6-hourly gridded sea level pressure fields to
+locate storm centers and then track them through the
+cyclone lifetime. This algorithm has historically been
+used to create a database of cyclone locations using the
+ERA-Interim reanalysis (Dee et al. 2011). The MCMS algorithm
+is also applied to the model and re-analysis
+sea level pressure fields.
+
+We generate cyclone centered composites of cloud cover,
+as well as other
+atmospheric variables, over multiple instances of the
+same type of atmospheric phenomenon to compare the
+models with observations or reanalysis.
+This is a plan view as a passive
+instrument would observe.
+This method presents a
+great advantage for model evaluation as they allow
+multiple cases to be included and do not necessitate a
+match in time and space between the free running
+models and observations.
+
+
+Full Documentation and Contact Information
+
+
+
Track Stats:
{{CASENAME}}
+
+
+
Variable Legend
+
+
+
+ Variable |
+ Long Name |
+ Units |
+
+
+ PR |
+ Total Precipitation |
+ mm/hr |
+
+
+ PRW |
+ Total Column Water Vapor |
+ mm/hr |
+
+
+ UV10 |
+ 10m Wind Speeds [sqrt(U^2 + V^2)] |
+ m/s |
+
+
+ W500 |
+ Vertical Velocity @ 500 hPA |
+ Pa/s |
+
+
+ CLT |
+ Total Cloud Fraction |
+ % |
+
+
+
+
+
ETC Composites
+
+
+
+
+
Difference Plots
+The figures below show the difference of the extra-tropical cyclone composites between the model run ({{CASENAME}}) and ERA-Interim, MERRA and MODIS data. The composites created for ERA, MERRA and MODIS are from the years 2008-2012. The {{CASENAME}} composites are from your selection of years.
+
+
+ {{CASENAME}} - ERA-Interim |
+
+ SH .
+ NH
+ |
+
+
+ {{CASENAME}} - MERRA |
+
+ SH
+ |
+
+
+ {{CASENAME}} - MODIS |
+
+ SH
+ |
+
+
+
+
+
+
Individual Composites
+Composites of extra-tropical cyclone for the listed variables. The composites created for ERA, MERRA and MODIS are from the years 2008-2012. The {{CASENAME}} composites are from your selection of years.
+
+
+ Hemis - Land/Ocean - Season |
+ {{CASENAME}} |
+ ERA-Interim (2008-2012) |
+
+
+
+ |
+ |
+ |
+
+
+ SH - Ocean - WARM |
+
+ PR .
+ PRW .
+ UV10 .
+ W500 .
+ CLT
+ |
+
+ PR .
+ PRW .
+ UV10 .
+ W500
+ |
+
+
+
+ NH - Ocean - WARM |
+
+ PR .
+ PRW .
+ UV10 .
+ W500 .
+ CLT
+
+ |
+
+ PR .
+ PRW .
+ UV10 .
+ W500
+ |
+
+
+
+
+
+
+
+
Additional Individual Composites
+Composites of extra-tropical cyclone for the listed variables. The composites created for MERRA and MODIS are from the years 2008-2012.
+
+
+ Hemis - Land/Ocean - Season |
+ MERRA (2008-2012) |
+ Observations (2008-2012) |
+
+
+
+ |
+ |
+ |
+
+
+ SH - Ocean - WARM |
+
+ PRW .
+ W500
+ |
+
+ CLT
+ |
+
+
+
+
+
+
diff --git a/diagnostics/etc_composites/etc_composites.py b/diagnostics/etc_composites/etc_composites.py
new file mode 100644
index 000000000..f44d3a8c6
--- /dev/null
+++ b/diagnostics/etc_composites/etc_composites.py
@@ -0,0 +1,1193 @@
+import numpy as np
+import xarray as xr
+import pandas as pd
+import os
+import glob
+import matplotlib.pyplot as plt
+import netCDF4 as nc
+from scipy import interpolate
+import pickle
+import sys
+import time as timelib
+sys.path.append(os.environ['POD_HOME']+'/util')
+
+# INFO: The variables provided in the settings.jsonc do not match the variables I use in the code. So be careful with this
+# ex: 'pr' --> 'tp', 'clt' for modis --> 'modis_cld', 'omega' --> 'w500'
+
+# getting the starting time
+start_time = timelib.time()
+
+# create the output folder if it does not exist
+if not os.path.exists(os.environ['WK_DIR'] + '/tmp'):
+ os.makedirs(os.environ['WK_DIR'] + '/tmp')
+if not os.path.exists(os.environ['WK_DIR'] + '/tmp/data_converts'):
+ os.makedirs(os.environ['WK_DIR'] + '/tmp/data_converts')
+
+########################################################
+###### Creating the TOPO file used by the code #########
+########################################################
+# have to setup topo file env var, before initial setup, because defines.py needs this variable
+os.environ['topo_file'] = f"{os.environ['WK_DIR']}/tmp/data_converts/topo.nc"
+
+os.environ['hgt_var'] = 'orog'
+os.environ['hgt_var_scale'] = '1.'
+
+os.environ['lsm_var'] = 'sftlf'
+os.environ['lsm_var_scale'] = '1.'
+
+hgt_file = os.environ['DATADIR'] + '/' + os.environ['CASENAME'] + '.' + os.environ['hgt_var'] + '.static.nc'
+in_ds = xr.open_dataset(hgt_file)
+hgt = in_ds[os.environ['hgt_var']].values*float(os.environ['hgt_var_scale'])
+in_ds.close()
+
+lsm_file = os.environ['DATADIR'] + '/' + os.environ['CASENAME'] + '.' + os.environ['lsm_var'] + '.static.nc'
+in_ds = xr.open_dataset(lsm_file)
+lsm = in_ds[os.environ['lsm_var']].values*float(os.environ['lsm_var_scale'])
+in_ds.close()
+
+lat = in_ds[os.environ['lat_var']].values
+lon = in_ds[os.environ['lon_var']].values
+
+# creating the topo file
+out_topo_ds = xr.Dataset(
+ {'hgt': (('lat', 'lon'), hgt), 'lsm': (('lat', 'lon'), lsm)},
+ coords={
+ 'lat': lat,
+ 'lon': lon
+ })
+# adding the necessary attributes to the SLP file
+out_topo_ds.hgt.attrs['units'] = 'm'
+out_topo_ds.lsm.attrs['units'] = '1'
+
+# writing to the netcdf file
+out_topo_ds.to_netcdf(os.environ['topo_file'])
+
+# The following imports need the topo and obs_lat_distrib_file
+os.environ['obs_lat_distrib_file'] = os.environ['OBS_DATA'] + '/erai_lat_distrib.pkl'
+import run_tracker_setup
+import defines
+
+# check if I have to run the composites part of the pod
+# if so, then get the variables to run the pod for
+if (os.environ['RUN_COMPOSITES'] == 'True'):
+ comp_vars = defines.composite_var_list
+    print(f'Running Composite Pods for variables: {comp_vars}')
+
+##################################
+###### Running Cython
+##################################
+cwd = os.getcwd()
+if not (os.environ['USE_EXTERNAL_TRACKS'] == 'True'):
+ print('Running the cythonize code...')
+
+ so_files = ['g2l', 'gcd', 'rhumb_line_nav']
+ for i_so_file in so_files:
+ cmd = f"cd {os.environ['POD_HOME']}/util/tracker; python setup_{i_so_file}_v4.py build_ext --inplace"
+ os.system(cmd)
+ output_file = glob.glob(f"{os.environ['POD_HOME']}/util/tracker/{i_so_file}_v4.*.so")
+ cmd = f"mv {output_file[0]} {os.environ['POD_HOME']}/util/tracker/{i_so_file}_v4.so; cd {cwd}"
+ os.system(cmd)
+
+
+##################################
+###### Function to plot composites
+##################################
+
+def plot_area_fig(x,y,data,title,out_file):
+ ''' Function to plot the area figure! '''
+ plt.figure()
+ plt.pcolormesh(x, y, data, cmap='jet')
+ plt.colorbar()
+ plt.title(title)
+ plt.ylabel('Distance [km]')
+ plt.xlabel('Distance [km]')
+ plt.ylim(-1500, 1500)
+ plt.xlim(-1500, 1500)
+ plt.savefig(out_file, dpi=100.)
+ plt.close('all')
+
+def plot_empty(out_file):
+ ''' Function to plot an empty figure, if composites is not run as part of the POD! '''
+ plt.figure()
+ plt.plot(0, 0, 'w.')
+ plt.text(0, 0, 'Run Composites part of the POD to generate this Figure!\nInclude the variable to "varlist" in settings.jsonc\nAvailable Options: prw, pr, w500, clt\nAlso available: 10m Wind Speeds; for which include both u10 & v10', va='center', ha='center')
+ plt.xticks([])
+ plt.yticks([])
+ plt.savefig(out_file)
+ plt.close('all')
+
+
+def plot_empty_single(ax, var):
+ ''' Function to plot an empty figure, if composites is run but var is not available! '''
+ ax.plot(0, 0, 'w.')
+ if (var == 'uv10'):
+ ax.text(0, 0, f'Add details of "u10" & "v10" to "varlist"\nin settings.jsonc!', va='center', ha='center')
+ elif (var == 'pr'):
+ ax.text(0, 0, f'Add details of "pr" to "varlist"\nin settings.jsonc!', va='center', ha='center')
+ elif (var == 'w500'):
+ ax.text(0, 0, f'Add details of "w500" to "varlist"\nin settings.jsonc!', va='center', ha='center')
+ else:
+ ax.text(0, 0, f'Add details of "{var}" to "varlist"\nin settings.jsonc!', va='center', ha='center')
+ ax.set_xticks([])
+ ax.set_yticks([])
+
+def create_empty_figs():
+    '''Function to create all the empty figures!'''
+ print('Creating Empty Figures...')
+
+ # Creating empty figures
+ for hemis in defines.composite_hem_list:
+ for var in defines.composite_available_var_list:
+ for season in defines.composite_season_list:
+ for lm_type in ['land', 'ocean']:
+ out_file = os.path.join(defines.model_images_folder, f"{os.environ['CASENAME']}_area_{var}_{hemis}_{lm_type}_{season.upper()}.png")
+ plot_empty(out_file)
+
+ # Plotting empty obs figures
+ out_file = f"{os.environ['WK_DIR']}/obs/diff_merra_erai_prw_SH_ocean_WARM.png"
+ plot_empty(out_file)
+
+ # SH
+ out_file = f"{os.environ['WK_DIR']}/obs/{os.environ['CASENAME']}_modis_cld_SH_ocean_WARM.png"
+ plot_empty(out_file)
+ out_file = f"{os.environ['WK_DIR']}/obs/{os.environ['CASENAME']}_merra_prw_SH_ocean_WARM.png"
+ plot_empty(out_file)
+ out_file = f"{os.environ['WK_DIR']}/obs/{os.environ['CASENAME']}_merra_w500_SH_ocean_WARM.png"
+ plot_empty(out_file)
+
+ out_file = f"{os.environ['WK_DIR']}/obs/{os.environ['CASENAME']}_erai_pr_SH_ocean_WARM.png"
+ plot_empty(out_file)
+ out_file = f"{os.environ['WK_DIR']}/obs/{os.environ['CASENAME']}_erai_prw_SH_ocean_WARM.png"
+ plot_empty(out_file)
+ out_file = f"{os.environ['WK_DIR']}/obs/{os.environ['CASENAME']}_erai_uv10_SH_ocean_WARM.png"
+ plot_empty(out_file)
+ out_file = f"{os.environ['WK_DIR']}/obs/{os.environ['CASENAME']}_erai_w500_SH_ocean_WARM.png"
+ plot_empty(out_file)
+
+ # NH
+ out_file = f"{os.environ['WK_DIR']}/obs/{os.environ['CASENAME']}_modis_cld_NH_ocean_WARM.png"
+ plot_empty(out_file)
+ out_file = f"{os.environ['WK_DIR']}/obs/{os.environ['CASENAME']}_merra_prw_NH_ocean_WARM.png"
+ plot_empty(out_file)
+ out_file = f"{os.environ['WK_DIR']}/obs/{os.environ['CASENAME']}_merra_w500_NH_ocean_WARM.png"
+ plot_empty(out_file)
+
+ out_file = f"{os.environ['WK_DIR']}/obs/{os.environ['CASENAME']}_erai_pr_NH_ocean_WARM.png"
+ plot_empty(out_file)
+ out_file = f"{os.environ['WK_DIR']}/obs/{os.environ['CASENAME']}_erai_prw_NH_ocean_WARM.png"
+ plot_empty(out_file)
+ out_file = f"{os.environ['WK_DIR']}/obs/{os.environ['CASENAME']}_erai_uv10_NH_ocean_WARM.png"
+ plot_empty(out_file)
+ out_file = f"{os.environ['WK_DIR']}/obs/{os.environ['CASENAME']}_erai_w500_NH_ocean_WARM.png"
+ plot_empty(out_file)
+
+ # Plotting empty diff plots
+ out_file = f"{os.environ['WK_DIR']}/model/diff_{os.environ['CASENAME']}_erai_vars_SH_ocean_WARM.png"
+ plot_empty(out_file)
+ out_file = f"{os.environ['WK_DIR']}/model/diff_{os.environ['CASENAME']}_erai_vars_NH_ocean_WARM.png"
+ plot_empty(out_file)
+ out_file = f"{os.environ['WK_DIR']}/model/diff_{os.environ['CASENAME']}_merra_vars_SH_ocean_WARM.png"
+ plot_empty(out_file)
+ out_file = f"{os.environ['WK_DIR']}/model/diff_{os.environ['CASENAME']}_modis_vars_SH_ocean_WARM.png"
+ plot_empty(out_file)
+
+ out_file = f"{os.environ['WK_DIR']}/model/diff_{os.environ['CASENAME']}_erai_pr_SH_ocean_WARM.png"
+ plot_empty(out_file)
+ out_file = f"{os.environ['WK_DIR']}/model/diff_{os.environ['CASENAME']}_erai_prw_SH_ocean_WARM.png"
+ plot_empty(out_file)
+ out_file = f"{os.environ['WK_DIR']}/model/diff_{os.environ['CASENAME']}_erai_w500_SH_ocean_WARM.png"
+ plot_empty(out_file)
+ out_file = f"{os.environ['WK_DIR']}/model/diff_{os.environ['CASENAME']}_erai_uv10_SH_ocean_WARM.png"
+ plot_empty(out_file)
+
+ out_file = f"{os.environ['WK_DIR']}/model/diff_{os.environ['CASENAME']}_erai_pr_NH_ocean_WARM.png"
+ plot_empty(out_file)
+ out_file = f"{os.environ['WK_DIR']}/model/diff_{os.environ['CASENAME']}_erai_prw_NH_ocean_WARM.png"
+ plot_empty(out_file)
+ out_file = f"{os.environ['WK_DIR']}/model/diff_{os.environ['CASENAME']}_erai_w500_NH_ocean_WARM.png"
+ plot_empty(out_file)
+ out_file = f"{os.environ['WK_DIR']}/model/diff_{os.environ['CASENAME']}_erai_uv10_NH_ocean_WARM.png"
+ plot_empty(out_file)
+
+
+##################################
+###### Main Code
+##################################
+
+# os.environ['topo_file'] = '/localdrive/drive6/erai/converts/invariants.nc'
+# os.environ['topo_file'] = os.environ['DATADIR'] + '/topo.nc'
+# '/localdrive/drive6/erai/converts/invariants.nc'
+
+run_time = timelib.gmtime(timelib.time() - start_time)
+mid_time = timelib.time()
+print(f'Completed code initialization in {timelib.strftime("%H:%M:%S", run_time)}')
+
+os.environ['MODEL_OUTPUT_DIR'] = os.environ['DATADIR'] + '/6hr'
+
+### Copying over the MDTF_DOC file
+cmd = "cp " + os.environ['POD_HOME']+'/MDTF_Documentation_etc_composites.pdf ' + os.environ['WK_DIR']+'/'
+os.system(cmd)
+
+# Creating the necessary SLP yearly files for the necessary years
+sYear = int(os.environ['FIRSTYR'])
+eYear = int(os.environ['LASTYR'])
+
+# Setting up the necessary variable names
+os.environ['slp_var'] = 'psl'
+os.environ['slp_var_scale'] = '0.01'
+os.environ['slp_file'] = '*.'+os.environ['slp_var']+'.6hr.nc'
+
+if (os.environ['RUN_COMPOSITES'] == 'True'):
+ if ('pr' in comp_vars):
+ os.environ['pr_var'] = 'pr'
+ os.environ['pr_var_scale'] = '3600.'
+ os.environ['pr_file'] = '*.'+os.environ['pr_var']+'.6hr.nc'
+ pr_file = os.environ['MODEL_OUTPUT_DIR'] + '/' + os.environ['CASENAME'] + '.' + os.environ['pr_var'] + '.6hr.nc'
+
+ if ('prw' in comp_vars):
+ os.environ['prw_var'] = 'prw'
+ os.environ['prw_var_scale'] = '1'
+ os.environ['prw_file'] = '*.'+os.environ['prw_var']+'.6hr.nc'
+ prw_file = os.environ['MODEL_OUTPUT_DIR'] + '/' + os.environ['CASENAME'] + '.' + os.environ['prw_var'] + '.6hr.nc'
+
+ if ('uv10' in comp_vars):
+ os.environ['u10_var'] = 'u10'
+ os.environ['u10_var_scale'] = '1'
+ os.environ['u10_file'] = '*.'+os.environ['u10_var']+'.6hr.nc'
+ u10_file = os.environ['MODEL_OUTPUT_DIR'] + '/' + os.environ['CASENAME'] + '.' + os.environ['u10_var'] + '.6hr.nc'
+
+ os.environ['v10_var'] = 'v10'
+ os.environ['v10_var_scale'] = '1'
+ os.environ['v10_file'] = '*.'+os.environ['v10_var']+'.6hr.nc'
+ v10_file = os.environ['MODEL_OUTPUT_DIR'] + '/' + os.environ['CASENAME'] + '.' + os.environ['v10_var'] + '.6hr.nc'
+
+ if ('w500' in comp_vars):
+ os.environ['w500_var'] = 'w500'
+ os.environ['w500_var_scale'] = '1'
+ os.environ['w500_file'] = '*.'+os.environ['w500_var']+'.6hr.nc'
+ w500_file = os.environ['MODEL_OUTPUT_DIR'] + '/' + os.environ['CASENAME'] + '.' + os.environ['w500_var'] + '.6hr.nc'
+
+ if ('clt' in comp_vars):
+ os.environ['clt_var'] = 'clt'
+ os.environ['clt_var_scale'] = '100.'
+ os.environ['clt_file'] = '*.'+os.environ['clt_var']+'.6hr.nc'
+ clt_file = os.environ['MODEL_OUTPUT_DIR'] + '/' + os.environ['CASENAME'] + '.' + os.environ['clt_var'] + '.6hr.nc'
+
+# Setting up the slp_file to be used
+slp_file = os.environ['MODEL_OUTPUT_DIR'] + '/' + os.environ['CASENAME'] + '.' + os.environ['slp_var'] + '.6hr.nc'
+
+# read in the SLP files from the model data
+# getting the type of calendar
+ncid = nc.Dataset(slp_file)
+calendar = ncid['time'].getncattr('calendar')
+ncid.close()
+
+# Using xarray to read in the slp_file
+in_ds = xr.open_dataset(slp_file)
+
+# Getting the necessary data
+if (calendar == 'noleap'):
+ time = in_ds.time.values
+else:
+ time = [pd.to_datetime(i) for i in in_ds.time.values]
+slp = in_ds[os.environ['slp_var']]
+lat = in_ds[os.environ['lat_var']].values
+lon = in_ds[os.environ['lon_var']].lon.values
+in_ds.close()
+
+# reading in the other variables if the composites pod is run
+
+if (os.environ['RUN_COMPOSITES'] == 'True'):
+ if ('pr' in comp_vars):
+ # Reading in total precipitation
+ in_ds = xr.open_dataset(pr_file)
+ pr = in_ds[os.environ['pr_var']]
+ in_ds.close()
+
+ if ('prw' in comp_vars):
+ # Reading in total column water vapor
+ in_ds = xr.open_dataset(prw_file)
+ prw = in_ds[os.environ['prw_var']]
+ in_ds.close()
+
+ if ('uv10' in comp_vars):
+ # Reading in eastward wind speed
+ in_ds = xr.open_dataset(u10_file)
+ u10 = in_ds[os.environ['u10_var']]
+ in_ds.close()
+
+ # Reading in wind speed
+ in_ds = xr.open_dataset(v10_file)
+ v10 = in_ds[os.environ['v10_var']]
+ in_ds.close()
+
+ if ('w500' in comp_vars):
+ # Reading in vertical velocity at 500 hpa
+ in_ds = xr.open_dataset(w500_file)
+ w500 = in_ds[os.environ['w500_var']]
+ in_ds.close()
+
+ if ('clt' in comp_vars):
+ # Reading in total cloud fraction
+ in_ds = xr.open_dataset(clt_file)
+ clt = in_ds[os.environ['clt_var']]
+ in_ds.close()
+
+# creating the year_list to chunk out the yearly sections of the files
+year_list = np.array([i.year for i in time])
+
+# loop through from sYear to eYear
+reset_firstyr = True
+
+for year in range(sYear, eYear+1):
+ ind = (year_list == year)
+ if (not np.any(ind)) & (reset_firstyr) & (year < eYear):
+ # resetting the first year, because we dont have model data for the specified years
+        os.environ['FIRSTYR'] = f'{year:04d}'
+ reset_firstyr = False
+ continue
+
+ # selecting only the time index for the year
+ # creating the filename of the output in the correct folder
+ slp_sel = slp[ind, :, :]*float(os.environ['slp_var_scale'])
+ out_slp_file= f"{os.environ['WK_DIR']}/tmp/data_converts/slp.{year:04d}.nc"
+ print(out_slp_file)
+
+ # variables run in the composites
+ if (os.environ['RUN_COMPOSITES'] == 'True'):
+ if ('pr' in comp_vars):
+ pr_sel = pr[ind, :, :]*float(os.environ['pr_var_scale'])
+ out_pr_file= f"{os.environ['WK_DIR']}/tmp/data_converts/{os.environ['pr_var']}.{year:04d}.nc"
+ print(out_pr_file)
+ if ('prw' in comp_vars):
+ prw_sel = prw[ind, :, :]*float(os.environ['prw_var_scale'])
+ out_prw_file= f"{os.environ['WK_DIR']}/tmp/data_converts/{os.environ['prw_var']}.{year:04d}.nc"
+ print(out_prw_file)
+ if ('uv10' in comp_vars):
+ u10_sel = u10[ind, :, :]*float(os.environ['u10_var_scale'])
+ v10_sel = v10[ind, :, :]*float(os.environ['v10_var_scale'])
+ out_uv10_file= f"{os.environ['WK_DIR']}/tmp/data_converts/uv10.{year:04d}.nc"
+ print(out_uv10_file)
+ if ('w500' in comp_vars):
+ w500_sel = w500[ind, :, :]*float(os.environ['w500_var_scale'])
+ out_w500_file= f"{os.environ['WK_DIR']}/tmp/data_converts/{os.environ['w500_var']}.{year:04d}.nc"
+ print(out_w500_file)
+ if ('clt' in comp_vars):
+ clt_sel = clt[ind, :, :]*float(os.environ['clt_var_scale'])
+ out_clt_file= f"{os.environ['WK_DIR']}/tmp/data_converts/{os.environ['clt_var']}.{year:04d}.nc"
+ print(out_clt_file)
+
+
+ # creating my custom time variable to match what is required by the tracker
+ time = np.arange(0, np.sum(ind)*6, 6)
+
+ ###### Outputting the SLP file
+ # creating the xarray dataset
+ out_slp_ds = xr.Dataset(
+ {'slp': (('time', 'lat', 'lon'), slp_sel)},
+ coords={
+ 'time': time,
+ 'lat': lat,
+ 'lon': lon
+ })
+ # adding the necessary attributes to the SLP file
+ out_slp_ds.slp.attrs['units'] = 'mb'
+ out_slp_ds.time.attrs['delta_t'] = "0000-00-00 06:00:00";
+ out_slp_ds.time.attrs['units'] = f"hours since {year:04d}-01-01 00:00:00";
+ if (calendar == 'noleap'):
+ out_slp_ds.time.attrs['calendar'] = '365_day'
+ else:
+ out_slp_ds.time.attrs['calendar'] = calendar
+ out_slp_ds.lon.attrs['long_name'] = 'longitude'
+ out_slp_ds.lon.attrs['standard_name'] = 'longitude'
+ out_slp_ds.lon.attrs['units'] = 'degrees_east'
+ out_slp_ds.lon.attrs['axis'] = 'X'
+ out_slp_ds.lat.attrs['long_name'] = 'latitude'
+ out_slp_ds.lat.attrs['standard_name'] = 'latitude'
+ out_slp_ds.lat.attrs['units'] = 'degrees_north'
+ out_slp_ds.lat.attrs['axis'] = 'Y'
+ # writing to the netcdf file
+ out_slp_ds.to_netcdf(out_slp_file)
+
+ if (os.environ['RUN_COMPOSITES'] == 'True'):
+
+ if ('pr' in comp_vars):
+ ###### Outputting the total precip file
+ # creating the xarray dataset
+ out_pr_ds = xr.Dataset(
+ {'pr': (('time', 'lat', 'lon'), pr_sel)},
+ coords={
+ 'time': time,
+ 'lat': lat,
+ 'lon': lon
+ })
+ # adding the necessary attributes to the file
+ out_pr_ds.pr.attrs['units'] = 'mm/hr'
+ out_pr_ds.time.attrs['delta_t'] = "0000-00-00 06:00:00";
+ out_pr_ds.time.attrs['units'] = f"hours since {year:04d}-01-01 00:00:00";
+ if (calendar == 'noleap'):
+ out_pr_ds.time.attrs['calendar'] = '365_day'
+ else:
+ out_pr_ds.time.attrs['calendar'] = calendar
+ out_pr_ds.lon.attrs['long_name'] = 'longitude'
+ out_pr_ds.lon.attrs['standard_name'] = 'longitude'
+ out_pr_ds.lon.attrs['units'] = 'degrees_east'
+ out_pr_ds.lon.attrs['axis'] = 'X'
+ out_pr_ds.lat.attrs['long_name'] = 'latitude'
+ out_pr_ds.lat.attrs['standard_name'] = 'latitude'
+ out_pr_ds.lat.attrs['units'] = 'degrees_north'
+ out_pr_ds.lat.attrs['axis'] = 'Y'
+ # writing to the netcdf file
+ out_pr_ds.to_netcdf(out_pr_file)
+
+ if ('prw' in comp_vars):
+ ###### Outputting the total column water vapor file
+ # creating the xarray dataset
+ out_prw_ds = xr.Dataset(
+ {'prw': (('time', 'lat', 'lon'), prw_sel)},
+ coords={
+ 'time': time,
+ 'lat': lat,
+ 'lon': lon
+ })
+ # adding the necessary attributes to the file
+ out_prw_ds.prw.attrs['units'] = 'mm/hr'
+ out_prw_ds.time.attrs['delta_t'] = "0000-00-00 06:00:00";
+ out_prw_ds.time.attrs['units'] = f"hours since {year:04d}-01-01 00:00:00";
+ if (calendar == 'noleap'):
+ out_prw_ds.time.attrs['calendar'] = '365_day'
+ else:
+ out_prw_ds.time.attrs['calendar'] = calendar
+ out_prw_ds.lon.attrs['long_name'] = 'longitude'
+ out_prw_ds.lon.attrs['standard_name'] = 'longitude'
+ out_prw_ds.lon.attrs['units'] = 'degrees_east'
+ out_prw_ds.lon.attrs['axis'] = 'X'
+ out_prw_ds.lat.attrs['long_name'] = 'latitude'
+ out_prw_ds.lat.attrs['standard_name'] = 'latitude'
+ out_prw_ds.lat.attrs['units'] = 'degrees_north'
+ out_prw_ds.lat.attrs['axis'] = 'Y'
+ # writing to the netcdf file
+ out_prw_ds.to_netcdf(out_prw_file)
+
+ if ('uv10' in comp_vars):
+ ###### Outputting the UV10 file
+ # creating the xarray dataset
+ uv10_sel = (u10_sel**2 + v10_sel**2)**.5
+ out_var_ds = xr.Dataset(
+ {'uv10': (('time', 'lat', 'lon'), uv10_sel)},
+ coords={
+ 'time': time,
+ 'lat': lat,
+ 'lon': lon
+ })
+ # adding the necessary attributes to the file
+ out_var_ds.uv10.attrs['units'] = 'm/s'
+ out_var_ds.time.attrs['delta_t'] = "0000-00-00 06:00:00";
+ out_var_ds.time.attrs['units'] = f"hours since {year:04d}-01-01 00:00:00";
+ if (calendar == 'noleap'):
+ out_var_ds.time.attrs['calendar'] = '365_day'
+ else:
+ out_var_ds.time.attrs['calendar'] = calendar
+ out_var_ds.lon.attrs['long_name'] = 'longitude'
+ out_var_ds.lon.attrs['standard_name'] = 'longitude'
+ out_var_ds.lon.attrs['units'] = 'degrees_east'
+ out_var_ds.lon.attrs['axis'] = 'X'
+ out_var_ds.lat.attrs['long_name'] = 'latitude'
+ out_var_ds.lat.attrs['standard_name'] = 'latitude'
+ out_var_ds.lat.attrs['units'] = 'degrees_north'
+ out_var_ds.lat.attrs['axis'] = 'Y'
+ # writing to the netcdf file
+ out_var_ds.to_netcdf(out_uv10_file)
+
+ if ('w500' in comp_vars):
+ ###### Outputting the W500 file
+ # creating the xarray dataset
+ out_var_ds = xr.Dataset(
+ {'w500': (('time', 'lat', 'lon'), w500_sel)},
+ coords={
+ 'time': time,
+ 'lat': lat,
+ 'lon': lon
+ })
+ # adding the necessary attributes to the file
+ out_var_ds.w500.attrs['units'] = 'Pa/s'
+ out_var_ds.time.attrs['delta_t'] = "0000-00-00 06:00:00";
+ out_var_ds.time.attrs['units'] = f"hours since {year:04d}-01-01 00:00:00";
+ if (calendar == 'noleap'):
+ out_var_ds.time.attrs['calendar'] = '365_day'
+ else:
+ out_var_ds.time.attrs['calendar'] = calendar
+ out_var_ds.lon.attrs['long_name'] = 'longitude'
+ out_var_ds.lon.attrs['standard_name'] = 'longitude'
+ out_var_ds.lon.attrs['units'] = 'degrees_east'
+ out_var_ds.lon.attrs['axis'] = 'X'
+ out_var_ds.lat.attrs['long_name'] = 'latitude'
+ out_var_ds.lat.attrs['standard_name'] = 'latitude'
+ out_var_ds.lat.attrs['units'] = 'degrees_north'
+ out_var_ds.lat.attrs['axis'] = 'Y'
+ # writing to the netcdf file
+ out_var_ds.to_netcdf(out_w500_file)
+
+ if ('clt' in comp_vars):
+ ###### Outputting the CLT file
+ # creating the xarray dataset
+ out_var_ds = xr.Dataset(
+ {'clt': (('time', 'lat', 'lon'), clt_sel)},
+ coords={
+ 'time': time,
+ 'lat': lat,
+ 'lon': lon
+ })
+ # adding the necessary attributes to the file
+ out_var_ds.clt.attrs['units'] = '{}'.format('%')
+ out_var_ds.time.attrs['delta_t'] = "0000-00-00 06:00:00";
+ out_var_ds.time.attrs['units'] = f"hours since {year:04d}-01-01 00:00:00";
+ if (calendar == 'noleap'):
+ out_var_ds.time.attrs['calendar'] = '365_day'
+ else:
+ out_var_ds.time.attrs['calendar'] = calendar
+ out_var_ds.lon.attrs['long_name'] = 'longitude'
+ out_var_ds.lon.attrs['standard_name'] = 'longitude'
+ out_var_ds.lon.attrs['units'] = 'degrees_east'
+ out_var_ds.lon.attrs['axis'] = 'X'
+ out_var_ds.lat.attrs['long_name'] = 'latitude'
+ out_var_ds.lat.attrs['standard_name'] = 'latitude'
+ out_var_ds.lat.attrs['units'] = 'degrees_north'
+ out_var_ds.lat.attrs['axis'] = 'Y'
+ # writing to the netcdf file
+ out_var_ds.to_netcdf(out_clt_file)
+
+run_time = timelib.gmtime(timelib.time() - mid_time)
+mid_time = timelib.time()
+print(f'Completed creation of yearly files for the tracker in {timelib.strftime("%H:%M:%S", run_time)}')
+
+if (os.environ['USE_EXTERNAL_TRACKS'] == 'True'):
+ print('Using external tracks...')
+ run_tracker_setup.init_setup()
+ run_tracker_setup.copy_code_over()
+else:
+ print('Running the MCMS Tracker [not using external tracks]...')
+ # Running the tracker
+ cmd = "python %s/util/run_tracker.py"%(os.environ['POD_HOME'])
+ os.system(cmd)
+
+run_time = timelib.gmtime(timelib.time() - mid_time)
+mid_time = timelib.time()
+print(f'Completed the tracker portion of the code in {timelib.strftime("%H:%M:%S", run_time)}')
+
+# I have to create the matlab dictionaries from the track output file
+print('Creating matlab dict...')
+cmd = "python %s/util/run_create_dict.py"%(os.environ['POD_HOME'])
+os.system(cmd)
+run_time = timelib.gmtime(timelib.time() - mid_time)
+mid_time = timelib.time()
+print(f'Completed creation of the mat file used for the analysis in {timelib.strftime("%H:%M:%S", run_time)}')
+
+# Running the track stats
+cmd = "python %s/util/run_track_stats.py"%(os.environ['POD_HOME'])
+os.system(cmd)
+run_time = timelib.gmtime(timelib.time() - mid_time)
+mid_time = timelib.time()
+print(f'Completed the track stats in {timelib.strftime("%H:%M:%S", run_time)}')
+
+create_empty_figs()
+run_time = timelib.gmtime(timelib.time() - mid_time)
+mid_time = timelib.time()
+print(f'Created the empty composites figures in {timelib.strftime("%H:%M:%S", run_time)}')
+
+if (os.environ['RUN_COMPOSITES'] == 'True'):
+ # Running the composites code
+ # create the necesssary variable files and composites
+ print('Running the composites code...')
+ cmd = "python %s/util/run_composites.py"%(os.environ['POD_HOME'])
+ os.system(cmd)
+
+ ###################################################
+ ##### Creating plots from obs/merra and era-interim
+ ###################################################
+
+ # load in the netcdf files
+ obs_file = f"{os.environ['OBS_DATA']}/modis_merra.nc"
+ era_file = f"{os.environ['OBS_DATA']}/era_interim.nc"
+
+ # reading in the ERA re-analysis file
+ ds = xr.open_dataset(era_file)
+ erai_x = ds['X'].values
+ erai_y = ds['Y'].values
+
+ ## the erai x and y are 1d, have to convert it to a 2d grid
+ erai_x_grid, erai_y_grid = np.meshgrid(erai_x, erai_y)
+ erai_x_1d = erai_x_grid.flatten()
+ erai_y_1d = erai_y_grid.flatten()
+
+ if ('pr' in comp_vars):
+ pr_nh_ocean_warm = ds['tp_nh_ocean_warm'].values
+ pr_sh_ocean_warm = ds['tp_sh_ocean_warm'].values
+
+ if ('prw' in comp_vars):
+ prw_nh_ocean_warm = ds['prw_nh_ocean_warm'].values
+ prw_sh_ocean_warm = ds['prw_sh_ocean_warm'].values
+
+ if ('uv10' in comp_vars):
+ uv10_nh_ocean_warm = ds['uv10_nh_ocean_warm'].values
+ uv10_sh_ocean_warm = ds['uv10_sh_ocean_warm'].values
+
+ if ('w500' in comp_vars):
+ w500_nh_ocean_warm = ds['w500_nh_ocean_warm'].values
+ w500_sh_ocean_warm = ds['w500_sh_ocean_warm'].values
+ ds.close()
+
+ # reading in the MERRA observation file
+ ds = xr.open_dataset(obs_file)
+ obs_x = ds['X'].values
+ obs_y = ds['Y'].values
+
+ ## setting up the necessary x,y values in the format required for griddata
+ obs_x_1d = obs_x.flatten()
+ obs_y_1d = obs_y.flatten()
+
+ if ('prw' in comp_vars):
+ merra_prw = ds['merra_pw'].values
+ merra_prw_1d = merra_prw.flatten()
+
+ # interpolating the ang, dist plots from observations on to the erai standard grid (same grid as the outputs from the model)
+ erai_merra_prw = interpolate.griddata((obs_x_1d, obs_y_1d), merra_prw_1d, (erai_x_1d, erai_y_1d))
+ erai_merra_prw = erai_merra_prw.reshape(erai_x_grid.shape)
+
+ if ('clt' in comp_vars):
+ modis_cld = ds['modis_cld'].values
+ modis_cld_1d = modis_cld.flatten()
+
+ # interpolating the ang, dist plots from observations on to the erai standard grid (same grid as the outputs from the model)
+ erai_modis_cld = interpolate.griddata((obs_x_1d, obs_y_1d), modis_cld_1d, (erai_x_1d, erai_y_1d))
+ erai_modis_cld = erai_modis_cld.reshape(erai_x_grid.shape)
+
+ if ('w500' in comp_vars):
+ merra_w500 = ds['merra_omega'].values
+ merra_w500_1d = merra_w500.flatten()
+
+ # interpolating the ang, dist plots from observations on to the erai standard grid (same grid as the outputs from the model)
+ erai_merra_w500 = interpolate.griddata((obs_x_1d, obs_y_1d), merra_w500_1d, (erai_x_1d, erai_y_1d))
+ erai_merra_w500 = erai_merra_w500.reshape(erai_x_grid.shape)
+
+ ds.close()
+
+ if ('clt' in comp_vars):
+ # MERRA SH - Ocean - WARM
+ out_file = f"{os.environ['WK_DIR']}/obs/{os.environ['CASENAME']}_modis_cld_SH_ocean_WARM.png"
+ title = 'MODIS Cloud Cover [SH-OCEAN-WARM]'
+ plot_area_fig(erai_x,erai_y,erai_modis_cld,title,out_file)
+
+ if ('prw' in comp_vars):
+ # MERRA SH - Ocean - WARM
+ out_file = f"{os.environ['WK_DIR']}/obs/{os.environ['CASENAME']}_merra_prw_SH_ocean_WARM.png"
+ title = 'MERRA Precipitation [SH-OCEAN-WARM]'
+ plot_area_fig(erai_x,erai_y,erai_merra_prw,title,out_file)
+
+ # ERA SH - Ocean - WARM
+ out_file = f"{os.environ['WK_DIR']}/obs/{os.environ['CASENAME']}_erai_prw_SH_ocean_WARM.png"
+ title = 'ERA-Interim PRW [SH-OCEAN-WARM]'
+ plot_area_fig(erai_x,erai_y,prw_sh_ocean_warm,title,out_file)
+
+ # ERA NH - Ocean - WARM
+ out_file = f"{os.environ['WK_DIR']}/obs/{os.environ['CASENAME']}_erai_prw_NH_ocean_WARM.png"
+ title = 'ERA-Interim PRW [NH-OCEAN-WARM]'
+ plot_area_fig(erai_x,erai_y,prw_nh_ocean_warm,title,out_file)
+
+ if ('w500' in comp_vars):
+ # MERRA SH - Ocean - WARM
+ out_file = f"{os.environ['WK_DIR']}/obs/{os.environ['CASENAME']}_merra_w500_SH_ocean_WARM.png"
+ title = 'MERRA Omega @ 500 hPa [SH-OCEAN-WARM]'
+ plot_area_fig(erai_x,erai_y,erai_merra_w500,title,out_file)
+
+ # ERA SH - Ocean - WARM
+ out_file = f"{os.environ['WK_DIR']}/obs/{os.environ['CASENAME']}_erai_w500_SH_ocean_WARM.png"
+ title = 'ERA-Interim Omega @ 500hPa [SH-OCEAN-WARM]'
+ plot_area_fig(erai_x,erai_y,w500_sh_ocean_warm,title,out_file)
+
+ # ERA NH - Ocean - WARM
+ out_file = f"{os.environ['WK_DIR']}/obs/{os.environ['CASENAME']}_erai_w500_NH_ocean_WARM.png"
+ title = 'ERA-Interim Omega @ 500hPa [NH-OCEAN-WARM]'
+ plot_area_fig(erai_x,erai_y,w500_nh_ocean_warm,title,out_file)
+
+
+ if ('pr' in comp_vars):
+ # ERA SH - Ocean - WARM
+ out_file = f"{os.environ['WK_DIR']}/obs/{os.environ['CASENAME']}_erai_pr_SH_ocean_WARM.png"
+ title = 'ERA-Interim TP [SH-OCEAN-WARM]'
+ plot_area_fig(erai_x,erai_y,pr_sh_ocean_warm,title,out_file)
+
+ # ERA NH - Ocean - WARM
+ out_file = f"{os.environ['WK_DIR']}/obs/{os.environ['CASENAME']}_erai_pr_NH_ocean_WARM.png"
+ title = 'ERA-Interim TP [NH-OCEAN-WARM]'
+ plot_area_fig(erai_x,erai_y,pr_nh_ocean_warm,title,out_file)
+
+ if ('uv10' in comp_vars):
+ # ERA SH - Ocean - WARM
+ out_file = f"{os.environ['WK_DIR']}/obs/{os.environ['CASENAME']}_erai_uv10_SH_ocean_WARM.png"
+ title = 'ERA-Interim Wind Speed [SH-OCEAN-WARM]'
+ plot_area_fig(erai_x,erai_y,uv10_sh_ocean_warm,title,out_file)
+
+ # ERA NH - Ocean - WARM
+ out_file = f"{os.environ['WK_DIR']}/obs/{os.environ['CASENAME']}_erai_uv10_NH_ocean_WARM.png"
+ title = 'ERA-Interim Wind Speed [NH-OCEAN-WARM]'
+ plot_area_fig(erai_x,erai_y,uv10_nh_ocean_warm,title,out_file)
+
+
+ ############################################################
+ ####### Creating Difference Plots
+ ############################################################
+
+ ## Reading in the model composites
+ model_file = f"{os.environ['WK_DIR']}/tmp/RUNDIR/tmprun/read_tmprun/composites.pkl"
+ model_data = pickle.load(open(model_file, 'rb'))
+
+ # Creating the plots
+ ##################### Model - ERA-Interim SH [All vars] ###########################################
+ plt.figure(figsize=(12,12))
+
+ ax = plt.subplot(4,3,1)
+ hemis = 'SH'; lo = 'ocean'; season = 'warm'; var = 'prw'
+ if (var in comp_vars):
+ model_val = model_data[hemis][lo][season][var]['area_sum']/model_data[hemis][lo][season][var]['area_cnt']
+ plt.pcolormesh(erai_x, erai_y, model_val, cmap='jet', vmin=0, vmax=24)
+ plt.colorbar()
+ plt.title(f'{os.environ["CASENAME"]}\nPRW [SH-OCEAN-WARM]')
+ plt.ylabel('Distance [km]')
+ plt.ylim(-1500, 1500)
+ plt.xlim(-1500, 1500)
+ else:
+ plot_empty_single(ax, var)
+
+ ax = plt.subplot(4,3,2)
+ if (var in comp_vars):
+ plt.pcolormesh(erai_x, erai_y, prw_sh_ocean_warm, cmap='jet', vmin=0, vmax=24)
+ plt.colorbar()
+ plt.title('ERA-Interim\nPRW [SH-OCEAN-WARM]')
+ plt.ylim(-1500, 1500)
+ plt.xlim(-1500, 1500)
+ else:
+ plot_empty_single(ax, var)
+
+ ax = plt.subplot(4,3,3)
+ if (var in comp_vars):
+ diff_val = model_val - prw_sh_ocean_warm
+ vmax = np.nanpercentile(np.abs(diff_val).flatten(), 95)
+ vmin = -1*vmax
+ plt.pcolormesh(erai_x, erai_y, diff_val, vmin=vmin, vmax=vmax, cmap='bwr')
+ plt.colorbar()
+ plt.title(f'{os.environ["CASENAME"]} - ERA-Interim\nPRW [SH-OCEAN-WARM]')
+ plt.ylim(-1500, 1500)
+ plt.xlim(-1500, 1500)
+ else:
+ plot_empty_single(ax, var)
+
+ ax = plt.subplot(4,3,4)
+ hemis = 'SH'; lo = 'ocean'; season = 'warm'; var = 'pr'
+ if (var in comp_vars):
+ model_val = model_data[hemis][lo][season][var]['area_sum']/model_data[hemis][lo][season][var]['area_cnt']
+ plt.pcolormesh(erai_x, erai_y, model_val, cmap='jet', vmin=0, vmax=0.6)
+ plt.colorbar()
+ plt.title(f'PR [SH-OCEAN-WARM]')
+ plt.ylabel('Distance [km]')
+ plt.ylim(-1500, 1500)
+ plt.xlim(-1500, 1500)
+ else:
+ plot_empty_single(ax, var)
+
+ ax = plt.subplot(4,3,5)
+ if (var in comp_vars):
+ plt.pcolormesh(erai_x, erai_y, pr_sh_ocean_warm, cmap='jet', vmin=0, vmax=0.6)
+ plt.colorbar()
+ plt.title('PR [SH-OCEAN-WARM]')
+ plt.ylim(-1500, 1500)
+ plt.xlim(-1500, 1500)
+ else:
+ plot_empty_single(ax, var)
+
+ ax = plt.subplot(4,3,6)
+ if (var in comp_vars):
+ diff_val = model_val - pr_sh_ocean_warm
+ vmax = np.nanpercentile(np.abs(diff_val).flatten(), 95)
+ vmin = -1*vmax
+ plt.pcolormesh(erai_x, erai_y, diff_val, vmin=vmin, vmax=vmax, cmap='bwr')
+ plt.colorbar()
+ plt.title(f'PR [SH-OCEAN-WARM]')
+ plt.ylim(-1500, 1500)
+ plt.xlim(-1500, 1500)
+ else:
+ plot_empty_single(ax, var)
+
+ ax = plt.subplot(4,3,7)
+ hemis = 'SH'; lo = 'ocean'; season = 'warm'; var = 'uv10'
+ if (var in comp_vars):
+ model_val = model_data[hemis][lo][season][var]['area_sum']/model_data[hemis][lo][season][var]['area_cnt']
+ plt.pcolormesh(erai_x, erai_y, model_val, cmap='jet', vmin=0, vmax=14)
+ plt.colorbar()
+ plt.title(f'UV10 [SH-OCEAN-WARM]')
+ plt.ylabel('Distance [km]')
+ plt.ylim(-1500, 1500)
+ plt.xlim(-1500, 1500)
+ else:
+ plot_empty_single(ax, var)
+
+ ax = plt.subplot(4,3,8)
+ if (var in comp_vars):
+ plt.pcolormesh(erai_x, erai_y, uv10_sh_ocean_warm, cmap='jet', vmin=0, vmax=14)
+ plt.colorbar()
+ plt.title('UV10 [SH-OCEAN-WARM]')
+ plt.ylim(-1500, 1500)
+ plt.xlim(-1500, 1500)
+ else:
+ plot_empty_single(ax, var)
+
+ ax = plt.subplot(4,3,9)
+ if (var in comp_vars):
+ diff_val = model_val - uv10_sh_ocean_warm
+ vmax = np.nanpercentile(np.abs(diff_val).flatten(), 95)
+ vmin = -1*vmax
+ plt.pcolormesh(erai_x, erai_y, diff_val, vmin=vmin, vmax=vmax, cmap='bwr')
+ plt.colorbar()
+ plt.title(f'UV10 [SH-OCEAN-WARM]')
+ plt.ylim(-1500, 1500)
+ plt.xlim(-1500, 1500)
+ else:
+ plot_empty_single(ax, var)
+
+ ax = plt.subplot(4,3,10)
+ hemis = 'SH'; lo = 'ocean'; season = 'warm'; var = 'w500'
+ if (var in comp_vars):
+ model_val = model_data[hemis][lo][season][var]['area_sum']/model_data[hemis][lo][season][var]['area_cnt']
+ plt.pcolormesh(erai_x, erai_y, model_val, cmap='jet', vmin=-0.3, vmax=.06)
+ plt.colorbar()
+ plt.title(f'Omega @ 500 hPa [SH-OCEAN-WARM]')
+ plt.ylabel('Distance [km]')
+ plt.ylim(-1500, 1500)
+ plt.xlim(-1500, 1500)
+ else:
+ plot_empty_single(ax, var)
+
+ ax = plt.subplot(4,3,11)
+ if (var in comp_vars):
+ plt.pcolormesh(erai_x, erai_y, w500_sh_ocean_warm, cmap='jet', vmin=-.3, vmax=.06)
+ plt.colorbar()
+ plt.title('Omega @ 500 hPa [SH-OCEAN-WARM]')
+ plt.ylim(-1500, 1500)
+ plt.xlim(-1500, 1500)
+ else:
+ plot_empty_single(ax, var)
+
+ ax = plt.subplot(4,3,12)
+ if (var in comp_vars):
+ diff_val = model_val - w500_sh_ocean_warm
+ vmax = np.nanpercentile(np.abs(diff_val).flatten(), 95)
+ vmin = -1*vmax
+ plt.pcolormesh(erai_x, erai_y, diff_val, vmin=vmin, vmax=vmax, cmap='bwr')
+ plt.colorbar()
+ plt.title(f'Omega @ 500 hPa [SH-OCEAN-WARM]')
+ plt.ylim(-1500, 1500)
+ plt.xlim(-1500, 1500)
+ else:
+ plot_empty_single(ax, var)
+
+ out_file = f"{os.environ['WK_DIR']}/model/diff_{os.environ['CASENAME']}_erai_vars_SH_ocean_WARM.png"
+ plt.tight_layout()
+ plt.savefig(out_file)
+ plt.close('all')
+
+ ##################### NH ###########################################
+ ##################### Model - ERA-Interim NH [All vars] ###########################################
+ plt.figure(figsize=(12,12))
+
+ ax = plt.subplot(4,3,1)
+ hemis = 'NH'; lo = 'ocean'; season = 'warm'; var = 'prw'
+ if (var in comp_vars):
+ model_val = model_data[hemis][lo][season][var]['area_sum']/model_data[hemis][lo][season][var]['area_cnt']
+ plt.pcolormesh(erai_x, erai_y, model_val, cmap='jet', vmin=0, vmax=24)
+ plt.colorbar()
+ plt.title(f'{os.environ["CASENAME"]}\nPRW [NH-OCEAN-WARM]')
+ plt.ylabel('Distance [km]')
+ plt.ylim(-1500, 1500)
+ plt.xlim(-1500, 1500)
+ else:
+ plot_empty_single(ax, var)
+
+ ax = plt.subplot(4,3,2)
+ if (var in comp_vars):
+ plt.pcolormesh(erai_x, erai_y, prw_nh_ocean_warm, cmap='jet', vmin=0, vmax=24)
+ plt.colorbar()
+ plt.title('ERA-Interim\nPRW [NH-OCEAN-WARM]')
+ plt.ylim(-1500, 1500)
+ plt.xlim(-1500, 1500)
+ else:
+ plot_empty_single(ax, var)
+
+ ax = plt.subplot(4,3,3)
+ if (var in comp_vars):
+ diff_val = model_val - prw_nh_ocean_warm
+ vmax = np.nanpercentile(np.abs(diff_val).flatten(), 95)
+ vmin = -1*vmax
+ plt.pcolormesh(erai_x, erai_y, diff_val, vmin=vmin, vmax=vmax, cmap='bwr')
+ plt.colorbar()
+ plt.title(f'{os.environ["CASENAME"]} - ERA-Interim\nPRW [NH-OCEAN-WARM]')
+ plt.ylim(-1500, 1500)
+ plt.xlim(-1500, 1500)
+ else:
+ plot_empty_single(ax, var)
+
+ ax = plt.subplot(4,3,4)
+ hemis = 'NH'; lo = 'ocean'; season = 'warm'; var = 'pr'
+ if (var in comp_vars):
+ model_val = model_data[hemis][lo][season][var]['area_sum']/model_data[hemis][lo][season][var]['area_cnt']
+ plt.pcolormesh(erai_x, erai_y, model_val, cmap='jet', vmin=0, vmax=0.6)
+ plt.colorbar()
+ plt.title(f'TP [NH-OCEAN-WARM]')
+ plt.ylabel('Distance [km]')
+ plt.ylim(-1500, 1500)
+ plt.xlim(-1500, 1500)
+ else:
+ plot_empty_single(ax, var)
+
+ ax = plt.subplot(4,3,5)
+ if (var in comp_vars):
+ plt.pcolormesh(erai_x, erai_y, pr_nh_ocean_warm, cmap='jet', vmin=0, vmax=0.6)
+ plt.colorbar()
+ plt.title('TP [NH-OCEAN-WARM]')
+ plt.ylim(-1500, 1500)
+ plt.xlim(-1500, 1500)
+ else:
+ plot_empty_single(ax, var)
+
+ ax = plt.subplot(4,3,6)
+ if (var in comp_vars):
+ diff_val = model_val - pr_nh_ocean_warm
+ vmax = np.nanpercentile(np.abs(diff_val).flatten(), 95)
+ vmin = -1*vmax
+ plt.pcolormesh(erai_x, erai_y, diff_val, vmin=vmin, vmax=vmax, cmap='bwr')
+ plt.colorbar()
+ plt.title(f'TP [NH-OCEAN-WARM]')
+ plt.ylim(-1500, 1500)
+ plt.xlim(-1500, 1500)
+ else:
+ plot_empty_single(ax, var)
+
+ ax = plt.subplot(4,3,7)
+ hemis = 'NH'; lo = 'ocean'; season = 'warm'; var = 'uv10'
+ if (var in comp_vars):
+ model_val = model_data[hemis][lo][season][var]['area_sum']/model_data[hemis][lo][season][var]['area_cnt']
+ plt.pcolormesh(erai_x, erai_y, model_val, cmap='jet', vmin=0, vmax=14)
+ plt.colorbar()
+ plt.title(f'UV10 [NH-OCEAN-WARM]')
+ plt.ylabel('Distance [km]')
+ plt.ylim(-1500, 1500)
+ plt.xlim(-1500, 1500)
+ else:
+ plot_empty_single(ax, var)
+
+ ax = plt.subplot(4,3,8)
+ if (var in comp_vars):
+ plt.pcolormesh(erai_x, erai_y, uv10_nh_ocean_warm, cmap='jet', vmin=0, vmax=14)
+ plt.colorbar()
+ plt.title('UV10 [NH-OCEAN-WARM]')
+ plt.ylim(-1500, 1500)
+ plt.xlim(-1500, 1500)
+ else:
+ plot_empty_single(ax, var)
+
+ ax = plt.subplot(4,3,9)
+ if (var in comp_vars):
+ diff_val = model_val - uv10_nh_ocean_warm
+ vmax = np.nanpercentile(np.abs(diff_val).flatten(), 95)
+ vmin = -1*vmax
+ plt.pcolormesh(erai_x, erai_y, diff_val, vmin=vmin, vmax=vmax, cmap='bwr')
+ plt.colorbar()
+ plt.title(f'UV10 [NH-OCEAN-WARM]')
+ plt.ylim(-1500, 1500)
+ plt.xlim(-1500, 1500)
+ else:
+ plot_empty_single(ax, var)
+
+ ax = plt.subplot(4,3,10)
+ hemis = 'NH'; lo = 'ocean'; season = 'warm'; var = 'w500'
+ if (var in comp_vars):
+ model_val = model_data[hemis][lo][season][var]['area_sum']/model_data[hemis][lo][season][var]['area_cnt']
+ plt.pcolormesh(erai_x, erai_y, model_val, cmap='jet', vmin=-0.3, vmax=.06)
+ plt.colorbar()
+ plt.title(f'Omega @ 500 hPa [NH-OCEAN-WARM]')
+ plt.ylabel('Distance [km]')
+ plt.ylim(-1500, 1500)
+ plt.xlim(-1500, 1500)
+ else:
+ plot_empty_single(ax, var)
+
+ ax = plt.subplot(4,3,11)
+ if (var in comp_vars):
+ plt.pcolormesh(erai_x, erai_y, w500_nh_ocean_warm, cmap='jet', vmin=-.3, vmax=.06)
+ plt.colorbar()
+ plt.title('Omega @ 500 hPa [NH-OCEAN-WARM]')
+ plt.ylim(-1500, 1500)
+ plt.xlim(-1500, 1500)
+ else:
+ plot_empty_single(ax, var)
+
+ ax = plt.subplot(4,3,12)
+ if (var in comp_vars):
+ diff_val = model_val - w500_nh_ocean_warm
+ vmax = np.nanpercentile(np.abs(diff_val).flatten(), 95)
+ vmin = -1*vmax
+ plt.pcolormesh(erai_x, erai_y, diff_val, vmin=vmin, vmax=vmax, cmap='bwr')
+ plt.colorbar()
+ plt.title(f'Omega @ 500 hPa [NH-OCEAN-WARM]')
+ plt.ylim(-1500, 1500)
+ plt.xlim(-1500, 1500)
+ else:
+ plot_empty_single(ax, var)
+
+ plt.tight_layout()
+ out_file = f"{os.environ['WK_DIR']}/model/diff_{os.environ['CASENAME']}_erai_vars_NH_ocean_WARM.png"
+ plt.savefig(out_file)
+ plt.close('all')
+
+ ####################### MODEL - MERRA variables
+ plt.figure(figsize=(12,6))
+
+ ax = plt.subplot(2,3,1)
+ hemis = 'SH'; lo = 'ocean'; season = 'warm'; var = 'prw'
+ if (var in comp_vars):
+ model_val = model_data[hemis][lo][season][var]['area_sum']/model_data[hemis][lo][season][var]['area_cnt']
+ plt.pcolormesh(erai_x, erai_y, model_val, cmap='jet', vmin=0, vmax=24)
+ plt.colorbar()
+ plt.title(f'{os.environ["CASENAME"]}\nPRW [SH-OCEAN-WARM]')
+ plt.ylabel('Distance [km]')
+ plt.ylim(-1500, 1500)
+ plt.xlim(-1500, 1500)
+ else:
+ plot_empty_single(ax, var)
+
+ ax = plt.subplot(2,3,2)
+ if (var in comp_vars):
+ tmp = erai_merra_prw.copy()
+ tmp[np.isnan(model_val)] = np.nan
+ plt.pcolormesh(erai_x, erai_y, tmp, cmap='jet', vmin=0, vmax=24)
+ plt.colorbar()
+ plt.title('MERRA\nPRW [SH-OCEAN-WARM]')
+ plt.ylim(-1500, 1500)
+ plt.xlim(-1500, 1500)
+ else:
+ plot_empty_single(ax, var)
+
+ ax = plt.subplot(2,3,3)
+ if (var in comp_vars):
+ diff_val = model_val - erai_merra_prw
+ vmax = np.nanpercentile(np.abs(diff_val).flatten(), 95)
+ vmin = -1*vmax
+ plt.pcolormesh(erai_x, erai_y, diff_val, vmin=vmin, vmax=vmax, cmap='bwr')
+ plt.colorbar()
+ plt.title(f'{os.environ["CASENAME"]} - MERRA\nPRW [SH-OCEAN-WARM]')
+ plt.ylim(-1500, 1500)
+ plt.xlim(-1500, 1500)
+ else:
+ plot_empty_single(ax, var)
+
+ ax = plt.subplot(2,3,4)
+ hemis = 'SH'; lo = 'ocean'; season = 'warm'; var = 'w500'
+ if (var in comp_vars):
+ model_val = model_data[hemis][lo][season][var]['area_sum']/model_data[hemis][lo][season][var]['area_cnt']
+ plt.pcolormesh(erai_x, erai_y, model_val, cmap='jet', vmin=-.3, vmax=.06)
+ # plt.pcolormesh(erai_x, erai_y, model_val, cmap='jet')
+ plt.colorbar()
+ plt.title(f'Omega @ 500 hPa [SH-OCEAN-WARM]')
+ plt.ylabel('Distance [km]')
+ plt.ylim(-1500, 1500)
+ plt.xlim(-1500, 1500)
+ else:
+ plot_empty_single(ax, var)
+
+ ax = plt.subplot(2,3,5)
+ if (var in comp_vars):
+ tmp = (erai_merra_w500.copy())
+ tmp[np.isnan(model_val)] = np.nan
+ # plt.pcolormesh(erai_x, erai_y, tmp, cmap='jet', vmin=-.3, vmax=.06)
+ plt.pcolormesh(erai_x, erai_y, tmp, cmap='jet')
+ plt.colorbar()
+ plt.title(f'Omega @ 500 hPa [SH-OCEAN-WARM]')
+ plt.ylim(-1500, 1500)
+ plt.xlim(-1500, 1500)
+ else:
+ plot_empty_single(ax, var)
+
+ ax = plt.subplot(2,3,6)
+ if (var in comp_vars):
+ diff_val = model_val - erai_merra_w500
+ vmax = np.nanpercentile(np.abs(diff_val).flatten(), 95)
+ vmin = -1*vmax
+ plt.pcolormesh(erai_x, erai_y, diff_val, vmin=vmin, vmax=vmax, cmap='bwr')
+ plt.colorbar()
+ plt.title(f'Omega @ 500 hPa [SH-OCEAN-WARM]')
+ plt.ylim(-1500, 1500)
+ plt.xlim(-1500, 1500)
+ else:
+ plot_empty_single(ax, var)
+
+ plt.tight_layout()
+ out_file = f"{os.environ['WK_DIR']}/model/diff_{os.environ['CASENAME']}_merra_vars_SH_ocean_WARM.png"
+ plt.savefig(out_file)
+ plt.close('all')
+
+
+ ####################### MODEL - MODIS variables
+ plt.figure(figsize=(12,3))
+
+ ax = plt.subplot(1,3,1)
+ hemis = 'SH'; lo = 'ocean'; season = 'warm'; var = 'clt'
+ if (var in comp_vars):
+ model_val = model_data[hemis][lo][season][var]['area_sum']/model_data[hemis][lo][season][var]['area_cnt']
+ plt.pcolormesh(erai_x, erai_y, model_val, cmap='jet', vmin=0, vmax=100)
+ plt.colorbar()
+ plt.title(f'{os.environ["CASENAME"]}\nCLT [SH-OCEAN-WARM]')
+ plt.ylabel('Distance [km]')
+ plt.ylim(-1500, 1500)
+ plt.xlim(-1500, 1500)
+ else:
+ plot_empty_single(ax, var)
+
+ ax = plt.subplot(1,3,2)
+ if (var in comp_vars):
+ tmp = erai_modis_cld.copy()
+ tmp[np.isnan(model_val)] = np.nan
+ plt.pcolormesh(erai_x, erai_y, tmp, cmap='jet', vmin=0, vmax=100)
+ plt.colorbar()
+ plt.title('MODIS\nCLT [SH-OCEAN-WARM]')
+ plt.ylim(-1500, 1500)
+ plt.xlim(-1500, 1500)
+ else:
+ plot_empty_single(ax, var)
+
+ ax = plt.subplot(1,3,3)
+ if (var in comp_vars):
+ diff_val = model_val - erai_modis_cld
+ vmax = np.nanpercentile(np.abs(diff_val).flatten(), 95)
+ vmin = -1*vmax
+ plt.pcolormesh(erai_x, erai_y, diff_val, vmin=vmin, vmax=vmax, cmap='bwr')
+ plt.colorbar()
+ plt.title(f'{os.environ["CASENAME"]} - MODIS\nCLT [SH-OCEAN-WARM]')
+ plt.ylim(-1500, 1500)
+ plt.xlim(-1500, 1500)
+ else:
+ plot_empty_single(ax, var)
+
+ plt.tight_layout()
+ out_file = f"{os.environ['WK_DIR']}/model/diff_{os.environ['CASENAME']}_modis_vars_SH_ocean_WARM.png"
+ plt.savefig(out_file)
+ plt.close('all')
+
+ # End run composites true/false
+ run_time = timelib.gmtime(timelib.time() - mid_time)
+ mid_time = timelib.time()
+ print(f'Completed the compositing portion of the code and created the plots in {timelib.strftime("%H:%M:%S", run_time)}')
+
+end_time = timelib.time()
+run_time = timelib.gmtime(end_time - start_time)
+
+# Completed Code
+print(f'Done Completing ETC-composites driver code in {timelib.strftime("%H:%M:%S", run_time)} from {start_time} to {end_time}')
diff --git a/diagnostics/etc_composites/settings.jsonc b/diagnostics/etc_composites/settings.jsonc
new file mode 100755
index 000000000..5f4f02b5a
--- /dev/null
+++ b/diagnostics/etc_composites/settings.jsonc
@@ -0,0 +1,96 @@
+{
+ "settings" : {
+ "driver" : "etc_composites.py",
+ "long_name" : "ETC composites",
+ "realm" : "atmos",
+ "description" : "Extratropical Cyclone Composites POD",
+ "pod_env_vars" : {
+ "lat_var": "lat",
+ "lon_var": "lon",
+ "USE_EXTERNAL_TRACKS": "False",
+ "EXTERNAL_TRACKS_FILE": "/localdrive/drive10/jj/etc_push/inputdata/model/ERA5.NEW.DEG10.001/6hr/track_output.txt",
+ "RUN_COMPOSITES": "False"
+ },
+ "runtime_requirements": {
+ "python3": ["matplotlib", "xarray", "netCDF4", "numpy", "cartopy"]
+ }
+ },
+ "dimensions": {
+ "lat": {"standard_name": "latitude"},
+ "lon": {"standard_name": "longitude"},
+ "time": {"standard_name": "time"},
+ "lev": {
+ "standard_name": "air_pressure",
+ "units": "hPa",
+ "positive": "down",
+ "axis": "Z"
+ },
+ "altitude": {
+ "standard_name": "altitude",
+ "units": "m",
+ "positive": "up",
+ "axis": "Z"
+ }
+ },
+ "data": {
+ "frequency": "6hr"
+ },
+ "varlist" : {
+ "orog": {
+ "standard_name" : "surface_altitude",
+ "units": "m",
+ "dimensions": ["lat", "lon"],
+ "use_exact_name": true
+ },
+ "sftlf": {
+ "standard_name" : "land_area_fraction",
+ "units": "1",
+ "dimensions": ["lat", "lon"],
+ "use_exact_name": true
+ },
+ "psl": {
+ "standard_name" : "air_pressure_at_mean_sea_level",
+ "units": "Pa",
+ "dimensions": ["time", "lat", "lon"],
+ "use_exact_name": true
+ },
+ "clt": {
+ "standard_name" : "cloud_area_fraction",
+ "units": "1",
+ "dimensions": ["time", "lat", "lon"],
+ "use_exact_name": true
+ },
+ "pr": {
+ "standard_name" : "precipitation_flux",
+ "units": "kg m-2 s-1",
+ "dimensions": ["time", "lat", "lon"],
+ "use_exact_name": true
+ },
+ "w500": {
+ "standard_name" : "lagrangian_tendency_of_air_pressure",
+ "units": "Pa s-1",
+ "dimensions": ["time", "lat", "lon"],
+ "scalar_coordinates": {"lev": 500}
+ },
+ "prw": {
+ "standard_name" : "atmosphere_mass_content_of_water_vapor",
+ "units": "kg m-2",
+ "dimensions": ["time", "lat", "lon"],
+ "use_exact_name": true
+ },
+ "u10": {
+ "standard_name" : "eastward_wind",
+ "units": "m s-1",
+ "dimensions": ["time", "lat", "lon"],
+ "modifier": "atmos_height",
+ "use_exact_name": true
+ },
+ "v10": {
+      "standard_name" : "northward_wind",
+ "units": "m s-1",
+ "dimensions": ["time", "lat", "lon"],
+ "modifier": "atmos_height",
+ "use_exact_name": true
+ }
+ }
+}
diff --git a/diagnostics/etc_composites/util/README.md b/diagnostics/etc_composites/util/README.md
new file mode 100755
index 000000000..6ec4a173e
--- /dev/null
+++ b/diagnostics/etc_composites/util/README.md
@@ -0,0 +1,97 @@
+# Extra-tropical Cyclone Diagnostic Tool
+## Created by Jeyavinoth Jeyaratnam (The City College of New York, CUNY)
+
+Last Modified: Jan 24th, 2020
+
+# Custom Notes:
+
+All needed data is in /localdrive/drive6/.
+
+the code for the converts are in /localdrive/drive10/mcms\_tracker/data\_preprocessing
+
+
+# Modules
+
+This code has many parts (the main driver codes for each part is in the main folder):
+
+1. Tracker (run\_tracker.py)- Tracks ETCs using Re-analysis/Model Data
+2. Statistics (run\_track\_stats.py) - Diagnostics on the tracked cyclones (i.e. track density, feature density, genesis, lysis).
+3. Composite Analysis (run\_composites.py) - Creates figures of composites for the selected variables.
+4. Front Detection (run\_front\_detection.py) - Tracks ETC fronts on Re-analysis/Model Data
+5. Transect Analysis (run\_transect.py) - Diagnostics on the tracked fronts (i.e. transect analysis based on front)
+6. TODO: create front density plots
+
+# Tracker
+
+**This Tracker module has to be run for this entire diagnostic tool to work.**
+
+
+To get the tracker running, check the tracker\_readme.md file. All the necessary steps to install the tracker is provided here.
+
+After setting up the python environment, change the defines.py file to match your local system.
+
+You can then run the following code:
+
+python run\_tracker.py
+
+## Variables needed:
+* Lon and Lat values for all variables must be the left edges, NOT the middle value of the grid box
+
+### Topography file
+* variable 'hgt' should be in meters
+* variable 'lsm' should be in fractional value in the range 0-1
+
+### Sea-level pressure (SLP)
+* Naming convention should be slp.{year}.nc
+* The variable name inside the netcdf file must match the varname in the filename.
+* Lon should be from 0 to 360 (though -180 to 180 should work, but not thoroughly tested)
+* Lat can be from -90 to 90 or 90 to -90 (again latter not tested thoroughly)
+* Units must be mb/hPa
+* Fillvalues assumed to be np.nan
+* Time variable should have attribute "delta\_t" set in the format YYYY-MM-DD hh:mm:ss
+* Calendar of the time variable should be 'proleptic\_gregorian', 'julian' or 'standard'; if not specified it is assumed to be 'standard'. Units of time should be ***hours since start of the current year.***
+
+# Front Detection
+
+The front detection does not need any settings in defines.py, unless you want to detect fronts only for a subset of years.
+
+## Variables needed:
+* Naming convention should be {var\_name}.{year}.nc
+* The variable name inside the netcdf file must match the varname in the filename.
+* U, V at 850 hPa [m/s]
+* Z at all levels [m]
+* T at all levels [k]
+* mean sea level pressure [hPa/mb]
+* Surface Pressure [Pa]
+* time variable should be the same as above (units should be hours since start of year)
+
+# Transect Analysis
+
+You have to provide the list of variables you need to run the transect analysis in defines.py file.
+And specify the hemispheres in which you want to run the analysis on.
+
+This code automatically separates land and ocean for you.
+
+## Variables needed:
+* Naming convention should be {var\_name}.{year}.nc
+* The variable name inside the netcdf file must match the varname in the filename.
+* Provide variables in the format [time x level x lat x lon], and the appropriate units that you want the outputs in.
+
+# Composite Analysis
+
+You have to provide the list of variables you need to run the composite analysis in defines.py file.
+And specify the hemispheres in which you want to run the analysis on.
+
+The default area to create composites are 100 km x 100 km. You can change this to your liking in defines.py, under area and circ.
+
+This code automatically separates land and ocean for you.
+
+## Variables needed:
+* Naming convention should be {var\_name}.{year}.nc
+* The variable name inside the netcdf file must match the varname in the filename.
+* Provide variables in the format [time x lat x lon], and the appropriate units that you want the outputs in.
+
+
+**Notes:**
+
+Make sure the create\_matlab\_dictionaries is set to True, if you want to run the other modules. You need the temporary matfiles for this to work.
diff --git a/diagnostics/etc_composites/util/composites/.gitignore b/diagnostics/etc_composites/util/composites/.gitignore
new file mode 100644
index 000000000..356f9fb1a
--- /dev/null
+++ b/diagnostics/etc_composites/util/composites/.gitignore
@@ -0,0 +1,2 @@
+backups/
+old/
diff --git a/diagnostics/etc_composites/util/composites/__init__.py b/diagnostics/etc_composites/util/composites/__init__.py
new file mode 100755
index 000000000..68a41db88
--- /dev/null
+++ b/diagnostics/etc_composites/util/composites/__init__.py
@@ -0,0 +1,252 @@
+import numpy as np
+import matplotlib.pyplot as plt
+import math
+
+class dotdict(dict):
+ '''
+ a dictionary that supports dot notation
+ as well as dictionary access notation
+ usage: d = DotDict() or d = DotDict({'val1':'first'})
+ set attributes: d.val2 = 'second' or d['val2'] = 'second'
+ get attributes: d.val2 or d['val2']
+ '''
+ __getattr__ = dict.__getitem__
+ __setattr__ = dict.__setitem__
+ __delattr__ = dict.__delitem__
+
+ def __init__(self, dct):
+ for key, value in dct.items():
+ if hasattr(value, 'keys'):
+ value = DotDict(value)
+ self[key] = value
+
+def plot(H, ax=None):
+ if (ax is None):
+ plt.figure()
+ plt.pcolormesh(H.x, H.y, H.sum/H.cnt);
+ plt.colorbar()
+ plt.show()
+ else:
+ pc = ax.pcolormesh(H.x, H.y, H.sum/H.cnt);
+ plt.colorbar(pc, ax=ax)
+
+def gplot(lon, lat, data):
+ plt.close('all')
+ plt.pcolormesh(lon, lat, data);
+ plt.colorbar()
+ plt.show()
+
+def pplot(lon, lat, dist, values, ax=None):
+ mask = dist < 1500
+ minLat = np.nanmin(lat[mask])
+ maxLat = np.nanmax(lat[mask])
+ minLon = np.nanmin(lon[mask])
+ maxLon = np.nanmax(lon[mask])
+ values[dist > 1500] = np.nan
+
+ if (ax is None):
+ plt.figure()
+ plt.pcolormesh(lon, lat, values)
+ plt.xlim([minLon, maxLon])
+ plt.ylim([minLat, maxLat])
+ plt.show()
+ else:
+ pc = ax.pcolormesh(lon, lat, values)
+ plt.colorbar(pc, ax=ax)
+ ax.set_xlim([minLon, maxLon])
+ ax.set_ylim([minLat, maxLat])
+
+def haversine_distance(lat, lon, centerLat, centerLon):
+ '''
+ Great-circle (haversine) distance in km between (lat, lon) and (centerLat, centerLon); inputs are degrees and may be NumPy arrays (broadcast).
+ '''
+ # mean radius of Earth in km (spherical approximation) — NOT "km per degree"
+ mean_radius_earth = 6371
+
+ # convert latitudes from degrees to radians
+ lat1 = lat * math.pi / 180;
+ lat2 = centerLat * math.pi / 180;
+
+ lon1 = lon * math.pi / 180;
+ lon2 = centerLon * math.pi / 180;
+
+ # angular differences in radians
+ dLat = lat1 - lat2
+ dLon = lon1 - lon2
+
+ R = mean_radius_earth
+
+ # haversine formula: arc length on a sphere of radius R
+ a = np.sin(dLat/2)**2 + np.cos(lat1) * np.cos(lat2) * np.sin(dLon/2)**2
+ c = np.arctan2(np.sqrt(a), np.sqrt(1-a));
+ dist = 2 * R * c;
+
+ return dist
+
+def angle_from_center(lat, lon, centerLat, centerLon):
+ '''
+ Computes the angle between two points.
+ '''
+ # NOTE: have to fix edge issues, for now we will test logic first
+ delta_lat = (lat - centerLat)
+ delta_lon = (lon - centerLon)
+ theta = np.arctan2(delta_lat, delta_lon)
+ return theta
+
+def custom_hist2d(X, Y, values, bins):
+ '''
+ Create a 2d histogram given the x and y values along with the bins
+ Returns sum and cnt histogram 2d matrix
+ '''
+ X = X.flatten()
+ Y = Y.flatten()
+ values = values.flatten()
+
+ ind = (~np.isnan(X)) & (~np.isnan(Y)) & (~np.isnan(values))
+ X = X[ind]
+ Y = Y[ind]
+ values = values[ind]
+
+ x_edges = bins[0]
+ y_edges = bins[1]
+
+ x = x_edges[:-1] + (x_edges[1] - x_edges[0])/2.
+ y = y_edges[:-1] + (y_edges[1] - y_edges[0])/2.
+
+ H_sum, _, _ = np.histogram2d(X, Y, bins=bins, weights=values)
+ H_sum = H_sum.T
+ H_cnt, _, _ = np.histogram2d(X, Y, bins=bins)
+ H_cnt = H_cnt.T
+
+ return dotdict({'sum': H_sum, 'cnt': H_cnt, 'x': x, 'y': y, 'x_edges': x_edges, 'y_edges': y_edges})
+
+def circular_avg_one_step(lat, lon, values, centerLat, centerLon, bins=None):
+ '''
+ get the histogram 2d of count and sum centered at centerLat, centerLon
+ '''
+
+ lat = lat.flatten()
+ lon = lon.flatten()
+ values = values.flatten()
+
+ # calculate distance from center
+ dist = haversine_distance(lat, lon, centerLat, centerLon)
+ dist_mask = (dist < 1500)
+
+ valid_mask = (dist_mask) & (~np.isnan(values))
+
+ ang = angle_from_center(lat, lon, centerLat, centerLon)
+
+ if bins is None:
+ dist_bins = np.arange(0, 1700, 100)
+ ang_bins = np.arange(-180, 180+20, 20)*np.pi/180
+ else:
+ dist_bins = bins[0]
+ ang_bins = bins[1]
+
+ H = custom_hist2d(dist[valid_mask], ang[valid_mask], values[valid_mask], bins=(dist_bins, ang_bins))
+
+ return H
+
+def area_avg_one_step(lat, lon, values, centerLat, centerLon, bins=None):
+ '''
+ get the histogram 2d of count and sum centered at centerLat, centerLon
+ '''
+
+ # calculate distance from center
+ dist = haversine_distance(lat, lon, centerLat, centerLon)
+ dist_y = haversine_distance(lat, centerLon, centerLat, centerLon)
+ dist_x = np.sqrt(dist**2 - dist_y**2)
+
+ # apply mask distance of 1500 km
+ dist_mask = (dist < 1500)
+
+ valid_mask = (dist_mask) & (~np.isnan(values))
+
+ # creating masks to add -ve values
+ if (centerLon < 60):
+ west_mask = ((lon - centerLon) < 0) | (lon > 300)
+ elif (centerLon > 300):
+ east_mask = ((lon - centerLon) >= 0) | (lon < 60)
+ west_mask = np.invert(east_mask)
+ else:
+ west_mask = ((lon - centerLon) < 0)
+ # creating mask so that everything polewards is positive
+ # and anything
+ equatorward_mask = ((np.abs(lat) - np.abs(centerLat)) < 0)
+
+ dist_x[west_mask] *= -1
+ dist_y[equatorward_mask] *= -1
+
+ if not bins:
+ dist_bins = np.arange(-1500, 1700, 100)
+ else:
+ dist_bins = bins[0]
+
+ H = custom_hist2d(dist_x[valid_mask], dist_y[valid_mask], values[valid_mask], bins=(dist_bins, dist_bins))
+
+ return H
+
+def test_plot(val, lon=None, lat=None):
+ # plt.style.use(['classic', 'ggplot'])
+ plt.ion()
+ plt.figure()
+ if (lat is None) | (lon is None):
+ plt.pcolormesh(val, cmap='jet')
+ else:
+ plt.pcolormesh(lon, lat, val, cmap='jet')
+ plt.colorbar()
+
+def plot_polar(theta, r, values, type='pcolormesh'):
+ '''
+ Plots the figure in polar cordinates given theta and r values
+ '''
+ r, theta = np.meshgrid(r,theta)
+
+ fig, ax = plt.subplots(subplot_kw=dict(projection='polar'))
+ if (type == 'contourf'):
+ pc = ax.contourf(theta, r, values, cmap='jet')
+ elif (type == 'pcolormesh'):
+ pc = ax.pcolormesh(theta, r, values, cmap='jet')
+ plt.colorbar(pc)
+ # plt.show()
+
+def plot_area(X, Y, values, type='pcolormesh'):
+ '''
+ Plots the figure in equal area grid
+ '''
+ # plt.style.use('ggplot')
+ fig, ax = plt.subplots()
+ if (type == 'contourf'):
+ pc = ax.contourf(X, Y, values, cmap='jet')
+ elif (type == 'pcolormesh'):
+ pc = ax.pcolormesh(X, Y, values, cmap='jet')
+ plt.colorbar(pc)
+ plt.ylabel('Distance [km]')
+ plt.xlabel('Distance [km]')
+ # plt.show()
+
+def compute_dist_from_cdt(lat, lon, centerLat, centerLon):
+ '''Great-circle (haversine) distance in km; NOTE(review): duplicates haversine_distance above — consider consolidating.'''
+ # mean radius of Earth in km (spherical approximation) — NOT "km per degree"
+ mean_radius_earth = 6371
+
+ # convert latitudes from degrees to radians
+ lat1 = lat * math.pi / 180;
+ lat2 = centerLat * math.pi / 180;
+
+ lon1 = lon * math.pi / 180;
+ lon2 = centerLon * math.pi / 180;
+
+ # angular differences in radians
+ dLat = lat1 - lat2
+ dLon = lon1 - lon2
+
+ R = mean_radius_earth
+
+ # haversine formula: arc length on a sphere of radius R
+ a = np.sin(dLat/2)**2 + np.cos(lat1) * np.cos(lat2) * np.sin(dLon/2)**2
+ c = np.arctan2(np.sqrt(a), np.sqrt(1-a));
+ dist = 2 * R * c;
+
+ return dist
diff --git a/diagnostics/etc_composites/util/defines.py b/diagnostics/etc_composites/util/defines.py
new file mode 100755
index 000000000..4ab65cdc0
--- /dev/null
+++ b/diagnostics/etc_composites/util/defines.py
@@ -0,0 +1,139 @@
+import os
+
+#########################################################################################
+############################ TRACKER MODULE SETTINGS ####################################
+#########################################################################################
+
+# SLP variable
+os.environ['slp_var'] = 'slp'
+
+# SLP file
+os.environ['slp_file'] = '*.' + os.environ['slp_var'] + '.6hr.nc'
+
+# model output filename file
+os.environ['MODEL_OUTPUT_DIR'] = os.environ['DATADIR'] + '/6hr'
+
+# make the necessary directory
+if not os.path.exists(os.environ['WK_DIR'] + '/model'):
+ os.makedirs(os.environ['WK_DIR'] + '/model')
+if not os.path.exists(os.environ['WK_DIR'] + '/obs'):
+ os.makedirs(os.environ['WK_DIR'] + '/obs')
+
+# this is where the tracker code will be run from
+if not os.path.exists(os.environ['WK_DIR'] + '/tmp'):
+ os.makedirs(os.environ['WK_DIR'] + '/tmp')
+
+# this is where I will be converting the model data into chunks of years that the code is run for
+if not os.path.exists(os.environ['WK_DIR'] + '/tmp/data_converts'):
+ os.makedirs(os.environ['WK_DIR'] + '/tmp/data_converts')
+
+# this is where I will be running my code from
+if not os.path.exists(os.environ['WK_DIR'] + '/tmp/RUNDIR'):
+ os.makedirs(os.environ['WK_DIR'] + '/tmp/RUNDIR')
+
+# location of the source code that is required to run
+# this is from pod home files
+source_code_folder = os.environ['POD_HOME'] + '/util/'
+
+# again this has to be provided, but for now I have to change this to match the data
+topo_file = os.environ['topo_file']
+model = 'tmprun'
+
+# the latitude distribution file for ERA-Interim/MERRA
+obs_lat_distrib_file = os.environ['obs_lat_distrib_file']
+
+# over write years have to changed from firstyr to last year
+over_write_years = [int(os.environ['FIRSTYR']), int(os.environ['LASTYR'])]
+
+# this is needed to create the composites, for now I will assume the pre-processing code creates the necessary chunks of data
+slp_data_directory = os.environ['WK_DIR'] + '/tmp/data_converts'
+var_data_directory = os.environ['WK_DIR'] + '/tmp/data_converts'
+
+# location to which to save the outputs from the tracker
+# also this is the location from which the tracker will be run
+# NOTE: the tracker does not run from the source code location
+main_folder_location = os.environ['WK_DIR'] + '/tmp/RUNDIR/'
+
+# creating the links to other folder locations that are called by other python codes
+main_folder = os.path.join(main_folder_location, model) + '/'
+code_folder = os.path.join(main_folder, 'CODE') + '/'
+out_folder = os.path.join(main_folder, 'out_%s'%(model)) + '/'
+read_folder = os.path.join(main_folder, 'read_%s'%(model)) + '/'
+out_files_folder = os.path.join(out_folder, '%s_files'%(model)) + '/'
+slp_folder = os.path.join(main_folder, 'data') + '/'
+images_folder = os.path.join(read_folder, 'images') + '/'
+fronts_folder = os.path.join(read_folder, 'fronts') + '/'
+data_folder = os.path.join(main_folder, 'var_data') + '/'
+
+# output images folders
+model_images_folder = os.environ['WK_DIR'] + '/model/'
+obs_images_folder = os.environ['WK_DIR'] + '/obs/'
+
+# threshold for height to defining land mask and topo.
+# JJJ - b/c of interpolation and non-zero height of some SST region,
+# need to use a value larger than 0 otherwise parts of the ocean become land.
+# thresh_landsea = 50.0/100.0
+thresh_landsea_hgt = 50 # in meters # was 50 for all testing, changed this to match the v2 version of the code
+thresh_landsea_lsm = 50.0/100.0 # in fractional amount of land #was 50 for all testing, changed this to match the v2 version of the code
+
+# Print a lot to screen to debug
+verbose = 0
+
+# Flag to hard copy data files over to the RUN directory
+# If false, it will only create a symbolic link to outputs folder
+hard_copy = False
+
+################ ADDITIONAL OPTIONS
+# check if we have to run the MCMS tracker or not
+if (os.environ['USE_EXTERNAL_TRACKS'] == 'True'):
+ track_file = os.environ['EXTERNAL_TRACKS_FILE']
+
+#########################################################################################
+########################## COMPOSITE ANALYSIS SETTINGS ##################################
+#########################################################################################
+
+composite_years = over_write_years
+
+# Number of cores used to process the datacyc creation, set it to 1 for serial processing
+# Or if available, set it to use the number of cores
+num_cores = 1
+
+folder_6hr = os.environ['DATADIR'] + '/6hr/'
+files = os.listdir(folder_6hr)
+
+# getting the composites var list from the created variable in the "DATADIR"/6hr folder
+# some variable names have to changed to work with the tracker
+composite_var_list = [file.replace(os.environ['CASENAME']+'.', '').replace('.6hr.nc', '') for file in files if not '.psl.6hr.nc' in file]
+if ('u10' in composite_var_list) & ('v10' in composite_var_list):
+ # if both exists then add uv10 to the list
+ composite_var_list.append('uv10')
+# always remove the u10 and v10 from the list
+if ('u10' in composite_var_list):
+ composite_var_list.remove('u10')
+if ('v10' in composite_var_list):
+ composite_var_list.remove('v10')
+
+#renaming the wap500 to w500 used by the code
+if ('omega' in composite_var_list):
+ composite_var_list.remove('omega')
+ composite_var_list.append('w500')
+
+print(f'Variables to run composites: {composite_var_list}')
+
+composite_available_var_list = ['pr', 'prw', 'w500', 'uv10', 'clt']
+
+composite_hem_list = ['NH', 'SH']
+composite_season_list = ['all', 'djf', 'jja', 'son', 'mam', 'warm']
+
+# bins for histogram
+circ = {
+ 'dist_div': 100.,
+ 'ang_div': 20.,
+ 'dist_max': 1500.,
+ }
+
+area = {
+ 'dist_div': 100.,
+ 'dist_max': 1500.
+ }
+
diff --git a/diagnostics/etc_composites/util/reader/__init__.py b/diagnostics/etc_composites/util/reader/__init__.py
new file mode 100755
index 000000000..1321fe65b
--- /dev/null
+++ b/diagnostics/etc_composites/util/reader/__init__.py
@@ -0,0 +1,96 @@
+import scipy.io as sio
+import numpy as np
+from netCDF4 import Dataset
+import datetime as dt
+
+import matplotlib.pyplot as plt
+
+class dot_dict(dict):
+ __getattr__ = dict.__getitem__
+ __setattr__ = dict.__setitem__
+ __delattr__ = dict.__delitem__
+
+ def __init__(self, dict):
+ for key, value in dict.items():
+ if hasattr(value, 'keys'):
+ value = dot_dict(value)
+ self[key] = value
+
+ def find_centers_for_date(self, date):
+ ind = (self.yy == date.year) & (self.mm == date.month) & (self.dd == date.day) & (self.hh == date.hour)
+
+ return dot_dict({'lat': self.lat[ind], 'lon': self.lon[ind], 'date': self.date[ind],
+ 'yy': self.yy[ind], 'mm': self.mm[ind], 'dd': self.dd[ind], 'hh': self.hh[ind],
+ 'warm_flag': self.warm_flag[ind], 'obs_flag': self.obs_flag[ind], 'lm_flag': self.lm_flag[ind],
+ 'all_select_flag': self.all_select_flag[ind], 'obs_select_flag': self.obs_select_flag[ind]})
+
+def get_date(matlab_datenum):
+ date = dt.datetime.fromordinal(int(matlab_datenum) - 366) + dt.timedelta(hours=int((matlab_datenum-int(matlab_datenum))*24))
+ return date
+
+def read_center_from_mat_file(in_file):
+ '''
+ Code to read in the center's matlab file that is created using main_create_dict.py
+ '''
+ data = sio.loadmat(in_file)['cyc']
+
+ lat = []
+ lon = []
+ date = []
+ yy = []
+ mm = []
+ dd = []
+ hh = []
+ warm_flag = []
+ obs_flag = []
+ lm_flag = []
+ all_select_flag = []
+ obs_select_flag = []
+
+ for i in range(data['fulllat'].shape[1]):
+
+ track_lat = np.squeeze(data['fulllat'])[i]
+ track_lon = np.squeeze(data['fulllon'])[i]
+ track_fulldate = np.squeeze(data['fulldate'])[i]
+ track_yy = np.squeeze(data['fullyr'])[i]
+ track_mm = np.squeeze(data['fullmon'])[i]
+ track_dd = np.squeeze(data['fullday'])[i]
+ track_warm_flag = np.squeeze(data['warm_flag'])[i]
+ track_obs_flag = np.squeeze(data['obs_flag'])[i]
+ track_lm_flag = np.squeeze(data['lm_flag'])[i]
+
+ track_date = [get_date(date) for date in np.squeeze(track_fulldate)]
+ track_hh = [int((date - int(date))*24) for date in np.squeeze(track_fulldate)]
+
+ track_obs_select_flag = np.zeros((len(track_hh), 1))
+ track_all_select_flag = np.zeros((len(track_hh), 1))
+
+ lat.extend(np.squeeze(track_lat))
+ lon.extend(np.squeeze(track_lon))
+ date.extend(np.squeeze(track_date))
+ yy.extend(np.squeeze(track_yy))
+ mm.extend(np.squeeze(track_mm))
+ dd.extend(np.squeeze(track_dd))
+ hh.extend(np.squeeze(track_hh))
+ all_select_flag.extend(track_all_select_flag)
+ obs_select_flag.extend(track_obs_select_flag)
+ warm_flag.extend(np.squeeze(track_warm_flag))
+ obs_flag.extend(np.squeeze(track_obs_flag))
+ lm_flag.extend(np.squeeze(track_lm_flag))
+
+ lat = np.squeeze(np.asarray(lat))
+ lon = np.squeeze(np.asarray(lon))
+ date = np.squeeze(np.asarray(date))
+ yy = np.squeeze(np.asarray(yy))
+ mm = np.squeeze(np.asarray(mm))
+ dd = np.squeeze(np.asarray(dd))
+ hh = np.squeeze(np.asarray(hh))
+ warm_flag = np.squeeze(np.asarray(warm_flag))
+ obs_flag = np.squeeze(np.asarray(obs_flag))
+ lm_flag = np.squeeze(np.asarray(lm_flag))
+ all_select_flag = np.squeeze(np.asarray(all_select_flag))
+ obs_select_flag = np.squeeze(np.asarray(obs_select_flag))
+
+
+ return dot_dict({'lat': lat, 'lon': lon, 'date': date, 'yy':yy, 'mm':mm, 'dd':dd, 'hh':hh, 'all_select_flag': all_select_flag, 'obs_select_flag': obs_select_flag, 'warm_flag': warm_flag,
+ 'obs_flag': obs_flag, 'lm_flag': lm_flag})
diff --git a/diagnostics/etc_composites/util/run_composites.py b/diagnostics/etc_composites/util/run_composites.py
new file mode 100755
index 000000000..eda53155d
--- /dev/null
+++ b/diagnostics/etc_composites/util/run_composites.py
@@ -0,0 +1,389 @@
+#!/usr/bin/env python
+
+# supressing warnings, because there are a lot of NaN value warnings
+# comment lines below when debugging
+# only supress in production
+import sys
+if not sys.warnoptions:
+ import warnings
+ warnings.simplefilter("ignore")
+
+import numpy as np
+import matplotlib.pyplot as plt
+import os, glob
+
+import defines
+
+import xarray as xr
+import pandas as pd
+
+import pickle
+# from tqdm import tqdm
+
+import sys
+sys.path.append(os.environ['POD_HOME']+'/util')
+
+import composites
+import reader
+
+import random
+random.seed(0)
+
+# ---------------------- NEW CODE ----------------------------
+
+debug_stop_at_flag = False
+
+###################################################################################
+################## COPY/LINK THE FILES OVER #######################################
+###################################################################################
+
+# var_list = ['tp', 'w500', 'clt', 'slp', 'cls850']
+var_list = defines.composite_var_list
+to_folder = defines.data_folder
+if (not os.path.exists(to_folder)):
+ os.makedirs(to_folder)
+for var in var_list:
+ search_string = os.path.join(defines.var_data_directory, f'{var}.*.nc')
+ print(search_string)
+ for tmp_file in glob.glob(search_string):
+ tmp_file = os.path.basename(tmp_file)
+ from_file = os.path.join(defines.var_data_directory, tmp_file)
+ to_file = os.path.join(to_folder, tmp_file)
+ cmd = f'ln -s {from_file} {to_file}'
+ os.system(cmd)
+
+print('Done symlinking the data files...')
+
+# ------------------------------------------------------------
+# ---------------------- MAIN --------------------------------
+# ------------------------------------------------------------
+
+# defining the years to run the code for
+year_list = range(defines.composite_years[0], defines.composite_years[1]+1)
+
+# land mask
+ds = xr.open_dataset(defines.topo_file)
+if ('time' in ds.coords.keys()):
+ lm = ds.lsm.isel(time=0).values
+else:
+ lm = ds.lsm.values
+lm = (lm > defines.thresh_landsea_lsm)
+ds.close()
+
+# defining the dimensions of the composite values
+circ_dist_bins = np.arange(0, defines.circ['dist_max']+defines.circ['dist_div'], defines.circ['dist_div'])
+circ_ang_bins = np.arange(-180., 180+defines.circ['ang_div'], defines.circ['ang_div'])*np.pi/180
+circ_H_sum = np.zeros((len(circ_ang_bins)-1, len(circ_dist_bins)-1))
+circ_H_cnt = np.zeros((len(circ_ang_bins)-1, len(circ_dist_bins)-1))
+
+area_dist_bins = np.arange(-defines.area['dist_max'], defines.area['dist_max']+defines.area['dist_div'], defines.area['dist_div'])
+area_H_sum = np.zeros((len(area_dist_bins)-1, len(area_dist_bins)-1))
+area_H_cnt = np.zeros((len(area_dist_bins)-1, len(area_dist_bins)-1))
+
+# composite analysis creating init variables
+# ciruclar average, and area average
+comp = {}
+for hemis in defines.composite_hem_list:
+ comp[hemis] = {}
+ for lm_type in ['land', 'ocean']:
+ comp[hemis][lm_type] = {}
+ for season in ['all', 'djf', 'mam', 'jja', 'son', 'warm']:
+ comp[hemis][lm_type][season] = {}
+ for var in defines.composite_var_list:
+ comp[hemis][lm_type][season][var] = {}
+ comp[hemis][lm_type][season][var]['circ_sum'] = np.zeros(circ_H_sum.shape)
+ comp[hemis][lm_type][season][var]['circ_cnt'] = np.zeros(circ_H_cnt.shape)
+ comp[hemis][lm_type][season][var]['area_sum'] = np.zeros(area_H_sum.shape)
+ comp[hemis][lm_type][season][var]['area_cnt'] = np.zeros(area_H_cnt.shape)
+
+# Read in the lat distribution from ERA-Interim/OBS
+obs_distrib = pickle.load(open(os.environ['obs_lat_distrib_file'], 'rb'))
+
+# Randomly sample the dataset so that the bins have the same pct
+actual_obs_pct = obs_distrib['obs_pct']
+actual_all_pct = obs_distrib['all_pct']
+
+# loop through all the years and create the composite
+for year in year_list:
+ # print('Debug: Reading in data ...', end=" ")
+
+ # SLP data as the sample dataset
+ # getting the reference longitude and latitude values
+ slp_file = os.path.join(defines.var_data_directory, f'slp.{year}.nc')
+ slp = xr.open_dataset(slp_file)
+ reflon = slp.lon.values
+ reflat = slp.lat.values
+
+ # creating the co-ordinate grid
+ lon, lat = np.meshgrid(reflon, reflat)
+
+ # opening all the necessary variables to create the composites for
+ ds_list = {}
+ for var in defines.composite_var_list:
+ var_file = os.path.join(defines.var_data_directory, f'{var}.{year}.nc')
+ ds_list[var] = xr.open_dataset(var_file)
+
+ ############# Get Centers for the given date ######################
+ in_file = os.path.join(defines.read_folder, f'{defines.model}_{year}.mat')
+ all_centers = reader.read_center_from_mat_file(in_file)
+ if (not isinstance(slp.indexes['time'], pd.core.indexes.datetimes.DatetimeIndex)):
+ datetimeindex = slp.indexes['time'].to_datetimeindex()
+ else:
+ datetimeindex = slp.indexes['time']
+
+ ############ This portion of the code is used to match the latitude distriubtion from the observations
+ # to the model data
+
+ # Trying to match the lat distrib of the cases with ERA-Interim
+ all_lat = all_centers['lat']
+ all_lon = all_centers['lon']
+ all_obs_flag = all_centers['obs_flag']
+ all_select_flag = all_centers['all_select_flag']
+ obs_select_flag = all_centers['obs_select_flag']
+
+ # bining all the center lat from the model data
+ bins = np.arange(-60, 60, 5)
+ minlength = len(bins)+1
+ model_full_lat_ind = np.digitize(all_lat, bins=bins)
+ model_full_lat_bin_counts = np.bincount(model_full_lat_ind, minlength=minlength)
+
+ # create the pct distrib of model for all centers
+ model_all_ind = (np.abs(all_lat) > 30) & (np.abs(all_lat) < 60)
+ model_all_lat_ind = np.digitize(all_lat[model_all_ind], bins=bins)
+ model_all_lat_bin_counts = np.bincount(model_all_lat_ind, minlength=minlength)
+ model_all_pct = model_all_lat_bin_counts/np.sum(model_all_lat_bin_counts)
+ model_all_pct[model_all_pct == 0] = np.nan
+
+ # getting the number of samples that I need to select for each lat bin
+ model_all_tot_counts = model_all_lat_bin_counts/actual_all_pct
+ total_all_model_count = np.nanmin(model_all_tot_counts)
+ # model_all_min_ind = np.nanargmin(model_all_pct)
+ # model_all_min_count = model_all_lat_bin_counts[model_all_min_ind]
+ # actual_all_min_pct = actual_all_pct[model_all_min_ind]
+ # total_all_model_count = model_all_min_count/actual_all_min_pct
+ model_all_new_distrib = actual_all_pct*total_all_model_count
+ model_all_new_distrib[np.isnan(model_all_new_distrib)] = 0
+ model_all_new_distrib = np.array(model_all_new_distrib, dtype=np.int)
+
+ # create the pct distrib of model for SH Ocean Warm
+ model_obs_ind = (np.abs(all_lat) > 30) & (np.abs(all_lat) < 60) & (all_obs_flag == 1)
+ bins = np.arange(-60, 60, 5)
+ model_obs_lat_ind = np.digitize(all_lat[model_obs_ind], bins=bins)
+ model_obs_lat_bin_counts = np.bincount(model_obs_lat_ind, minlength=minlength)
+ model_obs_pct = model_obs_lat_bin_counts/np.sum(model_obs_lat_bin_counts)
+ model_obs_pct[model_obs_pct == 0] = np.nan
+
+ # getting the number of samples that I need to select for each lat bin
+ model_obs_tot_counts = model_obs_lat_bin_counts/actual_obs_pct
+ total_obs_model_count = np.nanmin(model_obs_tot_counts)
+ # model_obs_min_ind = np.nanargmin(model_obs_pct)
+ # model_obs_min_count = model_obs_lat_bin_counts[model_obs_min_ind]
+ # actual_obs_min_pct = actual_obs_pct[model_obs_min_ind]
+ # total_obs_model_count = model_obs_min_count/actual_obs_min_pct
+ model_obs_new_distrib = actual_obs_pct*total_obs_model_count
+ model_obs_new_distrib[np.isnan(model_obs_new_distrib)] = 0
+ model_obs_new_distrib = np.array(model_obs_new_distrib, dtype=np.int)
+
+ # sampling the model distribution to match the values
+ model_indexes = np.arange(0, len(all_lat))
+ for i in range(1, len(bins)+1):
+ # setting up the all_select_flag
+ tmp_ind = (model_full_lat_ind == i) & (model_all_ind)
+ tmp_indexes = model_indexes[tmp_ind].tolist()
+ sample_ind = random.sample(tmp_indexes, model_all_new_distrib[i])
+ all_select_flag[sample_ind] = 1
+
+ # setting up the obs_select_flag
+ tmp_ind = (model_full_lat_ind == i) & (model_obs_ind)
+ tmp_indexes = model_indexes[tmp_ind].tolist()
+ sample_ind = random.sample(tmp_indexes, model_obs_new_distrib[i])
+ obs_select_flag[sample_ind] = 1
+
+ # setting the flags for which centers to use under
+ # distributions that match the enitre erai lat distrib
+ # and selected SH Warm Ocean lat distrib
+ all_centers['all_select_flag'] = all_select_flag
+ all_centers['obs_select_flag'] = obs_select_flag
+
+ # loop through all time steps in the year
+ for t_step in range(1, len(slp.time)):
+
+ # creating a datetime variable for the current time step
+ # date = pd.Timestamp(slp.time[t_step].values).to_pydatetime()
+ date = datetimeindex[t_step].to_pydatetime()
+ # print(date)
+
+ # getting the season for the given time step date
+ t_step_month = date.month
+ if (t_step_month == 12) | (t_step_month == 1) | (t_step_month == 2):
+ t_season = 'djf'
+ elif (t_step_month == 3) | (t_step_month == 4) | (t_step_month == 5):
+ t_season = 'mam'
+ elif (t_step_month == 6) | (t_step_month == 7) | (t_step_month == 8):
+ t_season = 'jja'
+ elif (t_step_month == 9) | (t_step_month == 10) | (t_step_month == 11):
+ t_season = 'son'
+
+ t_season_warm = False
+ if (t_step_month == 11) | (t_step_month == 12) | (t_step_month == 1) | (t_step_month == 2) | (t_step_month == 3):
+ t_season_warm = True
+
+ # check if the t_season is requested in the defines.py
+ # atleast 'all' should be given in the season_list
+ if ('all' not in defines.composite_season_list) & (t_season not in defines.composite_season_list) & ('warm' not in defines.composite_season_list):
+ continue
+
+ fd_date = date
+ centers = all_centers.find_centers_for_date(fd_date)
+
+ # looping through all the centers for the given data
+ for i_center, _ in enumerate(centers.lat):
+ date_str = fd_date.strftime('%Y%m%d%H')
+
+ center = {}
+ for key in centers.keys():
+ center[key] = centers[key][i_center]
+
+ # skip all cyclones close to the equator or poles
+ if (np.abs(center['lat']) > 60) | (np.abs(center['lat']) < 30):
+ continue
+
+ # testing edge cases
+ # put a debug stop in the compute_dist_from_cdt and check for edges
+ # if (center['lon'] > 40) & (center['lon'] < 320):
+ # continue
+
+ if (center['lat'] >= 0):
+ hemis_type = 'NH'
+ elif (center['lat'] < 0):
+ hemis_type = 'SH'
+
+ # # debug: running only for edge cases
+ # if not ((center['lon'] < -165) | (center['lon'] > 165)):
+ # continue
+
+ # # distance from given center
+ # dist_grid = composites.compute_dist_from_cdt(lat, lon, center['lat'], center['lon'])
+ #
+ # # index of center of cyclone
+ # c_ind = np.nanargmin(dist_grid)
+ # cx, cy = np.unravel_index(c_ind, dist_grid.shape)
+ # lm_flag = lm[cx, cy]
+ # if (lm_flag):
+ # lm_type = 'land'
+ # else:
+ # lm_type = 'ocean'
+
+ if (center['lm_flag']):
+ lm_type = 'land'
+ else:
+ lm_type = 'ocean'
+
+ # if this particular center is not selected as either part of all or SH ocean warm distrib skip this center
+ if (center['all_select_flag'] == 0.0) & (center['obs_select_flag'] == 0.0):
+ continue
+
+ for var in defines.composite_var_list:
+ ds_tstep = ds_list[var].isel(time=t_step)
+ data = ds_tstep[var].values
+
+ # creating the circular average values and area average values
+ circ_H = composites.circular_avg_one_step(lat, lon, data, center['lat'], center['lon'], bins=(circ_dist_bins, circ_ang_bins))
+ # if this is part of the all distrib we include it in our composites
+ if (center['all_select_flag'] == 1.0):
+ comp[hemis_type][lm_type]['all'][var]['circ_sum'] += circ_H.sum
+ comp[hemis_type][lm_type]['all'][var]['circ_cnt'] += circ_H.cnt
+
+ comp[hemis_type][lm_type][t_season][var]['circ_sum'] += circ_H.sum
+ comp[hemis_type][lm_type][t_season][var]['circ_cnt'] += circ_H.cnt
+
+ # if this is part of the SH Ocean Warm distrib we include it in our composites
+ if (t_season_warm):
+ if (hemis_type == 'SH') & (lm_type == 'ocean'):
+ if (center['obs_select_flag'] == 1.0):
+ comp[hemis_type][lm_type]['warm'][var]['circ_sum'] += circ_H.sum
+ comp[hemis_type][lm_type]['warm'][var]['circ_cnt'] += circ_H.cnt
+ else:
+ if (center['all_select_flag'] == 1.0):
+ comp[hemis_type][lm_type]['warm'][var]['circ_sum'] += circ_H.sum
+ comp[hemis_type][lm_type]['warm'][var]['circ_cnt'] += circ_H.cnt
+
+ area_H = composites.area_avg_one_step(lat, lon, data, center['lat'], center['lon'], bins=(area_dist_bins, area_dist_bins))
+ # if this is part of the all distrib we include it in our composites
+ if (center['all_select_flag'] == 1.0):
+ comp[hemis_type][lm_type]['all'][var]['area_sum'] += area_H.sum
+ comp[hemis_type][lm_type]['all'][var]['area_cnt'] += area_H.cnt
+
+ comp[hemis_type][lm_type][t_season][var]['area_sum'] += area_H.sum
+ comp[hemis_type][lm_type][t_season][var]['area_cnt'] += area_H.cnt
+
+ # if this is part of the SH Ocean Warm distrib we include it in our composites, only if the obs_select_flag is set
+ if (t_season_warm):
+ if (hemis_type == 'SH') & (lm_type == 'ocean'):
+ if (center['obs_select_flag'] == 1.0):
+ comp[hemis_type][lm_type]['warm'][var]['area_sum'] += area_H.sum
+ comp[hemis_type][lm_type]['warm'][var]['area_cnt'] += area_H.cnt
+ else:
+ if (center['all_select_flag'] == 1.0):
+ comp[hemis_type][lm_type]['warm'][var]['area_sum'] += area_H.sum
+ comp[hemis_type][lm_type]['warm'][var]['area_cnt'] += area_H.cnt
+
+ # if (var == 'slp') & (hemis_type == 'SH'):
+ # plt.close('all')
+ # plt.figure()
+ # plt.title(f'Variable: {var}')
+ # plt.subplot(1,2,1)
+ # plt.pcolormesh(lon, lat, slp.isel(time=t_step).slp.values, vmin=900, vmax=1100, cmap='jet'); plt.colorbar()
+ # plt.plot(center['lon'], center['lat'], 'r*');
+ # plt.subplot(1,2,2)
+ # plt.pcolormesh(area_dist_bins, area_dist_bins, area_H.sum/area_H.cnt, vmin=900, vmax=1100, cmap='jet')
+ # plt.colorbar()
+ # import pdb; pdb.set_trace()
+ # plt.show()
+
+ # end for var
+
+ # end i_center
+
+
+ # if (debug_stop_at_flag):
+ # if (date > dt.datetime(year, 1, 31)):
+ # break
+
+ # end t_step
+
+# end year
+
+
+# saving the data files
+comp['x'] = area_H.x
+comp['y'] = area_H.y
+comp['x_edges'] = area_H.x_edges
+comp['y_edges'] = area_H.y_edges
+pickle.dump(comp, open(os.path.join(defines.read_folder, 'composites.pkl'), 'wb'))
+
+for hemis_type in defines.composite_hem_list:
+ for var in defines.composite_var_list:
+ for season in defines.composite_season_list:
+ for lm_type in ['land', 'ocean']:
+
+ tmp_dict = comp[hemis_type][lm_type][season][var]
+
+ # plt.close('all')
+ # tmp = tmp_dict['circ_sum']/tmp_dict['circ_cnt']
+ # composites.plot_polar(circ_H.y, circ_H.x, tmp)
+ #ERA-Interim plt.title(f'{var.upper()} {lm_type} {hemis_type}')
+ # out_file = os.path.join(defines.images_folder, f'{defines.model}_{defines.over_write_years[0]}_{defines.over_write_years[1]}_circ_{var}_{hemis_type}_{lm_type}.png')
+ # plt.savefig(out_file, dpi=300.)
+
+ plt.close('all')
+ tmp = tmp_dict['area_sum']/tmp_dict['area_cnt']
+ composites.plot_area(area_H.y_edges, area_H.x_edges, tmp)
+ plt.title(f'{var.upper()} {lm_type} {hemis_type}')
+ # out_file = os.path.join(defines.images_folder, f'{defines.model}_{defines.over_write_years[0]}_{defines.over_write_years[1]}_area_{var}_{hemis_type}_{lm_type}_{season.upper()}.png')
+ out_file = os.path.join(defines.model_images_folder, f'{os.environ["CASENAME"]}_area_{var}_{hemis_type}_{lm_type}_{season.upper()}.png')
+ plt.title(f'{os.environ["CASENAME"]} {var.upper()} [{hemis_type.upper()}-{lm_type.upper()}-{season.upper()}]')
+ plt.savefig(out_file, dpi=100.)
+
+
+
diff --git a/diagnostics/etc_composites/util/run_create_dict.py b/diagnostics/etc_composites/util/run_create_dict.py
new file mode 100644
index 000000000..47ee55dfd
--- /dev/null
+++ b/diagnostics/etc_composites/util/run_create_dict.py
@@ -0,0 +1,198 @@
+#!/usr/bin/python
+import scipy.io as sio
+import numpy as np
+import os
+import pandas as pd
+
+import defines
+import xarray as xr
+import composites
+
+from scipy.stats import mode
+from datetime import date
+from numpy.core.records import fromarrays
+
+
+def read_in_txt_file(start_year, end_year):
+ ''' Code used to read in a text file with the tracks.
+ The tracks file must have 8 columns separated by white spaces.
+ The columns are as follows:
+ 1. yy - year
+ 2. mm - month
+ 3. dd - day
+ 4. hh - hour
+ 5. lat - latitude of cyclone center (provided as integer value, (90 - lat)*100)
+ 6. lon - longitude of cyclone center (provided as integer value, 100*lon)
+ 7. slp - SLP at cyclone center
+ 8. uid - unique identifier for each track
+ lat & lon is provided in this format to avoid decimal places and to avoid negative values
+ lon is provided from 0 to 360, hence no need to add any value
+ '''
+
+ # the input track file has to be provided as track_data.txt in the "inputdata/{model}/6hr" directory, under the model name
+ # the tracks are normally tracked on 6 hourly data
+ track_file = defines.track_file
+
+ main_df = pd.read_csv(track_file, sep='\s+')
+
+ # extracting only the certain columns I need from the main dataframe
+ df = main_df.iloc[:, [0, 1, 2, 3, 4, 5, 6, 7]].copy()
+ df = df.copy()
+
+ # naming the dataframe columns
+ df.columns = ['yy', 'mm', 'dd', 'hh', 'lat', 'lon', 'slp', 'usi']
+ df.lat = 90 - df.lat/100.
+ df.lon = df.lon/100.
+
+ return df
+
+def read_in_MCMS_txt_file(base_dir, in_model, start_year, end_year):
+
+ # Read in the cyc file
+ if (start_year == end_year):
+ in_file = os.path.join(base_dir, 'out_%s_output_%04d.txt'%(in_model, start_year))
+ else:
+ in_file = os.path.join(base_dir, 'out_%s_output_%04d_%04d.txt'%(in_model, start_year, end_year))
+
+ main_df = pd.read_csv(in_file, sep='\s+')
+
+ # extracting only the certain columns I need from the main dataframe
+ df = main_df.iloc[:, [0, 1, 2, 3, 5, 6, 8, 11, 14, 15]].copy()
+
+ # naming the dataframe columns
+ df.columns = ['yy', 'mm', 'dd', 'hh', 'lat', 'lon', 'slp', 'flags', 'csi', 'usi']
+ df.lat = 90. - df.lat/100.
+ df.lon = df.lon/100.
+
+ return df
+
+########################################
+################ Main Code #############
+########################################
+
+start_year = defines.over_write_years[0]
+end_year = defines.over_write_years[1]
+
+in_model = defines.model
+base_dir = os.path.join(defines.main_folder_location, in_model, 'read_%s'%(in_model))
+if (not os.path.exists(base_dir)):
+ os.makedirs(base_dir)
+
+# reading in txt file with the tracks for all the years
+# This depends on which tracker I am using
+# If the MCMS tracker is run then read in the MCMS output
+if (os.environ['USE_EXTERNAL_TRACKS'] == 'True'):
+ df = read_in_txt_file(start_year, end_year)
+elif (os.environ['USE_EXTERNAL_TRACKS'] == 'False'):
+ df = read_in_MCMS_txt_file(base_dir, in_model, start_year, end_year)
+
+# Reading in the topographic information
+# also reading in the lat/lon values for the topo file
+ds = xr.open_dataset(defines.topo_file)
+reflat = ds.lat.values
+reflon = ds.lon.values
+reflon, reflat = np.meshgrid(reflon, reflat)
+if ('time' in ds.coords.keys()):
+ lm = ds.lsm.isel(time=0).values
+else:
+ lm = ds.lsm.values
+lm = (lm > defines.thresh_landsea_lsm)
+
+# loop through all the years and create the datacycs
+for i_year in range(start_year, end_year+1):
+
+ # get the usi values of the tracks that are for the given year
+ uni_usi = df.usi[df['yy'] == i_year].unique()
+
+ # create empty arrays
+ temp_uid = []
+ temp_uidsingle = []
+ temp_fulllon = []
+ temp_fulllat = []
+ temp_fullslp = []
+
+ temp_fulldate = []
+ temp_date1 = []
+ temp_fullyr = []
+ temp_fullmon = []
+ temp_fullday = []
+ temp_fullhr = []
+ temp_mon_mode = []
+ temp_yr_mode = []
+
+ temp_lm_flag = []
+ temp_warm_flag = []
+ temp_obs_flag = []
+
+ # loop through all the unique usi values that have a cyclone for the given i_year
+ for i_ind, i_usi in enumerate(uni_usi):
+
+ # getting the index that match each usi value
+ usi_ind = df.index[df.usi == i_usi].tolist()
+
+ # creating numpy arrays from dataframe for processing full_date
+ yy = np.asarray(df.yy[usi_ind],dtype=int)
+ mm = np.asarray(df.mm[usi_ind],dtype=int)
+ dd = np.asarray(df.dd[usi_ind],dtype=int)
+ hh = np.asarray(df.hh[usi_ind],dtype=int)
+
+ # creating full_date
+ # matlab datenum format
+ full_date = [date.toordinal(date(i_yy, i_mm, i_dd))+366.+i_hh/24. for i_yy, i_mm, i_dd, i_hh in zip(yy, mm, dd, hh)]
+
+ # check if the total track time extends at least 36 hours
+ # we have to account for delta_t to have the capability to run the tracker for hourly or 6-hourly data
+ delta_t = (full_date[1] - full_date[0])*24
+ if (len(hh)*delta_t) < 36:
+ continue
+
+ # checking if the mode year is the current year, if not this track will be passed onto the next year or previous year accordingly
+ if (mode(yy).mode[0] != i_year):
+ continue
+
+ # appending to the temporary list of data variables, to be saved as mat files
+ temp_yr_mode.append(mode(yy).mode[0])
+ temp_fulldate.append(full_date)
+ temp_date1.append(full_date[0])
+ temp_fullyr.append(yy)
+ temp_fullmon.append(mm)
+ temp_fullday.append(dd)
+ temp_fullhr.append(hh)
+ temp_mon_mode.append(mode(mm).mode[0])
+
+ temp_uid.append(np.asarray(df.usi[usi_ind], dtype=float))
+ temp_uidsingle.append(np.asarray(df.usi[usi_ind[0]], dtype=float))
+ temp_fulllon.append(np.asarray(df.lon[usi_ind], dtype=float))
+ temp_fulllat.append(np.asarray(df.lat[usi_ind], dtype=float))
+ temp_fullslp.append(np.asarray(df.slp[usi_ind], dtype=float))
+
+ # creating flags that will be used in the POD
+ # sh_ocean_warm called the obs_flag
+ # land/ocean called the lm_flag
+ lm_flag = np.zeros((len(df.lat[usi_ind])), dtype=int)
+ warm_flag = np.zeros((len(df.lat[usi_ind])), dtype=int)
+ obs_flag = np.zeros((len(df.lat[usi_ind])), dtype=int)
+ for i, (ilon, ilat, imm) in enumerate(zip(df.lon[usi_ind], df.lat[usi_ind], mm)):
+ dist_grid = composites.compute_dist_from_cdt(reflat, reflon, ilat, ilon)
+ c_ind = np.nanargmin(dist_grid)
+ cx, cy = np.unravel_index(c_ind, dist_grid.shape)
+ lm_flag[i] = int(lm[cx, cy])
+ if ((imm == 11) | (imm == 12) | (imm == 1) | (imm == 2) | (imm == 3)):
+ warm_flag[i] = 1
+ if (ilat < 0) & (warm_flag[i] == 1) & (lm_flag[i] == 0):
+ obs_flag[i] = 1
+
+
+ temp_lm_flag.append(np.asarray(lm_flag, dtype=int))
+ temp_warm_flag.append(np.asarray(warm_flag, dtype=int))
+ temp_obs_flag.append(np.asarray(obs_flag, dtype=int))
+
+
+ # creating a record to save mat files, like the one jimmy creates using matlab
+ out_cyc = fromarrays([temp_uid, temp_uidsingle, temp_fulllon, temp_fulllat, temp_fullslp, temp_fulldate, temp_date1, temp_fullyr, temp_fullmon, temp_fullday, temp_fullhr, temp_mon_mode, temp_yr_mode, temp_lm_flag, temp_warm_flag, temp_obs_flag], names=['UID', 'UIDsingle', 'fulllon', 'fulllat', 'fullslp', 'fulldate', 'date1', 'fullyr', 'fullmon', 'fullday', 'fullhr', 'mon_mode', 'yr_mode', 'lm_flag', 'warm_flag', 'obs_flag'])
+
+ # saving mat files for each year
+ out_mat_file = os.path.join(defines.main_folder_location, '%s/read_%s/%s_%d.mat'%(in_model, in_model, in_model, i_year))
+ sio.savemat(out_mat_file, {'cyc':out_cyc})
+
+ print(f'Created .mat file for {i_year}.')
diff --git a/diagnostics/etc_composites/util/run_track_stats.py b/diagnostics/etc_composites/util/run_track_stats.py
new file mode 100755
index 000000000..21e03e6f2
--- /dev/null
+++ b/diagnostics/etc_composites/util/run_track_stats.py
@@ -0,0 +1,253 @@
+#!/usr/bin/env python
+
+# suppressing warnings, because there are a lot of NaN value warnings
+# comment out the lines below when debugging
+# only suppress in production
+import sys
+if not sys.warnoptions:
+ import warnings
+ warnings.simplefilter("ignore")
+
+import numpy as np
+import scipy.io as sio
+import cartopy
+import matplotlib.ticker as mticker
+import matplotlib.pyplot as plt
+from cartopy.mpl.gridliner import LONGITUDE_FORMATTER, LATITUDE_FORMATTER
+import defines
+import os
+import pickle
+
+def hist_2d(lon, lat, val=None, bins=None):
+ '''
+ Given lat/lon values, we create a 2d histogram global map.
+ '''
+
+ if (bins is None):
+ # creating my bins
+ lat_div = 10.
+ lon_div = 10.
+ bins = (np.arange(-180, 180+lon_div, lon_div), np.arange(-90, 90+lat_div, lat_div))
+
+ # flatten lat and lon into 1d arrays
+ lat = np.array(lat).flatten()
+ lon = np.array(lon).flatten()
+ lon[lon >= 180.] -= 360.
+
+ # make sure the lengths of lat and lon match
+ assert(len(lat) == len(lon))
+ if (val is not None):
+ val = np.array(val).flatten()
+ assert(len(lon) == len(val))
+
+ # bins for the latitude and longitude
+ lon_bins = bins[0]
+ lat_bins = bins[1]
+ lon_mids = lon_bins[:-1] + (lon_bins[1] - lon_bins[0])/2.
+ lat_mids = lat_bins[:-1] + (lat_bins[1] - lat_bins[0])/2.
+
+ H_cnts, x, y = np.histogram2d(lon, lat, bins=bins)
+ if (val is None):
+ H_sums = H_cnts
+ else:
+ H_sums, x, y = np.histogram2d(lon, lat, bins=bins, weights=val)
+
+ return {'cnts': H_cnts.T, 'sums': H_sums.T, 'lon': lon_mids, 'lat': lat_mids}
+
+def global_map(ax=None):
+ '''Create a global map for plotting the figures.'''
+ if (ax is None):
+ plt.style.use('seaborn-talk')
+ ax = plt.axes(projection=cartopy.crs.PlateCarree())
+ else:
+ ax.coastlines(lw=1.)
+ ax.set_extent([-180, 180, -90, 90])
+ gl = ax.gridlines(crs=cartopy.crs.PlateCarree(), draw_labels=True, lw=2., color='gray', alpha=0.5, linestyle='--')
+ gl.xlabels_top = False
+ gl.ylabels_right = False
+ gl.xlocator = mticker.FixedLocator([-180, -90, 0, 90, 180])
+ gl.ylocator = mticker.FixedLocator([-90, -45, 0, 45, 90])
+ gl.xformatter = LONGITUDE_FORMATTER
+ gl.yformatter = LATITUDE_FORMATTER
+
+ return ax
+
+def read_tracks(year):
+ '''Reading in tracks for a given year'''
+ in_file = os.path.join(defines.read_folder, f'{defines.model}_{year}.mat')
+ tracks = sio.loadmat(in_file)
+ return tracks['cyc'][0]
+
+def plot_2d(ax, x, y, z):
+ _ = ax.contourf(x, y, z)
+ _ = ax.colorbar(cax=cax)
+
+def get_data(tracks):
+
+ g_lat = []
+ g_lon = []
+ g_slp = []
+ l_lat = []
+ l_lon = []
+ l_slp = []
+ lat = []
+ lon = []
+ slp = []
+ for track in tracks:
+
+ # lysis
+ l_lat.append(track['fulllat'][0][0])
+ l_lon.append(track['fulllon'][0][0])
+ l_slp.append(track['fullslp'][0][0])
+
+ # genesis
+ g_lat.append(track['fulllat'][0][-1])
+ g_lon.append(track['fulllon'][0][-1])
+ g_slp.append(track['fullslp'][0][-1])
+
+ # all
+ lat.extend(track['fulllat'][0].tolist())
+ lon.extend(track['fulllon'][0].tolist())
+ slp.extend(track['fullslp'][0].tolist())
+
+ return {'genesis': {'lat': g_lat, 'lon': g_lon, 'slp': g_slp}, \
+ 'lysis': {'lat': l_lat, 'lon': l_lon, 'slp': l_slp}, \
+ 'all': {'lat': lat, 'lon': lon, 'slp': slp}}
+
+def track_density_2d(lon, lat, ax=None):
+ H = hist_2d(lon, lat)
+ if (ax is not None):
+ # levels=np.arange(0, 0.004, 0.0001)
+ levels=10 # cuz I don't know the range of the colorbar
+ cf = ax.contourf(H['lon'], H['lat'], H['cnts']/np.sum(H['cnts']), cmap='jet', levels=levels, extend='max')
+ _ = plt.colorbar(cf, ax=ax, shrink=0.5, extend='max')
+ return H
+
+def track_feature_density_2d(lon, lat, ax=None):
+ H = hist_2d(lon, lat)
+ if (ax is not None):
+ levels=np.arange(0, 0.004, 0.0001)
+ cf = ax.contourf(H['lon'], H['lat'], H['cnts']/np.sum(H['cnts']), cmap='jet', levels=levels, extend='max')
+ _ = plt.colorbar(cf, ax=ax, shrink=0.5, extend='max')
+ return H
+
+############### main test code #################
+
+# check if mat file exists, if not run the mat file creator code
+mat_file = os.path.join(defines.read_folder, f'{defines.model}_{defines.over_write_years[0]}.mat')
+if (not os.path.exists(mat_file)):
+ print('No MAT file, create the mat file from tracker output')
+ os.system(f"python3 {os.environ['POD_HOME']}/util/run_create_dict.py")
+
+# loop through all the years
+# this part of the code is where I have to keep adding to the histogram
+# because now we only have to count one occurrence per grid, not all occurrences
+
+# Defining the bins
+lat_div = 10.
+lon_div = 10.
+bins = (np.arange(-180, 180+lon_div, lon_div), np.arange(-90, 90+lat_div, lat_div))
+lon_mids = bins[0][:-1] + (bins[0][1] - bins[0][0])/2.
+lat_mids = bins[1][:-1] + (bins[1][1] - bins[1][0])/2.
+
+# initializing dict that I need
+init_shape = (len(lat_mids), len(lon_mids))
+stats = {}
+for stat_type in ['all', 'genesis', 'lysis']:
+ stats[stat_type] = {}
+ if (stat_type == 'all'):
+ stats[stat_type]['feature_density'] = np.zeros(init_shape)
+ stats[stat_type]['track_density'] = np.zeros(init_shape)
+ else:
+ stats[stat_type] = np.zeros(init_shape)
+
+g_lon = []
+g_lat = []
+l_lon = []
+l_lat = []
+# loop through all the years and save the tracks
+for year in range(defines.over_write_years[0], defines.over_write_years[1]+1):
+ tracks = read_tracks(year)
+ for track in tracks:
+ lon = np.squeeze(track['fulllon'])
+ lat = np.squeeze(track['fulllat'])
+
+ # considering only lat cases between -60 and 60
+ ind = (np.abs(lat) < 60)
+ if (not np.any(ind)):
+ continue
+ lon = lon[ind]
+ lat = lat[ind]
+
+ l_lon.append(lon[-1])
+ l_lat.append(lat[-1])
+ g_lon.append(lon[0])
+ g_lat.append(lat[0])
+
+ # feature density
+ H = hist_2d(lon, lat, bins=bins)
+
+ # feature density - count all occurrences
+ stats['all']['feature_density'] += H['cnts']
+ stats['all']['track_density'] += np.double(H['cnts'] > 0)
+
+# lysis
+H = hist_2d(l_lon, l_lat)
+stats['lysis'] = H['cnts']
+
+# genesis
+H = hist_2d(g_lon, g_lat)
+stats['genesis'] = H['cnts']
+
+# normalizing all the global histograms
+stats['genesis'] /= np.nansum(stats['genesis'])
+stats['lysis'] /= np.nansum(stats['lysis'])
+stats['all']['feature_density'] /= np.nansum(stats['all']['feature_density'])
+stats['all']['track_density'] /= np.nansum(stats['all']['track_density'])
+
+# Creating the necessary plots
+# track density
+plt.close('all')
+
+out_file = os.path.join(defines.model_images_folder, f"{os.environ['CASENAME']}_track_stats.png")
+cmap = 'jet'
+
+# creating the 2x2 plot
+fig, axes = plt.subplots(ncols=2, nrows=2, subplot_kw={'projection': cartopy.crs.PlateCarree()}, figsize=(16,8))
+
+ax = global_map(axes[0, 0])
+levels = np.arange(0, 0.011, 0.001)
+ax.set_title(f'Feature Density')
+cf = ax.contourf(lon_mids, lat_mids, stats['all']['feature_density'], cmap=cmap, extend='max', levels=levels)
+plt.colorbar(cf, ax=ax, shrink=0.7, ticks=np.arange(0, 0.015, 0.005), orientation='horizontal')
+ax.set_ylim(-60, 60)
+
+ax = global_map(axes[0, 1])
+ax.set_title(f'Track Density')
+levels = np.arange(0, 0.011, 0.001)
+cf = ax.contourf(lon_mids, lat_mids, stats['all']['track_density'], cmap=cmap, extend='max', levels=levels)
+plt.colorbar(cf, ax=ax, shrink=0.7, ticks=np.arange(0, 0.015, 0.005), orientation='horizontal')
+ax.set_ylim(-60, 60)
+
+ax = global_map(axes[1, 0])
+ax.set_title(f'Genesis')
+levels = np.arange(0, 0.011, 0.001)
+cf = ax.contourf(lon_mids, lat_mids, stats['genesis'], cmap=cmap, extend='max', levels=levels)
+plt.colorbar(cf, ax=ax, shrink=0.7, ticks=np.arange(0, 0.015, 0.005), orientation='horizontal')
+ax.set_ylim(-60, 60)
+
+ax = global_map(axes[1, 1])
+ax.set_title(f'Lysis')
+levels = np.arange(0, 0.011, 0.001)
+cf = ax.contourf(lon_mids, lat_mids, stats['lysis'], cmap=cmap, extend='max', levels=levels)
+plt.colorbar(cf, ax=ax, shrink=0.7, ticks=np.arange(0, 0.015, 0.005), orientation='horizontal')
+ax.set_ylim(-60, 60)
+
+plt.suptitle(f'{os.environ["CASENAME"]} ({defines.over_write_years[0]} - {defines.over_write_years[1]})')
+plt.tight_layout()
+plt.savefig(out_file, dpi=300.)
+plt.close('all')
+
+out_file = os.path.join(os.environ['WK_DIR'], 'track_stats.pkl')
+out_struct = {'lon': lon_mids, 'lat': lat_mids, 'stats': stats}
+pickle.dump(out_struct, open(out_file, 'wb'))
diff --git a/diagnostics/etc_composites/util/run_tracker.py b/diagnostics/etc_composites/util/run_tracker.py
new file mode 100755
index 000000000..aa2bba898
--- /dev/null
+++ b/diagnostics/etc_composites/util/run_tracker.py
@@ -0,0 +1,28 @@
+#!/usr/bin/env python
+################################# INSTRUCTIONS ##########################
+# Edit the defines.py with the folder and information
+# then run this code run_tracker.py
+
+import os
+print('Copying over the files to run the tracker...')
+cmd = 'cp %s/util/defines.py %s/util/tracker/defines.py'%(os.environ['POD_HOME'], os.environ['POD_HOME'])
+os.system(cmd)
+import defines
+import run_tracker_setup
+
+################## MAIN CODE #################
+
+# Initially create the folders
+# then copy the codes over
+run_tracker_setup.init_setup()
+run_tracker_setup.copy_code_over()
+
+os.chdir(defines.code_folder)
+print ("Curently in folder: ", os.getcwd())
+
+####### running the code to track ###########
+os.system('python3 setup_v4.py')
+os.system('python3 center_finder_v4.py')
+os.system('python3 track_finder_v4.py')
+os.system('python3 read_mcms_v4.py template_temp_multi_1.py')
+os.system('python3 read_mcms_v4.py template_temp_multi_2.py')
diff --git a/diagnostics/etc_composites/util/run_tracker_setup.py b/diagnostics/etc_composites/util/run_tracker_setup.py
new file mode 100644
index 000000000..fb4f9b0ba
--- /dev/null
+++ b/diagnostics/etc_composites/util/run_tracker_setup.py
@@ -0,0 +1,62 @@
+import defines
+import os
+
+def init_setup():
+ '''
+ Creates the necessary directories and copies over the slp data into the folders
+ '''
+
+ # Create main folder specified in defines
+ if not os.path.exists(defines.main_folder):
+ print ("Making Directory!")
+ os.makedirs(defines.main_folder)
+ os.makedirs(defines.code_folder)
+ os.makedirs(defines.out_folder)
+ os.makedirs(defines.out_files_folder)
+ os.makedirs(defines.slp_folder)
+ os.makedirs(defines.read_folder)
+ os.makedirs(defines.images_folder)
+ os.makedirs(defines.fronts_folder)
+ print ("Completed making directories...")
+ else:
+ print ("Folder already exists!")
+
+ if not defines.slp_data_directory:
+ print ("SLP source directory not defined, copy slp data into the data folder!")
+ elif (defines.hard_copy):
+ sys_cmd = 'rsync %sslp*.nc %s'%(defines.slp_data_directory, defines.slp_folder)
+ os.system(sys_cmd)
+ print ("Loaded slp data files into the data folder...")
+ else:
+ for root, dirs, files in os.walk(defines.slp_data_directory):
+ for fn in files:
+ if (fn.endswith('.nc') & fn.startswith('slp')):
+ full_file = os.path.join(root, fn)
+ link_file = os.path.join(defines.slp_folder, fn)
+ sys_cmd = "ln -s %s %s"%(full_file, link_file)
+ os.system(sys_cmd)
+ print ("Soft linked slp data files into the data folder...")
+
+ # Copying topographic files over
+ if (defines.hard_copy):
+ sys_cmd = 'rsync --progress %s %s'%(defines.topo_file, os.path.join(defines.out_files_folder, '%s_hgt.nc'%(defines.model)))
+ os.system(sys_cmd)
+ print ("Copied code and topography file...")
+ else:
+ sys_cmd = 'ln -s %s %s'%(defines.topo_file, os.path.join(defines.out_files_folder, '%s_hgt.nc'%(defines.model)))
+ os.system(sys_cmd)
+ print ("Copied code and soft linked topography file...")
+
+ # cd'ing into the CODE folder
+ os.system('cd %s'%(defines.code_folder))
+ print ("Cd'ing into the code folder...")
+
+def copy_code_over():
+ '''
+ Function to copy code over from the specified locations to the locations needed by the tracker.
+ '''
+ print ("Copying files over...")
+ sys_cmd = 'rsync -r --exclude ".git*" --exclude "*.mat" --exclude "*.nc" %s/ %s'%(os.path.join(defines.source_code_folder, 'tracker'), defines.code_folder)
+ os.system(sys_cmd)
+
+
diff --git a/diagnostics/etc_composites/util/tracker/center_finder_v4.py b/diagnostics/etc_composites/util/tracker/center_finder_v4.py
new file mode 100755
index 000000000..4464f81e3
--- /dev/null
+++ b/diagnostics/etc_composites/util/tracker/center_finder_v4.py
@@ -0,0 +1,1623 @@
+""" This module extracts cyclone centers/tracks from a series of sea level
+ pressure fields. This module contains the main logic of this process.
+
+ Options/Arguments:
+ centers_file -- file_name template for storing results.
+ dumped_centers_file -- file_name template for storing results
+ for rejected centers.
+ defs_set -- alterations of defaults from defs_vX.py.
+ imports -- list of modules to import.
+ over_write_out_path -- override value from setup_vX.py.
+ over_write_shared_path -- override value from setup_vX.py.
+ over_write_slp_path -- override value from setup_vX.py.
+ over_write_years -- override value from setup_vX.py.
+ exit_on_error -- have program exit on error.
+ plot_on_error -- have program plot errors.
+ save_plot -- save some plots
+
+ Returns/Creates:
+ centers_file -- ASCII file of candidate centers found and kept.
+ dumped_centers_file -- ASCII file of candidate centers found and
+ discarded.
+
+ Examples:
+
+ Notes: See bottom of this document for extended notes that are denoted
+ in the code. For parallel execution see drive_center_finder.
+
+ If using python version 2.6 and greater we can make use of multiple
+ CPUs or multi-core CPUs in a symmetric multi-processing (SMP)
+ or shared memory environment.
+
+ Memory Use: Depending on resolution and options expect at least 200MB per
+ instance.
+
+ Run Time: With the NCEP Reanalysis I get about 140 timesteps per minute per
+ instance. Thus a year takes roughly 10 minutes. This is on a
+ Mac Pro with 2.26 GHz processors, 12 GB of RAM, and a 3 disk RAID.
+
+ Author: Mike Bauer
+
+ Log:
+ 2006/12 MB - File created.
+ lost track of dates of updates and fixes... sorry
+ 2008/10 MB - Added input checks, docstring.
+ 2008/11 MB - Added Polar_Fix to keep N pole from having 40 centers.
+ 2009/11 MB - Fixed problem on Julian Days for GCMs using noleap calendars.
+ 2009/11 MB - Reduced memory leak issue my removing year loop.
+"""
+
+import sys,os
+import defines
+import jj_calendar as jjCal
+
+def test_laplacian(test):
+ """Provide tests to Laplacian filter
+
+ Laplacian
+ LAP_P = (d^2P/dx^2 + d^2P/dy^2) in spherical coordinates (radians)
+ = 1/a^2 * d^2P/dlat^2 + 1/a^2sin^2(lat) * d^2P/dlon^2
+ + cot(lat)/a^2 * dP/dlat
+
+ Scaled to (deg lat)^2 by altering km^2 and fixed km per deg
+ latitude = 111.0
+
+ Centered finite differences:
+ d^2P/dlon^2 = ( P(lon+1) - 2*P(lon) + P(lon-1) ) / dlon**2
+ dP/dlon = ( P(lon+1) - P(lon-1) ) / 2*dlon
+ """
+ # Test case
+ if test == 1:
+ tgrid = 8149
+ tgrids = [7859, 7860, 7861, 7862, 7863, 8003, 8004, 8005,
+ 8006, 8007, 8147, 8148, 8149, 8150, 8151, 8291,
+ 8292, 8293, 8294, 8295, 8435, 8436, 8437, 8438, 8439]
+ tslps = [992600, 989700, 989300, 990500, 992700, 983800, 979200,
+ 978200, 980700, 984900, 978100, 972600, 971500, 974200,
+ 978500, 980400, 974200, 972700, 975100, 979200, 991200,
+ 986100, 984200, 985800, 990100]
+ # 9-pnt average around tgrid
+ nine_pnt = [979000.0, 977377.777778, 979033.333333, 976744.444444,
+ 975377.777778, 977222.222222, 981666.666667,
+ 980655.555556, 982277.777778]
+
+ # termA, termB, termC, lap for this field
+ aterma = 0.000100564132136
+ atermb = 9.41719495596e-05
+ atermc = -7.76431575284e-07
+ alap = 2.38977684913
+ else:
+ sys.exit("Error test not defined!")
+ return (tgrid,tgrids,tslps,nine_pnt,aterma,atermb,atermc,alap)
+
+def print_region(height,width,area,center,source,
+ fmt1="(%06d)",fmt2=" %06d ",local=0,cols=6):
+ """Prints the local matrix of area,source around center"""
+ fmt = "%% %dd " % (cols)
+ k = len(area)-1
+ for j in range(height-1,-1,-1):
+ if j == height-1:
+ print (" ", end='')
+ for jj in range(width):
+ print (fmt % (jj), end='')
+ print ("")
+ print ("%02d" % (j), end='')
+ row = []
+ for i in range(width):
+ row.append(k)
+ k -= 1
+ row.reverse()
+ if local:
+ for i in row:
+ if area[i] == center:
+ print (fmt1 % (source[i]), end='')
+ else:
+ print (fmt2 % (source[i]), end='')
+ else:
+ for i in row:
+ if area[i] == center:
+ print (fmt1 % (source[area[i]]), end='')
+ else:
+ print (fmt2 % (source[area[i]]), end='')
+ print ("")
+ print ("\n")
+
+def main(centers_file, defs_set, dumped_centers_file, imports,
+ over_write_out_path, shared_path, over_write_slp_path,
+ loop_year, exit_on_error, plot_on_error, save_plot, import_read,
+ save_stats):
+
+ # import os, sys # JJ
+ #
+ # months = {1: 'January', 2: 'February', 3: 'March', 4: 'April', 5: 'May',
+ # 6: 'June', 7: 'July', 8: 'August', 9: 'September', 10: 'October',
+ # 11: 'November', 12: 'December'} # JJ
+
+ # --------------------------------------------------------------------------
+ # Setup Section
+ # --------------------------------------------------------------------------
+ for i in imports:
+ exec (i, globals())
+
+ defs_v4 = globals()['defs']
+ numpy = globals()['numpy']
+ strip_read = globals()['strip_read']
+
+ # Fetch definitions and impose those set in defs_set.
+ defs = defs_v4.defs(**defs_set)
+
+ # Predefined data storage
+ # center_data_type = numpy.dtype(defs.center_data) # JJ
+
+ # Pre-bind for speed
+ Tree_Traversal = tree_traversal.tree_traversal
+ #Pull_Data = pull_data.pull_data
+ GCD = gcd.gcd
+ G2L = g2l.g2l
+ # IJ2Grid = ij2grid.ij2grid # JJ
+ # Grid2ij = grid2ij.grid2ij # JJ
+ Rhumb_Line_Nav = rhumb_line_nav.rhumb_line_nav
+ Polar_Fix = polar_fix.polar_fix
+ if save_plot or plot_on_error:
+ Plot_Map = plot_map.plotmap
+ Save_NetCDF = save_netcdf.Save_NetCDF
+ Error_Plot = error_plot.error_plot_cf
+ if save_stats:
+ Save_NetCDF = save_netcdf.Save_NetCDF
+
+ # Get some definitions. Note must have run setup_vx.py already!
+ sf_file = "%ss_dat.p" % (shared_path)
+ try:
+ fnc_out = pickle.load(open(sf_file, 'rb'))
+ (im,jm,maxid,lats,lons,timestep,dx,dy,dlon,dlat,start_lat,start_lon,
+ dlon_sq,dlat_sq,two_dlat,model_flag,eq_grid,tropical_n,tropical_s,
+ bot,mid,top,row_start,row_end,tropical_n_alt,tropical_s_alt,
+ bot_alt,top_alt,lon_shift,lat_flip,the_calendar,found_years,
+ super_years,dim_lat,dim_lon,dim_time,var_lat,var_lon,var_time,
+ var_slp,var_topo,var_land_sea_mask,file_seperator,no_topo,
+ no_mask,slp_path,model,out_path,shared_path,lat_edges,lon_edges,
+ land_gridids,troubled_centers,faux_grids) = fnc_out
+ # Save memory
+ if not defs.troubled_filter:
+ del troubled_centers
+#Lap comment out if want to screen with and/or change tropical screen.
+ #bot = bot_alt
+ #top = top_alt
+ #tt = [x for x in land_gridids if x not in troubled_centers]
+ #troubled_centers.extend(tt)
+ del land_gridids
+ del lat_edges
+ del lon_edges
+ del fnc_out
+ # except: # JJ added line below
+ except Exception as ex:
+ sys.exit("\n\tWARNING: Error reading or finding %s." % (sf_file))
+
+ # Update over_write values
+ if over_write_slp_path:
+ slp_path = over_write_slp_path
+ if over_write_out_path:
+ out_path = over_write_out_path
+
+ # Import a bunch of model grid specific information
+ # Note must have run setup_vX.py already!
+ fnc_out = []
+ cf_file = "%scf_dat.p" % (shared_path)
+ try:
+ fnc_out = pickle.load(open(cf_file, 'rb'))
+ (use_all_lons,search_radius,regional_nys,gdict,rdict,ldict,ijdict,
+ min_centers_per_tstep,max_centers_per_tstep,max_centers_per_tstep_change,
+ lapp_cutoff,hpg_cutoff) = fnc_out
+ del fnc_out
+ except:
+ sys.exit("\n\tWARNING: Error reading or finding %s." % (cf_file))
+
+ # Pre-bind object calls for speed.
+ N_less = numpy.less; N_greater = numpy.greater
+ N_average = numpy.average; N_take = numpy.take
+ N_ones = numpy.ones; N_size = numpy.size;
+ # N_array = numpy.array # JJ
+ N_multiply = numpy.multiply;
+ # N_sometrue = numpy.sometrue # JJ
+ N_subtract = numpy.subtract; N_add = numpy.add
+ # strip_read = strip_read.strip_read # JJ
+
+ stored_centers = [];
+ # stored_centers_append = stored_centers.append # JJ
+
+ # Summary Stats: Checks on operation to report and catch oddities.
+ known_flags = {0 : "Passed all filters",
+ 1 : "Failed concavity/Laplacian filter",
+ 2 : "Failed regional minimum filter",
+ 3 : "Failed critical radius filter",
+ 4 : "Failed troubled center filter",
+ 9 : "Failed polar screen"}
+ flag_files = {0 : "passed",
+ 1 : "lap",
+ 2 : "reg",
+ 3 : "crit",
+ 4 : "troub",
+ 9 : "polar"}
+ flag_colors = { 0 : "black",
+ 1 : "blue",
+ 2 : "yellow",
+ 3 : "green",
+ 4 : "red",
+ 9 : "orange"
+ }
+ total_time_steps = 0
+ nflags = 10
+ # Used flags
+ flags_used = [1,2,3,4,9]
+ #flags_used = [1,2,3,9]
+
+ # Quit on error else just send message to logfile?
+ if exit_on_error:
+ do_this = 'print (smsg); print (msg); sys.exit("\t\tDate_Stamp:"+date_stamp)'
+ else:
+ do_this = 'print (smsg); print (msg); print "\t\tDate_Stamp:"+date_stamp'
+
+##CUT
+ ## Temp array for histograms
+ #plot_dat_x = []
+ #plot_dat_y = []
+ #plot_dat_z = []
+ #plot_dat_x_append =plot_dat_x.append
+ #plot_dat_y_append = plot_dat_y.append
+ #plot_dat_z_append = plot_dat_z.append
+
+ if save_plot:
+ fplot = Plot_Map(missing=-10000000000.0,color_scheme="hot_r")
+
+ if plot_on_error or save_plot:
+ plot = Plot_Map(clevs=[980,1020,2],cints=[960.0,1013.0],color_scheme="bone")
+
+ inv_accuracy = 1.0/defs.accuracy
+ # Counters for report
+ total_centers_used = 0
+ total_centers_cnt = [0]*nflags # make size of total flag count
+
+ #print "\n=============%d=============" % (loop_year)
+
+ # Define some files
+ header = "mcms_%s_%04d_" % (model,loop_year)
+ centers_file = "%s%scenters.txt" % (out_path,header)
+ dumped_centers_file = "%s%sdumped_centers.txt" % (out_path,header)
+
+ # Open files for storage.
+ centers_save = open(centers_file,"w")
+ if defs.keep_discards:
+ dumped_centers_save = open(dumped_centers_file,"w")
+ else:
+ dumped_centers_file = ""
+
+ # ---------------------------------------------------------------------
+ # Pull in reference field
+ # ---------------------------------------------------------------------
+
+ # Open data file, extract data and model definitions
+ exec(import_read, globals())
+ pull_data = globals()['pull_data']
+ fnc = pull_data.pull_data(NetCDF,numpy,slp_path,file_seperator,loop_year,
+ defs.read_scale,var_slp,var_time,lat_flip,lon_shift)
+ (slp,times,the_time_units) = fnc
+ del fnc
+
+ # # Jeyavinoth: Start
+ # # comment from here till "Jeyavinoth: End"
+ # # getting the dtimes and adates
+ # # Work with the time dimension a bit.
+ # # This is set in setup_vX.py
+ # jd_fake = 0
+ # print (" Jimmy the calendar is "+the_calendar)
+ # if the_calendar != 'standard':
+ # # As no calendar detected assume non-standard
+ # jd_fake = 1
+ #
+ # tsteps = len(times)
+ #
+ # the_time_range = [times[0],times[tsteps-1]]
+ # start = "%s" % (the_time_units)
+ # tmp = start.split()
+ # tmp1 = tmp[2].split("-")
+ # tmp2 = tmp[3].split(":")
+ # #tmp3 = tmp2[2][0]
+ # tmp3 = 0
+ # start = "%s %s %04d-%02d-%02d %02d:%02d:%02d" % \
+ # (tmp[0],tmp[1],int(tmp1[0]),int(tmp1[1]),
+ # int(tmp1[2]),int(tmp2[0]),int(tmp2[1]),
+ # int(tmp3))
+ # # Warning this could get weird for non-standard
+ # # calendars if not set correctly (say to noleap)
+ # # in setup_vX.py
+ # cdftime = netcdftime.utime(start,calendar=the_calendar)
+ # get_datetime = cdftime.num2date
+ # dtimes = [get_datetime(times[step]) for step in range(0,tsteps)]
+ #
+ # # Get Julian Days.. unless GCM uses non-standard calendar in which case
+ # # enumerate with timestep and use uci_stamps for datetime things.
+ # # JIMadd
+ # jd_fake = True
+ #
+ # if (the_calendar == 'proleptic_gregorian'):
+ # jd_fake = False
+ #
+ # # jd_fake = False ######## JJ set this to fake to make it work with leap years
+ # if jd_fake:
+ # # Use timesteps rather than dates
+ # # examples '000000000', '000000001'
+ # print ("JIMMY INSIDE jd_fake creation of adates")
+ # print (loop_year)
+ # start_year = over_write_years[0]
+ # print (start_year)
+ # counter_upper=(loop_year-start_year)*1460
+ # adates = ["%09d" % (x+counter_upper) for x in range(tsteps)]
+ # print (adates)
+ # # Modify output format for timesteps
+ # fmt_1 = "%s %s %05d %05d %06d %07d " # use date_stamp
+ # fmt_2 = "%07d %05d %02d %02d %04d %s%05d%05d %s\n" # use uci_stamp
+ # defs.center_fmt = fmt_1 + fmt_2
+ # else:
+ # # Using regular date/times
+ # # examples 244460562, 244971850i
+ # date2jd = netcdftime.JulianDayFromDate
+ # adates = [int(100*date2jd(x,calendar='standard')) for x in dtimes]
+ # # Modify output format for datetimes
+ # fmt_1 = "%s %09d %05d %05d %06d %07d " # use date_stamp
+ # fmt_2 = "%07d %05d %02d %02d %04d %s%05d%05d %s\n" # use uci_stamp
+ # defs.center_fmt = fmt_1 + fmt_2
+ #
+ # # Jeyavinoth: End
+
+ # Jeyavinoth
+ # we don't have to set a new format here,
+ # our outputs match the format specified in defs_v4.py
+ # code below replaces the above code to get dtimes, date_stamps, and adates
+ dtimes, date_stamps, adates = jjCal.get_time_info(the_time_units, times, calendar=the_calendar)
+
+ tsteps = len(times)
+ the_time_range = [times[0],times[tsteps-1]]
+
+ # copied this over from the above code, to use the correct format for date_stamp and uci_stamp
+ fmt_1 = "%s %s %05d %05d %06d %07d " # use date_stamp
+ fmt_2 = "%07d %05d %02d %02d %04d %s%05d%05d %s\n" # use uci_stamp
+ defs.center_fmt = fmt_1 + fmt_2
+
+ uci_stamps = ['%4d%02d%02d%02d' % (d.year,d.month,d.day,d.hour) for d in dtimes]
+ date_stamps = ["%4d %02d %02d %02d" % (d.year,d.month,d.day,d.hour) for d in dtimes]
+
+ if save_plot or save_stats:
+ # Files for histogram
+#Lap
+ #lap_file = centers_file.replace("centers","laplacian")
+ #lap_save = open(lap_file,"w")
+
+ # flag_sum = numpy.zeros((jm*im,nflags),dtype=numpy.float) # JJ
+ flag_cnt = numpy.zeros((jm*im,nflags),dtype=numpy.float)
+ ## Uncomment following lines to plot SLP Field to check all is reading
+ #splot = Plot_Map(clevs=[960,1040,4],cints=[960.0,1040.0],color_scheme="jet")
+ #for step in range(tsteps):
+ # msg = "State at %s UTC" % (date_stamps[step])
+ # msg1 = "%sfigs/%s_slp_field_%s.pdf"
+ # pname = msg1 % (out_path,model,date_stamps[step].replace(" ",""))
+ # splot.create_fig()
+ # slp_step = slp[step,:,:].copy()
+ # slp_step.shape = jm*im
+ # #splot.add_field(lons,lats,slp_step,ptype='pcolor')
+ # splot.add_field(lons,lats,slp_step,ptype='contour')
+ # splot.finish(pname,title=msg)
+ # print ("\tMade figure: %s" % (pname))
+ # # Uncomment break to plot all steps
+ # break
+ #del slp_step
+ #del splot
+ del times
+ del dtimes
+
+ # --------------------------------------------------------------------------
+ # Main Program Logic
+ # --------------------------------------------------------------------------
+ # Read SLP field one time step at a time
+
+##CUT
+ #tsteps = 2
+
+ # last_center_cnt = -1 # JJ
+ for step in range(0,tsteps):
+
+ if plot_on_error or save_plot:
+ temp_centers = []
+ temp_discards = []
+ temp_centers_append = temp_centers.append
+ temp_discards_append = temp_discards.append
+
+ adate = adates[step]
+ uci_stamp = uci_stamps[step]
+ date_stamp = date_stamps[step]
+
+ #print ("Doing",date_stamp)
+
+ # Get SLP field, make 1d integer array. To allow for exact comparisons
+ # impose a fixed accuracy (significant digits) via defs.accuracy.
+ slp_step = N_multiply(slp[step,:,:].copy(),defs.accuracy)
+ slp_step.shape = im*jm
+ # Jeyavinoth changed this to int64
+ slpint = slp_step.astype(numpy.int64)
+
+ # Check 4 corners of array
+ #print (slpint[0],slpint[143],slpint[-144],slpint[-1])
+
+ # If searching for high pressure reverse pressure field so
+ # that highs are lows.
+ if defs.find_highs:
+ slpint = slpint*-1
+
+ # Screen SLP field by defs.plim.
+ if defs.plim_filter:
+ tmp1 = N_less(slpint,defs.plim_filter)
+ else:
+ tmp1 = N_ones(N_size(slpint))
+
+ # Screen SLP field in tropics
+ if defs.tropical_filter:
+ tmp1[bot:top] = 0
+
+ # Apply previous filters and create initial center list
+ centers = {}
+ for gridid in range(maxid):
+ if tmp1[gridid]:
+ centers[gridid] = slpint[gridid]
+
+ #----------------------------------------------------------------------
+ # Stage 1: Cycle over initial center list, apply more filters and checks
+ #----------------------------------------------------------------------
+ if not centers:
+ err_num = 1
+ smsg = "\n\tFail Check %d: No initial centers for this timestep." % (err_num)
+ msg = "\t\tlen(centers): %d" % (len(centers))
+ if plot_on_error:
+ msg1 = "Fail Check %d: No initial centers for this timestep." % (err_num)
+ center_loc = []
+ discard_loc = []
+ Error_Plot("%s/error_%d_%s.png" % (out_path,err_num,adate),
+ plot,slp_step*inv_accuracy,lons,lats,center_loc,discard_loc,msg1)
+ exec(do_this)
+
+ total_time_steps += 1 # Got to initial center list
+
+ # Pre bind for some speedup
+ kept_centers = []
+ kept_centers_append = kept_centers.append
+
+ #print ("\tStage 1 Grid Pool CNT:",len(centers))
+
+ for center in centers:
+ # Centers to be kept fall out the bottom
+ # of this loop.
+
+ # Use temporary flag for potential problem centers
+ # that pass but might bear closer examination.
+ temp_flag = 0
+
+ # Retrieve pre-calculated 8 neighboring gridIDs
+ upm,upc,upp,cnm,cnt,cnp,dnm,dnc,dnp = gdict[center][:]
+
+ # Screen for along longitude non-minima
+ if N_less(slpint[cnm],slpint[center]) or \
+ N_greater(slpint[center],slpint[cnp]):
+ continue
+
+ # Screen for along latitude non-minima
+ if N_less(slpint[dnc],slpint[center]) or \
+ N_greater(slpint[center],slpint[upc]):
+ continue # drop center
+
+ # Screen for diagonal non-minima
+ if N_less(slpint[upm],slpint[center]) or \
+ N_greater(slpint[center],slpint[upp]):
+ continue # drop center
+ else:
+ if N_less(slpint[dnm],slpint[center]) or \
+ N_greater(slpint[center],slpint[dnp]):
+ continue # drop center
+
+ # Okay if got here at least a local minima
+ total_centers_used += 1
+
+ # DISCOVERY MODE: Turn off (via commenting from DISCOVERY START
+ # to DISCOVERY END and uncommenting the next line down. Allows
+ # for center finding without Concavity Test.
+ center_laplacian = 0
+
+#LAP when used with trouble_land use as land/sea mask
+ #if defs.troubled_filter:
+ # if center in troubled_centers:
+ # total_centers_cnt[4] += 1
+ # if save_plot or save_stats:
+ # flag_cnt[center,4] += 1
+ # if plot_on_error:
+ # temp_discards_append(msg)
+ # continue # drop center
+
+ # DISCOVERY START
+ if defs.troubled_filter:
+ # Derived SLP Test: SLPs from grids whose surface elevation is
+ # above sea level or who have immediate neighbors with that
+ # quality are tested with more caution due to the indirect
+ # methods used to determine SLP.
+ if center in troubled_centers:
+
+ # Filter to see if center SLP too high implying that SLP
+ # reduction error in play. The value 1020 is based on
+ # examination of 2D histograms of center SLP and the
+ # horizontal SLP gradient from NCEP Reanalysis suggesting
+ # that high SLP centers over land mostly occur over very
+ # high topography (Greenland and Antarctica) and are suspect.
+ if slpint[center] > 1020000:
+ if defs.keep_discards:
+ msg = defs.center_fmt % (date_stamp,adate,
+ int((90.0-ijdict[center][3])*100),
+ int(ijdict[center][2]*100),
+ center,centers[center],0,
+ center_laplacian,4,0,0,
+ uci_stamp,
+ int(round((90.0-
+ ijdict[center][3])*100)),
+ int(round(ijdict[center][2]*100)),
+ defs.usi_template
+ )
+ dumped_centers_save.writelines(msg)
+ total_centers_cnt[4] += 1
+ if save_plot or save_stats:
+ flag_cnt[center,4] += 1
+ if plot_on_error:
+ temp_discards_append(msg)
+ continue # drop center
+
+ # Ring Symmetry Test: Normally the SLP around a cyclone increases
+ # radially in a fairly smooth and symmetrical way. Errors in SLP
+ # reduction can create large departures from this and we flag
+ # these as potentially non-physical results. Found to be the
+ # result of large relief (absolute topography changes) around
+ # the center rather than just high topography.
+ ring = numpy.take(slpint,gdict[center][:])
+ ring_slp_diff = [x - ring[4] for x in ring]
+ tmp = ring_slp_diff[:]
+ tmp.sort()
+ # Find HPG (horizontal pressure gradient hPa/km)
+ clon = ijdict[center][2]
+ clat = ijdict[center][3]
+ bit = ring_slp_diff.index(tmp[-1])
+ bite_high = gdict[center][bit]
+ # Find Horizontal Pressure Gradient
+ rlon = ijdict[bite_high][2]
+ rlat = ijdict[bite_high][3]
+ distx = GCD(clon,clat,rlon,rlat)
+ # Take the steepest gradient for comparison.
+ hpg_high = tmp[-1]/distx
+
+ # Holton says the horizontal pressure gradient is on the
+ # order of 0.01 hPa/km.The NCEP Reanalysis suggests that
+ # overland HPG above this value are often in areas of
+ # high relief and/or high topography.
+ # Find the Absolute HPG
+
+ # Save HPG for post analysis uncomment save lap below
+ #lap_save.writelines("%f " % (abs(hpg_high)*inv_accuracy))
+
+ if abs(hpg_high)*inv_accuracy > hpg_cutoff: # make hPa/km
+ if defs.keep_discards:
+ msg = defs.center_fmt % (date_stamp,adate,
+ int((90.0-ijdict[center][3])*100),
+ int(ijdict[center][2]*100),
+ center,centers[center],0,
+ center_laplacian,4,0,0,
+ uci_stamp,
+ int(round((90.0-
+ ijdict[center][3])*100)),
+ int(round(ijdict[center][2]*100)),
+ defs.usi_template
+ )
+ dumped_centers_save.writelines(msg)
+ total_centers_cnt[4] += 1
+ if save_plot or save_stats:
+ flag_cnt[center,4] += 1
+ if plot_on_error:
+ temp_discards_append(msg)
+ continue # drop center
+
+ # Concavity Test: See Note 1
+
+ # Uncomment test_lap to check algorithm (not related to slp source!).
+ #test_lap = 1
+ #if test_lap:
+ # ltest = 1
+ # center,tgrids,tslps,nine_pnt,aterma,atermb,atermc,alap = test_laplacian(ltest)
+ # i = 0
+ # for each in tgrids:
+ # slpint[each] = tslps[i]
+ # i += 1
+ # print ("\n\nLaplacian Test: Source")
+ # print_region((5,5,tgrids,center,slpint))
+
+ # Find 9-pnt average SLP for each of the 9-pnts around center.
+ all_nine = gdict[center][:]
+ nine_pnt_aves = []
+ nine_pnt_aves_append = nine_pnt_aves.append # prebind for speedup
+ for eachone in all_nine:
+ nine_pnt_aves_append(N_average(N_take(
+ slpint,gdict[eachone][:])))
+
+ #if test_lap:
+ # if abs(sum(nine_pnt) - sum(nine_pnt_aves)) > 0.1:
+ # smsg = "\n\tFailed Laplacian Test %d: 9-pnt Ave" % (ltest)
+ # msg = ""
+ # print ("Got")
+ # print_region((3,3,all_nine,center,nine_pnt_aves,fmt1="(%08.2f)",fmt2=" %08.2f ",cols=10,local=1))
+ # print ("Wanted")
+ # print_region((3,3,all_nine,center,nine_pnt,fmt1="(%08.2f)",fmt2=" %08.2f ",cols=10,local=1))
+ # exec(do_this)
+ # else:
+ # print ("Passed Laplacian Test %d: 9-pnt Ave" % (ltest))
+
+ # Find 9-pnt Laplacian of the averaged SLPs. See Note 2
+ center_laplacian = 0
+ if ldict[center][0]: # non-polar (90 degrees)
+
+ # Scale pressures back to hPa
+ nine_pnt_aves = [x*inv_accuracy for x in nine_pnt_aves]
+
+ # 1/a^2sin^2(lat) * d^2P/dlon^2
+ termA = ldict[center][1] * (nine_pnt_aves[3] -
+ 2.0*nine_pnt_aves[4] +
+ nine_pnt_aves[5]) / dlon_sq
+ # 1/a^2 * d^2P/dlat^2
+ termB = defs.inv_earth_radius_sq * ((nine_pnt_aves[1] -
+ 2.0*nine_pnt_aves[4] +
+ nine_pnt_aves[7])/
+ dlat_sq)
+ # cot(lat)/a^2 * dP/dlat
+ termC = ldict[center][2] * (nine_pnt_aves[1] -
+ nine_pnt_aves[7]) / two_dlat
+
+ lapp = defs.two_deg_lat*(termA + termB + termC) # hPa/lat^2
+#Lap Save laplacian for post analysis
+ #lap_save.writelines("%f " % (lapp))
+
+ #if test_lap:
+ # print "\n9-pnt Ave"
+ # print_region(3,3,all_nine,center,nine_pnt,fmt1="(%08.2f)",fmt2=" %08.2f ",cols=10,local=1)
+ # if abs(lapp-alap) > 0.01:
+ # smsg = "\nFailed Laplacian Test %d: Laplacian" % (ltest)
+ # msg = ""
+ # print "Got:",lapp
+ # print "Wanted:",alap
+ # print "termA:",termA
+ # print "termB:",termB
+ # print "termC:",termC
+ # exec(do_this)
+ # else:
+ # print "Passed Laplacian Test %d: Laplacian" % (ltest)
+
+ # Scale for saving
+ center_laplacian = int(lapp*1000.0)
+
+ if lapp < lapp_cutoff:
+ #if lapp < 0.15:
+ if defs.keep_discards:
+ msg = defs.center_fmt % (date_stamp,adate,
+ int((90.0-ijdict[center][3])*100),
+ int(ijdict[center][2]*100),
+ center,centers[center],0,
+ center_laplacian,1,0,0,
+ uci_stamp,
+ int(round((90.0-
+ ijdict[center][3])*100)),
+ int(round(ijdict[center][2]*100)),
+ defs.usi_template
+ )
+ dumped_centers_save.writelines(msg)
+ total_centers_cnt[1] += 1
+ if save_plot or save_stats:
+ flag_cnt[center,1] += 1
+ if plot_on_error:
+ temp_discards_append(msg)
+ continue # drop center
+ # DISCOVERY END
+
+ # Calculate regional_average SLP (pyhack)
+ slp_ave = int(N_average(N_take(slpint,rdict[center])))
+
+ ## out for raw
+ ## Check center is also a regional minimum, which means
+ ## it's the lowest-or equal SLP within a great circle
+ ## radius of (defs.critical_radius).
+ #not_reg_min = False
+ #if N_sometrue(N_greater(slpint[center],
+ # N_take(slpint,rdict[center])
+ # +defs.regional_slp_threshold)):
+ # if defs.keep_discards:
+ # msg = defs.center_fmt % (date_stamp,adate,
+ # int((90.0-ijdict[center][3])*100),
+ # int(ijdict[center][2]*100),
+ # center,centers[center],slp_ave,
+ # center_laplacian,2,0,0,
+ # uci_stamp,
+ # int(round((90.0-
+ # ijdict[center][3])*100)),
+ # int(round(ijdict[center][2]*100)),
+ # defs.usi_template
+ # )
+ # dumped_centers_save.writelines(msg)
+ # total_centers_cnt[2] += 1
+ # if save_plot or save_stats:
+ # flag_cnt[center,2] += 1
+ # if plot_on_error:
+ # temp_discards_append(msg)
+ # not_reg_min = True
+ #if not_reg_min:
+ # continue
+ ## out for raw
+
+ kept_centers_append((center,centers[center],ijdict[center][3],
+ ijdict[center][2],slp_ave,center_laplacian,
+ temp_flag))
+
+ #-----------------------------------------------------------------------
+ # Stage 2: Cycle over centers that passed Stage 1. Reduce the center
+ # list to contain only a single center within defs.critical_radius of
+ # each other.
+ #-----------------------------------------------------------------------
+ #msg1 = "\tStage 2 Center CNT: %d from %d candidates"
+ #print msg1 % (len(kept_centers),total_centers_used)
+ #msg1 = "\t\t% 3d %s"
+ #for e in flags_used:
+ # print msg1 % (total_centers_cnt[e],known_flags[e])
+
+ if not kept_centers:
+ err_num = 2
+ # smsg = "\n\tFail Check %d: No kept centers for this timestep." % (err_num) # JJ
+ msg = "\t\tlen(kept_centers): %d" % (len(kept_centers))
+ if plot_on_error:
+ msg1 = "Fail Check %d: No kept centers for this timestep." % (err_num)
+ center_loc = []
+ discard_loc = []
+ for c in temp_centers:
+ parts = c.split()
+ llon = int(parts[6])*0.01
+ llat = 90.0 - int(parts[5])*0.01
+ center_loc.append((llon,llat))
+ for c in temp_discards:
+ parts = c.split()
+ llon = int(parts[6])*0.01
+ llat = 90.0 - int(parts[5])*0.01
+ discard_loc.append((llon,llat))
+ Error_Plot("%s/error_%d_%s.png" % (out_path,err_num,adate),
+ plot,slp_step*inv_accuracy,lons,lats,center_loc,discard_loc,msg1)
+ exec(do_this)
+
+ # out for raw
+ # Remove all but one center if at such high latitude that all longitudes
+ # fit within defs.critical_radius. Can be done even with wavenumber based
+ # radius because possibility of many centers along longitude.
+ # Check if polar rows (can set use_all_lons = [] above to skip this check)
+ if len(use_all_lons):
+ kept_centers,dumped = Polar_Fix(use_all_lons,kept_centers,row_end)
+ for cdump in dumped:
+ msg = defs.center_fmt % (date_stamp,adate,
+ int((90.0-cdump[2])*100),
+ int(cdump[3]*100),cdump[0],cdump[1],cdump[4],
+ cdump[5],9,0,0,
+ uci_stamp,
+ int(round(90.0-cdump[2])*100),
+ int(round(cdump[3]*100)),defs.usi_template)
+ total_centers_cnt[9] += 1
+ if save_plot or save_stats:
+ flag_cnt[int(cdump[0]),9] += 1
+ if plot_on_error:
+ temp_discards_append(msg)
+ dumped_centers_save.writelines(msg)
+ # out for raw
+
+ # Find fractional grid positions based on fitting a parabolic function
+ # to the local slp field.
+ new_kept_centers = []
+ new_kept_centers_append = new_kept_centers.append
+ for g in kept_centers:
+ upm,upc,upp,cnm,cnt,cnp,dnm,dnc,dnp = gdict[g[0]][:]
+ numerator = N_subtract(slpint[cnm],slpint[cnp])
+ denominator = N_subtract(N_add(slpint[cnm],
+ slpint[cnp]),2*slpint[g[0]])
+ if denominator == 0:
+ factor = 0.0 # use original grid
+ else:
+ factor = 0.5 * float(numerator)/float(denominator)
+ # This happens because of adjacent equality
+ if factor > 0.5:
+ factor = 0.5
+ if factor < -0.5:
+ factor = -0.5
+ final_x = factor + ijdict[g[0]][0]
+ if final_x < 0.0: # wrap around
+ final_x = (im-1) + final_x
+
+ numerator = N_subtract(slpint[dnc],slpint[upc])
+ denominator = N_subtract(N_add(slpint[dnc],slpint[upc]),
+ 2*slpint[g[0]])
+ if denominator == 0:
+ factor = 0.0 # use original grid
+ else:
+ factor = 0.5 * float(numerator)/float(denominator)
+ # This happens because of adjacent equality
+ if factor > 0.5:
+ factor = 0.5
+ if factor < -0.5:
+ factor = -0.5
+ final_y = factor + ijdict[g[0]][1]
+ if final_y > jm-1:
+ final_y = jm-1
+ if final_y < 0.0:
+ final_y = 0
+
+ new_kept_centers_append((g[0],g[1],int(final_y*100),
+ int(100*final_x),g[4],g[5],g[6]))
+ kept_centers = new_kept_centers
+
+ # out for raw
+ # Step 3: See Note 3 at bottom of this file
+
+ # Dictionary all uci for each center that fall w/in search_radius.
+ center_tree = {}
+ for test_center in kept_centers:
+ # Which latitude row is center in?
+ for rowe in row_end:
+ if test_center[0] <= rowe:
+ row = row_end.index(rowe)
+ break
+
+ # Use fractional grid positions.
+ # At high lats, better than grid-centers
+ tci = float(test_center[3])*0.01
+ tcj = float(test_center[2])*0.01
+ tclon = G2L(tci,start_lon,start_lat,dlon,dlat,jm,
+ "lon","free",False,True,faux_grids)
+ tclat = G2L(tcj,start_lon,start_lat,dlon,dlat,jm,
+ "lat","free",False,True,faux_grids)
+
+ # Other close by centers?
+ embeded = []
+ embeded_append = embeded.append
+ for embeded_center in kept_centers:
+ if embeded_center[0] != test_center[0]:
+ # Use fractional grid positions.
+ # At high lats, better than grid-centers
+ eci = float(embeded_center[3])*0.01
+ ecj = float(embeded_center[2])*0.01
+ eclon = G2L(eci,start_lon,start_lat,dlon,dlat,jm,
+ "lon","free",False,True,faux_grids)
+ eclat = G2L(ecj,start_lon,start_lat,dlon,dlat,jm,
+ "lat","free",False,True,faux_grids)
+ if defs.use_gcd:
+ distx = GCD(tclon,tclat,eclon,eclat)
+ else:
+ fnc = Rhumb_Line_Nav(eclon,eclat,tclon,tclat,True)
+ distx = fnc[1]
+
+ if distx < search_radius[row]: # neighbor
+ if embeded_center[0] not in embeded:
+ embeded_append(embeded_center)
+ else:
+ if embeded_center[0] not in embeded:
+ embeded_append(test_center)
+ center_tree[test_center[0]] = embeded
+
+ # Traverse the tree of inter-referencing centers in center_tree.
+ linked_centers = {}
+ ctree_keys = list(center_tree.keys())
+ ctree_keys.sort()
+ # for test_center in center_tree.keys(): # center A
+ for test_center in ctree_keys: # center A
+ harvest = {}
+ Tree_Traversal(test_center,center_tree,harvest)
+ linked_centers[test_center] = harvest
+
+ # Traverse linked_centers for each center to see if in used_or_discarded
+ # if not then for each also not in used_or_discarded rank by lowest slp
+ # and then start of add to used list
+ lost_centers = []
+ used_or_discarded = [] # centers not to be used again
+ used = [] # centers to be kept
+ lc_keys = list(linked_centers.keys())
+ lc_keys.sort()
+ # for test_center in linked_centers:
+ for test_center in lc_keys:
+ # A center can only be used once
+ if test_center not in used_or_discarded:
+
+ # For each linked center see if it is unused,
+ # if so make list of these slps
+ # list of slps
+ inner_centers = [x for x in linked_centers[test_center]
+ if x not in used_or_discarded]
+ slps = []
+ slps = [linked_centers[test_center][x] for x in inner_centers]
+ # Only unique values
+ u = {}
+ for x in slps:
+ u[x] = 1
+ slps = list(u.keys())
+ # Order slps
+ slps.sort()
+
+ # Starting with the lowest SLP exclude all centers overlapping
+ # that center. Note that there could be ties by slp value in
+ # this case just take the middle center for the lowest slps.
+ for lowest in slps:
+ keys = []
+ keys = [x for x in linked_centers[test_center]
+ if linked_centers[test_center][x] == lowest]
+ if len(keys) > 1:
+ if len(keys)%2:
+ middle = int(len(keys)*0.5)-1
+ else:
+ middle = int(len(keys)*0.5)-1
+ # Tie(s), drop all but the middle entry
+ for xx in keys:
+ if keys.index(xx) != middle:
+ if xx not in used_or_discarded:
+ used_or_discarded.append(xx)
+ # Use this center
+ key = [x for x in keys if x not in used_or_discarded]
+ if key:
+ if key[0] not in used:
+ used.append(key[0])
+ if key[0] not in used_or_discarded:
+ used_or_discarded.append(key[0])
+ # Discard all centers w/in search radius of this
+ # center from further consideration.
+ for each in linked_centers[key[0]].keys():
+ if each not in used_or_discarded:
+ used_or_discarded.append(each)
+ if defs.keep_discards:
+ lost_centers = [x for x in kept_centers if x[0] not in used]
+
+ kept_centers = [x for x in kept_centers[:] if x[0] in used]
+
+ # Save these centers to disk
+ for g in lost_centers:
+ msg = defs.center_fmt % (date_stamp,adate,
+ int((90.0-ijdict[g[0]][3])*100),
+ int(ijdict[g[0]][2]*100),
+ g[0],g[1],g[4],g[5],3,0,0,
+ uci_stamp,
+ int(round((90.0-ijdict[g[0]][3])*100)),
+ int(round(ijdict[g[0]][2]*100)),
+ defs.usi_template
+ )
+ total_centers_cnt[3] += 1
+ if save_plot or save_stats:
+ flag_cnt[int(g[0]),3] += 1
+ if plot_on_error:
+ temp_discards_append(msg)
+ dumped_centers_save.writelines(msg)
+ # out for raw
+
+ # Save these centers to disk as potential cyclones
+ for g in kept_centers:
+ # Find fractional location of center
+ flon = G2L(g[3]*0.01,start_lon,start_lat,dlon,dlat,jm,
+ "lon","free",False,True,faux_grids)
+ flat = 90.0 - G2L(g[2]*0.01,start_lon,start_lat,dlon,dlat,jm,
+ "lat","free",False,True,faux_grids)
+ msg = defs.center_fmt % (date_stamp,adate,
+ int(flat*100),int(flon*100),
+ g[0],g[1],g[4],g[5],g[6],0,0,
+ uci_stamp,
+ int(round((90.0-ijdict[g[0]][3])*100)),
+ int(round(ijdict[g[0]][2]*100)),
+ defs.usi_template
+ )
+ total_centers_cnt[0] += 1
+ if save_plot or save_stats:
+ flag_cnt[int(g[0]),0] += 1
+ if plot_on_error or save_plot:
+ temp_centers_append(msg)
+ centers_save.writelines(msg)
+
+ # Plot timestep
+ if save_plot:
+ # set to step < tsteps+1 to do every step
+ if step < 1:
+ err_num = 0
+ msg1 = "Kept %d of %d centers on %s." % (len(kept_centers),total_centers_used,date_stamp)
+ center_loc = []
+ discard_loc = []
+ d_colors = []
+ c_colors = []
+ for c in temp_centers:
+ parts = c.split()
+ llon = int(parts[6])*0.01
+ llat = 90.0 - int(parts[5])*0.01
+ center_loc.append((llon,llat))
+ c_colors.append(flag_colors[int(parts[11])])
+ for c in temp_discards:
+ parts = c.split()
+ llon = int(parts[6])*0.01
+ llat = 90.0 - int(parts[5])*0.01
+ discard_loc.append((llon,llat))
+ d_colors.append(flag_colors[int(parts[11])])
+ msg1 = "%sfigs/%s_center_finder_%s.png"
+ pname = msg1 % (out_path,model,date_stamps[step].replace(" ",""))
+ Error_Plot(pname,plot,slp_step*inv_accuracy,lons,lats,center_loc,
+ discard_loc,pname,c_colors,d_colors)
+ print ("\tMade figure: %s" % (pname))
+
+#tmp
+ #if step == 6:
+ # break
+
+ # # Sanity Check: Flag potential problems.
+ #k_centers = len(kept_centers)
+ #if k_centers <= min_centers_per_tstep:
+ # err_num = 3
+ # smsg = "\n\tFail Check %d: Too few centers for this timestep." % (err_num)
+ # msg = "\t\tlen(k_centers): %d" % (k_centers)
+ # exec(do_this)
+ #if k_centers >= max_centers_per_tstep:
+ # err_num = 4
+ # smsg = "\n\tFail Check %d: Too many centers for this timestep." % (err_num)
+ # msg = "\t\tlen(k_centers): %d" % (k_centers)
+ # exec(do_this)
+ #if last_center_cnt > 0:
+ # #ttest = abs(last_center_cnt - k_centers)
+ # #if ttest >= max_centers_per_tstep_change:
+ # # err_num = 5
+ # # smsg = "\n\tFail Check %d: Too much change in center count this and previous timestep." % (err_num)
+ # # msg = "\t\tlen(k_centers): %d\n\t\tlen(last_center_cnt): %d" % (k_centers,last_center_cnt)
+ # # exec(do_this)
+ # # Warn if last center count doubled/halved to this center count
+ # double = round(last_center_cnt*0.6)
+ # ttest = abs(last_center_cnt - k_centers)
+ # if ttest >= double:
+ # err_num = 5
+ # smsg = "\n\tFail Check %d: Too much change in center count this and previous timestep." % (err_num)
+ # msg = "\t\tlen(k_centers): %d\n\t\tlen(last_center_cnt): %d\n\t\tdouble: %d" % (k_centers,last_center_cnt,double)
+ # exec(do_this)
+
+##CUT
+ #if last_center_cnt > 0:
+ # plot_dat_x_append(k_centers)
+ # plot_dat_y_append(double)
+ # plot_dat_z_append(ttest)
+ #last_center_cnt = k_centers
+
+ #msg1 = "\tFinal Center CNT: %d from %d candidates where"
+ #print msg1 % (len(kept_centers),total_centers_used)
+ #msg1 = "\t\t% 3d %s"
+ #for e in flags_used:
+ # print msg1 % (total_centers_cnt[e],known_flags[e])
+ # -------------------------------------------------------------------------
+ # Clean up
+ # -------------------------------------------------------------------------
+
+ # Close open files
+ centers_save.close()
+ if defs.keep_log:
+ log_file.close()
+ sys.stdout = screenout # redirect stdout back to screen
+ if defs.keep_discards:
+ dumped_centers_save.close()
+
+ #
+ # FINAL check to be sure all timesteps run and all centers accounted for.
+ #
+ report_file = centers_file.replace("centers.txt","centers_report.txt")
+ report_file = report_file.replace(out_path,"%sstats/" % (out_path))
+ report_save = open(report_file,"w")
+
+ report_save.writelines("%d\n" % (loop_year))
+ msg1 = "Final Center CNT: %d (%6.2f%%) from %d candidates where\n"
+ msg = msg1 % (total_centers_cnt[0],
+ 100.0*(float(total_centers_cnt[0])/float(total_centers_used)),
+ total_centers_used)
+ # Last minute check that reasonable count.
+ if total_centers_cnt[0] < 10*tsteps:
+ err_num = 6
+ smsg = "\n\tFail Check %d: Final Center CNT < 10*tsteps" % (err_num)
+ msg = "\t\tFinal Center CNT: %d < 10*tsteps %d " % (total_centers_cnt[0],10*tsteps)
+ date_stamp = "Full Record"
+ exec(do_this)
+ report_save.writelines(msg)
+ msg1 = "\t% 6d\t(%6.2f%%)\t%s\n"
+ for e in flags_used:
+ msg = msg1 % (total_centers_cnt[e],
+ 100.0*(float(total_centers_cnt[e])/float(total_centers_used)),
+ known_flags[e])
+ report_save.writelines(msg)
+ if total_centers_used != sum(total_centers_cnt):
+ msg = "%d Total Count Error:\n\ttotal_centers_used = %d\n\ttotal_centers_cnt = %s sum(%d)"
+ sys.exit(msg % (loop_year,total_centers_used,repr(total_centers_cnt),
+ sum(total_centers_cnt)))
+ report_save.close()
+
+ if save_plot or save_stats:
+ # Make frequency plot
+ for flag in flag_files:
+ # Just counts
+ # FIX error with numpy/matplot lib and missing so just set to zero now
+ comp_out = numpy.where(flag_cnt[:,flag] < 1.,-10000000000.0,flag_cnt[:,flag])
+ comp_out = numpy.where(flag_cnt[:,flag] < 1.,0.0,flag_cnt[:,flag])
+ pname = "%sfigs/%s_freq_%s_%d.png" % (out_path,model,flag_files[flag],loop_year)
+ if save_plot:
+ fplot.create_fig()
+ fplot.add_field(lons,lats,comp_out,ptype='pcolor',)
+ fplot.finish(pname)
+ #print "\tMade figure %s" % (pname)
+ if save_stats:
+ pname = pname.replace(".png",".nc")
+ pname = pname.replace("figs","netcdfs")
+ save_it = Save_NetCDF(flag_cnt[:,flag],lons,lats,pname,0)
+ #print "\tCreated file %s" % (pname)
+ del save_it
+ del comp_out
+#Lap
+ #lap_save.close()
+
+##CUT
+## Plot temp array histograms
+## for plotting
+#import matplotlib.pyplot as plt
+#pname = "%splot_dat_x.png" % (out_path)
+#fig = plt.figure()
+#n, bins, patches = plt.hist(plot_dat_x,50, normed=1,facecolor='green',alpha=0.75)
+#plt.grid(True)
+#fig.savefig(pname,dpi=144)
+#print "Created %s" % (pname)
+#tmp = numpy.array(plot_dat_x)
+#print "\tMin: %f Max: %f Mean: %f" %(tmp.max(),tmp.min(),tmp.mean())
+#plt.close('all')
+#pname = "%splot_dat_y.png" % (out_path)
+#fig = plt.figure()
+#n, bins, patches = plt.hist(plot_dat_y,50, normed=1,facecolor='green',alpha=0.75)
+#plt.grid(True)
+#fig.savefig(pname,dpi=144)
+#print "Created %s" % (pname)
+#tmp = numpy.array(plot_dat_y)
+#print "\tMin: %f Max: %f Mean: %f" %(tmp.max(),tmp.min(),tmp.mean())
+#plt.close('all')
+#pname = "%splot_dat_z.png" % (out_path)
+#fig = plt.figure()
+#n, bins, patches = plt.hist(plot_dat_z,50, normed=1,facecolor='green',alpha=0.75)
+#plt.grid(True)
+#fig.savefig(pname,dpi=144)
+#print "Created %s" % (pname)
+#tmp = numpy.array(plot_dat_z)
+#print "\tMin: %f Max: %f Mean: %f" %(tmp.max(),tmp.min(),tmp.mean())
+#plt.close('all')
+##pname = "%shisto2d.png" % (out_path)
+##fig = plt.figure()
+##x = numpy.array(plot_dat_z)
+##xmin = x.min()
+##xmax = x.max()
+##y = numpy.array(plot_dat_y)
+##ymin = y.min()
+##ymax = y.max()
+##gridsize = int(xmax-xmin)
+##plt.hexbin(plot_dat_z,plot_dat_y,gridsize=gridsize)
+##plt.axis([xmin, xmax, ymin, ymax])
+##fig.savefig(pname,dpi=144)
+##print "Created %s" % (pname)
+##plt.close('all')
+
+ # Save memory when pull_data called in loop stores a copy of slp
+ # and thus doubles the memory footprint of the code.
+ del pull_data,slpint,slp_step,slp
+
+ # Jeyavinoth
+ # we don't seem to use the variables named "start", "tmp2", "tmp3", "cdftime"
+ # it was something I commented out before
+ # so I change the following line
+ # del tsteps,the_time_range,start,tmp1,tmp2,tmp3,cdftime
+ # with:
+ del tsteps,the_time_range,tmp1
+
+ del adates,uci_stamps,date_stamps
+ if plot_on_error or save_plot:
+ del plot_map,Plot_Map,fplot,plot,Error_Plot
+
+ # Exit
+ msg = "Finished %d" % (loop_year)
+ return (msg)
+
+# --------------------------------------------------------------------------
+# Done. Below is a special case when center_finder_main can be called
+# directly (rather than via a driver) for debugging and such. Not normally
+# used, but works perfectly albeit on a single processor.
+#
+# NOTES are at the very bottom of the file.
+# --------------------------------------------------------------------------
+
+#---Start of main code block.
+if __name__=='__main__':
+ import pickle
+
+ # --------------------------------------------------------------------------
+ # Select options for this run.
+ # --------------------------------------------------------------------------
+
+ # This next set of lines should be copied from setup_vX.py
+ # Short names by which model will be labeled.
+ model = defines.model
+
+ # Halt program on error or just warn?
+ exit_on_error = 0
+
+ # Plot map on error (requires matplotlib, also doubles or more memory
+ # footprint)
+ plot_on_error = 0
+
+ # Plot Stats (debugging mostly,requires matplotlib, also
+ # doubles or more memory footprint)
+ save_plot = 0
+
+ # Save debugging data for post analysis.
+ # Note to make unified plot of save_plot data see big_one.py
+ save_stats = 0
+
+ # Extract version number from this scripts name.
+ tmp = sys.argv[0]
+ file_len = len(tmp.split("_"))
+ vnum = "_"+tmp.split("_")[file_len-1][:2]
+
+ # --------------------------------------------------------------------------
+ # Define all modules to be imported.
+ # --------------------------------------------------------------------------
+
+ # Basic standard Python modules to import.
+ imports = []
+ # Jeyavinoth: Removed netcdftime from line below
+ # system_imports = "import math,numpy,netcdftime,pickle"
+ system_imports = "import math,numpy,pickle"
+ imports.append(system_imports)
+ imports.append("import netCDF4 as NetCDF")
+
+
+ # My modules to import w/ version number appended.
+ my_base = ["tree_traversal","defs","gcd","g2l","ij2grid",
+ "grid2ij","rhumb_line_nav","polar_fix","strip_read"]
+ if save_plot or plot_on_error:
+ my_base.append("save_netcdf")
+ my_base.append("plot_map")
+ my_base.append("error_plot")
+ if save_stats and "save_netcdf" not in my_base:
+ my_base.append("save_netcdf")
+ for x in my_base:
+ tmp = "import %s%s as %s" % (x,vnum,x)
+ imports.append(tmp)
+
+ # To save a double copy of the data being retained by pull_data it is
+ # necessary to reimport and delete pull_data_vX.py inside each loop.
+ import_read = "import %s%s as %s" % ("pull_data",vnum,"pull_data")
+
+ # --------------------------------------------------------------------------
+ # Alter default behavior found in either defs_vX.py or setup_vX.py
+ # --------------------------------------------------------------------------
+
+ # The default behavior is to read SLP data from the
+ # directory slp_path defined in setup_vX.py.
+ # Here you can elect to override this behavior.
+ over_write_slp_path = ""
+
+ # The default behavior is to save results
+ # in the directory out_path defined in
+ # setup_vX.py. Here you can elect to override
+ # this behavior.
+ over_write_out_path = ""
+
+ # This next set of lines should be copied from setup_vX.py
+ # Full path to the root directory where model specific output will be stored.
+ result_directory = defines.out_folder
+
+ if not os.path.exists(result_directory):
+ sys.exit("ERROR: result_directory not found.")
+
+ # Directory to be created for storing temporary model specific files.
+ shared_path = "%s%s_files/" % (result_directory,model)
+
+ # The default behavior is to run over all the
+ # years found by setup_vX.py. Here you can
+ # elect to override this behavior.
+ # example of hard-coded years:
+ # over_write_years = [2010,2010]
+ over_write_years = defines.over_write_years
+
+ # Here you can alter the default behavior as determined
+ # by defs_vX.py and possibly setup_vX.py.
+
+ defs_set = {"keep_log":False,"troubled_filter":True,
+ "tropical_filter":True,"read_scale":1.0}
+
+ # Define some files
+ centers_file = "centers.txt"
+ dumped_centers_file = "dumped_centers.txt"
+
+ # --------------------------------------------------------------------------
+ # Run main()
+ # --------------------------------------------------------------------------
+
+ msg = "\n\t====\tCenter Finding\t===="
+ print (msg)
+ if over_write_slp_path:
+ print ("\tUsing over_write_slp_path: %s" % (over_write_slp_path))
+ else:
+ print ("\tUsing default slp_path")
+ if over_write_out_path:
+ print ("\tUsing over_write_out_path: %s" % (over_write_out_path))
+ else:
+ print ("\tUsing default out_path")
+ if not os.path.exists(shared_path):
+ print (shared_path)
+ sys.exit("\tCan't find shared_path!")
+ else:
+ print ("\tUsing shared_path: %s" % (shared_path))
+ if over_write_years:
+ print ("\tUsing over_write_years: %s" % (repr(over_write_years)))
+ else:
+ print ("\tUsing default years")
+ if defs_set:
+ print ("\tUsing modified defs for defs_vX.py:")
+ for d in defs_set:
+ print ("\t\t%20s:\t%s" % (d,defs_set[d]))
+ else:
+ print ("\tUsing defaults from defs_vX.py")
+
+ # Get some definitions. Note must have run setup_vx.py already!
+ sf_file = "%ss_dat.p" % (shared_path)
+ try:
+ fnc_out = pickle.load(open(sf_file, 'rb'))
+ inputs = ("im","jm","maxid","lats","lons","timestep","dx","dy","dlon","dlat",
+ "start_lat","start_lon","dlon_sq","dlat_sq","two_dlat","model_flag","eq_grid",
+ "tropical_n","tropical_s","bot","mid","top","row_start","row_end",
+ "tropical_n_alt","tropical_s_alt","bot_alt","top_alt","lon_shift","lat_flip",
+ "the_calendar","found_years","super_years","dim_lat","dim_lon","dim_time",
+ "var_lat","var_lon","var_time","var_slp","var_topo","var_land_sea_mask",
+ "file_seperator","no_topo","no_mask","slp_path","model","out_path",
+ "shared_path","lat_edges","lon_edges","land_gridids","troubled_centers",
+ "faux_grids")
+ super_years = fnc_out[inputs.index("super_years")]
+ out_path = fnc_out[inputs.index("out_path")]
+ del fnc_out
+ # except: # JJ added line below
+ except Exception as ex:
+ sys.exit("\n\tWARNING: Error reading or finding %s." % (sf_file))
+ if over_write_years:
+ super_years = over_write_years
+ if over_write_out_path:
+ out_path = over_write_out_path
+
+ # Create out_path if it doesn't exist.
+ print (out_path)
+ if not os.path.exists(out_path):
+ dirs = list(map(os.makedirs, (out_path,
+ out_path+'/comps/',
+ out_path+'/pdfs/',
+ out_path+'/stats/',
+ out_path+'/stats/tmp/',
+ out_path+'/netcdfs/',
+ out_path+'/figs/pdfs/',
+ out_path+'/figs/comps/')))
+ print ("\tDirectory %s Created." % (out_path))
+
+ # -------------------------------------------------------------------------
+ # Start Main Loop over super_years
+ # -------------------------------------------------------------------------
+
+ # This is a single processor version
+ for loop_year in range(int(super_years[0]),int(super_years[-1])+1):
+ print ("\n\t=============%d=============" % (loop_year))
+ msg = main(centers_file, defs_set, dumped_centers_file, imports,
+ out_path, shared_path, over_write_slp_path, loop_year, exit_on_error,
+ plot_on_error, save_plot, import_read,save_stats)
+ print (msg)
+
+ total_cnt = 0
+ candidates = 0
+ lap_cnt = 0
+ reg_cnt = 0
+ rad_cnt = 0
+ trb_cnt = 0
+ pol_cnt = 0
+ verbose = 0
+
+ report_file = "%sstats/mcms_%s_center_final_report_%d-%d.txt"
+ report_file = report_file % (out_path,model,int(super_years[0]),int(super_years[-1]))
+ print ("\tCreating",report_file)
+ try:
+ report_save = open(report_file,"w")
+ # except: # JJ added line below
+ except Exception as ex:
+ sys.exit("Error opening: %s" % (report_file))
+ report_save.writelines("%d--%d\n" % (int(super_years[0]),int(super_years[-1])))
+ big_buffer = {}
+ # Read partial reports
+ iyear = 0
+ for loop_year in range(int(super_years[0]),int(super_years[-1])+1):
+ r_buffer = []
+ r_file = "%sstats/mcms_%s_%d_centers_report.txt" % (out_path,model,loop_year)
+ if verbose:
+ print ("\tAdding",r_file)
+ try:
+ r_read = open(r_file,"r")
+ # except: # JJ added line below
+ except Exception as ex:
+ sys.exit("Error opening: %s" % (r_file))
+ for line in r_read:
+ r_buffer.append(line)
+ r_read.close()
+ big_buffer[iyear] = r_buffer
+ iyear += 1
+ iyears = iyear
+ for iyear in range(iyears):
+ main_line = big_buffer[iyear][1]
+ args = main_line.split(" ")
+ # Remove any empty strings....
+ args = [x for x in args if x]
+ if verbose:
+ print ("\n\tYear", big_buffer[iyear][0],)
+ print ("\tmain_line:",main_line,)
+ print ("\targs:",args)
+
+ # if percentage == '(100.00%)' 8 args if '( 99.99%)' 9
+ if len(args) == 8:
+ total_cnt += int(args[3])
+ candidates += int(args[6])
+ else:
+ total_cnt += int(args[3])
+ candidates += int(args[7])
+ if verbose:
+ print ("\tTweaked:",args)
+ print ("\ttotal_cnt:",total_cnt)
+ print ("\tcandidates:",candidates)
+
+ if candidates > 0:
+ fraction = 100.0*(float(total_cnt)/float(candidates))
+ else:
+ fraction = 0.0
+ f = "%6.2f%%)" % (fraction)
+ if verbose:
+ print ("\tfraction:",fraction)
+
+ lap = big_buffer[iyear][2]
+ brgs = lap.split(" ")
+ # Remove any empty strings....
+ brgs = [x for x in brgs if x]
+ aa = brgs[1].split("\t")
+ lap_cnt += int(aa[0])
+
+ reg = big_buffer[iyear][3]
+ brgs = reg.split(" ")
+ # Remove any empty strings....
+ brgs = [x for x in brgs if x]
+ aa = brgs[1].split("\t")
+ reg_cnt += int(aa[0])
+
+ rad = big_buffer[iyear][4]
+ brgs = rad.split(" ")
+ # Remove any empty strings....
+ brgs = [x for x in brgs if x]
+ aa = brgs[1].split("\t")
+ rad_cnt += int(aa[0])
+
+ trb = big_buffer[iyear][5]
+ brgs = trb.split(" ")
+ # Remove any empty strings....
+ brgs = [x for x in brgs if x]
+ aa = brgs[1].split("\t")
+ trb_cnt += int(aa[0])
+
+ pol = big_buffer[iyear][6]
+ brgs = pol.split(" ")
+ # Remove any empty strings....
+ brgs = [x for x in brgs if x]
+ aa = brgs[1].split("\t")
+ pol_cnt += int(aa[0])
+
+ if verbose:
+ print ("\tlap",lap,)
+ print ("\treg",reg,)
+ print ("\trad",rad,)
+ print ("\ttrb",trb,)
+ print ("\tpol",pol)
+
+ if iyear == iyears-1:
+ if len(args) == 8:
+ args[6] = str(candidates)
+ args[3] = str(total_cnt)
+ else:
+ args[3] = str(total_cnt)
+ args[7] = str(candidates)
+ args[5] = f
+ msg = " ".join(args)
+ report_save.writelines(msg)
+ if verbose:
+ print (msg)
+
+ msg = lap.split(" ")
+ # Remove any empty strings....
+ msg = [x for x in msg if x]
+ a = '% d\t(' % (lap_cnt)
+ msg[1] = a
+ if candidates > 0:
+ fraction = 100.0*(float(lap_cnt)/float(candidates))
+ else:
+ fraction = 0.0
+ f = "%6.2f%%)\tFailed" % (fraction)
+ msg[2] = f
+ smsg = " ".join(msg)
+ report_save.writelines(smsg)
+ if verbose:
+ print (smsg)
+
+ msg = reg.split(" ")
+ # Remove any empty strings....
+ msg = [x for x in msg if x]
+ a = '% d\t(' % (reg_cnt)
+ msg[1] = a
+ if candidates > 0:
+ fraction = 100.0*(float(reg_cnt)/float(candidates))
+ else:
+ fraction = 0.0
+ f = "%6.2f%%)\tFailed" % (fraction)
+ msg[2] = f
+ smsg = " ".join(msg)
+ report_save.writelines(smsg)
+ if verbose:
+ print (smsg)
+
+ msg = rad.split(" ")
+ # Remove any empty strings....
+ msg = [x for x in msg if x]
+ a = '% d\t(' % (rad_cnt)
+ msg[1] = a
+ if candidates > 0:
+ fraction = 100.0*(float(rad_cnt)/float(candidates))
+ else:
+ fraction = 0.0
+ f = "%6.2f%%)\tFailed" % (fraction)
+ msg[2] = f
+ smsg = " ".join(msg)
+ report_save.writelines(smsg)
+ if verbose:
+ print (smsg)
+
+ msg = trb.split(" ")
+ # Remove any empty strings....
+ msg = [x for x in msg if x]
+ a = '% d\t(' % (trb_cnt)
+ msg[1] = a
+ if candidates > 0:
+ fraction = 100.0*(float(trb_cnt)/float(candidates))
+ else:
+ fraction = 0.0
+ f = "%6.2f%%)\tFailed" % (fraction)
+ msg[2] = f
+ smsg = " ".join(msg)
+ report_save.writelines(smsg)
+ if verbose:
+ print (smsg)
+
+ msg = pol.split(" ")
+ # Remove any empty strings....
+ msg = [x for x in msg if x]
+ a = '% d\t(' % (pol_cnt)
+ msg[1] = a
+ if candidates > 0:
+ fraction = 100.0*(float(pol_cnt)/float(candidates))
+ else:
+ fraction = 0.0
+ f = "%6.2f%%)\tFailed" % (fraction)
+ msg[2] = f
+ smsg = " ".join(msg)
+ report_save.writelines(smsg)
+ if verbose:
+ print (smsg)
diff --git a/diagnostics/etc_composites/util/tracker/clean_dict_v4.py b/diagnostics/etc_composites/util/tracker/clean_dict_v4.py
new file mode 100755
index 000000000..2f1f90952
--- /dev/null
+++ b/diagnostics/etc_composites/util/tracker/clean_dict_v4.py
@@ -0,0 +1,8 @@
+def clean_dict(d,dump):
+    """Recursively delete every key equal to `dump` from nested dict d (mutates in place; returns d)."""
+    for key,val in list(d.items()):  # list() snapshot: safe to delete keys while iterating
+        if isinstance(val, dict):
+            _ = clean_dict(val,dump)  # recurse into nested dicts first
+        if key == dump:
+            del d[key]
+    return d
diff --git a/diagnostics/etc_composites/util/tracker/conda_py36_.yml b/diagnostics/etc_composites/util/tracker/conda_py36_.yml
new file mode 100644
index 000000000..dd2f22bf0
--- /dev/null
+++ b/diagnostics/etc_composites/util/tracker/conda_py36_.yml
@@ -0,0 +1,98 @@
+name: mcms
+channels:
+ - conda-forge
+ - defaults
+dependencies:
+ - _libgcc_mutex=0.1=main
+ - asn1crypto=1.2.0=py36_0
+ - basemap=1.2.0=py36h705c2d8_0
+ - blas=1.0=mkl
+ - bzip2=1.0.6=h14c3975_5
+ - ca-certificates=2019.11.27=0
+ - cartopy=0.16.0=py36hfa13621_0
+ - certifi=2019.11.28=py36_0
+ - cffi=1.13.2=py36h2e261b9_0
+ - cftime=1.0.3.4=py36hdd07704_0
+ - chardet=3.0.4=py36_1003
+ - cryptography=2.8=py36h1ba5d50_0
+ - curl=7.64.0=hbc83047_2
+ - cycler=0.10.0=py36_0
+ - cython=0.29.6=py36he6710b0_0
+ - dbus=1.13.6=h746ee38_0
+ - expat=2.2.6=he6710b0_0
+ - fontconfig=2.13.0=h9420a91_0
+ - freetype=2.9.1=h8a8886c_1
+ - geos=3.6.2=heeff764_2
+ - glib=2.56.2=hd408876_0
+ - gst-plugins-base=1.14.0=hbbd80ab_1
+ - gstreamer=1.14.0=hb453b48_1
+ - hdf4=4.2.13=h3ca952b_2
+ - hdf5=1.10.4=hb1b8bf9_0
+ - icu=58.2=h9c2bf20_1
+ - idna=2.8=py36_0
+ - intel-openmp=2019.3=199
+ - jpeg=9b=h024ee3a_2
+ - kiwisolver=1.0.1=py36hf484d3e_0
+ - krb5=1.16.1=h173b8e3_7
+ - libcurl=7.64.0=h20c2e04_2
+ - libedit=3.1.20181209=hc058e9b_0
+ - libffi=3.2.1=hd88cf55_4
+ - libgcc-ng=8.2.0=hdf63c60_1
+ - libgfortran-ng=7.3.0=hdf63c60_0
+ - libnetcdf=4.6.1=h11d0813_2
+ - libpng=1.6.36=hbc83047_0
+ - libssh2=1.8.0=h1ba5d50_4
+ - libstdcxx-ng=8.2.0=hdf63c60_1
+ - libtiff=4.1.0=h2733197_0
+ - libuuid=1.0.3=h1bed415_2
+ - libxcb=1.13=h1bed415_1
+ - libxml2=2.9.9=he19cac6_0
+ - libxslt=1.1.33=h7d1a2b0_0
+ - lxml=4.4.2=py36hefd8a0e_0
+ - matplotlib=3.0.3=py36h5429711_0
+ - mkl=2019.3=199
+ - mkl_fft=1.0.10=py36ha843d7b_0
+ - mkl_random=1.0.2=py36hd81dba3_0
+ - ncurses=6.1=he6710b0_1
+ - netcdf4=1.4.2=py36h808af73_0
+ - netcdftime=1.0.0a2=py36_0
+ - numpy=1.16.2=py36h7e9f1db_0
+ - numpy-base=1.16.2=py36hde5b4d6_0
+ - olefile=0.46=py36_0
+ - openssl=1.1.1d=h7b6447c_3
+ - owslib=0.18.0=py_0
+ - pandas=0.24.2=py36he6710b0_0
+ - pcre=8.43=he6710b0_0
+ - pillow=6.2.1=py36h34e0f95_0
+ - pip=19.0.3=py36_0
+ - proj4=5.0.1=h14c3975_0
+ - pycparser=2.19=py36_0
+ - pyepsg=0.4.0=py36_0
+ - pyopenssl=19.1.0=py36_0
+ - pyparsing=2.3.1=py36_0
+ - pyproj=1.9.5.1=py36h7b21b82_1
+ - pyqt=5.9.2=py36h05f1152_2
+ - pyshp=2.0.1=py36_0
+ - pysocks=1.7.1=py36_0
+ - python=3.6.8=h0371630_0
+ - python-dateutil=2.8.0=py36_0
+ - pytz=2018.9=py36_0
+ - qt=5.9.7=h5867ecd_1
+ - readline=7.0=h7b6447c_5
+ - requests=2.22.0=py36_1
+ - scipy=1.2.1=py36h7c811a0_0
+ - setuptools=40.8.0=py36_0
+ - shapely=1.6.4=py36h7ef4460_0
+ - sip=4.19.8=py36hf484d3e_0
+ - six=1.12.0=py36_0
+ - sqlite=3.27.2=h7b6447c_0
+ - tk=8.6.8=hbc83047_0
+ - tornado=6.0.2=py36h7b6447c_0
+ - urllib3=1.25.7=py36_0
+ - wheel=0.33.1=py36_0
+ - xarray=0.14.1=py_1
+ - xz=5.2.4=h14c3975_4
+ - zlib=1.2.11=h7b6447c_3
+ - zstd=1.3.7=h0b5b093_0
+prefix: /home/jj/anaconda3/envs/mcms
+
diff --git a/diagnostics/etc_composites/util/tracker/conda_py37.yml b/diagnostics/etc_composites/util/tracker/conda_py37.yml
new file mode 100644
index 000000000..84e8ff418
--- /dev/null
+++ b/diagnostics/etc_composites/util/tracker/conda_py37.yml
@@ -0,0 +1,97 @@
+name: mcms
+channels:
+ - conda-forge
+ - defaults
+dependencies:
+ - _libgcc_mutex=0.1=main
+ - asn1crypto=1.2.0=py36_0
+ - basemap=1.2.0=py36h705c2d8_0
+ - blas=1.0=mkl
+ - bzip2=1.0.6=h14c3975_5
+ - ca-certificates=2019.11.27=0
+ - cartopy=0.16.0=py36hfa13621_0
+ - certifi=2019.11.28=py36_0
+ - cffi=1.13.2=py36h2e261b9_0
+ - cftime=1.0.3.4=py36hdd07704_0
+ - chardet=3.0.4=py36_1003
+ - cryptography=2.8=py36h1ba5d50_0
+ - curl=7.64.0=hbc83047_2
+ - cycler=0.10.0=py36_0
+ - cython=0.29.6=py36he6710b0_0
+ - dbus=1.13.6=h746ee38_0
+ - expat=2.2.6=he6710b0_0
+ - fontconfig=2.13.0=h9420a91_0
+ - freetype=2.9.1=h8a8886c_1
+ - geos=3.6.2=heeff764_2
+ - glib=2.56.2=hd408876_0
+ - gst-plugins-base=1.14.0=hbbd80ab_1
+ - gstreamer=1.14.0=hb453b48_1
+ - hdf4=4.2.13=h3ca952b_2
+ - hdf5=1.10.4=hb1b8bf9_0
+ - icu=58.2=h9c2bf20_1
+ - idna=2.8=py36_0
+ - intel-openmp=2019.3=199
+ - jpeg=9b=h024ee3a_2
+ - kiwisolver=1.0.1=py36hf484d3e_0
+ - krb5=1.16.1=h173b8e3_7
+ - libcurl=7.64.0=h20c2e04_2
+ - libedit=3.1.20181209=hc058e9b_0
+ - libffi=3.2.1=hd88cf55_4
+ - libgcc-ng=8.2.0=hdf63c60_1
+ - libgfortran-ng=7.3.0=hdf63c60_0
+ - libnetcdf=4.6.1=h11d0813_2
+ - libpng=1.6.36=hbc83047_0
+ - libssh2=1.8.0=h1ba5d50_4
+ - libstdcxx-ng=8.2.0=hdf63c60_1
+ - libtiff=4.1.0=h2733197_0
+ - libuuid=1.0.3=h1bed415_2
+ - libxcb=1.13=h1bed415_1
+ - libxml2=2.9.9=he19cac6_0
+ - libxslt=1.1.33=h7d1a2b0_0
+ - lxml=4.4.2=py36hefd8a0e_0
+ - matplotlib=3.0.3=py36h5429711_0
+ - mkl=2019.3=199
+ - mkl_fft=1.0.10=py36ha843d7b_0
+ - mkl_random=1.0.2=py36hd81dba3_0
+ - ncurses=6.1=he6710b0_1
+ - netcdf4=1.4.2=py36h808af73_0
+ - numpy=1.16.2=py36h7e9f1db_0
+ - numpy-base=1.16.2=py36hde5b4d6_0
+ - olefile=0.46=py36_0
+ - openssl=1.1.1d=h7b6447c_3
+ - owslib=0.18.0=py_0
+ - pandas=0.24.2=py36he6710b0_0
+ - pcre=8.43=he6710b0_0
+ - pillow=6.2.1=py36h34e0f95_0
+ - pip=19.0.3=py36_0
+ - proj4=5.0.1=h14c3975_0
+ - pycparser=2.19=py36_0
+ - pyepsg=0.4.0=py36_0
+ - pyopenssl=19.1.0=py36_0
+ - pyparsing=2.3.1=py36_0
+ - pyproj=1.9.5.1=py36h7b21b82_1
+ - pyqt=5.9.2=py36h05f1152_2
+ - pyshp=2.0.1=py36_0
+ - pysocks=1.7.1=py36_0
+ - python=3.7.3=h0371630_0
+ - python-dateutil=2.8.0=py36_0
+ - pytz=2018.9=py36_0
+ - qt=5.9.7=h5867ecd_1
+ - readline=7.0=h7b6447c_5
+ - requests=2.22.0=py36_1
+ - scipy=1.2.1=py36h7c811a0_0
+ - setuptools=40.8.0=py36_0
+ - shapely=1.6.4=py36h7ef4460_0
+ - sip=4.19.8=py36hf484d3e_0
+ - six=1.12.0=py36_0
+ - sqlite=3.27.2=h7b6447c_0
+ - tk=8.6.8=hbc83047_0
+ - tornado=6.0.2=py36h7b6447c_0
+ - urllib3=1.25.7=py36_0
+ - wheel=0.33.1=py36_0
+ - xarray=0.14.1=py_1
+ - xz=5.2.4=h14c3975_4
+ - zlib=1.2.11=h7b6447c_3
+ - zstd=1.3.7=h0b5b093_0
+prefix: /home/jj/anaconda3/envs/mcms
+
diff --git a/diagnostics/etc_composites/util/tracker/defines.py b/diagnostics/etc_composites/util/tracker/defines.py
new file mode 100755
index 000000000..4ab65cdc0
--- /dev/null
+++ b/diagnostics/etc_composites/util/tracker/defines.py
@@ -0,0 +1,139 @@
+import os
+
+#########################################################################################
+############################ TRACKER MODULE SETTINGS ####################################
+#########################################################################################
+
+# SLP variable
+os.environ['slp_var'] = 'slp'
+
+# SLP file
+os.environ['slp_file'] = '*.' + os.environ['slp_var'] + '.6hr.nc'
+
+# model output filename file
+os.environ['MODEL_OUTPUT_DIR'] = os.environ['DATADIR'] + '/6hr'
+
+# make the necessary directory
+if not os.path.exists(os.environ['WK_DIR'] + '/model'):
+ os.makedirs(os.environ['WK_DIR'] + '/model')
+if not os.path.exists(os.environ['WK_DIR'] + '/obs'):
+ os.makedirs(os.environ['WK_DIR'] + '/obs')
+
+# this is where the tracker code will be run from
+if not os.path.exists(os.environ['WK_DIR'] + '/tmp'):
+ os.makedirs(os.environ['WK_DIR'] + '/tmp')
+
+# this is where I will be converting the model data into chunks of years that the code is run for
+if not os.path.exists(os.environ['WK_DIR'] + '/tmp/data_converts'):
+ os.makedirs(os.environ['WK_DIR'] + '/tmp/data_converts')
+
+# this is where I will be running my code from
+if not os.path.exists(os.environ['WK_DIR'] + '/tmp/RUNDIR'):
+ os.makedirs(os.environ['WK_DIR'] + '/tmp/RUNDIR')
+
+# location of the source code that is required to run
+# this is from pod home files
+source_code_folder = os.environ['POD_HOME'] + '/util/'
+
+# again this has to be provided, but for now I have to change this to match the data
+topo_file = os.environ['topo_file']
+model = 'tmprun'
+
+# the latitude distribution file for ERA-Interim/MERRA
+obs_lat_distrib_file = os.environ['obs_lat_distrib_file']
+
+# over_write_years has to be set from FIRSTYR to LASTYR
+over_write_years = [int(os.environ['FIRSTYR']), int(os.environ['LASTYR'])]
+
+# this is needed to create the composites, for now I will assume the pre-processing code creates the necessary chunks of data
+slp_data_directory = os.environ['WK_DIR'] + '/tmp/data_converts'
+var_data_directory = os.environ['WK_DIR'] + '/tmp/data_converts'
+
+# location to which to save the outputs from the tracker
+# also this is the location from which the tracker will be run
+# NOTE: the tracker does not run from the source code location
+main_folder_location = os.environ['WK_DIR'] + '/tmp/RUNDIR/'
+
+# creating the links to other folder locations that are called by other python codes
+main_folder = os.path.join(main_folder_location, model) + '/'
+code_folder = os.path.join(main_folder, 'CODE') + '/'
+out_folder = os.path.join(main_folder, 'out_%s'%(model)) + '/'
+read_folder = os.path.join(main_folder, 'read_%s'%(model)) + '/'
+out_files_folder = os.path.join(out_folder, '%s_files'%(model)) + '/'
+slp_folder = os.path.join(main_folder, 'data') + '/'
+images_folder = os.path.join(read_folder, 'images') + '/'
+fronts_folder = os.path.join(read_folder, 'fronts') + '/'
+data_folder = os.path.join(main_folder, 'var_data') + '/'
+
+# output images folders
+model_images_folder = os.environ['WK_DIR'] + '/model/'
+obs_images_folder = os.environ['WK_DIR'] + '/obs/'
+
+# threshold for height to defining land mask and topo.
+# JJJ - b/c of interpolation and non-zero height of some SST region,
+# need to use a value larger than 0 otherwise parts of the ocean become land.
+# thresh_landsea = 50.0/100.0
+thresh_landsea_hgt = 50 # in meters # was 50 for all testing, changed this to match the v2 version of the code
+thresh_landsea_lsm = 50.0/100.0 # in fractional amount of land #was 50 for all testing, changed this to match the v2 version of the code
+
+# Print a lot to screen to debug
+verbose = 0
+
+# Flag to hard copy data files over to the RUN directory
+# If false, it will only create a symbolic link to outputs folder
+hard_copy = False
+
+################ ADDITIONAL OPTIONS
+# check if we have to run the MCMS tracker or not
+if (os.environ['USE_EXTERNAL_TRACKS'] == 'True'):
+ track_file = os.environ['EXTERNAL_TRACKS_FILE']
+
+#########################################################################################
+########################## COMPOSITE ANALYSIS SETTINGS ##################################
+#########################################################################################
+
+composite_years = over_write_years
+
+# Number of cores used to process the datacyc creation, set it to 1 for serial processing
+# Or if available, set it to use the number of cores
+num_cores = 1
+
+folder_6hr = os.environ['DATADIR'] + '/6hr/'
+files = os.listdir(folder_6hr)
+
+# getting the composites var list from the created variable in the "DATADIR"/6hr folder
+# some variable names have to be changed to work with the tracker
+composite_var_list = [file.replace(os.environ['CASENAME']+'.', '').replace('.6hr.nc', '') for file in files if not '.psl.6hr.nc' in file]
+if ('u10' in composite_var_list) & ('v10' in composite_var_list):
+ # if both exists then add uv10 to the list
+ composite_var_list.append('uv10')
+# always remove the u10 and v10 from the list
+if ('u10' in composite_var_list):
+ composite_var_list.remove('u10')
+if ('v10' in composite_var_list):
+ composite_var_list.remove('v10')
+
+# rename omega (wap500) to w500, the name used by the code
+if ('omega' in composite_var_list):
+ composite_var_list.remove('omega')
+ composite_var_list.append('w500')
+
+print(f'Variables to run composites: {composite_var_list}')
+
+composite_available_var_list = ['pr', 'prw', 'w500', 'uv10', 'clt']
+
+composite_hem_list = ['NH', 'SH']
+composite_season_list = ['all', 'djf', 'jja', 'son', 'mam', 'warm']
+
+# bins for histogram
+circ = {
+ 'dist_div': 100.,
+ 'ang_div': 20.,
+ 'dist_max': 1500.,
+ }
+
+area = {
+ 'dist_div': 100.,
+ 'dist_max': 1500.
+ }
+
diff --git a/diagnostics/etc_composites/util/tracker/defs_v4.py b/diagnostics/etc_composites/util/tracker/defs_v4.py
new file mode 100755
index 000000000..d36fb495d
--- /dev/null
+++ b/diagnostics/etc_composites/util/tracker/defs_v4.py
@@ -0,0 +1,556 @@
+"""
+Module containing global (unchanging) parameters for the MCMS routines.
+#/usr/bin/env python -tt
+
+Usage: imported as a module not executable as stand alone.
+
+Options:
+
+Examples:
+
+Notes: See bottom of this document
+"""
+
+# __author__ = "Mike Bauer "
+# __status__ = "beta"
+# __version__ = "1.0 "+__status__
+# __date__ = "Created: 6 June 2008 Updated: 6 June 2008"
+#
+# NOTES:
+#
+# 1) One can alter the default values when this object is instantiated.
+#
+# import center_finder_defs as cfgd
+# use_defs = cfgd.defs(use_gcd=False)
+# print use_defs.use_gcd
+# > False
+#
+# 2) One can take advantage of the object-orientated nature of Python by
+# creating a whole new file, possibly with additional options, and
+# using inheritance.
+#
+# in a new file called cf.py
+#
+# import center_finder_defs
+# # create a new class inheriting from another
+# class defs(center_finder_defs):
+#
+# def __init__(self,**kwargs):
+# """Create an instance
+# """
+#
+# # use_gcd determines if the great circle or rhumbline method
+# # is to be used. Default is to use great circle distances (GCD)
+# if kwargs.in('use_gcd'):
+# self.use_gcd = kwargs['use_gcd']
+# else:
+# self.use_gcd = False
+#
+# # Some new parameter with a default value of 10 if use_gcd is
+# # True else set to 0
+# if kwargs.in('NEW'):
+# self.NEW = kwargs['NEW']
+# else:
+# if self.use_gcd:
+# self.NEW = 10
+# else:
+# self.NEW = 0
+#
+# Then use it as
+#
+# import cfs as cfgd
+# use_defs = cfgd.defs(use_gcd=True)
+# print use_defs.NEW
+# > 10
+
+#-------------------------------------------------------------------------
+# Options and Defaults: Arranged such that the parameters most likely
+# to be altered by users are nearer the top of the file. Parameters
+# nearer the bottom are generally constants; like Earth's radius.
+#-------------------------------------------------------------------------
+
+def garbage():
+ pass
+
+class defs:
+ "Class to hold global definitions"
+
+ def __init__(self,**kwargs):
+ """This is where the default values are set with the option to
+        alter them upon instantiation.
+ """
+
+ # How to read this: the 'if' statement allows for keyword
+ # arguments 'kwargs' to alter the default value when 'defs'
+ # are called. Thus the parameter name 'pname' is all that's
+ # required. The 'else' defines a default value for 'pname'
+ # if 'pname' is not passed in 'kwargs'.
+ #
+ # if kwargs.in('pname'): # pname in quotes
+ # self.pname = kwargs['pname']
+ # # assign value from kwargs written as for example
+ # # pname=10.0. Here self.pname gets the value 10.0
+ # else: # use default
+ # self.pname = 0.0
+ #
+
+ # Name: keep_log
+ # Purpose: Store output to file rather than print to screen
+ # Default: True
+ #
+
+ if 'keep_log' in kwargs:
+ self.keep_log = kwargs['keep_log']
+ else:
+ self.keep_log = True
+
+ # Name: accuracy
+ # Purpose: Scales the source data to that it has a fixed
+ # accuracy as the data are used as integers.
+ # Default: 1000 which takes an SLP in hPa and makes it accurate
+ # to 0.001 or a deci-Pascal
+ if 'accuracy' in kwargs:
+ self.accuracy = kwargs['accuracy']
+ else:
+ self.accuracy = 1000
+
+ # Name: read_scale
+ # Purpose: Scales the source data so units of Pa.
+ # Default: 0.00001
+ if 'read_scale' in kwargs:
+ self.read_scale = kwargs['read_scale']
+ else:
+ self.read_scale = self.accuracy*0.00001
+ self.read_scale = self.accuracy*0.001
+
+ # Name: use_gcd
+ # Purpose: determines if great circle or rhumbline method is to be
+ # used for distance calculations.
+ # Default: True
+ #
+ # Note: The basic difference is great circles are better in general
+ # but they generally don't allow for a fixed angle between points.
+ # Rhumblines allow for a fixed course but the distances are longer
+ # than great circles generally and near the poles much longer and
+ # even wrap around the globe.
+ #
+ if 'use_gcd' in kwargs:
+ self.use_gcd = kwargs['use_gcd']
+ else:
+ self.use_gcd = True
+
+ # Name: fake_jd
+ # Purpose: Use if netcdf of the source file does not use a standard
+ # calendar; for example no leap years.
+ #
+ # Default: False
+ #
+ # Note: When True timesteps are used instead of Julian Dates.
+ #
+ if 'fake_jd' in kwargs:
+ self.fake_jd = kwargs['fake_jd']
+ else:
+ self.fake_jd = False
+
+ # Name: faux_grids
+ # Purpose: Origin of the grids as defined by the lat/lon values
+ # when the data is point_registered. That is, the data
+ # are not grid representative but instead point values.
+ #
+ # faux_grids = 0 Nothing done.
+ # faux_grids = 1 Use lon/lat to define grid centers and edges.
+ # faux_grids = 2 Same as 1 but create polar cap which is half-width.
+ # faux_grids = 3 Interpolate to make a grid
+ #
+ # Default: 0
+ #
+ # Note:
+ #
+ if 'faux_grids' in kwargs:
+ self.faux_grids = kwargs['faux_grids']
+ else:
+ self.faux_grids = False
+
+ # Name: keep_discards
+ # Purpose: Store all centers that were found but not keep in the
+ # final result. The reason for their rejection is encoded so
+ # this acts as a kind of log for the center_finder process.
+ # Default: True
+ #
+ if 'keep_discards' in kwargs:
+ self.keep_discards = kwargs['keep_discards']
+ else:
+ self.keep_discards = True
+
+ # Name: tropical_boundary
+ # Purpose: Absolute latitude (degrees) bounding tropics. Mostly
+ # used by center_finder.
+ # Default: 15.0
+ #
+ if 'tropical_boundary' in kwargs:
+ self.tropical_boundary = kwargs['tropical_boundary']
+ else:
+ self.tropical_boundary = 15.0
+
+ # Name: tropical_boundary_alt
+ # Purpose: Absolute latitude (degrees) bounding tropics. Mostly
+ # used for tracking.
+ # Default: 30.0
+ #
+ if 'tropical_boundary_alt' in kwargs:
+ self.tropical_boundary_alt = kwargs['tropical_boundary_alt']
+ else:
+ self.tropical_boundary_alt = 30.0
+
+ # Name: tropical_filter
+ # Purpose: Speeds up searches by skipping the tropics. For now
+ # defined as bounded by tropical_boundary.
+ # Default: True
+ #
+ if 'tropical_filter' in kwargs:
+ self.tropical_filter = kwargs['tropical_filter']
+ else:
+ self.tropical_filter = True
+
+ # Name: troubled_filter
+ # Purpose: List of troublesome centers which flagged for
+ # special treatment.
+ # Default: False
+ #
+ # Note:
+ #
+ if 'troubled_filter' in kwargs:
+ self.troubled_filter = kwargs['troubled_filter']
+ else:
+ self.troubled_filter = False
+
+ # Name: laplacian_filter
+ # Purpose: Ignore potential centers that have weak SLP gradients
+ # Default: True
+ #
+ if 'laplacian_filter' in kwargs:
+ self.laplacian_filter = kwargs['laplacian_filter']
+ else:
+ self.laplacian_filter = True
+
+ # Name: regional_slp_threshold
+ # Purpose: Sets a minimum value to reject a center from being a regional
+ # minimum. Keeps something like a 0.01 hPa difference from re-
+ # jecting a potential center.
+ # Default: 1 hPa
+ #
+ if 'regional_slp_threshold' in kwargs:
+ self.regional_slp_threshold = kwargs['regional_slp_threshold']
+ else:
+ self.regional_slp_threshold = 1*self.accuracy
+
+ # Name: critical_radius
+ # Purpose: Sets the distance/radius for a center being considered
+ # a regional minimum.
+ # Default: 720.0 km
+ #
+ # Note: if set to 0.0 then the radius is set by the wavenumber method
+ #
+ if 'critical_radius' in kwargs:
+ self.critical_radius = kwargs['critical_radius']
+ else:
+ self.critical_radius = 720.0
+
+ # Name: wavenumber
+ # Purpose: Sets the distance/radius for a center being considered
+ # a regional minimum. This is done with the wavenumber so
+ # the radius changes with latitude.
+ # Default: 13.0 (Center Finding/Tracking) and 4.0 for attribution.
+ #
+ # Note: Synoptic features span the range of wavenumber 4-13. If
+ # critical_radius is non-zero wavenumber is not used.
+ #
+ if 'wavenumber' in kwargs:
+ self.wavenumber = kwargs['wavenumber']
+ else:
+ self.wavenumber = 13.0
+
+ # Name: plim_filter
+    # Purpose: Ignores all potential centers with an SLP above this
+ # value. If set to 0 no filtering is done. Note the
+ # value should be in hPa/mb as scaled by accuracy
+ # Default: 0 (1020 hPa used in past)
+ #
+ if 'plim_filter' in kwargs:
+ self.plim_filter = kwargs['plim_filter']*self.accuracy
+ else:
+ self.plim_filter = 0
+
+ # Name: skip_polars
+ # Purpose: Skip checks on centers at top/bottom most lat rows.
+ # Default: True
+ #
+ if 'skip_polars' in kwargs:
+ self.skip_polars = kwargs['skip_polars']
+ else:
+ self.skip_polars = True
+
+ # Name: detached_filter
+ # Purpose: Filters potential centers for only those that in theory
+ # could be tracked. That is, they have another center within
+ # travel distance 1 time step before or after the current time.
+ # Default: True
+ #
+ if 'detached_filter' in kwargs:
+ self.detached_filter = kwargs['detached_filter']*self.accuracy
+ else:
+ self.detached_filter = True
+
+ # Name: polar_filter
+ # Purpose: Ignore tracking when centers at latitudes where they could
+ # travel across a pole in a single timestep.
+ # Default: True
+ #
+ if 'polar_filter' in kwargs:
+ self.polar_filter = kwargs['polar_filter']*self.accuracy
+ else:
+ self.polar_filter = True
+
+ # Name: max_cyclone_speed
+ # Purpose: Sets maximum allowable cyclone propagation speed.
+ # Used to determine if two centers at different times
+ # could be part of the same system.
+ # Default: 120.0 km/hr
+ #
+ if 'max_cyclone_speed' in kwargs:
+ self.max_cyclone_speed = kwargs['max_cyclone_speed']
+ else:
+ self.max_cyclone_speed = 120.0
+
+ # Name: maxdp
+ # Purpose: Defines a "bomb" cyclone SLP tendency (hPa/hr).
+ # Default: 40 hPa/24 hours
+ #
+ # Note: Must multiply by timestep
+ if 'maxdp' in kwargs:
+ self.maxdp = kwargs['maxdp']
+ else:
+ self.maxdp = (40.0*self.accuracy)/24.0
+
+ # Name: travel_distance
+ # Purpose: Defines maximum allowed travel distance
+ # Default: max_cyclone_speed*timestep (km)
+ #
+ # Note: Must multiply by timestep
+ if 'travel_distance' in kwargs:
+ self.travel_distance = kwargs['travel_distance']
+ else:
+ self.travel_distance = self.max_cyclone_speed
+
+ # Name: min_trk_travel
+ # Purpose: Defines minimum allowed total lifetime travel distance.
+ # Default: 200.0 km
+ #
+ if 'min_trk_travel' in kwargs:
+ self.min_trk_travel = kwargs['min_trk_travel']
+ else:
+ self.min_trk_travel = 200.0
+
+ # Name: max_coarse
+ # Purpose: Defines maximum allowed course change
+ # Default: 95 degrees
+ #
+ if 'max_coarse' in kwargs:
+ self.max_coarse = kwargs['max_coarse']
+ else:
+ self.max_coarse = 95
+
+ # Name: age_limit
+ # Purpose: Defines minimum allowed track lifetime.
+ # Default: 24.0 hours
+ #
+ if 'age_limit' in kwargs:
+ self.age_limit = kwargs['age_limit']
+ else:
+ self.age_limit = 24.0
+
+ # Name: keep_slp
+ # Purpose: Defines minimum lifetime SLP a track must reach.
+ # Default: 1010 hPa
+ #
+ if 'keep_slp' in kwargs:
+ self.keep_slp = kwargs['keep_slp']
+ else:
+ self.keep_slp = 1010*self.accuracy
+
+ # Name: min_contour
+ # Purpose: Defines minimum SLP contour for Attribution searches. All
+ # lower SLP values are placed in this contour.
+ # Default: 940 hPa
+ #
+ if 'min_contour' in kwargs:
+ self.min_contour = kwargs['min_contour']
+ else:
+ self.min_contour = 940*self.accuracy
+
+ # Name: max_contour
+ # Purpose: Defines maximum SLP contour for Attribution searches. All
+ # higher SLP values are placed in this contour.
+ # Default: 1013 hPa, 1015 hPa and 1020 hPa have been used.
+ #
+ if 'max_contour' in kwargs:
+ self.max_contour = kwargs['max_contour']
+ else:
+ self.max_contour = 1015*self.accuracy
+
+ # Name: interval
+ # Purpose: Defines contour interval for Attribution searches.
+        # Default: 5.0 hPa (Caution: a small value allows for many problems; keep about 2 hPa).
+ #
+ if 'interval' in kwargs:
+ self.interval = kwargs['interval']
+ else:
+ self.interval = int(5.0*self.accuracy)
+
+ # Name: z_anomaly_cutoff
+ # Purpose: Defines minimum value at which the zonal anomaly is
+ # deep enough to allow grids use for Attribution searches.
+ # Default: -10.0 hPa (should be something like interval)
+ #
+ if 'z_anomaly_cutoff' in kwargs:
+ self.z_anomaly_cutoff = kwargs['z_anomaly_cutoff']
+ else:
+# self.z_anomaly_cutoff = int(-10.0*self.accuracy)
+ self.z_anomaly_cutoff = int(-5.0*self.accuracy)
+# self.z_anomaly_cutoff = int(-0.01*self.accuracy)
+
+ # Name: check_flare
+ # Purpose: Defines minimum number of grids in a system before
+        #          the flare and inflation tests are applied.
+ # Default: 25
+ #
+ if 'check_flare' in kwargs:
+ self.check_flare = kwargs['check_flare']
+ else:
+ self.check_flare = 25
+
+ # Name: check_inflate
+ # Purpose: Defines minimum center SLP to apply
+ # the inflation test.
+ # Default: 1000 hPa
+ #
+ if 'check_inflate' in kwargs:
+ self.check_inflate = kwargs['check_inflate']
+ else:
+ self.check_inflate = int(1000.0*self.accuracy)
+
+ # Name: inflated
+ # Purpose: Maximum ratio of new grids being added
+ # to a center with pre-existing grids.
+ # Default: 5
+ #
+        # Note: So if new_grids/old_grids >= inflated
+ # it is likely that the attributed contours are
+ # running along a wide shallow slp field which
+ # if left unchecked result in huge attributed storms.
+ # Almost always occurs with weak lows (SLPs > 1000 hPa).
+ if 'inflated' in kwargs:
+ self.inflated = kwargs['inflated']
+ else:
+ self.inflated = 5.0
+
+ # Name: find_highs
+ # Purpose: Find slp maximums/highs instead of minimas/lows.
+ # Default: False
+ #
+ # Note: EXPERIMENTAL and not well tested.
+ #
+ if 'find_highs' in kwargs:
+ self.find_highs = kwargs['find_highs']
+ else:
+ self.find_highs = False
+
+ # Name: earth_radius
+ # Purpose: constant
+ # Default: 6371.2 km
+ #
+ self.earth_radius = 6371.2
+
+ # Name: inv_earth_radius_sq
+ # Purpose: constant
+ # Default:
+ #
+ self.inv_earth_radius_sq = 1.0/(self.earth_radius*self.earth_radius)
+
+ # Name: two_deg_lat
+ # Purpose: constant
+ # Default: 111.0 km squared
+ #
+ self.two_deg_lat = 111.0*111.0
+
+ # Name: usi_template
+        # Purpose: placeholder prior to tracking
+ # Default: "00000000000000000000"
+ #
+ self.usi_template = "00000000000000000000"
+
+ # Name: center_fmt
+ # Purpose: Format statement for writing center to file
+ # Default:
+ #
+
+ # Jeyavinoth: Since I changed the adate values, I have more than 9 characters for it,
+ # my adates has 12 characters
+ # So I change the line below:
+ # fmt_1 = "%4d %02d %02d %02d %09d %05d %05d %06d %07d "
+ # to:
+ fmt_1 = "%4d %02d %02d %02d %09d %05d %05d %06d %07d " # since adates is in days *100, I leave the formula as is
+
+ fmt_2 = "%07d %05d %02d %02d %04d %4d%02d%02d%02d%05d%05d %s\n"
+ self.center_fmt = fmt_1 + fmt_2
+
+ # Name: center_fmt2
+ # Purpose: Version of center_fmt
+ # Default:
+ #
+ fmt_2 = "%07d %05d %02d %02d %04d %s %s\n"
+ self.center_fmt2 = fmt_1 + fmt_2
+
+ # Name: intensity_fmt
+ # Purpose: Format statement for writing intensity to file
+ # Default:
+ #
+ self.intensity_fmt = "%6s %s %07d %07d %07d %07d %d\n"
+
+ # Name: seasons
+ # Purpose: Define various seasons for screening.
+ # Default:
+ #
+ seasons = {}
+ seasons["djf"] = [1,1,0,0,0,0,0,0,0,0,0,1]
+ seasons["mam"] = [0,0,1,1,1,0,0,0,0,0,0,0]
+ seasons["jja"] = [0,0,0,0,0,1,1,1,0,0,0,0]
+ seasons["son"] = [0,0,0,0,0,0,0,0,1,1,1,0]
+ seasons["ndjfma"] = [1,1,1,1,0,0,0,0,0,0,1,1]
+ seasons["mjjaso"] = [0,0,0,0,1,1,1,1,1,1,0,0]
+ self.seasons = seasons
+
+ # Name: seasons_pick
+ # Purpose: method for selecting seasons
+ # Default:
+ #
+ self.season_pick = ["djf","djf","mam","mam","mam","jja","jja","jja",
+ "son","son","son","djf"]
+
+ self.center_data = [('YYYY' , 'int64'),
+ ('MM' , 'int64'),
+ ('DD' , 'int64'),
+ ('HH' , 'int64'),
+ ('JD' , 'int64'),
+ ('CoLat' , 'int64'),
+ ('Lon' , 'int64'),
+ ('GridID' , 'int64'),
+ ('GridSLP' , 'int64'),
+ ('RegSLP' , 'int64'),
+ ('GridLAP' , 'int64'),
+ ('Flags' , 'int64'),
+ ('Intensity' , 'int64'),
+ ('Disimularity', 'int64'),
+ ('UCI' , 'S20'),
+ ('USI' , 'S20')]
+
diff --git a/diagnostics/etc_composites/util/tracker/first_last_lons_v4.py b/diagnostics/etc_composites/util/tracker/first_last_lons_v4.py
new file mode 100755
index 000000000..c83181511
--- /dev/null
+++ b/diagnostics/etc_composites/util/tracker/first_last_lons_v4.py
@@ -0,0 +1,12 @@
+def first_last_lons(jm,im):
+ """Get gridIDs for 1st lon of each lat"""
+ row_start = [0]
+ maxID = jm*im
+ for gridID in range(maxID-1):
+ test = gridID % im + 1
+ if test == im:
+ row_start.append(gridID+1)
+ row_end = []
+ for each in row_start:
+ row_end.append(each+im-1)
+ return row_start,row_end
diff --git a/diagnostics/etc_composites/util/tracker/g2l_v4.c b/diagnostics/etc_composites/util/tracker/g2l_v4.c
new file mode 100644
index 000000000..72c86b013
--- /dev/null
+++ b/diagnostics/etc_composites/util/tracker/g2l_v4.c
@@ -0,0 +1,4132 @@
+/* Generated by Cython 0.29.23 */
+
+/* BEGIN: Cython Metadata
+{
+ "distutils": {
+ "name": "g2l_v4",
+ "sources": [
+ "g2l_v4.pyx"
+ ]
+ },
+ "module_name": "g2l_v4"
+}
+END: Cython Metadata */
+
+#ifndef PY_SSIZE_T_CLEAN
+#define PY_SSIZE_T_CLEAN
+#endif /* PY_SSIZE_T_CLEAN */
+#include "Python.h"
+#ifndef Py_PYTHON_H
+ #error Python headers needed to compile C extensions, please install development version of Python.
+#elif PY_VERSION_HEX < 0x02060000 || (0x03000000 <= PY_VERSION_HEX && PY_VERSION_HEX < 0x03030000)
+ #error Cython requires Python 2.6+ or Python 3.3+.
+#else
+#define CYTHON_ABI "0_29_23"
+#define CYTHON_HEX_VERSION 0x001D17F0
+#define CYTHON_FUTURE_DIVISION 0
+#include
+#ifndef offsetof
+ #define offsetof(type, member) ( (size_t) & ((type*)0) -> member )
+#endif
+#if !defined(WIN32) && !defined(MS_WINDOWS)
+ #ifndef __stdcall
+ #define __stdcall
+ #endif
+ #ifndef __cdecl
+ #define __cdecl
+ #endif
+ #ifndef __fastcall
+ #define __fastcall
+ #endif
+#endif
+#ifndef DL_IMPORT
+ #define DL_IMPORT(t) t
+#endif
+#ifndef DL_EXPORT
+ #define DL_EXPORT(t) t
+#endif
+#define __PYX_COMMA ,
+#ifndef HAVE_LONG_LONG
+ #if PY_VERSION_HEX >= 0x02070000
+ #define HAVE_LONG_LONG
+ #endif
+#endif
+#ifndef PY_LONG_LONG
+ #define PY_LONG_LONG LONG_LONG
+#endif
+#ifndef Py_HUGE_VAL
+ #define Py_HUGE_VAL HUGE_VAL
+#endif
+#ifdef PYPY_VERSION
+ #define CYTHON_COMPILING_IN_PYPY 1
+ #define CYTHON_COMPILING_IN_PYSTON 0
+ #define CYTHON_COMPILING_IN_CPYTHON 0
+ #undef CYTHON_USE_TYPE_SLOTS
+ #define CYTHON_USE_TYPE_SLOTS 0
+ #undef CYTHON_USE_PYTYPE_LOOKUP
+ #define CYTHON_USE_PYTYPE_LOOKUP 0
+ #if PY_VERSION_HEX < 0x03050000
+ #undef CYTHON_USE_ASYNC_SLOTS
+ #define CYTHON_USE_ASYNC_SLOTS 0
+ #elif !defined(CYTHON_USE_ASYNC_SLOTS)
+ #define CYTHON_USE_ASYNC_SLOTS 1
+ #endif
+ #undef CYTHON_USE_PYLIST_INTERNALS
+ #define CYTHON_USE_PYLIST_INTERNALS 0
+ #undef CYTHON_USE_UNICODE_INTERNALS
+ #define CYTHON_USE_UNICODE_INTERNALS 0
+ #undef CYTHON_USE_UNICODE_WRITER
+ #define CYTHON_USE_UNICODE_WRITER 0
+ #undef CYTHON_USE_PYLONG_INTERNALS
+ #define CYTHON_USE_PYLONG_INTERNALS 0
+ #undef CYTHON_AVOID_BORROWED_REFS
+ #define CYTHON_AVOID_BORROWED_REFS 1
+ #undef CYTHON_ASSUME_SAFE_MACROS
+ #define CYTHON_ASSUME_SAFE_MACROS 0
+ #undef CYTHON_UNPACK_METHODS
+ #define CYTHON_UNPACK_METHODS 0
+ #undef CYTHON_FAST_THREAD_STATE
+ #define CYTHON_FAST_THREAD_STATE 0
+ #undef CYTHON_FAST_PYCALL
+ #define CYTHON_FAST_PYCALL 0
+ #undef CYTHON_PEP489_MULTI_PHASE_INIT
+ #define CYTHON_PEP489_MULTI_PHASE_INIT 0
+ #undef CYTHON_USE_TP_FINALIZE
+ #define CYTHON_USE_TP_FINALIZE 0
+ #undef CYTHON_USE_DICT_VERSIONS
+ #define CYTHON_USE_DICT_VERSIONS 0
+ #undef CYTHON_USE_EXC_INFO_STACK
+ #define CYTHON_USE_EXC_INFO_STACK 0
+#elif defined(PYSTON_VERSION)
+ #define CYTHON_COMPILING_IN_PYPY 0
+ #define CYTHON_COMPILING_IN_PYSTON 1
+ #define CYTHON_COMPILING_IN_CPYTHON 0
+ #ifndef CYTHON_USE_TYPE_SLOTS
+ #define CYTHON_USE_TYPE_SLOTS 1
+ #endif
+ #undef CYTHON_USE_PYTYPE_LOOKUP
+ #define CYTHON_USE_PYTYPE_LOOKUP 0
+ #undef CYTHON_USE_ASYNC_SLOTS
+ #define CYTHON_USE_ASYNC_SLOTS 0
+ #undef CYTHON_USE_PYLIST_INTERNALS
+ #define CYTHON_USE_PYLIST_INTERNALS 0
+ #ifndef CYTHON_USE_UNICODE_INTERNALS
+ #define CYTHON_USE_UNICODE_INTERNALS 1
+ #endif
+ #undef CYTHON_USE_UNICODE_WRITER
+ #define CYTHON_USE_UNICODE_WRITER 0
+ #undef CYTHON_USE_PYLONG_INTERNALS
+ #define CYTHON_USE_PYLONG_INTERNALS 0
+ #ifndef CYTHON_AVOID_BORROWED_REFS
+ #define CYTHON_AVOID_BORROWED_REFS 0
+ #endif
+ #ifndef CYTHON_ASSUME_SAFE_MACROS
+ #define CYTHON_ASSUME_SAFE_MACROS 1
+ #endif
+ #ifndef CYTHON_UNPACK_METHODS
+ #define CYTHON_UNPACK_METHODS 1
+ #endif
+ #undef CYTHON_FAST_THREAD_STATE
+ #define CYTHON_FAST_THREAD_STATE 0
+ #undef CYTHON_FAST_PYCALL
+ #define CYTHON_FAST_PYCALL 0
+ #undef CYTHON_PEP489_MULTI_PHASE_INIT
+ #define CYTHON_PEP489_MULTI_PHASE_INIT 0
+ #undef CYTHON_USE_TP_FINALIZE
+ #define CYTHON_USE_TP_FINALIZE 0
+ #undef CYTHON_USE_DICT_VERSIONS
+ #define CYTHON_USE_DICT_VERSIONS 0
+ #undef CYTHON_USE_EXC_INFO_STACK
+ #define CYTHON_USE_EXC_INFO_STACK 0
+#else
+ #define CYTHON_COMPILING_IN_PYPY 0
+ #define CYTHON_COMPILING_IN_PYSTON 0
+ #define CYTHON_COMPILING_IN_CPYTHON 1
+ #ifndef CYTHON_USE_TYPE_SLOTS
+ #define CYTHON_USE_TYPE_SLOTS 1
+ #endif
+ #if PY_VERSION_HEX < 0x02070000
+ #undef CYTHON_USE_PYTYPE_LOOKUP
+ #define CYTHON_USE_PYTYPE_LOOKUP 0
+ #elif !defined(CYTHON_USE_PYTYPE_LOOKUP)
+ #define CYTHON_USE_PYTYPE_LOOKUP 1
+ #endif
+ #if PY_MAJOR_VERSION < 3
+ #undef CYTHON_USE_ASYNC_SLOTS
+ #define CYTHON_USE_ASYNC_SLOTS 0
+ #elif !defined(CYTHON_USE_ASYNC_SLOTS)
+ #define CYTHON_USE_ASYNC_SLOTS 1
+ #endif
+ #if PY_VERSION_HEX < 0x02070000
+ #undef CYTHON_USE_PYLONG_INTERNALS
+ #define CYTHON_USE_PYLONG_INTERNALS 0
+ #elif !defined(CYTHON_USE_PYLONG_INTERNALS)
+ #define CYTHON_USE_PYLONG_INTERNALS 1
+ #endif
+ #ifndef CYTHON_USE_PYLIST_INTERNALS
+ #define CYTHON_USE_PYLIST_INTERNALS 1
+ #endif
+ #ifndef CYTHON_USE_UNICODE_INTERNALS
+ #define CYTHON_USE_UNICODE_INTERNALS 1
+ #endif
+ #if PY_VERSION_HEX < 0x030300F0
+ #undef CYTHON_USE_UNICODE_WRITER
+ #define CYTHON_USE_UNICODE_WRITER 0
+ #elif !defined(CYTHON_USE_UNICODE_WRITER)
+ #define CYTHON_USE_UNICODE_WRITER 1
+ #endif
+ #ifndef CYTHON_AVOID_BORROWED_REFS
+ #define CYTHON_AVOID_BORROWED_REFS 0
+ #endif
+ #ifndef CYTHON_ASSUME_SAFE_MACROS
+ #define CYTHON_ASSUME_SAFE_MACROS 1
+ #endif
+ #ifndef CYTHON_UNPACK_METHODS
+ #define CYTHON_UNPACK_METHODS 1
+ #endif
+ #ifndef CYTHON_FAST_THREAD_STATE
+ #define CYTHON_FAST_THREAD_STATE 1
+ #endif
+ #ifndef CYTHON_FAST_PYCALL
+ #define CYTHON_FAST_PYCALL 1
+ #endif
+ #ifndef CYTHON_PEP489_MULTI_PHASE_INIT
+ #define CYTHON_PEP489_MULTI_PHASE_INIT (PY_VERSION_HEX >= 0x03050000)
+ #endif
+ #ifndef CYTHON_USE_TP_FINALIZE
+ #define CYTHON_USE_TP_FINALIZE (PY_VERSION_HEX >= 0x030400a1)
+ #endif
+ #ifndef CYTHON_USE_DICT_VERSIONS
+ #define CYTHON_USE_DICT_VERSIONS (PY_VERSION_HEX >= 0x030600B1)
+ #endif
+ #ifndef CYTHON_USE_EXC_INFO_STACK
+ #define CYTHON_USE_EXC_INFO_STACK (PY_VERSION_HEX >= 0x030700A3)
+ #endif
+#endif
+#if !defined(CYTHON_FAST_PYCCALL)
+#define CYTHON_FAST_PYCCALL (CYTHON_FAST_PYCALL && PY_VERSION_HEX >= 0x030600B1)
+#endif
+#if CYTHON_USE_PYLONG_INTERNALS
+ #include "longintrepr.h"
+ #undef SHIFT
+ #undef BASE
+ #undef MASK
+ #ifdef SIZEOF_VOID_P
+ enum { __pyx_check_sizeof_voidp = 1 / (int)(SIZEOF_VOID_P == sizeof(void*)) };
+ #endif
+#endif
+#ifndef __has_attribute
+ #define __has_attribute(x) 0
+#endif
+#ifndef __has_cpp_attribute
+ #define __has_cpp_attribute(x) 0
+#endif
+#ifndef CYTHON_RESTRICT
+ #if defined(__GNUC__)
+ #define CYTHON_RESTRICT __restrict__
+ #elif defined(_MSC_VER) && _MSC_VER >= 1400
+ #define CYTHON_RESTRICT __restrict
+ #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L
+ #define CYTHON_RESTRICT restrict
+ #else
+ #define CYTHON_RESTRICT
+ #endif
+#endif
+#ifndef CYTHON_UNUSED
+# if defined(__GNUC__)
+# if !(defined(__cplusplus)) || (__GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ >= 4))
+# define CYTHON_UNUSED __attribute__ ((__unused__))
+# else
+# define CYTHON_UNUSED
+# endif
+# elif defined(__ICC) || (defined(__INTEL_COMPILER) && !defined(_MSC_VER))
+# define CYTHON_UNUSED __attribute__ ((__unused__))
+# else
+# define CYTHON_UNUSED
+# endif
+#endif
+#ifndef CYTHON_MAYBE_UNUSED_VAR
+# if defined(__cplusplus)
+ template void CYTHON_MAYBE_UNUSED_VAR( const T& ) { }
+# else
+# define CYTHON_MAYBE_UNUSED_VAR(x) (void)(x)
+# endif
+#endif
+#ifndef CYTHON_NCP_UNUSED
+# if CYTHON_COMPILING_IN_CPYTHON
+# define CYTHON_NCP_UNUSED
+# else
+# define CYTHON_NCP_UNUSED CYTHON_UNUSED
+# endif
+#endif
+#define __Pyx_void_to_None(void_result) ((void)(void_result), Py_INCREF(Py_None), Py_None)
+#ifdef _MSC_VER
+ #ifndef _MSC_STDINT_H_
+ #if _MSC_VER < 1300
+ typedef unsigned char uint8_t;
+ typedef unsigned int uint32_t;
+ #else
+ typedef unsigned __int8 uint8_t;
+ typedef unsigned __int32 uint32_t;
+ #endif
+ #endif
+#else
+ #include
+#endif
+#ifndef CYTHON_FALLTHROUGH
+ #if defined(__cplusplus) && __cplusplus >= 201103L
+ #if __has_cpp_attribute(fallthrough)
+ #define CYTHON_FALLTHROUGH [[fallthrough]]
+ #elif __has_cpp_attribute(clang::fallthrough)
+ #define CYTHON_FALLTHROUGH [[clang::fallthrough]]
+ #elif __has_cpp_attribute(gnu::fallthrough)
+ #define CYTHON_FALLTHROUGH [[gnu::fallthrough]]
+ #endif
+ #endif
+ #ifndef CYTHON_FALLTHROUGH
+ #if __has_attribute(fallthrough)
+ #define CYTHON_FALLTHROUGH __attribute__((fallthrough))
+ #else
+ #define CYTHON_FALLTHROUGH
+ #endif
+ #endif
+ #if defined(__clang__ ) && defined(__apple_build_version__)
+ #if __apple_build_version__ < 7000000
+ #undef CYTHON_FALLTHROUGH
+ #define CYTHON_FALLTHROUGH
+ #endif
+ #endif
+#endif
+
+#ifndef CYTHON_INLINE
+ #if defined(__clang__)
+ #define CYTHON_INLINE __inline__ __attribute__ ((__unused__))
+ #elif defined(__GNUC__)
+ #define CYTHON_INLINE __inline__
+ #elif defined(_MSC_VER)
+ #define CYTHON_INLINE __inline
+ #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L
+ #define CYTHON_INLINE inline
+ #else
+ #define CYTHON_INLINE
+ #endif
+#endif
+
+#if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX < 0x02070600 && !defined(Py_OptimizeFlag)
+ #define Py_OptimizeFlag 0
+#endif
+#define __PYX_BUILD_PY_SSIZE_T "n"
+#define CYTHON_FORMAT_SSIZE_T "z"
+#if PY_MAJOR_VERSION < 3
+ #define __Pyx_BUILTIN_MODULE_NAME "__builtin__"
+ #define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\
+ PyCode_New(a+k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)
+ #define __Pyx_DefaultClassType PyClass_Type
+#else
+ #define __Pyx_BUILTIN_MODULE_NAME "builtins"
+#if PY_VERSION_HEX >= 0x030800A4 && PY_VERSION_HEX < 0x030800B2
+ #define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\
+ PyCode_New(a, 0, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)
+#else
+ #define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\
+ PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)
+#endif
+ #define __Pyx_DefaultClassType PyType_Type
+#endif
+#ifndef Py_TPFLAGS_CHECKTYPES
+ #define Py_TPFLAGS_CHECKTYPES 0
+#endif
+#ifndef Py_TPFLAGS_HAVE_INDEX
+ #define Py_TPFLAGS_HAVE_INDEX 0
+#endif
+#ifndef Py_TPFLAGS_HAVE_NEWBUFFER
+ #define Py_TPFLAGS_HAVE_NEWBUFFER 0
+#endif
+#ifndef Py_TPFLAGS_HAVE_FINALIZE
+ #define Py_TPFLAGS_HAVE_FINALIZE 0
+#endif
+#ifndef METH_STACKLESS
+ #define METH_STACKLESS 0
+#endif
+#if PY_VERSION_HEX <= 0x030700A3 || !defined(METH_FASTCALL)
+ #ifndef METH_FASTCALL
+ #define METH_FASTCALL 0x80
+ #endif
+ typedef PyObject *(*__Pyx_PyCFunctionFast) (PyObject *self, PyObject *const *args, Py_ssize_t nargs);
+ typedef PyObject *(*__Pyx_PyCFunctionFastWithKeywords) (PyObject *self, PyObject *const *args,
+ Py_ssize_t nargs, PyObject *kwnames);
+#else
+ #define __Pyx_PyCFunctionFast _PyCFunctionFast
+ #define __Pyx_PyCFunctionFastWithKeywords _PyCFunctionFastWithKeywords
+#endif
+#if CYTHON_FAST_PYCCALL
+#define __Pyx_PyFastCFunction_Check(func)\
+ ((PyCFunction_Check(func) && (METH_FASTCALL == (PyCFunction_GET_FLAGS(func) & ~(METH_CLASS | METH_STATIC | METH_COEXIST | METH_KEYWORDS | METH_STACKLESS)))))
+#else
+#define __Pyx_PyFastCFunction_Check(func) 0
+#endif
+#if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Malloc)
+ #define PyObject_Malloc(s) PyMem_Malloc(s)
+ #define PyObject_Free(p) PyMem_Free(p)
+ #define PyObject_Realloc(p) PyMem_Realloc(p)
+#endif
+#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX < 0x030400A1
+ #define PyMem_RawMalloc(n) PyMem_Malloc(n)
+ #define PyMem_RawRealloc(p, n) PyMem_Realloc(p, n)
+ #define PyMem_RawFree(p) PyMem_Free(p)
+#endif
+#if CYTHON_COMPILING_IN_PYSTON
+ #define __Pyx_PyCode_HasFreeVars(co) PyCode_HasFreeVars(co)
+ #define __Pyx_PyFrame_SetLineNumber(frame, lineno) PyFrame_SetLineNumber(frame, lineno)
+#else
+ #define __Pyx_PyCode_HasFreeVars(co) (PyCode_GetNumFree(co) > 0)
+ #define __Pyx_PyFrame_SetLineNumber(frame, lineno) (frame)->f_lineno = (lineno)
+#endif
+#if !CYTHON_FAST_THREAD_STATE || PY_VERSION_HEX < 0x02070000
+ #define __Pyx_PyThreadState_Current PyThreadState_GET()
+#elif PY_VERSION_HEX >= 0x03060000
+ #define __Pyx_PyThreadState_Current _PyThreadState_UncheckedGet()
+#elif PY_VERSION_HEX >= 0x03000000
+ #define __Pyx_PyThreadState_Current PyThreadState_GET()
+#else
+ #define __Pyx_PyThreadState_Current _PyThreadState_Current
+#endif
+#if PY_VERSION_HEX < 0x030700A2 && !defined(PyThread_tss_create) && !defined(Py_tss_NEEDS_INIT)
+#include "pythread.h"
+#define Py_tss_NEEDS_INIT 0
+typedef int Py_tss_t;
+static CYTHON_INLINE int PyThread_tss_create(Py_tss_t *key) {
+ *key = PyThread_create_key();
+ return 0;
+}
+static CYTHON_INLINE Py_tss_t * PyThread_tss_alloc(void) {
+ Py_tss_t *key = (Py_tss_t *)PyObject_Malloc(sizeof(Py_tss_t));
+ *key = Py_tss_NEEDS_INIT;
+ return key;
+}
+static CYTHON_INLINE void PyThread_tss_free(Py_tss_t *key) {
+ PyObject_Free(key);
+}
+static CYTHON_INLINE int PyThread_tss_is_created(Py_tss_t *key) {
+ return *key != Py_tss_NEEDS_INIT;
+}
+static CYTHON_INLINE void PyThread_tss_delete(Py_tss_t *key) {
+ PyThread_delete_key(*key);
+ *key = Py_tss_NEEDS_INIT;
+}
+static CYTHON_INLINE int PyThread_tss_set(Py_tss_t *key, void *value) {
+ return PyThread_set_key_value(*key, value);
+}
+static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) {
+ return PyThread_get_key_value(*key);
+}
+#endif
+#if CYTHON_COMPILING_IN_CPYTHON || defined(_PyDict_NewPresized)
+#define __Pyx_PyDict_NewPresized(n) ((n <= 8) ? PyDict_New() : _PyDict_NewPresized(n))
+#else
+#define __Pyx_PyDict_NewPresized(n) PyDict_New()
+#endif
+#if PY_MAJOR_VERSION >= 3 || CYTHON_FUTURE_DIVISION
+ #define __Pyx_PyNumber_Divide(x,y) PyNumber_TrueDivide(x,y)
+ #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceTrueDivide(x,y)
+#else
+ #define __Pyx_PyNumber_Divide(x,y) PyNumber_Divide(x,y)
+ #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceDivide(x,y)
+#endif
+#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030500A1 && CYTHON_USE_UNICODE_INTERNALS
+#define __Pyx_PyDict_GetItemStr(dict, name) _PyDict_GetItem_KnownHash(dict, name, ((PyASCIIObject *) name)->hash)
+#else
+#define __Pyx_PyDict_GetItemStr(dict, name) PyDict_GetItem(dict, name)
+#endif
+#if PY_VERSION_HEX > 0x03030000 && defined(PyUnicode_KIND)
+ #define CYTHON_PEP393_ENABLED 1
+ #define __Pyx_PyUnicode_READY(op) (likely(PyUnicode_IS_READY(op)) ?\
+ 0 : _PyUnicode_Ready((PyObject *)(op)))
+ #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_LENGTH(u)
+ #define __Pyx_PyUnicode_READ_CHAR(u, i) PyUnicode_READ_CHAR(u, i)
+ #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) PyUnicode_MAX_CHAR_VALUE(u)
+ #define __Pyx_PyUnicode_KIND(u) PyUnicode_KIND(u)
+ #define __Pyx_PyUnicode_DATA(u) PyUnicode_DATA(u)
+ #define __Pyx_PyUnicode_READ(k, d, i) PyUnicode_READ(k, d, i)
+ #define __Pyx_PyUnicode_WRITE(k, d, i, ch) PyUnicode_WRITE(k, d, i, ch)
+ #if defined(PyUnicode_IS_READY) && defined(PyUnicode_GET_SIZE)
+ #define __Pyx_PyUnicode_IS_TRUE(u) (0 != (likely(PyUnicode_IS_READY(u)) ? PyUnicode_GET_LENGTH(u) : PyUnicode_GET_SIZE(u)))
+ #else
+ #define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GET_LENGTH(u))
+ #endif
+#else
+ #define CYTHON_PEP393_ENABLED 0
+ #define PyUnicode_1BYTE_KIND 1
+ #define PyUnicode_2BYTE_KIND 2
+ #define PyUnicode_4BYTE_KIND 4
+ #define __Pyx_PyUnicode_READY(op) (0)
+ #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_SIZE(u)
+ #define __Pyx_PyUnicode_READ_CHAR(u, i) ((Py_UCS4)(PyUnicode_AS_UNICODE(u)[i]))
+ #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) ((sizeof(Py_UNICODE) == 2) ? 65535 : 1114111)
+ #define __Pyx_PyUnicode_KIND(u) (sizeof(Py_UNICODE))
+ #define __Pyx_PyUnicode_DATA(u) ((void*)PyUnicode_AS_UNICODE(u))
+ #define __Pyx_PyUnicode_READ(k, d, i) ((void)(k), (Py_UCS4)(((Py_UNICODE*)d)[i]))
+ #define __Pyx_PyUnicode_WRITE(k, d, i, ch) (((void)(k)), ((Py_UNICODE*)d)[i] = ch)
+ #define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GET_SIZE(u))
+#endif
+#if CYTHON_COMPILING_IN_PYPY
+ #define __Pyx_PyUnicode_Concat(a, b) PyNumber_Add(a, b)
+ #define __Pyx_PyUnicode_ConcatSafe(a, b) PyNumber_Add(a, b)
+#else
+ #define __Pyx_PyUnicode_Concat(a, b) PyUnicode_Concat(a, b)
+ #define __Pyx_PyUnicode_ConcatSafe(a, b) ((unlikely((a) == Py_None) || unlikely((b) == Py_None)) ?\
+ PyNumber_Add(a, b) : __Pyx_PyUnicode_Concat(a, b))
+#endif
+#if CYTHON_COMPILING_IN_PYPY && !defined(PyUnicode_Contains)
+ #define PyUnicode_Contains(u, s) PySequence_Contains(u, s)
+#endif
+#if CYTHON_COMPILING_IN_PYPY && !defined(PyByteArray_Check)
+ #define PyByteArray_Check(obj) PyObject_TypeCheck(obj, &PyByteArray_Type)
+#endif
+#if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Format)
+ #define PyObject_Format(obj, fmt) PyObject_CallMethod(obj, "__format__", "O", fmt)
+#endif
+#define __Pyx_PyString_FormatSafe(a, b) ((unlikely((a) == Py_None || (PyString_Check(b) && !PyString_CheckExact(b)))) ? PyNumber_Remainder(a, b) : __Pyx_PyString_Format(a, b))
+#define __Pyx_PyUnicode_FormatSafe(a, b) ((unlikely((a) == Py_None || (PyUnicode_Check(b) && !PyUnicode_CheckExact(b)))) ? PyNumber_Remainder(a, b) : PyUnicode_Format(a, b))
+#if PY_MAJOR_VERSION >= 3
+ #define __Pyx_PyString_Format(a, b) PyUnicode_Format(a, b)
+#else
+ #define __Pyx_PyString_Format(a, b) PyString_Format(a, b)
+#endif
+#if PY_MAJOR_VERSION < 3 && !defined(PyObject_ASCII)
+ #define PyObject_ASCII(o) PyObject_Repr(o)
+#endif
+#if PY_MAJOR_VERSION >= 3
+ #define PyBaseString_Type PyUnicode_Type
+ #define PyStringObject PyUnicodeObject
+ #define PyString_Type PyUnicode_Type
+ #define PyString_Check PyUnicode_Check
+ #define PyString_CheckExact PyUnicode_CheckExact
+#ifndef PyObject_Unicode
+ #define PyObject_Unicode PyObject_Str
+#endif
+#endif
+#if PY_MAJOR_VERSION >= 3
+ #define __Pyx_PyBaseString_Check(obj) PyUnicode_Check(obj)
+ #define __Pyx_PyBaseString_CheckExact(obj) PyUnicode_CheckExact(obj)
+#else
+ #define __Pyx_PyBaseString_Check(obj) (PyString_Check(obj) || PyUnicode_Check(obj))
+ #define __Pyx_PyBaseString_CheckExact(obj) (PyString_CheckExact(obj) || PyUnicode_CheckExact(obj))
+#endif
+#ifndef PySet_CheckExact
+ #define PySet_CheckExact(obj) (Py_TYPE(obj) == &PySet_Type)
+#endif
+#if PY_VERSION_HEX >= 0x030900A4
+ #define __Pyx_SET_REFCNT(obj, refcnt) Py_SET_REFCNT(obj, refcnt)
+ #define __Pyx_SET_SIZE(obj, size) Py_SET_SIZE(obj, size)
+#else
+ #define __Pyx_SET_REFCNT(obj, refcnt) Py_REFCNT(obj) = (refcnt)
+ #define __Pyx_SET_SIZE(obj, size) Py_SIZE(obj) = (size)
+#endif
+#if CYTHON_ASSUME_SAFE_MACROS
+ #define __Pyx_PySequence_SIZE(seq) Py_SIZE(seq)
+#else
+ #define __Pyx_PySequence_SIZE(seq) PySequence_Size(seq)
+#endif
+#if PY_MAJOR_VERSION >= 3
+ #define PyIntObject PyLongObject
+ #define PyInt_Type PyLong_Type
+ #define PyInt_Check(op) PyLong_Check(op)
+ #define PyInt_CheckExact(op) PyLong_CheckExact(op)
+ #define PyInt_FromString PyLong_FromString
+ #define PyInt_FromUnicode PyLong_FromUnicode
+ #define PyInt_FromLong PyLong_FromLong
+ #define PyInt_FromSize_t PyLong_FromSize_t
+ #define PyInt_FromSsize_t PyLong_FromSsize_t
+ #define PyInt_AsLong PyLong_AsLong
+ #define PyInt_AS_LONG PyLong_AS_LONG
+ #define PyInt_AsSsize_t PyLong_AsSsize_t
+ #define PyInt_AsUnsignedLongMask PyLong_AsUnsignedLongMask
+ #define PyInt_AsUnsignedLongLongMask PyLong_AsUnsignedLongLongMask
+ #define PyNumber_Int PyNumber_Long
+#endif
+#if PY_MAJOR_VERSION >= 3
+ #define PyBoolObject PyLongObject
+#endif
+#if PY_MAJOR_VERSION >= 3 && CYTHON_COMPILING_IN_PYPY
+ #ifndef PyUnicode_InternFromString
+ #define PyUnicode_InternFromString(s) PyUnicode_FromString(s)
+ #endif
+#endif
+#if PY_VERSION_HEX < 0x030200A4
+ typedef long Py_hash_t;
+ #define __Pyx_PyInt_FromHash_t PyInt_FromLong
+ #define __Pyx_PyInt_AsHash_t PyInt_AsLong
+#else
+ #define __Pyx_PyInt_FromHash_t PyInt_FromSsize_t
+ #define __Pyx_PyInt_AsHash_t PyInt_AsSsize_t
+#endif
+#if PY_MAJOR_VERSION >= 3
+ #define __Pyx_PyMethod_New(func, self, klass) ((self) ? ((void)(klass), PyMethod_New(func, self)) : __Pyx_NewRef(func))
+#else
+ #define __Pyx_PyMethod_New(func, self, klass) PyMethod_New(func, self, klass)
+#endif
+#if CYTHON_USE_ASYNC_SLOTS
+ #if PY_VERSION_HEX >= 0x030500B1
+ #define __Pyx_PyAsyncMethodsStruct PyAsyncMethods
+ #define __Pyx_PyType_AsAsync(obj) (Py_TYPE(obj)->tp_as_async)
+ #else
+ #define __Pyx_PyType_AsAsync(obj) ((__Pyx_PyAsyncMethodsStruct*) (Py_TYPE(obj)->tp_reserved))
+ #endif
+#else
+ #define __Pyx_PyType_AsAsync(obj) NULL
+#endif
+#ifndef __Pyx_PyAsyncMethodsStruct
+ typedef struct {
+ unaryfunc am_await;
+ unaryfunc am_aiter;
+ unaryfunc am_anext;
+ } __Pyx_PyAsyncMethodsStruct;
+#endif
+
+#if defined(WIN32) || defined(MS_WINDOWS)
+ #define _USE_MATH_DEFINES
+#endif
+#include
+#ifdef NAN
+#define __PYX_NAN() ((float) NAN)
+#else
+static CYTHON_INLINE float __PYX_NAN() {
+ float value;
+ memset(&value, 0xFF, sizeof(value));
+ return value;
+}
+#endif
+#if defined(__CYGWIN__) && defined(_LDBL_EQ_DBL)
+#define __Pyx_truncl trunc
+#else
+#define __Pyx_truncl truncl
+#endif
+
+#define __PYX_MARK_ERR_POS(f_index, lineno) \
+ { __pyx_filename = __pyx_f[f_index]; (void)__pyx_filename; __pyx_lineno = lineno; (void)__pyx_lineno; __pyx_clineno = __LINE__; (void)__pyx_clineno; }
+#define __PYX_ERR(f_index, lineno, Ln_error) \
+ { __PYX_MARK_ERR_POS(f_index, lineno) goto Ln_error; }
+
+#ifndef __PYX_EXTERN_C
+ #ifdef __cplusplus
+ #define __PYX_EXTERN_C extern "C"
+ #else
+ #define __PYX_EXTERN_C extern
+ #endif
+#endif
+
+#define __PYX_HAVE__g2l_v4
+#define __PYX_HAVE_API__g2l_v4
+/* Early includes */
+#ifdef _OPENMP
+#include
+#endif /* _OPENMP */
+
+#if defined(PYREX_WITHOUT_ASSERTIONS) && !defined(CYTHON_WITHOUT_ASSERTIONS)
+#define CYTHON_WITHOUT_ASSERTIONS
+#endif
+
+typedef struct {PyObject **p; const char *s; const Py_ssize_t n; const char* encoding;
+ const char is_unicode; const char is_str; const char intern; } __Pyx_StringTabEntry;
+
+#define __PYX_DEFAULT_STRING_ENCODING_IS_ASCII 0
+#define __PYX_DEFAULT_STRING_ENCODING_IS_UTF8 0
+#define __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT (PY_MAJOR_VERSION >= 3 && __PYX_DEFAULT_STRING_ENCODING_IS_UTF8)
+#define __PYX_DEFAULT_STRING_ENCODING ""
+#define __Pyx_PyObject_FromString __Pyx_PyBytes_FromString
+#define __Pyx_PyObject_FromStringAndSize __Pyx_PyBytes_FromStringAndSize
+#define __Pyx_uchar_cast(c) ((unsigned char)c)
+#define __Pyx_long_cast(x) ((long)x)
+#define __Pyx_fits_Py_ssize_t(v, type, is_signed) (\
+ (sizeof(type) < sizeof(Py_ssize_t)) ||\
+ (sizeof(type) > sizeof(Py_ssize_t) &&\
+ likely(v < (type)PY_SSIZE_T_MAX ||\
+ v == (type)PY_SSIZE_T_MAX) &&\
+ (!is_signed || likely(v > (type)PY_SSIZE_T_MIN ||\
+ v == (type)PY_SSIZE_T_MIN))) ||\
+ (sizeof(type) == sizeof(Py_ssize_t) &&\
+ (is_signed || likely(v < (type)PY_SSIZE_T_MAX ||\
+ v == (type)PY_SSIZE_T_MAX))) )
+static CYTHON_INLINE int __Pyx_is_valid_index(Py_ssize_t i, Py_ssize_t limit) {
+ return (size_t) i < (size_t) limit;
+}
+#if defined (__cplusplus) && __cplusplus >= 201103L
+ #include
+ #define __Pyx_sst_abs(value) std::abs(value)
+#elif SIZEOF_INT >= SIZEOF_SIZE_T
+ #define __Pyx_sst_abs(value) abs(value)
+#elif SIZEOF_LONG >= SIZEOF_SIZE_T
+ #define __Pyx_sst_abs(value) labs(value)
+#elif defined (_MSC_VER)
+ #define __Pyx_sst_abs(value) ((Py_ssize_t)_abs64(value))
+#elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L
+ #define __Pyx_sst_abs(value) llabs(value)
+#elif defined (__GNUC__)
+ #define __Pyx_sst_abs(value) __builtin_llabs(value)
+#else
+ #define __Pyx_sst_abs(value) ((value<0) ? -value : value)
+#endif
+static CYTHON_INLINE const char* __Pyx_PyObject_AsString(PyObject*);
+static CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject*, Py_ssize_t* length);
+#define __Pyx_PyByteArray_FromString(s) PyByteArray_FromStringAndSize((const char*)s, strlen((const char*)s))
+#define __Pyx_PyByteArray_FromStringAndSize(s, l) PyByteArray_FromStringAndSize((const char*)s, l)
+#define __Pyx_PyBytes_FromString PyBytes_FromString
+#define __Pyx_PyBytes_FromStringAndSize PyBytes_FromStringAndSize
+static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char*);
+#if PY_MAJOR_VERSION < 3
+ #define __Pyx_PyStr_FromString __Pyx_PyBytes_FromString
+ #define __Pyx_PyStr_FromStringAndSize __Pyx_PyBytes_FromStringAndSize
+#else
+ #define __Pyx_PyStr_FromString __Pyx_PyUnicode_FromString
+ #define __Pyx_PyStr_FromStringAndSize __Pyx_PyUnicode_FromStringAndSize
+#endif
+#define __Pyx_PyBytes_AsWritableString(s) ((char*) PyBytes_AS_STRING(s))
+#define __Pyx_PyBytes_AsWritableSString(s) ((signed char*) PyBytes_AS_STRING(s))
+#define __Pyx_PyBytes_AsWritableUString(s) ((unsigned char*) PyBytes_AS_STRING(s))
+#define __Pyx_PyBytes_AsString(s) ((const char*) PyBytes_AS_STRING(s))
+#define __Pyx_PyBytes_AsSString(s) ((const signed char*) PyBytes_AS_STRING(s))
+#define __Pyx_PyBytes_AsUString(s) ((const unsigned char*) PyBytes_AS_STRING(s))
+#define __Pyx_PyObject_AsWritableString(s) ((char*) __Pyx_PyObject_AsString(s))
+#define __Pyx_PyObject_AsWritableSString(s) ((signed char*) __Pyx_PyObject_AsString(s))
+#define __Pyx_PyObject_AsWritableUString(s) ((unsigned char*) __Pyx_PyObject_AsString(s))
+#define __Pyx_PyObject_AsSString(s) ((const signed char*) __Pyx_PyObject_AsString(s))
+#define __Pyx_PyObject_AsUString(s) ((const unsigned char*) __Pyx_PyObject_AsString(s))
+#define __Pyx_PyObject_FromCString(s) __Pyx_PyObject_FromString((const char*)s)
+#define __Pyx_PyBytes_FromCString(s) __Pyx_PyBytes_FromString((const char*)s)
+#define __Pyx_PyByteArray_FromCString(s) __Pyx_PyByteArray_FromString((const char*)s)
+#define __Pyx_PyStr_FromCString(s) __Pyx_PyStr_FromString((const char*)s)
+#define __Pyx_PyUnicode_FromCString(s) __Pyx_PyUnicode_FromString((const char*)s)
+static CYTHON_INLINE size_t __Pyx_Py_UNICODE_strlen(const Py_UNICODE *u) {
+ const Py_UNICODE *u_end = u;
+ while (*u_end++) ;
+ return (size_t)(u_end - u - 1);
+}
+#define __Pyx_PyUnicode_FromUnicode(u) PyUnicode_FromUnicode(u, __Pyx_Py_UNICODE_strlen(u))
+#define __Pyx_PyUnicode_FromUnicodeAndLength PyUnicode_FromUnicode
+#define __Pyx_PyUnicode_AsUnicode PyUnicode_AsUnicode
+#define __Pyx_NewRef(obj) (Py_INCREF(obj), obj)
+#define __Pyx_Owned_Py_None(b) __Pyx_NewRef(Py_None)
+static CYTHON_INLINE PyObject * __Pyx_PyBool_FromLong(long b);
+static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject*);
+static CYTHON_INLINE int __Pyx_PyObject_IsTrueAndDecref(PyObject*);
+static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x);
+#define __Pyx_PySequence_Tuple(obj)\
+ (likely(PyTuple_CheckExact(obj)) ? __Pyx_NewRef(obj) : PySequence_Tuple(obj))
+static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject*);
+static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t);
+#if CYTHON_ASSUME_SAFE_MACROS
+#define __pyx_PyFloat_AsDouble(x) (PyFloat_CheckExact(x) ? PyFloat_AS_DOUBLE(x) : PyFloat_AsDouble(x))
+#else
+#define __pyx_PyFloat_AsDouble(x) PyFloat_AsDouble(x)
+#endif
+#define __pyx_PyFloat_AsFloat(x) ((float) __pyx_PyFloat_AsDouble(x))
+#if PY_MAJOR_VERSION >= 3
+#define __Pyx_PyNumber_Int(x) (PyLong_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Long(x))
+#else
+#define __Pyx_PyNumber_Int(x) (PyInt_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Int(x))
+#endif
+#define __Pyx_PyNumber_Float(x) (PyFloat_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Float(x))
+#if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII
+static int __Pyx_sys_getdefaultencoding_not_ascii;
+/* Module-init helper (compiled only when c_string_encoding=ascii on Py2):
+   queries sys.getdefaultencoding() and records in
+   __Pyx_sys_getdefaultencoding_not_ascii whether the interpreter default
+   differs from ASCII.  If it differs, verifies that the default encoding
+   is at least a superset of ASCII by round-tripping bytes 0..127;
+   otherwise raises ValueError.  Returns 0 on success, -1 with a Python
+   exception set on failure.  All owned references are released via the
+   shared `bad:` cleanup path. */
+static int __Pyx_init_sys_getdefaultencoding_params(void) {
+ PyObject* sys;
+ PyObject* default_encoding = NULL;
+ PyObject* ascii_chars_u = NULL;
+ PyObject* ascii_chars_b = NULL;
+ const char* default_encoding_c;
+ sys = PyImport_ImportModule("sys");
+ if (!sys) goto bad;
+ default_encoding = PyObject_CallMethod(sys, (char*) "getdefaultencoding", NULL);
+ Py_DECREF(sys);
+ if (!default_encoding) goto bad;
+ /* Borrowed char* into default_encoding; valid until its Py_DECREF. */
+ default_encoding_c = PyBytes_AsString(default_encoding);
+ if (!default_encoding_c) goto bad;
+ if (strcmp(default_encoding_c, "ascii") == 0) {
+ __Pyx_sys_getdefaultencoding_not_ascii = 0;
+ } else {
+ char ascii_chars[128];
+ int c;
+ for (c = 0; c < 128; c++) {
+ ascii_chars[c] = c;
+ }
+ __Pyx_sys_getdefaultencoding_not_ascii = 1;
+ /* Round-trip 0..127 through the default encoding: decode as ASCII,
+    re-encode, and require the bytes to come back unchanged. */
+ ascii_chars_u = PyUnicode_DecodeASCII(ascii_chars, 128, NULL);
+ if (!ascii_chars_u) goto bad;
+ ascii_chars_b = PyUnicode_AsEncodedString(ascii_chars_u, default_encoding_c, NULL);
+ if (!ascii_chars_b || !PyBytes_Check(ascii_chars_b) || memcmp(ascii_chars, PyBytes_AS_STRING(ascii_chars_b), 128) != 0) {
+ PyErr_Format(
+ PyExc_ValueError,
+ "This module compiled with c_string_encoding=ascii, but default encoding '%.200s' is not a superset of ascii.",
+ default_encoding_c);
+ goto bad;
+ }
+ Py_DECREF(ascii_chars_u);
+ Py_DECREF(ascii_chars_b);
+ }
+ Py_DECREF(default_encoding);
+ return 0;
+bad:
+ Py_XDECREF(default_encoding);
+ Py_XDECREF(ascii_chars_u);
+ Py_XDECREF(ascii_chars_b);
+ return -1;
+}
+#endif
+#if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT && PY_MAJOR_VERSION >= 3
+#define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_DecodeUTF8(c_str, size, NULL)
+#else
+#define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_Decode(c_str, size, __PYX_DEFAULT_STRING_ENCODING, NULL)
+#if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT
+static char* __PYX_DEFAULT_STRING_ENCODING;
+/* Module-init helper (compiled only when c_string_encoding=default):
+   copies sys.getdefaultencoding() into the malloc'd global
+   __PYX_DEFAULT_STRING_ENCODING, later used by
+   __Pyx_PyUnicode_FromStringAndSize for char* -> unicode decoding.
+   Returns 0 on success, -1 with a Python exception set on failure.
+   The copied buffer is intentionally never freed (lives for the
+   lifetime of the module). */
+static int __Pyx_init_sys_getdefaultencoding_params(void) {
+ PyObject* sys;
+ PyObject* default_encoding = NULL;
+ char* default_encoding_c;
+ sys = PyImport_ImportModule("sys");
+ if (!sys) goto bad;
+ default_encoding = PyObject_CallMethod(sys, (char*) (const char*) "getdefaultencoding", NULL);
+ Py_DECREF(sys);
+ if (!default_encoding) goto bad;
+ default_encoding_c = PyBytes_AsString(default_encoding);
+ if (!default_encoding_c) goto bad;
+ /* Copy out: default_encoding_c points into default_encoding's storage
+    and would dangle after the Py_DECREF below. */
+ __PYX_DEFAULT_STRING_ENCODING = (char*) malloc(strlen(default_encoding_c) + 1);
+ if (!__PYX_DEFAULT_STRING_ENCODING) goto bad;
+ strcpy(__PYX_DEFAULT_STRING_ENCODING, default_encoding_c);
+ Py_DECREF(default_encoding);
+ return 0;
+bad:
+ Py_XDECREF(default_encoding);
+ return -1;
+}
+#endif
+#endif
+
+
+/* Test for GCC > 2.95 */
+#if defined(__GNUC__) && (__GNUC__ > 2 || (__GNUC__ == 2 && (__GNUC_MINOR__ > 95)))
+ #define likely(x) __builtin_expect(!!(x), 1)
+ #define unlikely(x) __builtin_expect(!!(x), 0)
+#else /* !__GNUC__ or GCC < 2.95 */
+ #define likely(x) (x)
+ #define unlikely(x) (x)
+#endif /* __GNUC__ */
+static CYTHON_INLINE void __Pyx_pretend_to_initialize(void* ptr) { (void)ptr; }
+
+static PyObject *__pyx_m = NULL;
+static PyObject *__pyx_d;
+static PyObject *__pyx_b;
+static PyObject *__pyx_cython_runtime = NULL;
+static PyObject *__pyx_empty_tuple;
+static PyObject *__pyx_empty_bytes;
+static PyObject *__pyx_empty_unicode;
+static int __pyx_lineno;
+static int __pyx_clineno = 0;
+static const char * __pyx_cfilenm= __FILE__;
+static const char *__pyx_filename;
+
+
+static const char *__pyx_f[] = {
+ "g2l_v4.pyx",
+};
+
+/*--- Type declarations ---*/
+
+/* --- Runtime support code (head) --- */
+/* Refnanny.proto */
+#ifndef CYTHON_REFNANNY
+ #define CYTHON_REFNANNY 0
+#endif
+#if CYTHON_REFNANNY
+ typedef struct {
+ void (*INCREF)(void*, PyObject*, int);
+ void (*DECREF)(void*, PyObject*, int);
+ void (*GOTREF)(void*, PyObject*, int);
+ void (*GIVEREF)(void*, PyObject*, int);
+ void* (*SetupContext)(const char*, int, const char*);
+ void (*FinishContext)(void**);
+ } __Pyx_RefNannyAPIStruct;
+ static __Pyx_RefNannyAPIStruct *__Pyx_RefNanny = NULL;
+ static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname);
+ #define __Pyx_RefNannyDeclarations void *__pyx_refnanny = NULL;
+#ifdef WITH_THREAD
+ #define __Pyx_RefNannySetupContext(name, acquire_gil)\
+ if (acquire_gil) {\
+ PyGILState_STATE __pyx_gilstate_save = PyGILState_Ensure();\
+ __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__);\
+ PyGILState_Release(__pyx_gilstate_save);\
+ } else {\
+ __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__);\
+ }
+#else
+ #define __Pyx_RefNannySetupContext(name, acquire_gil)\
+ __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__)
+#endif
+ #define __Pyx_RefNannyFinishContext()\
+ __Pyx_RefNanny->FinishContext(&__pyx_refnanny)
+ #define __Pyx_INCREF(r) __Pyx_RefNanny->INCREF(__pyx_refnanny, (PyObject *)(r), __LINE__)
+ #define __Pyx_DECREF(r) __Pyx_RefNanny->DECREF(__pyx_refnanny, (PyObject *)(r), __LINE__)
+ #define __Pyx_GOTREF(r) __Pyx_RefNanny->GOTREF(__pyx_refnanny, (PyObject *)(r), __LINE__)
+ #define __Pyx_GIVEREF(r) __Pyx_RefNanny->GIVEREF(__pyx_refnanny, (PyObject *)(r), __LINE__)
+ #define __Pyx_XINCREF(r) do { if((r) != NULL) {__Pyx_INCREF(r); }} while(0)
+ #define __Pyx_XDECREF(r) do { if((r) != NULL) {__Pyx_DECREF(r); }} while(0)
+ #define __Pyx_XGOTREF(r) do { if((r) != NULL) {__Pyx_GOTREF(r); }} while(0)
+ #define __Pyx_XGIVEREF(r) do { if((r) != NULL) {__Pyx_GIVEREF(r);}} while(0)
+#else
+ #define __Pyx_RefNannyDeclarations
+ #define __Pyx_RefNannySetupContext(name, acquire_gil)
+ #define __Pyx_RefNannyFinishContext()
+ #define __Pyx_INCREF(r) Py_INCREF(r)
+ #define __Pyx_DECREF(r) Py_DECREF(r)
+ #define __Pyx_GOTREF(r)
+ #define __Pyx_GIVEREF(r)
+ #define __Pyx_XINCREF(r) Py_XINCREF(r)
+ #define __Pyx_XDECREF(r) Py_XDECREF(r)
+ #define __Pyx_XGOTREF(r)
+ #define __Pyx_XGIVEREF(r)
+#endif
+#define __Pyx_XDECREF_SET(r, v) do {\
+ PyObject *tmp = (PyObject *) r;\
+ r = v; __Pyx_XDECREF(tmp);\
+ } while (0)
+#define __Pyx_DECREF_SET(r, v) do {\
+ PyObject *tmp = (PyObject *) r;\
+ r = v; __Pyx_DECREF(tmp);\
+ } while (0)
+#define __Pyx_CLEAR(r) do { PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);} while(0)
+#define __Pyx_XCLEAR(r) do { if((r) != NULL) {PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);}} while(0)
+
+/* RaiseArgTupleInvalid.proto */
+static void __Pyx_RaiseArgtupleInvalid(const char* func_name, int exact,
+ Py_ssize_t num_min, Py_ssize_t num_max, Py_ssize_t num_found);
+
+/* RaiseDoubleKeywords.proto */
+static void __Pyx_RaiseDoubleKeywordsError(const char* func_name, PyObject* kw_name);
+
+/* ParseKeywords.proto */
+static int __Pyx_ParseOptionalKeywords(PyObject *kwds, PyObject **argnames[],\
+ PyObject *kwds2, PyObject *values[], Py_ssize_t num_pos_args,\
+ const char* function_name);
+
+/* IncludeStringH.proto */
+#include <string.h>  /* restored missing header name: strcmp/memcmp/strlen/strcpy are used below */
+
+/* BytesEquals.proto */
+static CYTHON_INLINE int __Pyx_PyBytes_Equals(PyObject* s1, PyObject* s2, int equals);
+
+/* UnicodeEquals.proto */
+static CYTHON_INLINE int __Pyx_PyUnicode_Equals(PyObject* s1, PyObject* s2, int equals);
+
+/* StrEquals.proto */
+#if PY_MAJOR_VERSION >= 3
+#define __Pyx_PyString_Equals __Pyx_PyUnicode_Equals
+#else
+#define __Pyx_PyString_Equals __Pyx_PyBytes_Equals
+#endif
+
+/* PyIntFromDouble.proto */
+#if PY_MAJOR_VERSION < 3
+static CYTHON_INLINE PyObject* __Pyx_PyInt_FromDouble(double value);
+#else
+#define __Pyx_PyInt_FromDouble(value) PyLong_FromDouble(value)
+#endif
+
+/* PyIntCompare.proto */
+static CYTHON_INLINE PyObject* __Pyx_PyInt_EqObjC(PyObject *op1, PyObject *op2, long intval, long inplace);
+
+/* PyDictVersioning.proto */
+#if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_TYPE_SLOTS
+#define __PYX_DICT_VERSION_INIT ((PY_UINT64_T) -1)
+#define __PYX_GET_DICT_VERSION(dict) (((PyDictObject*)(dict))->ma_version_tag)
+#define __PYX_UPDATE_DICT_CACHE(dict, value, cache_var, version_var)\
+ (version_var) = __PYX_GET_DICT_VERSION(dict);\
+ (cache_var) = (value);
+#define __PYX_PY_DICT_LOOKUP_IF_MODIFIED(VAR, DICT, LOOKUP) {\
+ static PY_UINT64_T __pyx_dict_version = 0;\
+ static PyObject *__pyx_dict_cached_value = NULL;\
+ if (likely(__PYX_GET_DICT_VERSION(DICT) == __pyx_dict_version)) {\
+ (VAR) = __pyx_dict_cached_value;\
+ } else {\
+ (VAR) = __pyx_dict_cached_value = (LOOKUP);\
+ __pyx_dict_version = __PYX_GET_DICT_VERSION(DICT);\
+ }\
+}
+static CYTHON_INLINE PY_UINT64_T __Pyx_get_tp_dict_version(PyObject *obj);
+static CYTHON_INLINE PY_UINT64_T __Pyx_get_object_dict_version(PyObject *obj);
+static CYTHON_INLINE int __Pyx_object_dict_version_matches(PyObject* obj, PY_UINT64_T tp_dict_version, PY_UINT64_T obj_dict_version);
+#else
+#define __PYX_GET_DICT_VERSION(dict) (0)
+#define __PYX_UPDATE_DICT_CACHE(dict, value, cache_var, version_var)
+#define __PYX_PY_DICT_LOOKUP_IF_MODIFIED(VAR, DICT, LOOKUP) (VAR) = (LOOKUP);
+#endif
+
+/* PyObjectGetAttrStr.proto */
+#if CYTHON_USE_TYPE_SLOTS
+static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStr(PyObject* obj, PyObject* attr_name);
+#else
+#define __Pyx_PyObject_GetAttrStr(o,n) PyObject_GetAttr(o,n)
+#endif
+
+/* PyThreadStateGet.proto */
+#if CYTHON_FAST_THREAD_STATE
+#define __Pyx_PyThreadState_declare PyThreadState *__pyx_tstate;
+#define __Pyx_PyThreadState_assign __pyx_tstate = __Pyx_PyThreadState_Current;
+#define __Pyx_PyErr_Occurred() __pyx_tstate->curexc_type
+#else
+#define __Pyx_PyThreadState_declare
+#define __Pyx_PyThreadState_assign
+#define __Pyx_PyErr_Occurred() PyErr_Occurred()
+#endif
+
+/* PyErrFetchRestore.proto */
+#if CYTHON_FAST_THREAD_STATE
+#define __Pyx_PyErr_Clear() __Pyx_ErrRestore(NULL, NULL, NULL)
+#define __Pyx_ErrRestoreWithState(type, value, tb) __Pyx_ErrRestoreInState(PyThreadState_GET(), type, value, tb)
+#define __Pyx_ErrFetchWithState(type, value, tb) __Pyx_ErrFetchInState(PyThreadState_GET(), type, value, tb)
+#define __Pyx_ErrRestore(type, value, tb) __Pyx_ErrRestoreInState(__pyx_tstate, type, value, tb)
+#define __Pyx_ErrFetch(type, value, tb) __Pyx_ErrFetchInState(__pyx_tstate, type, value, tb)
+static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb);
+static CYTHON_INLINE void __Pyx_ErrFetchInState(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb);
+#if CYTHON_COMPILING_IN_CPYTHON
+#define __Pyx_PyErr_SetNone(exc) (Py_INCREF(exc), __Pyx_ErrRestore((exc), NULL, NULL))
+#else
+#define __Pyx_PyErr_SetNone(exc) PyErr_SetNone(exc)
+#endif
+#else
+#define __Pyx_PyErr_Clear() PyErr_Clear()
+#define __Pyx_PyErr_SetNone(exc) PyErr_SetNone(exc)
+#define __Pyx_ErrRestoreWithState(type, value, tb) PyErr_Restore(type, value, tb)
+#define __Pyx_ErrFetchWithState(type, value, tb) PyErr_Fetch(type, value, tb)
+#define __Pyx_ErrRestoreInState(tstate, type, value, tb) PyErr_Restore(type, value, tb)
+#define __Pyx_ErrFetchInState(tstate, type, value, tb) PyErr_Fetch(type, value, tb)
+#define __Pyx_ErrRestore(type, value, tb) PyErr_Restore(type, value, tb)
+#define __Pyx_ErrFetch(type, value, tb) PyErr_Fetch(type, value, tb)
+#endif
+
+/* CLineInTraceback.proto */
+#ifdef CYTHON_CLINE_IN_TRACEBACK
+#define __Pyx_CLineForTraceback(tstate, c_line) (((CYTHON_CLINE_IN_TRACEBACK)) ? c_line : 0)
+#else
+static int __Pyx_CLineForTraceback(PyThreadState *tstate, int c_line);
+#endif
+
+/* CodeObjectCache.proto */
+typedef struct {
+ PyCodeObject* code_object;
+ int code_line;
+} __Pyx_CodeObjectCacheEntry;
+struct __Pyx_CodeObjectCache {
+ int count;
+ int max_count;
+ __Pyx_CodeObjectCacheEntry* entries;
+};
+static struct __Pyx_CodeObjectCache __pyx_code_cache = {0,0,NULL};
+static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line);
+static PyCodeObject *__pyx_find_code_object(int code_line);
+static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object);
+
+/* AddTraceback.proto */
+static void __Pyx_AddTraceback(const char *funcname, int c_line,
+ int py_line, const char *filename);
+
+/* GCCDiagnostics.proto */
+#if defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 6))
+#define __Pyx_HAS_GCC_DIAGNOSTIC
+#endif
+
+/* CIntFromPy.proto */
+static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *);
+
+/* CIntToPy.proto */
+static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value);
+
+/* CIntToPy.proto */
+static CYTHON_INLINE PyObject* __Pyx_PyInt_From_int(int value);
+
+/* CIntFromPy.proto */
+static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *);
+
+/* FastTypeChecks.proto */
+#if CYTHON_COMPILING_IN_CPYTHON
+#define __Pyx_TypeCheck(obj, type) __Pyx_IsSubtype(Py_TYPE(obj), (PyTypeObject *)type)
+static CYTHON_INLINE int __Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b);
+static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches(PyObject *err, PyObject *type);
+static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches2(PyObject *err, PyObject *type1, PyObject *type2);
+#else
+#define __Pyx_TypeCheck(obj, type) PyObject_TypeCheck(obj, (PyTypeObject *)type)
+#define __Pyx_PyErr_GivenExceptionMatches(err, type) PyErr_GivenExceptionMatches(err, type)
+#define __Pyx_PyErr_GivenExceptionMatches2(err, type1, type2) (PyErr_GivenExceptionMatches(err, type1) || PyErr_GivenExceptionMatches(err, type2))
+#endif
+#define __Pyx_PyException_Check(obj) __Pyx_TypeCheck(obj, PyExc_Exception)
+
+/* CheckBinaryVersion.proto */
+static int __Pyx_check_binary_version(void);
+
+/* InitStrings.proto */
+static int __Pyx_InitStrings(__Pyx_StringTabEntry *t);
+
+
+/* Module declarations from 'g2l_v4' */
+#define __Pyx_MODULE_NAME "g2l_v4"
+extern int __pyx_module_is_main_g2l_v4;
+int __pyx_module_is_main_g2l_v4 = 0;
+
+/* Implementation of 'g2l_v4' */
+static const char __pyx_k_jm[] = "jm";
+static const char __pyx_k_g2l[] = "g2l";
+static const char __pyx_k_lat[] = "lat";
+static const char __pyx_k_lon[] = "lon";
+static const char __pyx_k_dlat[] = "dlat";
+static const char __pyx_k_dlon[] = "dlon";
+static const char __pyx_k_free[] = "free";
+static const char __pyx_k_main[] = "__main__";
+static const char __pyx_k_name[] = "__name__";
+static const char __pyx_k_test[] = "__test__";
+static const char __pyx_k_edge1[] = "edge1";
+static const char __pyx_k_edge2[] = "edge2";
+static const char __pyx_k_center[] = "center";
+static const char __pyx_k_g2l_v4[] = "g2l_v4";
+static const char __pyx_k_flag_360[] = "flag_360";
+static const char __pyx_k_location[] = "location";
+static const char __pyx_k_the_grid[] = "the_grid";
+static const char __pyx_k_edge_flag[] = "edge_flag";
+static const char __pyx_k_start_lat[] = "start_lat";
+static const char __pyx_k_start_lon[] = "start_lon";
+static const char __pyx_k_faux_grids[] = "faux_grids";
+static const char __pyx_k_g2l_v4_pyx[] = "g2l_v4.pyx";
+static const char __pyx_k_center_flag[] = "center_flag";
+static const char __pyx_k_lat_lon_flag[] = "lat_lon_flag";
+static const char __pyx_k_cline_in_traceback[] = "cline_in_traceback";
+static PyObject *__pyx_n_s_center;
+static PyObject *__pyx_n_s_center_flag;
+static PyObject *__pyx_n_s_cline_in_traceback;
+static PyObject *__pyx_n_s_dlat;
+static PyObject *__pyx_n_s_dlon;
+static PyObject *__pyx_n_s_edge1;
+static PyObject *__pyx_n_s_edge2;
+static PyObject *__pyx_n_s_edge_flag;
+static PyObject *__pyx_n_s_faux_grids;
+static PyObject *__pyx_n_s_flag_360;
+static PyObject *__pyx_n_s_free;
+static PyObject *__pyx_n_s_g2l;
+static PyObject *__pyx_n_s_g2l_v4;
+static PyObject *__pyx_kp_s_g2l_v4_pyx;
+static PyObject *__pyx_n_s_jm;
+static PyObject *__pyx_n_s_lat;
+static PyObject *__pyx_n_s_lat_lon_flag;
+static PyObject *__pyx_n_s_location;
+static PyObject *__pyx_n_s_lon;
+static PyObject *__pyx_n_s_main;
+static PyObject *__pyx_n_s_name;
+static PyObject *__pyx_n_s_start_lat;
+static PyObject *__pyx_n_s_start_lon;
+static PyObject *__pyx_n_s_test;
+static PyObject *__pyx_n_s_the_grid;
+static PyObject *__pyx_pf_6g2l_v4_g2l(CYTHON_UNUSED PyObject *__pyx_self, double __pyx_v_the_grid, double __pyx_v_start_lon, double __pyx_v_start_lat, double __pyx_v_dlon, double __pyx_v_dlat, int __pyx_v_jm, PyObject *__pyx_v_lat_lon_flag, PyObject *__pyx_v_center_flag, PyObject *__pyx_v_edge_flag, PyObject *__pyx_v_flag_360, PyObject *__pyx_v_faux_grids); /* proto */
+static PyObject *__pyx_float_57_2957795;
+static PyObject *__pyx_int_0;
+static PyObject *__pyx_int_2;
+static PyObject *__pyx_tuple_;
+static PyObject *__pyx_codeobj__2;
+/* Late includes */
+
+/* "g2l_v4.pyx":1
+ * def g2l(double the_grid,double start_lon, double start_lat, # <<<<<<<<<<<<<<
+ * double dlon, double dlat, int jm, lat_lon_flag, center_flag="center",
+ * edge_flag=False,flag_360=False,faux_grids=0):
+ */
+
+/* Python wrapper */
+static PyObject *__pyx_pw_6g2l_v4_1g2l(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/
+static PyMethodDef __pyx_mdef_6g2l_v4_1g2l = {"g2l", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_6g2l_v4_1g2l, METH_VARARGS|METH_KEYWORDS, 0};
+/* Python-callable wrapper for g2l_v4.g2l: unpacks positional and keyword
+   arguments into 11 slots (7 required: the_grid, start_lon, start_lat,
+   dlon, dlat, jm, lat_lon_flag; 4 optional with defaults:
+   center_flag="center", edge_flag=False, flag_360=False, faux_grids=0),
+   converts the C-typed ones (double/int), and dispatches to the
+   implementation function __pyx_pf_6g2l_v4_g2l.  On bad arguments it
+   raises via __Pyx_RaiseArgtupleInvalid / __Pyx_ParseOptionalKeywords
+   and returns NULL. */
+static PyObject *__pyx_pw_6g2l_v4_1g2l(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) {
+ double __pyx_v_the_grid;
+ double __pyx_v_start_lon;
+ double __pyx_v_start_lat;
+ double __pyx_v_dlon;
+ double __pyx_v_dlat;
+ int __pyx_v_jm;
+ PyObject *__pyx_v_lat_lon_flag = 0;
+ PyObject *__pyx_v_center_flag = 0;
+ PyObject *__pyx_v_edge_flag = 0;
+ PyObject *__pyx_v_flag_360 = 0;
+ PyObject *__pyx_v_faux_grids = 0;
+ int __pyx_lineno = 0;
+ const char *__pyx_filename = NULL;
+ int __pyx_clineno = 0;
+ PyObject *__pyx_r = 0;
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("g2l (wrapper)", 0);
+ {
+ static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_the_grid,&__pyx_n_s_start_lon,&__pyx_n_s_start_lat,&__pyx_n_s_dlon,&__pyx_n_s_dlat,&__pyx_n_s_jm,&__pyx_n_s_lat_lon_flag,&__pyx_n_s_center_flag,&__pyx_n_s_edge_flag,&__pyx_n_s_flag_360,&__pyx_n_s_faux_grids,0};
+ PyObject* values[11] = {0,0,0,0,0,0,0,0,0,0,0};
+ /* Pre-load default values for the 4 optional parameters (slots 7-10). */
+ values[7] = ((PyObject *)__pyx_n_s_center);
+
+ /* "g2l_v4.pyx":3
+ * def g2l(double the_grid,double start_lon, double start_lat,
+ * double dlon, double dlat, int jm, lat_lon_flag, center_flag="center",
+ * edge_flag=False,flag_360=False,faux_grids=0): # <<<<<<<<<<<<<<
+ * # Note dlat and dlon in RADIANS!
+ * cdef double location, edge1, edge2
+ */
+ values[8] = ((PyObject *)Py_False);
+ values[9] = ((PyObject *)Py_False);
+ values[10] = ((PyObject *)__pyx_int_0);
+ /* Keyword path: first collect positionals (fallthrough switch counts
+    down from the highest slot), then fill remaining slots from kwds. */
+ if (unlikely(__pyx_kwds)) {
+ Py_ssize_t kw_args;
+ const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args);
+ switch (pos_args) {
+ case 11: values[10] = PyTuple_GET_ITEM(__pyx_args, 10);
+ CYTHON_FALLTHROUGH;
+ case 10: values[9] = PyTuple_GET_ITEM(__pyx_args, 9);
+ CYTHON_FALLTHROUGH;
+ case 9: values[8] = PyTuple_GET_ITEM(__pyx_args, 8);
+ CYTHON_FALLTHROUGH;
+ case 8: values[7] = PyTuple_GET_ITEM(__pyx_args, 7);
+ CYTHON_FALLTHROUGH;
+ case 7: values[6] = PyTuple_GET_ITEM(__pyx_args, 6);
+ CYTHON_FALLTHROUGH;
+ case 6: values[5] = PyTuple_GET_ITEM(__pyx_args, 5);
+ CYTHON_FALLTHROUGH;
+ case 5: values[4] = PyTuple_GET_ITEM(__pyx_args, 4);
+ CYTHON_FALLTHROUGH;
+ case 4: values[3] = PyTuple_GET_ITEM(__pyx_args, 3);
+ CYTHON_FALLTHROUGH;
+ case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2);
+ CYTHON_FALLTHROUGH;
+ case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1);
+ CYTHON_FALLTHROUGH;
+ case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0);
+ CYTHON_FALLTHROUGH;
+ case 0: break;
+ default: goto __pyx_L5_argtuple_error;
+ }
+ kw_args = PyDict_Size(__pyx_kwds);
+ /* This switch starts at the first slot NOT filled positionally and
+    falls through upward; a missing required keyword raises. */
+ switch (pos_args) {
+ case 0:
+ if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_the_grid)) != 0)) kw_args--;
+ else goto __pyx_L5_argtuple_error;
+ CYTHON_FALLTHROUGH;
+ case 1:
+ if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_start_lon)) != 0)) kw_args--;
+ else {
+ __Pyx_RaiseArgtupleInvalid("g2l", 0, 7, 11, 1); __PYX_ERR(0, 1, __pyx_L3_error)
+ }
+ CYTHON_FALLTHROUGH;
+ case 2:
+ if (likely((values[2] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_start_lat)) != 0)) kw_args--;
+ else {
+ __Pyx_RaiseArgtupleInvalid("g2l", 0, 7, 11, 2); __PYX_ERR(0, 1, __pyx_L3_error)
+ }
+ CYTHON_FALLTHROUGH;
+ case 3:
+ if (likely((values[3] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_dlon)) != 0)) kw_args--;
+ else {
+ __Pyx_RaiseArgtupleInvalid("g2l", 0, 7, 11, 3); __PYX_ERR(0, 1, __pyx_L3_error)
+ }
+ CYTHON_FALLTHROUGH;
+ case 4:
+ if (likely((values[4] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_dlat)) != 0)) kw_args--;
+ else {
+ __Pyx_RaiseArgtupleInvalid("g2l", 0, 7, 11, 4); __PYX_ERR(0, 1, __pyx_L3_error)
+ }
+ CYTHON_FALLTHROUGH;
+ case 5:
+ if (likely((values[5] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_jm)) != 0)) kw_args--;
+ else {
+ __Pyx_RaiseArgtupleInvalid("g2l", 0, 7, 11, 5); __PYX_ERR(0, 1, __pyx_L3_error)
+ }
+ CYTHON_FALLTHROUGH;
+ case 6:
+ if (likely((values[6] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_lat_lon_flag)) != 0)) kw_args--;
+ else {
+ __Pyx_RaiseArgtupleInvalid("g2l", 0, 7, 11, 6); __PYX_ERR(0, 1, __pyx_L3_error)
+ }
+ CYTHON_FALLTHROUGH;
+ case 7:
+ if (kw_args > 0) {
+ PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_center_flag);
+ if (value) { values[7] = value; kw_args--; }
+ }
+ CYTHON_FALLTHROUGH;
+ case 8:
+ if (kw_args > 0) {
+ PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_edge_flag);
+ if (value) { values[8] = value; kw_args--; }
+ }
+ CYTHON_FALLTHROUGH;
+ case 9:
+ if (kw_args > 0) {
+ PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_flag_360);
+ if (value) { values[9] = value; kw_args--; }
+ }
+ CYTHON_FALLTHROUGH;
+ case 10:
+ if (kw_args > 0) {
+ PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_faux_grids);
+ if (value) { values[10] = value; kw_args--; }
+ }
+ }
+ /* Any keywords left over are unexpected/duplicate: let the helper
+    raise the appropriate TypeError. */
+ if (unlikely(kw_args > 0)) {
+ if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "g2l") < 0)) __PYX_ERR(0, 1, __pyx_L3_error)
+ }
+ } else {
+ /* Positional-only fast path: 7 to 11 positional args accepted. */
+ switch (PyTuple_GET_SIZE(__pyx_args)) {
+ case 11: values[10] = PyTuple_GET_ITEM(__pyx_args, 10);
+ CYTHON_FALLTHROUGH;
+ case 10: values[9] = PyTuple_GET_ITEM(__pyx_args, 9);
+ CYTHON_FALLTHROUGH;
+ case 9: values[8] = PyTuple_GET_ITEM(__pyx_args, 8);
+ CYTHON_FALLTHROUGH;
+ case 8: values[7] = PyTuple_GET_ITEM(__pyx_args, 7);
+ CYTHON_FALLTHROUGH;
+ case 7: values[6] = PyTuple_GET_ITEM(__pyx_args, 6);
+ values[5] = PyTuple_GET_ITEM(__pyx_args, 5);
+ values[4] = PyTuple_GET_ITEM(__pyx_args, 4);
+ values[3] = PyTuple_GET_ITEM(__pyx_args, 3);
+ values[2] = PyTuple_GET_ITEM(__pyx_args, 2);
+ values[1] = PyTuple_GET_ITEM(__pyx_args, 1);
+ values[0] = PyTuple_GET_ITEM(__pyx_args, 0);
+ break;
+ default: goto __pyx_L5_argtuple_error;
+ }
+ }
+ /* Convert C-typed parameters; -1 doubles as the error sentinel, so a
+    PyErr_Occurred() check disambiguates a real -1 value. */
+ __pyx_v_the_grid = __pyx_PyFloat_AsDouble(values[0]); if (unlikely((__pyx_v_the_grid == (double)-1) && PyErr_Occurred())) __PYX_ERR(0, 1, __pyx_L3_error)
+ __pyx_v_start_lon = __pyx_PyFloat_AsDouble(values[1]); if (unlikely((__pyx_v_start_lon == (double)-1) && PyErr_Occurred())) __PYX_ERR(0, 1, __pyx_L3_error)
+ __pyx_v_start_lat = __pyx_PyFloat_AsDouble(values[2]); if (unlikely((__pyx_v_start_lat == (double)-1) && PyErr_Occurred())) __PYX_ERR(0, 1, __pyx_L3_error)
+ __pyx_v_dlon = __pyx_PyFloat_AsDouble(values[3]); if (unlikely((__pyx_v_dlon == (double)-1) && PyErr_Occurred())) __PYX_ERR(0, 2, __pyx_L3_error)
+ __pyx_v_dlat = __pyx_PyFloat_AsDouble(values[4]); if (unlikely((__pyx_v_dlat == (double)-1) && PyErr_Occurred())) __PYX_ERR(0, 2, __pyx_L3_error)
+ __pyx_v_jm = __Pyx_PyInt_As_int(values[5]); if (unlikely((__pyx_v_jm == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 2, __pyx_L3_error)
+ __pyx_v_lat_lon_flag = values[6];
+ __pyx_v_center_flag = values[7];
+ __pyx_v_edge_flag = values[8];
+ __pyx_v_flag_360 = values[9];
+ __pyx_v_faux_grids = values[10];
+ }
+ goto __pyx_L4_argument_unpacking_done;
+ __pyx_L5_argtuple_error:;
+ __Pyx_RaiseArgtupleInvalid("g2l", 0, 7, 11, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 1, __pyx_L3_error)
+ __pyx_L3_error:;
+ __Pyx_AddTraceback("g2l_v4.g2l", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __Pyx_RefNannyFinishContext();
+ return NULL;
+ __pyx_L4_argument_unpacking_done:;
+ __pyx_r = __pyx_pf_6g2l_v4_g2l(__pyx_self, __pyx_v_the_grid, __pyx_v_start_lon, __pyx_v_start_lat, __pyx_v_dlon, __pyx_v_dlat, __pyx_v_jm, __pyx_v_lat_lon_flag, __pyx_v_center_flag, __pyx_v_edge_flag, __pyx_v_flag_360, __pyx_v_faux_grids);
+
+ /* "g2l_v4.pyx":1
+ * def g2l(double the_grid,double start_lon, double start_lat, # <<<<<<<<<<<<<<
+ * double dlon, double dlat, int jm, lat_lon_flag, center_flag="center",
+ * edge_flag=False,flag_360=False,faux_grids=0):
+ */
+
+ /* function exit code */
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+static PyObject *__pyx_pf_6g2l_v4_g2l(CYTHON_UNUSED PyObject *__pyx_self, double __pyx_v_the_grid, double __pyx_v_start_lon, double __pyx_v_start_lat, double __pyx_v_dlon, double __pyx_v_dlat, int __pyx_v_jm, PyObject *__pyx_v_lat_lon_flag, PyObject *__pyx_v_center_flag, PyObject *__pyx_v_edge_flag, PyObject *__pyx_v_flag_360, PyObject *__pyx_v_faux_grids) {
+ double __pyx_v_location;
+ double __pyx_v_edge1;
+ double __pyx_v_edge2;
+ PyObject *__pyx_r = NULL;
+ __Pyx_RefNannyDeclarations
+ int __pyx_t_1;
+ PyObject *__pyx_t_2 = NULL;
+ PyObject *__pyx_t_3 = NULL;
+ PyObject *__pyx_t_4 = NULL;
+ PyObject *__pyx_t_5 = NULL;
+ double __pyx_t_6;
+ int __pyx_t_7;
+ int __pyx_lineno = 0;
+ const char *__pyx_filename = NULL;
+ int __pyx_clineno = 0;
+ __Pyx_RefNannySetupContext("g2l", 0);
+
+ /* "g2l_v4.pyx":7
+ * cdef double location, edge1, edge2
+ *
+ * location = -999.0 # <<<<<<<<<<<<<<
+ * edge1 = -999.0
+ * edge2 = -990.0
+ */
+ __pyx_v_location = -999.0;
+
+ /* "g2l_v4.pyx":8
+ *
+ * location = -999.0
+ * edge1 = -999.0 # <<<<<<<<<<<<<<
+ * edge2 = -990.0
+ * if lat_lon_flag == "lon" :
+ */
+ __pyx_v_edge1 = -999.0;
+
+ /* "g2l_v4.pyx":9
+ * location = -999.0
+ * edge1 = -999.0
+ * edge2 = -990.0 # <<<<<<<<<<<<<<
+ * if lat_lon_flag == "lon" :
+ * # for regular and gaussian grids.
+ */
+ __pyx_v_edge2 = -990.0;
+
+ /* "g2l_v4.pyx":10
+ * edge1 = -999.0
+ * edge2 = -990.0
+ * if lat_lon_flag == "lon" : # <<<<<<<<<<<<<<
+ * # for regular and gaussian grids.
+ * if center_flag == "center" :
+ */
+ __pyx_t_1 = (__Pyx_PyString_Equals(__pyx_v_lat_lon_flag, __pyx_n_s_lon, Py_EQ)); if (unlikely(__pyx_t_1 < 0)) __PYX_ERR(0, 10, __pyx_L1_error)
+ if (__pyx_t_1) {
+
+ /* "g2l_v4.pyx":12
+ * if lat_lon_flag == "lon" :
+ * # for regular and gaussian grids.
+ * if center_flag == "center" : # <<<<<<<<<<<<<<
+ * # NOTE! the_grid needs to be the i,j not the gridID!
+ * location = start_lon + (int(the_grid)) \
+ */
+ __pyx_t_1 = (__Pyx_PyString_Equals(__pyx_v_center_flag, __pyx_n_s_center, Py_EQ)); if (unlikely(__pyx_t_1 < 0)) __PYX_ERR(0, 12, __pyx_L1_error)
+ if (__pyx_t_1) {
+
+ /* "g2l_v4.pyx":14
+ * if center_flag == "center" :
+ * # NOTE! the_grid needs to be the i,j not the gridID!
+ * location = start_lon + (int(the_grid)) \ # <<<<<<<<<<<<<<
+ * * 57.2957795*(dlon)
+ * elif center_flag == "free" :
+ */
+ __pyx_t_2 = PyFloat_FromDouble(__pyx_v_start_lon); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 14, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __pyx_t_3 = __Pyx_PyInt_FromDouble(__pyx_v_the_grid); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 14, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+
+ /* "g2l_v4.pyx":15
+ * # NOTE! the_grid needs to be the i,j not the gridID!
+ * location = start_lon + (int(the_grid)) \
+ * * 57.2957795*(dlon) # <<<<<<<<<<<<<<
+ * elif center_flag == "free" :
+ * # NOTE! the_grid needs to be the i,j not the gridID!
+ */
+ __pyx_t_4 = PyNumber_Multiply(__pyx_t_3, __pyx_float_57_2957795); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 15, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __pyx_t_3 = PyFloat_FromDouble(__pyx_v_dlon); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 15, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_t_5 = PyNumber_Multiply(__pyx_t_4, __pyx_t_3); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 15, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_5);
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+
+ /* "g2l_v4.pyx":14
+ * if center_flag == "center" :
+ * # NOTE! the_grid needs to be the i,j not the gridID!
+ * location = start_lon + (int(the_grid)) \ # <<<<<<<<<<<<<<
+ * * 57.2957795*(dlon)
+ * elif center_flag == "free" :
+ */
+ __pyx_t_3 = PyNumber_Add(__pyx_t_2, __pyx_t_5); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 14, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ __pyx_t_6 = __pyx_PyFloat_AsDouble(__pyx_t_3); if (unlikely((__pyx_t_6 == (double)-1) && PyErr_Occurred())) __PYX_ERR(0, 14, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __pyx_v_location = __pyx_t_6;
+
+ /* "g2l_v4.pyx":12
+ * if lat_lon_flag == "lon" :
+ * # for regular and gaussian grids.
+ * if center_flag == "center" : # <<<<<<<<<<<<<<
+ * # NOTE! the_grid needs to be the i,j not the gridID!
+ * location = start_lon + (int(the_grid)) \
+ */
+ goto __pyx_L4;
+ }
+
+ /* "g2l_v4.pyx":16
+ * location = start_lon + (int(the_grid)) \
+ * * 57.2957795*(dlon)
+ * elif center_flag == "free" : # <<<<<<<<<<<<<<
+ * # NOTE! the_grid needs to be the i,j not the gridID!
+ * location = start_lon + (the_grid) * \
+ */
+ __pyx_t_1 = (__Pyx_PyString_Equals(__pyx_v_center_flag, __pyx_n_s_free, Py_EQ)); if (unlikely(__pyx_t_1 < 0)) __PYX_ERR(0, 16, __pyx_L1_error)
+ if (__pyx_t_1) {
+
+ /* "g2l_v4.pyx":18
+ * elif center_flag == "free" :
+ * # NOTE! the_grid needs to be the i,j not the gridID!
+ * location = start_lon + (the_grid) * \ # <<<<<<<<<<<<<<
+ * 57.2957795*(dlon)
+ * edge1 = location - 28.64788975*(dlon)
+ */
+ __pyx_v_location = (__pyx_v_start_lon + ((__pyx_v_the_grid * 57.2957795) * __pyx_v_dlon));
+
+ /* "g2l_v4.pyx":16
+ * location = start_lon + (int(the_grid)) \
+ * * 57.2957795*(dlon)
+ * elif center_flag == "free" : # <<<<<<<<<<<<<<
+ * # NOTE! the_grid needs to be the i,j not the gridID!
+ * location = start_lon + (the_grid) * \
+ */
+ }
+ __pyx_L4:;
+
+ /* "g2l_v4.pyx":20
+ * location = start_lon + (the_grid) * \
+ * 57.2957795*(dlon)
+ * edge1 = location - 28.64788975*(dlon) # <<<<<<<<<<<<<<
+ * edge2 = location + 28.64788975*(dlon)
+ * if edge1 < 0.0 :
+ */
+ __pyx_v_edge1 = (__pyx_v_location - (28.64788975 * __pyx_v_dlon));
+
+ /* "g2l_v4.pyx":21
+ * 57.2957795*(dlon)
+ * edge1 = location - 28.64788975*(dlon)
+ * edge2 = location + 28.64788975*(dlon) # <<<<<<<<<<<<<<
+ * if edge1 < 0.0 :
+ * edge1 = 360.0 + edge1
+ */
+ __pyx_v_edge2 = (__pyx_v_location + (28.64788975 * __pyx_v_dlon));
+
+ /* "g2l_v4.pyx":22
+ * edge1 = location - 28.64788975*(dlon)
+ * edge2 = location + 28.64788975*(dlon)
+ * if edge1 < 0.0 : # <<<<<<<<<<<<<<
+ * edge1 = 360.0 + edge1
+ * if edge2 > 360.0 :
+ */
+ __pyx_t_1 = ((__pyx_v_edge1 < 0.0) != 0);
+ if (__pyx_t_1) {
+
+ /* "g2l_v4.pyx":23
+ * edge2 = location + 28.64788975*(dlon)
+ * if edge1 < 0.0 :
+ * edge1 = 360.0 + edge1 # <<<<<<<<<<<<<<
+ * if edge2 > 360.0 :
+ * edge2 = 360.0 - edge2
+ */
+ __pyx_v_edge1 = (360.0 + __pyx_v_edge1);
+
+ /* "g2l_v4.pyx":22
+ * edge1 = location - 28.64788975*(dlon)
+ * edge2 = location + 28.64788975*(dlon)
+ * if edge1 < 0.0 : # <<<<<<<<<<<<<<
+ * edge1 = 360.0 + edge1
+ * if edge2 > 360.0 :
+ */
+ }
+
+ /* "g2l_v4.pyx":24
+ * if edge1 < 0.0 :
+ * edge1 = 360.0 + edge1
+ * if edge2 > 360.0 : # <<<<<<<<<<<<<<
+ * edge2 = 360.0 - edge2
+ * if(not flag_360) :
+ */
+ __pyx_t_1 = ((__pyx_v_edge2 > 360.0) != 0);
+ if (__pyx_t_1) {
+
+ /* "g2l_v4.pyx":25
+ * edge1 = 360.0 + edge1
+ * if edge2 > 360.0 :
+ * edge2 = 360.0 - edge2 # <<<<<<<<<<<<<<
+ * if(not flag_360) :
+ * if location > 180.0 : # put into +/- form
+ */
+ __pyx_v_edge2 = (360.0 - __pyx_v_edge2);
+
+ /* "g2l_v4.pyx":24
+ * if edge1 < 0.0 :
+ * edge1 = 360.0 + edge1
+ * if edge2 > 360.0 : # <<<<<<<<<<<<<<
+ * edge2 = 360.0 - edge2
+ * if(not flag_360) :
+ */
+ }
+
+ /* "g2l_v4.pyx":26
+ * if edge2 > 360.0 :
+ * edge2 = 360.0 - edge2
+ * if(not flag_360) : # <<<<<<<<<<<<<<
+ * if location > 180.0 : # put into +/- form
+ * location = location - 360.0
+ */
+ __pyx_t_1 = __Pyx_PyObject_IsTrue(__pyx_v_flag_360); if (unlikely(__pyx_t_1 < 0)) __PYX_ERR(0, 26, __pyx_L1_error)
+ __pyx_t_7 = ((!__pyx_t_1) != 0);
+ if (__pyx_t_7) {
+
+ /* "g2l_v4.pyx":27
+ * edge2 = 360.0 - edge2
+ * if(not flag_360) :
+ * if location > 180.0 : # put into +/- form # <<<<<<<<<<<<<<
+ * location = location - 360.0
+ * if edge1 > 180.0 :
+ */
+ __pyx_t_7 = ((__pyx_v_location > 180.0) != 0);
+ if (__pyx_t_7) {
+
+ /* "g2l_v4.pyx":28
+ * if(not flag_360) :
+ * if location > 180.0 : # put into +/- form
+ * location = location - 360.0 # <<<<<<<<<<<<<<
+ * if edge1 > 180.0 :
+ * edge1 = edge1 - 360.0
+ */
+ __pyx_v_location = (__pyx_v_location - 360.0);
+
+ /* "g2l_v4.pyx":27
+ * edge2 = 360.0 - edge2
+ * if(not flag_360) :
+ * if location > 180.0 : # put into +/- form # <<<<<<<<<<<<<<
+ * location = location - 360.0
+ * if edge1 > 180.0 :
+ */
+ }
+
+ /* "g2l_v4.pyx":29
+ * if location > 180.0 : # put into +/- form
+ * location = location - 360.0
+ * if edge1 > 180.0 : # <<<<<<<<<<<<<<
+ * edge1 = edge1 - 360.0
+ * if edge2 > 180.0 :
+ */
+ __pyx_t_7 = ((__pyx_v_edge1 > 180.0) != 0);
+ if (__pyx_t_7) {
+
+ /* "g2l_v4.pyx":30
+ * location = location - 360.0
+ * if edge1 > 180.0 :
+ * edge1 = edge1 - 360.0 # <<<<<<<<<<<<<<
+ * if edge2 > 180.0 :
+ * edge2 = edge2 - 360.0
+ */
+ __pyx_v_edge1 = (__pyx_v_edge1 - 360.0);
+
+ /* "g2l_v4.pyx":29
+ * if location > 180.0 : # put into +/- form
+ * location = location - 360.0
+ * if edge1 > 180.0 : # <<<<<<<<<<<<<<
+ * edge1 = edge1 - 360.0
+ * if edge2 > 180.0 :
+ */
+ }
+
+ /* "g2l_v4.pyx":31
+ * if edge1 > 180.0 :
+ * edge1 = edge1 - 360.0
+ * if edge2 > 180.0 : # <<<<<<<<<<<<<<
+ * edge2 = edge2 - 360.0
+ *
+ */
+ __pyx_t_7 = ((__pyx_v_edge2 > 180.0) != 0);
+ if (__pyx_t_7) {
+
+ /* "g2l_v4.pyx":32
+ * edge1 = edge1 - 360.0
+ * if edge2 > 180.0 :
+ * edge2 = edge2 - 360.0 # <<<<<<<<<<<<<<
+ *
+ * elif lat_lon_flag == "lat" :
+ */
+ __pyx_v_edge2 = (__pyx_v_edge2 - 360.0);
+
+ /* "g2l_v4.pyx":31
+ * if edge1 > 180.0 :
+ * edge1 = edge1 - 360.0
+ * if edge2 > 180.0 : # <<<<<<<<<<<<<<
+ * edge2 = edge2 - 360.0
+ *
+ */
+ }
+
+ /* "g2l_v4.pyx":26
+ * if edge2 > 360.0 :
+ * edge2 = 360.0 - edge2
+ * if(not flag_360) : # <<<<<<<<<<<<<<
+ * if location > 180.0 : # put into +/- form
+ * location = location - 360.0
+ */
+ }
+
+ /* "g2l_v4.pyx":10
+ * edge1 = -999.0
+ * edge2 = -990.0
+ * if lat_lon_flag == "lon" : # <<<<<<<<<<<<<<
+ * # for regular and gaussian grids.
+ * if center_flag == "center" :
+ */
+ goto __pyx_L3;
+ }
+
+ /* "g2l_v4.pyx":34
+ * edge2 = edge2 - 360.0
+ *
+ * elif lat_lon_flag == "lat" : # <<<<<<<<<<<<<<
+ * # for regular grids (linear)
+ * if center_flag == "center" :
+ */
+ __pyx_t_7 = (__Pyx_PyString_Equals(__pyx_v_lat_lon_flag, __pyx_n_s_lat, Py_EQ)); if (unlikely(__pyx_t_7 < 0)) __PYX_ERR(0, 34, __pyx_L1_error)
+ if (__pyx_t_7) {
+
+ /* "g2l_v4.pyx":36
+ * elif lat_lon_flag == "lat" :
+ * # for regular grids (linear)
+ * if center_flag == "center" : # <<<<<<<<<<<<<<
+ * # NOTE! the_grid needs to be the i,j not the gridID!
+ * location = start_lat + (int(the_grid)) * \
+ */
+ __pyx_t_7 = (__Pyx_PyString_Equals(__pyx_v_center_flag, __pyx_n_s_center, Py_EQ)); if (unlikely(__pyx_t_7 < 0)) __PYX_ERR(0, 36, __pyx_L1_error)
+ if (__pyx_t_7) {
+
+ /* "g2l_v4.pyx":38
+ * if center_flag == "center" :
+ * # NOTE! the_grid needs to be the i,j not the gridID!
+ * location = start_lat + (int(the_grid)) * \ # <<<<<<<<<<<<<<
+ * 57.2957795*(dlat)
+ * # Deal with polar cap w/ faux_grids
+ */
+ __pyx_t_3 = PyFloat_FromDouble(__pyx_v_start_lat); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 38, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_t_5 = __Pyx_PyInt_FromDouble(__pyx_v_the_grid); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 38, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_5);
+ __pyx_t_2 = PyNumber_Multiply(__pyx_t_5, __pyx_float_57_2957795); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 38, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+
+ /* "g2l_v4.pyx":39
+ * # NOTE! the_grid needs to be the i,j not the gridID!
+ * location = start_lat + (int(the_grid)) * \
+ * 57.2957795*(dlat) # <<<<<<<<<<<<<<
+ * # Deal with polar cap w/ faux_grids
+ * if faux_grids == 2:
+ */
+ __pyx_t_5 = PyFloat_FromDouble(__pyx_v_dlat); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 39, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_5);
+ __pyx_t_4 = PyNumber_Multiply(__pyx_t_2, __pyx_t_5); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 39, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+
+ /* "g2l_v4.pyx":38
+ * if center_flag == "center" :
+ * # NOTE! the_grid needs to be the i,j not the gridID!
+ * location = start_lat + (int(the_grid)) * \ # <<<<<<<<<<<<<<
+ * 57.2957795*(dlat)
+ * # Deal with polar cap w/ faux_grids
+ */
+ __pyx_t_5 = PyNumber_Add(__pyx_t_3, __pyx_t_4); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 38, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_5);
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __pyx_t_6 = __pyx_PyFloat_AsDouble(__pyx_t_5); if (unlikely((__pyx_t_6 == (double)-1) && PyErr_Occurred())) __PYX_ERR(0, 38, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ __pyx_v_location = __pyx_t_6;
+
+ /* "g2l_v4.pyx":41
+ * 57.2957795*(dlat)
+ * # Deal with polar cap w/ faux_grids
+ * if faux_grids == 2: # <<<<<<<<<<<<<<
+ * if int(the_grid) == 0:
+ * location = start_lat + 57.2957795*(dlat*0.25)
+ */
+ __pyx_t_5 = __Pyx_PyInt_EqObjC(__pyx_v_faux_grids, __pyx_int_2, 2, 0); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 41, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_5);
+ __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_5); if (unlikely(__pyx_t_7 < 0)) __PYX_ERR(0, 41, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ if (__pyx_t_7) {
+
+ /* "g2l_v4.pyx":42
+ * # Deal with polar cap w/ faux_grids
+ * if faux_grids == 2:
+ * if int(the_grid) == 0: # <<<<<<<<<<<<<<
+ * location = start_lat + 57.2957795*(dlat*0.25)
+ * if int(the_grid) == jm-1:
+ */
+ __pyx_t_5 = __Pyx_PyInt_FromDouble(__pyx_v_the_grid); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 42, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_5);
+ __pyx_t_4 = __Pyx_PyInt_EqObjC(__pyx_t_5, __pyx_int_0, 0, 0); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 42, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_7 < 0)) __PYX_ERR(0, 42, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ if (__pyx_t_7) {
+
+ /* "g2l_v4.pyx":43
+ * if faux_grids == 2:
+ * if int(the_grid) == 0:
+ * location = start_lat + 57.2957795*(dlat*0.25) # <<<<<<<<<<<<<<
+ * if int(the_grid) == jm-1:
+ * location = -1.0*start_lat - 57.2957795*(dlat*0.25)
+ */
+ __pyx_v_location = (__pyx_v_start_lat + (57.2957795 * (__pyx_v_dlat * 0.25)));
+
+ /* "g2l_v4.pyx":42
+ * # Deal with polar cap w/ faux_grids
+ * if faux_grids == 2:
+ * if int(the_grid) == 0: # <<<<<<<<<<<<<<
+ * location = start_lat + 57.2957795*(dlat*0.25)
+ * if int(the_grid) == jm-1:
+ */
+ }
+
+ /* "g2l_v4.pyx":44
+ * if int(the_grid) == 0:
+ * location = start_lat + 57.2957795*(dlat*0.25)
+ * if int(the_grid) == jm-1: # <<<<<<<<<<<<<<
+ * location = -1.0*start_lat - 57.2957795*(dlat*0.25)
+ * elif center_flag == "free":
+ */
+ __pyx_t_4 = __Pyx_PyInt_FromDouble(__pyx_v_the_grid); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 44, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __pyx_t_5 = __Pyx_PyInt_From_long((__pyx_v_jm - 1)); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 44, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_5);
+ __pyx_t_3 = PyObject_RichCompare(__pyx_t_4, __pyx_t_5, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 44, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+ __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+ __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_7 < 0)) __PYX_ERR(0, 44, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ if (__pyx_t_7) {
+
+ /* "g2l_v4.pyx":45
+ * location = start_lat + 57.2957795*(dlat*0.25)
+ * if int(the_grid) == jm-1:
+ * location = -1.0*start_lat - 57.2957795*(dlat*0.25) # <<<<<<<<<<<<<<
+ * elif center_flag == "free":
+ * # NOTE! the_grid needs to be the i,j not the gridID!
+ */
+ __pyx_v_location = ((-1.0 * __pyx_v_start_lat) - (57.2957795 * (__pyx_v_dlat * 0.25)));
+
+ /* "g2l_v4.pyx":44
+ * if int(the_grid) == 0:
+ * location = start_lat + 57.2957795*(dlat*0.25)
+ * if int(the_grid) == jm-1: # <<<<<<<<<<<<<<
+ * location = -1.0*start_lat - 57.2957795*(dlat*0.25)
+ * elif center_flag == "free":
+ */
+ }
+
+ /* "g2l_v4.pyx":41
+ * 57.2957795*(dlat)
+ * # Deal with polar cap w/ faux_grids
+ * if faux_grids == 2: # <<<<<<<<<<<<<<
+ * if int(the_grid) == 0:
+ * location = start_lat + 57.2957795*(dlat*0.25)
+ */
+ }
+
+ /* "g2l_v4.pyx":36
+ * elif lat_lon_flag == "lat" :
+ * # for regular grids (linear)
+ * if center_flag == "center" : # <<<<<<<<<<<<<<
+ * # NOTE! the_grid needs to be the i,j not the gridID!
+ * location = start_lat + (int(the_grid)) * \
+ */
+ goto __pyx_L11;
+ }
+
+ /* "g2l_v4.pyx":46
+ * if int(the_grid) == jm-1:
+ * location = -1.0*start_lat - 57.2957795*(dlat*0.25)
+ * elif center_flag == "free": # <<<<<<<<<<<<<<
+ * # NOTE! the_grid needs to be the i,j not the gridID!
+ * if faux_grids == 2:
+ */
+ __pyx_t_7 = (__Pyx_PyString_Equals(__pyx_v_center_flag, __pyx_n_s_free, Py_EQ)); if (unlikely(__pyx_t_7 < 0)) __PYX_ERR(0, 46, __pyx_L1_error)
+ if (__pyx_t_7) {
+
+ /* "g2l_v4.pyx":48
+ * elif center_flag == "free":
+ * # NOTE! the_grid needs to be the i,j not the gridID!
+ * if faux_grids == 2: # <<<<<<<<<<<<<<
+ * if the_grid < 0.5:
+ * location = start_lat + (
+ */
+ __pyx_t_3 = __Pyx_PyInt_EqObjC(__pyx_v_faux_grids, __pyx_int_2, 2, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 48, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_7 < 0)) __PYX_ERR(0, 48, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ if (__pyx_t_7) {
+
+ /* "g2l_v4.pyx":49
+ * # NOTE! the_grid needs to be the i,j not the gridID!
+ * if faux_grids == 2:
+ * if the_grid < 0.5: # <<<<<<<<<<<<<<
+ * location = start_lat + (
+ * (the_grid+0.5)*57.2957795*(dlat*0.5))
+ */
+ __pyx_t_7 = ((__pyx_v_the_grid < 0.5) != 0);
+ if (__pyx_t_7) {
+
+ /* "g2l_v4.pyx":50
+ * if faux_grids == 2:
+ * if the_grid < 0.5:
+ * location = start_lat + ( # <<<<<<<<<<<<<<
+ * (the_grid+0.5)*57.2957795*(dlat*0.5))
+ * if the_grid > float(jm)-1.5:
+ */
+ __pyx_v_location = (__pyx_v_start_lat + (((__pyx_v_the_grid + 0.5) * 57.2957795) * (__pyx_v_dlat * 0.5)));
+
+ /* "g2l_v4.pyx":49
+ * # NOTE! the_grid needs to be the i,j not the gridID!
+ * if faux_grids == 2:
+ * if the_grid < 0.5: # <<<<<<<<<<<<<<
+ * location = start_lat + (
+ * (the_grid+0.5)*57.2957795*(dlat*0.5))
+ */
+ }
+
+ /* "g2l_v4.pyx":52
+ * location = start_lat + (
+ * (the_grid+0.5)*57.2957795*(dlat*0.5))
+ * if the_grid > float(jm)-1.5: # <<<<<<<<<<<<<<
+ * location = -1.0*start_lat - (
+ * (float(jm)-the_grid-0.5)*57.2957795*(dlat*0.5))
+ */
+ __pyx_t_7 = ((__pyx_v_the_grid > (((double)__pyx_v_jm) - 1.5)) != 0);
+ if (__pyx_t_7) {
+
+ /* "g2l_v4.pyx":53
+ * (the_grid+0.5)*57.2957795*(dlat*0.5))
+ * if the_grid > float(jm)-1.5:
+ * location = -1.0*start_lat - ( # <<<<<<<<<<<<<<
+ * (float(jm)-the_grid-0.5)*57.2957795*(dlat*0.5))
+ * else:
+ */
+ __pyx_v_location = ((-1.0 * __pyx_v_start_lat) - ((((((double)__pyx_v_jm) - __pyx_v_the_grid) - 0.5) * 57.2957795) * (__pyx_v_dlat * 0.5)));
+
+ /* "g2l_v4.pyx":52
+ * location = start_lat + (
+ * (the_grid+0.5)*57.2957795*(dlat*0.5))
+ * if the_grid > float(jm)-1.5: # <<<<<<<<<<<<<<
+ * location = -1.0*start_lat - (
+ * (float(jm)-the_grid-0.5)*57.2957795*(dlat*0.5))
+ */
+ goto __pyx_L17;
+ }
+
+ /* "g2l_v4.pyx":56
+ * (float(jm)-the_grid-0.5)*57.2957795*(dlat*0.5))
+ * else:
+ * location = start_lat + (the_grid) * \ # <<<<<<<<<<<<<<
+ * 57.2957795*(dlat)
+ * else:
+ */
+ /*else*/ {
+
+ /* "g2l_v4.pyx":57
+ * else:
+ * location = start_lat + (the_grid) * \
+ * 57.2957795*(dlat) # <<<<<<<<<<<<<<
+ * else:
+ * location = start_lat + (the_grid) * \
+ */
+ __pyx_v_location = (__pyx_v_start_lat + ((__pyx_v_the_grid * 57.2957795) * __pyx_v_dlat));
+ }
+ __pyx_L17:;
+
+ /* "g2l_v4.pyx":48
+ * elif center_flag == "free":
+ * # NOTE! the_grid needs to be the i,j not the gridID!
+ * if faux_grids == 2: # <<<<<<<<<<<<<<
+ * if the_grid < 0.5:
+ * location = start_lat + (
+ */
+ goto __pyx_L15;
+ }
+
+ /* "g2l_v4.pyx":59
+ * 57.2957795*(dlat)
+ * else:
+ * location = start_lat + (the_grid) * \ # <<<<<<<<<<<<<<
+ * 57.2957795*(dlat)
+ * edge1 = location - 28.64788975*(dlat)
+ */
+ /*else*/ {
+
+ /* "g2l_v4.pyx":60
+ * else:
+ * location = start_lat + (the_grid) * \
+ * 57.2957795*(dlat) # <<<<<<<<<<<<<<
+ * edge1 = location - 28.64788975*(dlat)
+ * edge2 = location + 28.64788975*(dlat)
+ */
+ __pyx_v_location = (__pyx_v_start_lat + ((__pyx_v_the_grid * 57.2957795) * __pyx_v_dlat));
+ }
+ __pyx_L15:;
+
+ /* "g2l_v4.pyx":46
+ * if int(the_grid) == jm-1:
+ * location = -1.0*start_lat - 57.2957795*(dlat*0.25)
+ * elif center_flag == "free": # <<<<<<<<<<<<<<
+ * # NOTE! the_grid needs to be the i,j not the gridID!
+ * if faux_grids == 2:
+ */
+ }
+ __pyx_L11:;
+
+ /* "g2l_v4.pyx":61
+ * location = start_lat + (the_grid) * \
+ * 57.2957795*(dlat)
+ * edge1 = location - 28.64788975*(dlat) # <<<<<<<<<<<<<<
+ * edge2 = location + 28.64788975*(dlat)
+ * if faux_grids == 2:
+ */
+ __pyx_v_edge1 = (__pyx_v_location - (28.64788975 * __pyx_v_dlat));
+
+ /* "g2l_v4.pyx":62
+ * 57.2957795*(dlat)
+ * edge1 = location - 28.64788975*(dlat)
+ * edge2 = location + 28.64788975*(dlat) # <<<<<<<<<<<<<<
+ * if faux_grids == 2:
+ * # Deal with polar cap w/ faux_grids
+ */
+ __pyx_v_edge2 = (__pyx_v_location + (28.64788975 * __pyx_v_dlat));
+
+ /* "g2l_v4.pyx":63
+ * edge1 = location - 28.64788975*(dlat)
+ * edge2 = location + 28.64788975*(dlat)
+ * if faux_grids == 2: # <<<<<<<<<<<<<<
+ * # Deal with polar cap w/ faux_grids
+ * if the_grid < 1.0 or the_grid > float(jm)-2.0:
+ */
+ __pyx_t_3 = __Pyx_PyInt_EqObjC(__pyx_v_faux_grids, __pyx_int_2, 2, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 63, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_7 < 0)) __PYX_ERR(0, 63, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ if (__pyx_t_7) {
+
+ /* "g2l_v4.pyx":65
+ * if faux_grids == 2:
+ * # Deal with polar cap w/ faux_grids
+ * if the_grid < 1.0 or the_grid > float(jm)-2.0: # <<<<<<<<<<<<<<
+ * edge1 = location - 14.323944875*(dlat)
+ * edge2 = location + 14.323944875*(dlat)
+ */
+ __pyx_t_1 = ((__pyx_v_the_grid < 1.0) != 0);
+ if (!__pyx_t_1) {
+ } else {
+ __pyx_t_7 = __pyx_t_1;
+ goto __pyx_L20_bool_binop_done;
+ }
+ __pyx_t_1 = ((__pyx_v_the_grid > (((double)__pyx_v_jm) - 2.0)) != 0);
+ __pyx_t_7 = __pyx_t_1;
+ __pyx_L20_bool_binop_done:;
+ if (__pyx_t_7) {
+
+ /* "g2l_v4.pyx":66
+ * # Deal with polar cap w/ faux_grids
+ * if the_grid < 1.0 or the_grid > float(jm)-2.0:
+ * edge1 = location - 14.323944875*(dlat) # <<<<<<<<<<<<<<
+ * edge2 = location + 14.323944875*(dlat)
+ * if edge2 > 90.0:
+ */
+ __pyx_v_edge1 = (__pyx_v_location - (14.323944875 * __pyx_v_dlat));
+
+ /* "g2l_v4.pyx":67
+ * if the_grid < 1.0 or the_grid > float(jm)-2.0:
+ * edge1 = location - 14.323944875*(dlat)
+ * edge2 = location + 14.323944875*(dlat) # <<<<<<<<<<<<<<
+ * if edge2 > 90.0:
+ * edge2 = edge1
+ */
+ __pyx_v_edge2 = (__pyx_v_location + (14.323944875 * __pyx_v_dlat));
+
+ /* "g2l_v4.pyx":65
+ * if faux_grids == 2:
+ * # Deal with polar cap w/ faux_grids
+ * if the_grid < 1.0 or the_grid > float(jm)-2.0: # <<<<<<<<<<<<<<
+ * edge1 = location - 14.323944875*(dlat)
+ * edge2 = location + 14.323944875*(dlat)
+ */
+ }
+
+ /* "g2l_v4.pyx":63
+ * edge1 = location - 28.64788975*(dlat)
+ * edge2 = location + 28.64788975*(dlat)
+ * if faux_grids == 2: # <<<<<<<<<<<<<<
+ * # Deal with polar cap w/ faux_grids
+ * if the_grid < 1.0 or the_grid > float(jm)-2.0:
+ */
+ }
+
+ /* "g2l_v4.pyx":68
+ * edge1 = location - 14.323944875*(dlat)
+ * edge2 = location + 14.323944875*(dlat)
+ * if edge2 > 90.0: # <<<<<<<<<<<<<<
+ * edge2 = edge1
+ * if edge1 < -90.0:
+ */
+ __pyx_t_7 = ((__pyx_v_edge2 > 90.0) != 0);
+ if (__pyx_t_7) {
+
+ /* "g2l_v4.pyx":69
+ * edge2 = location + 14.323944875*(dlat)
+ * if edge2 > 90.0:
+ * edge2 = edge1 # <<<<<<<<<<<<<<
+ * if edge1 < -90.0:
+ * edge1 = edge2
+ */
+ __pyx_v_edge2 = __pyx_v_edge1;
+
+ /* "g2l_v4.pyx":68
+ * edge1 = location - 14.323944875*(dlat)
+ * edge2 = location + 14.323944875*(dlat)
+ * if edge2 > 90.0: # <<<<<<<<<<<<<<
+ * edge2 = edge1
+ * if edge1 < -90.0:
+ */
+ }
+
+ /* "g2l_v4.pyx":70
+ * if edge2 > 90.0:
+ * edge2 = edge1
+ * if edge1 < -90.0: # <<<<<<<<<<<<<<
+ * edge1 = edge2
+ *
+ */
+ __pyx_t_7 = ((__pyx_v_edge1 < -90.0) != 0);
+ if (__pyx_t_7) {
+
+ /* "g2l_v4.pyx":71
+ * edge2 = edge1
+ * if edge1 < -90.0:
+ * edge1 = edge2 # <<<<<<<<<<<<<<
+ *
+ * if edge_flag :
+ */
+ __pyx_v_edge1 = __pyx_v_edge2;
+
+ /* "g2l_v4.pyx":70
+ * if edge2 > 90.0:
+ * edge2 = edge1
+ * if edge1 < -90.0: # <<<<<<<<<<<<<<
+ * edge1 = edge2
+ *
+ */
+ }
+
+ /* "g2l_v4.pyx":34
+ * edge2 = edge2 - 360.0
+ *
+ * elif lat_lon_flag == "lat" : # <<<<<<<<<<<<<<
+ * # for regular grids (linear)
+ * if center_flag == "center" :
+ */
+ }
+ __pyx_L3:;
+
+ /* "g2l_v4.pyx":73
+ * edge1 = edge2
+ *
+ * if edge_flag : # <<<<<<<<<<<<<<
+ * return edge2,location,edge1
+ * else :
+ */
+ __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_v_edge_flag); if (unlikely(__pyx_t_7 < 0)) __PYX_ERR(0, 73, __pyx_L1_error)
+ if (__pyx_t_7) {
+
+ /* "g2l_v4.pyx":74
+ *
+ * if edge_flag :
+ * return edge2,location,edge1 # <<<<<<<<<<<<<<
+ * else :
+ * return location
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __pyx_t_3 = PyFloat_FromDouble(__pyx_v_edge2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 74, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_t_5 = PyFloat_FromDouble(__pyx_v_location); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 74, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_5);
+ __pyx_t_4 = PyFloat_FromDouble(__pyx_v_edge1); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 74, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_4);
+ __pyx_t_2 = PyTuple_New(3); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 74, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __Pyx_GIVEREF(__pyx_t_3);
+ PyTuple_SET_ITEM(__pyx_t_2, 0, __pyx_t_3);
+ __Pyx_GIVEREF(__pyx_t_5);
+ PyTuple_SET_ITEM(__pyx_t_2, 1, __pyx_t_5);
+ __Pyx_GIVEREF(__pyx_t_4);
+ PyTuple_SET_ITEM(__pyx_t_2, 2, __pyx_t_4);
+ __pyx_t_3 = 0;
+ __pyx_t_5 = 0;
+ __pyx_t_4 = 0;
+ __pyx_r = __pyx_t_2;
+ __pyx_t_2 = 0;
+ goto __pyx_L0;
+
+ /* "g2l_v4.pyx":73
+ * edge1 = edge2
+ *
+ * if edge_flag : # <<<<<<<<<<<<<<
+ * return edge2,location,edge1
+ * else :
+ */
+ }
+
+ /* "g2l_v4.pyx":76
+ * return edge2,location,edge1
+ * else :
+ * return location # <<<<<<<<<<<<<<
+ */
+ /*else*/ {
+ __Pyx_XDECREF(__pyx_r);
+ __pyx_t_2 = PyFloat_FromDouble(__pyx_v_location); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 76, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_2);
+ __pyx_r = __pyx_t_2;
+ __pyx_t_2 = 0;
+ goto __pyx_L0;
+ }
+
+ /* "g2l_v4.pyx":1
+ * def g2l(double the_grid,double start_lon, double start_lat, # <<<<<<<<<<<<<<
+ * double dlon, double dlat, int jm, lat_lon_flag, center_flag="center",
+ * edge_flag=False,flag_360=False,faux_grids=0):
+ */
+
+ /* function exit code */
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_2);
+ __Pyx_XDECREF(__pyx_t_3);
+ __Pyx_XDECREF(__pyx_t_4);
+ __Pyx_XDECREF(__pyx_t_5);
+ __Pyx_AddTraceback("g2l_v4.g2l", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __pyx_r = NULL;
+ __pyx_L0:;
+ __Pyx_XGIVEREF(__pyx_r);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+static PyMethodDef __pyx_methods[] = {
+ {0, 0, 0, 0}
+};
+
+#if PY_MAJOR_VERSION >= 3
+#if CYTHON_PEP489_MULTI_PHASE_INIT
+static PyObject* __pyx_pymod_create(PyObject *spec, PyModuleDef *def); /*proto*/
+static int __pyx_pymod_exec_g2l_v4(PyObject* module); /*proto*/
+static PyModuleDef_Slot __pyx_moduledef_slots[] = {
+ {Py_mod_create, (void*)__pyx_pymod_create},
+ {Py_mod_exec, (void*)__pyx_pymod_exec_g2l_v4},
+ {0, NULL}
+};
+#endif
+
+static struct PyModuleDef __pyx_moduledef = {
+ PyModuleDef_HEAD_INIT,
+ "g2l_v4",
+ 0, /* m_doc */
+ #if CYTHON_PEP489_MULTI_PHASE_INIT
+ 0, /* m_size */
+ #else
+ -1, /* m_size */
+ #endif
+ __pyx_methods /* m_methods */,
+ #if CYTHON_PEP489_MULTI_PHASE_INIT
+ __pyx_moduledef_slots, /* m_slots */
+ #else
+ NULL, /* m_reload */
+ #endif
+ NULL, /* m_traverse */
+ NULL, /* m_clear */
+ NULL /* m_free */
+};
+#endif
+#ifndef CYTHON_SMALL_CODE
+#if defined(__clang__)
+ #define CYTHON_SMALL_CODE
+#elif defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 3))
+ #define CYTHON_SMALL_CODE __attribute__((cold))
+#else
+ #define CYTHON_SMALL_CODE
+#endif
+#endif
+
+static __Pyx_StringTabEntry __pyx_string_tab[] = {
+ {&__pyx_n_s_center, __pyx_k_center, sizeof(__pyx_k_center), 0, 0, 1, 1},
+ {&__pyx_n_s_center_flag, __pyx_k_center_flag, sizeof(__pyx_k_center_flag), 0, 0, 1, 1},
+ {&__pyx_n_s_cline_in_traceback, __pyx_k_cline_in_traceback, sizeof(__pyx_k_cline_in_traceback), 0, 0, 1, 1},
+ {&__pyx_n_s_dlat, __pyx_k_dlat, sizeof(__pyx_k_dlat), 0, 0, 1, 1},
+ {&__pyx_n_s_dlon, __pyx_k_dlon, sizeof(__pyx_k_dlon), 0, 0, 1, 1},
+ {&__pyx_n_s_edge1, __pyx_k_edge1, sizeof(__pyx_k_edge1), 0, 0, 1, 1},
+ {&__pyx_n_s_edge2, __pyx_k_edge2, sizeof(__pyx_k_edge2), 0, 0, 1, 1},
+ {&__pyx_n_s_edge_flag, __pyx_k_edge_flag, sizeof(__pyx_k_edge_flag), 0, 0, 1, 1},
+ {&__pyx_n_s_faux_grids, __pyx_k_faux_grids, sizeof(__pyx_k_faux_grids), 0, 0, 1, 1},
+ {&__pyx_n_s_flag_360, __pyx_k_flag_360, sizeof(__pyx_k_flag_360), 0, 0, 1, 1},
+ {&__pyx_n_s_free, __pyx_k_free, sizeof(__pyx_k_free), 0, 0, 1, 1},
+ {&__pyx_n_s_g2l, __pyx_k_g2l, sizeof(__pyx_k_g2l), 0, 0, 1, 1},
+ {&__pyx_n_s_g2l_v4, __pyx_k_g2l_v4, sizeof(__pyx_k_g2l_v4), 0, 0, 1, 1},
+ {&__pyx_kp_s_g2l_v4_pyx, __pyx_k_g2l_v4_pyx, sizeof(__pyx_k_g2l_v4_pyx), 0, 0, 1, 0},
+ {&__pyx_n_s_jm, __pyx_k_jm, sizeof(__pyx_k_jm), 0, 0, 1, 1},
+ {&__pyx_n_s_lat, __pyx_k_lat, sizeof(__pyx_k_lat), 0, 0, 1, 1},
+ {&__pyx_n_s_lat_lon_flag, __pyx_k_lat_lon_flag, sizeof(__pyx_k_lat_lon_flag), 0, 0, 1, 1},
+ {&__pyx_n_s_location, __pyx_k_location, sizeof(__pyx_k_location), 0, 0, 1, 1},
+ {&__pyx_n_s_lon, __pyx_k_lon, sizeof(__pyx_k_lon), 0, 0, 1, 1},
+ {&__pyx_n_s_main, __pyx_k_main, sizeof(__pyx_k_main), 0, 0, 1, 1},
+ {&__pyx_n_s_name, __pyx_k_name, sizeof(__pyx_k_name), 0, 0, 1, 1},
+ {&__pyx_n_s_start_lat, __pyx_k_start_lat, sizeof(__pyx_k_start_lat), 0, 0, 1, 1},
+ {&__pyx_n_s_start_lon, __pyx_k_start_lon, sizeof(__pyx_k_start_lon), 0, 0, 1, 1},
+ {&__pyx_n_s_test, __pyx_k_test, sizeof(__pyx_k_test), 0, 0, 1, 1},
+ {&__pyx_n_s_the_grid, __pyx_k_the_grid, sizeof(__pyx_k_the_grid), 0, 0, 1, 1},
+ {0, 0, 0, 0, 0, 0, 0}
+};
+static CYTHON_SMALL_CODE int __Pyx_InitCachedBuiltins(void) {
+ return 0;
+}
+
+static CYTHON_SMALL_CODE int __Pyx_InitCachedConstants(void) {
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("__Pyx_InitCachedConstants", 0);
+
+ /* "g2l_v4.pyx":1
+ * def g2l(double the_grid,double start_lon, double start_lat, # <<<<<<<<<<<<<<
+ * double dlon, double dlat, int jm, lat_lon_flag, center_flag="center",
+ * edge_flag=False,flag_360=False,faux_grids=0):
+ */
+ __pyx_tuple_ = PyTuple_Pack(14, __pyx_n_s_the_grid, __pyx_n_s_start_lon, __pyx_n_s_start_lat, __pyx_n_s_dlon, __pyx_n_s_dlat, __pyx_n_s_jm, __pyx_n_s_lat_lon_flag, __pyx_n_s_center_flag, __pyx_n_s_edge_flag, __pyx_n_s_flag_360, __pyx_n_s_faux_grids, __pyx_n_s_location, __pyx_n_s_edge1, __pyx_n_s_edge2); if (unlikely(!__pyx_tuple_)) __PYX_ERR(0, 1, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_tuple_);
+ __Pyx_GIVEREF(__pyx_tuple_);
+ __pyx_codeobj__2 = (PyObject*)__Pyx_PyCode_New(11, 0, 14, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple_, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_g2l_v4_pyx, __pyx_n_s_g2l, 1, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__2)) __PYX_ERR(0, 1, __pyx_L1_error)
+ __Pyx_RefNannyFinishContext();
+ return 0;
+ __pyx_L1_error:;
+ __Pyx_RefNannyFinishContext();
+ return -1;
+}
+
+static CYTHON_SMALL_CODE int __Pyx_InitGlobals(void) {
+ if (__Pyx_InitStrings(__pyx_string_tab) < 0) __PYX_ERR(0, 1, __pyx_L1_error);
+ __pyx_float_57_2957795 = PyFloat_FromDouble(57.2957795); if (unlikely(!__pyx_float_57_2957795)) __PYX_ERR(0, 1, __pyx_L1_error)
+ __pyx_int_0 = PyInt_FromLong(0); if (unlikely(!__pyx_int_0)) __PYX_ERR(0, 1, __pyx_L1_error)
+ __pyx_int_2 = PyInt_FromLong(2); if (unlikely(!__pyx_int_2)) __PYX_ERR(0, 1, __pyx_L1_error)
+ return 0;
+ __pyx_L1_error:;
+ return -1;
+}
+
+static CYTHON_SMALL_CODE int __Pyx_modinit_global_init_code(void); /*proto*/
+static CYTHON_SMALL_CODE int __Pyx_modinit_variable_export_code(void); /*proto*/
+static CYTHON_SMALL_CODE int __Pyx_modinit_function_export_code(void); /*proto*/
+static CYTHON_SMALL_CODE int __Pyx_modinit_type_init_code(void); /*proto*/
+static CYTHON_SMALL_CODE int __Pyx_modinit_type_import_code(void); /*proto*/
+static CYTHON_SMALL_CODE int __Pyx_modinit_variable_import_code(void); /*proto*/
+static CYTHON_SMALL_CODE int __Pyx_modinit_function_import_code(void); /*proto*/
+
+static int __Pyx_modinit_global_init_code(void) {
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("__Pyx_modinit_global_init_code", 0);
+ /*--- Global init code ---*/
+ __Pyx_RefNannyFinishContext();
+ return 0;
+}
+
+static int __Pyx_modinit_variable_export_code(void) {
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("__Pyx_modinit_variable_export_code", 0);
+ /*--- Variable export code ---*/
+ __Pyx_RefNannyFinishContext();
+ return 0;
+}
+
+static int __Pyx_modinit_function_export_code(void) {
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("__Pyx_modinit_function_export_code", 0);
+ /*--- Function export code ---*/
+ __Pyx_RefNannyFinishContext();
+ return 0;
+}
+
+static int __Pyx_modinit_type_init_code(void) {
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("__Pyx_modinit_type_init_code", 0);
+ /*--- Type init code ---*/
+ __Pyx_RefNannyFinishContext();
+ return 0;
+}
+
+static int __Pyx_modinit_type_import_code(void) {
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("__Pyx_modinit_type_import_code", 0);
+ /*--- Type import code ---*/
+ __Pyx_RefNannyFinishContext();
+ return 0;
+}
+
+static int __Pyx_modinit_variable_import_code(void) {
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("__Pyx_modinit_variable_import_code", 0);
+ /*--- Variable import code ---*/
+ __Pyx_RefNannyFinishContext();
+ return 0;
+}
+
+static int __Pyx_modinit_function_import_code(void) {
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("__Pyx_modinit_function_import_code", 0);
+ /*--- Function import code ---*/
+ __Pyx_RefNannyFinishContext();
+ return 0;
+}
+
+
+#ifndef CYTHON_NO_PYINIT_EXPORT
+#define __Pyx_PyMODINIT_FUNC PyMODINIT_FUNC
+#elif PY_MAJOR_VERSION < 3
+#ifdef __cplusplus
+#define __Pyx_PyMODINIT_FUNC extern "C" void
+#else
+#define __Pyx_PyMODINIT_FUNC void
+#endif
+#else
+#ifdef __cplusplus
+#define __Pyx_PyMODINIT_FUNC extern "C" PyObject *
+#else
+#define __Pyx_PyMODINIT_FUNC PyObject *
+#endif
+#endif
+
+
+#if PY_MAJOR_VERSION < 3
+__Pyx_PyMODINIT_FUNC initg2l_v4(void) CYTHON_SMALL_CODE; /*proto*/
+__Pyx_PyMODINIT_FUNC initg2l_v4(void)
+#else
+__Pyx_PyMODINIT_FUNC PyInit_g2l_v4(void) CYTHON_SMALL_CODE; /*proto*/
+__Pyx_PyMODINIT_FUNC PyInit_g2l_v4(void)
+#if CYTHON_PEP489_MULTI_PHASE_INIT
+{
+ return PyModuleDef_Init(&__pyx_moduledef);
+}
+static CYTHON_SMALL_CODE int __Pyx_check_single_interpreter(void) {
+ #if PY_VERSION_HEX >= 0x030700A1
+ static PY_INT64_T main_interpreter_id = -1;
+ PY_INT64_T current_id = PyInterpreterState_GetID(PyThreadState_Get()->interp);
+ if (main_interpreter_id == -1) {
+ main_interpreter_id = current_id;
+ return (unlikely(current_id == -1)) ? -1 : 0;
+ } else if (unlikely(main_interpreter_id != current_id))
+ #else
+ static PyInterpreterState *main_interpreter = NULL;
+ PyInterpreterState *current_interpreter = PyThreadState_Get()->interp;
+ if (!main_interpreter) {
+ main_interpreter = current_interpreter;
+ } else if (unlikely(main_interpreter != current_interpreter))
+ #endif
+ {
+ PyErr_SetString(
+ PyExc_ImportError,
+ "Interpreter change detected - this module can only be loaded into one interpreter per process.");
+ return -1;
+ }
+ return 0;
+}
+static CYTHON_SMALL_CODE int __Pyx_copy_spec_to_module(PyObject *spec, PyObject *moddict, const char* from_name, const char* to_name, int allow_none) {
+ PyObject *value = PyObject_GetAttrString(spec, from_name);
+ int result = 0;
+ if (likely(value)) {
+ if (allow_none || value != Py_None) {
+ result = PyDict_SetItemString(moddict, to_name, value);
+ }
+ Py_DECREF(value);
+ } else if (PyErr_ExceptionMatches(PyExc_AttributeError)) {
+ PyErr_Clear();
+ } else {
+ result = -1;
+ }
+ return result;
+}
+static CYTHON_SMALL_CODE PyObject* __pyx_pymod_create(PyObject *spec, CYTHON_UNUSED PyModuleDef *def) {
+ PyObject *module = NULL, *moddict, *modname;
+ if (__Pyx_check_single_interpreter())
+ return NULL;
+ if (__pyx_m)
+ return __Pyx_NewRef(__pyx_m);
+ modname = PyObject_GetAttrString(spec, "name");
+ if (unlikely(!modname)) goto bad;
+ module = PyModule_NewObject(modname);
+ Py_DECREF(modname);
+ if (unlikely(!module)) goto bad;
+ moddict = PyModule_GetDict(module);
+ if (unlikely(!moddict)) goto bad;
+ if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "loader", "__loader__", 1) < 0)) goto bad;
+ if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "origin", "__file__", 1) < 0)) goto bad;
+ if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "parent", "__package__", 1) < 0)) goto bad;
+ if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "submodule_search_locations", "__path__", 0) < 0)) goto bad;
+ return module;
+bad:
+ Py_XDECREF(module);
+ return NULL;
+}
+
+
+static CYTHON_SMALL_CODE int __pyx_pymod_exec_g2l_v4(PyObject *__pyx_pyinit_module)
+#endif
+#endif
+{
+ PyObject *__pyx_t_1 = NULL;
+ int __pyx_lineno = 0;
+ const char *__pyx_filename = NULL;
+ int __pyx_clineno = 0;
+ __Pyx_RefNannyDeclarations
+ #if CYTHON_PEP489_MULTI_PHASE_INIT
+ if (__pyx_m) {
+ if (__pyx_m == __pyx_pyinit_module) return 0;
+ PyErr_SetString(PyExc_RuntimeError, "Module 'g2l_v4' has already been imported. Re-initialisation is not supported.");
+ return -1;
+ }
+ #elif PY_MAJOR_VERSION >= 3
+ if (__pyx_m) return __Pyx_NewRef(__pyx_m);
+ #endif
+ #if CYTHON_REFNANNY
+__Pyx_RefNanny = __Pyx_RefNannyImportAPI("refnanny");
+if (!__Pyx_RefNanny) {
+ PyErr_Clear();
+ __Pyx_RefNanny = __Pyx_RefNannyImportAPI("Cython.Runtime.refnanny");
+ if (!__Pyx_RefNanny)
+ Py_FatalError("failed to import 'refnanny' module");
+}
+#endif
+ __Pyx_RefNannySetupContext("__Pyx_PyMODINIT_FUNC PyInit_g2l_v4(void)", 0);
+ if (__Pyx_check_binary_version() < 0) __PYX_ERR(0, 1, __pyx_L1_error)
+ #ifdef __Pxy_PyFrame_Initialize_Offsets
+ __Pxy_PyFrame_Initialize_Offsets();
+ #endif
+ __pyx_empty_tuple = PyTuple_New(0); if (unlikely(!__pyx_empty_tuple)) __PYX_ERR(0, 1, __pyx_L1_error)
+ __pyx_empty_bytes = PyBytes_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_bytes)) __PYX_ERR(0, 1, __pyx_L1_error)
+ __pyx_empty_unicode = PyUnicode_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_unicode)) __PYX_ERR(0, 1, __pyx_L1_error)
+ #ifdef __Pyx_CyFunction_USED
+ if (__pyx_CyFunction_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error)
+ #endif
+ #ifdef __Pyx_FusedFunction_USED
+ if (__pyx_FusedFunction_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error)
+ #endif
+ #ifdef __Pyx_Coroutine_USED
+ if (__pyx_Coroutine_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error)
+ #endif
+ #ifdef __Pyx_Generator_USED
+ if (__pyx_Generator_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error)
+ #endif
+ #ifdef __Pyx_AsyncGen_USED
+ if (__pyx_AsyncGen_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error)
+ #endif
+ #ifdef __Pyx_StopAsyncIteration_USED
+ if (__pyx_StopAsyncIteration_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error)
+ #endif
+ /*--- Library function declarations ---*/
+ /*--- Threads initialization code ---*/
+ #if defined(__PYX_FORCE_INIT_THREADS) && __PYX_FORCE_INIT_THREADS
+ #ifdef WITH_THREAD /* Python build with threading support? */
+ PyEval_InitThreads();
+ #endif
+ #endif
+ /*--- Module creation code ---*/
+ #if CYTHON_PEP489_MULTI_PHASE_INIT
+ __pyx_m = __pyx_pyinit_module;
+ Py_INCREF(__pyx_m);
+ #else
+ #if PY_MAJOR_VERSION < 3
+ __pyx_m = Py_InitModule4("g2l_v4", __pyx_methods, 0, 0, PYTHON_API_VERSION); Py_XINCREF(__pyx_m);
+ #else
+ __pyx_m = PyModule_Create(&__pyx_moduledef);
+ #endif
+ if (unlikely(!__pyx_m)) __PYX_ERR(0, 1, __pyx_L1_error)
+ #endif
+ __pyx_d = PyModule_GetDict(__pyx_m); if (unlikely(!__pyx_d)) __PYX_ERR(0, 1, __pyx_L1_error)
+ Py_INCREF(__pyx_d);
+ __pyx_b = PyImport_AddModule(__Pyx_BUILTIN_MODULE_NAME); if (unlikely(!__pyx_b)) __PYX_ERR(0, 1, __pyx_L1_error)
+ Py_INCREF(__pyx_b);
+ __pyx_cython_runtime = PyImport_AddModule((char *) "cython_runtime"); if (unlikely(!__pyx_cython_runtime)) __PYX_ERR(0, 1, __pyx_L1_error)
+ Py_INCREF(__pyx_cython_runtime);
+ if (PyObject_SetAttrString(__pyx_m, "__builtins__", __pyx_b) < 0) __PYX_ERR(0, 1, __pyx_L1_error);
+ /*--- Initialize various global constants etc. ---*/
+ if (__Pyx_InitGlobals() < 0) __PYX_ERR(0, 1, __pyx_L1_error)
+ #if PY_MAJOR_VERSION < 3 && (__PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT)
+ if (__Pyx_init_sys_getdefaultencoding_params() < 0) __PYX_ERR(0, 1, __pyx_L1_error)
+ #endif
+ if (__pyx_module_is_main_g2l_v4) {
+ if (PyObject_SetAttr(__pyx_m, __pyx_n_s_name, __pyx_n_s_main) < 0) __PYX_ERR(0, 1, __pyx_L1_error)
+ }
+ #if PY_MAJOR_VERSION >= 3
+ {
+ PyObject *modules = PyImport_GetModuleDict(); if (unlikely(!modules)) __PYX_ERR(0, 1, __pyx_L1_error)
+ if (!PyDict_GetItemString(modules, "g2l_v4")) {
+ if (unlikely(PyDict_SetItemString(modules, "g2l_v4", __pyx_m) < 0)) __PYX_ERR(0, 1, __pyx_L1_error)
+ }
+ }
+ #endif
+ /*--- Builtin init code ---*/
+ if (__Pyx_InitCachedBuiltins() < 0) __PYX_ERR(0, 1, __pyx_L1_error)
+ /*--- Constants init code ---*/
+ if (__Pyx_InitCachedConstants() < 0) __PYX_ERR(0, 1, __pyx_L1_error)
+ /*--- Global type/function init code ---*/
+ (void)__Pyx_modinit_global_init_code();
+ (void)__Pyx_modinit_variable_export_code();
+ (void)__Pyx_modinit_function_export_code();
+ (void)__Pyx_modinit_type_init_code();
+ (void)__Pyx_modinit_type_import_code();
+ (void)__Pyx_modinit_variable_import_code();
+ (void)__Pyx_modinit_function_import_code();
+ /*--- Execution code ---*/
+ #if defined(__Pyx_Generator_USED) || defined(__Pyx_Coroutine_USED)
+ if (__Pyx_patch_abc() < 0) __PYX_ERR(0, 1, __pyx_L1_error)
+ #endif
+
+ /* "g2l_v4.pyx":1
+ * def g2l(double the_grid,double start_lon, double start_lat, # <<<<<<<<<<<<<<
+ * double dlon, double dlat, int jm, lat_lon_flag, center_flag="center",
+ * edge_flag=False,flag_360=False,faux_grids=0):
+ */
+ __pyx_t_1 = PyCFunction_NewEx(&__pyx_mdef_6g2l_v4_1g2l, NULL, __pyx_n_s_g2l_v4); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ if (PyDict_SetItem(__pyx_d, __pyx_n_s_g2l, __pyx_t_1) < 0) __PYX_ERR(0, 1, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_1 = __Pyx_PyDict_NewPresized(0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ if (PyDict_SetItem(__pyx_d, __pyx_n_s_test, __pyx_t_1) < 0) __PYX_ERR(0, 1, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /*--- Wrapped vars code ---*/
+
+ goto __pyx_L0;
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_1);
+ if (__pyx_m) {
+ if (__pyx_d) {
+ __Pyx_AddTraceback("init g2l_v4", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ }
+ Py_CLEAR(__pyx_m);
+ } else if (!PyErr_Occurred()) {
+ PyErr_SetString(PyExc_ImportError, "init g2l_v4");
+ }
+ __pyx_L0:;
+ __Pyx_RefNannyFinishContext();
+ #if CYTHON_PEP489_MULTI_PHASE_INIT
+ return (__pyx_m != NULL) ? 0 : -1;
+ #elif PY_MAJOR_VERSION >= 3
+ return __pyx_m;
+ #else
+ return;
+ #endif
+}
+
+/* --- Runtime support code --- */
+/* Refnanny */
+#if CYTHON_REFNANNY
+static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname) {
+ PyObject *m = NULL, *p = NULL;
+ void *r = NULL;
+ m = PyImport_ImportModule(modname);
+ if (!m) goto end;
+ p = PyObject_GetAttrString(m, "RefNannyAPI");
+ if (!p) goto end;
+ r = PyLong_AsVoidPtr(p);
+end:
+ Py_XDECREF(p);
+ Py_XDECREF(m);
+ return (__Pyx_RefNannyAPIStruct *)r;
+}
+#endif
+
+/* RaiseArgTupleInvalid */
+static void __Pyx_RaiseArgtupleInvalid(
+ const char* func_name,
+ int exact,
+ Py_ssize_t num_min,
+ Py_ssize_t num_max,
+ Py_ssize_t num_found)
+{
+ Py_ssize_t num_expected;
+ const char *more_or_less;
+ if (num_found < num_min) {
+ num_expected = num_min;
+ more_or_less = "at least";
+ } else {
+ num_expected = num_max;
+ more_or_less = "at most";
+ }
+ if (exact) {
+ more_or_less = "exactly";
+ }
+ PyErr_Format(PyExc_TypeError,
+ "%.200s() takes %.8s %" CYTHON_FORMAT_SSIZE_T "d positional argument%.1s (%" CYTHON_FORMAT_SSIZE_T "d given)",
+ func_name, more_or_less, num_expected,
+ (num_expected == 1) ? "" : "s", num_found);
+}
+
+/* RaiseDoubleKeywords */
+static void __Pyx_RaiseDoubleKeywordsError(
+ const char* func_name,
+ PyObject* kw_name)
+{
+ PyErr_Format(PyExc_TypeError,
+ #if PY_MAJOR_VERSION >= 3
+ "%s() got multiple values for keyword argument '%U'", func_name, kw_name);
+ #else
+ "%s() got multiple values for keyword argument '%s'", func_name,
+ PyString_AsString(kw_name));
+ #endif
+}
+
+/* ParseKeywords */
+static int __Pyx_ParseOptionalKeywords(
+ PyObject *kwds,
+ PyObject **argnames[],
+ PyObject *kwds2,
+ PyObject *values[],
+ Py_ssize_t num_pos_args,
+ const char* function_name)
+{
+ PyObject *key = 0, *value = 0;
+ Py_ssize_t pos = 0;
+ PyObject*** name;
+ PyObject*** first_kw_arg = argnames + num_pos_args;
+ while (PyDict_Next(kwds, &pos, &key, &value)) {
+ name = first_kw_arg;
+ while (*name && (**name != key)) name++;
+ if (*name) {
+ values[name-argnames] = value;
+ continue;
+ }
+ name = first_kw_arg;
+ #if PY_MAJOR_VERSION < 3
+ if (likely(PyString_Check(key))) {
+ while (*name) {
+ if ((CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**name) == PyString_GET_SIZE(key))
+ && _PyString_Eq(**name, key)) {
+ values[name-argnames] = value;
+ break;
+ }
+ name++;
+ }
+ if (*name) continue;
+ else {
+ PyObject*** argname = argnames;
+ while (argname != first_kw_arg) {
+ if ((**argname == key) || (
+ (CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**argname) == PyString_GET_SIZE(key))
+ && _PyString_Eq(**argname, key))) {
+ goto arg_passed_twice;
+ }
+ argname++;
+ }
+ }
+ } else
+ #endif
+ if (likely(PyUnicode_Check(key))) {
+ while (*name) {
+ int cmp = (**name == key) ? 0 :
+ #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3
+ (__Pyx_PyUnicode_GET_LENGTH(**name) != __Pyx_PyUnicode_GET_LENGTH(key)) ? 1 :
+ #endif
+ PyUnicode_Compare(**name, key);
+ if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad;
+ if (cmp == 0) {
+ values[name-argnames] = value;
+ break;
+ }
+ name++;
+ }
+ if (*name) continue;
+ else {
+ PyObject*** argname = argnames;
+ while (argname != first_kw_arg) {
+ int cmp = (**argname == key) ? 0 :
+ #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3
+ (__Pyx_PyUnicode_GET_LENGTH(**argname) != __Pyx_PyUnicode_GET_LENGTH(key)) ? 1 :
+ #endif
+ PyUnicode_Compare(**argname, key);
+ if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad;
+ if (cmp == 0) goto arg_passed_twice;
+ argname++;
+ }
+ }
+ } else
+ goto invalid_keyword_type;
+ if (kwds2) {
+ if (unlikely(PyDict_SetItem(kwds2, key, value))) goto bad;
+ } else {
+ goto invalid_keyword;
+ }
+ }
+ return 0;
+arg_passed_twice:
+ __Pyx_RaiseDoubleKeywordsError(function_name, key);
+ goto bad;
+invalid_keyword_type:
+ PyErr_Format(PyExc_TypeError,
+ "%.200s() keywords must be strings", function_name);
+ goto bad;
+invalid_keyword:
+ PyErr_Format(PyExc_TypeError,
+ #if PY_MAJOR_VERSION < 3
+ "%.200s() got an unexpected keyword argument '%.200s'",
+ function_name, PyString_AsString(key));
+ #else
+ "%s() got an unexpected keyword argument '%U'",
+ function_name, key);
+ #endif
+bad:
+ return -1;
+}
+
+/* BytesEquals */
+static CYTHON_INLINE int __Pyx_PyBytes_Equals(PyObject* s1, PyObject* s2, int equals) {
+#if CYTHON_COMPILING_IN_PYPY
+ return PyObject_RichCompareBool(s1, s2, equals);
+#else
+ if (s1 == s2) {
+ return (equals == Py_EQ);
+ } else if (PyBytes_CheckExact(s1) & PyBytes_CheckExact(s2)) {
+ const char *ps1, *ps2;
+ Py_ssize_t length = PyBytes_GET_SIZE(s1);
+ if (length != PyBytes_GET_SIZE(s2))
+ return (equals == Py_NE);
+ ps1 = PyBytes_AS_STRING(s1);
+ ps2 = PyBytes_AS_STRING(s2);
+ if (ps1[0] != ps2[0]) {
+ return (equals == Py_NE);
+ } else if (length == 1) {
+ return (equals == Py_EQ);
+ } else {
+ int result;
+#if CYTHON_USE_UNICODE_INTERNALS
+ Py_hash_t hash1, hash2;
+ hash1 = ((PyBytesObject*)s1)->ob_shash;
+ hash2 = ((PyBytesObject*)s2)->ob_shash;
+ if (hash1 != hash2 && hash1 != -1 && hash2 != -1) {
+ return (equals == Py_NE);
+ }
+#endif
+ result = memcmp(ps1, ps2, (size_t)length);
+ return (equals == Py_EQ) ? (result == 0) : (result != 0);
+ }
+ } else if ((s1 == Py_None) & PyBytes_CheckExact(s2)) {
+ return (equals == Py_NE);
+ } else if ((s2 == Py_None) & PyBytes_CheckExact(s1)) {
+ return (equals == Py_NE);
+ } else {
+ int result;
+ PyObject* py_result = PyObject_RichCompare(s1, s2, equals);
+ if (!py_result)
+ return -1;
+ result = __Pyx_PyObject_IsTrue(py_result);
+ Py_DECREF(py_result);
+ return result;
+ }
+#endif
+}
+
+/* UnicodeEquals */
+static CYTHON_INLINE int __Pyx_PyUnicode_Equals(PyObject* s1, PyObject* s2, int equals) {
+#if CYTHON_COMPILING_IN_PYPY
+ return PyObject_RichCompareBool(s1, s2, equals);
+#else
+#if PY_MAJOR_VERSION < 3
+ PyObject* owned_ref = NULL;
+#endif
+ int s1_is_unicode, s2_is_unicode;
+ if (s1 == s2) {
+ goto return_eq;
+ }
+ s1_is_unicode = PyUnicode_CheckExact(s1);
+ s2_is_unicode = PyUnicode_CheckExact(s2);
+#if PY_MAJOR_VERSION < 3
+ if ((s1_is_unicode & (!s2_is_unicode)) && PyString_CheckExact(s2)) {
+ owned_ref = PyUnicode_FromObject(s2);
+ if (unlikely(!owned_ref))
+ return -1;
+ s2 = owned_ref;
+ s2_is_unicode = 1;
+ } else if ((s2_is_unicode & (!s1_is_unicode)) && PyString_CheckExact(s1)) {
+ owned_ref = PyUnicode_FromObject(s1);
+ if (unlikely(!owned_ref))
+ return -1;
+ s1 = owned_ref;
+ s1_is_unicode = 1;
+ } else if (((!s2_is_unicode) & (!s1_is_unicode))) {
+ return __Pyx_PyBytes_Equals(s1, s2, equals);
+ }
+#endif
+ if (s1_is_unicode & s2_is_unicode) {
+ Py_ssize_t length;
+ int kind;
+ void *data1, *data2;
+ if (unlikely(__Pyx_PyUnicode_READY(s1) < 0) || unlikely(__Pyx_PyUnicode_READY(s2) < 0))
+ return -1;
+ length = __Pyx_PyUnicode_GET_LENGTH(s1);
+ if (length != __Pyx_PyUnicode_GET_LENGTH(s2)) {
+ goto return_ne;
+ }
+#if CYTHON_USE_UNICODE_INTERNALS
+ {
+ Py_hash_t hash1, hash2;
+ #if CYTHON_PEP393_ENABLED
+ hash1 = ((PyASCIIObject*)s1)->hash;
+ hash2 = ((PyASCIIObject*)s2)->hash;
+ #else
+ hash1 = ((PyUnicodeObject*)s1)->hash;
+ hash2 = ((PyUnicodeObject*)s2)->hash;
+ #endif
+ if (hash1 != hash2 && hash1 != -1 && hash2 != -1) {
+ goto return_ne;
+ }
+ }
+#endif
+ kind = __Pyx_PyUnicode_KIND(s1);
+ if (kind != __Pyx_PyUnicode_KIND(s2)) {
+ goto return_ne;
+ }
+ data1 = __Pyx_PyUnicode_DATA(s1);
+ data2 = __Pyx_PyUnicode_DATA(s2);
+ if (__Pyx_PyUnicode_READ(kind, data1, 0) != __Pyx_PyUnicode_READ(kind, data2, 0)) {
+ goto return_ne;
+ } else if (length == 1) {
+ goto return_eq;
+ } else {
+ int result = memcmp(data1, data2, (size_t)(length * kind));
+ #if PY_MAJOR_VERSION < 3
+ Py_XDECREF(owned_ref);
+ #endif
+ return (equals == Py_EQ) ? (result == 0) : (result != 0);
+ }
+ } else if ((s1 == Py_None) & s2_is_unicode) {
+ goto return_ne;
+ } else if ((s2 == Py_None) & s1_is_unicode) {
+ goto return_ne;
+ } else {
+ int result;
+ PyObject* py_result = PyObject_RichCompare(s1, s2, equals);
+ #if PY_MAJOR_VERSION < 3
+ Py_XDECREF(owned_ref);
+ #endif
+ if (!py_result)
+ return -1;
+ result = __Pyx_PyObject_IsTrue(py_result);
+ Py_DECREF(py_result);
+ return result;
+ }
+return_eq:
+ #if PY_MAJOR_VERSION < 3
+ Py_XDECREF(owned_ref);
+ #endif
+ return (equals == Py_EQ);
+return_ne:
+ #if PY_MAJOR_VERSION < 3
+ Py_XDECREF(owned_ref);
+ #endif
+ return (equals == Py_NE);
+#endif
+}
+
+/* PyIntFromDouble */
+#if PY_MAJOR_VERSION < 3
+static CYTHON_INLINE PyObject* __Pyx_PyInt_FromDouble(double value) {
+ if (value >= (double)LONG_MIN && value <= (double)LONG_MAX) {
+ return PyInt_FromLong((long)value);
+ }
+ return PyLong_FromDouble(value);
+}
+#endif
+
+/* PyIntCompare */
+static CYTHON_INLINE PyObject* __Pyx_PyInt_EqObjC(PyObject *op1, PyObject *op2, CYTHON_UNUSED long intval, CYTHON_UNUSED long inplace) {
+ if (op1 == op2) {
+ Py_RETURN_TRUE;
+ }
+ #if PY_MAJOR_VERSION < 3
+ if (likely(PyInt_CheckExact(op1))) {
+ const long b = intval;
+ long a = PyInt_AS_LONG(op1);
+ if (a == b) Py_RETURN_TRUE; else Py_RETURN_FALSE;
+ }
+ #endif
+ #if CYTHON_USE_PYLONG_INTERNALS
+ if (likely(PyLong_CheckExact(op1))) {
+ int unequal;
+ unsigned long uintval;
+ Py_ssize_t size = Py_SIZE(op1);
+ const digit* digits = ((PyLongObject*)op1)->ob_digit;
+ if (intval == 0) {
+ if (size == 0) Py_RETURN_TRUE; else Py_RETURN_FALSE;
+ } else if (intval < 0) {
+ if (size >= 0)
+ Py_RETURN_FALSE;
+ intval = -intval;
+ size = -size;
+ } else {
+ if (size <= 0)
+ Py_RETURN_FALSE;
+ }
+ uintval = (unsigned long) intval;
+#if PyLong_SHIFT * 4 < SIZEOF_LONG*8
+ if (uintval >> (PyLong_SHIFT * 4)) {
+ unequal = (size != 5) || (digits[0] != (uintval & (unsigned long) PyLong_MASK))
+ | (digits[1] != ((uintval >> (1 * PyLong_SHIFT)) & (unsigned long) PyLong_MASK)) | (digits[2] != ((uintval >> (2 * PyLong_SHIFT)) & (unsigned long) PyLong_MASK)) | (digits[3] != ((uintval >> (3 * PyLong_SHIFT)) & (unsigned long) PyLong_MASK)) | (digits[4] != ((uintval >> (4 * PyLong_SHIFT)) & (unsigned long) PyLong_MASK));
+ } else
+#endif
+#if PyLong_SHIFT * 3 < SIZEOF_LONG*8
+ if (uintval >> (PyLong_SHIFT * 3)) {
+ unequal = (size != 4) || (digits[0] != (uintval & (unsigned long) PyLong_MASK))
+ | (digits[1] != ((uintval >> (1 * PyLong_SHIFT)) & (unsigned long) PyLong_MASK)) | (digits[2] != ((uintval >> (2 * PyLong_SHIFT)) & (unsigned long) PyLong_MASK)) | (digits[3] != ((uintval >> (3 * PyLong_SHIFT)) & (unsigned long) PyLong_MASK));
+ } else
+#endif
+#if PyLong_SHIFT * 2 < SIZEOF_LONG*8
+ if (uintval >> (PyLong_SHIFT * 2)) {
+ unequal = (size != 3) || (digits[0] != (uintval & (unsigned long) PyLong_MASK))
+ | (digits[1] != ((uintval >> (1 * PyLong_SHIFT)) & (unsigned long) PyLong_MASK)) | (digits[2] != ((uintval >> (2 * PyLong_SHIFT)) & (unsigned long) PyLong_MASK));
+ } else
+#endif
+#if PyLong_SHIFT * 1 < SIZEOF_LONG*8
+ if (uintval >> (PyLong_SHIFT * 1)) {
+ unequal = (size != 2) || (digits[0] != (uintval & (unsigned long) PyLong_MASK))
+ | (digits[1] != ((uintval >> (1 * PyLong_SHIFT)) & (unsigned long) PyLong_MASK));
+ } else
+#endif
+ unequal = (size != 1) || (((unsigned long) digits[0]) != (uintval & (unsigned long) PyLong_MASK));
+ if (unequal == 0) Py_RETURN_TRUE; else Py_RETURN_FALSE;
+ }
+ #endif
+ if (PyFloat_CheckExact(op1)) {
+ const long b = intval;
+ double a = PyFloat_AS_DOUBLE(op1);
+ if ((double)a == (double)b) Py_RETURN_TRUE; else Py_RETURN_FALSE;
+ }
+ return (
+ PyObject_RichCompare(op1, op2, Py_EQ));
+}
+
+/* PyDictVersioning */
+#if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_TYPE_SLOTS
+static CYTHON_INLINE PY_UINT64_T __Pyx_get_tp_dict_version(PyObject *obj) {
+ PyObject *dict = Py_TYPE(obj)->tp_dict;
+ return likely(dict) ? __PYX_GET_DICT_VERSION(dict) : 0;
+}
+static CYTHON_INLINE PY_UINT64_T __Pyx_get_object_dict_version(PyObject *obj) {
+ PyObject **dictptr = NULL;
+ Py_ssize_t offset = Py_TYPE(obj)->tp_dictoffset;
+ if (offset) {
+#if CYTHON_COMPILING_IN_CPYTHON
+ dictptr = (likely(offset > 0)) ? (PyObject **) ((char *)obj + offset) : _PyObject_GetDictPtr(obj);
+#else
+ dictptr = _PyObject_GetDictPtr(obj);
+#endif
+ }
+ return (dictptr && *dictptr) ? __PYX_GET_DICT_VERSION(*dictptr) : 0;
+}
+static CYTHON_INLINE int __Pyx_object_dict_version_matches(PyObject* obj, PY_UINT64_T tp_dict_version, PY_UINT64_T obj_dict_version) {
+ PyObject *dict = Py_TYPE(obj)->tp_dict;
+ if (unlikely(!dict) || unlikely(tp_dict_version != __PYX_GET_DICT_VERSION(dict)))
+ return 0;
+ return obj_dict_version == __Pyx_get_object_dict_version(obj);
+}
+#endif
+
+/* PyObjectGetAttrStr */
+#if CYTHON_USE_TYPE_SLOTS
+static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStr(PyObject* obj, PyObject* attr_name) {
+ PyTypeObject* tp = Py_TYPE(obj);
+ if (likely(tp->tp_getattro))
+ return tp->tp_getattro(obj, attr_name);
+#if PY_MAJOR_VERSION < 3
+ if (likely(tp->tp_getattr))
+ return tp->tp_getattr(obj, PyString_AS_STRING(attr_name));
+#endif
+ return PyObject_GetAttr(obj, attr_name);
+}
+#endif
+
+/* PyErrFetchRestore */
+#if CYTHON_FAST_THREAD_STATE
+static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb) {
+ PyObject *tmp_type, *tmp_value, *tmp_tb;
+ tmp_type = tstate->curexc_type;
+ tmp_value = tstate->curexc_value;
+ tmp_tb = tstate->curexc_traceback;
+ tstate->curexc_type = type;
+ tstate->curexc_value = value;
+ tstate->curexc_traceback = tb;
+ Py_XDECREF(tmp_type);
+ Py_XDECREF(tmp_value);
+ Py_XDECREF(tmp_tb);
+}
+static CYTHON_INLINE void __Pyx_ErrFetchInState(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) {
+ *type = tstate->curexc_type;
+ *value = tstate->curexc_value;
+ *tb = tstate->curexc_traceback;
+ tstate->curexc_type = 0;
+ tstate->curexc_value = 0;
+ tstate->curexc_traceback = 0;
+}
+#endif
+
+/* CLineInTraceback */
+#ifndef CYTHON_CLINE_IN_TRACEBACK
+static int __Pyx_CLineForTraceback(CYTHON_NCP_UNUSED PyThreadState *tstate, int c_line) {
+ PyObject *use_cline;
+ PyObject *ptype, *pvalue, *ptraceback;
+#if CYTHON_COMPILING_IN_CPYTHON
+ PyObject **cython_runtime_dict;
+#endif
+ if (unlikely(!__pyx_cython_runtime)) {
+ return c_line;
+ }
+ __Pyx_ErrFetchInState(tstate, &ptype, &pvalue, &ptraceback);
+#if CYTHON_COMPILING_IN_CPYTHON
+ cython_runtime_dict = _PyObject_GetDictPtr(__pyx_cython_runtime);
+ if (likely(cython_runtime_dict)) {
+ __PYX_PY_DICT_LOOKUP_IF_MODIFIED(
+ use_cline, *cython_runtime_dict,
+ __Pyx_PyDict_GetItemStr(*cython_runtime_dict, __pyx_n_s_cline_in_traceback))
+ } else
+#endif
+ {
+ PyObject *use_cline_obj = __Pyx_PyObject_GetAttrStr(__pyx_cython_runtime, __pyx_n_s_cline_in_traceback);
+ if (use_cline_obj) {
+ use_cline = PyObject_Not(use_cline_obj) ? Py_False : Py_True;
+ Py_DECREF(use_cline_obj);
+ } else {
+ PyErr_Clear();
+ use_cline = NULL;
+ }
+ }
+ if (!use_cline) {
+ c_line = 0;
+ PyObject_SetAttr(__pyx_cython_runtime, __pyx_n_s_cline_in_traceback, Py_False);
+ }
+ else if (use_cline == Py_False || (use_cline != Py_True && PyObject_Not(use_cline) != 0)) {
+ c_line = 0;
+ }
+ __Pyx_ErrRestoreInState(tstate, ptype, pvalue, ptraceback);
+ return c_line;
+}
+#endif
+
+/* CodeObjectCache */
+static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line) {
+ int start = 0, mid = 0, end = count - 1;
+ if (end >= 0 && code_line > entries[end].code_line) {
+ return count;
+ }
+ while (start < end) {
+ mid = start + (end - start) / 2;
+ if (code_line < entries[mid].code_line) {
+ end = mid;
+ } else if (code_line > entries[mid].code_line) {
+ start = mid + 1;
+ } else {
+ return mid;
+ }
+ }
+ if (code_line <= entries[mid].code_line) {
+ return mid;
+ } else {
+ return mid + 1;
+ }
+}
+static PyCodeObject *__pyx_find_code_object(int code_line) {
+ PyCodeObject* code_object;
+ int pos;
+ if (unlikely(!code_line) || unlikely(!__pyx_code_cache.entries)) {
+ return NULL;
+ }
+ pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line);
+ if (unlikely(pos >= __pyx_code_cache.count) || unlikely(__pyx_code_cache.entries[pos].code_line != code_line)) {
+ return NULL;
+ }
+ code_object = __pyx_code_cache.entries[pos].code_object;
+ Py_INCREF(code_object);
+ return code_object;
+}
+static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object) {
+ int pos, i;
+ __Pyx_CodeObjectCacheEntry* entries = __pyx_code_cache.entries;
+ if (unlikely(!code_line)) {
+ return;
+ }
+ if (unlikely(!entries)) {
+ entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Malloc(64*sizeof(__Pyx_CodeObjectCacheEntry));
+ if (likely(entries)) {
+ __pyx_code_cache.entries = entries;
+ __pyx_code_cache.max_count = 64;
+ __pyx_code_cache.count = 1;
+ entries[0].code_line = code_line;
+ entries[0].code_object = code_object;
+ Py_INCREF(code_object);
+ }
+ return;
+ }
+ pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line);
+ if ((pos < __pyx_code_cache.count) && unlikely(__pyx_code_cache.entries[pos].code_line == code_line)) {
+ PyCodeObject* tmp = entries[pos].code_object;
+ entries[pos].code_object = code_object;
+ Py_DECREF(tmp);
+ return;
+ }
+ if (__pyx_code_cache.count == __pyx_code_cache.max_count) {
+ int new_max = __pyx_code_cache.max_count + 64;
+ entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Realloc(
+ __pyx_code_cache.entries, ((size_t)new_max) * sizeof(__Pyx_CodeObjectCacheEntry));
+ if (unlikely(!entries)) {
+ return;
+ }
+ __pyx_code_cache.entries = entries;
+ __pyx_code_cache.max_count = new_max;
+ }
+ for (i=__pyx_code_cache.count; i>pos; i--) {
+ entries[i] = entries[i-1];
+ }
+ entries[pos].code_line = code_line;
+ entries[pos].code_object = code_object;
+ __pyx_code_cache.count++;
+ Py_INCREF(code_object);
+}
+
+/* AddTraceback */
+#include "compile.h"
+#include "frameobject.h"
+#include "traceback.h"
+static PyCodeObject* __Pyx_CreateCodeObjectForTraceback(
+ const char *funcname, int c_line,
+ int py_line, const char *filename) {
+ PyCodeObject *py_code = 0;
+ PyObject *py_srcfile = 0;
+ PyObject *py_funcname = 0;
+ #if PY_MAJOR_VERSION < 3
+ py_srcfile = PyString_FromString(filename);
+ #else
+ py_srcfile = PyUnicode_FromString(filename);
+ #endif
+ if (!py_srcfile) goto bad;
+ if (c_line) {
+ #if PY_MAJOR_VERSION < 3
+ py_funcname = PyString_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line);
+ #else
+ py_funcname = PyUnicode_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line);
+ #endif
+ }
+ else {
+ #if PY_MAJOR_VERSION < 3
+ py_funcname = PyString_FromString(funcname);
+ #else
+ py_funcname = PyUnicode_FromString(funcname);
+ #endif
+ }
+ if (!py_funcname) goto bad;
+ py_code = __Pyx_PyCode_New(
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ __pyx_empty_bytes, /*PyObject *code,*/
+ __pyx_empty_tuple, /*PyObject *consts,*/
+ __pyx_empty_tuple, /*PyObject *names,*/
+ __pyx_empty_tuple, /*PyObject *varnames,*/
+ __pyx_empty_tuple, /*PyObject *freevars,*/
+ __pyx_empty_tuple, /*PyObject *cellvars,*/
+ py_srcfile, /*PyObject *filename,*/
+ py_funcname, /*PyObject *name,*/
+ py_line,
+ __pyx_empty_bytes /*PyObject *lnotab*/
+ );
+ Py_DECREF(py_srcfile);
+ Py_DECREF(py_funcname);
+ return py_code;
+bad:
+ Py_XDECREF(py_srcfile);
+ Py_XDECREF(py_funcname);
+ return NULL;
+}
+static void __Pyx_AddTraceback(const char *funcname, int c_line,
+ int py_line, const char *filename) {
+ PyCodeObject *py_code = 0;
+ PyFrameObject *py_frame = 0;
+ PyThreadState *tstate = __Pyx_PyThreadState_Current;
+ if (c_line) {
+ c_line = __Pyx_CLineForTraceback(tstate, c_line);
+ }
+ py_code = __pyx_find_code_object(c_line ? -c_line : py_line);
+ if (!py_code) {
+ py_code = __Pyx_CreateCodeObjectForTraceback(
+ funcname, c_line, py_line, filename);
+ if (!py_code) goto bad;
+ __pyx_insert_code_object(c_line ? -c_line : py_line, py_code);
+ }
+ py_frame = PyFrame_New(
+ tstate, /*PyThreadState *tstate,*/
+ py_code, /*PyCodeObject *code,*/
+ __pyx_d, /*PyObject *globals,*/
+ 0 /*PyObject *locals*/
+ );
+ if (!py_frame) goto bad;
+ __Pyx_PyFrame_SetLineNumber(py_frame, py_line);
+ PyTraceBack_Here(py_frame);
+bad:
+ Py_XDECREF(py_code);
+ Py_XDECREF(py_frame);
+}
+
+/* CIntFromPyVerify */
+#define __PYX_VERIFY_RETURN_INT(target_type, func_type, func_value)\
+ __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 0)
+#define __PYX_VERIFY_RETURN_INT_EXC(target_type, func_type, func_value)\
+ __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 1)
+#define __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, exc)\
+ {\
+ func_type value = func_value;\
+ if (sizeof(target_type) < sizeof(func_type)) {\
+ if (unlikely(value != (func_type) (target_type) value)) {\
+ func_type zero = 0;\
+ if (exc && unlikely(value == (func_type)-1 && PyErr_Occurred()))\
+ return (target_type) -1;\
+ if (is_unsigned && unlikely(value < zero))\
+ goto raise_neg_overflow;\
+ else\
+ goto raise_overflow;\
+ }\
+ }\
+ return (target_type) value;\
+ }
+
+/* CIntFromPy */
+static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *x) {
+#ifdef __Pyx_HAS_GCC_DIAGNOSTIC
+#pragma GCC diagnostic push
+#pragma GCC diagnostic ignored "-Wconversion"
+#endif
+ const int neg_one = (int) -1, const_zero = (int) 0;
+#ifdef __Pyx_HAS_GCC_DIAGNOSTIC
+#pragma GCC diagnostic pop
+#endif
+ const int is_unsigned = neg_one > const_zero;
+#if PY_MAJOR_VERSION < 3
+ if (likely(PyInt_Check(x))) {
+ if (sizeof(int) < sizeof(long)) {
+ __PYX_VERIFY_RETURN_INT(int, long, PyInt_AS_LONG(x))
+ } else {
+ long val = PyInt_AS_LONG(x);
+ if (is_unsigned && unlikely(val < 0)) {
+ goto raise_neg_overflow;
+ }
+ return (int) val;
+ }
+ } else
+#endif
+ if (likely(PyLong_Check(x))) {
+ if (is_unsigned) {
+#if CYTHON_USE_PYLONG_INTERNALS
+ const digit* digits = ((PyLongObject*)x)->ob_digit;
+ switch (Py_SIZE(x)) {
+ case 0: return (int) 0;
+ case 1: __PYX_VERIFY_RETURN_INT(int, digit, digits[0])
+ case 2:
+ if (8 * sizeof(int) > 1 * PyLong_SHIFT) {
+ if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) {
+ __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
+ } else if (8 * sizeof(int) >= 2 * PyLong_SHIFT) {
+ return (int) (((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0]));
+ }
+ }
+ break;
+ case 3:
+ if (8 * sizeof(int) > 2 * PyLong_SHIFT) {
+ if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) {
+ __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
+ } else if (8 * sizeof(int) >= 3 * PyLong_SHIFT) {
+ return (int) (((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]));
+ }
+ }
+ break;
+ case 4:
+ if (8 * sizeof(int) > 3 * PyLong_SHIFT) {
+ if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) {
+ __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
+ } else if (8 * sizeof(int) >= 4 * PyLong_SHIFT) {
+ return (int) (((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]));
+ }
+ }
+ break;
+ }
+#endif
+#if CYTHON_COMPILING_IN_CPYTHON
+ if (unlikely(Py_SIZE(x) < 0)) {
+ goto raise_neg_overflow;
+ }
+#else
+ {
+ int result = PyObject_RichCompareBool(x, Py_False, Py_LT);
+ if (unlikely(result < 0))
+ return (int) -1;
+ if (unlikely(result == 1))
+ goto raise_neg_overflow;
+ }
+#endif
+ if (sizeof(int) <= sizeof(unsigned long)) {
+ __PYX_VERIFY_RETURN_INT_EXC(int, unsigned long, PyLong_AsUnsignedLong(x))
+#ifdef HAVE_LONG_LONG
+ } else if (sizeof(int) <= sizeof(unsigned PY_LONG_LONG)) {
+ __PYX_VERIFY_RETURN_INT_EXC(int, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x))
+#endif
+ }
+ } else {
+#if CYTHON_USE_PYLONG_INTERNALS
+ const digit* digits = ((PyLongObject*)x)->ob_digit;
+ switch (Py_SIZE(x)) {
+ case 0: return (int) 0;
+ case -1: __PYX_VERIFY_RETURN_INT(int, sdigit, (sdigit) (-(sdigit)digits[0]))
+ case 1: __PYX_VERIFY_RETURN_INT(int, digit, +digits[0])
+ case -2:
+ if (8 * sizeof(int) - 1 > 1 * PyLong_SHIFT) {
+ if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) {
+ __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
+ } else if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) {
+ return (int) (((int)-1)*(((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0])));
+ }
+ }
+ break;
+ case 2:
+ if (8 * sizeof(int) > 1 * PyLong_SHIFT) {
+ if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) {
+ __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
+ } else if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) {
+ return (int) ((((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0])));
+ }
+ }
+ break;
+ case -3:
+ if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) {
+ if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) {
+ __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
+ } else if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) {
+ return (int) (((int)-1)*(((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])));
+ }
+ }
+ break;
+ case 3:
+ if (8 * sizeof(int) > 2 * PyLong_SHIFT) {
+ if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) {
+ __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
+ } else if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) {
+ return (int) ((((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])));
+ }
+ }
+ break;
+ case -4:
+ if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) {
+ if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) {
+ __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
+ } else if (8 * sizeof(int) - 1 > 4 * PyLong_SHIFT) {
+ return (int) (((int)-1)*(((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])));
+ }
+ }
+ break;
+ case 4:
+ if (8 * sizeof(int) > 3 * PyLong_SHIFT) {
+ if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) {
+ __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
+ } else if (8 * sizeof(int) - 1 > 4 * PyLong_SHIFT) {
+ return (int) ((((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])));
+ }
+ }
+ break;
+ }
+#endif
+ if (sizeof(int) <= sizeof(long)) {
+ __PYX_VERIFY_RETURN_INT_EXC(int, long, PyLong_AsLong(x))
+#ifdef HAVE_LONG_LONG
+ } else if (sizeof(int) <= sizeof(PY_LONG_LONG)) {
+ __PYX_VERIFY_RETURN_INT_EXC(int, PY_LONG_LONG, PyLong_AsLongLong(x))
+#endif
+ }
+ }
+ {
+#if CYTHON_COMPILING_IN_PYPY && !defined(_PyLong_AsByteArray)
+ PyErr_SetString(PyExc_RuntimeError,
+ "_PyLong_AsByteArray() not available in PyPy, cannot convert large numbers");
+#else
+ int val;
+ PyObject *v = __Pyx_PyNumber_IntOrLong(x);
+ #if PY_MAJOR_VERSION < 3
+ if (likely(v) && !PyLong_Check(v)) {
+ PyObject *tmp = v;
+ v = PyNumber_Long(tmp);
+ Py_DECREF(tmp);
+ }
+ #endif
+ if (likely(v)) {
+ int one = 1; int is_little = (int)*(unsigned char *)&one;
+ unsigned char *bytes = (unsigned char *)&val;
+ int ret = _PyLong_AsByteArray((PyLongObject *)v,
+ bytes, sizeof(val),
+ is_little, !is_unsigned);
+ Py_DECREF(v);
+ if (likely(!ret))
+ return val;
+ }
+#endif
+ return (int) -1;
+ }
+ } else {
+ int val;
+ PyObject *tmp = __Pyx_PyNumber_IntOrLong(x);
+ if (!tmp) return (int) -1;
+ val = __Pyx_PyInt_As_int(tmp);
+ Py_DECREF(tmp);
+ return val;
+ }
+raise_overflow:
+ PyErr_SetString(PyExc_OverflowError,
+ "value too large to convert to int");
+ return (int) -1;
+raise_neg_overflow:
+ PyErr_SetString(PyExc_OverflowError,
+ "can't convert negative value to int");
+ return (int) -1;
+}
+
+/* CIntToPy */
+static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value) {
+#ifdef __Pyx_HAS_GCC_DIAGNOSTIC
+#pragma GCC diagnostic push
+#pragma GCC diagnostic ignored "-Wconversion"
+#endif
+ const long neg_one = (long) -1, const_zero = (long) 0;
+#ifdef __Pyx_HAS_GCC_DIAGNOSTIC
+#pragma GCC diagnostic pop
+#endif
+ const int is_unsigned = neg_one > const_zero;
+ if (is_unsigned) {
+ if (sizeof(long) < sizeof(long)) {
+ return PyInt_FromLong((long) value);
+ } else if (sizeof(long) <= sizeof(unsigned long)) {
+ return PyLong_FromUnsignedLong((unsigned long) value);
+#ifdef HAVE_LONG_LONG
+ } else if (sizeof(long) <= sizeof(unsigned PY_LONG_LONG)) {
+ return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value);
+#endif
+ }
+ } else {
+ if (sizeof(long) <= sizeof(long)) {
+ return PyInt_FromLong((long) value);
+#ifdef HAVE_LONG_LONG
+ } else if (sizeof(long) <= sizeof(PY_LONG_LONG)) {
+ return PyLong_FromLongLong((PY_LONG_LONG) value);
+#endif
+ }
+ }
+ {
+ int one = 1; int little = (int)*(unsigned char *)&one;
+ unsigned char *bytes = (unsigned char *)&value;
+ return _PyLong_FromByteArray(bytes, sizeof(long),
+ little, !is_unsigned);
+ }
+}
+
+/* CIntToPy */
+static CYTHON_INLINE PyObject* __Pyx_PyInt_From_int(int value) {
+#ifdef __Pyx_HAS_GCC_DIAGNOSTIC
+#pragma GCC diagnostic push
+#pragma GCC diagnostic ignored "-Wconversion"
+#endif
+ const int neg_one = (int) -1, const_zero = (int) 0;
+#ifdef __Pyx_HAS_GCC_DIAGNOSTIC
+#pragma GCC diagnostic pop
+#endif
+ const int is_unsigned = neg_one > const_zero;
+ if (is_unsigned) {
+ if (sizeof(int) < sizeof(long)) {
+ return PyInt_FromLong((long) value);
+ } else if (sizeof(int) <= sizeof(unsigned long)) {
+ return PyLong_FromUnsignedLong((unsigned long) value);
+#ifdef HAVE_LONG_LONG
+ } else if (sizeof(int) <= sizeof(unsigned PY_LONG_LONG)) {
+ return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value);
+#endif
+ }
+ } else {
+ if (sizeof(int) <= sizeof(long)) {
+ return PyInt_FromLong((long) value);
+#ifdef HAVE_LONG_LONG
+ } else if (sizeof(int) <= sizeof(PY_LONG_LONG)) {
+ return PyLong_FromLongLong((PY_LONG_LONG) value);
+#endif
+ }
+ }
+ {
+ int one = 1; int little = (int)*(unsigned char *)&one;
+ unsigned char *bytes = (unsigned char *)&value;
+ return _PyLong_FromByteArray(bytes, sizeof(int),
+ little, !is_unsigned);
+ }
+}
+
+/* CIntFromPy */
+static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *x) {
+#ifdef __Pyx_HAS_GCC_DIAGNOSTIC
+#pragma GCC diagnostic push
+#pragma GCC diagnostic ignored "-Wconversion"
+#endif
+ const long neg_one = (long) -1, const_zero = (long) 0;
+#ifdef __Pyx_HAS_GCC_DIAGNOSTIC
+#pragma GCC diagnostic pop
+#endif
+ const int is_unsigned = neg_one > const_zero;
+#if PY_MAJOR_VERSION < 3
+ if (likely(PyInt_Check(x))) {
+ if (sizeof(long) < sizeof(long)) {
+ __PYX_VERIFY_RETURN_INT(long, long, PyInt_AS_LONG(x))
+ } else {
+ long val = PyInt_AS_LONG(x);
+ if (is_unsigned && unlikely(val < 0)) {
+ goto raise_neg_overflow;
+ }
+ return (long) val;
+ }
+ } else
+#endif
+ if (likely(PyLong_Check(x))) {
+ if (is_unsigned) {
+#if CYTHON_USE_PYLONG_INTERNALS
+ const digit* digits = ((PyLongObject*)x)->ob_digit;
+ switch (Py_SIZE(x)) {
+ case 0: return (long) 0;
+ case 1: __PYX_VERIFY_RETURN_INT(long, digit, digits[0])
+ case 2:
+ if (8 * sizeof(long) > 1 * PyLong_SHIFT) {
+ if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) {
+ __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
+ } else if (8 * sizeof(long) >= 2 * PyLong_SHIFT) {
+ return (long) (((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0]));
+ }
+ }
+ break;
+ case 3:
+ if (8 * sizeof(long) > 2 * PyLong_SHIFT) {
+ if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) {
+ __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
+ } else if (8 * sizeof(long) >= 3 * PyLong_SHIFT) {
+ return (long) (((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]));
+ }
+ }
+ break;
+ case 4:
+ if (8 * sizeof(long) > 3 * PyLong_SHIFT) {
+ if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) {
+ __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
+ } else if (8 * sizeof(long) >= 4 * PyLong_SHIFT) {
+ return (long) (((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]));
+ }
+ }
+ break;
+ }
+#endif
+#if CYTHON_COMPILING_IN_CPYTHON
+ if (unlikely(Py_SIZE(x) < 0)) {
+ goto raise_neg_overflow;
+ }
+#else
+ {
+ int result = PyObject_RichCompareBool(x, Py_False, Py_LT);
+ if (unlikely(result < 0))
+ return (long) -1;
+ if (unlikely(result == 1))
+ goto raise_neg_overflow;
+ }
+#endif
+ if (sizeof(long) <= sizeof(unsigned long)) {
+ __PYX_VERIFY_RETURN_INT_EXC(long, unsigned long, PyLong_AsUnsignedLong(x))
+#ifdef HAVE_LONG_LONG
+ } else if (sizeof(long) <= sizeof(unsigned PY_LONG_LONG)) {
+ __PYX_VERIFY_RETURN_INT_EXC(long, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x))
+#endif
+ }
+ } else {
+#if CYTHON_USE_PYLONG_INTERNALS
+ const digit* digits = ((PyLongObject*)x)->ob_digit;
+ switch (Py_SIZE(x)) {
+ case 0: return (long) 0;
+ case -1: __PYX_VERIFY_RETURN_INT(long, sdigit, (sdigit) (-(sdigit)digits[0]))
+ case 1: __PYX_VERIFY_RETURN_INT(long, digit, +digits[0])
+ case -2:
+ if (8 * sizeof(long) - 1 > 1 * PyLong_SHIFT) {
+ if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) {
+ __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
+ } else if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) {
+ return (long) (((long)-1)*(((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0])));
+ }
+ }
+ break;
+ case 2:
+ if (8 * sizeof(long) > 1 * PyLong_SHIFT) {
+ if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) {
+ __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
+ } else if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) {
+ return (long) ((((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0])));
+ }
+ }
+ break;
+ case -3:
+ if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) {
+ if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) {
+ __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
+ } else if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) {
+ return (long) (((long)-1)*(((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])));
+ }
+ }
+ break;
+ case 3:
+ if (8 * sizeof(long) > 2 * PyLong_SHIFT) {
+ if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) {
+ __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
+ } else if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) {
+ return (long) ((((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])));
+ }
+ }
+ break;
+ case -4:
+ if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) {
+ if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) {
+ __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
+ } else if (8 * sizeof(long) - 1 > 4 * PyLong_SHIFT) {
+ return (long) (((long)-1)*(((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])));
+ }
+ }
+ break;
+ case 4:
+ if (8 * sizeof(long) > 3 * PyLong_SHIFT) {
+ if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) {
+ __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
+ } else if (8 * sizeof(long) - 1 > 4 * PyLong_SHIFT) {
+ return (long) ((((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])));
+ }
+ }
+ break;
+ }
+#endif
+ if (sizeof(long) <= sizeof(long)) {
+ __PYX_VERIFY_RETURN_INT_EXC(long, long, PyLong_AsLong(x))
+#ifdef HAVE_LONG_LONG
+ } else if (sizeof(long) <= sizeof(PY_LONG_LONG)) {
+ __PYX_VERIFY_RETURN_INT_EXC(long, PY_LONG_LONG, PyLong_AsLongLong(x))
+#endif
+ }
+ }
+ {
+#if CYTHON_COMPILING_IN_PYPY && !defined(_PyLong_AsByteArray)
+ PyErr_SetString(PyExc_RuntimeError,
+ "_PyLong_AsByteArray() not available in PyPy, cannot convert large numbers");
+#else
+ long val;
+ PyObject *v = __Pyx_PyNumber_IntOrLong(x);
+ #if PY_MAJOR_VERSION < 3
+ if (likely(v) && !PyLong_Check(v)) {
+ PyObject *tmp = v;
+ v = PyNumber_Long(tmp);
+ Py_DECREF(tmp);
+ }
+ #endif
+ if (likely(v)) {
+ int one = 1; int is_little = (int)*(unsigned char *)&one;
+ unsigned char *bytes = (unsigned char *)&val;
+ int ret = _PyLong_AsByteArray((PyLongObject *)v,
+ bytes, sizeof(val),
+ is_little, !is_unsigned);
+ Py_DECREF(v);
+ if (likely(!ret))
+ return val;
+ }
+#endif
+ return (long) -1;
+ }
+ } else {
+ long val;
+ PyObject *tmp = __Pyx_PyNumber_IntOrLong(x);
+ if (!tmp) return (long) -1;
+ val = __Pyx_PyInt_As_long(tmp);
+ Py_DECREF(tmp);
+ return val;
+ }
+raise_overflow:
+ PyErr_SetString(PyExc_OverflowError,
+ "value too large to convert to long");
+ return (long) -1;
+raise_neg_overflow:
+ PyErr_SetString(PyExc_OverflowError,
+ "can't convert negative value to long");
+ return (long) -1;
+}
+
+/* FastTypeChecks */
+#if CYTHON_COMPILING_IN_CPYTHON
+static int __Pyx_InBases(PyTypeObject *a, PyTypeObject *b) {
+ while (a) {
+ a = a->tp_base;
+ if (a == b)
+ return 1;
+ }
+ return b == &PyBaseObject_Type;
+}
+static CYTHON_INLINE int __Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b) {
+ PyObject *mro;
+ if (a == b) return 1;
+ mro = a->tp_mro;
+ if (likely(mro)) {
+ Py_ssize_t i, n;
+ n = PyTuple_GET_SIZE(mro);
+ for (i = 0; i < n; i++) {
+ if (PyTuple_GET_ITEM(mro, i) == (PyObject *)b)
+ return 1;
+ }
+ return 0;
+ }
+ return __Pyx_InBases(a, b);
+}
+#if PY_MAJOR_VERSION == 2
+static int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject* exc_type2) {
+ PyObject *exception, *value, *tb;
+ int res;
+ __Pyx_PyThreadState_declare
+ __Pyx_PyThreadState_assign
+ __Pyx_ErrFetch(&exception, &value, &tb);
+ res = exc_type1 ? PyObject_IsSubclass(err, exc_type1) : 0;
+ if (unlikely(res == -1)) {
+ PyErr_WriteUnraisable(err);
+ res = 0;
+ }
+ if (!res) {
+ res = PyObject_IsSubclass(err, exc_type2);
+ if (unlikely(res == -1)) {
+ PyErr_WriteUnraisable(err);
+ res = 0;
+ }
+ }
+ __Pyx_ErrRestore(exception, value, tb);
+ return res;
+}
+#else
+static CYTHON_INLINE int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject *exc_type2) {
+ int res = exc_type1 ? __Pyx_IsSubtype((PyTypeObject*)err, (PyTypeObject*)exc_type1) : 0;
+ if (!res) {
+ res = __Pyx_IsSubtype((PyTypeObject*)err, (PyTypeObject*)exc_type2);
+ }
+ return res;
+}
+#endif
+static int __Pyx_PyErr_GivenExceptionMatchesTuple(PyObject *exc_type, PyObject *tuple) {
+ Py_ssize_t i, n;
+ assert(PyExceptionClass_Check(exc_type));
+ n = PyTuple_GET_SIZE(tuple);
+#if PY_MAJOR_VERSION >= 3
+ for (i=0; ip) {
+ #if PY_MAJOR_VERSION < 3
+ if (t->is_unicode) {
+ *t->p = PyUnicode_DecodeUTF8(t->s, t->n - 1, NULL);
+ } else if (t->intern) {
+ *t->p = PyString_InternFromString(t->s);
+ } else {
+ *t->p = PyString_FromStringAndSize(t->s, t->n - 1);
+ }
+ #else
+ if (t->is_unicode | t->is_str) {
+ if (t->intern) {
+ *t->p = PyUnicode_InternFromString(t->s);
+ } else if (t->encoding) {
+ *t->p = PyUnicode_Decode(t->s, t->n - 1, t->encoding, NULL);
+ } else {
+ *t->p = PyUnicode_FromStringAndSize(t->s, t->n - 1);
+ }
+ } else {
+ *t->p = PyBytes_FromStringAndSize(t->s, t->n - 1);
+ }
+ #endif
+ if (!*t->p)
+ return -1;
+ if (PyObject_Hash(*t->p) == -1)
+ return -1;
+ ++t;
+ }
+ return 0;
+}
+
+static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char* c_str) {
+ return __Pyx_PyUnicode_FromStringAndSize(c_str, (Py_ssize_t)strlen(c_str));
+}
+static CYTHON_INLINE const char* __Pyx_PyObject_AsString(PyObject* o) {
+ Py_ssize_t ignore;
+ return __Pyx_PyObject_AsStringAndSize(o, &ignore);
+}
+#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT
+#if !CYTHON_PEP393_ENABLED
+static const char* __Pyx_PyUnicode_AsStringAndSize(PyObject* o, Py_ssize_t *length) {
+ char* defenc_c;
+ PyObject* defenc = _PyUnicode_AsDefaultEncodedString(o, NULL);
+ if (!defenc) return NULL;
+ defenc_c = PyBytes_AS_STRING(defenc);
+#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII
+ {
+ char* end = defenc_c + PyBytes_GET_SIZE(defenc);
+ char* c;
+ for (c = defenc_c; c < end; c++) {
+ if ((unsigned char) (*c) >= 128) {
+ PyUnicode_AsASCIIString(o);
+ return NULL;
+ }
+ }
+ }
+#endif
+ *length = PyBytes_GET_SIZE(defenc);
+ return defenc_c;
+}
+#else
+static CYTHON_INLINE const char* __Pyx_PyUnicode_AsStringAndSize(PyObject* o, Py_ssize_t *length) {
+ if (unlikely(__Pyx_PyUnicode_READY(o) == -1)) return NULL;
+#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII
+ if (likely(PyUnicode_IS_ASCII(o))) {
+ *length = PyUnicode_GET_LENGTH(o);
+ return PyUnicode_AsUTF8(o);
+ } else {
+ PyUnicode_AsASCIIString(o);
+ return NULL;
+ }
+#else
+ return PyUnicode_AsUTF8AndSize(o, length);
+#endif
+}
+#endif
+#endif
+static CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject* o, Py_ssize_t *length) {
+#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT
+ if (
+#if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII
+ __Pyx_sys_getdefaultencoding_not_ascii &&
+#endif
+ PyUnicode_Check(o)) {
+ return __Pyx_PyUnicode_AsStringAndSize(o, length);
+ } else
+#endif
+#if (!CYTHON_COMPILING_IN_PYPY) || (defined(PyByteArray_AS_STRING) && defined(PyByteArray_GET_SIZE))
+ if (PyByteArray_Check(o)) {
+ *length = PyByteArray_GET_SIZE(o);
+ return PyByteArray_AS_STRING(o);
+ } else
+#endif
+ {
+ char* result;
+ int r = PyBytes_AsStringAndSize(o, &result, length);
+ if (unlikely(r < 0)) {
+ return NULL;
+ } else {
+ return result;
+ }
+ }
+}
+static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject* x) {
+ int is_true = x == Py_True;
+ if (is_true | (x == Py_False) | (x == Py_None)) return is_true;
+ else return PyObject_IsTrue(x);
+}
+static CYTHON_INLINE int __Pyx_PyObject_IsTrueAndDecref(PyObject* x) {
+ int retval;
+ if (unlikely(!x)) return -1;
+ retval = __Pyx_PyObject_IsTrue(x);
+ Py_DECREF(x);
+ return retval;
+}
+static PyObject* __Pyx_PyNumber_IntOrLongWrongResultType(PyObject* result, const char* type_name) {
+#if PY_MAJOR_VERSION >= 3
+ if (PyLong_Check(result)) {
+ if (PyErr_WarnFormat(PyExc_DeprecationWarning, 1,
+ "__int__ returned non-int (type %.200s). "
+ "The ability to return an instance of a strict subclass of int "
+ "is deprecated, and may be removed in a future version of Python.",
+ Py_TYPE(result)->tp_name)) {
+ Py_DECREF(result);
+ return NULL;
+ }
+ return result;
+ }
+#endif
+ PyErr_Format(PyExc_TypeError,
+ "__%.4s__ returned non-%.4s (type %.200s)",
+ type_name, type_name, Py_TYPE(result)->tp_name);
+ Py_DECREF(result);
+ return NULL;
+}
+static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x) {
+#if CYTHON_USE_TYPE_SLOTS
+ PyNumberMethods *m;
+#endif
+ const char *name = NULL;
+ PyObject *res = NULL;
+#if PY_MAJOR_VERSION < 3
+ if (likely(PyInt_Check(x) || PyLong_Check(x)))
+#else
+ if (likely(PyLong_Check(x)))
+#endif
+ return __Pyx_NewRef(x);
+#if CYTHON_USE_TYPE_SLOTS
+ m = Py_TYPE(x)->tp_as_number;
+ #if PY_MAJOR_VERSION < 3
+ if (m && m->nb_int) {
+ name = "int";
+ res = m->nb_int(x);
+ }
+ else if (m && m->nb_long) {
+ name = "long";
+ res = m->nb_long(x);
+ }
+ #else
+ if (likely(m && m->nb_int)) {
+ name = "int";
+ res = m->nb_int(x);
+ }
+ #endif
+#else
+ if (!PyBytes_CheckExact(x) && !PyUnicode_CheckExact(x)) {
+ res = PyNumber_Int(x);
+ }
+#endif
+ if (likely(res)) {
+#if PY_MAJOR_VERSION < 3
+ if (unlikely(!PyInt_Check(res) && !PyLong_Check(res))) {
+#else
+ if (unlikely(!PyLong_CheckExact(res))) {
+#endif
+ return __Pyx_PyNumber_IntOrLongWrongResultType(res, name);
+ }
+ }
+ else if (!PyErr_Occurred()) {
+ PyErr_SetString(PyExc_TypeError,
+ "an integer is required");
+ }
+ return res;
+}
+static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject* b) {
+ Py_ssize_t ival;
+ PyObject *x;
+#if PY_MAJOR_VERSION < 3
+ if (likely(PyInt_CheckExact(b))) {
+ if (sizeof(Py_ssize_t) >= sizeof(long))
+ return PyInt_AS_LONG(b);
+ else
+ return PyInt_AsSsize_t(b);
+ }
+#endif
+ if (likely(PyLong_CheckExact(b))) {
+ #if CYTHON_USE_PYLONG_INTERNALS
+ const digit* digits = ((PyLongObject*)b)->ob_digit;
+ const Py_ssize_t size = Py_SIZE(b);
+ if (likely(__Pyx_sst_abs(size) <= 1)) {
+ ival = likely(size) ? digits[0] : 0;
+ if (size == -1) ival = -ival;
+ return ival;
+ } else {
+ switch (size) {
+ case 2:
+ if (8 * sizeof(Py_ssize_t) > 2 * PyLong_SHIFT) {
+ return (Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0]));
+ }
+ break;
+ case -2:
+ if (8 * sizeof(Py_ssize_t) > 2 * PyLong_SHIFT) {
+ return -(Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0]));
+ }
+ break;
+ case 3:
+ if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) {
+ return (Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0]));
+ }
+ break;
+ case -3:
+ if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) {
+ return -(Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0]));
+ }
+ break;
+ case 4:
+ if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) {
+ return (Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0]));
+ }
+ break;
+ case -4:
+ if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) {
+ return -(Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0]));
+ }
+ break;
+ }
+ }
+ #endif
+ return PyLong_AsSsize_t(b);
+ }
+ x = PyNumber_Index(b);
+ if (!x) return -1;
+ ival = PyInt_AsSsize_t(x);
+ Py_DECREF(x);
+ return ival;
+}
+static CYTHON_INLINE PyObject * __Pyx_PyBool_FromLong(long b) {
+ return b ? __Pyx_NewRef(Py_True) : __Pyx_NewRef(Py_False);
+}
+static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t ival) {
+ return PyInt_FromSize_t(ival);
+}
+
+
+#endif /* Py_PYTHON_H */
diff --git a/diagnostics/etc_composites/util/tracker/g2l_v4.o b/diagnostics/etc_composites/util/tracker/g2l_v4.o
new file mode 100644
index 000000000..6194b8f54
Binary files /dev/null and b/diagnostics/etc_composites/util/tracker/g2l_v4.o differ
diff --git a/diagnostics/etc_composites/util/tracker/g2l_v4.pyx b/diagnostics/etc_composites/util/tracker/g2l_v4.pyx
new file mode 100644
index 000000000..1cf4ed898
--- /dev/null
+++ b/diagnostics/etc_composites/util/tracker/g2l_v4.pyx
@@ -0,0 +1,76 @@
+def g2l(double the_grid,double start_lon, double start_lat,
+        double dlon, double dlat, int jm, lat_lon_flag, center_flag="center",
+        edge_flag=False,flag_360=False,faux_grids=0):
+    # Grid index -> longitude/latitude (degrees). dlat and dlon in RADIANS!
+    cdef double location, edge1, edge2
+
+    location = -999.0
+    edge1 = -999.0
+    edge2 = -990.0  # NOTE(review): likely a typo for the -999.0 sentinel used above -- confirm
+    if lat_lon_flag == "lon" :
+        # for regular and gaussian grids.
+        if center_flag == "center" :
+            # NOTE! the_grid needs to be the i,j not the gridID!
+            location = start_lon + (int(the_grid)) \
+                * 57.2957795*(dlon)
+        elif center_flag == "free" :
+            # NOTE! the_grid needs to be the i,j not the gridID!
+            location = start_lon + (the_grid) * \
+                57.2957795*(dlon)
+        edge1 = location - 28.64788975*(dlon)
+        edge2 = location + 28.64788975*(dlon)
+        if edge1 < 0.0 :
+            edge1 = 360.0 + edge1
+        if edge2 > 360.0 :
+            edge2 = edge2 - 360.0  # fix: was "360.0 - edge2" (sign error; mirrors edge1 wrap above)
+        if(not flag_360) :
+            if location > 180.0 : # put into +/- form
+                location = location - 360.0
+            if edge1 > 180.0 :
+                edge1 = edge1 - 360.0
+            if edge2 > 180.0 :
+                edge2 = edge2 - 360.0
+
+    elif lat_lon_flag == "lat" :
+        # for regular grids (linear)
+        if center_flag == "center" :
+            # NOTE! the_grid needs to be the i,j not the gridID!
+            location = start_lat + (int(the_grid)) * \
+                57.2957795*(dlat)
+            # Deal with polar cap w/ faux_grids
+            if faux_grids == 2:
+                if int(the_grid) == 0:
+                    location = start_lat + 57.2957795*(dlat*0.25)
+                if int(the_grid) == jm-1:
+                    location = -1.0*start_lat - 57.2957795*(dlat*0.25)
+        elif center_flag == "free":
+            # NOTE! the_grid needs to be the i,j not the gridID!
+            if faux_grids == 2:
+                if the_grid < 0.5:
+                    location = start_lat + (
+                        (the_grid+0.5)*57.2957795*(dlat*0.5))
+                elif the_grid > float(jm)-1.5:  # fix: was "if"; its "else" clobbered the the_grid < 0.5 polar value
+                    location = -1.0*start_lat - (
+                        (float(jm)-the_grid-0.5)*57.2957795*(dlat*0.5))
+                else:
+                    location = start_lat + (the_grid) * \
+                        57.2957795*(dlat)
+            else:
+                location = start_lat + (the_grid) * \
+                    57.2957795*(dlat)
+        edge1 = location - 28.64788975*(dlat)
+        edge2 = location + 28.64788975*(dlat)
+        if faux_grids == 2:
+            # Deal with polar cap w/ faux_grids
+            if the_grid < 1.0 or the_grid > float(jm)-2.0:
+                edge1 = location - 14.323944875*(dlat)
+                edge2 = location + 14.323944875*(dlat)
+        if edge2 > 90.0:
+            edge2 = edge1  # NOTE(review): collapsing to edge1 past the pole looks odd -- confirm intent
+        if edge1 < -90.0:
+            edge1 = edge2
+
+    if edge_flag :
+        return edge2,location,edge1
+    else :
+        return location
diff --git a/diagnostics/etc_composites/util/tracker/gcd_v4.c b/diagnostics/etc_composites/util/tracker/gcd_v4.c
new file mode 100644
index 000000000..a3047ab28
--- /dev/null
+++ b/diagnostics/etc_composites/util/tracker/gcd_v4.c
@@ -0,0 +1,3046 @@
+/* Generated by Cython 0.29.23 */
+
+/* BEGIN: Cython Metadata
+{
+ "distutils": {
+ "depends": [],
+ "name": "gcd_v4",
+ "sources": [
+ "gcd_v4.pyx"
+ ]
+ },
+ "module_name": "gcd_v4"
+}
+END: Cython Metadata */
+
+#ifndef PY_SSIZE_T_CLEAN
+#define PY_SSIZE_T_CLEAN
+#endif /* PY_SSIZE_T_CLEAN */
+#include "Python.h"
+#ifndef Py_PYTHON_H
+ #error Python headers needed to compile C extensions, please install development version of Python.
+#elif PY_VERSION_HEX < 0x02060000 || (0x03000000 <= PY_VERSION_HEX && PY_VERSION_HEX < 0x03030000)
+ #error Cython requires Python 2.6+ or Python 3.3+.
+#else
+#define CYTHON_ABI "0_29_23"
+#define CYTHON_HEX_VERSION 0x001D17F0
+#define CYTHON_FUTURE_DIVISION 0
+#include
+#ifndef offsetof
+ #define offsetof(type, member) ( (size_t) & ((type*)0) -> member )
+#endif
+#if !defined(WIN32) && !defined(MS_WINDOWS)
+ #ifndef __stdcall
+ #define __stdcall
+ #endif
+ #ifndef __cdecl
+ #define __cdecl
+ #endif
+ #ifndef __fastcall
+ #define __fastcall
+ #endif
+#endif
+#ifndef DL_IMPORT
+ #define DL_IMPORT(t) t
+#endif
+#ifndef DL_EXPORT
+ #define DL_EXPORT(t) t
+#endif
+#define __PYX_COMMA ,
+#ifndef HAVE_LONG_LONG
+ #if PY_VERSION_HEX >= 0x02070000
+ #define HAVE_LONG_LONG
+ #endif
+#endif
+#ifndef PY_LONG_LONG
+ #define PY_LONG_LONG LONG_LONG
+#endif
+#ifndef Py_HUGE_VAL
+ #define Py_HUGE_VAL HUGE_VAL
+#endif
+#ifdef PYPY_VERSION
+ #define CYTHON_COMPILING_IN_PYPY 1
+ #define CYTHON_COMPILING_IN_PYSTON 0
+ #define CYTHON_COMPILING_IN_CPYTHON 0
+ #undef CYTHON_USE_TYPE_SLOTS
+ #define CYTHON_USE_TYPE_SLOTS 0
+ #undef CYTHON_USE_PYTYPE_LOOKUP
+ #define CYTHON_USE_PYTYPE_LOOKUP 0
+ #if PY_VERSION_HEX < 0x03050000
+ #undef CYTHON_USE_ASYNC_SLOTS
+ #define CYTHON_USE_ASYNC_SLOTS 0
+ #elif !defined(CYTHON_USE_ASYNC_SLOTS)
+ #define CYTHON_USE_ASYNC_SLOTS 1
+ #endif
+ #undef CYTHON_USE_PYLIST_INTERNALS
+ #define CYTHON_USE_PYLIST_INTERNALS 0
+ #undef CYTHON_USE_UNICODE_INTERNALS
+ #define CYTHON_USE_UNICODE_INTERNALS 0
+ #undef CYTHON_USE_UNICODE_WRITER
+ #define CYTHON_USE_UNICODE_WRITER 0
+ #undef CYTHON_USE_PYLONG_INTERNALS
+ #define CYTHON_USE_PYLONG_INTERNALS 0
+ #undef CYTHON_AVOID_BORROWED_REFS
+ #define CYTHON_AVOID_BORROWED_REFS 1
+ #undef CYTHON_ASSUME_SAFE_MACROS
+ #define CYTHON_ASSUME_SAFE_MACROS 0
+ #undef CYTHON_UNPACK_METHODS
+ #define CYTHON_UNPACK_METHODS 0
+ #undef CYTHON_FAST_THREAD_STATE
+ #define CYTHON_FAST_THREAD_STATE 0
+ #undef CYTHON_FAST_PYCALL
+ #define CYTHON_FAST_PYCALL 0
+ #undef CYTHON_PEP489_MULTI_PHASE_INIT
+ #define CYTHON_PEP489_MULTI_PHASE_INIT 0
+ #undef CYTHON_USE_TP_FINALIZE
+ #define CYTHON_USE_TP_FINALIZE 0
+ #undef CYTHON_USE_DICT_VERSIONS
+ #define CYTHON_USE_DICT_VERSIONS 0
+ #undef CYTHON_USE_EXC_INFO_STACK
+ #define CYTHON_USE_EXC_INFO_STACK 0
+#elif defined(PYSTON_VERSION)
+ #define CYTHON_COMPILING_IN_PYPY 0
+ #define CYTHON_COMPILING_IN_PYSTON 1
+ #define CYTHON_COMPILING_IN_CPYTHON 0
+ #ifndef CYTHON_USE_TYPE_SLOTS
+ #define CYTHON_USE_TYPE_SLOTS 1
+ #endif
+ #undef CYTHON_USE_PYTYPE_LOOKUP
+ #define CYTHON_USE_PYTYPE_LOOKUP 0
+ #undef CYTHON_USE_ASYNC_SLOTS
+ #define CYTHON_USE_ASYNC_SLOTS 0
+ #undef CYTHON_USE_PYLIST_INTERNALS
+ #define CYTHON_USE_PYLIST_INTERNALS 0
+ #ifndef CYTHON_USE_UNICODE_INTERNALS
+ #define CYTHON_USE_UNICODE_INTERNALS 1
+ #endif
+ #undef CYTHON_USE_UNICODE_WRITER
+ #define CYTHON_USE_UNICODE_WRITER 0
+ #undef CYTHON_USE_PYLONG_INTERNALS
+ #define CYTHON_USE_PYLONG_INTERNALS 0
+ #ifndef CYTHON_AVOID_BORROWED_REFS
+ #define CYTHON_AVOID_BORROWED_REFS 0
+ #endif
+ #ifndef CYTHON_ASSUME_SAFE_MACROS
+ #define CYTHON_ASSUME_SAFE_MACROS 1
+ #endif
+ #ifndef CYTHON_UNPACK_METHODS
+ #define CYTHON_UNPACK_METHODS 1
+ #endif
+ #undef CYTHON_FAST_THREAD_STATE
+ #define CYTHON_FAST_THREAD_STATE 0
+ #undef CYTHON_FAST_PYCALL
+ #define CYTHON_FAST_PYCALL 0
+ #undef CYTHON_PEP489_MULTI_PHASE_INIT
+ #define CYTHON_PEP489_MULTI_PHASE_INIT 0
+ #undef CYTHON_USE_TP_FINALIZE
+ #define CYTHON_USE_TP_FINALIZE 0
+ #undef CYTHON_USE_DICT_VERSIONS
+ #define CYTHON_USE_DICT_VERSIONS 0
+ #undef CYTHON_USE_EXC_INFO_STACK
+ #define CYTHON_USE_EXC_INFO_STACK 0
+#else
+ #define CYTHON_COMPILING_IN_PYPY 0
+ #define CYTHON_COMPILING_IN_PYSTON 0
+ #define CYTHON_COMPILING_IN_CPYTHON 1
+ #ifndef CYTHON_USE_TYPE_SLOTS
+ #define CYTHON_USE_TYPE_SLOTS 1
+ #endif
+ #if PY_VERSION_HEX < 0x02070000
+ #undef CYTHON_USE_PYTYPE_LOOKUP
+ #define CYTHON_USE_PYTYPE_LOOKUP 0
+ #elif !defined(CYTHON_USE_PYTYPE_LOOKUP)
+ #define CYTHON_USE_PYTYPE_LOOKUP 1
+ #endif
+ #if PY_MAJOR_VERSION < 3
+ #undef CYTHON_USE_ASYNC_SLOTS
+ #define CYTHON_USE_ASYNC_SLOTS 0
+ #elif !defined(CYTHON_USE_ASYNC_SLOTS)
+ #define CYTHON_USE_ASYNC_SLOTS 1
+ #endif
+ #if PY_VERSION_HEX < 0x02070000
+ #undef CYTHON_USE_PYLONG_INTERNALS
+ #define CYTHON_USE_PYLONG_INTERNALS 0
+ #elif !defined(CYTHON_USE_PYLONG_INTERNALS)
+ #define CYTHON_USE_PYLONG_INTERNALS 1
+ #endif
+ #ifndef CYTHON_USE_PYLIST_INTERNALS
+ #define CYTHON_USE_PYLIST_INTERNALS 1
+ #endif
+ #ifndef CYTHON_USE_UNICODE_INTERNALS
+ #define CYTHON_USE_UNICODE_INTERNALS 1
+ #endif
+ #if PY_VERSION_HEX < 0x030300F0
+ #undef CYTHON_USE_UNICODE_WRITER
+ #define CYTHON_USE_UNICODE_WRITER 0
+ #elif !defined(CYTHON_USE_UNICODE_WRITER)
+ #define CYTHON_USE_UNICODE_WRITER 1
+ #endif
+ #ifndef CYTHON_AVOID_BORROWED_REFS
+ #define CYTHON_AVOID_BORROWED_REFS 0
+ #endif
+ #ifndef CYTHON_ASSUME_SAFE_MACROS
+ #define CYTHON_ASSUME_SAFE_MACROS 1
+ #endif
+ #ifndef CYTHON_UNPACK_METHODS
+ #define CYTHON_UNPACK_METHODS 1
+ #endif
+ #ifndef CYTHON_FAST_THREAD_STATE
+ #define CYTHON_FAST_THREAD_STATE 1
+ #endif
+ #ifndef CYTHON_FAST_PYCALL
+ #define CYTHON_FAST_PYCALL 1
+ #endif
+ #ifndef CYTHON_PEP489_MULTI_PHASE_INIT
+ #define CYTHON_PEP489_MULTI_PHASE_INIT (PY_VERSION_HEX >= 0x03050000)
+ #endif
+ #ifndef CYTHON_USE_TP_FINALIZE
+ #define CYTHON_USE_TP_FINALIZE (PY_VERSION_HEX >= 0x030400a1)
+ #endif
+ #ifndef CYTHON_USE_DICT_VERSIONS
+ #define CYTHON_USE_DICT_VERSIONS (PY_VERSION_HEX >= 0x030600B1)
+ #endif
+ #ifndef CYTHON_USE_EXC_INFO_STACK
+ #define CYTHON_USE_EXC_INFO_STACK (PY_VERSION_HEX >= 0x030700A3)
+ #endif
+#endif
+#if !defined(CYTHON_FAST_PYCCALL)
+#define CYTHON_FAST_PYCCALL (CYTHON_FAST_PYCALL && PY_VERSION_HEX >= 0x030600B1)
+#endif
+#if CYTHON_USE_PYLONG_INTERNALS
+ #include "longintrepr.h"
+ #undef SHIFT
+ #undef BASE
+ #undef MASK
+ #ifdef SIZEOF_VOID_P
+ enum { __pyx_check_sizeof_voidp = 1 / (int)(SIZEOF_VOID_P == sizeof(void*)) };
+ #endif
+#endif
+#ifndef __has_attribute
+ #define __has_attribute(x) 0
+#endif
+#ifndef __has_cpp_attribute
+ #define __has_cpp_attribute(x) 0
+#endif
+#ifndef CYTHON_RESTRICT
+ #if defined(__GNUC__)
+ #define CYTHON_RESTRICT __restrict__
+ #elif defined(_MSC_VER) && _MSC_VER >= 1400
+ #define CYTHON_RESTRICT __restrict
+ #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L
+ #define CYTHON_RESTRICT restrict
+ #else
+ #define CYTHON_RESTRICT
+ #endif
+#endif
+#ifndef CYTHON_UNUSED
+# if defined(__GNUC__)
+# if !(defined(__cplusplus)) || (__GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ >= 4))
+# define CYTHON_UNUSED __attribute__ ((__unused__))
+# else
+# define CYTHON_UNUSED
+# endif
+# elif defined(__ICC) || (defined(__INTEL_COMPILER) && !defined(_MSC_VER))
+# define CYTHON_UNUSED __attribute__ ((__unused__))
+# else
+# define CYTHON_UNUSED
+# endif
+#endif
+#ifndef CYTHON_MAYBE_UNUSED_VAR
+# if defined(__cplusplus)
+ template void CYTHON_MAYBE_UNUSED_VAR( const T& ) { }
+# else
+# define CYTHON_MAYBE_UNUSED_VAR(x) (void)(x)
+# endif
+#endif
+#ifndef CYTHON_NCP_UNUSED
+# if CYTHON_COMPILING_IN_CPYTHON
+# define CYTHON_NCP_UNUSED
+# else
+# define CYTHON_NCP_UNUSED CYTHON_UNUSED
+# endif
+#endif
+#define __Pyx_void_to_None(void_result) ((void)(void_result), Py_INCREF(Py_None), Py_None)
+#ifdef _MSC_VER
+ #ifndef _MSC_STDINT_H_
+ #if _MSC_VER < 1300
+ typedef unsigned char uint8_t;
+ typedef unsigned int uint32_t;
+ #else
+ typedef unsigned __int8 uint8_t;
+ typedef unsigned __int32 uint32_t;
+ #endif
+ #endif
+#else
+ #include
+#endif
+#ifndef CYTHON_FALLTHROUGH
+ #if defined(__cplusplus) && __cplusplus >= 201103L
+ #if __has_cpp_attribute(fallthrough)
+ #define CYTHON_FALLTHROUGH [[fallthrough]]
+ #elif __has_cpp_attribute(clang::fallthrough)
+ #define CYTHON_FALLTHROUGH [[clang::fallthrough]]
+ #elif __has_cpp_attribute(gnu::fallthrough)
+ #define CYTHON_FALLTHROUGH [[gnu::fallthrough]]
+ #endif
+ #endif
+ #ifndef CYTHON_FALLTHROUGH
+ #if __has_attribute(fallthrough)
+ #define CYTHON_FALLTHROUGH __attribute__((fallthrough))
+ #else
+ #define CYTHON_FALLTHROUGH
+ #endif
+ #endif
+ #if defined(__clang__ ) && defined(__apple_build_version__)
+ #if __apple_build_version__ < 7000000
+ #undef CYTHON_FALLTHROUGH
+ #define CYTHON_FALLTHROUGH
+ #endif
+ #endif
+#endif
+
+#ifndef CYTHON_INLINE
+ #if defined(__clang__)
+ #define CYTHON_INLINE __inline__ __attribute__ ((__unused__))
+ #elif defined(__GNUC__)
+ #define CYTHON_INLINE __inline__
+ #elif defined(_MSC_VER)
+ #define CYTHON_INLINE __inline
+ #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L
+ #define CYTHON_INLINE inline
+ #else
+ #define CYTHON_INLINE
+ #endif
+#endif
+
+#if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX < 0x02070600 && !defined(Py_OptimizeFlag)
+ #define Py_OptimizeFlag 0
+#endif
+#define __PYX_BUILD_PY_SSIZE_T "n"
+#define CYTHON_FORMAT_SSIZE_T "z"
+#if PY_MAJOR_VERSION < 3
+ #define __Pyx_BUILTIN_MODULE_NAME "__builtin__"
+ #define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\
+ PyCode_New(a+k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)
+ #define __Pyx_DefaultClassType PyClass_Type
+#else
+ #define __Pyx_BUILTIN_MODULE_NAME "builtins"
+#if PY_VERSION_HEX >= 0x030800A4 && PY_VERSION_HEX < 0x030800B2
+ #define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\
+ PyCode_New(a, 0, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)
+#else
+ #define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\
+ PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)
+#endif
+ #define __Pyx_DefaultClassType PyType_Type
+#endif
+#ifndef Py_TPFLAGS_CHECKTYPES
+ #define Py_TPFLAGS_CHECKTYPES 0
+#endif
+#ifndef Py_TPFLAGS_HAVE_INDEX
+ #define Py_TPFLAGS_HAVE_INDEX 0
+#endif
+#ifndef Py_TPFLAGS_HAVE_NEWBUFFER
+ #define Py_TPFLAGS_HAVE_NEWBUFFER 0
+#endif
+#ifndef Py_TPFLAGS_HAVE_FINALIZE
+ #define Py_TPFLAGS_HAVE_FINALIZE 0
+#endif
+#ifndef METH_STACKLESS
+ #define METH_STACKLESS 0
+#endif
+#if PY_VERSION_HEX <= 0x030700A3 || !defined(METH_FASTCALL)
+ #ifndef METH_FASTCALL
+ #define METH_FASTCALL 0x80
+ #endif
+ typedef PyObject *(*__Pyx_PyCFunctionFast) (PyObject *self, PyObject *const *args, Py_ssize_t nargs);
+ typedef PyObject *(*__Pyx_PyCFunctionFastWithKeywords) (PyObject *self, PyObject *const *args,
+ Py_ssize_t nargs, PyObject *kwnames);
+#else
+ #define __Pyx_PyCFunctionFast _PyCFunctionFast
+ #define __Pyx_PyCFunctionFastWithKeywords _PyCFunctionFastWithKeywords
+#endif
+#if CYTHON_FAST_PYCCALL
+#define __Pyx_PyFastCFunction_Check(func)\
+ ((PyCFunction_Check(func) && (METH_FASTCALL == (PyCFunction_GET_FLAGS(func) & ~(METH_CLASS | METH_STATIC | METH_COEXIST | METH_KEYWORDS | METH_STACKLESS)))))
+#else
+#define __Pyx_PyFastCFunction_Check(func) 0
+#endif
+#if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Malloc)
+ #define PyObject_Malloc(s) PyMem_Malloc(s)
+ #define PyObject_Free(p) PyMem_Free(p)
+ #define PyObject_Realloc(p) PyMem_Realloc(p)
+#endif
+#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX < 0x030400A1
+ #define PyMem_RawMalloc(n) PyMem_Malloc(n)
+ #define PyMem_RawRealloc(p, n) PyMem_Realloc(p, n)
+ #define PyMem_RawFree(p) PyMem_Free(p)
+#endif
+#if CYTHON_COMPILING_IN_PYSTON
+ #define __Pyx_PyCode_HasFreeVars(co) PyCode_HasFreeVars(co)
+ #define __Pyx_PyFrame_SetLineNumber(frame, lineno) PyFrame_SetLineNumber(frame, lineno)
+#else
+ #define __Pyx_PyCode_HasFreeVars(co) (PyCode_GetNumFree(co) > 0)
+ #define __Pyx_PyFrame_SetLineNumber(frame, lineno) (frame)->f_lineno = (lineno)
+#endif
+#if !CYTHON_FAST_THREAD_STATE || PY_VERSION_HEX < 0x02070000
+ #define __Pyx_PyThreadState_Current PyThreadState_GET()
+#elif PY_VERSION_HEX >= 0x03060000
+ #define __Pyx_PyThreadState_Current _PyThreadState_UncheckedGet()
+#elif PY_VERSION_HEX >= 0x03000000
+ #define __Pyx_PyThreadState_Current PyThreadState_GET()
+#else
+ #define __Pyx_PyThreadState_Current _PyThreadState_Current
+#endif
+#if PY_VERSION_HEX < 0x030700A2 && !defined(PyThread_tss_create) && !defined(Py_tss_NEEDS_INIT)
+#include "pythread.h"
+#define Py_tss_NEEDS_INIT 0
+typedef int Py_tss_t;
+static CYTHON_INLINE int PyThread_tss_create(Py_tss_t *key) {
+ *key = PyThread_create_key();
+ return 0;
+}
+static CYTHON_INLINE Py_tss_t * PyThread_tss_alloc(void) {
+ Py_tss_t *key = (Py_tss_t *)PyObject_Malloc(sizeof(Py_tss_t));
+ *key = Py_tss_NEEDS_INIT;
+ return key;
+}
+static CYTHON_INLINE void PyThread_tss_free(Py_tss_t *key) {
+ PyObject_Free(key);
+}
+static CYTHON_INLINE int PyThread_tss_is_created(Py_tss_t *key) {
+ return *key != Py_tss_NEEDS_INIT;
+}
+static CYTHON_INLINE void PyThread_tss_delete(Py_tss_t *key) {
+ PyThread_delete_key(*key);
+ *key = Py_tss_NEEDS_INIT;
+}
+static CYTHON_INLINE int PyThread_tss_set(Py_tss_t *key, void *value) {
+ return PyThread_set_key_value(*key, value);
+}
+static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) {
+ return PyThread_get_key_value(*key);
+}
+#endif
+#if CYTHON_COMPILING_IN_CPYTHON || defined(_PyDict_NewPresized)
+#define __Pyx_PyDict_NewPresized(n) ((n <= 8) ? PyDict_New() : _PyDict_NewPresized(n))
+#else
+#define __Pyx_PyDict_NewPresized(n) PyDict_New()
+#endif
+#if PY_MAJOR_VERSION >= 3 || CYTHON_FUTURE_DIVISION
+ #define __Pyx_PyNumber_Divide(x,y) PyNumber_TrueDivide(x,y)
+ #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceTrueDivide(x,y)
+#else
+ #define __Pyx_PyNumber_Divide(x,y) PyNumber_Divide(x,y)
+ #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceDivide(x,y)
+#endif
+#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030500A1 && CYTHON_USE_UNICODE_INTERNALS
+#define __Pyx_PyDict_GetItemStr(dict, name) _PyDict_GetItem_KnownHash(dict, name, ((PyASCIIObject *) name)->hash)
+#else
+#define __Pyx_PyDict_GetItemStr(dict, name) PyDict_GetItem(dict, name)
+#endif
+#if PY_VERSION_HEX > 0x03030000 && defined(PyUnicode_KIND)
+ #define CYTHON_PEP393_ENABLED 1
+ #define __Pyx_PyUnicode_READY(op) (likely(PyUnicode_IS_READY(op)) ?\
+ 0 : _PyUnicode_Ready((PyObject *)(op)))
+ #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_LENGTH(u)
+ #define __Pyx_PyUnicode_READ_CHAR(u, i) PyUnicode_READ_CHAR(u, i)
+ #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) PyUnicode_MAX_CHAR_VALUE(u)
+ #define __Pyx_PyUnicode_KIND(u) PyUnicode_KIND(u)
+ #define __Pyx_PyUnicode_DATA(u) PyUnicode_DATA(u)
+ #define __Pyx_PyUnicode_READ(k, d, i) PyUnicode_READ(k, d, i)
+ #define __Pyx_PyUnicode_WRITE(k, d, i, ch) PyUnicode_WRITE(k, d, i, ch)
+ #if defined(PyUnicode_IS_READY) && defined(PyUnicode_GET_SIZE)
+ #define __Pyx_PyUnicode_IS_TRUE(u) (0 != (likely(PyUnicode_IS_READY(u)) ? PyUnicode_GET_LENGTH(u) : PyUnicode_GET_SIZE(u)))
+ #else
+ #define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GET_LENGTH(u))
+ #endif
+#else
+ #define CYTHON_PEP393_ENABLED 0
+ #define PyUnicode_1BYTE_KIND 1
+ #define PyUnicode_2BYTE_KIND 2
+ #define PyUnicode_4BYTE_KIND 4
+ #define __Pyx_PyUnicode_READY(op) (0)
+ #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_SIZE(u)
+ #define __Pyx_PyUnicode_READ_CHAR(u, i) ((Py_UCS4)(PyUnicode_AS_UNICODE(u)[i]))
+ #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) ((sizeof(Py_UNICODE) == 2) ? 65535 : 1114111)
+ #define __Pyx_PyUnicode_KIND(u) (sizeof(Py_UNICODE))
+ #define __Pyx_PyUnicode_DATA(u) ((void*)PyUnicode_AS_UNICODE(u))
+ #define __Pyx_PyUnicode_READ(k, d, i) ((void)(k), (Py_UCS4)(((Py_UNICODE*)d)[i]))
+ #define __Pyx_PyUnicode_WRITE(k, d, i, ch) (((void)(k)), ((Py_UNICODE*)d)[i] = ch)
+ #define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GET_SIZE(u))
+#endif
+#if CYTHON_COMPILING_IN_PYPY
+ #define __Pyx_PyUnicode_Concat(a, b) PyNumber_Add(a, b)
+ #define __Pyx_PyUnicode_ConcatSafe(a, b) PyNumber_Add(a, b)
+#else
+ #define __Pyx_PyUnicode_Concat(a, b) PyUnicode_Concat(a, b)
+ #define __Pyx_PyUnicode_ConcatSafe(a, b) ((unlikely((a) == Py_None) || unlikely((b) == Py_None)) ?\
+ PyNumber_Add(a, b) : __Pyx_PyUnicode_Concat(a, b))
+#endif
+#if CYTHON_COMPILING_IN_PYPY && !defined(PyUnicode_Contains)
+ #define PyUnicode_Contains(u, s) PySequence_Contains(u, s)
+#endif
+#if CYTHON_COMPILING_IN_PYPY && !defined(PyByteArray_Check)
+ #define PyByteArray_Check(obj) PyObject_TypeCheck(obj, &PyByteArray_Type)
+#endif
+#if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Format)
+ #define PyObject_Format(obj, fmt) PyObject_CallMethod(obj, "__format__", "O", fmt)
+#endif
+#define __Pyx_PyString_FormatSafe(a, b) ((unlikely((a) == Py_None || (PyString_Check(b) && !PyString_CheckExact(b)))) ? PyNumber_Remainder(a, b) : __Pyx_PyString_Format(a, b))
+#define __Pyx_PyUnicode_FormatSafe(a, b) ((unlikely((a) == Py_None || (PyUnicode_Check(b) && !PyUnicode_CheckExact(b)))) ? PyNumber_Remainder(a, b) : PyUnicode_Format(a, b))
+#if PY_MAJOR_VERSION >= 3
+ #define __Pyx_PyString_Format(a, b) PyUnicode_Format(a, b)
+#else
+ #define __Pyx_PyString_Format(a, b) PyString_Format(a, b)
+#endif
+#if PY_MAJOR_VERSION < 3 && !defined(PyObject_ASCII)
+ #define PyObject_ASCII(o) PyObject_Repr(o)
+#endif
+#if PY_MAJOR_VERSION >= 3
+ #define PyBaseString_Type PyUnicode_Type
+ #define PyStringObject PyUnicodeObject
+ #define PyString_Type PyUnicode_Type
+ #define PyString_Check PyUnicode_Check
+ #define PyString_CheckExact PyUnicode_CheckExact
+#ifndef PyObject_Unicode
+ #define PyObject_Unicode PyObject_Str
+#endif
+#endif
+#if PY_MAJOR_VERSION >= 3
+ #define __Pyx_PyBaseString_Check(obj) PyUnicode_Check(obj)
+ #define __Pyx_PyBaseString_CheckExact(obj) PyUnicode_CheckExact(obj)
+#else
+ #define __Pyx_PyBaseString_Check(obj) (PyString_Check(obj) || PyUnicode_Check(obj))
+ #define __Pyx_PyBaseString_CheckExact(obj) (PyString_CheckExact(obj) || PyUnicode_CheckExact(obj))
+#endif
+#ifndef PySet_CheckExact
+ #define PySet_CheckExact(obj) (Py_TYPE(obj) == &PySet_Type)
+#endif
+#if PY_VERSION_HEX >= 0x030900A4
+ #define __Pyx_SET_REFCNT(obj, refcnt) Py_SET_REFCNT(obj, refcnt)
+ #define __Pyx_SET_SIZE(obj, size) Py_SET_SIZE(obj, size)
+#else
+ #define __Pyx_SET_REFCNT(obj, refcnt) Py_REFCNT(obj) = (refcnt)
+ #define __Pyx_SET_SIZE(obj, size) Py_SIZE(obj) = (size)
+#endif
+#if CYTHON_ASSUME_SAFE_MACROS
+ #define __Pyx_PySequence_SIZE(seq) Py_SIZE(seq)
+#else
+ #define __Pyx_PySequence_SIZE(seq) PySequence_Size(seq)
+#endif
+#if PY_MAJOR_VERSION >= 3
+ #define PyIntObject PyLongObject
+ #define PyInt_Type PyLong_Type
+ #define PyInt_Check(op) PyLong_Check(op)
+ #define PyInt_CheckExact(op) PyLong_CheckExact(op)
+ #define PyInt_FromString PyLong_FromString
+ #define PyInt_FromUnicode PyLong_FromUnicode
+ #define PyInt_FromLong PyLong_FromLong
+ #define PyInt_FromSize_t PyLong_FromSize_t
+ #define PyInt_FromSsize_t PyLong_FromSsize_t
+ #define PyInt_AsLong PyLong_AsLong
+ #define PyInt_AS_LONG PyLong_AS_LONG
+ #define PyInt_AsSsize_t PyLong_AsSsize_t
+ #define PyInt_AsUnsignedLongMask PyLong_AsUnsignedLongMask
+ #define PyInt_AsUnsignedLongLongMask PyLong_AsUnsignedLongLongMask
+ #define PyNumber_Int PyNumber_Long
+#endif
+#if PY_MAJOR_VERSION >= 3
+ #define PyBoolObject PyLongObject
+#endif
+#if PY_MAJOR_VERSION >= 3 && CYTHON_COMPILING_IN_PYPY
+ #ifndef PyUnicode_InternFromString
+ #define PyUnicode_InternFromString(s) PyUnicode_FromString(s)
+ #endif
+#endif
+#if PY_VERSION_HEX < 0x030200A4
+ typedef long Py_hash_t;
+ #define __Pyx_PyInt_FromHash_t PyInt_FromLong
+ #define __Pyx_PyInt_AsHash_t PyInt_AsLong
+#else
+ #define __Pyx_PyInt_FromHash_t PyInt_FromSsize_t
+ #define __Pyx_PyInt_AsHash_t PyInt_AsSsize_t
+#endif
+#if PY_MAJOR_VERSION >= 3
+ #define __Pyx_PyMethod_New(func, self, klass) ((self) ? ((void)(klass), PyMethod_New(func, self)) : __Pyx_NewRef(func))
+#else
+ #define __Pyx_PyMethod_New(func, self, klass) PyMethod_New(func, self, klass)
+#endif
+#if CYTHON_USE_ASYNC_SLOTS
+ #if PY_VERSION_HEX >= 0x030500B1
+ #define __Pyx_PyAsyncMethodsStruct PyAsyncMethods
+ #define __Pyx_PyType_AsAsync(obj) (Py_TYPE(obj)->tp_as_async)
+ #else
+ #define __Pyx_PyType_AsAsync(obj) ((__Pyx_PyAsyncMethodsStruct*) (Py_TYPE(obj)->tp_reserved))
+ #endif
+#else
+ #define __Pyx_PyType_AsAsync(obj) NULL
+#endif
+#ifndef __Pyx_PyAsyncMethodsStruct
+ typedef struct {
+ unaryfunc am_await;
+ unaryfunc am_aiter;
+ unaryfunc am_anext;
+ } __Pyx_PyAsyncMethodsStruct;
+#endif
+
+#if defined(WIN32) || defined(MS_WINDOWS)
+ #define _USE_MATH_DEFINES
+#endif
+#include
+#ifdef NAN
+#define __PYX_NAN() ((float) NAN)
+#else
+static CYTHON_INLINE float __PYX_NAN() {
+ float value;
+ memset(&value, 0xFF, sizeof(value));
+ return value;
+}
+#endif
+#if defined(__CYGWIN__) && defined(_LDBL_EQ_DBL)
+#define __Pyx_truncl trunc
+#else
+#define __Pyx_truncl truncl
+#endif
+
+#define __PYX_MARK_ERR_POS(f_index, lineno) \
+ { __pyx_filename = __pyx_f[f_index]; (void)__pyx_filename; __pyx_lineno = lineno; (void)__pyx_lineno; __pyx_clineno = __LINE__; (void)__pyx_clineno; }
+#define __PYX_ERR(f_index, lineno, Ln_error) \
+ { __PYX_MARK_ERR_POS(f_index, lineno) goto Ln_error; }
+
+#ifndef __PYX_EXTERN_C
+ #ifdef __cplusplus
+ #define __PYX_EXTERN_C extern "C"
+ #else
+ #define __PYX_EXTERN_C extern
+ #endif
+#endif
+
+#define __PYX_HAVE__gcd_v4
+#define __PYX_HAVE_API__gcd_v4
+/* Early includes */
+#include "math.h"
+#ifdef _OPENMP
+#include
+#endif /* _OPENMP */
+
+#if defined(PYREX_WITHOUT_ASSERTIONS) && !defined(CYTHON_WITHOUT_ASSERTIONS)
+#define CYTHON_WITHOUT_ASSERTIONS
+#endif
+
+typedef struct {PyObject **p; const char *s; const Py_ssize_t n; const char* encoding;
+ const char is_unicode; const char is_str; const char intern; } __Pyx_StringTabEntry;
+
+#define __PYX_DEFAULT_STRING_ENCODING_IS_ASCII 0
+#define __PYX_DEFAULT_STRING_ENCODING_IS_UTF8 0
+#define __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT (PY_MAJOR_VERSION >= 3 && __PYX_DEFAULT_STRING_ENCODING_IS_UTF8)
+#define __PYX_DEFAULT_STRING_ENCODING ""
+#define __Pyx_PyObject_FromString __Pyx_PyBytes_FromString
+#define __Pyx_PyObject_FromStringAndSize __Pyx_PyBytes_FromStringAndSize
+#define __Pyx_uchar_cast(c) ((unsigned char)c)
+#define __Pyx_long_cast(x) ((long)x)
+#define __Pyx_fits_Py_ssize_t(v, type, is_signed) (\
+ (sizeof(type) < sizeof(Py_ssize_t)) ||\
+ (sizeof(type) > sizeof(Py_ssize_t) &&\
+ likely(v < (type)PY_SSIZE_T_MAX ||\
+ v == (type)PY_SSIZE_T_MAX) &&\
+ (!is_signed || likely(v > (type)PY_SSIZE_T_MIN ||\
+ v == (type)PY_SSIZE_T_MIN))) ||\
+ (sizeof(type) == sizeof(Py_ssize_t) &&\
+ (is_signed || likely(v < (type)PY_SSIZE_T_MAX ||\
+ v == (type)PY_SSIZE_T_MAX))) )
+static CYTHON_INLINE int __Pyx_is_valid_index(Py_ssize_t i, Py_ssize_t limit) {
+ return (size_t) i < (size_t) limit;
+}
+#if defined (__cplusplus) && __cplusplus >= 201103L
+ #include
+ #define __Pyx_sst_abs(value) std::abs(value)
+#elif SIZEOF_INT >= SIZEOF_SIZE_T
+ #define __Pyx_sst_abs(value) abs(value)
+#elif SIZEOF_LONG >= SIZEOF_SIZE_T
+ #define __Pyx_sst_abs(value) labs(value)
+#elif defined (_MSC_VER)
+ #define __Pyx_sst_abs(value) ((Py_ssize_t)_abs64(value))
+#elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L
+ #define __Pyx_sst_abs(value) llabs(value)
+#elif defined (__GNUC__)
+ #define __Pyx_sst_abs(value) __builtin_llabs(value)
+#else
+ #define __Pyx_sst_abs(value) ((value<0) ? -value : value)
+#endif
+static CYTHON_INLINE const char* __Pyx_PyObject_AsString(PyObject*);
+static CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject*, Py_ssize_t* length);
+#define __Pyx_PyByteArray_FromString(s) PyByteArray_FromStringAndSize((const char*)s, strlen((const char*)s))
+#define __Pyx_PyByteArray_FromStringAndSize(s, l) PyByteArray_FromStringAndSize((const char*)s, l)
+#define __Pyx_PyBytes_FromString PyBytes_FromString
+#define __Pyx_PyBytes_FromStringAndSize PyBytes_FromStringAndSize
+static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char*);
+#if PY_MAJOR_VERSION < 3
+ #define __Pyx_PyStr_FromString __Pyx_PyBytes_FromString
+ #define __Pyx_PyStr_FromStringAndSize __Pyx_PyBytes_FromStringAndSize
+#else
+ #define __Pyx_PyStr_FromString __Pyx_PyUnicode_FromString
+ #define __Pyx_PyStr_FromStringAndSize __Pyx_PyUnicode_FromStringAndSize
+#endif
+#define __Pyx_PyBytes_AsWritableString(s) ((char*) PyBytes_AS_STRING(s))
+#define __Pyx_PyBytes_AsWritableSString(s) ((signed char*) PyBytes_AS_STRING(s))
+#define __Pyx_PyBytes_AsWritableUString(s) ((unsigned char*) PyBytes_AS_STRING(s))
+#define __Pyx_PyBytes_AsString(s) ((const char*) PyBytes_AS_STRING(s))
+#define __Pyx_PyBytes_AsSString(s) ((const signed char*) PyBytes_AS_STRING(s))
+#define __Pyx_PyBytes_AsUString(s) ((const unsigned char*) PyBytes_AS_STRING(s))
+#define __Pyx_PyObject_AsWritableString(s) ((char*) __Pyx_PyObject_AsString(s))
+#define __Pyx_PyObject_AsWritableSString(s) ((signed char*) __Pyx_PyObject_AsString(s))
+#define __Pyx_PyObject_AsWritableUString(s) ((unsigned char*) __Pyx_PyObject_AsString(s))
+#define __Pyx_PyObject_AsSString(s) ((const signed char*) __Pyx_PyObject_AsString(s))
+#define __Pyx_PyObject_AsUString(s) ((const unsigned char*) __Pyx_PyObject_AsString(s))
+#define __Pyx_PyObject_FromCString(s) __Pyx_PyObject_FromString((const char*)s)
+#define __Pyx_PyBytes_FromCString(s) __Pyx_PyBytes_FromString((const char*)s)
+#define __Pyx_PyByteArray_FromCString(s) __Pyx_PyByteArray_FromString((const char*)s)
+#define __Pyx_PyStr_FromCString(s) __Pyx_PyStr_FromString((const char*)s)
+#define __Pyx_PyUnicode_FromCString(s) __Pyx_PyUnicode_FromString((const char*)s)
+static CYTHON_INLINE size_t __Pyx_Py_UNICODE_strlen(const Py_UNICODE *u) {
+ const Py_UNICODE *u_end = u;
+ while (*u_end++) ;
+ return (size_t)(u_end - u - 1);
+}
+#define __Pyx_PyUnicode_FromUnicode(u) PyUnicode_FromUnicode(u, __Pyx_Py_UNICODE_strlen(u))
+#define __Pyx_PyUnicode_FromUnicodeAndLength PyUnicode_FromUnicode
+#define __Pyx_PyUnicode_AsUnicode PyUnicode_AsUnicode
+#define __Pyx_NewRef(obj) (Py_INCREF(obj), obj)
+#define __Pyx_Owned_Py_None(b) __Pyx_NewRef(Py_None)
+static CYTHON_INLINE PyObject * __Pyx_PyBool_FromLong(long b);
+static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject*);
+static CYTHON_INLINE int __Pyx_PyObject_IsTrueAndDecref(PyObject*);
+static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x);
+#define __Pyx_PySequence_Tuple(obj)\
+ (likely(PyTuple_CheckExact(obj)) ? __Pyx_NewRef(obj) : PySequence_Tuple(obj))
+static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject*);
+static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t);
+#if CYTHON_ASSUME_SAFE_MACROS
+#define __pyx_PyFloat_AsDouble(x) (PyFloat_CheckExact(x) ? PyFloat_AS_DOUBLE(x) : PyFloat_AsDouble(x))
+#else
+#define __pyx_PyFloat_AsDouble(x) PyFloat_AsDouble(x)
+#endif
+#define __pyx_PyFloat_AsFloat(x) ((float) __pyx_PyFloat_AsDouble(x))
+#if PY_MAJOR_VERSION >= 3
+#define __Pyx_PyNumber_Int(x) (PyLong_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Long(x))
+#else
+#define __Pyx_PyNumber_Int(x) (PyInt_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Int(x))
+#endif
+#define __Pyx_PyNumber_Float(x) (PyFloat_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Float(x))
+#if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII
+static int __Pyx_sys_getdefaultencoding_not_ascii;
+static int __Pyx_init_sys_getdefaultencoding_params(void) {
+ PyObject* sys;
+ PyObject* default_encoding = NULL;
+ PyObject* ascii_chars_u = NULL;
+ PyObject* ascii_chars_b = NULL;
+ const char* default_encoding_c;
+ sys = PyImport_ImportModule("sys");
+ if (!sys) goto bad;
+ default_encoding = PyObject_CallMethod(sys, (char*) "getdefaultencoding", NULL);
+ Py_DECREF(sys);
+ if (!default_encoding) goto bad;
+ default_encoding_c = PyBytes_AsString(default_encoding);
+ if (!default_encoding_c) goto bad;
+ if (strcmp(default_encoding_c, "ascii") == 0) {
+ __Pyx_sys_getdefaultencoding_not_ascii = 0;
+ } else {
+ char ascii_chars[128];
+ int c;
+ for (c = 0; c < 128; c++) {
+ ascii_chars[c] = c;
+ }
+ __Pyx_sys_getdefaultencoding_not_ascii = 1;
+ ascii_chars_u = PyUnicode_DecodeASCII(ascii_chars, 128, NULL);
+ if (!ascii_chars_u) goto bad;
+ ascii_chars_b = PyUnicode_AsEncodedString(ascii_chars_u, default_encoding_c, NULL);
+ if (!ascii_chars_b || !PyBytes_Check(ascii_chars_b) || memcmp(ascii_chars, PyBytes_AS_STRING(ascii_chars_b), 128) != 0) {
+ PyErr_Format(
+ PyExc_ValueError,
+ "This module compiled with c_string_encoding=ascii, but default encoding '%.200s' is not a superset of ascii.",
+ default_encoding_c);
+ goto bad;
+ }
+ Py_DECREF(ascii_chars_u);
+ Py_DECREF(ascii_chars_b);
+ }
+ Py_DECREF(default_encoding);
+ return 0;
+bad:
+ Py_XDECREF(default_encoding);
+ Py_XDECREF(ascii_chars_u);
+ Py_XDECREF(ascii_chars_b);
+ return -1;
+}
+#endif
+#if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT && PY_MAJOR_VERSION >= 3
+#define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_DecodeUTF8(c_str, size, NULL)
+#else
+#define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_Decode(c_str, size, __PYX_DEFAULT_STRING_ENCODING, NULL)
+#if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT
+static char* __PYX_DEFAULT_STRING_ENCODING;
+static int __Pyx_init_sys_getdefaultencoding_params(void) {
+ PyObject* sys;
+ PyObject* default_encoding = NULL;
+ char* default_encoding_c;
+ sys = PyImport_ImportModule("sys");
+ if (!sys) goto bad;
+ default_encoding = PyObject_CallMethod(sys, (char*) (const char*) "getdefaultencoding", NULL);
+ Py_DECREF(sys);
+ if (!default_encoding) goto bad;
+ default_encoding_c = PyBytes_AsString(default_encoding);
+ if (!default_encoding_c) goto bad;
+ __PYX_DEFAULT_STRING_ENCODING = (char*) malloc(strlen(default_encoding_c) + 1);
+ if (!__PYX_DEFAULT_STRING_ENCODING) goto bad;
+ strcpy(__PYX_DEFAULT_STRING_ENCODING, default_encoding_c);
+ Py_DECREF(default_encoding);
+ return 0;
+bad:
+ Py_XDECREF(default_encoding);
+ return -1;
+}
+#endif
+#endif
+
+
+/* Test for GCC > 2.95 */
+#if defined(__GNUC__) && (__GNUC__ > 2 || (__GNUC__ == 2 && (__GNUC_MINOR__ > 95)))
+ #define likely(x) __builtin_expect(!!(x), 1)
+ #define unlikely(x) __builtin_expect(!!(x), 0)
+#else /* !__GNUC__ or GCC < 2.95 */
+ #define likely(x) (x)
+ #define unlikely(x) (x)
+#endif /* __GNUC__ */
+static CYTHON_INLINE void __Pyx_pretend_to_initialize(void* ptr) { (void)ptr; }
+
+static PyObject *__pyx_m = NULL;
+static PyObject *__pyx_d;
+static PyObject *__pyx_b;
+static PyObject *__pyx_cython_runtime = NULL;
+static PyObject *__pyx_empty_tuple;
+static PyObject *__pyx_empty_bytes;
+static PyObject *__pyx_empty_unicode;
+static int __pyx_lineno;
+static int __pyx_clineno = 0;
+static const char * __pyx_cfilenm= __FILE__;
+static const char *__pyx_filename;
+
+
+static const char *__pyx_f[] = {
+ "gcd_v4.pyx",
+};
+
+/*--- Type declarations ---*/
+
+/* --- Runtime support code (head) --- */
+/* Refnanny.proto */
+#ifndef CYTHON_REFNANNY
+ #define CYTHON_REFNANNY 0
+#endif
+#if CYTHON_REFNANNY
+ typedef struct {
+ void (*INCREF)(void*, PyObject*, int);
+ void (*DECREF)(void*, PyObject*, int);
+ void (*GOTREF)(void*, PyObject*, int);
+ void (*GIVEREF)(void*, PyObject*, int);
+ void* (*SetupContext)(const char*, int, const char*);
+ void (*FinishContext)(void**);
+ } __Pyx_RefNannyAPIStruct;
+ static __Pyx_RefNannyAPIStruct *__Pyx_RefNanny = NULL;
+ static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname);
+ #define __Pyx_RefNannyDeclarations void *__pyx_refnanny = NULL;
+#ifdef WITH_THREAD
+ #define __Pyx_RefNannySetupContext(name, acquire_gil)\
+ if (acquire_gil) {\
+ PyGILState_STATE __pyx_gilstate_save = PyGILState_Ensure();\
+ __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__);\
+ PyGILState_Release(__pyx_gilstate_save);\
+ } else {\
+ __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__);\
+ }
+#else
+ #define __Pyx_RefNannySetupContext(name, acquire_gil)\
+ __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__)
+#endif
+ #define __Pyx_RefNannyFinishContext()\
+ __Pyx_RefNanny->FinishContext(&__pyx_refnanny)
+ #define __Pyx_INCREF(r) __Pyx_RefNanny->INCREF(__pyx_refnanny, (PyObject *)(r), __LINE__)
+ #define __Pyx_DECREF(r) __Pyx_RefNanny->DECREF(__pyx_refnanny, (PyObject *)(r), __LINE__)
+ #define __Pyx_GOTREF(r) __Pyx_RefNanny->GOTREF(__pyx_refnanny, (PyObject *)(r), __LINE__)
+ #define __Pyx_GIVEREF(r) __Pyx_RefNanny->GIVEREF(__pyx_refnanny, (PyObject *)(r), __LINE__)
+ #define __Pyx_XINCREF(r) do { if((r) != NULL) {__Pyx_INCREF(r); }} while(0)
+ #define __Pyx_XDECREF(r) do { if((r) != NULL) {__Pyx_DECREF(r); }} while(0)
+ #define __Pyx_XGOTREF(r) do { if((r) != NULL) {__Pyx_GOTREF(r); }} while(0)
+ #define __Pyx_XGIVEREF(r) do { if((r) != NULL) {__Pyx_GIVEREF(r);}} while(0)
+#else
+ #define __Pyx_RefNannyDeclarations
+ #define __Pyx_RefNannySetupContext(name, acquire_gil)
+ #define __Pyx_RefNannyFinishContext()
+ #define __Pyx_INCREF(r) Py_INCREF(r)
+ #define __Pyx_DECREF(r) Py_DECREF(r)
+ #define __Pyx_GOTREF(r)
+ #define __Pyx_GIVEREF(r)
+ #define __Pyx_XINCREF(r) Py_XINCREF(r)
+ #define __Pyx_XDECREF(r) Py_XDECREF(r)
+ #define __Pyx_XGOTREF(r)
+ #define __Pyx_XGIVEREF(r)
+#endif
+#define __Pyx_XDECREF_SET(r, v) do {\
+ PyObject *tmp = (PyObject *) r;\
+ r = v; __Pyx_XDECREF(tmp);\
+ } while (0)
+#define __Pyx_DECREF_SET(r, v) do {\
+ PyObject *tmp = (PyObject *) r;\
+ r = v; __Pyx_DECREF(tmp);\
+ } while (0)
+#define __Pyx_CLEAR(r) do { PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);} while(0)
+#define __Pyx_XCLEAR(r) do { if((r) != NULL) {PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);}} while(0)
+
+/* RaiseArgTupleInvalid.proto */
+static void __Pyx_RaiseArgtupleInvalid(const char* func_name, int exact,
+ Py_ssize_t num_min, Py_ssize_t num_max, Py_ssize_t num_found);
+
+/* RaiseDoubleKeywords.proto */
+static void __Pyx_RaiseDoubleKeywordsError(const char* func_name, PyObject* kw_name);
+
+/* ParseKeywords.proto */
+static int __Pyx_ParseOptionalKeywords(PyObject *kwds, PyObject **argnames[],\
+ PyObject *kwds2, PyObject *values[], Py_ssize_t num_pos_args,\
+ const char* function_name);
+
+/* PyDictVersioning.proto */
+#if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_TYPE_SLOTS
+#define __PYX_DICT_VERSION_INIT ((PY_UINT64_T) -1)
+#define __PYX_GET_DICT_VERSION(dict) (((PyDictObject*)(dict))->ma_version_tag)
+#define __PYX_UPDATE_DICT_CACHE(dict, value, cache_var, version_var)\
+ (version_var) = __PYX_GET_DICT_VERSION(dict);\
+ (cache_var) = (value);
+#define __PYX_PY_DICT_LOOKUP_IF_MODIFIED(VAR, DICT, LOOKUP) {\
+ static PY_UINT64_T __pyx_dict_version = 0;\
+ static PyObject *__pyx_dict_cached_value = NULL;\
+ if (likely(__PYX_GET_DICT_VERSION(DICT) == __pyx_dict_version)) {\
+ (VAR) = __pyx_dict_cached_value;\
+ } else {\
+ (VAR) = __pyx_dict_cached_value = (LOOKUP);\
+ __pyx_dict_version = __PYX_GET_DICT_VERSION(DICT);\
+ }\
+}
+static CYTHON_INLINE PY_UINT64_T __Pyx_get_tp_dict_version(PyObject *obj);
+static CYTHON_INLINE PY_UINT64_T __Pyx_get_object_dict_version(PyObject *obj);
+static CYTHON_INLINE int __Pyx_object_dict_version_matches(PyObject* obj, PY_UINT64_T tp_dict_version, PY_UINT64_T obj_dict_version);
+#else
+#define __PYX_GET_DICT_VERSION(dict) (0)
+#define __PYX_UPDATE_DICT_CACHE(dict, value, cache_var, version_var)
+#define __PYX_PY_DICT_LOOKUP_IF_MODIFIED(VAR, DICT, LOOKUP) (VAR) = (LOOKUP);
+#endif
+
+/* PyObjectGetAttrStr.proto */
+#if CYTHON_USE_TYPE_SLOTS
+static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStr(PyObject* obj, PyObject* attr_name);
+#else
+#define __Pyx_PyObject_GetAttrStr(o,n) PyObject_GetAttr(o,n)
+#endif
+
+/* PyThreadStateGet.proto */
+#if CYTHON_FAST_THREAD_STATE
+#define __Pyx_PyThreadState_declare PyThreadState *__pyx_tstate;
+#define __Pyx_PyThreadState_assign __pyx_tstate = __Pyx_PyThreadState_Current;
+#define __Pyx_PyErr_Occurred() __pyx_tstate->curexc_type
+#else
+#define __Pyx_PyThreadState_declare
+#define __Pyx_PyThreadState_assign
+#define __Pyx_PyErr_Occurred() PyErr_Occurred()
+#endif
+
+/* PyErrFetchRestore.proto */
+#if CYTHON_FAST_THREAD_STATE
+#define __Pyx_PyErr_Clear() __Pyx_ErrRestore(NULL, NULL, NULL)
+#define __Pyx_ErrRestoreWithState(type, value, tb) __Pyx_ErrRestoreInState(PyThreadState_GET(), type, value, tb)
+#define __Pyx_ErrFetchWithState(type, value, tb) __Pyx_ErrFetchInState(PyThreadState_GET(), type, value, tb)
+#define __Pyx_ErrRestore(type, value, tb) __Pyx_ErrRestoreInState(__pyx_tstate, type, value, tb)
+#define __Pyx_ErrFetch(type, value, tb) __Pyx_ErrFetchInState(__pyx_tstate, type, value, tb)
+static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb);
+static CYTHON_INLINE void __Pyx_ErrFetchInState(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb);
+#if CYTHON_COMPILING_IN_CPYTHON
+#define __Pyx_PyErr_SetNone(exc) (Py_INCREF(exc), __Pyx_ErrRestore((exc), NULL, NULL))
+#else
+#define __Pyx_PyErr_SetNone(exc) PyErr_SetNone(exc)
+#endif
+#else
+#define __Pyx_PyErr_Clear() PyErr_Clear()
+#define __Pyx_PyErr_SetNone(exc) PyErr_SetNone(exc)
+#define __Pyx_ErrRestoreWithState(type, value, tb) PyErr_Restore(type, value, tb)
+#define __Pyx_ErrFetchWithState(type, value, tb) PyErr_Fetch(type, value, tb)
+#define __Pyx_ErrRestoreInState(tstate, type, value, tb) PyErr_Restore(type, value, tb)
+#define __Pyx_ErrFetchInState(tstate, type, value, tb) PyErr_Fetch(type, value, tb)
+#define __Pyx_ErrRestore(type, value, tb) PyErr_Restore(type, value, tb)
+#define __Pyx_ErrFetch(type, value, tb) PyErr_Fetch(type, value, tb)
+#endif
+
+/* CLineInTraceback.proto */
+#ifdef CYTHON_CLINE_IN_TRACEBACK
+#define __Pyx_CLineForTraceback(tstate, c_line) (((CYTHON_CLINE_IN_TRACEBACK)) ? c_line : 0)
+#else
+static int __Pyx_CLineForTraceback(PyThreadState *tstate, int c_line);
+#endif
+
+/* CodeObjectCache.proto */
+typedef struct {
+ PyCodeObject* code_object;
+ int code_line;
+} __Pyx_CodeObjectCacheEntry;
+struct __Pyx_CodeObjectCache {
+ int count;
+ int max_count;
+ __Pyx_CodeObjectCacheEntry* entries;
+};
+static struct __Pyx_CodeObjectCache __pyx_code_cache = {0,0,NULL};
+static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line);
+static PyCodeObject *__pyx_find_code_object(int code_line);
+static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object);
+
+/* AddTraceback.proto */
+static void __Pyx_AddTraceback(const char *funcname, int c_line,
+ int py_line, const char *filename);
+
+/* GCCDiagnostics.proto */
+#if defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 6))
+#define __Pyx_HAS_GCC_DIAGNOSTIC
+#endif
+
+/* CIntToPy.proto */
+static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value);
+
+/* CIntFromPy.proto */
+static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *);
+
+/* CIntFromPy.proto */
+static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *);
+
+/* FastTypeChecks.proto */
+#if CYTHON_COMPILING_IN_CPYTHON
+#define __Pyx_TypeCheck(obj, type) __Pyx_IsSubtype(Py_TYPE(obj), (PyTypeObject *)type)
+static CYTHON_INLINE int __Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b);
+static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches(PyObject *err, PyObject *type);
+static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches2(PyObject *err, PyObject *type1, PyObject *type2);
+#else
+#define __Pyx_TypeCheck(obj, type) PyObject_TypeCheck(obj, (PyTypeObject *)type)
+#define __Pyx_PyErr_GivenExceptionMatches(err, type) PyErr_GivenExceptionMatches(err, type)
+#define __Pyx_PyErr_GivenExceptionMatches2(err, type1, type2) (PyErr_GivenExceptionMatches(err, type1) || PyErr_GivenExceptionMatches(err, type2))
+#endif
+#define __Pyx_PyException_Check(obj) __Pyx_TypeCheck(obj, PyExc_Exception)
+
+/* CheckBinaryVersion.proto */
+static int __Pyx_check_binary_version(void);
+
+/* InitStrings.proto */
+static int __Pyx_InitStrings(__Pyx_StringTabEntry *t);
+
+
+/* Module declarations from 'gcd_v4' */
+#define __Pyx_MODULE_NAME "gcd_v4"
+extern int __pyx_module_is_main_gcd_v4;
+int __pyx_module_is_main_gcd_v4 = 0;
+
+/* Implementation of 'gcd_v4' */
+static const char __pyx_k_d[] = "d";
+static const char __pyx_k_gcd[] = "gcd";
+static const char __pyx_k_lata[] = "lata";
+static const char __pyx_k_latb[] = "latb";
+static const char __pyx_k_lona[] = "lona";
+static const char __pyx_k_lonb[] = "lonb";
+static const char __pyx_k_main[] = "__main__";
+static const char __pyx_k_name[] = "__name__";
+static const char __pyx_k_test[] = "__test__";
+static const char __pyx_k_xlata[] = "xlata";
+static const char __pyx_k_xlatb[] = "xlatb";
+static const char __pyx_k_xlona[] = "xlona";
+static const char __pyx_k_xlonb[] = "xlonb";
+static const char __pyx_k_gcd_v4[] = "gcd_v4";
+static const char __pyx_k_cos_lata[] = "cos_lata";
+static const char __pyx_k_cos_latb[] = "cos_latb";
+static const char __pyx_k_sin_lata[] = "sin_lata";
+static const char __pyx_k_sin_latb[] = "sin_latb";
+static const char __pyx_k_gcd_v4_pyx[] = "gcd_v4.pyx";
+static const char __pyx_k_cos_delta_lon[] = "cos_delta_lon";
+static const char __pyx_k_sin_delta_lon[] = "sin_delta_lon";
+static const char __pyx_k_cline_in_traceback[] = "cline_in_traceback";
+static PyObject *__pyx_n_s_cline_in_traceback;
+static PyObject *__pyx_n_s_cos_delta_lon;
+static PyObject *__pyx_n_s_cos_lata;
+static PyObject *__pyx_n_s_cos_latb;
+static PyObject *__pyx_n_s_d;
+static PyObject *__pyx_n_s_gcd;
+static PyObject *__pyx_n_s_gcd_v4;
+static PyObject *__pyx_kp_s_gcd_v4_pyx;
+static PyObject *__pyx_n_s_lata;
+static PyObject *__pyx_n_s_latb;
+static PyObject *__pyx_n_s_lona;
+static PyObject *__pyx_n_s_lonb;
+static PyObject *__pyx_n_s_main;
+static PyObject *__pyx_n_s_name;
+static PyObject *__pyx_n_s_sin_delta_lon;
+static PyObject *__pyx_n_s_sin_lata;
+static PyObject *__pyx_n_s_sin_latb;
+static PyObject *__pyx_n_s_test;
+static PyObject *__pyx_n_s_xlata;
+static PyObject *__pyx_n_s_xlatb;
+static PyObject *__pyx_n_s_xlona;
+static PyObject *__pyx_n_s_xlonb;
+static PyObject *__pyx_pf_6gcd_v4_gcd(CYTHON_UNUSED PyObject *__pyx_self, double __pyx_v_lona, PyObject *__pyx_v_lata, PyObject *__pyx_v_lonb, PyObject *__pyx_v_latb); /* proto */
+static PyObject *__pyx_float_0_0174532925;
+static PyObject *__pyx_tuple_;
+static PyObject *__pyx_codeobj__2;
+/* Late includes */
+
+/* "gcd_v4.pyx":10
+ * double pow(double x, double y)
+ *
+ * def gcd(double lona,lata,lonb,latb): # <<<<<<<<<<<<<<
+ * xlona = lona*0.0174532925
+ * xlata = lata*0.0174532925
+ */
+
+/* Python wrapper */
+static PyObject *__pyx_pw_6gcd_v4_1gcd(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/
+static PyMethodDef __pyx_mdef_6gcd_v4_1gcd = {"gcd", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_6gcd_v4_1gcd, METH_VARARGS|METH_KEYWORDS, 0};
+static PyObject *__pyx_pw_6gcd_v4_1gcd(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) {
+ double __pyx_v_lona;
+ PyObject *__pyx_v_lata = 0;
+ PyObject *__pyx_v_lonb = 0;
+ PyObject *__pyx_v_latb = 0;
+ int __pyx_lineno = 0;
+ const char *__pyx_filename = NULL;
+ int __pyx_clineno = 0;
+ PyObject *__pyx_r = 0;
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("gcd (wrapper)", 0);
+ {
+ static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_lona,&__pyx_n_s_lata,&__pyx_n_s_lonb,&__pyx_n_s_latb,0};
+ PyObject* values[4] = {0,0,0,0};
+ if (unlikely(__pyx_kwds)) {
+ Py_ssize_t kw_args;
+ const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args);
+ switch (pos_args) {
+ case 4: values[3] = PyTuple_GET_ITEM(__pyx_args, 3);
+ CYTHON_FALLTHROUGH;
+ case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2);
+ CYTHON_FALLTHROUGH;
+ case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1);
+ CYTHON_FALLTHROUGH;
+ case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0);
+ CYTHON_FALLTHROUGH;
+ case 0: break;
+ default: goto __pyx_L5_argtuple_error;
+ }
+ kw_args = PyDict_Size(__pyx_kwds);
+ switch (pos_args) {
+ case 0:
+ if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_lona)) != 0)) kw_args--;
+ else goto __pyx_L5_argtuple_error;
+ CYTHON_FALLTHROUGH;
+ case 1:
+ if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_lata)) != 0)) kw_args--;
+ else {
+ __Pyx_RaiseArgtupleInvalid("gcd", 1, 4, 4, 1); __PYX_ERR(0, 10, __pyx_L3_error)
+ }
+ CYTHON_FALLTHROUGH;
+ case 2:
+ if (likely((values[2] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_lonb)) != 0)) kw_args--;
+ else {
+ __Pyx_RaiseArgtupleInvalid("gcd", 1, 4, 4, 2); __PYX_ERR(0, 10, __pyx_L3_error)
+ }
+ CYTHON_FALLTHROUGH;
+ case 3:
+ if (likely((values[3] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_latb)) != 0)) kw_args--;
+ else {
+ __Pyx_RaiseArgtupleInvalid("gcd", 1, 4, 4, 3); __PYX_ERR(0, 10, __pyx_L3_error)
+ }
+ }
+ if (unlikely(kw_args > 0)) {
+ if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "gcd") < 0)) __PYX_ERR(0, 10, __pyx_L3_error)
+ }
+ } else if (PyTuple_GET_SIZE(__pyx_args) != 4) {
+ goto __pyx_L5_argtuple_error;
+ } else {
+ values[0] = PyTuple_GET_ITEM(__pyx_args, 0);
+ values[1] = PyTuple_GET_ITEM(__pyx_args, 1);
+ values[2] = PyTuple_GET_ITEM(__pyx_args, 2);
+ values[3] = PyTuple_GET_ITEM(__pyx_args, 3);
+ }
+ __pyx_v_lona = __pyx_PyFloat_AsDouble(values[0]); if (unlikely((__pyx_v_lona == (double)-1) && PyErr_Occurred())) __PYX_ERR(0, 10, __pyx_L3_error)
+ __pyx_v_lata = values[1];
+ __pyx_v_lonb = values[2];
+ __pyx_v_latb = values[3];
+ }
+ goto __pyx_L4_argument_unpacking_done;
+ __pyx_L5_argtuple_error:;
+ __Pyx_RaiseArgtupleInvalid("gcd", 1, 4, 4, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 10, __pyx_L3_error)
+ __pyx_L3_error:;
+ __Pyx_AddTraceback("gcd_v4.gcd", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __Pyx_RefNannyFinishContext();
+ return NULL;
+ __pyx_L4_argument_unpacking_done:;
+ __pyx_r = __pyx_pf_6gcd_v4_gcd(__pyx_self, __pyx_v_lona, __pyx_v_lata, __pyx_v_lonb, __pyx_v_latb);
+
+ /* function exit code */
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+static PyObject *__pyx_pf_6gcd_v4_gcd(CYTHON_UNUSED PyObject *__pyx_self, double __pyx_v_lona, PyObject *__pyx_v_lata, PyObject *__pyx_v_lonb, PyObject *__pyx_v_latb) {
+ double __pyx_v_xlona;
+ PyObject *__pyx_v_xlata = NULL;
+ PyObject *__pyx_v_xlonb = NULL;
+ PyObject *__pyx_v_xlatb = NULL;
+ double __pyx_v_sin_lata;
+ double __pyx_v_sin_latb;
+ double __pyx_v_cos_lata;
+ double __pyx_v_cos_latb;
+ double __pyx_v_cos_delta_lon;
+ double __pyx_v_sin_delta_lon;
+ double __pyx_v_d;
+ PyObject *__pyx_r = NULL;
+ __Pyx_RefNannyDeclarations
+ PyObject *__pyx_t_1 = NULL;
+ double __pyx_t_2;
+ PyObject *__pyx_t_3 = NULL;
+ int __pyx_lineno = 0;
+ const char *__pyx_filename = NULL;
+ int __pyx_clineno = 0;
+ __Pyx_RefNannySetupContext("gcd", 0);
+
+ /* "gcd_v4.pyx":11
+ *
+ * def gcd(double lona,lata,lonb,latb):
+ * xlona = lona*0.0174532925 # <<<<<<<<<<<<<<
+ * xlata = lata*0.0174532925
+ * xlonb = lonb*0.0174532925
+ */
+ __pyx_v_xlona = (__pyx_v_lona * 0.0174532925);
+
+ /* "gcd_v4.pyx":12
+ * def gcd(double lona,lata,lonb,latb):
+ * xlona = lona*0.0174532925
+ * xlata = lata*0.0174532925 # <<<<<<<<<<<<<<
+ * xlonb = lonb*0.0174532925
+ * xlatb = latb*0.0174532925
+ */
+ __pyx_t_1 = PyNumber_Multiply(__pyx_v_lata, __pyx_float_0_0174532925); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 12, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_v_xlata = __pyx_t_1;
+ __pyx_t_1 = 0;
+
+ /* "gcd_v4.pyx":13
+ * xlona = lona*0.0174532925
+ * xlata = lata*0.0174532925
+ * xlonb = lonb*0.0174532925 # <<<<<<<<<<<<<<
+ * xlatb = latb*0.0174532925
+ * sin_lata = sin(xlata)
+ */
+ __pyx_t_1 = PyNumber_Multiply(__pyx_v_lonb, __pyx_float_0_0174532925); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 13, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_v_xlonb = __pyx_t_1;
+ __pyx_t_1 = 0;
+
+ /* "gcd_v4.pyx":14
+ * xlata = lata*0.0174532925
+ * xlonb = lonb*0.0174532925
+ * xlatb = latb*0.0174532925 # <<<<<<<<<<<<<<
+ * sin_lata = sin(xlata)
+ * sin_latb = sin(xlatb)
+ */
+ __pyx_t_1 = PyNumber_Multiply(__pyx_v_latb, __pyx_float_0_0174532925); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 14, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_v_xlatb = __pyx_t_1;
+ __pyx_t_1 = 0;
+
+ /* "gcd_v4.pyx":15
+ * xlonb = lonb*0.0174532925
+ * xlatb = latb*0.0174532925
+ * sin_lata = sin(xlata) # <<<<<<<<<<<<<<
+ * sin_latb = sin(xlatb)
+ * cos_lata = cos(xlata)
+ */
+ __pyx_t_2 = __pyx_PyFloat_AsDouble(__pyx_v_xlata); if (unlikely((__pyx_t_2 == (double)-1) && PyErr_Occurred())) __PYX_ERR(0, 15, __pyx_L1_error)
+ __pyx_v_sin_lata = sin(__pyx_t_2);
+
+ /* "gcd_v4.pyx":16
+ * xlatb = latb*0.0174532925
+ * sin_lata = sin(xlata)
+ * sin_latb = sin(xlatb) # <<<<<<<<<<<<<<
+ * cos_lata = cos(xlata)
+ * cos_latb = cos(xlatb)
+ */
+ __pyx_t_2 = __pyx_PyFloat_AsDouble(__pyx_v_xlatb); if (unlikely((__pyx_t_2 == (double)-1) && PyErr_Occurred())) __PYX_ERR(0, 16, __pyx_L1_error)
+ __pyx_v_sin_latb = sin(__pyx_t_2);
+
+ /* "gcd_v4.pyx":17
+ * sin_lata = sin(xlata)
+ * sin_latb = sin(xlatb)
+ * cos_lata = cos(xlata) # <<<<<<<<<<<<<<
+ * cos_latb = cos(xlatb)
+ * cos_delta_lon = cos(xlonb - xlona)
+ */
+ __pyx_t_2 = __pyx_PyFloat_AsDouble(__pyx_v_xlata); if (unlikely((__pyx_t_2 == (double)-1) && PyErr_Occurred())) __PYX_ERR(0, 17, __pyx_L1_error)
+ __pyx_v_cos_lata = cos(__pyx_t_2);
+
+ /* "gcd_v4.pyx":18
+ * sin_latb = sin(xlatb)
+ * cos_lata = cos(xlata)
+ * cos_latb = cos(xlatb) # <<<<<<<<<<<<<<
+ * cos_delta_lon = cos(xlonb - xlona)
+ * sin_delta_lon = sin(xlonb - xlona)
+ */
+ __pyx_t_2 = __pyx_PyFloat_AsDouble(__pyx_v_xlatb); if (unlikely((__pyx_t_2 == (double)-1) && PyErr_Occurred())) __PYX_ERR(0, 18, __pyx_L1_error)
+ __pyx_v_cos_latb = cos(__pyx_t_2);
+
+ /* "gcd_v4.pyx":19
+ * cos_lata = cos(xlata)
+ * cos_latb = cos(xlatb)
+ * cos_delta_lon = cos(xlonb - xlona) # <<<<<<<<<<<<<<
+ * sin_delta_lon = sin(xlonb - xlona)
+ * d = atan2(sqrt(pow(cos_latb * sin_delta_lon,2) + pow(cos_lata * sin_latb -
+ */
+ __pyx_t_1 = PyFloat_FromDouble(__pyx_v_xlona); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 19, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_t_3 = PyNumber_Subtract(__pyx_v_xlonb, __pyx_t_1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 19, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_t_2 = __pyx_PyFloat_AsDouble(__pyx_t_3); if (unlikely((__pyx_t_2 == (double)-1) && PyErr_Occurred())) __PYX_ERR(0, 19, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __pyx_v_cos_delta_lon = cos(__pyx_t_2);
+
+ /* "gcd_v4.pyx":20
+ * cos_latb = cos(xlatb)
+ * cos_delta_lon = cos(xlonb - xlona)
+ * sin_delta_lon = sin(xlonb - xlona) # <<<<<<<<<<<<<<
+ * d = atan2(sqrt(pow(cos_latb * sin_delta_lon,2) + pow(cos_lata * sin_latb -
+ * sin_lata * cos_latb * cos_delta_lon,2)),
+ */
+ __pyx_t_3 = PyFloat_FromDouble(__pyx_v_xlona); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 20, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_3);
+ __pyx_t_1 = PyNumber_Subtract(__pyx_v_xlonb, __pyx_t_3); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 20, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+ __pyx_t_2 = __pyx_PyFloat_AsDouble(__pyx_t_1); if (unlikely((__pyx_t_2 == (double)-1) && PyErr_Occurred())) __PYX_ERR(0, 20, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+ __pyx_v_sin_delta_lon = sin(__pyx_t_2);
+
+ /* "gcd_v4.pyx":23
+ * d = atan2(sqrt(pow(cos_latb * sin_delta_lon,2) + pow(cos_lata * sin_latb -
+ * sin_lata * cos_latb * cos_delta_lon,2)),
+ * sin_lata * sin_latb + cos_lata * cos_latb * cos_delta_lon)*6372.795 # <<<<<<<<<<<<<<
+ * return d
+ */
+ __pyx_v_d = (atan2(sqrt((pow((__pyx_v_cos_latb * __pyx_v_sin_delta_lon), 2.0) + pow(((__pyx_v_cos_lata * __pyx_v_sin_latb) - ((__pyx_v_sin_lata * __pyx_v_cos_latb) * __pyx_v_cos_delta_lon)), 2.0))), ((__pyx_v_sin_lata * __pyx_v_sin_latb) + ((__pyx_v_cos_lata * __pyx_v_cos_latb) * __pyx_v_cos_delta_lon))) * 6372.795);
+
+ /* "gcd_v4.pyx":24
+ * sin_lata * cos_latb * cos_delta_lon,2)),
+ * sin_lata * sin_latb + cos_lata * cos_latb * cos_delta_lon)*6372.795
+ * return d # <<<<<<<<<<<<<<
+ */
+ __Pyx_XDECREF(__pyx_r);
+ __pyx_t_1 = PyFloat_FromDouble(__pyx_v_d); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 24, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ __pyx_r = __pyx_t_1;
+ __pyx_t_1 = 0;
+ goto __pyx_L0;
+
+ /* "gcd_v4.pyx":10
+ * double pow(double x, double y)
+ *
+ * def gcd(double lona,lata,lonb,latb): # <<<<<<<<<<<<<<
+ * xlona = lona*0.0174532925
+ * xlata = lata*0.0174532925
+ */
+
+ /* function exit code */
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_1);
+ __Pyx_XDECREF(__pyx_t_3);
+ __Pyx_AddTraceback("gcd_v4.gcd", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ __pyx_r = NULL;
+ __pyx_L0:;
+ __Pyx_XDECREF(__pyx_v_xlata);
+ __Pyx_XDECREF(__pyx_v_xlonb);
+ __Pyx_XDECREF(__pyx_v_xlatb);
+ __Pyx_XGIVEREF(__pyx_r);
+ __Pyx_RefNannyFinishContext();
+ return __pyx_r;
+}
+
+static PyMethodDef __pyx_methods[] = {
+ {0, 0, 0, 0}
+};
+
+#if PY_MAJOR_VERSION >= 3
+#if CYTHON_PEP489_MULTI_PHASE_INIT
+static PyObject* __pyx_pymod_create(PyObject *spec, PyModuleDef *def); /*proto*/
+static int __pyx_pymod_exec_gcd_v4(PyObject* module); /*proto*/
+static PyModuleDef_Slot __pyx_moduledef_slots[] = {
+ {Py_mod_create, (void*)__pyx_pymod_create},
+ {Py_mod_exec, (void*)__pyx_pymod_exec_gcd_v4},
+ {0, NULL}
+};
+#endif
+
+static struct PyModuleDef __pyx_moduledef = {
+ PyModuleDef_HEAD_INIT,
+ "gcd_v4",
+ 0, /* m_doc */
+ #if CYTHON_PEP489_MULTI_PHASE_INIT
+ 0, /* m_size */
+ #else
+ -1, /* m_size */
+ #endif
+ __pyx_methods /* m_methods */,
+ #if CYTHON_PEP489_MULTI_PHASE_INIT
+ __pyx_moduledef_slots, /* m_slots */
+ #else
+ NULL, /* m_reload */
+ #endif
+ NULL, /* m_traverse */
+ NULL, /* m_clear */
+ NULL /* m_free */
+};
+#endif
+#ifndef CYTHON_SMALL_CODE
+#if defined(__clang__)
+ #define CYTHON_SMALL_CODE
+#elif defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 3))
+ #define CYTHON_SMALL_CODE __attribute__((cold))
+#else
+ #define CYTHON_SMALL_CODE
+#endif
+#endif
+
+static __Pyx_StringTabEntry __pyx_string_tab[] = {
+ {&__pyx_n_s_cline_in_traceback, __pyx_k_cline_in_traceback, sizeof(__pyx_k_cline_in_traceback), 0, 0, 1, 1},
+ {&__pyx_n_s_cos_delta_lon, __pyx_k_cos_delta_lon, sizeof(__pyx_k_cos_delta_lon), 0, 0, 1, 1},
+ {&__pyx_n_s_cos_lata, __pyx_k_cos_lata, sizeof(__pyx_k_cos_lata), 0, 0, 1, 1},
+ {&__pyx_n_s_cos_latb, __pyx_k_cos_latb, sizeof(__pyx_k_cos_latb), 0, 0, 1, 1},
+ {&__pyx_n_s_d, __pyx_k_d, sizeof(__pyx_k_d), 0, 0, 1, 1},
+ {&__pyx_n_s_gcd, __pyx_k_gcd, sizeof(__pyx_k_gcd), 0, 0, 1, 1},
+ {&__pyx_n_s_gcd_v4, __pyx_k_gcd_v4, sizeof(__pyx_k_gcd_v4), 0, 0, 1, 1},
+ {&__pyx_kp_s_gcd_v4_pyx, __pyx_k_gcd_v4_pyx, sizeof(__pyx_k_gcd_v4_pyx), 0, 0, 1, 0},
+ {&__pyx_n_s_lata, __pyx_k_lata, sizeof(__pyx_k_lata), 0, 0, 1, 1},
+ {&__pyx_n_s_latb, __pyx_k_latb, sizeof(__pyx_k_latb), 0, 0, 1, 1},
+ {&__pyx_n_s_lona, __pyx_k_lona, sizeof(__pyx_k_lona), 0, 0, 1, 1},
+ {&__pyx_n_s_lonb, __pyx_k_lonb, sizeof(__pyx_k_lonb), 0, 0, 1, 1},
+ {&__pyx_n_s_main, __pyx_k_main, sizeof(__pyx_k_main), 0, 0, 1, 1},
+ {&__pyx_n_s_name, __pyx_k_name, sizeof(__pyx_k_name), 0, 0, 1, 1},
+ {&__pyx_n_s_sin_delta_lon, __pyx_k_sin_delta_lon, sizeof(__pyx_k_sin_delta_lon), 0, 0, 1, 1},
+ {&__pyx_n_s_sin_lata, __pyx_k_sin_lata, sizeof(__pyx_k_sin_lata), 0, 0, 1, 1},
+ {&__pyx_n_s_sin_latb, __pyx_k_sin_latb, sizeof(__pyx_k_sin_latb), 0, 0, 1, 1},
+ {&__pyx_n_s_test, __pyx_k_test, sizeof(__pyx_k_test), 0, 0, 1, 1},
+ {&__pyx_n_s_xlata, __pyx_k_xlata, sizeof(__pyx_k_xlata), 0, 0, 1, 1},
+ {&__pyx_n_s_xlatb, __pyx_k_xlatb, sizeof(__pyx_k_xlatb), 0, 0, 1, 1},
+ {&__pyx_n_s_xlona, __pyx_k_xlona, sizeof(__pyx_k_xlona), 0, 0, 1, 1},
+ {&__pyx_n_s_xlonb, __pyx_k_xlonb, sizeof(__pyx_k_xlonb), 0, 0, 1, 1},
+ {0, 0, 0, 0, 0, 0, 0}
+};
+static CYTHON_SMALL_CODE int __Pyx_InitCachedBuiltins(void) {
+ return 0;
+}
+
+static CYTHON_SMALL_CODE int __Pyx_InitCachedConstants(void) {
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("__Pyx_InitCachedConstants", 0);
+
+ /* "gcd_v4.pyx":10
+ * double pow(double x, double y)
+ *
+ * def gcd(double lona,lata,lonb,latb): # <<<<<<<<<<<<<<
+ * xlona = lona*0.0174532925
+ * xlata = lata*0.0174532925
+ */
+ __pyx_tuple_ = PyTuple_Pack(15, __pyx_n_s_lona, __pyx_n_s_lata, __pyx_n_s_lonb, __pyx_n_s_latb, __pyx_n_s_xlona, __pyx_n_s_xlata, __pyx_n_s_xlonb, __pyx_n_s_xlatb, __pyx_n_s_sin_lata, __pyx_n_s_sin_latb, __pyx_n_s_cos_lata, __pyx_n_s_cos_latb, __pyx_n_s_cos_delta_lon, __pyx_n_s_sin_delta_lon, __pyx_n_s_d); if (unlikely(!__pyx_tuple_)) __PYX_ERR(0, 10, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_tuple_);
+ __Pyx_GIVEREF(__pyx_tuple_);
+ __pyx_codeobj__2 = (PyObject*)__Pyx_PyCode_New(4, 0, 15, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple_, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_gcd_v4_pyx, __pyx_n_s_gcd, 10, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__2)) __PYX_ERR(0, 10, __pyx_L1_error)
+ __Pyx_RefNannyFinishContext();
+ return 0;
+ __pyx_L1_error:;
+ __Pyx_RefNannyFinishContext();
+ return -1;
+}
+
+static CYTHON_SMALL_CODE int __Pyx_InitGlobals(void) {
+ if (__Pyx_InitStrings(__pyx_string_tab) < 0) __PYX_ERR(0, 1, __pyx_L1_error);
+ __pyx_float_0_0174532925 = PyFloat_FromDouble(0.0174532925); if (unlikely(!__pyx_float_0_0174532925)) __PYX_ERR(0, 1, __pyx_L1_error)
+ return 0;
+ __pyx_L1_error:;
+ return -1;
+}
+
+static CYTHON_SMALL_CODE int __Pyx_modinit_global_init_code(void); /*proto*/
+static CYTHON_SMALL_CODE int __Pyx_modinit_variable_export_code(void); /*proto*/
+static CYTHON_SMALL_CODE int __Pyx_modinit_function_export_code(void); /*proto*/
+static CYTHON_SMALL_CODE int __Pyx_modinit_type_init_code(void); /*proto*/
+static CYTHON_SMALL_CODE int __Pyx_modinit_type_import_code(void); /*proto*/
+static CYTHON_SMALL_CODE int __Pyx_modinit_variable_import_code(void); /*proto*/
+static CYTHON_SMALL_CODE int __Pyx_modinit_function_import_code(void); /*proto*/
+
+static int __Pyx_modinit_global_init_code(void) {
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("__Pyx_modinit_global_init_code", 0);
+ /*--- Global init code ---*/
+ __Pyx_RefNannyFinishContext();
+ return 0;
+}
+
+static int __Pyx_modinit_variable_export_code(void) {
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("__Pyx_modinit_variable_export_code", 0);
+ /*--- Variable export code ---*/
+ __Pyx_RefNannyFinishContext();
+ return 0;
+}
+
+static int __Pyx_modinit_function_export_code(void) {
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("__Pyx_modinit_function_export_code", 0);
+ /*--- Function export code ---*/
+ __Pyx_RefNannyFinishContext();
+ return 0;
+}
+
+static int __Pyx_modinit_type_init_code(void) {
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("__Pyx_modinit_type_init_code", 0);
+ /*--- Type init code ---*/
+ __Pyx_RefNannyFinishContext();
+ return 0;
+}
+
+static int __Pyx_modinit_type_import_code(void) {
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("__Pyx_modinit_type_import_code", 0);
+ /*--- Type import code ---*/
+ __Pyx_RefNannyFinishContext();
+ return 0;
+}
+
+static int __Pyx_modinit_variable_import_code(void) {
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("__Pyx_modinit_variable_import_code", 0);
+ /*--- Variable import code ---*/
+ __Pyx_RefNannyFinishContext();
+ return 0;
+}
+
+static int __Pyx_modinit_function_import_code(void) {
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("__Pyx_modinit_function_import_code", 0);
+ /*--- Function import code ---*/
+ __Pyx_RefNannyFinishContext();
+ return 0;
+}
+
+
+#ifndef CYTHON_NO_PYINIT_EXPORT
+#define __Pyx_PyMODINIT_FUNC PyMODINIT_FUNC
+#elif PY_MAJOR_VERSION < 3
+#ifdef __cplusplus
+#define __Pyx_PyMODINIT_FUNC extern "C" void
+#else
+#define __Pyx_PyMODINIT_FUNC void
+#endif
+#else
+#ifdef __cplusplus
+#define __Pyx_PyMODINIT_FUNC extern "C" PyObject *
+#else
+#define __Pyx_PyMODINIT_FUNC PyObject *
+#endif
+#endif
+
+
+#if PY_MAJOR_VERSION < 3
+__Pyx_PyMODINIT_FUNC initgcd_v4(void) CYTHON_SMALL_CODE; /*proto*/
+__Pyx_PyMODINIT_FUNC initgcd_v4(void)
+#else
+__Pyx_PyMODINIT_FUNC PyInit_gcd_v4(void) CYTHON_SMALL_CODE; /*proto*/
+__Pyx_PyMODINIT_FUNC PyInit_gcd_v4(void)
+#if CYTHON_PEP489_MULTI_PHASE_INIT
+{
+ return PyModuleDef_Init(&__pyx_moduledef);
+}
+static CYTHON_SMALL_CODE int __Pyx_check_single_interpreter(void) {
+ #if PY_VERSION_HEX >= 0x030700A1
+ static PY_INT64_T main_interpreter_id = -1;
+ PY_INT64_T current_id = PyInterpreterState_GetID(PyThreadState_Get()->interp);
+ if (main_interpreter_id == -1) {
+ main_interpreter_id = current_id;
+ return (unlikely(current_id == -1)) ? -1 : 0;
+ } else if (unlikely(main_interpreter_id != current_id))
+ #else
+ static PyInterpreterState *main_interpreter = NULL;
+ PyInterpreterState *current_interpreter = PyThreadState_Get()->interp;
+ if (!main_interpreter) {
+ main_interpreter = current_interpreter;
+ } else if (unlikely(main_interpreter != current_interpreter))
+ #endif
+ {
+ PyErr_SetString(
+ PyExc_ImportError,
+ "Interpreter change detected - this module can only be loaded into one interpreter per process.");
+ return -1;
+ }
+ return 0;
+}
+static CYTHON_SMALL_CODE int __Pyx_copy_spec_to_module(PyObject *spec, PyObject *moddict, const char* from_name, const char* to_name, int allow_none) {
+ PyObject *value = PyObject_GetAttrString(spec, from_name);
+ int result = 0;
+ if (likely(value)) {
+ if (allow_none || value != Py_None) {
+ result = PyDict_SetItemString(moddict, to_name, value);
+ }
+ Py_DECREF(value);
+ } else if (PyErr_ExceptionMatches(PyExc_AttributeError)) {
+ PyErr_Clear();
+ } else {
+ result = -1;
+ }
+ return result;
+}
+static CYTHON_SMALL_CODE PyObject* __pyx_pymod_create(PyObject *spec, CYTHON_UNUSED PyModuleDef *def) {
+ PyObject *module = NULL, *moddict, *modname;
+ if (__Pyx_check_single_interpreter())
+ return NULL;
+ if (__pyx_m)
+ return __Pyx_NewRef(__pyx_m);
+ modname = PyObject_GetAttrString(spec, "name");
+ if (unlikely(!modname)) goto bad;
+ module = PyModule_NewObject(modname);
+ Py_DECREF(modname);
+ if (unlikely(!module)) goto bad;
+ moddict = PyModule_GetDict(module);
+ if (unlikely(!moddict)) goto bad;
+ if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "loader", "__loader__", 1) < 0)) goto bad;
+ if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "origin", "__file__", 1) < 0)) goto bad;
+ if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "parent", "__package__", 1) < 0)) goto bad;
+ if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "submodule_search_locations", "__path__", 0) < 0)) goto bad;
+ return module;
+bad:
+ Py_XDECREF(module);
+ return NULL;
+}
+
+
+static CYTHON_SMALL_CODE int __pyx_pymod_exec_gcd_v4(PyObject *__pyx_pyinit_module)
+#endif
+#endif
+{
+ PyObject *__pyx_t_1 = NULL;
+ int __pyx_lineno = 0;
+ const char *__pyx_filename = NULL;
+ int __pyx_clineno = 0;
+ __Pyx_RefNannyDeclarations
+ #if CYTHON_PEP489_MULTI_PHASE_INIT
+ if (__pyx_m) {
+ if (__pyx_m == __pyx_pyinit_module) return 0;
+ PyErr_SetString(PyExc_RuntimeError, "Module 'gcd_v4' has already been imported. Re-initialisation is not supported.");
+ return -1;
+ }
+ #elif PY_MAJOR_VERSION >= 3
+ if (__pyx_m) return __Pyx_NewRef(__pyx_m);
+ #endif
+ #if CYTHON_REFNANNY
+__Pyx_RefNanny = __Pyx_RefNannyImportAPI("refnanny");
+if (!__Pyx_RefNanny) {
+ PyErr_Clear();
+ __Pyx_RefNanny = __Pyx_RefNannyImportAPI("Cython.Runtime.refnanny");
+ if (!__Pyx_RefNanny)
+ Py_FatalError("failed to import 'refnanny' module");
+}
+#endif
+ __Pyx_RefNannySetupContext("__Pyx_PyMODINIT_FUNC PyInit_gcd_v4(void)", 0);
+ if (__Pyx_check_binary_version() < 0) __PYX_ERR(0, 1, __pyx_L1_error)
+ #ifdef __Pxy_PyFrame_Initialize_Offsets
+ __Pxy_PyFrame_Initialize_Offsets();
+ #endif
+ __pyx_empty_tuple = PyTuple_New(0); if (unlikely(!__pyx_empty_tuple)) __PYX_ERR(0, 1, __pyx_L1_error)
+ __pyx_empty_bytes = PyBytes_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_bytes)) __PYX_ERR(0, 1, __pyx_L1_error)
+ __pyx_empty_unicode = PyUnicode_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_unicode)) __PYX_ERR(0, 1, __pyx_L1_error)
+ #ifdef __Pyx_CyFunction_USED
+ if (__pyx_CyFunction_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error)
+ #endif
+ #ifdef __Pyx_FusedFunction_USED
+ if (__pyx_FusedFunction_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error)
+ #endif
+ #ifdef __Pyx_Coroutine_USED
+ if (__pyx_Coroutine_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error)
+ #endif
+ #ifdef __Pyx_Generator_USED
+ if (__pyx_Generator_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error)
+ #endif
+ #ifdef __Pyx_AsyncGen_USED
+ if (__pyx_AsyncGen_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error)
+ #endif
+ #ifdef __Pyx_StopAsyncIteration_USED
+ if (__pyx_StopAsyncIteration_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error)
+ #endif
+ /*--- Library function declarations ---*/
+ /*--- Threads initialization code ---*/
+ #if defined(__PYX_FORCE_INIT_THREADS) && __PYX_FORCE_INIT_THREADS
+ #ifdef WITH_THREAD /* Python build with threading support? */
+ PyEval_InitThreads();
+ #endif
+ #endif
+ /*--- Module creation code ---*/
+ #if CYTHON_PEP489_MULTI_PHASE_INIT
+ __pyx_m = __pyx_pyinit_module;
+ Py_INCREF(__pyx_m);
+ #else
+ #if PY_MAJOR_VERSION < 3
+ __pyx_m = Py_InitModule4("gcd_v4", __pyx_methods, 0, 0, PYTHON_API_VERSION); Py_XINCREF(__pyx_m);
+ #else
+ __pyx_m = PyModule_Create(&__pyx_moduledef);
+ #endif
+ if (unlikely(!__pyx_m)) __PYX_ERR(0, 1, __pyx_L1_error)
+ #endif
+ __pyx_d = PyModule_GetDict(__pyx_m); if (unlikely(!__pyx_d)) __PYX_ERR(0, 1, __pyx_L1_error)
+ Py_INCREF(__pyx_d);
+ __pyx_b = PyImport_AddModule(__Pyx_BUILTIN_MODULE_NAME); if (unlikely(!__pyx_b)) __PYX_ERR(0, 1, __pyx_L1_error)
+ Py_INCREF(__pyx_b);
+ __pyx_cython_runtime = PyImport_AddModule((char *) "cython_runtime"); if (unlikely(!__pyx_cython_runtime)) __PYX_ERR(0, 1, __pyx_L1_error)
+ Py_INCREF(__pyx_cython_runtime);
+ if (PyObject_SetAttrString(__pyx_m, "__builtins__", __pyx_b) < 0) __PYX_ERR(0, 1, __pyx_L1_error);
+ /*--- Initialize various global constants etc. ---*/
+ if (__Pyx_InitGlobals() < 0) __PYX_ERR(0, 1, __pyx_L1_error)
+ #if PY_MAJOR_VERSION < 3 && (__PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT)
+ if (__Pyx_init_sys_getdefaultencoding_params() < 0) __PYX_ERR(0, 1, __pyx_L1_error)
+ #endif
+ if (__pyx_module_is_main_gcd_v4) {
+ if (PyObject_SetAttr(__pyx_m, __pyx_n_s_name, __pyx_n_s_main) < 0) __PYX_ERR(0, 1, __pyx_L1_error)
+ }
+ #if PY_MAJOR_VERSION >= 3
+ {
+ PyObject *modules = PyImport_GetModuleDict(); if (unlikely(!modules)) __PYX_ERR(0, 1, __pyx_L1_error)
+ if (!PyDict_GetItemString(modules, "gcd_v4")) {
+ if (unlikely(PyDict_SetItemString(modules, "gcd_v4", __pyx_m) < 0)) __PYX_ERR(0, 1, __pyx_L1_error)
+ }
+ }
+ #endif
+ /*--- Builtin init code ---*/
+ if (__Pyx_InitCachedBuiltins() < 0) __PYX_ERR(0, 1, __pyx_L1_error)
+ /*--- Constants init code ---*/
+ if (__Pyx_InitCachedConstants() < 0) __PYX_ERR(0, 1, __pyx_L1_error)
+ /*--- Global type/function init code ---*/
+ (void)__Pyx_modinit_global_init_code();
+ (void)__Pyx_modinit_variable_export_code();
+ (void)__Pyx_modinit_function_export_code();
+ (void)__Pyx_modinit_type_init_code();
+ (void)__Pyx_modinit_type_import_code();
+ (void)__Pyx_modinit_variable_import_code();
+ (void)__Pyx_modinit_function_import_code();
+ /*--- Execution code ---*/
+ #if defined(__Pyx_Generator_USED) || defined(__Pyx_Coroutine_USED)
+ if (__Pyx_patch_abc() < 0) __PYX_ERR(0, 1, __pyx_L1_error)
+ #endif
+
+ /* "gcd_v4.pyx":10
+ * double pow(double x, double y)
+ *
+ * def gcd(double lona,lata,lonb,latb): # <<<<<<<<<<<<<<
+ * xlona = lona*0.0174532925
+ * xlata = lata*0.0174532925
+ */
+ __pyx_t_1 = PyCFunction_NewEx(&__pyx_mdef_6gcd_v4_1gcd, NULL, __pyx_n_s_gcd_v4); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 10, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ if (PyDict_SetItem(__pyx_d, __pyx_n_s_gcd, __pyx_t_1) < 0) __PYX_ERR(0, 10, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "gcd_v4.pyx":1
+ * # use python2.4 setup_gcd.py build_ext --inplace # <<<<<<<<<<<<<<
+ *
+ * cdef extern from "math.h":
+ */
+ __pyx_t_1 = __Pyx_PyDict_NewPresized(0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ if (PyDict_SetItem(__pyx_d, __pyx_n_s_test, __pyx_t_1) < 0) __PYX_ERR(0, 1, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /*--- Wrapped vars code ---*/
+
+ goto __pyx_L0;
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_1);
+ if (__pyx_m) {
+ if (__pyx_d) {
+ __Pyx_AddTraceback("init gcd_v4", __pyx_clineno, __pyx_lineno, __pyx_filename);
+ }
+ Py_CLEAR(__pyx_m);
+ } else if (!PyErr_Occurred()) {
+ PyErr_SetString(PyExc_ImportError, "init gcd_v4");
+ }
+ __pyx_L0:;
+ __Pyx_RefNannyFinishContext();
+ #if CYTHON_PEP489_MULTI_PHASE_INIT
+ return (__pyx_m != NULL) ? 0 : -1;
+ #elif PY_MAJOR_VERSION >= 3
+ return __pyx_m;
+ #else
+ return;
+ #endif
+}
+
+/* --- Runtime support code --- */
+/* Refnanny */
+#if CYTHON_REFNANNY
+static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname) {
+ PyObject *m = NULL, *p = NULL;
+ void *r = NULL;
+ m = PyImport_ImportModule(modname);
+ if (!m) goto end;
+ p = PyObject_GetAttrString(m, "RefNannyAPI");
+ if (!p) goto end;
+ r = PyLong_AsVoidPtr(p);
+end:
+ Py_XDECREF(p);
+ Py_XDECREF(m);
+ return (__Pyx_RefNannyAPIStruct *)r;
+}
+#endif
+
+/* RaiseArgTupleInvalid */
+static void __Pyx_RaiseArgtupleInvalid(
+ const char* func_name,
+ int exact,
+ Py_ssize_t num_min,
+ Py_ssize_t num_max,
+ Py_ssize_t num_found)
+{
+ Py_ssize_t num_expected;
+ const char *more_or_less;
+ if (num_found < num_min) {
+ num_expected = num_min;
+ more_or_less = "at least";
+ } else {
+ num_expected = num_max;
+ more_or_less = "at most";
+ }
+ if (exact) {
+ more_or_less = "exactly";
+ }
+ PyErr_Format(PyExc_TypeError,
+ "%.200s() takes %.8s %" CYTHON_FORMAT_SSIZE_T "d positional argument%.1s (%" CYTHON_FORMAT_SSIZE_T "d given)",
+ func_name, more_or_less, num_expected,
+ (num_expected == 1) ? "" : "s", num_found);
+}
+
+/* RaiseDoubleKeywords */
+static void __Pyx_RaiseDoubleKeywordsError(
+ const char* func_name,
+ PyObject* kw_name)
+{
+ PyErr_Format(PyExc_TypeError,
+ #if PY_MAJOR_VERSION >= 3
+ "%s() got multiple values for keyword argument '%U'", func_name, kw_name);
+ #else
+ "%s() got multiple values for keyword argument '%s'", func_name,
+ PyString_AsString(kw_name));
+ #endif
+}
+
+/* ParseKeywords */
+static int __Pyx_ParseOptionalKeywords(
+ PyObject *kwds,
+ PyObject **argnames[],
+ PyObject *kwds2,
+ PyObject *values[],
+ Py_ssize_t num_pos_args,
+ const char* function_name)
+{
+ PyObject *key = 0, *value = 0;
+ Py_ssize_t pos = 0;
+ PyObject*** name;
+ PyObject*** first_kw_arg = argnames + num_pos_args;
+ while (PyDict_Next(kwds, &pos, &key, &value)) {
+ name = first_kw_arg;
+ while (*name && (**name != key)) name++;
+ if (*name) {
+ values[name-argnames] = value;
+ continue;
+ }
+ name = first_kw_arg;
+ #if PY_MAJOR_VERSION < 3
+ if (likely(PyString_Check(key))) {
+ while (*name) {
+ if ((CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**name) == PyString_GET_SIZE(key))
+ && _PyString_Eq(**name, key)) {
+ values[name-argnames] = value;
+ break;
+ }
+ name++;
+ }
+ if (*name) continue;
+ else {
+ PyObject*** argname = argnames;
+ while (argname != first_kw_arg) {
+ if ((**argname == key) || (
+ (CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**argname) == PyString_GET_SIZE(key))
+ && _PyString_Eq(**argname, key))) {
+ goto arg_passed_twice;
+ }
+ argname++;
+ }
+ }
+ } else
+ #endif
+ if (likely(PyUnicode_Check(key))) {
+ while (*name) {
+ int cmp = (**name == key) ? 0 :
+ #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3
+ (__Pyx_PyUnicode_GET_LENGTH(**name) != __Pyx_PyUnicode_GET_LENGTH(key)) ? 1 :
+ #endif
+ PyUnicode_Compare(**name, key);
+ if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad;
+ if (cmp == 0) {
+ values[name-argnames] = value;
+ break;
+ }
+ name++;
+ }
+ if (*name) continue;
+ else {
+ PyObject*** argname = argnames;
+ while (argname != first_kw_arg) {
+ int cmp = (**argname == key) ? 0 :
+ #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3
+ (__Pyx_PyUnicode_GET_LENGTH(**argname) != __Pyx_PyUnicode_GET_LENGTH(key)) ? 1 :
+ #endif
+ PyUnicode_Compare(**argname, key);
+ if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad;
+ if (cmp == 0) goto arg_passed_twice;
+ argname++;
+ }
+ }
+ } else
+ goto invalid_keyword_type;
+ if (kwds2) {
+ if (unlikely(PyDict_SetItem(kwds2, key, value))) goto bad;
+ } else {
+ goto invalid_keyword;
+ }
+ }
+ return 0;
+arg_passed_twice:
+ __Pyx_RaiseDoubleKeywordsError(function_name, key);
+ goto bad;
+invalid_keyword_type:
+ PyErr_Format(PyExc_TypeError,
+ "%.200s() keywords must be strings", function_name);
+ goto bad;
+invalid_keyword:
+ PyErr_Format(PyExc_TypeError,
+ #if PY_MAJOR_VERSION < 3
+ "%.200s() got an unexpected keyword argument '%.200s'",
+ function_name, PyString_AsString(key));
+ #else
+ "%s() got an unexpected keyword argument '%U'",
+ function_name, key);
+ #endif
+bad:
+ return -1;
+}
+
+/* PyDictVersioning */
+#if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_TYPE_SLOTS
+static CYTHON_INLINE PY_UINT64_T __Pyx_get_tp_dict_version(PyObject *obj) {
+ PyObject *dict = Py_TYPE(obj)->tp_dict;
+ return likely(dict) ? __PYX_GET_DICT_VERSION(dict) : 0;
+}
+static CYTHON_INLINE PY_UINT64_T __Pyx_get_object_dict_version(PyObject *obj) {
+ PyObject **dictptr = NULL;
+ Py_ssize_t offset = Py_TYPE(obj)->tp_dictoffset;
+ if (offset) {
+#if CYTHON_COMPILING_IN_CPYTHON
+ dictptr = (likely(offset > 0)) ? (PyObject **) ((char *)obj + offset) : _PyObject_GetDictPtr(obj);
+#else
+ dictptr = _PyObject_GetDictPtr(obj);
+#endif
+ }
+ return (dictptr && *dictptr) ? __PYX_GET_DICT_VERSION(*dictptr) : 0;
+}
+static CYTHON_INLINE int __Pyx_object_dict_version_matches(PyObject* obj, PY_UINT64_T tp_dict_version, PY_UINT64_T obj_dict_version) {
+ PyObject *dict = Py_TYPE(obj)->tp_dict;
+ if (unlikely(!dict) || unlikely(tp_dict_version != __PYX_GET_DICT_VERSION(dict)))
+ return 0;
+ return obj_dict_version == __Pyx_get_object_dict_version(obj);
+}
+#endif
+
+/* PyObjectGetAttrStr */
+#if CYTHON_USE_TYPE_SLOTS
+static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStr(PyObject* obj, PyObject* attr_name) {
+ PyTypeObject* tp = Py_TYPE(obj);
+ if (likely(tp->tp_getattro))
+ return tp->tp_getattro(obj, attr_name);
+#if PY_MAJOR_VERSION < 3
+ if (likely(tp->tp_getattr))
+ return tp->tp_getattr(obj, PyString_AS_STRING(attr_name));
+#endif
+ return PyObject_GetAttr(obj, attr_name);
+}
+#endif
+
+/* PyErrFetchRestore */
+#if CYTHON_FAST_THREAD_STATE
+static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb) {
+ PyObject *tmp_type, *tmp_value, *tmp_tb;
+ tmp_type = tstate->curexc_type;
+ tmp_value = tstate->curexc_value;
+ tmp_tb = tstate->curexc_traceback;
+ tstate->curexc_type = type;
+ tstate->curexc_value = value;
+ tstate->curexc_traceback = tb;
+ Py_XDECREF(tmp_type);
+ Py_XDECREF(tmp_value);
+ Py_XDECREF(tmp_tb);
+}
+static CYTHON_INLINE void __Pyx_ErrFetchInState(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) {
+ *type = tstate->curexc_type;
+ *value = tstate->curexc_value;
+ *tb = tstate->curexc_traceback;
+ tstate->curexc_type = 0;
+ tstate->curexc_value = 0;
+ tstate->curexc_traceback = 0;
+}
+#endif
+
+/* CLineInTraceback */
+#ifndef CYTHON_CLINE_IN_TRACEBACK
+static int __Pyx_CLineForTraceback(CYTHON_NCP_UNUSED PyThreadState *tstate, int c_line) {
+ PyObject *use_cline;
+ PyObject *ptype, *pvalue, *ptraceback;
+#if CYTHON_COMPILING_IN_CPYTHON
+ PyObject **cython_runtime_dict;
+#endif
+ if (unlikely(!__pyx_cython_runtime)) {
+ return c_line;
+ }
+ __Pyx_ErrFetchInState(tstate, &ptype, &pvalue, &ptraceback);
+#if CYTHON_COMPILING_IN_CPYTHON
+ cython_runtime_dict = _PyObject_GetDictPtr(__pyx_cython_runtime);
+ if (likely(cython_runtime_dict)) {
+ __PYX_PY_DICT_LOOKUP_IF_MODIFIED(
+ use_cline, *cython_runtime_dict,
+ __Pyx_PyDict_GetItemStr(*cython_runtime_dict, __pyx_n_s_cline_in_traceback))
+ } else
+#endif
+ {
+ PyObject *use_cline_obj = __Pyx_PyObject_GetAttrStr(__pyx_cython_runtime, __pyx_n_s_cline_in_traceback);
+ if (use_cline_obj) {
+ use_cline = PyObject_Not(use_cline_obj) ? Py_False : Py_True;
+ Py_DECREF(use_cline_obj);
+ } else {
+ PyErr_Clear();
+ use_cline = NULL;
+ }
+ }
+ if (!use_cline) {
+ c_line = 0;
+ PyObject_SetAttr(__pyx_cython_runtime, __pyx_n_s_cline_in_traceback, Py_False);
+ }
+ else if (use_cline == Py_False || (use_cline != Py_True && PyObject_Not(use_cline) != 0)) {
+ c_line = 0;
+ }
+ __Pyx_ErrRestoreInState(tstate, ptype, pvalue, ptraceback);
+ return c_line;
+}
+#endif
+
+/* CodeObjectCache */
+static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line) {
+ int start = 0, mid = 0, end = count - 1;
+ if (end >= 0 && code_line > entries[end].code_line) {
+ return count;
+ }
+ while (start < end) {
+ mid = start + (end - start) / 2;
+ if (code_line < entries[mid].code_line) {
+ end = mid;
+ } else if (code_line > entries[mid].code_line) {
+ start = mid + 1;
+ } else {
+ return mid;
+ }
+ }
+ if (code_line <= entries[mid].code_line) {
+ return mid;
+ } else {
+ return mid + 1;
+ }
+}
+static PyCodeObject *__pyx_find_code_object(int code_line) {
+ PyCodeObject* code_object;
+ int pos;
+ if (unlikely(!code_line) || unlikely(!__pyx_code_cache.entries)) {
+ return NULL;
+ }
+ pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line);
+ if (unlikely(pos >= __pyx_code_cache.count) || unlikely(__pyx_code_cache.entries[pos].code_line != code_line)) {
+ return NULL;
+ }
+ code_object = __pyx_code_cache.entries[pos].code_object;
+ Py_INCREF(code_object);
+ return code_object;
+}
+static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object) {
+ int pos, i;
+ __Pyx_CodeObjectCacheEntry* entries = __pyx_code_cache.entries;
+ if (unlikely(!code_line)) {
+ return;
+ }
+ if (unlikely(!entries)) {
+ entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Malloc(64*sizeof(__Pyx_CodeObjectCacheEntry));
+ if (likely(entries)) {
+ __pyx_code_cache.entries = entries;
+ __pyx_code_cache.max_count = 64;
+ __pyx_code_cache.count = 1;
+ entries[0].code_line = code_line;
+ entries[0].code_object = code_object;
+ Py_INCREF(code_object);
+ }
+ return;
+ }
+ pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line);
+ if ((pos < __pyx_code_cache.count) && unlikely(__pyx_code_cache.entries[pos].code_line == code_line)) {
+ PyCodeObject* tmp = entries[pos].code_object;
+ entries[pos].code_object = code_object;
+ Py_DECREF(tmp);
+ return;
+ }
+ if (__pyx_code_cache.count == __pyx_code_cache.max_count) {
+ int new_max = __pyx_code_cache.max_count + 64;
+ entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Realloc(
+ __pyx_code_cache.entries, ((size_t)new_max) * sizeof(__Pyx_CodeObjectCacheEntry));
+ if (unlikely(!entries)) {
+ return;
+ }
+ __pyx_code_cache.entries = entries;
+ __pyx_code_cache.max_count = new_max;
+ }
+ for (i=__pyx_code_cache.count; i>pos; i--) {
+ entries[i] = entries[i-1];
+ }
+ entries[pos].code_line = code_line;
+ entries[pos].code_object = code_object;
+ __pyx_code_cache.count++;
+ Py_INCREF(code_object);
+}
+
+/* AddTraceback */
+#include "compile.h"
+#include "frameobject.h"
+#include "traceback.h"
+static PyCodeObject* __Pyx_CreateCodeObjectForTraceback(
+ const char *funcname, int c_line,
+ int py_line, const char *filename) {
+ PyCodeObject *py_code = 0;
+ PyObject *py_srcfile = 0;
+ PyObject *py_funcname = 0;
+ #if PY_MAJOR_VERSION < 3
+ py_srcfile = PyString_FromString(filename);
+ #else
+ py_srcfile = PyUnicode_FromString(filename);
+ #endif
+ if (!py_srcfile) goto bad;
+ if (c_line) {
+ #if PY_MAJOR_VERSION < 3
+ py_funcname = PyString_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line);
+ #else
+ py_funcname = PyUnicode_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line);
+ #endif
+ }
+ else {
+ #if PY_MAJOR_VERSION < 3
+ py_funcname = PyString_FromString(funcname);
+ #else
+ py_funcname = PyUnicode_FromString(funcname);
+ #endif
+ }
+ if (!py_funcname) goto bad;
+ py_code = __Pyx_PyCode_New(
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ __pyx_empty_bytes, /*PyObject *code,*/
+ __pyx_empty_tuple, /*PyObject *consts,*/
+ __pyx_empty_tuple, /*PyObject *names,*/
+ __pyx_empty_tuple, /*PyObject *varnames,*/
+ __pyx_empty_tuple, /*PyObject *freevars,*/
+ __pyx_empty_tuple, /*PyObject *cellvars,*/
+ py_srcfile, /*PyObject *filename,*/
+ py_funcname, /*PyObject *name,*/
+ py_line,
+ __pyx_empty_bytes /*PyObject *lnotab*/
+ );
+ Py_DECREF(py_srcfile);
+ Py_DECREF(py_funcname);
+ return py_code;
+bad:
+ Py_XDECREF(py_srcfile);
+ Py_XDECREF(py_funcname);
+ return NULL;
+}
+static void __Pyx_AddTraceback(const char *funcname, int c_line,
+ int py_line, const char *filename) {
+ PyCodeObject *py_code = 0;
+ PyFrameObject *py_frame = 0;
+ PyThreadState *tstate = __Pyx_PyThreadState_Current;
+ if (c_line) {
+ c_line = __Pyx_CLineForTraceback(tstate, c_line);
+ }
+ py_code = __pyx_find_code_object(c_line ? -c_line : py_line);
+ if (!py_code) {
+ py_code = __Pyx_CreateCodeObjectForTraceback(
+ funcname, c_line, py_line, filename);
+ if (!py_code) goto bad;
+ __pyx_insert_code_object(c_line ? -c_line : py_line, py_code);
+ }
+ py_frame = PyFrame_New(
+ tstate, /*PyThreadState *tstate,*/
+ py_code, /*PyCodeObject *code,*/
+ __pyx_d, /*PyObject *globals,*/
+ 0 /*PyObject *locals*/
+ );
+ if (!py_frame) goto bad;
+ __Pyx_PyFrame_SetLineNumber(py_frame, py_line);
+ PyTraceBack_Here(py_frame);
+bad:
+ Py_XDECREF(py_code);
+ Py_XDECREF(py_frame);
+}
+
+/* CIntToPy */
+static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value) {
+#ifdef __Pyx_HAS_GCC_DIAGNOSTIC
+#pragma GCC diagnostic push
+#pragma GCC diagnostic ignored "-Wconversion"
+#endif
+ const long neg_one = (long) -1, const_zero = (long) 0;
+#ifdef __Pyx_HAS_GCC_DIAGNOSTIC
+#pragma GCC diagnostic pop
+#endif
+ const int is_unsigned = neg_one > const_zero;
+ if (is_unsigned) {
+ if (sizeof(long) < sizeof(long)) {
+ return PyInt_FromLong((long) value);
+ } else if (sizeof(long) <= sizeof(unsigned long)) {
+ return PyLong_FromUnsignedLong((unsigned long) value);
+#ifdef HAVE_LONG_LONG
+ } else if (sizeof(long) <= sizeof(unsigned PY_LONG_LONG)) {
+ return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value);
+#endif
+ }
+ } else {
+ if (sizeof(long) <= sizeof(long)) {
+ return PyInt_FromLong((long) value);
+#ifdef HAVE_LONG_LONG
+ } else if (sizeof(long) <= sizeof(PY_LONG_LONG)) {
+ return PyLong_FromLongLong((PY_LONG_LONG) value);
+#endif
+ }
+ }
+ {
+ int one = 1; int little = (int)*(unsigned char *)&one;
+ unsigned char *bytes = (unsigned char *)&value;
+ return _PyLong_FromByteArray(bytes, sizeof(long),
+ little, !is_unsigned);
+ }
+}
+
+/* CIntFromPyVerify */
+#define __PYX_VERIFY_RETURN_INT(target_type, func_type, func_value)\
+ __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 0)
+#define __PYX_VERIFY_RETURN_INT_EXC(target_type, func_type, func_value)\
+ __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 1)
+#define __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, exc)\
+ {\
+ func_type value = func_value;\
+ if (sizeof(target_type) < sizeof(func_type)) {\
+ if (unlikely(value != (func_type) (target_type) value)) {\
+ func_type zero = 0;\
+ if (exc && unlikely(value == (func_type)-1 && PyErr_Occurred()))\
+ return (target_type) -1;\
+ if (is_unsigned && unlikely(value < zero))\
+ goto raise_neg_overflow;\
+ else\
+ goto raise_overflow;\
+ }\
+ }\
+ return (target_type) value;\
+ }
+
+/* CIntFromPy */
+static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *x) {
+#ifdef __Pyx_HAS_GCC_DIAGNOSTIC
+#pragma GCC diagnostic push
+#pragma GCC diagnostic ignored "-Wconversion"
+#endif
+ const long neg_one = (long) -1, const_zero = (long) 0;
+#ifdef __Pyx_HAS_GCC_DIAGNOSTIC
+#pragma GCC diagnostic pop
+#endif
+ const int is_unsigned = neg_one > const_zero;
+#if PY_MAJOR_VERSION < 3
+ if (likely(PyInt_Check(x))) {
+ if (sizeof(long) < sizeof(long)) {
+ __PYX_VERIFY_RETURN_INT(long, long, PyInt_AS_LONG(x))
+ } else {
+ long val = PyInt_AS_LONG(x);
+ if (is_unsigned && unlikely(val < 0)) {
+ goto raise_neg_overflow;
+ }
+ return (long) val;
+ }
+ } else
+#endif
+ if (likely(PyLong_Check(x))) {
+ if (is_unsigned) {
+#if CYTHON_USE_PYLONG_INTERNALS
+ const digit* digits = ((PyLongObject*)x)->ob_digit;
+ switch (Py_SIZE(x)) {
+ case 0: return (long) 0;
+ case 1: __PYX_VERIFY_RETURN_INT(long, digit, digits[0])
+ case 2:
+ if (8 * sizeof(long) > 1 * PyLong_SHIFT) {
+ if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) {
+ __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
+ } else if (8 * sizeof(long) >= 2 * PyLong_SHIFT) {
+ return (long) (((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0]));
+ }
+ }
+ break;
+ case 3:
+ if (8 * sizeof(long) > 2 * PyLong_SHIFT) {
+ if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) {
+ __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
+ } else if (8 * sizeof(long) >= 3 * PyLong_SHIFT) {
+ return (long) (((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]));
+ }
+ }
+ break;
+ case 4:
+ if (8 * sizeof(long) > 3 * PyLong_SHIFT) {
+ if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) {
+ __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
+ } else if (8 * sizeof(long) >= 4 * PyLong_SHIFT) {
+ return (long) (((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]));
+ }
+ }
+ break;
+ }
+#endif
+#if CYTHON_COMPILING_IN_CPYTHON
+ if (unlikely(Py_SIZE(x) < 0)) {
+ goto raise_neg_overflow;
+ }
+#else
+ {
+ int result = PyObject_RichCompareBool(x, Py_False, Py_LT);
+ if (unlikely(result < 0))
+ return (long) -1;
+ if (unlikely(result == 1))
+ goto raise_neg_overflow;
+ }
+#endif
+ if (sizeof(long) <= sizeof(unsigned long)) {
+ __PYX_VERIFY_RETURN_INT_EXC(long, unsigned long, PyLong_AsUnsignedLong(x))
+#ifdef HAVE_LONG_LONG
+ } else if (sizeof(long) <= sizeof(unsigned PY_LONG_LONG)) {
+ __PYX_VERIFY_RETURN_INT_EXC(long, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x))
+#endif
+ }
+ } else {
+#if CYTHON_USE_PYLONG_INTERNALS
+ const digit* digits = ((PyLongObject*)x)->ob_digit;
+ switch (Py_SIZE(x)) {
+ case 0: return (long) 0;
+ case -1: __PYX_VERIFY_RETURN_INT(long, sdigit, (sdigit) (-(sdigit)digits[0]))
+ case 1: __PYX_VERIFY_RETURN_INT(long, digit, +digits[0])
+ case -2:
+ if (8 * sizeof(long) - 1 > 1 * PyLong_SHIFT) {
+ if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) {
+ __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
+ } else if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) {
+ return (long) (((long)-1)*(((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0])));
+ }
+ }
+ break;
+ case 2:
+ if (8 * sizeof(long) > 1 * PyLong_SHIFT) {
+ if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) {
+ __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
+ } else if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) {
+ return (long) ((((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0])));
+ }
+ }
+ break;
+ case -3:
+ if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) {
+ if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) {
+ __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
+ } else if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) {
+ return (long) (((long)-1)*(((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])));
+ }
+ }
+ break;
+ case 3:
+ if (8 * sizeof(long) > 2 * PyLong_SHIFT) {
+ if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) {
+ __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
+ } else if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) {
+ return (long) ((((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])));
+ }
+ }
+ break;
+ case -4:
+ if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) {
+ if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) {
+ __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
+ } else if (8 * sizeof(long) - 1 > 4 * PyLong_SHIFT) {
+ return (long) (((long)-1)*(((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])));
+ }
+ }
+ break;
+ case 4:
+ if (8 * sizeof(long) > 3 * PyLong_SHIFT) {
+ if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) {
+ __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
+ } else if (8 * sizeof(long) - 1 > 4 * PyLong_SHIFT) {
+ return (long) ((((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])));
+ }
+ }
+ break;
+ }
+#endif
+ if (sizeof(long) <= sizeof(long)) {
+ __PYX_VERIFY_RETURN_INT_EXC(long, long, PyLong_AsLong(x))
+#ifdef HAVE_LONG_LONG
+ } else if (sizeof(long) <= sizeof(PY_LONG_LONG)) {
+ __PYX_VERIFY_RETURN_INT_EXC(long, PY_LONG_LONG, PyLong_AsLongLong(x))
+#endif
+ }
+ }
+ {
+#if CYTHON_COMPILING_IN_PYPY && !defined(_PyLong_AsByteArray)
+ PyErr_SetString(PyExc_RuntimeError,
+ "_PyLong_AsByteArray() not available in PyPy, cannot convert large numbers");
+#else
+ long val;
+ PyObject *v = __Pyx_PyNumber_IntOrLong(x);
+ #if PY_MAJOR_VERSION < 3
+ if (likely(v) && !PyLong_Check(v)) {
+ PyObject *tmp = v;
+ v = PyNumber_Long(tmp);
+ Py_DECREF(tmp);
+ }
+ #endif
+ if (likely(v)) {
+ int one = 1; int is_little = (int)*(unsigned char *)&one;
+ unsigned char *bytes = (unsigned char *)&val;
+ int ret = _PyLong_AsByteArray((PyLongObject *)v,
+ bytes, sizeof(val),
+ is_little, !is_unsigned);
+ Py_DECREF(v);
+ if (likely(!ret))
+ return val;
+ }
+#endif
+ return (long) -1;
+ }
+ } else {
+ long val;
+ PyObject *tmp = __Pyx_PyNumber_IntOrLong(x);
+ if (!tmp) return (long) -1;
+ val = __Pyx_PyInt_As_long(tmp);
+ Py_DECREF(tmp);
+ return val;
+ }
+raise_overflow:
+ PyErr_SetString(PyExc_OverflowError,
+ "value too large to convert to long");
+ return (long) -1;
+raise_neg_overflow:
+ PyErr_SetString(PyExc_OverflowError,
+ "can't convert negative value to long");
+ return (long) -1;
+}
+
+/* CIntFromPy */
+static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *x) {
+#ifdef __Pyx_HAS_GCC_DIAGNOSTIC
+#pragma GCC diagnostic push
+#pragma GCC diagnostic ignored "-Wconversion"
+#endif
+ const int neg_one = (int) -1, const_zero = (int) 0;
+#ifdef __Pyx_HAS_GCC_DIAGNOSTIC
+#pragma GCC diagnostic pop
+#endif
+ const int is_unsigned = neg_one > const_zero;
+#if PY_MAJOR_VERSION < 3
+ if (likely(PyInt_Check(x))) {
+ if (sizeof(int) < sizeof(long)) {
+ __PYX_VERIFY_RETURN_INT(int, long, PyInt_AS_LONG(x))
+ } else {
+ long val = PyInt_AS_LONG(x);
+ if (is_unsigned && unlikely(val < 0)) {
+ goto raise_neg_overflow;
+ }
+ return (int) val;
+ }
+ } else
+#endif
+ if (likely(PyLong_Check(x))) {
+ if (is_unsigned) {
+#if CYTHON_USE_PYLONG_INTERNALS
+ const digit* digits = ((PyLongObject*)x)->ob_digit;
+ switch (Py_SIZE(x)) {
+ case 0: return (int) 0;
+ case 1: __PYX_VERIFY_RETURN_INT(int, digit, digits[0])
+ case 2:
+ if (8 * sizeof(int) > 1 * PyLong_SHIFT) {
+ if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) {
+ __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
+ } else if (8 * sizeof(int) >= 2 * PyLong_SHIFT) {
+ return (int) (((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0]));
+ }
+ }
+ break;
+ case 3:
+ if (8 * sizeof(int) > 2 * PyLong_SHIFT) {
+ if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) {
+ __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
+ } else if (8 * sizeof(int) >= 3 * PyLong_SHIFT) {
+ return (int) (((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]));
+ }
+ }
+ break;
+ case 4:
+ if (8 * sizeof(int) > 3 * PyLong_SHIFT) {
+ if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) {
+ __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
+ } else if (8 * sizeof(int) >= 4 * PyLong_SHIFT) {
+ return (int) (((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]));
+ }
+ }
+ break;
+ }
+#endif
+#if CYTHON_COMPILING_IN_CPYTHON
+ if (unlikely(Py_SIZE(x) < 0)) {
+ goto raise_neg_overflow;
+ }
+#else
+ {
+ int result = PyObject_RichCompareBool(x, Py_False, Py_LT);
+ if (unlikely(result < 0))
+ return (int) -1;
+ if (unlikely(result == 1))
+ goto raise_neg_overflow;
+ }
+#endif
+ if (sizeof(int) <= sizeof(unsigned long)) {
+ __PYX_VERIFY_RETURN_INT_EXC(int, unsigned long, PyLong_AsUnsignedLong(x))
+#ifdef HAVE_LONG_LONG
+ } else if (sizeof(int) <= sizeof(unsigned PY_LONG_LONG)) {
+ __PYX_VERIFY_RETURN_INT_EXC(int, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x))
+#endif
+ }
+ } else {
+#if CYTHON_USE_PYLONG_INTERNALS
+ const digit* digits = ((PyLongObject*)x)->ob_digit;
+ switch (Py_SIZE(x)) {
+ case 0: return (int) 0;
+ case -1: __PYX_VERIFY_RETURN_INT(int, sdigit, (sdigit) (-(sdigit)digits[0]))
+ case 1: __PYX_VERIFY_RETURN_INT(int, digit, +digits[0])
+ case -2:
+ if (8 * sizeof(int) - 1 > 1 * PyLong_SHIFT) {
+ if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) {
+ __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
+ } else if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) {
+ return (int) (((int)-1)*(((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0])));
+ }
+ }
+ break;
+ case 2:
+ if (8 * sizeof(int) > 1 * PyLong_SHIFT) {
+ if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) {
+ __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
+ } else if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) {
+ return (int) ((((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0])));
+ }
+ }
+ break;
+ case -3:
+ if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) {
+ if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) {
+ __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
+ } else if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) {
+ return (int) (((int)-1)*(((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])));
+ }
+ }
+ break;
+ case 3:
+ if (8 * sizeof(int) > 2 * PyLong_SHIFT) {
+ if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) {
+ __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
+ } else if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) {
+ return (int) ((((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])));
+ }
+ }
+ break;
+ case -4:
+ if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) {
+ if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) {
+ __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
+ } else if (8 * sizeof(int) - 1 > 4 * PyLong_SHIFT) {
+ return (int) (((int)-1)*(((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])));
+ }
+ }
+ break;
+ case 4:
+ if (8 * sizeof(int) > 3 * PyLong_SHIFT) {
+ if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) {
+ __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
+ } else if (8 * sizeof(int) - 1 > 4 * PyLong_SHIFT) {
+ return (int) ((((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])));
+ }
+ }
+ break;
+ }
+#endif
+ if (sizeof(int) <= sizeof(long)) {
+ __PYX_VERIFY_RETURN_INT_EXC(int, long, PyLong_AsLong(x))
+#ifdef HAVE_LONG_LONG
+ } else if (sizeof(int) <= sizeof(PY_LONG_LONG)) {
+ __PYX_VERIFY_RETURN_INT_EXC(int, PY_LONG_LONG, PyLong_AsLongLong(x))
+#endif
+ }
+ }
+ {
+#if CYTHON_COMPILING_IN_PYPY && !defined(_PyLong_AsByteArray)
+ PyErr_SetString(PyExc_RuntimeError,
+ "_PyLong_AsByteArray() not available in PyPy, cannot convert large numbers");
+#else
+ int val;
+ PyObject *v = __Pyx_PyNumber_IntOrLong(x);
+ #if PY_MAJOR_VERSION < 3
+ if (likely(v) && !PyLong_Check(v)) {
+ PyObject *tmp = v;
+ v = PyNumber_Long(tmp);
+ Py_DECREF(tmp);
+ }
+ #endif
+ if (likely(v)) {
+ int one = 1; int is_little = (int)*(unsigned char *)&one;
+ unsigned char *bytes = (unsigned char *)&val;
+ int ret = _PyLong_AsByteArray((PyLongObject *)v,
+ bytes, sizeof(val),
+ is_little, !is_unsigned);
+ Py_DECREF(v);
+ if (likely(!ret))
+ return val;
+ }
+#endif
+ return (int) -1;
+ }
+ } else {
+ int val;
+ PyObject *tmp = __Pyx_PyNumber_IntOrLong(x);
+ if (!tmp) return (int) -1;
+ val = __Pyx_PyInt_As_int(tmp);
+ Py_DECREF(tmp);
+ return val;
+ }
+raise_overflow:
+ PyErr_SetString(PyExc_OverflowError,
+ "value too large to convert to int");
+ return (int) -1;
+raise_neg_overflow:
+ PyErr_SetString(PyExc_OverflowError,
+ "can't convert negative value to int");
+ return (int) -1;
+}
+
+/* FastTypeChecks */
+#if CYTHON_COMPILING_IN_CPYTHON
+static int __Pyx_InBases(PyTypeObject *a, PyTypeObject *b) {
+ while (a) {
+ a = a->tp_base;
+ if (a == b)
+ return 1;
+ }
+ return b == &PyBaseObject_Type;
+}
+static CYTHON_INLINE int __Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b) {
+ PyObject *mro;
+ if (a == b) return 1;
+ mro = a->tp_mro;
+ if (likely(mro)) {
+ Py_ssize_t i, n;
+ n = PyTuple_GET_SIZE(mro);
+ for (i = 0; i < n; i++) {
+ if (PyTuple_GET_ITEM(mro, i) == (PyObject *)b)
+ return 1;
+ }
+ return 0;
+ }
+ return __Pyx_InBases(a, b);
+}
+#if PY_MAJOR_VERSION == 2
+static int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject* exc_type2) {
+ PyObject *exception, *value, *tb;
+ int res;
+ __Pyx_PyThreadState_declare
+ __Pyx_PyThreadState_assign
+ __Pyx_ErrFetch(&exception, &value, &tb);
+ res = exc_type1 ? PyObject_IsSubclass(err, exc_type1) : 0;
+ if (unlikely(res == -1)) {
+ PyErr_WriteUnraisable(err);
+ res = 0;
+ }
+ if (!res) {
+ res = PyObject_IsSubclass(err, exc_type2);
+ if (unlikely(res == -1)) {
+ PyErr_WriteUnraisable(err);
+ res = 0;
+ }
+ }
+ __Pyx_ErrRestore(exception, value, tb);
+ return res;
+}
+#else
+static CYTHON_INLINE int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject *exc_type2) {
+ int res = exc_type1 ? __Pyx_IsSubtype((PyTypeObject*)err, (PyTypeObject*)exc_type1) : 0;
+ if (!res) {
+ res = __Pyx_IsSubtype((PyTypeObject*)err, (PyTypeObject*)exc_type2);
+ }
+ return res;
+}
+#endif
+static int __Pyx_PyErr_GivenExceptionMatchesTuple(PyObject *exc_type, PyObject *tuple) {
+ Py_ssize_t i, n;
+ assert(PyExceptionClass_Check(exc_type));
+ n = PyTuple_GET_SIZE(tuple);
+#if PY_MAJOR_VERSION >= 3
+ for (i=0; ip) {
+ #if PY_MAJOR_VERSION < 3
+ if (t->is_unicode) {
+ *t->p = PyUnicode_DecodeUTF8(t->s, t->n - 1, NULL);
+ } else if (t->intern) {
+ *t->p = PyString_InternFromString(t->s);
+ } else {
+ *t->p = PyString_FromStringAndSize(t->s, t->n - 1);
+ }
+ #else
+ if (t->is_unicode | t->is_str) {
+ if (t->intern) {
+ *t->p = PyUnicode_InternFromString(t->s);
+ } else if (t->encoding) {
+ *t->p = PyUnicode_Decode(t->s, t->n - 1, t->encoding, NULL);
+ } else {
+ *t->p = PyUnicode_FromStringAndSize(t->s, t->n - 1);
+ }
+ } else {
+ *t->p = PyBytes_FromStringAndSize(t->s, t->n - 1);
+ }
+ #endif
+ if (!*t->p)
+ return -1;
+ if (PyObject_Hash(*t->p) == -1)
+ return -1;
+ ++t;
+ }
+ return 0;
+}
+
+static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char* c_str) {
+ return __Pyx_PyUnicode_FromStringAndSize(c_str, (Py_ssize_t)strlen(c_str));
+}
+static CYTHON_INLINE const char* __Pyx_PyObject_AsString(PyObject* o) {
+ Py_ssize_t ignore;
+ return __Pyx_PyObject_AsStringAndSize(o, &ignore);
+}
+#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT
+#if !CYTHON_PEP393_ENABLED
+static const char* __Pyx_PyUnicode_AsStringAndSize(PyObject* o, Py_ssize_t *length) {
+ char* defenc_c;
+ PyObject* defenc = _PyUnicode_AsDefaultEncodedString(o, NULL);
+ if (!defenc) return NULL;
+ defenc_c = PyBytes_AS_STRING(defenc);
+#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII
+ {
+ char* end = defenc_c + PyBytes_GET_SIZE(defenc);
+ char* c;
+ for (c = defenc_c; c < end; c++) {
+ if ((unsigned char) (*c) >= 128) {
+ PyUnicode_AsASCIIString(o);
+ return NULL;
+ }
+ }
+ }
+#endif
+ *length = PyBytes_GET_SIZE(defenc);
+ return defenc_c;
+}
+#else
+static CYTHON_INLINE const char* __Pyx_PyUnicode_AsStringAndSize(PyObject* o, Py_ssize_t *length) {
+ if (unlikely(__Pyx_PyUnicode_READY(o) == -1)) return NULL;
+#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII
+ if (likely(PyUnicode_IS_ASCII(o))) {
+ *length = PyUnicode_GET_LENGTH(o);
+ return PyUnicode_AsUTF8(o);
+ } else {
+ PyUnicode_AsASCIIString(o);
+ return NULL;
+ }
+#else
+ return PyUnicode_AsUTF8AndSize(o, length);
+#endif
+}
+#endif
+#endif
+static CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject* o, Py_ssize_t *length) {
+#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT
+ if (
+#if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII
+ __Pyx_sys_getdefaultencoding_not_ascii &&
+#endif
+ PyUnicode_Check(o)) {
+ return __Pyx_PyUnicode_AsStringAndSize(o, length);
+ } else
+#endif
+#if (!CYTHON_COMPILING_IN_PYPY) || (defined(PyByteArray_AS_STRING) && defined(PyByteArray_GET_SIZE))
+ if (PyByteArray_Check(o)) {
+ *length = PyByteArray_GET_SIZE(o);
+ return PyByteArray_AS_STRING(o);
+ } else
+#endif
+ {
+ char* result;
+ int r = PyBytes_AsStringAndSize(o, &result, length);
+ if (unlikely(r < 0)) {
+ return NULL;
+ } else {
+ return result;
+ }
+ }
+}
+static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject* x) {
+ int is_true = x == Py_True;
+ if (is_true | (x == Py_False) | (x == Py_None)) return is_true;
+ else return PyObject_IsTrue(x);
+}
+static CYTHON_INLINE int __Pyx_PyObject_IsTrueAndDecref(PyObject* x) {
+ int retval;
+ if (unlikely(!x)) return -1;
+ retval = __Pyx_PyObject_IsTrue(x);
+ Py_DECREF(x);
+ return retval;
+}
+static PyObject* __Pyx_PyNumber_IntOrLongWrongResultType(PyObject* result, const char* type_name) {
+#if PY_MAJOR_VERSION >= 3
+ if (PyLong_Check(result)) {
+ if (PyErr_WarnFormat(PyExc_DeprecationWarning, 1,
+ "__int__ returned non-int (type %.200s). "
+ "The ability to return an instance of a strict subclass of int "
+ "is deprecated, and may be removed in a future version of Python.",
+ Py_TYPE(result)->tp_name)) {
+ Py_DECREF(result);
+ return NULL;
+ }
+ return result;
+ }
+#endif
+ PyErr_Format(PyExc_TypeError,
+ "__%.4s__ returned non-%.4s (type %.200s)",
+ type_name, type_name, Py_TYPE(result)->tp_name);
+ Py_DECREF(result);
+ return NULL;
+}
+static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x) {
+#if CYTHON_USE_TYPE_SLOTS
+ PyNumberMethods *m;
+#endif
+ const char *name = NULL;
+ PyObject *res = NULL;
+#if PY_MAJOR_VERSION < 3
+ if (likely(PyInt_Check(x) || PyLong_Check(x)))
+#else
+ if (likely(PyLong_Check(x)))
+#endif
+ return __Pyx_NewRef(x);
+#if CYTHON_USE_TYPE_SLOTS
+ m = Py_TYPE(x)->tp_as_number;
+ #if PY_MAJOR_VERSION < 3
+ if (m && m->nb_int) {
+ name = "int";
+ res = m->nb_int(x);
+ }
+ else if (m && m->nb_long) {
+ name = "long";
+ res = m->nb_long(x);
+ }
+ #else
+ if (likely(m && m->nb_int)) {
+ name = "int";
+ res = m->nb_int(x);
+ }
+ #endif
+#else
+ if (!PyBytes_CheckExact(x) && !PyUnicode_CheckExact(x)) {
+ res = PyNumber_Int(x);
+ }
+#endif
+ if (likely(res)) {
+#if PY_MAJOR_VERSION < 3
+ if (unlikely(!PyInt_Check(res) && !PyLong_Check(res))) {
+#else
+ if (unlikely(!PyLong_CheckExact(res))) {
+#endif
+ return __Pyx_PyNumber_IntOrLongWrongResultType(res, name);
+ }
+ }
+ else if (!PyErr_Occurred()) {
+ PyErr_SetString(PyExc_TypeError,
+ "an integer is required");
+ }
+ return res;
+}
+static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject* b) {
+ Py_ssize_t ival;
+ PyObject *x;
+#if PY_MAJOR_VERSION < 3
+ if (likely(PyInt_CheckExact(b))) {
+ if (sizeof(Py_ssize_t) >= sizeof(long))
+ return PyInt_AS_LONG(b);
+ else
+ return PyInt_AsSsize_t(b);
+ }
+#endif
+ if (likely(PyLong_CheckExact(b))) {
+ #if CYTHON_USE_PYLONG_INTERNALS
+ const digit* digits = ((PyLongObject*)b)->ob_digit;
+ const Py_ssize_t size = Py_SIZE(b);
+ if (likely(__Pyx_sst_abs(size) <= 1)) {
+ ival = likely(size) ? digits[0] : 0;
+ if (size == -1) ival = -ival;
+ return ival;
+ } else {
+ switch (size) {
+ case 2:
+ if (8 * sizeof(Py_ssize_t) > 2 * PyLong_SHIFT) {
+ return (Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0]));
+ }
+ break;
+ case -2:
+ if (8 * sizeof(Py_ssize_t) > 2 * PyLong_SHIFT) {
+ return -(Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0]));
+ }
+ break;
+ case 3:
+ if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) {
+ return (Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0]));
+ }
+ break;
+ case -3:
+ if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) {
+ return -(Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0]));
+ }
+ break;
+ case 4:
+ if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) {
+ return (Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0]));
+ }
+ break;
+ case -4:
+ if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) {
+ return -(Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0]));
+ }
+ break;
+ }
+ }
+ #endif
+ return PyLong_AsSsize_t(b);
+ }
+ x = PyNumber_Index(b);
+ if (!x) return -1;
+ ival = PyInt_AsSsize_t(x);
+ Py_DECREF(x);
+ return ival;
+}
+static CYTHON_INLINE PyObject * __Pyx_PyBool_FromLong(long b) {
+ return b ? __Pyx_NewRef(Py_True) : __Pyx_NewRef(Py_False);
+}
+static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t ival) {
+ return PyInt_FromSize_t(ival);
+}
+
+
+#endif /* Py_PYTHON_H */
diff --git a/diagnostics/etc_composites/util/tracker/gcd_v4.o b/diagnostics/etc_composites/util/tracker/gcd_v4.o
new file mode 100644
index 000000000..074b3726f
Binary files /dev/null and b/diagnostics/etc_composites/util/tracker/gcd_v4.o differ
diff --git a/diagnostics/etc_composites/util/tracker/gcd_v4.pyx b/diagnostics/etc_composites/util/tracker/gcd_v4.pyx
new file mode 100644
index 000000000..5bfcc1597
--- /dev/null
+++ b/diagnostics/etc_composites/util/tracker/gcd_v4.pyx
@@ -0,0 +1,24 @@
+# build in place with: python setup_gcd.py build_ext --inplace
+
cdef extern from "math.h":
    double sin(double x)
    double cos(double x)
    double atan2(double y, double x)
    double sqrt(double x)
    double pow(double x, double y)

def gcd(double lona, double lata, double lonb, double latb):
    """Great-circle distance in km between (lona, lata) and (lonb, latb).

    Coordinates are in degrees.  Uses the Vincenty/atan2 form of the
    great-circle formula, which stays numerically stable for nearly
    coincident and nearly antipodal points; 6372.795 km is the Earth's
    mean radius.

    Fix: only the first parameter was declared ``double``; the other
    three fell back to boxed Python objects, defeating the point of the
    C math calls.  All four are now typed (numeric callers are coerced
    exactly as before), and intermediates are C doubles.
    """
    cdef double xlona = lona * 0.0174532925   # degrees -> radians
    cdef double xlata = lata * 0.0174532925
    cdef double xlonb = lonb * 0.0174532925
    cdef double xlatb = latb * 0.0174532925
    cdef double sin_lata = sin(xlata)
    cdef double sin_latb = sin(xlatb)
    cdef double cos_lata = cos(xlata)
    cdef double cos_latb = cos(xlatb)
    cdef double cos_delta_lon = cos(xlonb - xlona)
    cdef double sin_delta_lon = sin(xlonb - xlona)
    cdef double d = atan2(sqrt(pow(cos_latb * sin_delta_lon, 2) +
                               pow(cos_lata * sin_latb -
                                   sin_lata * cos_latb * cos_delta_lon, 2)),
                          sin_lata * sin_latb +
                          cos_lata * cos_latb * cos_delta_lon) * 6372.795
    return d
diff --git a/diagnostics/etc_composites/util/tracker/grid2ij_v4.py b/diagnostics/etc_composites/util/tracker/grid2ij_v4.py
new file mode 100755
index 000000000..d605aa642
--- /dev/null
+++ b/diagnostics/etc_composites/util/tracker/grid2ij_v4.py
@@ -0,0 +1,9 @@
def grid2ij(k, im, jm):
    """Convert flat row-major index k of an (jm, im) grid to 2-D indices.

    Returns (i, j) = (column, row); (-1, -1) when k is past the last grid
    point (k >= im * jm).
    """
    if k >= im * jm:
        return -1, -1
    return k % im, k // im
diff --git a/diagnostics/etc_composites/util/tracker/grid_area_v4.py b/diagnostics/etc_composites/util/tracker/grid_area_v4.py
new file mode 100755
index 000000000..8b1b3c7d8
--- /dev/null
+++ b/diagnostics/etc_composites/util/tracker/grid_area_v4.py
@@ -0,0 +1,30 @@
def grid_area(math, the_grid, multiplier):
    """Area weight of a regular lat-lon grid cell (polar caps included).

    the_grid holds three latitudes; if the first and third are equal the
    cell is a polar cap bounded by the first two entries, otherwise the
    cell is bounded by the first and third.  The area is proportional to
    the difference of the sines of the bounding latitudes.

    NOTE: only verified for regular grids (per the original author).
    """
    first, second, third = the_grid[0], the_grid[1], the_grid[2]
    if first == third:
        # Polar cap: bounded by the first two latitudes.
        low, high = (first, second) if first < second else (second, first)
    else:
        low, high = (first, third) if first < third else (third, first)
    return abs(multiplier *
               (math.sin(math.radians(low)) - math.sin(math.radians(high))))
diff --git a/diagnostics/etc_composites/util/tracker/ij2grid_v4.py b/diagnostics/etc_composites/util/tracker/ij2grid_v4.py
new file mode 100755
index 000000000..081412af1
--- /dev/null
+++ b/diagnostics/etc_composites/util/tracker/ij2grid_v4.py
@@ -0,0 +1,8 @@
def ij2grid(j, i, im, jm):
    """Return the 1-D row-major grid index of (j, i) on an (jm, im) grid.

    i is the 'x' index in [0, im-1]; j is the 'y' index in [0, jm-1].
    Returns -1 when (i, j) is out of range -- the same sentinel the
    companion grid2ij uses.  (The original left k unbound on the
    out-of-range path, raising at 'return int(k)'.)
    """
    if i < im and j < jm:
        return int(j * im + i)
    return -1
diff --git a/diagnostics/etc_composites/util/tracker/jd_key_v4.py b/diagnostics/etc_composites/util/tracker/jd_key_v4.py
new file mode 100755
index 000000000..2388fee5b
--- /dev/null
+++ b/diagnostics/etc_composites/util/tracker/jd_key_v4.py
@@ -0,0 +1,2 @@
def jd_key(x):
    """Sort key: the Julian-date field stored at index 4 of a record."""
    julian_date = x[4]
    return julian_date
diff --git a/diagnostics/etc_composites/util/tracker/jj_calendar.py b/diagnostics/etc_composites/util/tracker/jj_calendar.py
new file mode 100644
index 000000000..8d54fd868
--- /dev/null
+++ b/diagnostics/etc_composites/util/tracker/jj_calendar.py
@@ -0,0 +1,250 @@
+import datetime as dt
+from dateutil.relativedelta import relativedelta
+
+
def get_time_info(the_time_units, times, calendar='standard'):
    '''
    Convert netcdf time values to the (datetimes, date_stamps, adates)
    triple needed by the main tracker code.

    Parameters
    ----------
    the_time_units : str
        The netcdf time units, e.g. "hours since 2001-01-01 00:00:00".
    times : iterable
        Timestep offsets as read from the netcdf file (hours only).
    calendar : str
        'standard' (== 'proleptic_gregorian' == 'julian') or '365_day'.

    Raises
    ------
    Exception if the unit prefix is anything other than "hours".

    Note: adates are kept in hours (not days) so that hourly data still
    increments by whole numbers -- see get_adate.
    '''
    # These calendars are all handled identically to 'standard'.
    # (Was a non-short-circuit bitwise `|` on two bools; `in` is the
    # idiomatic and short-circuiting equivalent.)
    if calendar in ('proleptic_gregorian', 'julian'):
        calendar = 'standard'

    # Reference date string from the units attribute -> datetime.
    start, delta_type = get_start_date(the_time_units)
    start_date = dt.datetime.strptime(start, '%Y-%m-%d %H:%M:%S')

    # Only "hours since ..." units are supported.
    if delta_type == 'hours':
        dtimes = [start_date + relativedelta(hours=float(i_time)) for i_time in times]
    else:
        raise Exception('jj_calendar.py: Unknown time units in netcdf input files.')

    # A 365_day calendar needs every post-Feb-29 stamp shifted forward.
    # This must happen BEFORE date_stamps/adates are derived.
    if calendar == '365_day':
        dtimes = adjust_dtimes(dtimes, calendar)

    date_stamps = get_datestamps(dtimes, calendar)

    # adates must be unique across multi-year runs, hence absolute hours
    # since 1900 rather than a per-file timestep counter.
    adates = [get_adate(x, calendar) for x in dtimes]

    return dtimes, date_stamps, adates
+
def get_adate(dtime, calendar='standard'):
    """Encode a datetime as a 9-digit zero-padded string of hours since
    1900-01-01 ("adate").

    Hours (not days) are used so hourly data increments by whole numbers.
    For '365_day', each elapsed year contributes exactly 365*24 hours and
    the Feb-29 position is skipped.
    """
    if (calendar == 'standard'):
        # Manual epoch of 1900-01-01; any code reverting adates back to
        # datetimes must account for this choice.
        adate_num = (dtime - dt.datetime(1900, 1, 1))

        # Days converted to hours plus the sub-day remainder in hours.
        # (No *100 scaling needed any more -- hours are already integral
        # for 1-hourly and 6-hourly data.)
        adate_num = float(adate_num.days)*24 + float(adate_num.seconds)/3600.
        adate = int(adate_num)
        adate = '%09d'%(adate)
    elif (calendar == '365_day'):

        # 365-day calendar: every year since 1900 contributes exactly
        # 365 days, then the offset within the current year is added.
        year = dtime.year

        # Offset of dtime from the start of its own year.
        start_yr = dt.datetime(dtime.year, 1, 1)
        delta_yr = (dtime - start_yr)

        # Whole years elapsed since the 1900 epoch.
        num_year = (year - 1900)

        # Hours contributed by the whole years (365 days each, no leaps).
        adate_num = float(num_year * 365)*24.
        if (check_leap(dtime)):
            # Leap year and at/after the Feb 29 position: the real-calendar
            # offset already includes the extra day, so no +24 here.
            if (dtime >= dt.datetime(dtime.year, 2, 29)):
                adate_num += delta_yr.total_seconds()/(3600)
            else:
                # Before Feb 29: normal calculation (+24h, i.e. the +1 day
                # the original day-based formula carried).
                adate_num += delta_yr.total_seconds()/(3600) + 24
        else:
            # Non-leap year: normal calculation (+24h).
            adate_num += delta_yr.total_seconds()/(3600) + 24

        # NOTE(review): the two branches above intentionally differ by one
        # day so Feb 29 collapses onto the no-leap timeline -- confirm
        # against adjust_dtimes when changing either.
        adate = '%09d'%(adate_num)
    return adate
+
def check_leap(dtime):
    """Return True when dtime falls in a leap year (a 366-day year)."""
    year = dtime.year
    # Length of the year: Dec 31 minus Jan 1, plus the starting day itself.
    days_in_year = (dt.datetime(year, 12, 31) - dt.datetime(year, 1, 1)).days + 1
    return days_in_year == 366
+
def get_datestamps(dtimes, calendar):
    """Format each datetime as 'YYYY MM DD HH' (calendar is unused here;
    dtimes are assumed already adjusted for the calendar)."""
    stamps = []
    for stamp_time in dtimes:
        stamps.append("%4d %02d %02d %02d" % (stamp_time.year, stamp_time.month,
                                              stamp_time.day, stamp_time.hour))
    return stamps
+
+
def get_start_date(the_time_units):
    '''
    Extract the reference date from a netcdf time-units attribute.
    e.g. 'hours since 2001-1-1 00:00:00' -> ('2001-01-01 00:00:00', 'hours')

    Returns (normalized "YYYY-MM-DD HH:MM:SS" string, unit word).
    Note: seconds are always forced to zero, matching the original
    behaviour (only hour and minute are read from the clock field).
    '''
    # Tokenize "UNIT since DATE CLOCK".
    tokens = ("%s" % (the_time_units)).split()
    date_parts = tokens[2].split("-")
    clock_parts = tokens[3].split(":")

    # Re-assemble with zero padding so strptime('%Y-%m-%d %H:%M:%S') works.
    stamp = "%04d-%02d-%02d %02d:%02d:%02d" % \
            (int(date_parts[0]), int(date_parts[1]),
             int(date_parts[2]), int(clock_parts[0]), int(clock_parts[1]),
             int(0))

    return stamp, tokens[0]
+
+
+# Jeyavinoth:
+# Functions below this are for debugging purposes, can be removed in final version
def compare_lists(li1, li2):
    '''
    Debug helper: elements that appear in only one of the two lists.
    Iterates li1 then li2 (duplicates preserved, order kept); ignores
    length differences.
    '''
    only_in_one = []
    for item in li1 + li2:
        if (item in li1) and (item in li2):
            continue
        only_in_one.append(item)
    return only_in_one
+
+# def revert_adate(adate):
+# '''
+# Function converts a sinlge adate to datetime
+# '''
+#
+# ordinal = np.int(adate/100.)
+# hours = (adate/100. - ordinal)*24
+# dtime = dt.datetime.fromordinal(np.int(adate/100.)) + relativedelta(hours=hours)
+# return dtime
+
def adjust_dtimes(dtimes, calendar='365_day'):
    '''
    Shift datetimes that were produced with standard-calendar arithmetic
    so they line up with a 365_day (no-leap) model calendar: every time
    the series crosses a Feb-29 position in a leap year, all subsequent
    stamps are pushed forward one extra day.

    Assumes dtimes is in increasing chronological order.
    Raises Exception for any calendar other than '365_day'.

    BUGFIX: the original reset prev_year to start_date.year inside the
    loop (its own comment says "set the prev_year to be current year"),
    so after the first year boundary the new-year branch fired on every
    timestep, re-arming adjust_flag and incrementing adjust_days once
    per timestep after Feb 29 of any later leap year.
    '''
    if calendar != '365_day':
        raise Exception('jj_calendar.py: Unknown Calendar')

    start_date = dtimes[0]

    # adjust_flag == True means "inside a leap year, Feb-29 position not
    # yet passed".  If the series starts past Feb 29 (or in a non-leap
    # year) nothing needs adjusting for the first year.
    adjust_flag = check_leap(start_date) and \
        start_date <= dt.datetime(start_date.year, 2, 29)

    # Cumulative whole days to add; grows by one per leap year crossed.
    adjust_days = 0

    new_dtimes = []
    prev_year = start_date.year
    for dtime in dtimes:
        if prev_year != dtime.year:
            # Entered a new year: a fresh leap year re-arms the adjustment
            # (the flag is consumed when Feb 29 is crossed below).
            if check_leap(dtime):
                adjust_flag = True
            # Track the year we are actually in (see BUGFIX note above).
            prev_year = dtime.year

        # Only armed inside leap years, so constructing Feb 29 is safe.
        if adjust_flag:
            if dtime >= dt.datetime(dtime.year, 2, 29):
                # Crossed the Feb-29 position: this and every later stamp
                # move forward one more day (Feb 29 becomes Mar 1).
                adjust_days += 1
                adjust_flag = False

        # timedelta is exactly equivalent to relativedelta for whole-day
        # shifts, and keeps this function stdlib-only.
        new_dtimes.append(dtime + dt.timedelta(days=adjust_days))

    return new_dtimes
+
diff --git a/diagnostics/etc_composites/util/tracker/jj_plots.py b/diagnostics/etc_composites/util/tracker/jj_plots.py
new file mode 100644
index 000000000..f37dddf38
--- /dev/null
+++ b/diagnostics/etc_composites/util/tracker/jj_plots.py
@@ -0,0 +1,35 @@
+import numpy as np
+import defines
+import os
+
+import matplotlib.pyplot as plt
+import scipy.io as sio
+import cartopy
+
# Debug/visualisation script: load one year of tracked cyclones from the
# .mat output and draw the first ~100 tracks on a north-polar map.
# NOTE(review): paths and model name come from the project module
# `defines`; assumes the .mat file holds a 'cyc' struct array with
# 'fulllat'/'fulllon' fields per track -- confirm against the writer.
year = 2000
mat_file = os.path.join(defines.read_folder, '%s_%d.mat'%(defines.model, year))
print(mat_file)

if (not os.path.exists(mat_file)):
    raise Exception ('.mat file does not exist.')

data = sio.loadmat(mat_file)
cyc = data['cyc'][0]

# Data are lat/lon, so transform from PlateCarree onto the polar view.
data_crs = cartopy.crs.PlateCarree()
ax = plt.axes(projection=cartopy.crs.NorthPolarStereo())
ax.coastlines()
ax.set_extent([-180, 180, 30, 90], crs=cartopy.crs.PlateCarree())

for i, track in enumerate(cyc):
    lat = np.squeeze(track['fulllat'])
    lon = np.squeeze(track['fulllon'])
    # Wrap 0-360 longitudes into +-180 for plotting.
    lon[lon > 180] -= 360
    ax.plot(lon, lat, 'b-*', transform=data_crs)
    # Only plot the first ~100 tracks to keep the figure readable.
    if(i > 100):
        break

plt.show()
+
+
+
diff --git a/diagnostics/etc_composites/util/tracker/make_screen_v4.py b/diagnostics/etc_composites/util/tracker/make_screen_v4.py
new file mode 100755
index 000000000..3b32de0ff
--- /dev/null
+++ b/diagnostics/etc_composites/util/tracker/make_screen_v4.py
@@ -0,0 +1,167 @@
def make_screen(jm,im,inv_wn,scale,row_start,row_end,ddlon,bot,top,dlat,dlon,
                start_lon,start_lat,faux_grids,meridional_tropical_debt,twopier,
                cos,radians,degrees,g2l,gcd):
    """Build, per latitude row, the set of 1-D grid offsets that fall within
    the zonal-wavenumber search screen around a center in that row.

    Returns a dict {center_j: [grid offsets]}.  Offsets in the left/right
    lists are relative to the row start/end and are meant to be shifted to
    the actual center column by the caller.

    NOTE(review): math helpers (cos, radians, degrees, g2l, gcd) are passed
    in as parameters; meridional_tropical_debt is indexed by row -- assumed
    len >= jm.  TODO confirm row_start/row_end are first/last 1-D indices
    of each row.
    """

    # This makes a screen on the zonal wavenumber by latitude and
    # each center (no matter its latitude) uses the same screen
    # for each latitude.
    close_by = {}
    lat_spread = {}
    lat_rows = {}

    # Wavenumber (zonal, Nz) is roughly the number of equally spaced and
    # sized lows around a given latitude circle. That is, a wavenumber
    # of 4 suggests 4 cyclones at that latitude at any given time. The
    # wavelength of these waves changes with latitude and is inversely
    # related to the wavenumber (higher wavenumber, fewer cyclones).
    #
    # Given that the model has a fixed number of longitude grids (columns)
    # the number of columns is fixed by wavenumber. Such that for
    # any given latitude the same number of columns are needed to screen
    # by wavenumber.
    wavelength_z = im*inv_wn
    wavelength_z = wavelength_z + wavelength_z*scale
    ncols = (wavelength_z - 1)*0.5 # number of grids on each side
                                   # not counting center
    ncols = int(round(ncols,0))

    # Assume a zonal wave at 45 degrees
    circumference = twopier * cos(radians(45.0))
    # Nrows is determined by half the zonal wavelength, and half
    # on each side of center
    meridional_synoptic_cuttoff = (inv_wn*circumference + inv_wn*circumference*scale)*0.25
    nrows = int(round(meridional_synoptic_cuttoff/111.0)/degrees(dlat))

    # Latitude row/j boundary where the tropical penalty is felt, which means
    # some latitude dependence of ncols,nrows and, indeed, some asymmetry.
    # This will be reflected in the_screen.
    trop_penalty_bot = row_start.index(bot) - nrows
    trop_penalty_top = row_start.index(top-im+1) + nrows

    the_screen = {}
    top_row = jm - 1 # due to count starting at zero

    # Loop over all allowed latitudes where a center can exist.
    for center_j in range(jm):
        matrix = []
        # Check to see if we can just use ncols,nrows
        if center_j <= trop_penalty_bot:
            # Southern Hemisphere, outside tropical penalty
            center_top = center_j+nrows
            center_bot = center_j-nrows
            # Add columns
            for bottom_up in range(center_bot,center_top+1):
                jlat = g2l(bottom_up,start_lon,start_lat,dlon,dlat,jm,lat_lon_flag="lat",
                           edge_flag=True,center_flag="center",faux_grids=faux_grids)
                if bottom_up < 0:
                    # polar wrap over, shift 180 degrees, same row
                    middle_pnt = row_start[-1*bottom_up] + im//2
                    matrix.extend(range(middle_pnt-ncols,middle_pnt+ncols+1))
                else:
                    middle_pnt = row_start[bottom_up]
                    left = range(row_end[bottom_up]-ncols+1,row_end[bottom_up]+1)
                    right = range(middle_pnt,middle_pnt+ncols+1)
                    matrix.extend(left)
                    matrix.extend(right)
        elif center_j >= trop_penalty_top:
            # Northern Hemisphere, outside tropical penalty
            center_top = center_j+nrows
            center_bot = center_j-nrows
            # Add columns
            for bottom_up in range(center_bot,center_top+1):
                if bottom_up > top_row:
                    # polar wrap over, shift 180 degrees, same row
                    jlat = g2l(top_row-(bottom_up-top_row),start_lon,start_lat,dlon,dlat,jm,lat_lon_flag="lat",
                               edge_flag=True,center_flag="center",faux_grids=faux_grids)
                    # polar wrap over, shift 180 degrees, same row
                    middle_pnt = row_start[top_row-(bottom_up-top_row)] + im//2
                    matrix.extend(range(middle_pnt-ncols,middle_pnt+ncols+1))
                else:
                    jlat = g2l(bottom_up,start_lon,start_lat,dlon,dlat,jm,lat_lon_flag="lat",
                               edge_flag=True,center_flag="center",faux_grids=faux_grids)
                    middle_pnt = row_start[bottom_up]
                    left = range(row_end[bottom_up]-ncols+1,row_end[bottom_up]+1)
                    right = range(middle_pnt,middle_pnt+ncols+1)
                    matrix.extend(left)
                    matrix.extend(right)
        else:
            # Possible region of tropical penalty, so need to find
            # distance to each included grid.

            # Apply a linear tropical penalty, which means a larger
            # dlon and dlat in the tropics.
            lat = g2l(center_j,start_lon,start_lat,dlon,dlat,jm,lat_lon_flag="lat",
                      edge_flag=True,center_flag="center",faux_grids=faux_grids)
            center_lat = lat[1]

            # Maximum number of rows to check in each direction
            center_top = center_j+nrows
            center_bot = center_j-nrows
            # IF SH(NH) then southern(northern) part uses nrows,ncols
            if center_j <= row_start.index(bot):
                # SH below tropics, hence no penalty.
                # Add columns
                calculate = 0

                for bottom_up in range(center_bot,center_top+1):
                    # Add SH part with tropical penalty, max possible equals nrows,ncols
                    if bottom_up > row_start.index(bot):
                        jlat = g2l(bottom_up,start_lon,start_lat,dlon,dlat,jm,lat_lon_flag="lat",
                                   edge_flag=True,center_flag="center",faux_grids=faux_grids)
                        dist = gcd(start_lon,jlat[1],start_lon,lat[1]) + meridional_tropical_debt[bottom_up]
                        # Use row until it exceeds cutoff
                        if dist <= meridional_synoptic_cuttoff:
                            # See how many columns to use, find dlon at this latitude
                            dlon_dist = gcd(start_lon,jlat[1],start_lon+ddlon,jlat[1])
                            circumference = twopier * cos(radians(jlat[1]))
                            zonal_synoptic_cuttoff = (inv_wn*circumference) - (10.0*meridional_tropical_debt[bottom_up])
                            new_ncols = (zonal_synoptic_cuttoff*0.5)/dlon_dist
                            new_ncols = int(round(new_ncols,0))
                            middle_pnt = row_start[bottom_up]
                            left = range(row_end[bottom_up]-new_ncols+1,row_end[bottom_up]+1)
                            right = range(middle_pnt,middle_pnt+new_ncols+1)
                            matrix.extend(left)
                            matrix.extend(right)
                    else:
                        middle_pnt = row_start[bottom_up]
                        left = range(row_end[bottom_up]-ncols+1,row_end[bottom_up]+1)
                        right = range(middle_pnt,middle_pnt+ncols+1)
                        matrix.extend(left)
                        matrix.extend(right)
            elif center_j >= row_start.index(top-im+1):
                # NH above tropics, hence no penalty.
                # Add columns
                for bottom_up in range(center_bot,center_top+1):
                    if bottom_up < row_start.index(top-im+1):
                        jlat = g2l(bottom_up,start_lon,start_lat,dlon,dlat,jm,lat_lon_flag="lat",
                                   edge_flag=True,center_flag="center",faux_grids=faux_grids)
                        dist = gcd(start_lon,jlat[1],start_lon,lat[1]) + meridional_tropical_debt[bottom_up]

                        # Use row until it exceeds cutoff
                        if dist <= meridional_synoptic_cuttoff:
                            # See how many columns to use, find dlon at this latitude
                            dlon_dist = gcd(start_lon,jlat[1],start_lon+ddlon,jlat[1])
                            circumference = twopier * cos(radians(jlat[1]))
                            zonal_synoptic_cuttoff = (inv_wn*circumference) - (10.0*meridional_tropical_debt[bottom_up])
                            new_ncols = (zonal_synoptic_cuttoff*0.5)/dlon_dist
                            new_ncols = int(round(new_ncols,0))
                            middle_pnt = row_start[bottom_up]
                            left = range(row_end[bottom_up]-new_ncols+1,row_end[bottom_up]+1)
                            right = range(middle_pnt,middle_pnt+new_ncols+1)
                            matrix.extend(left)
                            matrix.extend(right)
                    else:
                        middle_pnt = row_start[bottom_up]
                        left = range(row_end[bottom_up]-ncols+1,row_end[bottom_up]+1)
                        right = range(middle_pnt,middle_pnt+ncols+1)
                        matrix.extend(left)
                        matrix.extend(right)
        the_screen[center_j] = matrix
    return the_screen
diff --git a/diagnostics/etc_composites/util/tracker/make_unique_name_v4.py b/diagnostics/etc_composites/util/tracker/make_unique_name_v4.py
new file mode 100755
index 000000000..016a92013
--- /dev/null
+++ b/diagnostics/etc_composites/util/tracker/make_unique_name_v4.py
@@ -0,0 +1,12 @@
def make_unique_name(os, basename, extension):
    """Return 'basename_NNN<extension>' for the smallest NNN (zero-padded
    to three digits) that does not already exist on disk.

    The os module is passed in as a parameter (original calling style).
    """
    tag = 0
    candidate = "%s_%03d%s" % (basename, tag, extension)
    while os.path.exists(candidate):
        tag += 1
        candidate = "%s_%03d%s" % (basename, tag, extension)
    return candidate
diff --git a/diagnostics/etc_composites/util/tracker/plot_map_v4.py b/diagnostics/etc_composites/util/tracker/plot_map_v4.py
new file mode 100755
index 000000000..27ae49244
--- /dev/null
+++ b/diagnostics/etc_composites/util/tracker/plot_map_v4.py
@@ -0,0 +1,2427 @@
+import numpy,math
+from numpy import ma
+import matplotlib.pyplot as plt
+from mpl_toolkits.basemap import Basemap, shiftgrid, addcyclic
+
+# 10-08 fixed memory leak, moved using plt.close() and moving basemap instantiation.
+
+class plotmap():
+ """Class to create simple pcolor map (Cylindrical Equidistant, global )"""
+
+ __author__ = "Mike Bauer "
+ __status__ = "alpha"
+ __version__ = "0.1 "+__status__
+ __license__ = "GNU General Public License"
+ __date__ = "Created: 16 April 2008 Updated:"
+
    def __init__(self,**kwargs):
        """Create an instance to plot a full global field.  Note z is not
        passed in, as we only need to create a map object once (for
        looping efficiency).

        Keyword args:
          lon_0          : central longitude (default 180.0)
          discrete       : number of discrete colormap levels (0 = continuous)
          color_scheme   : matplotlib colormap name (default 'bone')
          cints          : (min, max) color limits
          clevs          : (start, stop, step) contour levels
          missing        : values <= this are masked
          nocolorbar     : presence of the key suppresses the colorbar
        """

        if 'lon_0' in kwargs:
            self.lon_0 = kwargs['lon_0']
        else:
            self.lon_0 = 180.0

        if 'discrete' in kwargs:
            self.discrete = kwargs['discrete']
        else:
            self.discrete = 0

        # Colormap: discrete variant when a level count was requested.
        if 'color_scheme' in kwargs:
            if self.discrete:
                # Make non-continuous
                self.color_scheme = plt.cm.get_cmap(kwargs['color_scheme'],self.discrete)
            else:
                self.color_scheme = plt.cm.__dict__[kwargs['color_scheme']]
        else:
            if self.discrete:
                # Make non-continuous
                self.color_scheme = plt.cm.get_cmap('bone',self.discrete)
            else:
                self.color_scheme = plt.cm.bone

        # Set color for masked values.
        # NOTE(review): in the continuous branch this mutates the shared
        # global colormap object (plt.cm.bone), affecting other plots in
        # the process -- consider copying the colormap first.
        self.color_scheme.set_bad('grey')

        if 'cints' in kwargs:
            self.cints = kwargs['cints']
        else:
            self.cints = None

        if 'clevs' in kwargs:
            self.clevs = kwargs['clevs']
        else:
            self.clevs = None

        if 'missing' in kwargs:
            self.missing = kwargs['missing']
        else:
            self.missing = None

        # Any 'nocolorbar' key (whatever its value) disables the colorbar.
        if 'nocolorbar' in kwargs:
            self.cbar = False
        else:
            self.cbar = True

        # Default Cylindrical Equidistant, global.  Alternative projections
        # (Lambert Conformal regional, south-polar stereographic) were used
        # historically -- see version control history for the parameters.
        self.m = Basemap(resolution='c',projection='cyl',
                         lon_0=self.lon_0,
                         area_thresh=300000)
+
+ def create_fig(self):
+ # Create figure.
+ golden_ratio = 1.61803399
+# # for imovie at 120 dpi, want 960x540
+# golden_ratio = 1.77777777
+# width = 8
+
+ width = 10
+ hieght = width/golden_ratio
+
+ # Create basis for map
+# self.fig = plt.figure(figsize=(width,hieght),frameon=False) # frameon transparent background
+ self.fig = plt.figure(figsize=(width,hieght),frameon=True)
+
+ ## Set axis
+ #if self.cbar:
+ # # color bar
+ # self.ax = self.fig.add_axes([0.08,0.05,0.9,0.9])
+ #else:
+ # # no color bar
+ # self.ax = self.fig.add_axes([0.08,0.05,0.9,0.9])
+ self.ax = self.fig.add_subplot(1,1,1) # 1 row, 1 column, first plot
+
+ self.shifted = False
+
    def add_field(self,x,y,z,**kwargs):
        """Draw the field z(y, x) onto the map.

        x, y are 1-D longitude/latitude arrays; z is flattened or 2-D data
        that is reshaped to (jm, im).  kwargs: ptype in
        {'pcolor' (default), 'imshow', 'contour'}; colorbar_label str.
        Also draws coastlines, graticule, and (optionally) the colorbar.
        """

        self.x = numpy.array(x) # 1d array of longitudes
        self.y = numpy.array(y) # 1d array of latitudes
        self.im = len(x)
        self.jm = len(y)
        self.z = z # 2d array of data

        if 'ptype' in kwargs:
            self.ptype = kwargs['ptype']
        else:
            self.ptype = 'pcolor'

        if 'colorbar_label' in kwargs:
            self.colorbar_label = kwargs['colorbar_label']
        else:
            self.colorbar_label = ""

        # Tweak data for plotting

        # Reshape z
        self.z.shape = (self.jm,self.im)

        # Add cyclic longitude to data and longitudes
        self.z,self.x = addcyclic(self.z,self.x)

        # JJ fix for the cyclic longitude wrapping below the previous one:
        # longitudes must increase monotonically.
        if (self.x[-1] < self.x[-2]):
            self.x[-1] += 360.

        # Compute map projection coordinates of grid
        lons,lats,self.xx,self.yy = self.m.makegrid(len(self.x),len(self.y), returnxy=True)

        if self.ptype == 'pcolor':
            # The NRA grid is a rectangular lat/lon grid (each data row is a
            # line of constant latitude, each column of equal longitude).
            # pcolor draws a panel between the (i,j),(i+1,j),(i+1,j+1),(i,j+1)
            # coordinates with the color of the data value at (i,j), so
            # everything appears shifted by half a pixel: pcolor is pixel
            # registered (lon,lat give the EDGES of the grid).
            #
            # Simple (imperfect) fix: shift self.z via averaging of the four
            # surrounding values.  pcolormesh handles missing values better.
            z = 0.5*(self.z[:-1,:-1]+self.z[1:,1:])
            # Mask missing (shiftgrid removes original masking)
            # NOTE(review): '!= None' should be 'is not None'.
            if self.missing != None:
                z = numpy.ma.masked_where(z <= self.missing,z)
            self.the_image = self.m.pcolormesh(self.x,self.y,z,shading='flat',
                                               cmap=self.color_scheme)

            # Adjust range of colors.
            if self.cints:
                self.the_image.set_clim(self.cints[0],self.cints[1])

        elif self.ptype == 'imshow':

            # Heavy grid marks
            self.the_image = self.m.imshow(self.z,cmap=self.color_scheme,ax=self.ax)
            # Adjust range of colors.
            if self.cints:
                self.the_image.set_clim(self.cints[0],self.cints[1])

        elif self.ptype == 'contour':

            # Mask missing (shiftgrid removes original masking)
            if self.missing != None:
                self.z = numpy.ma.masked_where(self.z <= self.missing,self.z)

            lw = 1.0
            contours = range(self.clevs[0],self.clevs[1],self.clevs[2])

            scale = 2 # amount smoothed
            if self.m.projection=='cyl':
                # projection in degrees longitude, so just scale grid
                nx = (len(self.x)-1)*scale
                ny = len(self.y)*scale
            else:
                # self.m.xmax not in degrees longitude, so size the
                # interpolation grid in projection meters (~40 km spacing).
                nx = int((self.m.xmax-self.m.xmin)/40000.)+1; ny = int((self.m.ymax-self.m.ymin)/40000.)+1
                # Shift data and longitudes to start at start_lon;
                # also converts 0-360 to +-180 format, skip if unneeded.
                dx = abs(self.x[0]-self.x[1])
                if abs(self.x[0]-self.lon_0) > dx:
                    self.z,self.x = shiftgrid(self.lon_0,self.z,self.x,start=False)

            # Interpolate to the (nx, ny) plotting grid.
            self.z,self.xx,self.yy = self.m.transform_scalar(
                self.z,self.x,self.y,nx,ny,returnxy=True)

            self.the_image = self.m.contourf(self.xx,self.yy,self.z,contours,
                                             cmap=self.color_scheme,ax=self.ax)
            self.the_image2 = self.m.contour(self.xx,self.yy,self.z,contours,
                                             colors ='k',linewidths=lw,ax=self.ax)

            # label every other contour
            labels = []
            for label in range(self.clevs[0],self.clevs[1],self.clevs[2]*2):
                labels.append(label)
            self.the_image3 = plt.clabel(self.the_image2,labels,fmt = '%d')

        # draw coastlines
        self.m.drawcoastlines(linewidth=0.25,ax=self.ax)

        # setup graticule: parallels every 30 deg, meridians every 60 deg
        delat = 30.
        circles = numpy.arange(0.,90.+delat,delat).tolist()+\
                  numpy.arange(-delat,-90.-delat,-delat).tolist()
        delon = 60.
        meridians = numpy.arange(0,360,delon)

        # draw parallels (labels flags: left, right, top, bottom)
        self.m.drawparallels(circles,labels=[1,0,0,0],ax=self.ax)

        # draw meridians
        self.m.drawmeridians(meridians,labels=[0,0,0,1],ax=self.ax)

        if self.cbar:
            # add a colorbar; discrete colormaps get no out-of-range arrows
            if self.discrete:
                self.extend = 'neither'
            else:
                self.extend = 'both'
            aa = self.fig.colorbar(self.the_image,orientation='horizontal',
                                   extend=self.extend,spacing='uniform',ticks=None,
                                   fraction=0.1,pad=0.09,aspect=40)
            if self.colorbar_label:
                aa.ax.set_xlabel(self.colorbar_label,fontsize='small')
+
+ def finish(self,pname,title=None):
+ # Finalize and write the figure: optionally set a title, pick a dpi
+ # based on the output extension, save to `pname`, then close *all*
+ # matplotlib figures to release this instance's plot objects.
+ # pname: output file path; a name ending in "png" selects dpi 140,
+ #        anything else dpi 144.
+ # title: optional plot title, drawn with fontsize='small'.
+
+ # Add title
+ if title:
+ plt.title(title,fontsize='small')
+
+ if pname.endswith("png"):
+ self.dpi = 140
+# # for movies
+# self.dpi = 240
+ else:
+ self.dpi = 144
+ #self.dpi = 300
+
+ # If too much crop set pad_inches=0.03
+ self.fig.savefig(pname,
+ dpi=self.dpi,
+ facecolor='w',
+ edgecolor='w',
+ orientation='landscape',
+ bbox_inches='tight', pad_inches=0.03)
+
+ ## Trim white space
+ #if pname.endswith("png"):
+ # import os
+ # (dirName, fileName) = os.path.split(pname)
+ # (fileBaseName, fileExtension)=os.path.splitext(fileName)
+ # tmp = '%s/%s%s' % (dirName,'t',fileExtension)
+ # cmd = 'convert %s -trim -trim %s' % (pname,tmp)
+ # os.system(cmd)
+ # os.rename(tmp,pname)
+ #elif pname.endswith("eps"):
+ # import os
+ # (dirName, fileName) = os.path.split(pname)
+ # (fileBaseName, fileExtension)=os.path.splitext(fileName)
+ # cmd = 'ps2eps -B -C %s' % (pname)
+ # os.system(cmd)
+ # tmp = pname+".eps"
+ # os.rename(tmp,pname)
+
+## cut sometimes
+# # Trim white space
+# import os
+# cmd = 'convert %s -trim -trim %s' % (pname,'t.png')
+# os.system(cmd)
+# os.rename('t.png',pname)
+
+ #self.fig.clf() # clear figure
+ # plt.close('all') tears down every open figure, not just self.fig —
+ # deliberate here; use finish_nokill() if figures must stay alive.
+ plt.close('all') # kill all objects for this instance.
+
+ def finish_nokill(self,pname,title=None):
+ # Same as finish() but does NOT call plt.close, so the figure stays
+ # open for further drawing. Also differs in two deliberate details:
+ # the title uses the default font size (not 'small') and the figure
+ # is saved with pad_inches=0.0 instead of 0.03.
+ # pname: output file path; "png" -> dpi 140, otherwise dpi 144.
+ # title: optional plot title.
+
+ # Add title
+ if title:
+ plt.title(title)
+
+ if pname.endswith("png"):
+ self.dpi = 140
+# # for movies
+# self.dpi = 240
+ else:
+ self.dpi = 144
+
+ # If too much crop set pad_inches=0.03
+ self.fig.savefig(pname,
+ dpi=self.dpi,
+ facecolor='w',
+ edgecolor='w',
+ orientation='landscape',
+ bbox_inches='tight', pad_inches=0.0)
+
+ ## Trim white space
+ #if pname.endswith("png"):
+ # import os
+ # (dirName, fileName) = os.path.split(pname)
+ # (fileBaseName, fileExtension)=os.path.splitext(fileName)
+ # tmp = '%s/%s%s' % (dirName,'t',fileExtension)
+ # cmd = 'convert %s -trim -trim %s' % (pname,tmp)
+ # os.system(cmd)
+ # os.rename(tmp,pname)
+ #elif pname.endswith("eps"):
+ # import os
+ # (dirName, fileName) = os.path.split(pname)
+ # (fileBaseName, fileExtension)=os.path.splitext(fileName)
+ # cmd = 'ps2eps -B -C %s' % (pname)
+ # os.system(cmd)
+ # tmp = pname+".eps"
+ # os.rename(tmp,pname)
+
+ def add_contour(self,x,y,z,**kwargs):
+ # Draw (optionally filled) contours of a global field on the map.
+ # x, y: 1-d longitude / latitude arrays; z: data reshaped in place to
+ # (self.jm, self.im) — assumes self.jm/self.im were set by the class
+ # constructor to match this grid (not visible in this chunk; confirm).
+ # The data is made cyclic in longitude, shifted to +/-180 format, and
+ # regridded (smoothed by `scale`) onto the native projection grid.
+ # kwargs:
+ #   clevs=(lo, hi, step): integer contour levels fed to range();
+ #   filled: presence selects contourf with self.color_scheme;
+ #   cmap: presence selects colored contour lines — NOTE(review): the
+ #         kwarg's *value* is ignored, self.color_scheme is always
+ #         used, so 'cmap' acts only as a flag;
+ #   linewidths: contour line width (default 1.0).
+
+ self.cx = numpy.array(x) # 1d array of longiudes
+ self.cy = numpy.array(y) # 1d array of latitudes
+ self.cz = z # 2d array of data
+
+ # Reshape z
+ self.cz.shape = (self.jm,self.im)
+
+ # Add cyclic longitude to data and longitudes
+ self.cz,self.cx = addcyclic(self.cz,self.cx)
+
+ # Shift data and longitudes to start at start_lon
+ # also converts 0-360 to +-180 format
+ self.cz,self.cx = shiftgrid(180.0,self.cz,self.cx,start=False)
+
+ # transform to nx x ny regularly spaced native projection grid
+ # nx and ny chosen to have roughly the same horizontal res as original image
+ # times scale
+
+ # Smooth
+ scale = 2.0 # amount smoothed
+
+ # For Cylindrical Equidistant, global this works
+ nx = int(len(self.cx)*scale)
+ ny = int(len(self.cy)*scale)
+
+# # if not cyl
+# dx = 2.0*math.pi*self.m.rmajor/len(self.cx)
+# nx = int((self.m.xmax-self.m.xmin)/dx)+1
+# ny = int((self.m.ymax-self.m.ymin)/dx)+1
+
+ self.czz,self.cxx,self.cyy = self.m.transform_scalar(
+ self.cz,self.cx,self.cy,nx,ny,returnxy=True)
+
+ if 'linewidths' in kwargs:
+ lw = kwargs['linewidths']
+ else:
+ lw = 1.0
+ if 'clevs' in kwargs:
+ # range() requires integer clevs entries; floats would raise TypeError.
+ contours = range(kwargs['clevs'][0],kwargs['clevs'][1],kwargs['clevs'][2])
+ if 'filled' in kwargs:
+ self.the_image = self.m.contourf(self.cxx,self.cyy,self.czz,contours,
+ cmap=self.color_scheme)
+ else:
+ if 'cmap' in kwargs:
+ self.the_image = self.m.contour(self.cxx,self.cyy,self.czz,contours,
+ cmap=self.color_scheme,linewidths=lw)
+ else:
+ self.the_image = self.m.contour(self.cxx,self.cyy,self.czz,contours,
+ colors ='k',linewidths=lw)
+ else:
+ # No explicit levels: let matplotlib pick contour levels.
+ if 'filled' in kwargs:
+ self.the_image = self.m.contourf(self.cxx,self.cyy,self.czz,
+ cmap=self.color_scheme)
+ else:
+ if 'cmap' in kwargs:
+ self.the_image = self.m.contour(self.cxx,self.cyy,self.czz,
+ cmap=self.color_scheme,linewidths=lw)
+ else:
+ self.the_image = self.m.contour(self.cxx,self.cyy,self.czz,
+ colors ='k',linewidths=lw)
+
+ def add_pnts(self,*args,**kwargs):
+ self.p = args[0] # 1d array of pnt (lon,lat) tuples
+
+ if len(args) > 1:
+ # list of point labels
+ self.pnt_names = args[1]
+
+ if 'marker' in kwargs:
+ self.marker = kwargs['marker']
+ else:
+ self.marker = 'x'
+
+ if 'msize' in kwargs:
+ self.msize = kwargs['msize']
+ else:
+ self.msize = None
+
+ if 'mfc' in kwargs:
+ self.mfc = kwargs['mfc']
+ else:
+ self.mfc = None
+
+ if 'mec' in kwargs:
+ self.mec = kwargs['mec']
+ else:
+ self.mec = None
+
+ if 'lw' in kwargs:
+ self.lw = kwargs['lw']
+ else:
+ self.lw = None
+
+ if 'zorder' in kwargs:
+ # used to order pnt and contours
+ self.zorder = kwargs['zorder']
+ else:
+ self.zorder = None
+
+ # prep pnts
+ self.pnt_x = [x[0] for x in self.p]
+ self.pnt_y = [x[1] for x in self.p]
+
+ # Pcolor requires that everything be shifted by a half grid inc
+ # so the grids have moved to line up correctly I need to do the same
+ if self.shifted:
+ delon = self.x[1]-self.x[0]
+ delat = self.y[1]-self.y[0]
+ x = self.pnt_x[:]
+ self.pnt_x = x - 0.5*delon
+ y = self.pnt_y[:]
+ self.pnt_y = y + 0.5*delat
+
+ # Compute native map projection coordinates for lat/lon grid.
+ self.pnt_x, self.pnt_y = self.m(self.pnt_x,self.pnt_y)
+ # plot pnts over map/image
+ if self.zorder:
+ self.pnt_image = self.m.plot(self.pnt_x,self.pnt_y,self.marker,
+ markersize=self.msize,
+ markerfacecolor=self.mfc,
+ markeredgecolor=self.mec,
+ linewidth=self.lw,zorder=self.zorder)
+ else:
+ self.pnt_image = self.m.plot(self.pnt_x,self.pnt_y,self.marker,
+ markersize=self.msize,
+ markerfacecolor=self.mfc,
+ markeredgecolor=self.mec,
+ linewidth=self.lw)
+ # Add label to point
+ if len(args) > 1:
+ for self.i in range(len(self.pnt_x)):
+ self.pnt_image = plt.text(self.pnt_x[self.i],self.pnt_y[self.i],
+ " %d" % self.pnt_names[self.i],size='xx-small')
+
+class plotmap_polar(plotmap):
+ """Create an instance to plot polar """
+
+ def __init__(self,**kwargs):
+ """Create an instance to plot full global field. Note z not passed in
+ as we only need to create a map object once (for looping efficiency).
+ """
+ # Recognized kwargs (all optional): lon_0, discrete, discretee,
+ # color_scheme, colorbar_label, cints, clevs, missing, nocolorbar,
+ # clabels, hemi ('nh'/'sh'), mproj ('laea'/'stere'/'aeqd'/'ortho'),
+ # bounding_lat. Builds self.m, a Basemap polar projection.
+ # NOTE(review): inherits plotmap (defined earlier in this file, not
+ # shown here) and overrides several of its plotting methods.
+
+ if 'lon_0' in kwargs:
+ self.lon_0 = kwargs['lon_0']
+ else:
+ self.lon_0 = 180.0
+
+ if 'discrete' in kwargs:
+ self.discrete = kwargs['discrete']
+ else:
+ self.discrete = 0
+
+ # Discrete w/ arrows on colorbar
+ if 'discretee' in kwargs:
+ self.discretee = kwargs['discretee']
+ else:
+ self.discretee = None
+
+ # NOTE(review): plt.cm.get_cmap(name, N) is deprecated/removed in
+ # recent matplotlib; fine on the versions this code targets.
+ if 'color_scheme' in kwargs:
+ if self.discrete:
+ # Make non-continuous
+ self.color_scheme = plt.cm.get_cmap(kwargs['color_scheme'],self.discrete)
+ else:
+ self.color_scheme = plt.cm.__dict__[kwargs['color_scheme']]
+ else:
+ if self.discrete:
+ # Make non-continuous
+ self.color_scheme = plt.cm.get_cmap('bone',self.discrete)
+ else:
+ self.color_scheme = plt.cm.bone
+
+ # Control how colorbar ends work
+ if self.discrete:
+ self.extend = 'neither'
+ else:
+ self.extend = 'both'
+
+ # NOTE(review): when color_scheme is plt.cm.bone these setters mutate
+ # what may be a shared global colormap object — confirm acceptable.
+ # Set color for masked values
+ self.color_scheme.set_bad('grey')
+ # Set colors of out of bounds colorbars
+ self.color_scheme.set_under('white')
+ self.color_scheme.set_over('black')
+
+ if 'colorbar_label' in kwargs:
+ self.colorbar_label = kwargs['colorbar_label']
+ else:
+ self.colorbar_label = ""
+
+ # cints: (min, max) color limits for pcolormesh, see add_pcolor.
+ if 'cints' in kwargs:
+ self.cints = kwargs['cints']
+ else:
+ self.cints = None
+
+ # clevs: (lo, hi, step) integer contour levels, see add_contour.
+ if 'clevs' in kwargs:
+ self.clevs = kwargs['clevs']
+ else:
+ self.clevs = None
+
+ # missing: values <= this threshold are masked before plotting.
+ if 'missing' in kwargs:
+ self.missing = kwargs['missing']
+ else:
+ self.missing = None
+
+ if 'nocolorbar' in kwargs:
+ self.cbar = False
+ else:
+ self.cbar = True
+
+ if 'clabels' in kwargs:
+ self.clabels = kwargs['clabels']
+ else:
+ self.clabels = False
+
+ if 'hemi' in kwargs:
+ self.hemi = kwargs['hemi']
+ else:
+ self.hemi = 'nh'
+
+ # these are the 4 polar projections
+ projs = ['laea','stere','aeqd','ortho']
+
+ # Prefix the projection with np/sp for the chosen hemisphere.
+ if 'mproj' in kwargs:
+ if self.hemi == 'sh':
+ self.mproj = 'sp'+kwargs['mproj']
+ else:
+ self.mproj = 'np'+kwargs['mproj']
+ else:
+ self.mproj = 'nplaea'
+
+ # NOTE(review): sys.exit in a constructor kills the whole process;
+ # raising ValueError would be friendlier to callers.
+ if self.mproj == 'spstere' and self.hemi == 'nh':
+ import sys; sys.exit("Hemisphere Projection Miss Match")
+ if self.mproj == 'npstere' and self.hemi == 'sh':
+ import sys; sys.exit("Hemisphere Projection Miss Match")
+
+ # polarity: +1 for northern hemisphere, -1 for southern.
+ self.lat_0 = 90.
+ self.polarity = 1.0
+ if self.hemi == 'sh':
+ self.polarity = -1.0
+ self.lat_0 = -90.
+
+ if 'bounding_lat' in kwargs:
+ self.bounding_lat = kwargs['bounding_lat']
+ else:
+ # Default is
+ self.bounding_lat = 30.0*self.polarity
+
+ if self.bounding_lat < 0.0 and self.hemi == 'nh':
+ import sys; sys.exit("Hemisphere Bounding_Lat Miss Match")
+ if self.bounding_lat > 0.0 and self.hemi == 'sh':
+ import sys; sys.exit("Hemisphere Bounding_Lat Miss Match")
+
+ # 'Lambert Azimuthal Equal Area' laea
+ # It accurately represents area in all regions of the sphere, but it does not
+ # does not preserve angular relationships among curves on the sphere.
+ # The longitude lon_0 is at 6-o'clock, and the latitude circle boundinglat
+ # is tangent to the edge of the map at lon_0.
+
+ # 'Stereographic' Equal-Angle 'npstere','spstere'
+ # It is conformal, meaning that it preserves angles. It is neither isometric nor
+ # area-preserving: that is, it preserves neither distances nor the areas of figures.
+ # Its main use is for mapping the polar regions. In the polar aspect all meridians
+ # are straight lines and parallels are arcs of circles.
+ # The longitude lon_0 is at 6-o'clock, and the
+ # latitude circle boundinglat is tangent to the edge
+ # of the map at lon_0. Default value of lat_ts
+ # (latitude of true scale) is pole.
+
+ # 'Azimuthal Equidistant' aeqd
+ # The most noticeable feature of this azimuthal projection is the fact that
+ # distances measured from the center are true. Therefore, a circle about
+ # the projection center defines the locus of points that are equally far
+ # away from the plot origin. Furthermore, directions from the center are also true.
+ # all distances measured from the center of the map along any longitudinal line are accurate
+ # Distortion of areas and shapes increases dramatically, the further away one gets from center point.
+ # The longitude lon_0 is at 6-o'clock, and the
+ # latitude circle boundinglat is tangent to the edge
+ # of the map at lon_0.
+
+ # 'Orthographic' ortho
+ # The orthographic azimuthal projection is a perspective projection from infinite distance.
+ # It is therefore often used to give the appearance of a globe viewed from outer space.
+ # The projection is neither equal-area nor conformal, and much distortion is introduced
+ # near the edge of the hemisphere. The directions from the center of projection are true.
+ # lon_0, lat_0 are the center point of the projection.
+
+ area_thresh=100000.
+ #area_thresh=10000
+
+ # ortho has no boundinglat; center it on the pole instead.
+ if self.mproj.find('ortho') != -1:
+ self.m = Basemap(projection='ortho',
+ lat_0=self.lat_0,lon_0=180+self.lon_0,
+ resolution='i',area_thresh=area_thresh)
+ else:
+ self.m = Basemap(projection=self.mproj,lat_0=self.lat_0,
+ boundinglat=self.bounding_lat,lon_0=self.lon_0,
+ resolution='i',area_thresh=area_thresh)
+ # shifted is read by inherited add_pnts (half-grid pcolor shift).
+ self.shifted = False
+
+
+ def create_fig(self):
+ # Create an 8x8 inch square figure with a single axes for the
+ # polar map ('hieght' is a harmless local-variable typo).
+ # Create figure.
+ width = 4*2
+ hieght = 4*2
+
+ # Create basis for map
+ # frameon=True gives transparent background
+ self.fig = plt.figure(figsize=(width,hieght),frameon=False)
+ #self.fig = plt.figure(figsize=(width,hieght),frameon=True)
+ # Set axis 1 row, 1 column, first plot
+ self.ax = self.fig.add_subplot(1,1,1)
+
+ def add_pcolor(self,x,y,z):
+ # Pseudo-color plot of field z(lat,lon): make longitudes cyclic,
+ # shift to +/-180, project to map coordinates, mask values <=
+ # self.missing, then draw with pcolormesh (clipped to self.cints).
+
+ self.x = numpy.array(x) # 1d array of longiudes
+ self.y = numpy.array(y) # 1d array of latitudes
+ self.z = z # 2d array of data
+
+ # Reshape z
+ self.z.shape = (len(self.y),len(self.x))
+
+ # Add cyclic longitude to data and longitudes
+ self.z,self.x = addcyclic(self.z,self.x)
+
+ # Shift data and longitudes to start at start_lon
+ # also converts 0-360 to +-180 format
+ self.z,self.x = shiftgrid(180.0,self.z,self.x,start=False)
+
+ # compute native map projection coordinates for lat/lon grid.
+ self.x,self.y = self.m(*numpy.meshgrid(self.x,self.y))
+
+ # Mask missing (shiftgrid removes original masking)
+ if self.missing != None:
+ self.z = numpy.ma.masked_where(self.z <= self.missing,self.z)
+ self.the_image = self.m.pcolormesh(self.x,self.y,self.z,
+ edgecolors='None',linewidth=0.01,cmap=self.color_scheme)
+ # adjust range of colors.
+ if self.cints:
+ self.the_image.set_clim(self.cints[0],self.cints[1])
+
+ def add_contour(self,x,y,z,no_fill,zorder):
+ # Contour field z(lat,lon) on the polar map. With self.clevs set,
+ # levels >= 1013 are drawn dotted and levels below solid (the
+ # "high/low pressure" convention noted in the #tmp comments), and
+ # optionally labeled (self.clabels). Sets self.no_fill, which
+ # add_extras() reads — call this before add_extras().
+ # no_fill: truthy -> line contours only (no contourf).
+ # zorder: drawing order for the contour sets.
+
+ self.x = numpy.array(x) # 1d array of longiudes
+ self.y = numpy.array(y) # 1d array of latitudes
+ self.z = z # 2d array of data
+ self.no_fill = no_fill
+ self.zorder = zorder
+
+ # Reshape z
+ self.z.shape = (len(self.y),len(self.x))
+
+ # Add cyclic longitude to data and longitudes
+ self.z,self.x = addcyclic(self.z,self.x)
+
+ # Shift data and longitudes to start at start_lon
+ # also converts 0-360 to +-180 format
+ self.z,self.x = shiftgrid(180.0,self.z,self.x,start=False)
+
+ # compute native map projection coordinates for lat/lon grid.
+ self.x,self.y = self.m(*numpy.meshgrid(self.x,self.y))
+
+ # Mask missing (shiftgrid removes original masking)
+ if self.missing != None:
+ self.z = numpy.ma.masked_where(self.z <= self.missing,self.z)
+
+ # second assignment deliberately overrides the first (lw = 0.25)
+ lw = 1.0
+ lw = 0.25
+ if self.clevs:
+ contours = range(self.clevs[0],self.clevs[1],self.clevs[2])
+#tmp make high pressure contours solid and low pressure dashed
+ high_c = [x for x in contours if x >= 1013]
+ low_c = [x for x in contours if x not in high_c]
+
+ # make filled contour plot and overlay contour lines
+ if self.clevs:
+ if self.no_fill:
+#tmp make high pressure contours solid and low pressure dashed
+ contours = high_c
+ self.the_image1 = self.m.contour(self.x,self.y,self.z,
+ contours,colors='k',linewidths=lw,linestyles='dotted',
+ zorder=self.zorder)
+ contours = low_c
+ self.the_image2 = self.m.contour(self.x,self.y,self.z,
+ contours,colors='k',linewidths=lw,linestyles='solid',
+ zorder=self.zorder)
+
+ #self.the_image2 = self.m.contour(self.x,self.y,self.z,
+ # contours,colors ='k',linewidths=lw)
+ else:
+ self.the_image= self.m.contourf(self.x,self.y,self.z,
+ contours,cmap=self.color_scheme)
+ # Adjust colors for contours
+ self.the_image.set_clim(self.the_image.cvalues[1],
+ self.the_image.cvalues[-2])
+ self.the_image2 = self.m.contour(self.x,self.y,self.z,
+ contours,colors ='k',linewidths=lw)
+ if self.clabels:
+ # label every other contour level (step = clevs[2]*2)
+ # label the contours
+ labels = []
+#tmp make high pressure contours solid and low pressure dashed
+ for label in range(high_c[0],high_c[-1],self.clevs[2]*2):
+ labels.append(label)
+ self.the_image3 = plt.clabel(self.the_image1,labels,
+ fontsize=5,inline=1,inline_spacing=0,fmt = '%d')
+ labels = []
+ for label in range(low_c[0],low_c[-1],self.clevs[2]*2):
+ labels.append(label)
+ self.the_image4 = plt.clabel(self.the_image2,labels,
+ fontsize=5,inline=1,inline_spacing=0,fmt = '%d')
+
+ #for label in range(self.clevs[0],self.clevs[1],self.clevs[2]*2):
+ # labels.append(label)
+ #self.the_image3 = plt.clabel(self.the_image2,labels,
+ # fontsize=5,inline=1,inline_spacing=0,fmt = '%d')
+ else:
+ if self.no_fill:
+ self.the_image2 = self.m.contour(self.x,self.y,self.z,
+ colors ='k',linewidths=lw)
+ else:
+ self.the_image = self.m.contourf(self.x,self.y,self.z,
+ cmap=self.color_scheme)
+ self.the_image2 = self.m.contour(self.x,self.y,self.z,
+ colors ='k',linewidths=lw)
+
+ def add_extras(self):
+ # Decorations: coastlines, (currently disabled) grid lines, filled
+ # continents when contour-lines-only mode, and the colorbar.
+ # NOTE(review): reads self.no_fill and self.the_image, which are set
+ # by add_contour/add_pcolor — this method assumes one of those was
+ # called first; confirm call order at the call sites.
+ # draw coastlines
+ self.m.drawcoastlines(linewidth=0.25,ax=self.ax)
+
+ # setup grids
+ delat = 30.0*self.polarity
+ if self.hemi == 'sh':
+ circles = numpy.arange(self.bounding_lat,-90,delat).tolist()
+ else:
+ circles = numpy.arange(self.bounding_lat,90,delat).tolist()
+ delon = 60.
+ meridians = numpy.arange(0,360,delon)
+
+ # draw parallelsleft, right, top or bottom
+ #self.m.drawparallels(circles,labels=[0,0,0,0],ax=self.ax)
+ # draw meridians
+ #self.m.drawmeridians(meridians,labels=[0,0,0,0],ax=self.ax)
+
+ if self.no_fill:
+ self.cbar = 0
+ # Fill Continents
+ self.m.fillcontinents(color='0.9')
+
+ if self.cbar:
+ # add a colorbar; geometry differs slightly depending on whether a
+ # label is present and whether the ends are extended arrows.
+ if self.colorbar_label:
+ aa = self.fig.colorbar(self.the_image,orientation='horizontal',
+ extend=self.extend,spacing='uniform',ticks=None,
+ fraction=0.1,pad=0.09,aspect=40)
+ aa.ax.set_xlabel(self.colorbar_label,fontsize='small')
+ else:
+ if self.extend == 'both':
+ aa = self.fig.colorbar(self.the_image,orientation='horizontal',
+ extend=self.extend,spacing='uniform',ticks=None,
+ fraction=0.021,pad=0.03,aspect=40)
+ else:
+ aa = self.fig.colorbar(self.the_image,orientation='horizontal',
+ extend=self.extend,spacing='uniform',ticks=None,
+ fraction=0.0225,pad=0.03,aspect=40)
+
+
+#---Start of main code block.
+if __name__=='__main__':
+
+ import numpy,sys
+
+ # Base Definitions
+ im = 144
+ jm = 73
+ maxID = im*jm
+ dx = 2.5
+ dy = 2.5
+
+ lats = numpy.array([-90.0, -87.5, -85.0, -82.5, -80.0, -77.5, -75.0, -72.5, -70.0,
+ -67.5, -65.0, -62.5, -60.0, -57.5, -55.0, -52.5, -50.0, -47.5, -45.0,
+ -42.5, -40.0, -37.5, -35.0, -32.5, -30.0, -27.5, -25.0, -22.5, -20.0,
+ -17.5, -15.0, -12.5, -10.0, -7.5, -5.0, -2.5, 0.0, 2.5, 5.0, 7.5, 10.0,
+ 12.5, 15.0, 17.5, 20.0, 22.5, 25.0, 27.5, 30.0, 32.5, 35.0, 37.5, 40.0,
+ 42.5, 45.0, 47.5, 50.0, 52.5, 55.0, 57.5, 60.0, 62.5, 65.0, 67.5, 70.0,
+ 72.5, 75.0, 77.5, 80.0, 82.5, 85.0, 87.5, 90.0])
+
+ lons = numpy.array([0.0, 2.5, 5.0, 7.5, 10.0, 12.5, 15.0, 17.5, 20.0, 22.5, 25.0,
+ 27.5, 30.0, 32.5, 35.0, 37.5, 40.0, 42.5, 45.0, 47.5, 50.0,
+ 52.5, 55.0, 57.5, 60.0, 62.5, 65.0, 67.5, 70.0, 72.5, 75.0,
+ 77.5, 80.0, 82.5, 85.0, 87.5, 90.0, 92.5, 95.0, 97.5, 100.0,
+ 102.5, 105.0, 107.5, 110.0, 112.5, 115.0, 117.5, 120.0, 122.5,
+ 125.0, 127.5, 130.0, 132.5, 135.0, 137.5, 140.0, 142.5, 145.0,
+ 147.5, 150.0, 152.5, 155.0, 157.5, 160.0, 162.5, 165.0, 167.5,
+ 170.0, 172.5, 175.0, 177.5, 180.0, 182.5, 185.0, 187.5, 190.0,
+ 192.5, 195.0, 197.5, 200.0, 202.5, 205.0, 207.5, 210.0, 212.5,
+ 215.0, 217.5, 220.0, 222.5, 225.0, 227.5, 230.0, 232.5, 235.0,
+ 237.5, 240.0, 242.5, 245.0, 247.5, 250.0, 252.5, 255.0, 257.5,
+ 260.0, 262.5, 265.0, 267.5, 270.0, 272.5, 275.0, 277.5, 280.0,
+ 282.5, 285.0, 287.5, 290.0, 292.5, 295.0, 297.5, 300.0, 302.5,
+ 305.0, 307.5, 310.0, 312.5, 315.0, 317.5, 320.0, 322.5, 325.0,
+ 327.5, 330.0, 332.5, 335.0, 337.5, 340.0, 342.5, 345.0, 347.5,
+ 350.0, 352.5, 355.0, 357.5])
+
+ slp = numpy.array([100347.0, 100347.0, 100347.0, 100347.0, 100347.0,
+ 100347.0, 100347.0, 100347.0, 100347.0, 100347.0, 100347.0, 100347.0,
+ 100347.0, 100347.0, 100347.0, 100347.0, 100347.0, 100347.0, 100347.0,
+ 100347.0, 100347.0, 100347.0, 100347.0, 100347.0, 100347.0, 100347.0,
+ 100347.0, 100347.0, 100347.0, 100347.0, 100347.0, 100347.0, 100347.0,
+ 100347.0, 100347.0, 100347.0, 100347.0, 100347.0, 100347.0, 100347.0,
+ 100347.0, 100347.0, 100347.0, 100347.0, 100347.0, 100347.0, 100347.0,
+ 100347.0, 100347.0, 100347.0, 100347.0, 100347.0, 100347.0, 100347.0,
+ 100347.0, 100347.0, 100347.0, 100347.0, 100347.0, 100347.0, 100347.0,
+ 100347.0, 100347.0, 100347.0, 100347.0, 100347.0, 100347.0, 100347.0,
+ 100347.0, 100347.0, 100347.0, 100347.0, 100347.0, 100347.0, 100347.0,
+ 100347.0, 100347.0, 100347.0, 100347.0, 100347.0, 100347.0, 100347.0,
+ 100347.0, 100347.0, 100347.0, 100347.0, 100347.0, 100347.0, 100347.0,
+ 100347.0, 100347.0, 100347.0, 100347.0, 100347.0, 100347.0, 100347.0,
+ 100347.0, 100347.0, 100347.0, 100347.0, 100347.0, 100347.0, 100347.0,
+ 100347.0, 100347.0, 100347.0, 100347.0, 100347.0, 100347.0, 100347.0,
+ 100347.0, 100347.0, 100347.0, 100347.0, 100347.0, 100347.0, 100347.0,
+ 100347.0, 100347.0, 100347.0, 100347.0, 100347.0, 100347.0, 100347.0,
+ 100347.0, 100347.0, 100347.0, 100347.0, 100347.0, 100347.0, 100347.0,
+ 100347.0, 100347.0, 100347.0, 100347.0, 100347.0, 100347.0, 100347.0,
+ 100347.0, 100347.0, 100347.0, 100347.0, 100347.0, 100347.0, 100240.0,
+ 100280.0, 100325.0, 100365.0, 100405.0, 100445.0, 100485.0, 100520.0,
+ 100560.0, 100595.0, 100625.0, 100657.0, 100690.0, 100720.0, 100747.0,
+ 100775.0, 100800.0, 100825.0, 100850.0, 100872.0, 100897.0, 100917.0,
+ 100940.0, 100955.0, 100975.0, 100992.0, 101012.0, 101027.0, 101045.0,
+ 101060.0, 101075.0, 101090.0, 101102.0, 101115.0, 101125.0, 101135.0,
+ 101147.0, 101157.0, 101162.0, 101172.0, 101180.0, 101180.0, 101182.0,
+ 101185.0, 101190.0, 101187.0, 101185.0, 101182.0, 101180.0, 101172.0,
+ 101162.0, 101157.0, 101145.0, 101132.0, 101120.0, 101105.0, 101090.0,
+ 101070.0, 101050.0, 101030.0, 101005.0, 100982.0, 100960.0, 100930.0,
+ 100902.0, 100875.0, 100845.0, 100815.0, 100782.0, 100752.0, 100717.0,
+ 100682.0, 100647.0, 100615.0, 100575.0, 100540.0, 100500.0, 100462.0,
+ 100427.0, 100387.0, 100347.0, 100312.0, 100270.0, 100230.0, 100192.0,
+ 100152.0, 100115.0, 100075.0, 100037.0, 100000.0, 99962.0, 99927.0,
+ 99890.0, 99860.0, 99825.0, 99795.0, 99760.0, 99730.0, 99702.0, 99675.0,
+ 99650.0, 99627.0, 99602.0, 99580.0, 99560.0, 99542.0, 99525.0, 99512.0,
+ 99497.0, 99485.0, 99472.0, 99465.0, 99455.0, 99447.0, 99442.0, 99440.0,
+ 99440.0, 99440.0, 99442.0, 99450.0, 99457.0, 99465.0, 99477.0, 99492.0,
+ 99510.0, 99532.0, 99550.0, 99577.0, 99605.0, 99632.0, 99665.0, 99697.0,
+ 99732.0, 99770.0, 99807.0, 99850.0, 99890.0, 99932.0, 99975.0, 100017.0,
+ 100062.0, 100105.0, 100150.0, 100197.0, 100277.0, 100352.0, 100427.0,
+ 100495.0, 100555.0, 100612.0, 100665.0, 100715.0, 100760.0, 100800.0,
+ 100837.0, 100870.0, 100902.0, 100932.0, 100955.0, 100985.0, 101010.0,
+ 101035.0, 101057.0, 101085.0, 101112.0, 101137.0, 101165.0, 101197.0,
+ 101225.0, 101260.0, 101292.0, 101327.0, 101360.0, 101395.0, 101432.0,
+ 101465.0, 101500.0, 101532.0, 101565.0, 101592.0, 101625.0, 101650.0,
+ 101672.0, 101695.0, 101715.0, 101732.0, 101740.0, 101755.0, 101762.0,
+ 101770.0, 101775.0, 101772.0, 101770.0, 101767.0, 101755.0, 101747.0,
+ 101735.0, 101715.0, 101697.0, 101675.0, 101650.0, 101620.0, 101590.0,
+ 101555.0, 101517.0, 101480.0, 101440.0, 101395.0, 101355.0, 101310.0,
+ 101267.0, 101220.0, 101177.0, 101130.0, 101085.0, 101042.0, 100995.0,
+ 100950.0, 100907.0, 100860.0, 100812.0, 100765.0, 100720.0, 100667.0,
+ 100617.0, 100567.0, 100510.0, 100452.0, 100395.0, 100332.0, 100272.0,
+ 100205.0, 100140.0, 100070.0, 100002.0, 99932.0, 99865.0, 99800.0,
+ 99730.0, 99665.0, 99600.0, 99540.0, 99485.0, 99435.0, 99385.0, 99345.0,
+ 99310.0, 99280.0, 99252.0, 99227.0, 99202.0, 99177.0, 99147.0, 99117.0,
+ 99087.0, 99055.0, 99017.0, 98975.0, 98940.0, 98897.0, 98862.0, 98832.0,
+ 98802.0, 98782.0, 98770.0, 98767.0, 98772.0, 98790.0, 98812.0, 98845.0,
+ 98890.0, 98937.0, 98995.0, 99055.0, 99122.0, 99192.0, 99272.0, 99347.0,
+ 99430.0, 99515.0, 99602.0, 99685.0, 99772.0, 99865.0, 99950.0, 100035.0,
+ 100117.0, 100200.0, 100355.0, 100452.0, 100547.0, 100637.0, 100715.0,
+ 100790.0, 100857.0, 100910.0, 100960.0, 101000.0, 101025.0, 101052.0,
+ 101075.0, 101090.0, 101102.0, 101110.0, 101122.0, 101135.0, 101147.0,
+ 101165.0, 101190.0, 101217.0, 101255.0, 101295.0, 101347.0, 101405.0,
+ 101470.0, 101537.0, 101615.0, 101687.0, 101765.0, 101845.0, 101920.0,
+ 101995.0, 102062.0, 102125.0, 102182.0, 102232.0, 102272.0, 102307.0,
+ 102332.0, 102350.0, 102367.0, 102375.0, 102380.0, 102377.0, 102372.0,
+ 102367.0, 102355.0, 102335.0, 102310.0, 102280.0, 102245.0, 102207.0,
+ 102165.0, 102117.0, 102067.0, 102020.0, 101967.0, 101920.0, 101865.0,
+ 101820.0, 101767.0, 101717.0, 101667.0, 101615.0, 101562.0, 101505.0,
+ 101452.0, 101397.0, 101342.0, 101290.0, 101237.0, 101190.0, 101137.0,
+ 101087.0, 101040.0, 100990.0, 100940.0, 100892.0, 100842.0, 100792.0,
+ 100747.0, 100702.0, 100652.0, 100605.0, 100550.0, 100495.0, 100430.0,
+ 100357.0, 100277.0, 100185.0, 100085.0, 99977.0, 99865.0, 99745.0,
+ 99617.0, 99497.0, 99380.0, 99267.0, 99172.0, 99092.0, 99035.0, 99000.0,
+ 98970.0, 98950.0, 98932.0, 98910.0, 98872.0, 98817.0, 98752.0, 98675.0,
+ 98587.0, 98505.0, 98430.0, 98365.0, 98317.0, 98285.0, 98280.0, 98287.0,
+ 98315.0, 98362.0, 98427.0, 98510.0, 98605.0, 98712.0, 98820.0, 98935.0,
+ 99037.0, 99132.0, 99217.0, 99287.0, 99355.0, 99412.0, 99472.0, 99532.0,
+ 99602.0, 99677.0, 99762.0, 99850.0, 99947.0, 100047.0, 100152.0,
+ 100252.0, 100205.0, 100345.0, 100482.0, 100617.0, 100745.0, 100860.0,
+ 100967.0, 101055.0, 101127.0, 101182.0, 101220.0, 101242.0, 101250.0,
+ 101250.0, 101240.0, 101230.0, 101215.0, 101210.0, 101205.0, 101212.0,
+ 101227.0, 101260.0, 101302.0, 101365.0, 101442.0, 101535.0, 101640.0,
+ 101752.0, 101872.0, 101992.0, 102112.0, 102235.0, 102352.0, 102467.0,
+ 102570.0, 102667.0, 102750.0, 102817.0, 102870.0, 102910.0, 102937.0,
+ 102950.0, 102960.0, 102962.0, 102960.0, 102955.0, 102947.0, 102937.0,
+ 102925.0, 102905.0, 102877.0, 102840.0, 102787.0, 102730.0, 102652.0,
+ 102572.0, 102487.0, 102402.0, 102325.0, 102257.0, 102205.0, 102155.0,
+ 102117.0, 102082.0, 102042.0, 102000.0, 101950.0, 101892.0, 101827.0,
+ 101755.0, 101682.0, 101612.0, 101545.0, 101482.0, 101422.0, 101367.0,
+ 101310.0, 101252.0, 101195.0, 101127.0, 101055.0, 100975.0, 100895.0,
+ 100807.0, 100727.0, 100652.0, 100587.0, 100527.0, 100480.0, 100437.0,
+ 100385.0, 100327.0, 100245.0, 100145.0, 100017.0, 99862.0, 99690.0,
+ 99497.0, 99297.0, 99095.0, 98900.0, 98722.0, 98565.0, 98435.0, 98312.0,
+ 98210.0, 98112.0, 98007.0, 97897.0, 97772.0, 97640.0, 97517.0, 97410.0,
+ 97325.0, 97267.0, 97245.0, 97250.0, 97280.0, 97340.0, 97427.0, 97557.0,
+ 97720.0, 97930.0, 98175.0, 98445.0, 98720.0, 98972.0, 99182.0, 99332.0,
+ 99417.0, 99440.0, 99420.0, 99377.0, 99337.0, 99315.0, 99320.0, 99352.0,
+ 99412.0, 99495.0, 99587.0, 99695.0, 99810.0, 99937.0, 100067.0, 99827.0,
+ 100032.0, 100235.0, 100437.0, 100620.0, 100785.0, 100932.0, 101062.0,
+ 101162.0, 101240.0, 101302.0, 101340.0, 101360.0, 101372.0, 101377.0,
+ 101377.0, 101390.0, 101400.0, 101420.0, 101447.0, 101485.0, 101535.0,
+ 101610.0, 101705.0, 101822.0, 101960.0, 102102.0, 102245.0, 102387.0,
+ 102522.0, 102652.0, 102777.0, 102907.0, 103032.0, 103155.0, 103265.0,
+ 103360.0, 103442.0, 103507.0, 103562.0, 103597.0, 103617.0, 103620.0,
+ 103610.0, 103585.0, 103545.0, 103495.0, 103445.0, 103395.0, 103345.0,
+ 103295.0, 103247.0, 103192.0, 103122.0, 103032.0, 102940.0, 102832.0,
+ 102730.0, 102632.0, 102557.0, 102507.0, 102482.0, 102475.0, 102472.0,
+ 102470.0, 102462.0, 102437.0, 102402.0, 102342.0, 102272.0, 102192.0,
+ 102110.0, 102025.0, 101942.0, 101862.0, 101775.0, 101677.0, 101575.0,
+ 101465.0, 101352.0, 101240.0, 101127.0, 101017.0, 100910.0, 100807.0,
+ 100705.0, 100612.0, 100520.0, 100432.0, 100352.0, 100275.0, 100195.0,
+ 100110.0, 100007.0, 99885.0, 99740.0, 99557.0, 99350.0, 99115.0,
+ 98860.0, 98595.0, 98327.0, 98072.0, 97840.0, 97642.0, 97482.0, 97337.0,
+ 97227.0, 97125.0, 97027.0, 96930.0, 96832.0, 96757.0, 96705.0, 96687.0,
+ 96697.0, 96732.0, 96782.0, 96850.0, 96940.0, 97077.0, 97277.0, 97550.0,
+ 97890.0, 98282.0, 98682.0, 99047.0, 99327.0, 99490.0, 99527.0, 99440.0,
+ 99285.0, 99092.0, 98925.0, 98800.0, 98742.0, 98740.0, 98785.0, 98865.0,
+ 98972.0, 99105.0, 99260.0, 99435.0, 99627.0, 99375.0, 99612.0, 99855.0,
+ 100090.0, 100312.0, 100520.0, 100707.0, 100867.0, 101002.0, 101112.0,
+ 101202.0, 101280.0, 101352.0, 101420.0, 101487.0, 101557.0, 101635.0,
+ 101707.0, 101785.0, 101862.0, 101937.0, 102017.0, 102115.0, 102227.0,
+ 102355.0, 102490.0, 102627.0, 102757.0, 102882.0, 103012.0, 103145.0,
+ 103285.0, 103427.0, 103562.0, 103692.0, 103807.0, 103902.0, 103982.0,
+ 104040.0, 104090.0, 104135.0, 104165.0, 104190.0, 104200.0, 104180.0,
+ 104125.0, 104045.0, 103955.0, 103847.0, 103730.0, 103615.0, 103507.0,
+ 103410.0, 103317.0, 103217.0, 103120.0, 103020.0, 102927.0, 102852.0,
+ 102802.0, 102782.0, 102792.0, 102832.0, 102892.0, 102962.0, 103017.0,
+ 103062.0, 103082.0, 103072.0, 103035.0, 102982.0, 102912.0, 102830.0,
+ 102727.0, 102607.0, 102462.0, 102292.0, 102107.0, 101917.0, 101735.0,
+ 101570.0, 101417.0, 101282.0, 101155.0, 101037.0, 100922.0, 100812.0,
+ 100697.0, 100582.0, 100465.0, 100345.0, 100225.0, 100112.0, 99997.0,
+ 99872.0, 99735.0, 99580.0, 99390.0, 99167.0, 98915.0, 98640.0, 98342.0,
+ 98052.0, 97782.0, 97557.0, 97385.0, 97255.0, 97185.0, 97142.0, 97117.0,
+ 97082.0, 97040.0, 96982.0, 96920.0, 96885.0, 96850.0, 96835.0, 96842.0,
+ 96865.0, 96877.0, 96907.0, 96967.0, 97082.0, 97280.0, 97580.0, 97972.0,
+ 98417.0, 98847.0, 99180.0, 99347.0, 99325.0, 99145.0, 98885.0, 98632.0,
+ 98447.0, 98337.0, 98292.0, 98300.0, 98352.0, 98437.0, 98567.0, 98732.0,
+ 98930.0, 99145.0, 99057.0, 99242.0, 99462.0, 99712.0, 99977.0, 100230.0,
+ 100462.0, 100660.0, 100840.0, 100995.0, 101135.0, 101270.0, 101397.0,
+ 101520.0, 101647.0, 101782.0, 101927.0, 102075.0, 102217.0, 102345.0,
+ 102450.0, 102537.0, 102617.0, 102692.0, 102780.0, 102877.0, 102982.0,
+ 103095.0, 103212.0, 103337.0, 103465.0, 103600.0, 103732.0, 103865.0,
+ 103985.0, 104095.0, 104180.0, 104245.0, 104287.0, 104312.0, 104325.0,
+ 104330.0, 104327.0, 104317.0, 104300.0, 104265.0, 104220.0, 104165.0,
+ 104102.0, 104025.0, 103940.0, 103842.0, 103735.0, 103597.0, 103452.0,
+ 103315.0, 103197.0, 103117.0, 103070.0, 103052.0, 103067.0, 103112.0,
+ 103165.0, 103235.0, 103317.0, 103405.0, 103492.0, 103560.0, 103602.0,
+ 103622.0, 103625.0, 103615.0, 103582.0, 103510.0, 103412.0, 103272.0,
+ 103105.0, 102920.0, 102725.0, 102535.0, 102355.0, 102177.0, 102010.0,
+ 101840.0, 101665.0, 101490.0, 101315.0, 101152.0, 101007.0, 100867.0,
+ 100732.0, 100597.0, 100455.0, 100300.0, 100145.0, 99980.0, 99810.0,
+ 99620.0, 99405.0, 99162.0, 98895.0, 98597.0, 98295.0, 97997.0, 97745.0,
+ 97552.0, 97422.0, 97357.0, 97347.0, 97390.0, 97467.0, 97570.0, 97667.0,
+ 97727.0, 97722.0, 97650.0, 97537.0, 97377.0, 97240.0, 97152.0, 97127.0,
+ 97127.0, 97122.0, 97115.0, 97170.0, 97385.0, 97810.0, 98380.0, 98920.0,
+ 99212.0, 99145.0, 98800.0, 98395.0, 98130.0, 98055.0, 98105.0, 98187.0,
+ 98257.0, 98320.0, 98400.0, 98502.0, 98620.0, 98755.0, 98895.0, 98942.0,
+ 99067.0, 99275.0, 99560.0, 99867.0, 100157.0, 100427.0, 100690.0,
+ 100945.0, 101170.0, 101365.0, 101542.0, 101705.0, 101855.0, 101997.0,
+ 102140.0, 102287.0, 102447.0, 102602.0, 102735.0, 102842.0, 102920.0,
+ 102970.0, 103012.0, 103055.0, 103107.0, 103177.0, 103265.0, 103365.0,
+ 103470.0, 103580.0, 103690.0, 103782.0, 103865.0, 103942.0, 104037.0,
+ 104172.0, 104300.0, 104337.0, 104302.0, 104252.0, 104217.0, 104192.0,
+ 104172.0, 104135.0, 104087.0, 104032.0, 103985.0, 103967.0, 103980.0,
+ 104025.0, 104070.0, 104052.0, 103962.0, 103827.0, 103687.0, 103577.0,
+ 103502.0, 103470.0, 103465.0, 103465.0, 103467.0, 103480.0, 103527.0,
+ 103615.0, 103722.0, 103815.0, 103860.0, 103877.0, 103877.0, 103887.0,
+ 103895.0, 103897.0, 103867.0, 103800.0, 103702.0, 103575.0, 103430.0,
+ 103285.0, 103160.0, 103045.0, 102930.0, 102810.0, 102685.0, 102550.0,
+ 102407.0, 102250.0, 102080.0, 101897.0, 101705.0, 101500.0, 101280.0,
+ 101042.0, 100797.0, 100572.0, 100370.0, 100197.0, 100032.0, 99852.0,
+ 99642.0, 99415.0, 99172.0, 98915.0, 98657.0, 98390.0, 98145.0, 97935.0,
+ 97785.0, 97705.0, 97692.0, 97740.0, 97842.0, 97995.0, 98155.0, 98285.0,
+ 98352.0, 98350.0, 98260.0, 98072.0, 97822.0, 97580.0, 97425.0, 97310.0,
+ 97162.0, 97037.0, 97117.0, 97532.0, 98190.0, 98795.0, 99055.0, 98872.0,
+ 98410.0, 97980.0, 97792.0, 97852.0, 98005.0, 98140.0, 98240.0, 98347.0,
+ 98472.0, 98602.0, 98717.0, 98805.0, 98865.0, 98985.0, 99127.0, 99370.0,
+ 99685.0, 100012.0, 100362.0, 100782.0, 101215.0, 101555.0, 101755.0,
+ 101865.0, 101965.0, 102077.0, 102180.0, 102277.0, 102390.0, 102517.0,
+ 102655.0, 102805.0, 102955.0, 103080.0, 103167.0, 103212.0, 103245.0,
+ 103270.0, 103290.0, 103312.0, 103337.0, 103375.0, 103430.0, 103507.0,
+ 103600.0, 103692.0, 103772.0, 103850.0, 103962.0, 104175.0, 104380.0,
+ 104427.0, 104275.0, 104070.0, 103930.0, 103887.0, 103922.0, 103985.0,
+ 104002.0, 103972.0, 103957.0, 103975.0, 104037.0, 104145.0, 104235.0,
+ 104227.0, 104162.0, 104082.0, 103997.0, 103912.0, 103850.0, 103820.0,
+ 103800.0, 103770.0, 103735.0, 103737.0, 103797.0, 103895.0, 103980.0,
+ 104010.0, 103990.0, 103952.0, 103950.0, 103987.0, 104032.0, 104035.0,
+ 103972.0, 103882.0, 103800.0, 103697.0, 103557.0, 103395.0, 103255.0,
+ 103145.0, 103042.0, 102977.0, 102940.0, 102940.0, 102940.0, 102907.0,
+ 102810.0, 102660.0, 102477.0, 102262.0, 102007.0, 101710.0, 101410.0,
+ 101142.0, 100920.0, 100735.0, 100580.0, 100427.0, 100272.0, 100110.0,
+ 99935.0, 99752.0, 99540.0, 99300.0, 99035.0, 98785.0, 98570.0, 98397.0,
+ 98282.0, 98207.0, 98167.0, 98170.0, 98212.0, 98302.0, 98415.0, 98542.0,
+ 98665.0, 98690.0, 98475.0, 98100.0, 97780.0, 97550.0, 97282.0, 97145.0,
+ 97430.0, 97895.0, 98017.0, 97747.0, 97452.0, 97350.0, 97390.0, 97485.0,
+ 97605.0, 97742.0, 97842.0, 97917.0, 97985.0, 98090.0, 98245.0, 98440.0,
+ 98642.0, 98795.0, 98897.0, 99312.0, 99512.0, 99762.0, 100025.0,
+ 100327.0, 100745.0, 101192.0, 101527.0, 101700.0, 101782.0, 101852.0,
+ 101922.0, 102002.0, 102080.0, 102175.0, 102307.0, 102452.0, 102622.0,
+ 102807.0, 102980.0, 103105.0, 103202.0, 103300.0, 103410.0, 103522.0,
+ 103615.0, 103632.0, 103590.0, 103547.0, 103517.0, 103510.0, 103552.0,
+ 103630.0, 103720.0, 103797.0, 103880.0, 103987.0, 104155.0, 104257.0,
+ 104202.0, 104045.0, 103900.0, 103865.0, 103985.0, 104127.0, 104105.0,
+ 103970.0, 103947.0, 104070.0, 104262.0, 104412.0, 104370.0, 104187.0,
+ 104092.0, 104132.0, 104235.0, 104352.0, 104402.0, 104352.0, 104252.0,
+ 104160.0, 104085.0, 104030.0, 104010.0, 104030.0, 104075.0, 104097.0,
+ 104055.0, 103965.0, 103907.0, 103912.0, 103940.0, 103922.0, 103865.0,
+ 103792.0, 103722.0, 103632.0, 103485.0, 103295.0, 103107.0, 102950.0,
+ 102840.0, 102770.0, 102715.0, 102687.0, 102717.0, 102787.0, 102857.0,
+ 102905.0, 102925.0, 102902.0, 102780.0, 102512.0, 102135.0, 101760.0,
+ 101465.0, 101260.0, 101100.0, 100977.0, 100860.0, 100745.0, 100632.0,
+ 100512.0, 100355.0, 100142.0, 99887.0, 99620.0, 99350.0, 99077.0,
+ 98802.0, 98580.0, 98465.0, 98477.0, 98590.0, 98717.0, 98805.0, 98882.0,
+ 98985.0, 99032.0, 98832.0, 98410.0, 98052.0, 97722.0, 97340.0, 97470.0,
+ 98287.0, 98532.0, 97530.0, 96495.0, 96327.0, 96665.0, 96995.0, 97255.0,
+ 97507.0, 97740.0, 97892.0, 97962.0, 97970.0, 97992.0, 98115.0, 98362.0,
+ 98655.0, 98927.0, 99135.0, 100062.0, 100247.0, 100395.0, 100575.0,
+ 100865.0, 101175.0, 101375.0, 101512.0, 101667.0, 101817.0, 101940.0,
+ 102005.0, 102025.0, 102027.0, 102072.0, 102165.0, 102280.0, 102412.0,
+ 102565.0, 102722.0, 102862.0, 103012.0, 103182.0, 103362.0, 103490.0,
+ 103512.0, 103485.0, 103470.0, 103492.0, 103547.0, 103605.0, 103662.0,
+ 103732.0, 103820.0, 103902.0, 103950.0, 103962.0, 103982.0, 104035.0,
+ 104077.0, 104060.0, 103995.0, 103967.0, 104057.0, 104215.0, 104257.0,
+ 104120.0, 103982.0, 104040.0, 104227.0, 104367.0, 104250.0, 104025.0,
+ 103995.0, 104135.0, 104415.0, 104775.0, 104960.0, 104885.0, 104722.0,
+ 104565.0, 104337.0, 104030.0, 103820.0, 103807.0, 103890.0, 103965.0,
+ 104017.0, 104065.0, 104097.0, 104112.0, 104075.0, 103987.0, 103915.0,
+ 103877.0, 103825.0, 103707.0, 103515.0, 103295.0, 103105.0, 103002.0,
+ 102942.0, 102772.0, 102417.0, 101992.0, 101725.0, 101762.0, 102072.0,
+ 102490.0, 102850.0, 103107.0, 103232.0, 103087.0, 102660.0, 102185.0,
+ 101850.0, 101652.0, 101520.0, 101432.0, 101360.0, 101287.0, 101230.0,
+ 101177.0, 101075.0, 100875.0, 100625.0, 100380.0, 100142.0, 99890.0,
+ 99597.0, 99295.0, 99075.0, 99015.0, 99135.0, 99290.0, 99335.0, 99290.0,
+ 99235.0, 99165.0, 98975.0, 98627.0, 98287.0, 97937.0, 97467.0, 97270.0,
+ 97482.0, 97260.0, 96415.0, 95920.0, 96090.0, 96447.0, 96827.0, 97247.0,
+ 97660.0, 98002.0, 98237.0, 98335.0, 98385.0, 98482.0, 98700.0, 99005.0,
+ 99332.0, 99620.0, 99855.0, 100810.0, 100915.0, 101000.0, 101115.0,
+ 101260.0, 101365.0, 101442.0, 101590.0, 101767.0, 101927.0, 102032.0,
+ 102025.0, 101945.0, 101897.0, 101927.0, 102002.0, 102085.0, 102155.0,
+ 102255.0, 102382.0, 102507.0, 102635.0, 102827.0, 103092.0, 103287.0,
+ 103327.0, 103282.0, 103267.0, 103332.0, 103450.0, 103527.0, 103570.0,
+ 103620.0, 103710.0, 103840.0, 103997.0, 104110.0, 104130.0, 104122.0,
+ 104165.0, 104230.0, 104190.0, 104035.0, 103957.0, 104077.0, 104257.0,
+ 104295.0, 104197.0, 104125.0, 104212.0, 104362.0, 104327.0, 104127.0,
+ 104007.0, 104055.0, 104122.0, 104087.0, 103935.0, 103790.0, 103735.0,
+ 103692.0, 103595.0, 103535.0, 103622.0, 103785.0, 103865.0, 103845.0,
+ 103817.0, 103837.0, 103885.0, 103902.0, 103882.0, 103872.0, 103892.0,
+ 103907.0, 103870.0, 103760.0, 103600.0, 103435.0, 103277.0, 103127.0,
+ 102875.0, 102322.0, 101470.0, 100635.0, 100097.0, 99940.0, 100202.0,
+ 100750.0, 101335.0, 101825.0, 102275.0, 102625.0, 102632.0, 102375.0,
+ 102142.0, 102007.0, 101910.0, 101810.0, 101710.0, 101625.0, 101572.0,
+ 101525.0, 101447.0, 101315.0, 101120.0, 100890.0, 100670.0, 100477.0,
+ 100310.0, 100130.0, 99920.0, 99750.0, 99670.0, 99692.0, 99772.0,
+ 99840.0, 99852.0, 99760.0, 99492.0, 99050.0, 98602.0, 98242.0, 97907.0,
+ 97567.0, 97277.0, 97007.0, 96795.0, 96770.0, 96927.0, 97245.0, 97645.0,
+ 98027.0, 98322.0, 98530.0, 98687.0, 98845.0, 99060.0, 99350.0, 99685.0,
+ 100010.0, 100292.0, 100507.0, 100675.0, 101442.0, 101457.0, 101472.0,
+ 101477.0, 101497.0, 101567.0, 101687.0, 101837.0, 101980.0, 102097.0,
+ 102170.0, 102145.0, 102052.0, 101982.0, 101965.0, 101980.0, 101985.0,
+ 101990.0, 102015.0, 102057.0, 102082.0, 102112.0, 102220.0, 102460.0,
+ 102727.0, 102867.0, 102877.0, 102855.0, 102842.0, 102875.0, 102967.0,
+ 103145.0, 103347.0, 103472.0, 103520.0, 103627.0, 103850.0, 104040.0,
+ 104080.0, 104080.0, 104185.0, 104365.0, 104477.0, 104442.0, 104352.0,
+ 104310.0, 104290.0, 104215.0, 104107.0, 104095.0, 104220.0, 104335.0,
+ 104225.0, 103917.0, 103672.0, 103590.0, 103527.0, 103410.0, 103287.0,
+ 103205.0, 103137.0, 103097.0, 103167.0, 103342.0, 103492.0, 103490.0,
+ 103417.0, 103380.0, 103382.0, 103365.0, 103327.0, 103325.0, 103387.0,
+ 103467.0, 103502.0, 103492.0, 103470.0, 103442.0, 103375.0, 103210.0,
+ 102935.0, 102560.0, 102070.0, 101482.0, 100920.0, 100515.0, 100285.0,
+ 100225.0, 100332.0, 100500.0, 100645.0, 100857.0, 101250.0, 101622.0,
+ 101752.0, 101802.0, 101912.0, 102007.0, 102020.0, 101980.0, 101970.0,
+ 101975.0, 101915.0, 101802.0, 101702.0, 101607.0, 101435.0, 101180.0,
+ 100945.0, 100782.0, 100657.0, 100510.0, 100372.0, 100267.0, 100230.0,
+ 100300.0, 100450.0, 100560.0, 100497.0, 100205.0, 99790.0, 99377.0,
+ 99015.0, 98727.0, 98527.0, 98417.0, 98357.0, 98322.0, 98335.0, 98445.0,
+ 98670.0, 98930.0, 99120.0, 99197.0, 99242.0, 99372.0, 99625.0, 99950.0,
+ 100255.0, 100537.0, 100805.0, 101075.0, 101287.0, 101400.0, 101997.0,
+ 101925.0, 101892.0, 101872.0, 101872.0, 101917.0, 102027.0, 102147.0,
+ 102215.0, 102252.0, 102275.0, 102282.0, 102252.0, 102217.0, 102185.0,
+ 102157.0, 102152.0, 102145.0, 102100.0, 102005.0, 101917.0, 101875.0,
+ 101900.0, 102032.0, 102222.0, 102342.0, 102362.0, 102375.0, 102412.0,
+ 102452.0, 102482.0, 102620.0, 102872.0, 103065.0, 103132.0, 103227.0,
+ 103495.0, 103847.0, 104095.0, 104177.0, 104172.0, 104217.0, 104342.0,
+ 104442.0, 104360.0, 104042.0, 103832.0, 103890.0, 103990.0, 103970.0,
+ 103910.0, 103882.0, 103770.0, 103477.0, 103222.0, 103202.0, 103197.0,
+ 103055.0, 102910.0, 102812.0, 102757.0, 102740.0, 102812.0, 102942.0,
+ 102942.0, 102770.0, 102625.0, 102532.0, 102450.0, 102372.0, 102352.0,
+ 102412.0, 102515.0, 102592.0, 102652.0, 102752.0, 102902.0, 103020.0,
+ 103022.0, 102902.0, 102715.0, 102497.0, 102220.0, 101887.0, 101545.0,
+ 101252.0, 101022.0, 100825.0, 100665.0, 100555.0, 100480.0, 100400.0,
+ 100412.0, 100537.0, 100657.0, 100780.0, 100985.0, 101240.0, 101442.0,
+ 101575.0, 101737.0, 101947.0, 102040.0, 101960.0, 101862.0, 101817.0,
+ 101732.0, 101560.0, 101415.0, 101322.0, 101182.0, 100987.0, 100857.0,
+ 100847.0, 100880.0, 100927.0, 100980.0, 101080.0, 101147.0, 101022.0,
+ 100727.0, 100442.0, 100180.0, 99910.0, 99702.0, 99602.0, 99595.0,
+ 99625.0, 99690.0, 99795.0, 99930.0, 100060.0, 100137.0, 100165.0,
+ 100215.0, 100365.0, 100612.0, 100880.0, 101107.0, 101337.0, 101600.0,
+ 101860.0, 102037.0, 102065.0, 102285.0, 102250.0, 102240.0, 102237.0,
+ 102212.0, 102215.0, 102302.0, 102425.0, 102465.0, 102425.0, 102372.0,
+ 102337.0, 102335.0, 102385.0, 102430.0, 102417.0, 102407.0, 102420.0,
+ 102380.0, 102270.0, 102182.0, 102162.0, 102162.0, 102160.0, 102200.0,
+ 102247.0, 102200.0, 102067.0, 102005.0, 102040.0, 102055.0, 102037.0,
+ 102142.0, 102392.0, 102665.0, 102942.0, 103197.0, 103392.0, 103695.0,
+ 104345.0, 104867.0, 104500.0, 104090.0, 104002.0, 103860.0, 103480.0,
+ 103200.0, 103297.0, 103570.0, 103720.0, 103680.0, 103630.0, 103575.0,
+ 103437.0, 103235.0, 103057.0, 102907.0, 102757.0, 102645.0, 102575.0,
+ 102515.0, 102472.0, 102447.0, 102387.0, 102207.0, 101960.0, 101725.0,
+ 101527.0, 101395.0, 101350.0, 101372.0, 101407.0, 101415.0, 101437.0,
+ 101542.0, 101745.0, 101995.0, 102207.0, 102367.0, 102495.0, 102585.0,
+ 102577.0, 102437.0, 102227.0, 102027.0, 101870.0, 101717.0, 101535.0,
+ 101322.0, 101095.0, 100895.0, 100740.0, 100627.0, 100577.0, 100567.0,
+ 100555.0, 100597.0, 100722.0, 100865.0, 101005.0, 101197.0, 101482.0,
+ 101787.0, 101940.0, 101967.0, 101947.0, 101852.0, 101675.0, 101547.0,
+ 101500.0, 101430.0, 101292.0, 101207.0, 101240.0, 101350.0, 101430.0,
+ 101462.0, 101517.0, 101600.0, 101595.0, 101460.0, 101250.0, 101012.0,
+ 100812.0, 100657.0, 100550.0, 100502.0, 100525.0, 100615.0, 100725.0,
+ 100807.0, 100875.0, 100952.0, 101040.0, 101130.0, 101240.0, 101392.0,
+ 101590.0, 101805.0, 102025.0, 102205.0, 102307.0, 102337.0, 102327.0,
+ 102470.0, 102472.0, 102475.0, 102502.0, 102492.0, 102467.0, 102540.0,
+ 102642.0, 102637.0, 102575.0, 102527.0, 102457.0, 102350.0, 102320.0,
+ 102340.0, 102332.0, 102360.0, 102470.0, 102570.0, 102590.0, 102580.0,
+ 102592.0, 102557.0, 102472.0, 102415.0, 102402.0, 102332.0, 102160.0,
+ 102002.0, 102012.0, 102155.0, 102297.0, 102392.0, 102522.0, 102777.0,
+ 103427.0, 103750.0, 103345.0, 103110.0, 103452.0, 104297.0, 104367.0,
+ 103982.0, 103862.0, 103832.0, 103355.0, 102867.0, 102810.0, 103115.0,
+ 103397.0, 103472.0, 103500.0, 103475.0, 103402.0, 103302.0, 103060.0,
+ 102690.0, 102412.0, 102275.0, 102172.0, 102092.0, 102042.0, 101965.0,
+ 101812.0, 101572.0, 101287.0, 100990.0, 100722.0, 100552.0, 100495.0,
+ 100462.0, 100375.0, 100262.0, 100242.0, 100387.0, 100655.0, 100967.0,
+ 101305.0, 101640.0, 101930.0, 102142.0, 102297.0, 102430.0, 102512.0,
+ 102510.0, 102420.0, 102267.0, 102087.0, 101897.0, 101667.0, 101410.0,
+ 101160.0, 100940.0, 100770.0, 100700.0, 100717.0, 100767.0, 100827.0,
+ 100837.0, 100785.0, 100807.0, 100975.0, 101290.0, 101580.0, 101700.0,
+ 101725.0, 101725.0, 101650.0, 101505.0, 101405.0, 101380.0, 101390.0,
+ 101397.0, 101395.0, 101412.0, 101490.0, 101625.0, 101695.0, 101670.0,
+ 101625.0, 101590.0, 101532.0, 101455.0, 101387.0, 101320.0, 101230.0,
+ 101177.0, 101200.0, 101267.0, 101325.0, 101360.0, 101400.0, 101480.0,
+ 101590.0, 101707.0, 101845.0, 102010.0, 102165.0, 102272.0, 102352.0,
+ 102407.0, 102420.0, 102412.0, 102432.0, 102680.0, 102647.0, 102665.0,
+ 102795.0, 102857.0, 102795.0, 102742.0, 102697.0, 102602.0, 102522.0,
+ 102535.0, 102377.0, 102017.0, 101747.0, 101680.0, 101762.0, 101962.0,
+ 102227.0, 102422.0, 102510.0, 102552.0, 102617.0, 102657.0, 102647.0,
+ 102602.0, 102530.0, 102480.0, 102450.0, 102385.0, 102322.0, 102280.0,
+ 102265.0, 102290.0, 102355.0, 102427.0, 102715.0, 103265.0, 103480.0,
+ 103327.0, 103347.0, 103687.0, 103770.0, 103667.0, 103622.0, 103452.0,
+ 102810.0, 102327.0, 102290.0, 102627.0, 102852.0, 102950.0, 103117.0,
+ 103252.0, 103200.0, 103120.0, 102875.0, 102472.0, 102197.0, 101987.0,
+ 101780.0, 101642.0, 101585.0, 101490.0, 101327.0, 101145.0, 100952.0,
+ 100717.0, 100447.0, 100190.0, 99992.0, 99847.0, 99690.0, 99480.0,
+ 99335.0, 99412.0, 99732.0, 100177.0, 100635.0, 101047.0, 101395.0,
+ 101692.0, 101977.0, 102257.0, 102465.0, 102572.0, 102630.0, 102652.0,
+ 102590.0, 102425.0, 102205.0, 101972.0, 101742.0, 101527.0, 101335.0,
+ 101167.0, 101090.0, 101105.0, 101137.0, 101145.0, 101037.0, 100862.0,
+ 100875.0, 101082.0, 101342.0, 101445.0, 101402.0, 101330.0, 101282.0,
+ 101217.0, 101192.0, 101205.0, 101265.0, 101342.0, 101427.0, 101550.0,
+ 101710.0, 101890.0, 101997.0, 101987.0, 101965.0, 101902.0, 101802.0,
+ 101755.0, 101752.0, 101702.0, 101612.0, 101572.0, 101590.0, 101597.0,
+ 101590.0, 101620.0, 101700.0, 101835.0, 102007.0, 102162.0, 102282.0,
+ 102375.0, 102430.0, 102427.0, 102445.0, 102537.0, 102645.0, 102687.0,
+ 102690.0, 102830.0, 102782.0, 102707.0, 102702.0, 102695.0, 102637.0,
+ 102642.0, 102682.0, 102617.0, 102490.0, 102235.0, 101822.0, 101462.0,
+ 101222.0, 101172.0, 101330.0, 101567.0, 101862.0, 102040.0, 102162.0,
+ 102317.0, 102437.0, 102492.0, 102555.0, 102602.0, 102532.0, 102407.0,
+ 102347.0, 102312.0, 102312.0, 102360.0, 102310.0, 102252.0, 102367.0,
+ 102552.0, 102665.0, 102765.0, 102987.0, 103215.0, 103395.0, 103542.0,
+ 103630.0, 103587.0, 103345.0, 102960.0, 102510.0, 102060.0, 101977.0,
+ 102217.0, 102292.0, 102365.0, 102642.0, 103030.0, 103112.0, 102960.0,
+ 102647.0, 102387.0, 102220.0, 101972.0, 101657.0, 101455.0, 101335.0,
+ 101155.0, 100957.0, 100820.0, 100717.0, 100622.0, 100487.0, 100265.0,
+ 99997.0, 99787.0, 99587.0, 99300.0, 99047.0, 99075.0, 99432.0, 99917.0,
+ 100350.0, 100732.0, 101132.0, 101527.0, 101872.0, 102150.0, 102365.0,
+ 102545.0, 102715.0, 102812.0, 102790.0, 102675.0, 102545.0, 102397.0,
+ 102195.0, 101975.0, 101800.0, 101617.0, 101515.0, 101555.0, 101555.0,
+ 101560.0, 101547.0, 101322.0, 101075.0, 101067.0, 101195.0, 101302.0,
+ 101320.0, 101232.0, 101147.0, 101152.0, 101240.0, 101355.0, 101457.0,
+ 101540.0, 101660.0, 101832.0, 101965.0, 102040.0, 102127.0, 102217.0,
+ 102225.0, 102150.0, 102047.0, 101990.0, 101947.0, 101840.0, 101687.0,
+ 101607.0, 101585.0, 101577.0, 101630.0, 101747.0, 101910.0, 102125.0,
+ 102355.0, 102500.0, 102522.0, 102502.0, 102535.0, 102605.0, 102682.0,
+ 102760.0, 102842.0, 102862.0, 102845.0, 102855.0, 102682.0, 102530.0,
+ 102497.0, 102565.0, 102557.0, 102437.0, 102355.0, 102440.0, 102387.0,
+ 101932.0, 101570.0, 101382.0, 101250.0, 101297.0, 101492.0, 101682.0,
+ 102020.0, 102072.0, 101920.0, 102065.0, 102265.0, 102365.0, 102410.0,
+ 102470.0, 102435.0, 102345.0, 102410.0, 102382.0, 102242.0, 102335.0,
+ 102510.0, 102700.0, 102837.0, 102832.0, 102760.0, 102627.0, 102590.0,
+ 102730.0, 102960.0, 103127.0, 103217.0, 103220.0, 103067.0, 102895.0,
+ 102675.0, 102337.0, 102097.0, 102052.0, 102080.0, 102185.0, 102505.0,
+ 102840.0, 102850.0, 102662.0, 102442.0, 102292.0, 102167.0, 101915.0,
+ 101630.0, 101492.0, 101375.0, 101142.0, 100907.0, 100735.0, 100612.0,
+ 100575.0, 100572.0, 100457.0, 100240.0, 100030.0, 99812.0, 99545.0,
+ 99340.0, 99377.0, 99645.0, 100002.0, 100375.0, 100775.0, 101200.0,
+ 101572.0, 101872.0, 102150.0, 102425.0, 102652.0, 102802.0, 102870.0,
+ 102862.0, 102820.0, 102755.0, 102640.0, 102452.0, 102255.0, 102092.0,
+ 101902.0, 101872.0, 101947.0, 101745.0, 101510.0, 101422.0, 101412.0,
+ 101287.0, 101065.0, 100985.0, 101050.0, 101140.0, 101207.0, 101302.0,
+ 101450.0, 101587.0, 101702.0, 101827.0, 101915.0, 101977.0, 102070.0,
+ 102125.0, 102147.0, 102192.0, 102220.0, 102165.0, 102085.0, 101992.0,
+ 101877.0, 101760.0, 101642.0, 101527.0, 101470.0, 101490.0, 101612.0,
+ 101812.0, 102030.0, 102237.0, 102407.0, 102520.0, 102590.0, 102652.0,
+ 102702.0, 102770.0, 102857.0, 102882.0, 102852.0, 102877.0, 102975.0,
+ 102967.0, 102750.0, 102650.0, 102585.0, 102517.0, 102495.0, 102460.0,
+ 102305.0, 102092.0, 102020.0, 102050.0, 101885.0, 101730.0, 101625.0,
+ 101607.0, 101632.0, 101770.0, 102027.0, 102365.0, 102365.0, 101970.0,
+ 101937.0, 102067.0, 102152.0, 102282.0, 102330.0, 102255.0, 102272.0,
+ 102422.0, 102407.0, 102785.0, 102900.0, 102512.0, 102457.0, 102525.0,
+ 102562.0, 102555.0, 102592.0, 102565.0, 102437.0, 102502.0, 102660.0,
+ 102815.0, 102820.0, 102752.0, 102722.0, 102607.0, 102335.0, 102075.0,
+ 101965.0, 101987.0, 102152.0, 102460.0, 102680.0, 102722.0, 102650.0,
+ 102500.0, 102305.0, 102082.0, 101822.0, 101580.0, 101457.0, 101357.0,
+ 101182.0, 101000.0, 100837.0, 100720.0, 100697.0, 100697.0, 100622.0,
+ 100515.0, 100400.0, 100250.0, 100080.0, 99962.0, 99952.0, 100067.0,
+ 100302.0, 100605.0, 100932.0, 101265.0, 101610.0, 101940.0, 102222.0,
+ 102447.0, 102617.0, 102752.0, 102892.0, 102980.0, 102962.0, 102857.0,
+ 102722.0, 102592.0, 102440.0, 102202.0, 101912.0, 101705.0, 101747.0,
+ 101735.0, 101447.0, 101180.0, 101135.0, 101330.0, 101207.0, 100900.0,
+ 100885.0, 101035.0, 101227.0, 101457.0, 101690.0, 101837.0, 101945.0,
+ 102087.0, 102207.0, 102220.0, 102265.0, 102307.0, 102270.0, 102195.0,
+ 102067.0, 101907.0, 101782.0, 101655.0, 101527.0, 101472.0, 101467.0,
+ 101495.0, 101575.0, 101712.0, 101912.0, 102107.0, 102255.0, 102410.0,
+ 102545.0, 102592.0, 102622.0, 102750.0, 102890.0, 102942.0, 102930.0,
+ 102892.0, 102815.0, 102850.0, 103117.0, 103035.0, 102770.0, 102710.0,
+ 102632.0, 102545.0, 102442.0, 102377.0, 102307.0, 102177.0, 102020.0,
+ 101895.0, 101780.0, 101690.0, 101627.0, 101562.0, 101602.0, 101837.0,
+ 101992.0, 102020.0, 102150.0, 102130.0, 101925.0, 101967.0, 102035.0,
+ 102180.0, 102257.0, 102282.0, 102300.0, 102360.0, 102565.0, 103567.0,
+ 103512.0, 102415.0, 102220.0, 102320.0, 102202.0, 102460.0, 102807.0,
+ 102970.0, 103055.0, 102890.0, 102542.0, 102537.0, 102680.0, 102665.0,
+ 102570.0, 102400.0, 102247.0, 102145.0, 102040.0, 102070.0, 102227.0,
+ 102380.0, 102527.0, 102620.0, 102585.0, 102475.0, 102282.0, 102087.0,
+ 101837.0, 101522.0, 101337.0, 101287.0, 101217.0, 101087.0, 100960.0,
+ 100882.0, 100850.0, 100807.0, 100760.0, 100715.0, 100637.0, 100545.0,
+ 100490.0, 100447.0, 100440.0, 100507.0, 100650.0, 100835.0, 101050.0,
+ 101310.0, 101615.0, 101890.0, 102087.0, 102272.0, 102495.0, 102702.0,
+ 102847.0, 102922.0, 102945.0, 102922.0, 102850.0, 102725.0, 102540.0,
+ 102257.0, 101970.0, 101750.0, 101620.0, 101710.0, 101535.0, 101205.0,
+ 101067.0, 101192.0, 101170.0, 100837.0, 100845.0, 101137.0, 101427.0,
+ 101600.0, 101732.0, 101857.0, 101990.0, 102150.0, 102260.0, 102292.0,
+ 102367.0, 102370.0, 102287.0, 102175.0, 102020.0, 101852.0, 101722.0,
+ 101625.0, 101595.0, 101630.0, 101670.0, 101742.0, 101870.0, 102000.0,
+ 102137.0, 102265.0, 102345.0, 102450.0, 102615.0, 102765.0, 102840.0,
+ 102895.0, 102960.0, 102950.0, 102875.0, 102767.0, 102655.0, 102592.0,
+ 102707.0, 102787.0, 102717.0, 102747.0, 102722.0, 102610.0, 102490.0,
+ 102395.0, 102262.0, 102102.0, 101980.0, 101907.0, 101825.0, 101722.0,
+ 101652.0, 101615.0, 101607.0, 101727.0, 101855.0, 101785.0, 101812.0,
+ 102012.0, 102152.0, 102112.0, 102080.0, 102087.0, 102157.0, 102115.0,
+ 102172.0, 102470.0, 102447.0, 102462.0, 103107.0, 103872.0, 103912.0,
+ 103480.0, 103427.0, 104000.0, 104555.0, 104505.0, 104317.0, 103832.0,
+ 103287.0, 102945.0, 102710.0, 102570.0, 102485.0, 102335.0, 102197.0,
+ 102125.0, 102060.0, 102095.0, 102155.0, 102182.0, 102310.0, 102422.0,
+ 102355.0, 102177.0, 102027.0, 101945.0, 101735.0, 101422.0, 101225.0,
+ 101165.0, 101090.0, 101002.0, 100960.0, 100945.0, 100925.0, 100895.0,
+ 100892.0, 100885.0, 100830.0, 100795.0, 100790.0, 100770.0, 100755.0,
+ 100737.0, 100737.0, 100870.0, 101130.0, 101390.0, 101582.0, 101727.0,
+ 101847.0, 102015.0, 102255.0, 102505.0, 102720.0, 102885.0, 102977.0,
+ 102990.0, 102895.0, 102735.0, 102570.0, 102382.0, 102155.0, 101962.0,
+ 101792.0, 101650.0, 101510.0, 101400.0, 101262.0, 101115.0, 101005.0,
+ 100972.0, 101097.0, 101370.0, 101597.0, 101680.0, 101737.0, 101845.0,
+ 102007.0, 102167.0, 102252.0, 102287.0, 102302.0, 102287.0, 102252.0,
+ 102190.0, 102082.0, 101985.0, 101927.0, 101885.0, 101880.0, 101900.0,
+ 101932.0, 102020.0, 102140.0, 102225.0, 102287.0, 102365.0, 102462.0,
+ 102582.0, 102707.0, 102825.0, 102887.0, 102865.0, 102812.0, 102762.0,
+ 102655.0, 102452.0, 102285.0, 102235.0, 102382.0, 102605.0, 102762.0,
+ 102800.0, 102712.0, 102592.0, 102495.0, 102385.0, 102290.0, 102195.0,
+ 102085.0, 101977.0, 101882.0, 101795.0, 101740.0, 101700.0, 101632.0,
+ 101730.0, 101815.0, 101720.0, 101655.0, 101682.0, 101897.0, 102235.0,
+ 102107.0, 102045.0, 102067.0, 101930.0, 101857.0, 102157.0, 102067.0,
+ 102177.0, 101947.0, 102390.0, 103812.0, 103807.0, 102847.0, 102560.0,
+ 103105.0, 104050.0, 104415.0, 104162.0, 103507.0, 102712.0, 102447.0,
+ 102377.0, 102412.0, 102340.0, 102190.0, 102105.0, 102022.0, 101970.0,
+ 101925.0, 101932.0, 102025.0, 102115.0, 102072.0, 101932.0, 101822.0,
+ 101750.0, 101600.0, 101322.0, 101037.0, 100825.0, 100695.0, 100655.0,
+ 100697.0, 100795.0, 100892.0, 100945.0, 100962.0, 100967.0, 100975.0,
+ 100987.0, 100970.0, 100940.0, 100912.0, 100850.0, 100830.0, 100965.0,
+ 101200.0, 101382.0, 101492.0, 101567.0, 101625.0, 101725.0, 101910.0,
+ 102180.0, 102507.0, 102775.0, 102907.0, 102917.0, 102850.0, 102735.0,
+ 102597.0, 102430.0, 102217.0, 102032.0, 101912.0, 101832.0, 101757.0,
+ 101667.0, 101482.0, 101217.0, 101090.0, 101167.0, 101340.0, 101525.0,
+ 101647.0, 101695.0, 101762.0, 101895.0, 102040.0, 102192.0, 102285.0,
+ 102272.0, 102235.0, 102245.0, 102245.0, 102207.0, 102152.0, 102105.0,
+ 102092.0, 102085.0, 102072.0, 102100.0, 102162.0, 102217.0, 102270.0,
+ 102345.0, 102407.0, 102427.0, 102472.0, 102597.0, 102677.0, 102677.0,
+ 102677.0, 102665.0, 102595.0, 102517.0, 102427.0, 102262.0, 102047.0,
+ 101982.0, 102240.0, 102560.0, 102462.0, 102545.0, 102532.0, 102387.0,
+ 102242.0, 102187.0, 102167.0, 102145.0, 102060.0, 101902.0, 101815.0,
+ 101787.0, 101710.0, 101637.0, 101690.0, 101830.0, 101840.0, 101777.0,
+ 101635.0, 101587.0, 101587.0, 101917.0, 102142.0, 102147.0, 102120.0,
+ 101987.0, 101940.0, 101995.0, 101905.0, 101872.0, 101957.0, 101940.0,
+ 102140.0, 103112.0, 103912.0, 104052.0, 103972.0, 103385.0, 102755.0,
+ 102942.0, 103140.0, 102420.0, 102367.0, 102245.0, 102210.0, 102232.0,
+ 102137.0, 102052.0, 101957.0, 101827.0, 101740.0, 101767.0, 101835.0,
+ 101915.0, 101920.0, 101820.0, 101715.0, 101637.0, 101510.0, 101260.0,
+ 100955.0, 100717.0, 100585.0, 100537.0, 100587.0, 100735.0, 100900.0,
+ 101007.0, 101057.0, 101087.0, 101080.0, 101060.0, 101042.0, 101027.0,
+ 100985.0, 100920.0, 100935.0, 101077.0, 101270.0, 101427.0, 101512.0,
+ 101495.0, 101440.0, 101500.0, 101720.0, 102005.0, 102297.0, 102535.0,
+ 102702.0, 102807.0, 102812.0, 102737.0, 102600.0, 102442.0, 102297.0,
+ 102175.0, 102067.0, 101947.0, 101805.0, 101685.0, 101570.0, 101437.0,
+ 101292.0, 101327.0, 101477.0, 101620.0, 101725.0, 101772.0, 101825.0,
+ 101942.0, 102077.0, 102190.0, 102280.0, 102292.0, 102282.0, 102300.0,
+ 102310.0, 102287.0, 102242.0, 102182.0, 102142.0, 102142.0, 102162.0,
+ 102215.0, 102287.0, 102335.0, 102342.0, 102347.0, 102367.0, 102397.0,
+ 102425.0, 102475.0, 102515.0, 102500.0, 102457.0, 102425.0, 102380.0,
+ 102315.0, 102252.0, 102190.0, 102047.0, 102057.0, 102222.0, 102362.0,
+ 102035.0, 102005.0, 102067.0, 101992.0, 101872.0, 101895.0, 101922.0,
+ 101935.0, 101937.0, 101832.0, 101735.0, 101680.0, 101585.0, 101472.0,
+ 101460.0, 101617.0, 101795.0, 101825.0, 101780.0, 101615.0, 101592.0,
+ 101552.0, 101655.0, 101777.0, 101852.0, 101890.0, 101892.0, 101872.0,
+ 101787.0, 101642.0, 101547.0, 101742.0, 101917.0, 101797.0, 101830.0,
+ 102120.0, 102292.0, 102102.0, 101945.0, 101885.0, 102445.0, 102425.0,
+ 102260.0, 102075.0, 101977.0, 102055.0, 101970.0, 101922.0, 101857.0,
+ 101717.0, 101625.0, 101610.0, 101667.0, 101737.0, 101717.0, 101665.0,
+ 101650.0, 101620.0, 101515.0, 101342.0, 101137.0, 100935.0, 100772.0,
+ 100685.0, 100720.0, 100807.0, 100872.0, 100932.0, 101017.0, 101077.0,
+ 101075.0, 101062.0, 101052.0, 101032.0, 101002.0, 100985.0, 101035.0,
+ 101175.0, 101335.0, 101432.0, 101457.0, 101427.0, 101400.0, 101452.0,
+ 101605.0, 101832.0, 102105.0, 102365.0, 102552.0, 102647.0, 102637.0,
+ 102590.0, 102527.0, 102437.0, 102312.0, 102190.0, 102117.0, 102047.0,
+ 101915.0, 101760.0, 101620.0, 101562.0, 101522.0, 101452.0, 101487.0,
+ 101595.0, 101717.0, 101830.0, 101892.0, 101980.0, 102092.0, 102167.0,
+ 102247.0, 102295.0, 102282.0, 102280.0, 102270.0, 102217.0, 102157.0,
+ 102120.0, 102132.0, 102162.0, 102192.0, 102217.0, 102237.0, 102262.0,
+ 102277.0, 102245.0, 102190.0, 102210.0, 102272.0, 102275.0, 102242.0,
+ 102245.0, 102222.0, 102145.0, 102102.0, 102090.0, 102050.0, 102010.0,
+ 102017.0, 102037.0, 102060.0, 102080.0, 101617.0, 101580.0, 101620.0,
+ 101647.0, 101622.0, 101635.0, 101635.0, 101657.0, 101692.0, 101667.0,
+ 101622.0, 101540.0, 101430.0, 101307.0, 101255.0, 101287.0, 101565.0,
+ 101747.0, 101802.0, 101767.0, 101580.0, 101545.0, 101502.0, 101535.0,
+ 101597.0, 101580.0, 101537.0, 101452.0, 101400.0, 101477.0, 101625.0,
+ 101597.0, 101537.0, 101740.0, 101930.0, 101887.0, 101687.0, 101630.0,
+ 101645.0, 101630.0, 101777.0, 101905.0, 101837.0, 101722.0, 101772.0,
+ 101810.0, 101782.0, 101790.0, 101720.0, 101597.0, 101542.0, 101492.0,
+ 101487.0, 101475.0, 101407.0, 101397.0, 101465.0, 101482.0, 101455.0,
+ 101382.0, 101247.0, 101102.0, 100985.0, 100905.0, 100885.0, 100882.0,
+ 100885.0, 100915.0, 100965.0, 101010.0, 101020.0, 101005.0, 100965.0,
+ 100932.0, 100965.0, 101027.0, 101120.0, 101262.0, 101362.0, 101402.0,
+ 101422.0, 101442.0, 101457.0, 101487.0, 101580.0, 101782.0, 102025.0,
+ 102220.0, 102350.0, 102435.0, 102495.0, 102500.0, 102450.0, 102370.0,
+ 102285.0, 102205.0, 102122.0, 102022.0, 101892.0, 101737.0, 101582.0,
+ 101457.0, 101555.0, 101650.0, 101520.0, 101630.0, 101757.0, 101870.0,
+ 101950.0, 101990.0, 102042.0, 102082.0, 102115.0, 102130.0, 102107.0,
+ 102100.0, 102102.0, 102075.0, 102050.0, 102047.0, 102062.0, 102072.0,
+ 102090.0, 102122.0, 102130.0, 102092.0, 102060.0, 102055.0, 102047.0,
+ 102022.0, 102027.0, 102042.0, 102002.0, 101955.0, 101937.0, 101897.0,
+ 101832.0, 101790.0, 101762.0, 101757.0, 101795.0, 101812.0, 101780.0,
+ 101702.0, 101417.0, 101407.0, 101380.0, 101387.0, 101412.0, 101392.0,
+ 101392.0, 101460.0, 101487.0, 101485.0, 101547.0, 101457.0, 101290.0,
+ 101167.0, 101115.0, 101210.0, 101305.0, 101580.0, 101695.0, 101715.0,
+ 101647.0, 101527.0, 101485.0, 101450.0, 101435.0, 101460.0, 101470.0,
+ 101402.0, 101215.0, 101145.0, 101392.0, 101655.0, 101657.0, 101537.0,
+ 101417.0, 101377.0, 101427.0, 101387.0, 101407.0, 101447.0, 101552.0,
+ 101610.0, 101592.0, 101630.0, 101682.0, 101665.0, 101672.0, 101640.0,
+ 101517.0, 101450.0, 101392.0, 101345.0, 101342.0, 101290.0, 101217.0,
+ 101207.0, 101257.0, 101302.0, 101320.0, 101292.0, 101210.0, 101142.0,
+ 101067.0, 101000.0, 100975.0, 100950.0, 100947.0, 100957.0, 100965.0,
+ 100992.0, 100990.0, 100947.0, 100920.0, 100947.0, 101005.0, 101052.0,
+ 101155.0, 101287.0, 101352.0, 101400.0, 101452.0, 101470.0, 101457.0,
+ 101485.0, 101607.0, 101775.0, 101937.0, 102065.0, 102170.0, 102260.0,
+ 102315.0, 102302.0, 102232.0, 102192.0, 102172.0, 102122.0, 102017.0,
+ 101885.0, 101770.0, 101662.0, 101540.0, 101462.0, 101477.0, 101775.0,
+ 101702.0, 101647.0, 101755.0, 101785.0, 101865.0, 101895.0, 101910.0,
+ 101932.0, 101912.0, 101880.0, 101867.0, 101870.0, 101877.0, 101895.0,
+ 101920.0, 101925.0, 101920.0, 101930.0, 101930.0, 101930.0, 101940.0,
+ 101942.0, 101905.0, 101855.0, 101850.0, 101842.0, 101797.0, 101770.0,
+ 101747.0, 101697.0, 101660.0, 101645.0, 101562.0, 101440.0, 101390.0,
+ 101460.0, 101517.0, 101552.0, 101562.0, 101485.0, 101217.0, 101270.0,
+ 101282.0, 101292.0, 101310.0, 101235.0, 101185.0, 101277.0, 101337.0,
+ 101332.0, 101352.0, 101252.0, 101095.0, 101035.0, 101085.0, 101162.0,
+ 101232.0, 101427.0, 101622.0, 101620.0, 101642.0, 101567.0, 101482.0,
+ 101482.0, 101467.0, 101355.0, 101272.0, 101295.0, 101327.0, 101245.0,
+ 101290.0, 101397.0, 101462.0, 101547.0, 101610.0, 101587.0, 101502.0,
+ 101350.0, 101235.0, 101200.0, 101282.0, 101455.0, 101562.0, 101607.0,
+ 101592.0, 101570.0, 101540.0, 101432.0, 101370.0, 101362.0, 101272.0,
+ 101265.0, 101250.0, 101180.0, 101167.0, 101160.0, 101187.0, 101225.0,
+ 101215.0, 101202.0, 101185.0, 101142.0, 101065.0, 101010.0, 101005.0,
+ 100985.0, 100962.0, 100945.0, 100942.0, 100950.0, 100937.0, 100925.0,
+ 100940.0, 100990.0, 101052.0, 101125.0, 101237.0, 101330.0, 101367.0,
+ 101382.0, 101397.0, 101435.0, 101472.0, 101530.0, 101602.0, 101692.0,
+ 101817.0, 101940.0, 102025.0, 102085.0, 102102.0, 102102.0, 102082.0,
+ 102047.0, 101985.0, 101900.0, 101842.0, 101782.0, 101697.0, 101607.0,
+ 101485.0, 101405.0, 101415.0, 101590.0, 101832.0, 101770.0, 101672.0,
+ 101717.0, 101775.0, 101777.0, 101735.0, 101712.0, 101670.0, 101647.0,
+ 101647.0, 101667.0, 101710.0, 101742.0, 101750.0, 101735.0, 101735.0,
+ 101740.0, 101740.0, 101737.0, 101720.0, 101725.0, 101755.0, 101732.0,
+ 101670.0, 101655.0, 101642.0, 101562.0, 101525.0, 101530.0, 101495.0,
+ 101440.0, 101360.0, 101225.0, 101125.0, 101197.0, 101297.0, 101325.0,
+ 101325.0, 101257.0, 101020.0, 101110.0, 101155.0, 101205.0, 101217.0,
+ 101140.0, 101070.0, 101042.0, 101087.0, 101207.0, 101252.0, 101160.0,
+ 101015.0, 100897.0, 100952.0, 101092.0, 101220.0, 101277.0, 101627.0,
+ 101575.0, 101570.0, 101515.0, 101415.0, 101387.0, 101392.0, 101382.0,
+ 101312.0, 101185.0, 101127.0, 101162.0, 101315.0, 101402.0, 101332.0,
+ 101325.0, 101415.0, 101402.0, 101277.0, 101152.0, 101042.0, 101040.0,
+ 101150.0, 101267.0, 101380.0, 101455.0, 101457.0, 101395.0, 101310.0,
+ 101230.0, 101227.0, 101222.0, 101205.0, 101227.0, 101177.0, 101137.0,
+ 101155.0, 101142.0, 101162.0, 101150.0, 101110.0, 101105.0, 101085.0,
+ 101065.0, 101052.0, 101007.0, 100942.0, 100902.0, 100907.0, 100925.0,
+ 100932.0, 100937.0, 100960.0, 100987.0, 101002.0, 101030.0, 101077.0,
+ 101145.0, 101207.0, 101260.0, 101322.0, 101365.0, 101387.0, 101422.0,
+ 101447.0, 101487.0, 101560.0, 101650.0, 101740.0, 101787.0, 101822.0,
+ 101892.0, 101945.0, 101930.0, 101880.0, 101835.0, 101810.0, 101777.0,
+ 101727.0, 101655.0, 101545.0, 101495.0, 101477.0, 101457.0, 101495.0,
+ 101472.0, 101560.0, 101757.0, 101625.0, 101627.0, 101697.0, 101675.0,
+ 101635.0, 101560.0, 101510.0, 101510.0, 101495.0, 101492.0, 101545.0,
+ 101572.0, 101565.0, 101547.0, 101532.0, 101507.0, 101495.0, 101530.0,
+ 101565.0, 101542.0, 101532.0, 101567.0, 101547.0, 101487.0, 101472.0,
+ 101452.0, 101415.0, 101405.0, 101395.0, 101335.0, 101230.0, 101100.0,
+ 100975.0, 100975.0, 101070.0, 101097.0, 101060.0, 101005.0, 100870.0,
+ 100927.0, 100972.0, 101070.0, 101110.0, 101057.0, 101015.0, 100935.0,
+ 100937.0, 101147.0, 101210.0, 101102.0, 100982.0, 100720.0, 100685.0,
+ 101020.0, 101162.0, 101247.0, 101507.0, 101480.0, 101417.0, 101397.0,
+ 101422.0, 101360.0, 101260.0, 101182.0, 101202.0, 101202.0, 101105.0,
+ 101050.0, 101090.0, 101305.0, 101390.0, 101355.0, 101287.0, 101212.0,
+ 101170.0, 101152.0, 101085.0, 101040.0, 101070.0, 101075.0, 101150.0,
+ 101312.0, 101292.0, 101202.0, 101137.0, 101130.0, 101110.0, 101137.0,
+ 101137.0, 101087.0, 101052.0, 101087.0, 101055.0, 101030.0, 101047.0,
+ 101037.0, 101040.0, 101032.0, 101000.0, 100985.0, 100955.0, 100890.0,
+ 100835.0, 100827.0, 100832.0, 100827.0, 100847.0, 100885.0, 100925.0,
+ 100950.0, 100975.0, 101030.0, 101075.0, 101122.0, 101177.0, 101215.0,
+ 101252.0, 101285.0, 101330.0, 101367.0, 101382.0, 101427.0, 101482.0,
+ 101530.0, 101567.0, 101620.0, 101682.0, 101712.0, 101722.0, 101720.0,
+ 101705.0, 101690.0, 101660.0, 101605.0, 101550.0, 101525.0, 101505.0,
+ 101457.0, 101440.0, 101417.0, 101397.0, 101452.0, 101425.0, 101427.0,
+ 101437.0, 101492.0, 101605.0, 101565.0, 101512.0, 101422.0, 101365.0,
+ 101345.0, 101360.0, 101367.0, 101392.0, 101402.0, 101390.0, 101372.0,
+ 101370.0, 101375.0, 101357.0, 101317.0, 101342.0, 101390.0, 101360.0,
+ 101315.0, 101320.0, 101317.0, 101305.0, 101327.0, 101325.0, 101292.0,
+ 101280.0, 101250.0, 101150.0, 101015.0, 100895.0, 100840.0, 100855.0,
+ 100870.0, 100847.0, 100830.0, 100765.0, 100800.0, 100847.0, 100965.0,
+ 101012.0, 100950.0, 100910.0, 100862.0, 100847.0, 100987.0, 101040.0,
+ 100962.0, 100860.0, 100655.0, 100630.0, 101125.0, 101282.0, 101245.0,
+ 101297.0, 101365.0, 101387.0, 101345.0, 101315.0, 101265.0, 101247.0,
+ 101147.0, 101070.0, 101080.0, 101092.0, 101117.0, 101072.0, 101197.0,
+ 101152.0, 101132.0, 101097.0, 101065.0, 101010.0, 101000.0, 101025.0,
+ 100990.0, 100965.0, 100972.0, 101005.0, 101112.0, 101092.0, 101042.0,
+ 100995.0, 100980.0, 100972.0, 101007.0, 101007.0, 100962.0, 101000.0,
+ 101020.0, 100967.0, 100967.0, 100960.0, 100962.0, 100977.0, 100920.0,
+ 100905.0, 100905.0, 100852.0, 100812.0, 100790.0, 100787.0, 100790.0,
+ 100792.0, 100815.0, 100845.0, 100880.0, 100895.0, 100927.0, 100962.0,
+ 101002.0, 101062.0, 101122.0, 101157.0, 101172.0, 101200.0, 101245.0,
+ 101272.0, 101310.0, 101347.0, 101367.0, 101400.0, 101440.0, 101480.0,
+ 101502.0, 101507.0, 101540.0, 101582.0, 101592.0, 101572.0, 101517.0,
+ 101485.0, 101472.0, 101455.0, 101460.0, 101422.0, 101397.0, 101407.0,
+ 101382.0, 101385.0, 101390.0, 101397.0, 101392.0, 101350.0, 101340.0,
+ 101300.0, 101330.0, 101295.0, 101220.0, 101142.0, 101152.0, 101197.0,
+ 101255.0, 101282.0, 101267.0, 101220.0, 101192.0, 101190.0, 101207.0,
+ 101202.0, 101170.0, 101187.0, 101207.0, 101157.0, 101122.0, 101162.0,
+ 101177.0, 101180.0, 101192.0, 101195.0, 101152.0, 101090.0, 101042.0,
+ 100955.0, 100847.0, 100790.0, 100760.0, 100750.0, 100750.0, 100740.0,
+ 100772.0, 100757.0, 100785.0, 100857.0, 100922.0, 100895.0, 100837.0,
+ 100775.0, 100755.0, 100832.0, 100882.0, 100825.0, 100692.0, 100657.0,
+ 100782.0, 101247.0, 101350.0, 101222.0, 101207.0, 101252.0, 101290.0,
+ 101287.0, 101260.0, 101140.0, 101082.0, 101067.0, 101060.0, 101040.0,
+ 100987.0, 100987.0, 101037.0, 101125.0, 101145.0, 101060.0, 100970.0,
+ 100952.0, 100945.0, 100952.0, 100982.0, 100965.0, 100945.0, 100940.0,
+ 100920.0, 100920.0, 100930.0, 100910.0, 100880.0, 100870.0, 100870.0,
+ 100865.0, 100907.0, 100922.0, 100945.0, 100902.0, 100905.0, 100907.0,
+ 100865.0, 100887.0, 100890.0, 100845.0, 100830.0, 100810.0, 100802.0,
+ 100795.0, 100752.0, 100740.0, 100740.0, 100742.0, 100747.0, 100767.0,
+ 100815.0, 100855.0, 100887.0, 100912.0, 100962.0, 101025.0, 101057.0,
+ 101077.0, 101092.0, 101130.0, 101167.0, 101180.0, 101190.0, 101185.0,
+ 101225.0, 101292.0, 101330.0, 101340.0, 101345.0, 101365.0, 101410.0,
+ 101427.0, 101412.0, 101402.0, 101402.0, 101422.0, 101427.0, 101385.0,
+ 101375.0, 101372.0, 101342.0, 101342.0, 101350.0, 101350.0, 101337.0,
+ 101340.0, 101317.0, 101302.0, 101277.0, 101190.0, 101190.0, 101175.0,
+ 101075.0, 101007.0, 100955.0, 101090.0, 101115.0, 101122.0, 101145.0,
+ 101127.0, 101130.0, 101100.0, 101052.0, 101087.0, 101107.0, 101060.0,
+ 101037.0, 101050.0, 101017.0, 101010.0, 101030.0, 101050.0, 101062.0,
+ 101070.0, 101042.0, 100980.0, 100952.0, 100912.0, 100820.0, 100800.0,
+ 100812.0, 100795.0, 100790.0, 100800.0, 100820.0, 100780.0, 100752.0,
+ 100760.0, 100812.0, 100852.0, 100787.0, 100687.0, 100692.0, 100757.0,
+ 100750.0, 100690.0, 100612.0, 100710.0, 100907.0, 101220.0, 101297.0,
+ 101130.0, 101070.0, 101120.0, 101127.0, 101127.0, 101155.0, 101142.0,
+ 101072.0, 100992.0, 100960.0, 101005.0, 101057.0, 101025.0, 100977.0,
+ 100982.0, 101002.0, 101002.0, 100962.0, 100905.0, 100877.0, 100872.0,
+ 100850.0, 100840.0, 100867.0, 100870.0, 100860.0, 100840.0, 100800.0,
+ 100792.0, 100815.0, 100835.0, 100820.0, 100787.0, 100857.0, 100860.0,
+ 100837.0, 100812.0, 100825.0, 100812.0, 100815.0, 100820.0, 100795.0,
+ 100785.0, 100770.0, 100767.0, 100792.0, 100762.0, 100740.0, 100742.0,
+ 100717.0, 100705.0, 100725.0, 100772.0, 100810.0, 100820.0, 100840.0,
+ 100875.0, 100915.0, 100957.0, 101000.0, 101020.0, 101010.0, 101015.0,
+ 101052.0, 101102.0, 101132.0, 101142.0, 101187.0, 101220.0, 101235.0,
+ 101267.0, 101287.0, 101290.0, 101302.0, 101347.0, 101385.0, 101377.0,
+ 101370.0, 101352.0, 101355.0, 101362.0, 101357.0, 101340.0, 101317.0,
+ 101312.0, 101325.0, 101332.0, 101350.0, 101357.0, 101325.0, 101297.0,
+ 101250.0, 101205.0, 101177.0, 101090.0, 101010.0, 100945.0, 101050.0,
+ 101145.0, 100982.0, 100962.0, 101052.0, 101075.0, 101060.0, 101057.0,
+ 101027.0, 100972.0, 100960.0, 100977.0, 100930.0, 100925.0, 100955.0,
+ 100960.0, 100962.0, 100975.0, 101005.0, 100985.0, 100975.0, 100960.0,
+ 100917.0, 100892.0, 100857.0, 100830.0, 100870.0, 100870.0, 100812.0,
+ 100817.0, 100847.0, 100832.0, 100787.0, 100782.0, 100875.0, 100932.0,
+ 100840.0, 100700.0, 100680.0, 100737.0, 100737.0, 100722.0, 100717.0,
+ 100770.0, 100875.0, 101005.0, 101010.0, 100902.0, 100902.0, 101010.0,
+ 101087.0, 101070.0, 101025.0, 101032.0, 101047.0, 101022.0, 100965.0,
+ 100910.0, 100940.0, 100985.0, 101005.0, 100982.0, 100910.0, 100862.0,
+ 100837.0, 100807.0, 100782.0, 100785.0, 100777.0, 100802.0, 100842.0,
+ 100867.0, 100827.0, 100755.0, 100727.0, 100765.0, 100847.0, 100857.0,
+ 100787.0, 100740.0, 100737.0, 100762.0, 100762.0, 100755.0, 100745.0,
+ 100750.0, 100785.0, 100765.0, 100750.0, 100750.0, 100715.0, 100742.0,
+ 100760.0, 100720.0, 100717.0, 100697.0, 100680.0, 100710.0, 100750.0,
+ 100777.0, 100795.0, 100815.0, 100832.0, 100862.0, 100895.0, 100917.0,
+ 100932.0, 100932.0, 100952.0, 100992.0, 101027.0, 101055.0, 101065.0,
+ 101090.0, 101130.0, 101172.0, 101207.0, 101205.0, 101200.0, 101232.0,
+ 101260.0, 101310.0, 101327.0, 101277.0, 101295.0, 101332.0, 101337.0,
+ 101322.0, 101285.0, 101302.0, 101335.0, 101367.0, 101365.0, 101335.0,
+ 101335.0, 101322.0, 101302.0, 101287.0, 101245.0, 101217.0, 101180.0,
+ 101110.0, 101092.0, 101012.0, 101245.0, 101107.0, 100917.0, 100907.0,
+ 101042.0, 101072.0, 101017.0, 100960.0, 100957.0, 100955.0, 100880.0,
+ 100845.0, 100842.0, 100837.0, 100860.0, 100890.0, 100912.0, 100900.0,
+ 100942.0, 100965.0, 100940.0, 100942.0, 100927.0, 100912.0, 100885.0,
+ 100872.0, 100880.0, 100885.0, 100850.0, 100825.0, 100832.0, 100820.0,
+ 100810.0, 100847.0, 100885.0, 100925.0, 100872.0, 100805.0, 100742.0,
+ 100737.0, 100755.0, 100802.0, 100867.0, 100857.0, 100925.0, 101007.0,
+ 100912.0, 100837.0, 100912.0, 100995.0, 101007.0, 101042.0, 101022.0,
+ 100980.0, 100937.0, 100947.0, 100990.0, 100997.0, 100965.0, 100925.0,
+ 100925.0, 100932.0, 100912.0, 100882.0, 100855.0, 100847.0, 100840.0,
+ 100840.0, 100830.0, 100845.0, 100847.0, 100810.0, 100775.0, 100737.0,
+ 100752.0, 100785.0, 100920.0, 100837.0, 100790.0, 100730.0, 100700.0,
+ 100717.0, 100735.0, 100710.0, 100705.0, 100715.0, 100697.0, 100700.0,
+ 100747.0, 100755.0, 100735.0, 100745.0, 100720.0, 100722.0, 100732.0,
+ 100707.0, 100715.0, 100717.0, 100740.0, 100770.0, 100775.0, 100795.0,
+ 100805.0, 100840.0, 100895.0, 100927.0, 100942.0, 100947.0, 100977.0,
+ 100997.0, 101007.0, 101020.0, 101040.0, 101072.0, 101087.0, 101112.0,
+ 101127.0, 101132.0, 101172.0, 101217.0, 101232.0, 101260.0, 101272.0,
+ 101275.0, 101287.0, 101297.0, 101295.0, 101277.0, 101295.0, 101332.0,
+ 101320.0, 101300.0, 101290.0, 101302.0, 101322.0, 101317.0, 101307.0,
+ 101285.0, 101260.0, 101225.0, 101182.0, 101177.0, 101102.0, 101137.0,
+ 101072.0, 100942.0, 100920.0, 100910.0, 100930.0, 100930.0, 100955.0,
+ 100910.0, 100897.0, 100890.0, 100857.0, 100797.0, 100780.0, 100797.0,
+ 100807.0, 100812.0, 100880.0, 100905.0, 100907.0, 100940.0, 100937.0,
+ 100955.0, 100965.0, 100947.0, 100922.0, 100910.0, 100897.0, 100885.0,
+ 100880.0, 100852.0, 100857.0, 100870.0, 100850.0, 100842.0, 100825.0,
+ 100885.0, 100855.0, 100817.0, 100787.0, 100795.0, 100817.0, 100892.0,
+ 100987.0, 100947.0, 101082.0, 101125.0, 100895.0, 100862.0, 100947.0,
+ 101012.0, 100977.0, 100945.0, 100937.0, 100967.0, 100972.0, 100950.0,
+ 100907.0, 100907.0, 100947.0, 100950.0, 100927.0, 100875.0, 100835.0,
+ 100820.0, 100817.0, 100810.0, 100780.0, 100770.0, 100775.0, 100810.0,
+ 100842.0, 100815.0, 100785.0, 100780.0, 100762.0, 100780.0, 100800.0,
+ 100750.0, 100775.0, 100730.0, 100672.0, 100687.0, 100707.0, 100712.0,
+ 100690.0, 100685.0, 100705.0, 100737.0, 100730.0, 100727.0, 100755.0,
+ 100752.0, 100727.0, 100730.0, 100727.0, 100737.0, 100737.0, 100697.0,
+ 100732.0, 100785.0, 100777.0, 100777.0, 100790.0, 100830.0, 100867.0,
+ 100892.0, 100920.0, 100955.0, 100990.0, 100990.0, 100982.0, 100980.0,
+ 101002.0, 101045.0, 101052.0, 101060.0, 101072.0, 101120.0, 101167.0,
+ 101167.0, 101185.0, 101212.0, 101237.0, 101260.0, 101260.0, 101285.0,
+ 101292.0, 101282.0, 101312.0, 101327.0, 101310.0, 101307.0, 101302.0,
+ 101320.0, 101320.0, 101302.0, 101312.0, 101297.0, 101277.0, 101227.0,
+ 101205.0, 101237.0, 101237.0, 101217.0, 100897.0, 100862.0, 100855.0,
+ 100862.0, 100885.0, 100840.0, 100845.0, 100877.0, 100880.0, 100852.0,
+ 100825.0, 100812.0, 100780.0, 100750.0, 100792.0, 100820.0, 100862.0,
+ 100907.0, 100897.0, 100902.0, 100917.0, 100952.0, 100957.0, 100945.0,
+ 100927.0, 100917.0, 100920.0, 100897.0, 100872.0, 100852.0, 100870.0,
+ 100882.0, 100872.0, 100847.0, 100827.0, 100877.0, 100860.0, 100782.0,
+ 100770.0, 100812.0, 100870.0, 100992.0, 101122.0, 101012.0, 101090.0,
+ 101085.0, 100935.0, 100930.0, 100950.0, 100975.0, 101002.0, 101010.0,
+ 100960.0, 100922.0, 100902.0, 100937.0, 100937.0, 100900.0, 100872.0,
+ 100862.0, 100897.0, 100892.0, 100870.0, 100842.0, 100842.0, 100872.0,
+ 100860.0, 100835.0, 100822.0, 100827.0, 100817.0, 100817.0, 100767.0,
+ 100747.0, 100717.0, 100690.0, 100690.0, 100692.0, 100767.0, 100687.0,
+ 100637.0, 100665.0, 100682.0, 100690.0, 100702.0, 100735.0, 100770.0,
+ 100730.0, 100722.0, 100765.0, 100765.0, 100740.0, 100750.0, 100740.0,
+ 100735.0, 100750.0, 100732.0, 100720.0, 100727.0, 100722.0, 100737.0,
+ 100762.0, 100777.0, 100810.0, 100850.0, 100887.0, 100912.0, 100932.0,
+ 100985.0, 101002.0, 100990.0, 100967.0, 100975.0, 101012.0, 101017.0,
+ 101035.0, 101057.0, 101105.0, 101147.0, 101180.0, 101230.0, 101235.0,
+ 101230.0, 101235.0, 101262.0, 101312.0, 101307.0, 101295.0, 101317.0,
+ 101335.0, 101337.0, 101317.0, 101300.0, 101312.0, 101305.0, 101305.0,
+ 101327.0, 101315.0, 101302.0, 101262.0, 101250.0, 101265.0, 101290.0,
+ 101082.0, 100835.0, 100795.0, 100777.0, 100770.0, 100800.0, 100862.0,
+ 100830.0, 100837.0, 100872.0, 100870.0, 100817.0, 100787.0, 100775.0,
+ 100712.0, 100740.0, 100827.0, 100860.0, 100882.0, 100887.0, 100902.0,
+ 100920.0, 100952.0, 100960.0, 100952.0, 100950.0, 100920.0, 100917.0,
+ 100917.0, 100890.0, 100872.0, 100900.0, 100895.0, 100885.0, 100867.0,
+ 100820.0, 100800.0, 100860.0, 100835.0, 100815.0, 100817.0, 100835.0,
+ 100905.0, 101047.0, 101072.0, 101075.0, 101000.0, 100920.0, 100905.0,
+ 100967.0, 100960.0, 100922.0, 100910.0, 100902.0, 100927.0, 100897.0,
+ 100867.0, 100847.0, 100852.0, 100865.0, 100840.0, 100812.0, 100782.0,
+ 100787.0, 100805.0, 100815.0, 100830.0, 100815.0, 100790.0, 100787.0,
+ 100790.0, 100770.0, 100772.0, 100767.0, 100740.0, 100705.0, 100670.0,
+ 100630.0, 100612.0, 100612.0, 100565.0, 100590.0, 100647.0, 100682.0,
+ 100682.0, 100697.0, 100782.0, 100842.0, 100850.0, 100832.0, 100755.0,
+ 100742.0, 100735.0, 100742.0, 100730.0, 100747.0, 100755.0, 100745.0,
+ 100770.0, 100752.0, 100760.0, 100797.0, 100790.0, 100777.0, 100782.0,
+ 100815.0, 100857.0, 100887.0, 100920.0, 100965.0, 100977.0, 100977.0,
+ 100997.0, 101032.0, 101065.0, 101047.0, 101042.0, 101060.0, 101105.0,
+ 101157.0, 101205.0, 101222.0, 101232.0, 101290.0, 101305.0, 101310.0,
+ 101305.0, 101287.0, 101335.0, 101370.0, 101397.0, 101395.0, 101390.0,
+ 101417.0, 101422.0, 101410.0, 101400.0, 101405.0, 101400.0, 101387.0,
+ 101350.0, 101312.0, 101292.0, 101315.0, 101112.0, 100842.0, 100782.0,
+ 100777.0, 100835.0, 100737.0, 100752.0, 100840.0, 100862.0, 100815.0,
+ 100827.0, 100822.0, 100800.0, 100797.0, 100772.0, 100760.0, 100820.0,
+ 100892.0, 100887.0, 100880.0, 100912.0, 100935.0, 100975.0, 100985.0,
+ 100965.0, 100967.0, 100957.0, 100942.0, 100930.0, 100920.0, 100907.0,
+ 100965.0, 100952.0, 100910.0, 100887.0, 100845.0, 100827.0, 100910.0,
+ 100920.0, 100887.0, 100870.0, 100850.0, 100872.0, 100957.0, 101042.0,
+ 101002.0, 100910.0, 100892.0, 100880.0, 100925.0, 100935.0, 100905.0,
+ 100855.0, 100815.0, 100840.0, 100875.0, 100857.0, 100775.0, 100717.0,
+ 100707.0, 100735.0, 100762.0, 100767.0, 100792.0, 100835.0, 100862.0,
+ 100855.0, 100827.0, 100795.0, 100790.0, 100797.0, 100777.0, 100747.0,
+ 100710.0, 100687.0, 100667.0, 100630.0, 100585.0, 100575.0, 100577.0,
+ 100562.0, 100595.0, 100637.0, 100667.0, 100677.0, 100677.0, 100680.0,
+ 100685.0, 100717.0, 100837.0, 100790.0, 100717.0, 100715.0, 100715.0,
+ 100715.0, 100715.0, 100695.0, 100715.0, 100732.0, 100730.0, 100785.0,
+ 100795.0, 100785.0, 100805.0, 100807.0, 100842.0, 100872.0, 100902.0,
+ 100937.0, 100965.0, 101000.0, 101027.0, 101055.0, 101067.0, 101070.0,
+ 101090.0, 101100.0, 101117.0, 101145.0, 101170.0, 101205.0, 101227.0,
+ 101270.0, 101315.0, 101307.0, 101335.0, 101345.0, 101360.0, 101382.0,
+ 101372.0, 101412.0, 101440.0, 101505.0, 101542.0, 101505.0, 101487.0,
+ 101462.0, 101452.0, 101455.0, 101432.0, 101395.0, 101345.0, 101352.0,
+ 101312.0, 101350.0, 100772.0, 100730.0, 100707.0, 100742.0, 100822.0,
+ 100787.0, 100772.0, 100830.0, 100835.0, 100800.0, 100797.0, 100820.0,
+ 100820.0, 100825.0, 100852.0, 100910.0, 100925.0, 100910.0, 100912.0,
+ 100950.0, 100982.0, 101035.0, 101050.0, 101037.0, 101012.0, 101000.0,
+ 101012.0, 101015.0, 101015.0, 100980.0, 101040.0, 101017.0, 100970.0,
+ 100927.0, 100865.0, 100845.0, 100940.0, 101012.0, 100955.0, 100935.0,
+ 100915.0, 100955.0, 101000.0, 100985.0, 100897.0, 100810.0, 100845.0,
+ 100890.0, 100860.0, 100815.0, 100795.0, 100815.0, 100835.0, 100820.0,
+ 100807.0, 100820.0, 100805.0, 100747.0, 100670.0, 100615.0, 100602.0,
+ 100637.0, 100702.0, 100782.0, 100850.0, 100860.0, 100840.0, 100807.0,
+ 100792.0, 100787.0, 100762.0, 100715.0, 100657.0, 100625.0, 100617.0,
+ 100585.0, 100547.0, 100532.0, 100557.0, 100567.0, 100587.0, 100602.0,
+ 100622.0, 100622.0, 100625.0, 100645.0, 100667.0, 100692.0, 100685.0,
+ 100725.0, 100717.0, 100682.0, 100682.0, 100702.0, 100697.0, 100682.0,
+ 100690.0, 100682.0, 100705.0, 100762.0, 100780.0, 100807.0, 100812.0,
+ 100800.0, 100847.0, 100897.0, 100947.0, 100965.0, 100987.0, 101030.0,
+ 101055.0, 101100.0, 101142.0, 101160.0, 101172.0, 101177.0, 101170.0,
+ 101165.0, 101185.0, 101225.0, 101277.0, 101320.0, 101342.0, 101370.0,
+ 101410.0, 101405.0, 101412.0, 101420.0, 101437.0, 101497.0, 101515.0,
+ 101570.0, 101572.0, 101562.0, 101585.0, 101545.0, 101540.0, 101525.0,
+ 101492.0, 101470.0, 101385.0, 101392.0, 101295.0, 101447.0, 100970.0,
+ 100720.0, 100812.0, 100710.0, 100730.0, 100820.0, 100792.0, 100757.0,
+ 100785.0, 100742.0, 100740.0, 100827.0, 100867.0, 100897.0, 100940.0,
+ 100960.0, 100982.0, 101000.0, 101015.0, 101032.0, 101070.0, 101127.0,
+ 101132.0, 101140.0, 101125.0, 101092.0, 101107.0, 101097.0, 101092.0,
+ 101070.0, 101187.0, 101152.0, 101090.0, 101022.0, 100927.0, 100832.0,
+ 101015.0, 101097.0, 100982.0, 100927.0, 100920.0, 100965.0, 100917.0,
+ 100877.0, 100890.0, 100827.0, 100787.0, 100842.0, 100862.0, 100830.0,
+ 100792.0, 100775.0, 100830.0, 100880.0, 100890.0, 100855.0, 100787.0,
+ 100722.0, 100692.0, 100680.0, 100655.0, 100660.0, 100725.0, 100802.0,
+ 100867.0, 100872.0, 100845.0, 100847.0, 100865.0, 100862.0, 100835.0,
+ 100777.0, 100697.0, 100632.0, 100597.0, 100560.0, 100522.0, 100490.0,
+ 100452.0, 100430.0, 100480.0, 100565.0, 100595.0, 100592.0, 100605.0,
+ 100617.0, 100600.0, 100600.0, 100642.0, 100667.0, 100670.0, 100685.0,
+ 100705.0, 100675.0, 100650.0, 100662.0, 100640.0, 100625.0, 100650.0,
+ 100697.0, 100772.0, 100817.0, 100792.0, 100797.0, 100825.0, 100865.0,
+ 100947.0, 100987.0, 101037.0, 101090.0, 101107.0, 101145.0, 101180.0,
+ 101200.0, 101210.0, 101212.0, 101212.0, 101215.0, 101242.0, 101260.0,
+ 101285.0, 101320.0, 101367.0, 101415.0, 101430.0, 101445.0, 101475.0,
+ 101485.0, 101510.0, 101545.0, 101582.0, 101640.0, 101637.0, 101640.0,
+ 101630.0, 101592.0, 101617.0, 101577.0, 101567.0, 101567.0, 101482.0,
+ 101442.0, 101427.0, 101340.0, 101412.0, 100917.0, 100695.0, 100747.0,
+ 100660.0, 100697.0, 100747.0, 100700.0, 100677.0, 100677.0, 100715.0,
+ 100822.0, 100887.0, 100955.0, 100992.0, 101000.0, 101045.0, 101080.0,
+ 101102.0, 101122.0, 101162.0, 101230.0, 101240.0, 101240.0, 101237.0,
+ 101232.0, 101240.0, 101207.0, 101192.0, 101207.0, 101312.0, 101277.0,
+ 101217.0, 101120.0, 101005.0, 100887.0, 100932.0, 100895.0, 100837.0,
+ 100815.0, 100820.0, 100840.0, 100762.0, 100730.0, 100765.0, 100790.0,
+ 100832.0, 100837.0, 100800.0, 100887.0, 100972.0, 100957.0, 100932.0,
+ 100917.0, 100927.0, 100950.0, 100940.0, 100880.0, 100807.0, 100780.0,
+ 100780.0, 100787.0, 100820.0, 100857.0, 100897.0, 100927.0, 100940.0,
+ 100962.0, 100987.0, 100962.0, 100907.0, 100845.0, 100767.0, 100690.0,
+ 100600.0, 100527.0, 100497.0, 100447.0, 100362.0, 100335.0, 100477.0,
+ 100620.0, 100645.0, 100637.0, 100625.0, 100622.0, 100597.0, 100567.0,
+ 100612.0, 100635.0, 100667.0, 100710.0, 100740.0, 100710.0, 100692.0,
+ 100700.0, 100680.0, 100662.0, 100657.0, 100690.0, 100772.0, 100802.0,
+ 100795.0, 100812.0, 100822.0, 100880.0, 100947.0, 100972.0, 101037.0,
+ 101102.0, 101157.0, 101195.0, 101207.0, 101240.0, 101255.0, 101267.0,
+ 101282.0, 101295.0, 101310.0, 101320.0, 101365.0, 101405.0, 101430.0,
+ 101450.0, 101480.0, 101542.0, 101557.0, 101567.0, 101597.0, 101650.0,
+ 101707.0, 101697.0, 101665.0, 101680.0, 101702.0, 101732.0, 101745.0,
+ 101697.0, 101685.0, 101652.0, 101605.0, 101545.0, 101472.0, 101422.0,
+ 101312.0, 101625.0, 101422.0, 100690.0, 100760.0, 100690.0, 100622.0,
+ 100592.0, 100560.0, 100600.0, 100672.0, 100757.0, 100827.0, 100932.0,
+ 101017.0, 101062.0, 101105.0, 101170.0, 101197.0, 101247.0, 101290.0,
+ 101340.0, 101400.0, 101402.0, 101407.0, 101397.0, 101385.0, 101367.0,
+ 101327.0, 101322.0, 101475.0, 101420.0, 101390.0, 101280.0, 101120.0,
+ 100970.0, 100792.0, 100702.0, 100715.0, 100690.0, 100710.0, 100800.0,
+ 100827.0, 100797.0, 100697.0, 100682.0, 100767.0, 100875.0, 100887.0,
+ 101085.0, 101057.0, 101045.0, 101052.0, 101070.0, 101045.0, 101022.0,
+ 101005.0, 100987.0, 100962.0, 100925.0, 100915.0, 100915.0, 100917.0,
+ 100932.0, 100955.0, 100985.0, 101007.0, 101037.0, 101077.0, 101052.0,
+ 100977.0, 100897.0, 100845.0, 100787.0, 100675.0, 100570.0, 100510.0,
+ 100447.0, 100385.0, 100380.0, 100545.0, 100715.0, 100717.0, 100687.0,
+ 100682.0, 100665.0, 100620.0, 100585.0, 100625.0, 100607.0, 100660.0,
+ 100707.0, 100755.0, 100795.0, 100802.0, 100805.0, 100795.0, 100750.0,
+ 100715.0, 100737.0, 100795.0, 100832.0, 100860.0, 100860.0, 100865.0,
+ 100902.0, 100910.0, 100930.0, 101015.0, 101077.0, 101145.0, 101195.0,
+ 101245.0, 101300.0, 101297.0, 101300.0, 101315.0, 101340.0, 101370.0,
+ 101377.0, 101402.0, 101425.0, 101467.0, 101522.0, 101557.0, 101577.0,
+ 101580.0, 101652.0, 101735.0, 101795.0, 101817.0, 101765.0, 101767.0,
+ 101797.0, 101817.0, 101840.0, 101787.0, 101777.0, 101782.0, 101712.0,
+ 101722.0, 101652.0, 101545.0, 101557.0, 101452.0, 101257.0, 101460.0,
+ 101220.0, 100775.0, 100827.0, 100572.0, 100490.0, 100470.0, 100477.0,
+ 100532.0, 100620.0, 100765.0, 100882.0, 100975.0, 101102.0, 101177.0,
+ 101272.0, 101290.0, 101337.0, 101417.0, 101465.0, 101530.0, 101545.0,
+ 101580.0, 101590.0, 101560.0, 101540.0, 101497.0, 101497.0, 101625.0,
+ 101540.0, 101505.0, 101440.0, 101320.0, 101150.0, 100882.0, 100677.0,
+ 100625.0, 100607.0, 100637.0, 100745.0, 100837.0, 100780.0, 100737.0,
+ 100785.0, 100822.0, 100870.0, 100995.0, 101237.0, 101165.0, 101130.0,
+ 101120.0, 101162.0, 101170.0, 101177.0, 101147.0, 101100.0, 101090.0,
+ 101075.0, 101067.0, 101057.0, 101042.0, 101045.0, 101070.0, 101120.0,
+ 101177.0, 101212.0, 101220.0, 101145.0, 101020.0, 100910.0, 100857.0,
+ 100835.0, 100790.0, 100685.0, 100530.0, 100450.0, 100492.0, 100510.0,
+ 100597.0, 100722.0, 100777.0, 100795.0, 100807.0, 100795.0, 100732.0,
+ 100707.0, 100692.0, 100582.0, 100617.0, 100747.0, 100840.0, 100897.0,
+ 100930.0, 100957.0, 100955.0, 100892.0, 100817.0, 100787.0, 100805.0,
+ 100875.0, 100947.0, 100982.0, 101000.0, 100975.0, 100932.0, 100952.0,
+ 101012.0, 101087.0, 101160.0, 101187.0, 101245.0, 101302.0, 101325.0,
+ 101345.0, 101337.0, 101335.0, 101370.0, 101395.0, 101425.0, 101445.0,
+ 101505.0, 101560.0, 101567.0, 101580.0, 101615.0, 101687.0, 101745.0,
+ 101812.0, 101867.0, 101845.0, 101867.0, 101855.0, 101852.0, 101887.0,
+ 101840.0, 101865.0, 101860.0, 101800.0, 101810.0, 101722.0, 101682.0,
+ 101575.0, 101445.0, 101580.0, 101222.0, 101490.0, 101085.0, 100940.0,
+ 100600.0, 100432.0, 100365.0, 100310.0, 100377.0, 100500.0, 100687.0,
+ 100765.0, 100877.0, 101117.0, 101250.0, 101377.0, 101405.0, 101445.0,
+ 101545.0, 101607.0, 101677.0, 101705.0, 101747.0, 101782.0, 101762.0,
+ 101740.0, 101682.0, 101665.0, 101842.0, 101770.0, 101680.0, 101580.0,
+ 101462.0, 101255.0, 100982.0, 100725.0, 100567.0, 100635.0, 100697.0,
+ 100742.0, 100767.0, 100677.0, 100737.0, 100857.0, 100917.0, 100860.0,
+ 100940.0, 101147.0, 101242.0, 101260.0, 101250.0, 101245.0, 101225.0,
+ 101277.0, 101307.0, 101285.0, 101260.0, 101222.0, 101187.0, 101160.0,
+ 101115.0, 101110.0, 101162.0, 101260.0, 101332.0, 101347.0, 101315.0,
+ 101210.0, 101062.0, 100942.0, 100895.0, 100905.0, 100905.0, 100745.0,
+ 100485.0, 100482.0, 100662.0, 100715.0, 100715.0, 100760.0, 100857.0,
+ 100972.0, 100970.0, 100907.0, 100840.0, 100820.0, 100782.0, 100632.0,
+ 100590.0, 100742.0, 100877.0, 100965.0, 101035.0, 101067.0, 101085.0,
+ 101065.0, 100990.0, 100922.0, 100957.0, 101050.0, 101095.0, 101110.0,
+ 101105.0, 101070.0, 101042.0, 101015.0, 100990.0, 101035.0, 101100.0,
+ 101135.0, 101217.0, 101295.0, 101347.0, 101372.0, 101355.0, 101357.0,
+ 101360.0, 101362.0, 101385.0, 101407.0, 101472.0, 101525.0, 101540.0,
+ 101552.0, 101545.0, 101577.0, 101660.0, 101785.0, 101887.0, 101895.0,
+ 101917.0, 101940.0, 101975.0, 102002.0, 101977.0, 101995.0, 101965.0,
+ 101952.0, 101905.0, 101762.0, 101785.0, 101655.0, 101445.0, 101530.0,
+ 101210.0, 101575.0, 101135.0, 101015.0, 100645.0, 100505.0, 100382.0,
+ 100300.0, 100432.0, 100492.0, 100522.0, 100562.0, 100792.0, 101122.0,
+ 101307.0, 101457.0, 101535.0, 101597.0, 101690.0, 101742.0, 101822.0,
+ 101880.0, 101935.0, 101980.0, 101972.0, 101960.0, 101915.0, 101872.0,
+ 102127.0, 102065.0, 101945.0, 101815.0, 101660.0, 101420.0, 101167.0,
+ 100930.0, 100647.0, 100710.0, 100865.0, 100932.0, 100865.0, 100695.0,
+ 100737.0, 100850.0, 100897.0, 100865.0, 100947.0, 101175.0, 101342.0,
+ 101377.0, 101417.0, 101465.0, 101460.0, 101440.0, 101410.0, 101392.0,
+ 101407.0, 101415.0, 101392.0, 101332.0, 101265.0, 101272.0, 101362.0,
+ 101477.0, 101542.0, 101545.0, 101492.0, 101377.0, 101207.0, 101052.0,
+ 100957.0, 100927.0, 100870.0, 100672.0, 100460.0, 100567.0, 100802.0,
+ 100875.0, 100905.0, 100962.0, 101062.0, 101127.0, 101077.0, 101027.0,
+ 100980.0, 100937.0, 100890.0, 100777.0, 100747.0, 100832.0, 100917.0,
+ 100957.0, 101022.0, 101077.0, 101125.0, 101147.0, 101102.0, 101057.0,
+ 101107.0, 101207.0, 101275.0, 101302.0, 101305.0, 101282.0, 101237.0,
+ 101160.0, 101095.0, 101060.0, 101027.0, 101040.0, 101117.0, 101190.0,
+ 101260.0, 101317.0, 101380.0, 101447.0, 101452.0, 101437.0, 101437.0,
+ 101440.0, 101492.0, 101522.0, 101522.0, 101515.0, 101485.0, 101500.0,
+ 101552.0, 101630.0, 101762.0, 101872.0, 101950.0, 101992.0, 102025.0,
+ 102040.0, 102067.0, 102127.0, 102105.0, 102077.0, 102005.0, 101842.0,
+ 101800.0, 101710.0, 101560.0, 101495.0, 101297.0, 101892.0, 101102.0,
+ 101080.0, 100767.0, 100730.0, 100560.0, 100495.0, 100640.0, 100540.0,
+ 100432.0, 100510.0, 100757.0, 101112.0, 101335.0, 101510.0, 101635.0,
+ 101742.0, 101872.0, 101942.0, 102010.0, 102077.0, 102140.0, 102200.0,
+ 102202.0, 102195.0, 102200.0, 102167.0, 102427.0, 102342.0, 102200.0,
+ 102052.0, 101882.0, 101672.0, 101485.0, 101252.0, 100862.0, 100715.0,
+ 100860.0, 101060.0, 100925.0, 100690.0, 100797.0, 100947.0, 101027.0,
+ 101002.0, 101087.0, 101292.0, 101485.0, 101592.0, 101620.0, 101635.0,
+ 101675.0, 101710.0, 101707.0, 101660.0, 101605.0, 101565.0, 101522.0,
+ 101455.0, 101385.0, 101400.0, 101512.0, 101647.0, 101727.0, 101725.0,
+ 101655.0, 101530.0, 101367.0, 101200.0, 101047.0, 100922.0, 100805.0,
+ 100655.0, 100575.0, 100730.0, 100932.0, 101057.0, 101192.0, 101292.0,
+ 101335.0, 101302.0, 101212.0, 101195.0, 101177.0, 101092.0, 100982.0,
+ 100877.0, 100945.0, 101080.0, 101077.0, 101025.0, 101030.0, 101090.0,
+ 101145.0, 101167.0, 101155.0, 101142.0, 101175.0, 101275.0, 101397.0,
+ 101467.0, 101480.0, 101467.0, 101417.0, 101340.0, 101252.0, 101140.0,
+ 101047.0, 101040.0, 101077.0, 101120.0, 101162.0, 101222.0, 101350.0,
+ 101485.0, 101567.0, 101630.0, 101650.0, 101647.0, 101650.0, 101605.0,
+ 101557.0, 101522.0, 101470.0, 101437.0, 101422.0, 101472.0, 101620.0,
+ 101762.0, 101872.0, 101940.0, 102012.0, 102122.0, 102212.0, 102275.0,
+ 102252.0, 102150.0, 102067.0, 101972.0, 101820.0, 101685.0, 101595.0,
+ 101425.0, 101520.0, 101617.0, 101020.0, 101117.0, 100960.0, 100975.0,
+ 100765.0, 100780.0, 100895.0, 100735.0, 100637.0, 100695.0, 100920.0,
+ 101200.0, 101367.0, 101545.0, 101710.0, 101835.0, 101987.0, 102090.0,
+ 102195.0, 102295.0, 102355.0, 102430.0, 102460.0, 102455.0, 102480.0,
+ 102482.0, 102702.0, 102630.0, 102482.0, 102290.0, 102085.0, 101887.0,
+ 101702.0, 101432.0, 101057.0, 100785.0, 100782.0, 100870.0, 100757.0,
+ 100720.0, 100822.0, 100977.0, 101157.0, 101245.0, 101330.0, 101455.0,
+ 101610.0, 101780.0, 101877.0, 101920.0, 101940.0, 101937.0, 101920.0,
+ 101880.0, 101835.0, 101780.0, 101712.0, 101630.0, 101587.0, 101640.0,
+ 101757.0, 101870.0, 101912.0, 101895.0, 101842.0, 101752.0, 101602.0,
+ 101415.0, 101197.0, 101007.0, 100882.0, 100825.0, 100845.0, 101005.0,
+ 101195.0, 101345.0, 101482.0, 101545.0, 101547.0, 101482.0, 101375.0,
+ 101317.0, 101277.0, 101185.0, 101050.0, 100985.0, 101135.0, 101287.0,
+ 101202.0, 101087.0, 101025.0, 101010.0, 101030.0, 101077.0, 101112.0,
+ 101115.0, 101137.0, 101267.0, 101435.0, 101530.0, 101567.0, 101562.0,
+ 101515.0, 101460.0, 101380.0, 101272.0, 101185.0, 101122.0, 101072.0,
+ 101057.0, 101037.0, 101082.0, 101292.0, 101530.0, 101717.0, 101847.0,
+ 101875.0, 101860.0, 101832.0, 101770.0, 101692.0, 101560.0, 101387.0,
+ 101277.0, 101247.0, 101315.0, 101437.0, 101530.0, 101682.0, 101827.0,
+ 101915.0, 102040.0, 102162.0, 102285.0, 102350.0, 102250.0, 102140.0,
+ 102060.0, 101900.0, 101740.0, 101607.0, 101415.0, 101537.0, 101215.0,
+ 101085.0, 101137.0, 101167.0, 101210.0, 101112.0, 101167.0, 101210.0,
+ 101092.0, 101015.0, 101035.0, 101147.0, 101310.0, 101392.0, 101537.0,
+ 101725.0, 101895.0, 102070.0, 102187.0, 102302.0, 102437.0, 102512.0,
+ 102597.0, 102680.0, 102707.0, 102737.0, 102742.0, 102867.0, 102782.0,
+ 102670.0, 102512.0, 102312.0, 102080.0, 101825.0, 101525.0, 101252.0,
+ 101097.0, 100975.0, 100835.0, 100762.0, 100880.0, 101002.0, 101157.0,
+ 101262.0, 101335.0, 101432.0, 101560.0, 101697.0, 101862.0, 102015.0,
+ 102130.0, 102192.0, 102200.0, 102192.0, 102185.0, 102175.0, 102112.0,
+ 101992.0, 101865.0, 101807.0, 101857.0, 101960.0, 102047.0, 102085.0,
+ 102085.0, 102070.0, 102015.0, 101895.0, 101720.0, 101505.0, 101307.0,
+ 101175.0, 101107.0, 101137.0, 101307.0, 101495.0, 101627.0, 101772.0,
+ 101872.0, 101875.0, 101785.0, 101645.0, 101545.0, 101465.0, 101307.0,
+ 101160.0, 101190.0, 101372.0, 101332.0, 101110.0, 100932.0, 100827.0,
+ 100762.0, 100772.0, 100877.0, 100985.0, 101017.0, 101040.0, 101145.0,
+ 101310.0, 101467.0, 101577.0, 101567.0, 101475.0, 101400.0, 101332.0,
+ 101260.0, 101212.0, 101155.0, 101100.0, 101050.0, 100987.0, 101050.0,
+ 101297.0, 101577.0, 101825.0, 102000.0, 102060.0, 102055.0, 102012.0,
+ 101925.0, 101792.0, 101580.0, 101332.0, 101150.0, 101037.0, 101055.0,
+ 101190.0, 101327.0, 101505.0, 101685.0, 101777.0, 101910.0, 102120.0,
+ 102317.0, 102445.0, 102392.0, 102285.0, 102152.0, 101960.0, 101732.0,
+ 101517.0, 101332.0, 101447.0, 101215.0, 101127.0, 101192.0, 101295.0,
+ 101365.0, 101420.0, 101507.0, 101477.0, 101395.0, 101387.0, 101365.0,
+ 101397.0, 101467.0, 101495.0, 101582.0, 101750.0, 101922.0, 102110.0,
+ 102245.0, 102345.0, 102467.0, 102577.0, 102690.0, 102815.0, 102887.0,
+ 102927.0, 102927.0, 103052.0, 102960.0, 102842.0, 102702.0, 102507.0,
+ 102275.0, 102025.0, 101770.0, 101562.0, 101422.0, 101292.0, 101130.0,
+ 101022.0, 101070.0, 101225.0, 101390.0, 101467.0, 101480.0, 101477.0,
+ 101530.0, 101665.0, 101847.0, 102020.0, 102170.0, 102282.0, 102372.0,
+ 102437.0, 102462.0, 102457.0, 102430.0, 102375.0, 102292.0, 102237.0,
+ 102217.0, 102227.0, 102262.0, 102305.0, 102320.0, 102315.0, 102277.0,
+ 102207.0, 102100.0, 101957.0, 101807.0, 101680.0, 101580.0, 101582.0,
+ 101710.0, 101855.0, 101960.0, 102082.0, 102167.0, 102165.0, 102082.0,
+ 101952.0, 101830.0, 101697.0, 101487.0, 101317.0, 101397.0, 101520.0,
+ 101370.0, 101090.0, 100870.0, 100717.0, 100565.0, 100487.0, 100557.0,
+ 100737.0, 100897.0, 100977.0, 101052.0, 101192.0, 101372.0, 101507.0,
+ 101515.0, 101442.0, 101367.0, 101272.0, 101145.0, 101052.0, 101020.0,
+ 101025.0, 100995.0, 100947.0, 101042.0, 101300.0, 101605.0, 101892.0,
+ 102100.0, 102202.0, 102222.0, 102175.0, 102077.0, 101920.0, 101652.0,
+ 101307.0, 101012.0, 100802.0, 100782.0, 100957.0, 101145.0, 101320.0,
+ 101522.0, 101682.0, 101832.0, 102027.0, 102245.0, 102405.0, 102465.0,
+ 102417.0, 102267.0, 102075.0, 101775.0, 101440.0, 101210.0, 101267.0,
+ 101145.0, 101112.0, 101207.0, 101325.0, 101405.0, 101537.0, 101667.0,
+ 101647.0, 101627.0, 101652.0, 101627.0, 101635.0, 101645.0, 101610.0,
+ 101647.0, 101762.0, 101885.0, 102020.0, 102152.0, 102265.0, 102385.0,
+ 102507.0, 102647.0, 102800.0, 102942.0, 103042.0, 103080.0, 103095.0,
+ 103037.0, 102915.0, 102762.0, 102570.0, 102352.0, 102145.0, 101942.0,
+ 101762.0, 101602.0, 101455.0, 101357.0, 101370.0, 101452.0, 101517.0,
+ 101570.0, 101647.0, 101700.0, 101672.0, 101605.0, 101552.0, 101607.0,
+ 101797.0, 102055.0, 102277.0, 102442.0, 102565.0, 102647.0, 102707.0,
+ 102755.0, 102757.0, 102720.0, 102672.0, 102640.0, 102620.0, 102627.0,
+ 102647.0, 102662.0, 102665.0, 102622.0, 102530.0, 102410.0, 102275.0,
+ 102165.0, 102095.0, 102047.0, 102052.0, 102122.0, 102200.0, 102267.0,
+ 102342.0, 102377.0, 102367.0, 102325.0, 102220.0, 102092.0, 101972.0,
+ 101817.0, 101680.0, 101672.0, 101682.0, 101532.0, 101292.0, 101065.0,
+ 100810.0, 100517.0, 100270.0, 100230.0, 100445.0, 100702.0, 100825.0,
+ 100895.0, 101080.0, 101305.0, 101440.0, 101457.0, 101417.0, 101355.0,
+ 101250.0, 101080.0, 100912.0, 100825.0, 100847.0, 100877.0, 100900.0,
+ 101037.0, 101342.0, 101707.0, 102025.0, 102235.0, 102372.0, 102440.0,
+ 102407.0, 102315.0, 102160.0, 101885.0, 101507.0, 101115.0, 100802.0,
+ 100652.0, 100710.0, 100895.0, 101147.0, 101417.0, 101602.0, 101717.0,
+ 101877.0, 102107.0, 102320.0, 102435.0, 102402.0, 102272.0, 102127.0,
+ 101852.0, 101480.0, 101180.0, 101067.0, 100927.0, 100962.0, 101027.0,
+ 101177.0, 101325.0, 101465.0, 101602.0, 101650.0, 101682.0, 101705.0,
+ 101702.0, 101722.0, 101725.0, 101700.0, 101710.0, 101770.0, 101850.0,
+ 101937.0, 102025.0, 102117.0, 102225.0, 102330.0, 102457.0, 102632.0,
+ 102820.0, 102977.0, 103072.0, 102970.0, 102962.0, 102860.0, 102712.0,
+ 102545.0, 102367.0, 102197.0, 102042.0, 101922.0, 101837.0, 101752.0,
+ 101677.0, 101657.0, 101697.0, 101752.0, 101790.0, 101797.0, 101780.0,
+ 101775.0, 101742.0, 101640.0, 101532.0, 101555.0, 101737.0, 102012.0,
+ 102297.0, 102535.0, 102700.0, 102810.0, 102872.0, 102915.0, 102952.0,
+ 102982.0, 102992.0, 103002.0, 103002.0, 102985.0, 102952.0, 102907.0,
+ 102832.0, 102722.0, 102622.0, 102507.0, 102395.0, 102315.0, 102275.0,
+ 102262.0, 102280.0, 102292.0, 102305.0, 102350.0, 102397.0, 102407.0,
+ 102377.0, 102300.0, 102207.0, 102125.0, 102027.0, 101920.0, 101857.0,
+ 101812.0, 101670.0, 101425.0, 101157.0, 100850.0, 100447.0, 100047.0,
+ 99907.0, 100125.0, 100457.0, 100627.0, 100725.0, 100942.0, 101200.0,
+ 101340.0, 101365.0, 101322.0, 101232.0, 101090.0, 100900.0, 100695.0,
+ 100592.0, 100640.0, 100747.0, 100867.0, 101072.0, 101415.0, 101810.0,
+ 102137.0, 102367.0, 102550.0, 102635.0, 102602.0, 102495.0, 102335.0,
+ 102102.0, 101817.0, 101500.0, 101160.0, 100865.0, 100710.0, 100740.0,
+ 100947.0, 101210.0, 101432.0, 101615.0, 101797.0, 101970.0, 102110.0,
+ 102217.0, 102235.0, 102177.0, 102105.0, 101907.0, 101607.0, 101355.0,
+ 101140.0, 100867.0, 100805.0, 100877.0, 101017.0, 101145.0, 101247.0,
+ 101377.0, 101455.0, 101490.0, 101507.0, 101515.0, 101530.0, 101555.0,
+ 101577.0, 101602.0, 101635.0, 101672.0, 101715.0, 101757.0, 101820.0,
+ 101915.0, 102025.0, 102140.0, 102292.0, 102485.0, 102692.0, 102867.0,
+ 102637.0, 102712.0, 102680.0, 102562.0, 102410.0, 102265.0, 102152.0,
+ 102042.0, 101912.0, 101797.0, 101735.0, 101697.0, 101657.0, 101657.0,
+ 101720.0, 101792.0, 101812.0, 101795.0, 101785.0, 101757.0, 101672.0,
+ 101555.0, 101480.0, 101510.0, 101682.0, 101952.0, 102240.0, 102460.0,
+ 102617.0, 102732.0, 102837.0, 102937.0, 103012.0, 103047.0, 103077.0,
+ 103097.0, 103082.0, 103020.0, 102932.0, 102827.0, 102720.0, 102612.0,
+ 102482.0, 102345.0, 102235.0, 102162.0, 102125.0, 102122.0, 102122.0,
+ 102120.0, 102140.0, 102177.0, 102190.0, 102182.0, 102167.0, 102130.0,
+ 102067.0, 101987.0, 101920.0, 101882.0, 101832.0, 101682.0, 101455.0,
+ 101227.0, 100965.0, 100562.0, 100107.0, 99875.0, 100010.0, 100295.0,
+ 100502.0, 100685.0, 100940.0, 101190.0, 101302.0, 101320.0, 101295.0,
+ 101202.0, 101012.0, 100757.0, 100535.0, 100432.0, 100475.0, 100605.0,
+ 100797.0, 101097.0, 101510.0, 101925.0, 102260.0, 102512.0, 102690.0,
+ 102760.0, 102725.0, 102630.0, 102465.0, 102225.0, 101970.0, 101720.0,
+ 101467.0, 101215.0, 100995.0, 100835.0, 100810.0, 100932.0, 101137.0,
+ 101342.0, 101510.0, 101635.0, 101747.0, 101872.0, 101960.0, 101962.0,
+ 101912.0, 101805.0, 101657.0, 101552.0, 101342.0, 100955.0, 100822.0,
+ 100857.0, 100935.0, 101020.0, 101115.0, 101192.0, 101207.0, 101175.0,
+ 101150.0, 101130.0, 101122.0, 101142.0, 101182.0, 101240.0, 101297.0,
+ 101320.0, 101315.0, 101345.0, 101400.0, 101470.0, 101562.0, 101677.0,
+ 101810.0, 101987.0, 102217.0, 102460.0, 102077.0, 102222.0, 102290.0,
+ 102265.0, 102167.0, 102022.0, 101875.0, 101752.0, 101635.0, 101520.0,
+ 101440.0, 101427.0, 101462.0, 101497.0, 101540.0, 101582.0, 101610.0,
+ 101627.0, 101617.0, 101570.0, 101475.0, 101365.0, 101285.0, 101275.0,
+ 101367.0, 101562.0, 101807.0, 102067.0, 102297.0, 102460.0, 102552.0,
+ 102612.0, 102662.0, 102702.0, 102742.0, 102770.0, 102760.0, 102707.0,
+ 102625.0, 102512.0, 102390.0, 102265.0, 102137.0, 102025.0, 101940.0,
+ 101872.0, 101817.0, 101797.0, 101815.0, 101850.0, 101905.0, 101965.0,
+ 102007.0, 102045.0, 102067.0, 102055.0, 102020.0, 101995.0, 101955.0,
+ 101875.0, 101737.0, 101555.0, 101377.0, 101225.0, 101057.0, 100800.0,
+ 100507.0, 100307.0, 100275.0, 100315.0, 100392.0, 100577.0, 100885.0,
+ 101167.0, 101297.0, 101312.0, 101270.0, 101150.0, 100915.0, 100650.0,
+ 100460.0, 100410.0, 100452.0, 100570.0, 100792.0, 101150.0, 101590.0,
+ 102005.0, 102325.0, 102562.0, 102717.0, 102770.0, 102722.0, 102607.0,
+ 102435.0, 102200.0, 101957.0, 101740.0, 101552.0, 101380.0, 101230.0,
+ 101072.0, 100937.0, 100892.0, 100937.0, 101017.0, 101122.0, 101250.0,
+ 101377.0, 101477.0, 101530.0, 101545.0, 101545.0, 101540.0, 101512.0,
+ 101500.0, 101242.0, 100907.0, 100820.0, 100807.0, 100867.0, 100945.0,
+ 100985.0, 100962.0, 100897.0, 100830.0, 100765.0, 100702.0, 100677.0,
+ 100692.0, 100717.0, 100745.0, 100787.0, 100790.0, 100747.0, 100747.0,
+ 100802.0, 100865.0, 100947.0, 101077.0, 101235.0, 101417.0, 101635.0,
+ 101872.0, 101442.0, 101627.0, 101730.0, 101760.0, 101752.0, 101680.0,
+ 101530.0, 101367.0, 101257.0, 101170.0, 101082.0, 101007.0, 100970.0,
+ 101000.0, 101082.0, 101180.0, 101225.0, 101197.0, 101137.0, 101082.0,
+ 101025.0, 100947.0, 100857.0, 100812.0, 100857.0, 101002.0, 101207.0,
+ 101440.0, 101680.0, 101900.0, 102075.0, 102212.0, 102317.0, 102387.0,
+ 102410.0, 102400.0, 102352.0, 102280.0, 102162.0, 102012.0, 101862.0,
+ 101735.0, 101625.0, 101520.0, 101435.0, 101382.0, 101367.0, 101390.0,
+ 101432.0, 101505.0, 101610.0, 101722.0, 101810.0, 101872.0, 101907.0,
+ 101907.0, 101892.0, 101865.0, 101790.0, 101655.0, 101492.0, 101340.0,
+ 101200.0, 101072.0, 100950.0, 100825.0, 100690.0, 100575.0, 100477.0,
+ 100410.0, 100432.0, 100582.0, 100845.0, 101117.0, 101305.0, 101392.0,
+ 101360.0, 101202.0, 100937.0, 100660.0, 100482.0, 100440.0, 100485.0,
+ 100605.0, 100827.0, 101180.0, 101607.0, 102012.0, 102315.0, 102517.0,
+ 102630.0, 102662.0, 102597.0, 102457.0, 102272.0, 102057.0, 101835.0,
+ 101600.0, 101380.0, 101207.0, 101095.0, 101035.0, 100977.0, 100915.0,
+ 100857.0, 100830.0, 100852.0, 100905.0, 100942.0, 100975.0, 101020.0,
+ 101062.0, 101097.0, 101122.0, 101135.0, 101097.0, 100867.0, 100712.0,
+ 100712.0, 100682.0, 100687.0, 100710.0, 100707.0, 100640.0, 100555.0,
+ 100472.0, 100365.0, 100232.0, 100150.0, 100142.0, 100152.0, 100175.0,
+ 100197.0, 100182.0, 100115.0, 100067.0, 100090.0, 100150.0, 100237.0,
+ 100362.0, 100530.0, 100725.0, 100950.0, 101200.0, 100745.0, 100947.0,
+ 101072.0, 101130.0, 101167.0, 101182.0, 101115.0, 100977.0, 100817.0,
+ 100687.0, 100595.0, 100540.0, 100500.0, 100482.0, 100507.0, 100582.0,
+ 100620.0, 100555.0, 100410.0, 100255.0, 100142.0, 100057.0, 99995.0,
+ 99972.0, 100032.0, 100192.0, 100415.0, 100657.0, 100902.0, 101142.0,
+ 101377.0, 101582.0, 101740.0, 101845.0, 101907.0, 101922.0, 101895.0,
+ 101812.0, 101672.0, 101480.0, 101265.0, 101055.0, 100872.0, 100720.0,
+ 100617.0, 100587.0, 100620.0, 100700.0, 100805.0, 100930.0, 101072.0,
+ 101217.0, 101347.0, 101455.0, 101532.0, 101570.0, 101567.0, 101540.0,
+ 101455.0, 101330.0, 101195.0, 101057.0, 100917.0, 100792.0, 100717.0,
+ 100662.0, 100610.0, 100557.0, 100520.0, 100525.0, 100590.0, 100715.0,
+ 100892.0, 101082.0, 101247.0, 101347.0, 101355.0, 101240.0, 101027.0,
+ 100770.0, 100570.0, 100480.0, 100510.0, 100647.0, 100895.0, 101247.0,
+ 101645.0, 102005.0, 102260.0, 102405.0, 102470.0, 102462.0, 102387.0,
+ 102257.0, 102087.0, 101882.0, 101642.0, 101390.0, 101162.0, 100967.0,
+ 100812.0, 100707.0, 100657.0, 100645.0, 100640.0, 100642.0, 100642.0,
+ 100625.0, 100602.0, 100602.0, 100612.0, 100597.0, 100570.0, 100577.0,
+ 100597.0, 100552.0, 100437.0, 100412.0, 100457.0, 100442.0, 100372.0,
+ 100300.0, 100240.0, 100160.0, 100055.0, 99947.0, 99830.0, 99680.0,
+ 99550.0, 99487.0, 99495.0, 99530.0, 99562.0, 99565.0, 99520.0, 99467.0,
+ 99450.0, 99490.0, 99567.0, 99682.0, 99855.0, 100065.0, 100290.0,
+ 100512.0, 100142.0, 100335.0, 100505.0, 100622.0, 100687.0, 100712.0,
+ 100685.0, 100597.0, 100457.0, 100297.0, 100150.0, 100050.0, 99992.0,
+ 99935.0, 99890.0, 99872.0, 99862.0, 99787.0, 99602.0, 99360.0, 99140.0,
+ 98992.0, 98920.0, 98910.0, 98962.0, 99105.0, 99335.0, 99620.0, 99917.0,
+ 100212.0, 100507.0, 100792.0, 101045.0, 101237.0, 101350.0, 101367.0,
+ 101300.0, 101167.0, 100995.0, 100782.0, 100532.0, 100262.0, 100015.0,
+ 99830.0, 99710.0, 99650.0, 99647.0, 99710.0, 99830.0, 99995.0, 100180.0,
+ 100375.0, 100565.0, 100725.0, 100837.0, 100897.0, 100922.0, 100920.0,
+ 100882.0, 100805.0, 100707.0, 100582.0, 100455.0, 100350.0, 100300.0,
+ 100292.0, 100310.0, 100347.0, 100405.0, 100467.0, 100540.0, 100647.0,
+ 100800.0, 100957.0, 101085.0, 101175.0, 101220.0, 101220.0, 101132.0,
+ 100965.0, 100775.0, 100632.0, 100600.0, 100707.0, 100940.0, 101270.0,
+ 101617.0, 101912.0, 102120.0, 102237.0, 102282.0, 102245.0, 102137.0,
+ 101982.0, 101795.0, 101587.0, 101362.0, 101127.0, 100900.0, 100677.0,
+ 100467.0, 100300.0, 100202.0, 100170.0, 100175.0, 100197.0, 100222.0,
+ 100240.0, 100247.0, 100255.0, 100255.0, 100225.0, 100192.0, 100200.0,
+ 100197.0, 100120.0, 99990.0, 99945.0, 99980.0, 99990.0, 99927.0,
+ 99825.0, 99705.0, 99562.0, 99405.0, 99250.0, 99100.0, 98927.0, 98765.0,
+ 98675.0, 98697.0, 98760.0, 98842.0, 98905.0, 98925.0, 98900.0, 98880.0,
+ 98897.0, 98955.0, 99065.0, 99242.0, 99487.0, 99732.0, 99950.0, 99640.0,
+ 99797.0, 99952.0, 100105.0, 100225.0, 100255.0, 100205.0, 100107.0,
+ 99995.0, 99870.0, 99735.0, 99602.0, 99502.0, 99432.0, 99390.0, 99357.0,
+ 99305.0, 99192.0, 98965.0, 98640.0, 98297.0, 98022.0, 97865.0, 97830.0,
+ 97890.0, 98052.0, 98307.0, 98625.0, 98970.0, 99322.0, 99677.0, 100015.0,
+ 100295.0, 100500.0, 100612.0, 100635.0, 100575.0, 100452.0, 100260.0,
+ 100015.0, 99727.0, 99420.0, 99130.0, 98887.0, 98717.0, 98620.0, 98597.0,
+ 98650.0, 98762.0, 98920.0, 99102.0, 99312.0, 99530.0, 99725.0, 99880.0,
+ 99982.0, 100042.0, 100065.0, 100060.0, 100035.0, 100007.0, 99972.0,
+ 99927.0, 99877.0, 99837.0, 99822.0, 99830.0, 99885.0, 99980.0, 100092.0,
+ 100210.0, 100335.0, 100472.0, 100602.0, 100705.0, 100797.0, 100900.0,
+ 101000.0, 101057.0, 101052.0, 101000.0, 100922.0, 100875.0, 100910.0,
+ 101062.0, 101302.0, 101565.0, 101787.0, 101940.0, 102022.0, 102032.0,
+ 101970.0, 101842.0, 101665.0, 101460.0, 101232.0, 100992.0, 100747.0,
+ 100492.0, 100237.0, 100010.0, 99845.0, 99740.0, 99662.0, 99617.0,
+ 99612.0, 99645.0, 99692.0, 99725.0, 99752.0, 99777.0, 99792.0, 99790.0,
+ 99762.0, 99695.0, 99575.0, 99432.0, 99340.0, 99322.0, 99320.0, 99285.0,
+ 99202.0, 99067.0, 98887.0, 98685.0, 98477.0, 98255.0, 98032.0, 97865.0,
+ 97812.0, 97877.0, 98022.0, 98195.0, 98367.0, 98487.0, 98530.0, 98522.0,
+ 98522.0, 98542.0, 98602.0, 98722.0, 98935.0, 99195.0, 99450.0, 99182.0,
+ 99365.0, 99520.0, 99662.0, 99782.0, 99830.0, 99797.0, 99710.0, 99620.0,
+ 99545.0, 99457.0, 99352.0, 99227.0, 99107.0, 99010.0, 98945.0, 98905.0,
+ 98835.0, 98662.0, 98345.0, 97927.0, 97505.0, 97187.0, 97032.0, 97042.0,
+ 97212.0, 97515.0, 97895.0, 98292.0, 98672.0, 99020.0, 99327.0, 99572.0,
+ 99752.0, 99862.0, 99907.0, 99882.0, 99762.0, 99550.0, 99265.0, 98960.0,
+ 98655.0, 98377.0, 98132.0, 97930.0, 97792.0, 97715.0, 97697.0, 97735.0,
+ 97832.0, 97995.0, 98205.0, 98435.0, 98652.0, 98835.0, 98980.0, 99082.0,
+ 99142.0, 99182.0, 99205.0, 99222.0, 99222.0, 99215.0, 99217.0, 99237.0,
+ 99260.0, 99280.0, 99307.0, 99372.0, 99490.0, 99640.0, 99800.0, 99947.0,
+ 100075.0, 100182.0, 100280.0, 100380.0, 100485.0, 100597.0, 100715.0,
+ 100827.0, 100910.0, 100962.0, 101020.0, 101112.0, 101245.0, 101390.0,
+ 101512.0, 101595.0, 101625.0, 101595.0, 101502.0, 101370.0, 101197.0,
+ 101000.0, 100767.0, 100527.0, 100287.0, 100047.0, 99802.0, 99577.0,
+ 99395.0, 99270.0, 99180.0, 99127.0, 99105.0, 99110.0, 99120.0, 99130.0,
+ 99145.0, 99172.0, 99192.0, 99177.0, 99130.0, 99060.0, 98985.0, 98907.0,
+ 98832.0, 98772.0, 98727.0, 98680.0, 98595.0, 98447.0, 98240.0, 97992.0,
+ 97710.0, 97420.0, 97167.0, 97042.0, 97080.0, 97252.0, 97497.0, 97767.0,
+ 98025.0, 98237.0, 98350.0, 98372.0, 98355.0, 98350.0, 98367.0, 98420.0,
+ 98537.0, 98725.0, 98955.0, 98692.0, 98892.0, 99055.0, 99175.0, 99260.0,
+ 99317.0, 99352.0, 99360.0, 99327.0, 99260.0, 99172.0, 99067.0, 98960.0,
+ 98845.0, 98740.0, 98672.0, 98650.0, 98645.0, 98592.0, 98432.0, 98152.0,
+ 97805.0, 97467.0, 97210.0, 97075.0, 97102.0, 97277.0, 97562.0, 97895.0,
+ 98222.0, 98530.0, 98817.0, 99075.0, 99270.0, 99382.0, 99397.0, 99325.0,
+ 99185.0, 98992.0, 98762.0, 98520.0, 98270.0, 98032.0, 97810.0, 97610.0,
+ 97427.0, 97267.0, 97137.0, 97077.0, 97097.0, 97220.0, 97405.0, 97610.0,
+ 97805.0, 97977.0, 98120.0, 98242.0, 98335.0, 98415.0, 98472.0, 98512.0,
+ 98520.0, 98517.0, 98522.0, 98555.0, 98592.0, 98627.0, 98662.0, 98727.0,
+ 98830.0, 98972.0, 99140.0, 99312.0, 99487.0, 99652.0, 99792.0, 99902.0,
+ 99992.0, 100092.0, 100207.0, 100335.0, 100460.0, 100567.0, 100675.0,
+ 100780.0, 100877.0, 100955.0, 101015.0, 101055.0, 101060.0, 101015.0,
+ 100907.0, 100770.0, 100610.0, 100422.0, 100205.0, 99972.0, 99747.0,
+ 99535.0, 99325.0, 99120.0, 98947.0, 98810.0, 98720.0, 98657.0, 98607.0,
+ 98567.0, 98530.0, 98515.0, 98527.0, 98560.0, 98587.0, 98595.0, 98577.0,
+ 98552.0, 98515.0, 98465.0, 98395.0, 98332.0, 98280.0, 98217.0, 98115.0,
+ 97952.0, 97727.0, 97450.0, 97130.0, 96812.0, 96595.0, 96560.0, 96727.0,
+ 97027.0, 97360.0, 97660.0, 97922.0, 98132.0, 98252.0, 98280.0, 98250.0,
+ 98222.0, 98230.0, 98260.0, 98302.0, 98377.0, 98507.0, 98155.0, 98320.0,
+ 98500.0, 98665.0, 98802.0, 98927.0, 99030.0, 99095.0, 99102.0, 99050.0,
+ 98970.0, 98905.0, 98857.0, 98827.0, 98785.0, 98737.0, 98692.0, 98650.0,
+ 98602.0, 98532.0, 98425.0, 98272.0, 98090.0, 97905.0, 97752.0, 97682.0,
+ 97712.0, 97840.0, 98037.0, 98255.0, 98470.0, 98670.0, 98837.0, 98942.0,
+ 98967.0, 98930.0, 98852.0, 98765.0, 98660.0, 98525.0, 98355.0, 98165.0,
+ 97977.0, 97812.0, 97652.0, 97480.0, 97285.0, 97092.0, 96940.0, 96867.0,
+ 96890.0, 97002.0, 97170.0, 97367.0, 97562.0, 97727.0, 97840.0, 97897.0,
+ 97915.0, 97912.0, 97910.0, 97912.0, 97917.0, 97932.0, 97952.0, 97980.0,
+ 98017.0, 98075.0, 98152.0, 98255.0, 98367.0, 98490.0, 98627.0, 98777.0,
+ 98937.0, 99082.0, 99207.0, 99327.0, 99445.0, 99557.0, 99665.0, 99765.0,
+ 99872.0, 99990.0, 100107.0, 100207.0, 100270.0, 100292.0, 100295.0,
+ 100277.0, 100230.0, 100145.0, 100030.0, 99905.0, 99765.0, 99610.0,
+ 99437.0, 99260.0, 99085.0, 98915.0, 98752.0, 98595.0, 98455.0, 98332.0,
+ 98235.0, 98157.0, 98110.0, 98092.0, 98102.0, 98135.0, 98172.0, 98207.0,
+ 98227.0, 98240.0, 98227.0, 98192.0, 98130.0, 98052.0, 97977.0, 97890.0,
+ 97792.0, 97675.0, 97542.0, 97385.0, 97207.0, 97012.0, 96837.0, 96750.0,
+ 96815.0, 97035.0, 97317.0, 97577.0, 97757.0, 97872.0, 97947.0, 97977.0,
+ 97952.0, 97887.0, 97835.0, 97837.0, 97877.0, 97932.0, 97977.0, 98040.0,
+ 97750.0, 97945.0, 98187.0, 98440.0, 98667.0, 98835.0, 98947.0, 98992.0,
+ 98997.0, 98980.0, 98955.0, 98937.0, 98922.0, 98900.0, 98865.0, 98812.0,
+ 98755.0, 98695.0, 98632.0, 98570.0, 98507.0, 98442.0, 98377.0, 98312.0,
+ 98250.0, 98197.0, 98180.0, 98217.0, 98312.0, 98450.0, 98592.0, 98715.0,
+ 98785.0, 98792.0, 98745.0, 98670.0, 98597.0, 98527.0, 98450.0, 98357.0,
+ 98252.0, 98150.0, 98055.0, 97980.0, 97895.0, 97752.0, 97570.0, 97370.0,
+ 97197.0, 97100.0, 97087.0, 97160.0, 97290.0, 97445.0, 97580.0, 97682.0,
+ 97702.0, 97652.0, 97555.0, 97455.0, 97380.0, 97362.0, 97382.0, 97432.0,
+ 97495.0, 97560.0, 97620.0, 97682.0, 97735.0, 97802.0, 97867.0, 97942.0,
+ 98037.0, 98147.0, 98260.0, 98370.0, 98472.0, 98580.0, 98687.0, 98792.0,
+ 98880.0, 98960.0, 99037.0, 99130.0, 99220.0, 99302.0, 99357.0, 99392.0,
+ 99410.0, 99412.0, 99390.0, 99340.0, 99265.0, 99175.0, 99082.0, 98985.0,
+ 98892.0, 98800.0, 98702.0, 98600.0, 98492.0, 98382.0, 98265.0, 98162.0,
+ 98067.0, 97997.0, 97955.0, 97950.0, 97970.0, 97997.0, 98025.0, 98047.0,
+ 98057.0, 98045.0, 98010.0, 97940.0, 97845.0, 97735.0, 97605.0, 97490.0,
+ 97380.0, 97305.0, 97277.0, 97272.0, 97282.0, 97287.0, 97287.0, 97312.0,
+ 97395.0, 97532.0, 97680.0, 97772.0, 97792.0, 97757.0, 97695.0, 97612.0,
+ 97510.0, 97405.0, 97325.0, 97310.0, 97352.0, 97425.0, 97512.0, 97615.0,
+ 97885.0, 98125.0, 98385.0, 98630.0, 98817.0, 98935.0, 98977.0, 98972.0,
+ 98947.0, 98930.0, 98920.0, 98905.0, 98882.0, 98855.0, 98815.0, 98775.0,
+ 98717.0, 98632.0, 98515.0, 98375.0, 98247.0, 98187.0, 98197.0, 98262.0,
+ 98332.0, 98367.0, 98392.0, 98422.0, 98480.0, 98547.0, 98605.0, 98630.0,
+ 98615.0, 98560.0, 98487.0, 98417.0, 98370.0, 98345.0, 98315.0, 98272.0,
+ 98212.0, 98137.0, 98052.0, 97970.0, 97897.0, 97820.0, 97747.0, 97672.0,
+ 97612.0, 97585.0, 97592.0, 97630.0, 97690.0, 97757.0, 97790.0, 97752.0,
+ 97657.0, 97552.0, 97467.0, 97422.0, 97415.0, 97412.0, 97440.0, 97480.0,
+ 97527.0, 97570.0, 97602.0, 97617.0, 97620.0, 97605.0, 97595.0, 97585.0,
+ 97575.0, 97567.0, 97552.0, 97547.0, 97552.0, 97570.0, 97617.0, 97682.0,
+ 97755.0, 97837.0, 97925.0, 98025.0, 98125.0, 98225.0, 98312.0, 98392.0,
+ 98460.0, 98512.0, 98545.0, 98552.0, 98535.0, 98507.0, 98480.0, 98460.0,
+ 98445.0, 98435.0, 98422.0, 98402.0, 98367.0, 98322.0, 98265.0, 98207.0,
+ 98147.0, 98090.0, 98047.0, 98020.0, 98000.0, 97990.0, 97972.0, 97952.0,
+ 97930.0, 97887.0, 97827.0, 97742.0, 97655.0, 97555.0, 97450.0, 97352.0,
+ 97285.0, 97262.0, 97275.0, 97320.0, 97382.0, 97450.0, 97512.0, 97562.0,
+ 97625.0, 97697.0, 97745.0, 97762.0, 97745.0, 97692.0, 97607.0, 97512.0,
+ 97410.0, 97312.0, 97247.0, 97230.0, 97277.0, 97372.0, 97505.0, 97677.0,
+ 98675.0, 98862.0, 99032.0, 99152.0, 99237.0, 99275.0, 99285.0, 99270.0,
+ 99245.0, 99205.0, 99137.0, 99055.0, 98955.0, 98860.0, 98777.0, 98710.0,
+ 98647.0, 98572.0, 98495.0, 98435.0, 98430.0, 98482.0, 98532.0, 98507.0,
+ 98400.0, 98282.0, 98267.0, 98362.0, 98477.0, 98530.0, 98517.0, 98497.0,
+ 98497.0, 98495.0, 98477.0, 98447.0, 98430.0, 98440.0, 98467.0, 98480.0,
+ 98450.0, 98365.0, 98260.0, 98175.0, 98145.0, 98192.0, 98282.0, 98392.0,
+ 98480.0, 98525.0, 98530.0, 98517.0, 98515.0, 98517.0, 98497.0, 98420.0,
+ 98287.0, 98137.0, 98005.0, 97920.0, 97877.0, 97870.0, 97880.0, 97892.0,
+ 97900.0, 97892.0, 97880.0, 97855.0, 97820.0, 97767.0, 97675.0, 97565.0,
+ 97435.0, 97287.0, 97135.0, 96982.0, 96842.0, 96727.0, 96650.0, 96607.0,
+ 96612.0, 96647.0, 96722.0, 96827.0, 96955.0, 97092.0, 97232.0, 97362.0,
+ 97477.0, 97575.0, 97655.0, 97722.0, 97782.0, 97837.0, 97897.0, 97960.0,
+ 98022.0, 98080.0, 98125.0, 98157.0, 98167.0, 98155.0, 98125.0, 98085.0,
+ 98045.0, 98005.0, 97970.0, 97937.0, 97907.0, 97880.0, 97845.0, 97822.0,
+ 97805.0, 97790.0, 97775.0, 97765.0, 97785.0, 97805.0, 97785.0, 97705.0,
+ 97607.0, 97522.0, 97472.0, 97432.0, 97425.0, 97442.0, 97480.0, 97517.0,
+ 97560.0, 97592.0, 97610.0, 97612.0, 97607.0, 97585.0, 97590.0, 97587.0,
+ 97595.0, 97620.0, 97672.0, 97760.0, 97892.0, 98060.0, 98255.0, 98467.0,
+ 98982.0, 99035.0, 99080.0, 99117.0, 99167.0, 99235.0, 99322.0, 99430.0,
+ 99547.0, 99662.0, 99755.0, 99822.0, 99865.0, 99885.0, 99887.0, 99885.0,
+ 99872.0, 99855.0, 99800.0, 99697.0, 99522.0, 99247.0, 98890.0, 98525.0,
+ 98252.0, 98140.0, 98190.0, 98315.0, 98435.0, 98540.0, 98665.0, 98825.0,
+ 98970.0, 99017.0, 98955.0, 98837.0, 98755.0, 98740.0, 98800.0, 98895.0,
+ 98972.0, 99017.0, 99030.0, 99047.0, 99105.0, 99230.0, 99410.0, 99622.0,
+ 99802.0, 99910.0, 99907.0, 99802.0, 99635.0, 99440.0, 99247.0, 99080.0,
+ 98947.0, 98850.0, 98792.0, 98780.0, 98797.0, 98827.0, 98822.0, 98745.0,
+ 98590.0, 98390.0, 98197.0, 98040.0, 97915.0, 97795.0, 97667.0, 97515.0,
+ 97350.0, 97182.0, 97015.0, 96845.0, 96680.0, 96520.0, 96382.0, 96267.0,
+ 96185.0, 96152.0, 96167.0, 96237.0, 96350.0, 96495.0, 96647.0, 96805.0,
+ 96950.0, 97075.0, 97180.0, 97272.0, 97350.0, 97430.0, 97512.0, 97597.0,
+ 97680.0, 97757.0, 97822.0, 97867.0, 97892.0, 97892.0, 97882.0, 97865.0,
+ 97850.0, 97832.0, 97832.0, 97847.0, 97867.0, 97892.0, 97925.0, 97965.0,
+ 97985.0, 98002.0, 98017.0, 98075.0, 98160.0, 98217.0, 98185.0, 98060.0,
+ 97900.0, 97765.0, 97675.0, 97597.0, 97542.0, 97500.0, 97485.0, 97477.0,
+ 97492.0, 97527.0, 97565.0, 97610.0, 97680.0, 97760.0, 97857.0, 97957.0,
+ 98065.0, 98162.0, 98270.0, 98392.0, 98527.0, 98665.0, 98790.0, 98900.0,
+ 98877.0, 98825.0, 98795.0, 98800.0, 98872.0, 99012.0, 99210.0, 99445.0,
+ 99685.0, 99912.0, 100112.0, 100272.0, 100395.0, 100477.0, 100517.0,
+ 100517.0, 100465.0, 100355.0, 100165.0, 99897.0, 99560.0, 99195.0,
+ 98852.0, 98587.0, 98427.0, 98392.0, 98450.0, 98582.0, 98767.0, 98990.0,
+ 99220.0, 99400.0, 99480.0, 99442.0, 99320.0, 99185.0, 99097.0, 99090.0,
+ 99157.0, 99270.0, 99390.0, 99475.0, 99525.0, 99547.0, 99575.0, 99630.0,
+ 99725.0, 99842.0, 99957.0, 100030.0, 100020.0, 99920.0, 99742.0,
+ 99527.0, 99312.0, 99130.0, 98997.0, 98930.0, 98917.0, 98940.0, 98962.0,
+ 98945.0, 98842.0, 98667.0, 98460.0, 98265.0, 98110.0, 97985.0, 97870.0,
+ 97712.0, 97535.0, 97350.0, 97185.0, 97040.0, 96917.0, 96810.0, 96707.0,
+ 96615.0, 96537.0, 96475.0, 96430.0, 96410.0, 96425.0, 96465.0, 96532.0,
+ 96605.0, 96695.0, 96787.0, 96870.0, 96945.0, 97007.0, 97072.0, 97147.0,
+ 97230.0, 97317.0, 97407.0, 97497.0, 97585.0, 97667.0, 97742.0, 97810.0,
+ 97862.0, 97905.0, 97937.0, 97970.0, 98017.0, 98082.0, 98157.0, 98227.0,
+ 98285.0, 98335.0, 98377.0, 98410.0, 98430.0, 98445.0, 98447.0, 98437.0,
+ 98395.0, 98320.0, 98227.0, 98137.0, 98072.0, 98000.0, 97935.0, 97875.0,
+ 97815.0, 97780.0, 97775.0, 97792.0, 97837.0, 97915.0, 98015.0, 98117.0,
+ 98207.0, 98287.0, 98360.0, 98435.0, 98530.0, 98637.0, 98752.0, 98847.0,
+ 98910.0, 98935.0, 98920.0, 99232.0, 99135.0, 99052.0, 98990.0, 98957.0,
+ 98957.0, 98992.0, 99057.0, 99137.0, 99232.0, 99330.0, 99427.0, 99525.0,
+ 99612.0, 99680.0, 99720.0, 99720.0, 99667.0, 99552.0, 99380.0, 99167.0,
+ 98945.0, 98745.0, 98600.0, 98530.0, 98542.0, 98632.0, 98790.0, 98992.0,
+ 99200.0, 99385.0, 99510.0, 99550.0, 99505.0, 99400.0, 99250.0, 99097.0,
+ 98965.0, 98860.0, 98782.0, 98732.0, 98697.0, 98680.0, 98677.0, 98692.0,
+ 98725.0, 98770.0, 98812.0, 98850.0, 98855.0, 98830.0, 98767.0, 98685.0,
+ 98595.0, 98520.0, 98475.0, 98475.0, 98510.0, 98572.0, 98632.0, 98662.0,
+ 98632.0, 98545.0, 98415.0, 98267.0, 98127.0, 98007.0, 97892.0, 97772.0,
+ 97622.0, 97462.0, 97305.0, 97172.0, 97065.0, 96975.0, 96917.0, 96887.0,
+ 96872.0, 96880.0, 96882.0, 96895.0, 96912.0, 96930.0, 96937.0, 96960.0,
+ 96992.0, 97027.0, 97080.0, 97147.0, 97235.0, 97330.0, 97432.0, 97522.0,
+ 97595.0, 97647.0, 97677.0, 97697.0, 97727.0, 97765.0, 97827.0, 97907.0,
+ 98002.0, 98107.0, 98227.0, 98350.0, 98472.0, 98572.0, 98645.0, 98675.0,
+ 98677.0, 98667.0, 98655.0, 98650.0, 98642.0, 98630.0, 98607.0, 98575.0,
+ 98527.0, 98475.0, 98422.0, 98360.0, 98310.0, 98262.0, 98225.0, 98202.0,
+ 98202.0, 98225.0, 98277.0, 98345.0, 98427.0, 98505.0, 98562.0, 98600.0,
+ 98635.0, 98682.0, 98767.0, 98900.0, 99070.0, 99237.0, 99375.0, 99447.0,
+ 99457.0, 99410.0, 99327.0, 100085.0, 100070.0, 100042.0, 100017.0,
+ 99992.0, 99972.0, 99952.0, 99940.0, 99930.0, 99910.0, 99895.0, 99872.0,
+ 99847.0, 99812.0, 99765.0, 99702.0, 99612.0, 99500.0, 99365.0, 99212.0,
+ 99050.0, 98892.0, 98757.0, 98657.0, 98595.0, 98590.0, 98632.0, 98720.0,
+ 98837.0, 98972.0, 99102.0, 99210.0, 99285.0, 99307.0, 99285.0, 99220.0,
+ 99127.0, 99017.0, 98905.0, 98797.0, 98707.0, 98637.0, 98587.0, 98555.0,
+ 98532.0, 98520.0, 98502.0, 98482.0, 98447.0, 98400.0, 98345.0, 98282.0,
+ 98227.0, 98187.0, 98160.0, 98157.0, 98172.0, 98205.0, 98245.0, 98277.0,
+ 98290.0, 98282.0, 98252.0, 98202.0, 98137.0, 98067.0, 97997.0, 97920.0,
+ 97852.0, 97765.0, 97677.0, 97590.0, 97507.0, 97440.0, 97387.0, 97355.0,
+ 97337.0, 97337.0, 97330.0, 97345.0, 97367.0, 97390.0, 97417.0, 97445.0,
+ 97475.0, 97505.0, 97532.0, 97562.0, 97585.0, 97607.0, 97622.0, 97632.0,
+ 97637.0, 97647.0, 97655.0, 97670.0, 97700.0, 97742.0, 97802.0, 97892.0,
+ 98007.0, 98145.0, 98297.0, 98455.0, 98597.0, 98705.0, 98765.0, 98777.0,
+ 98755.0, 98712.0, 98672.0, 98650.0, 98650.0, 98662.0, 98677.0, 98675.0,
+ 98660.0, 98630.0, 98590.0, 98547.0, 98502.0, 98475.0, 98460.0, 98455.0,
+ 98472.0, 98510.0, 98565.0, 98635.0, 98707.0, 98772.0, 98815.0, 98842.0,
+ 98852.0, 98870.0, 98915.0, 99005.0, 99145.0, 99327.0, 99525.0, 99717.0,
+ 99875.0, 99987.0, 100057.0, 100085.0, 100032.0, 100007.0, 99965.0,
+ 99917.0, 99867.0, 99812.0, 99765.0, 99720.0, 99682.0, 99652.0, 99632.0,
+ 99617.0, 99605.0, 99600.0, 99592.0, 99585.0, 99570.0, 99550.0, 99522.0,
+ 99495.0, 99462.0, 99430.0, 99400.0, 99375.0, 99362.0, 99360.0, 99372.0,
+ 99385.0, 99407.0, 99427.0, 99445.0, 99450.0, 99445.0, 99425.0, 99387.0,
+ 99342.0, 99290.0, 99232.0, 99172.0, 99120.0, 99070.0, 99027.0, 98992.0,
+ 98962.0, 98940.0, 98912.0, 98892.0, 98867.0, 98840.0, 98815.0, 98785.0,
+ 98750.0, 98720.0, 98690.0, 98662.0, 98637.0, 98615.0, 98595.0, 98577.0,
+ 98552.0, 98525.0, 98495.0, 98462.0, 98430.0, 98400.0, 98370.0, 98342.0,
+ 98320.0, 98305.0, 98290.0, 98277.0, 98262.0, 98252.0, 98235.0, 98217.0,
+ 98202.0, 98187.0, 98175.0, 98162.0, 98152.0, 98145.0, 98137.0, 98130.0,
+ 98127.0, 98122.0, 98115.0, 98105.0, 98087.0, 98065.0, 98042.0, 98012.0,
+ 97985.0, 97947.0, 97912.0, 97882.0, 97870.0, 97870.0, 97890.0, 97935.0,
+ 98007.0, 98100.0, 98212.0, 98330.0, 98437.0, 98532.0, 98597.0, 98640.0,
+ 98655.0, 98652.0, 98642.0, 98630.0, 98622.0, 98627.0, 98647.0, 98667.0,
+ 98692.0, 98712.0, 98727.0, 98732.0, 98735.0, 98730.0, 98722.0, 98720.0,
+ 98720.0, 98720.0, 98730.0, 98742.0, 98760.0, 98785.0, 98817.0, 98860.0,
+ 98912.0, 98982.0, 99070.0, 99170.0, 99292.0, 99425.0, 99557.0, 99687.0,
+ 99807.0, 99905.0, 99977.0, 100020.0, 100037.0, 99987.0, 99960.0,
+ 99925.0, 99892.0, 99852.0, 99817.0, 99790.0, 99762.0, 99740.0, 99725.0,
+ 99715.0, 99712.0, 99712.0, 99720.0, 99732.0, 99750.0, 99765.0, 99780.0,
+ 99797.0, 99807.0, 99815.0, 99810.0, 99807.0, 99790.0, 99762.0, 99725.0,
+ 99685.0, 99635.0, 99570.0, 99505.0, 99435.0, 99365.0, 99290.0, 99220.0,
+ 99150.0, 99085.0, 99027.0, 98972.0, 98932.0, 98892.0, 98862.0, 98842.0,
+ 98830.0, 98827.0, 98832.0, 98845.0, 98862.0, 98890.0, 98922.0, 98957.0,
+ 98997.0, 99037.0, 99080.0, 99117.0, 99150.0, 99177.0, 99197.0, 99212.0,
+ 99210.0, 99197.0, 99175.0, 99145.0, 99105.0, 99060.0, 99005.0, 98947.0,
+ 98890.0, 98832.0, 98782.0, 98727.0, 98680.0, 98640.0, 98602.0, 98572.0,
+ 98547.0, 98525.0, 98505.0, 98485.0, 98467.0, 98457.0, 98442.0, 98427.0,
+ 98415.0, 98405.0, 98392.0, 98382.0, 98372.0, 98362.0, 98355.0, 98350.0,
+ 98350.0, 98350.0, 98360.0, 98377.0, 98402.0, 98432.0, 98472.0, 98517.0,
+ 98567.0, 98625.0, 98682.0, 98735.0, 98790.0, 98835.0, 98875.0, 98905.0,
+ 98930.0, 98947.0, 98957.0, 98967.0, 98972.0, 98975.0, 98982.0, 98990.0,
+ 98995.0, 99005.0, 99012.0, 99022.0, 99032.0, 99042.0, 99052.0, 99067.0,
+ 99080.0, 99102.0, 99130.0, 99160.0, 99202.0, 99247.0, 99305.0, 99365.0,
+ 99435.0, 99510.0, 99587.0, 99665.0, 99740.0, 99812.0, 99877.0, 99932.0,
+ 99975.0, 100007.0, 100025.0, 100035.0, 100030.0, 100012.0, 99935.0,
+ 99927.0, 99920.0, 99912.0, 99905.0, 99890.0, 99880.0, 99865.0, 99850.0,
+ 99837.0, 99817.0, 99802.0, 99785.0, 99767.0, 99747.0, 99725.0, 99705.0,
+ 99682.0, 99657.0, 99632.0, 99602.0, 99575.0, 99547.0, 99515.0, 99482.0,
+ 99452.0, 99420.0, 99387.0, 99352.0, 99320.0, 99287.0, 99252.0, 99225.0,
+ 99195.0, 99167.0, 99140.0, 99117.0, 99095.0, 99075.0, 99062.0, 99050.0,
+ 99042.0, 99035.0, 99032.0, 99032.0, 99040.0, 99047.0, 99057.0, 99067.0,
+ 99085.0, 99102.0, 99122.0, 99137.0, 99157.0, 99180.0, 99197.0, 99217.0,
+ 99235.0, 99252.0, 99260.0, 99272.0, 99277.0, 99282.0, 99282.0, 99282.0,
+ 99277.0, 99267.0, 99257.0, 99247.0, 99232.0, 99217.0, 99207.0, 99187.0,
+ 99175.0, 99155.0, 99145.0, 99130.0, 99120.0, 99110.0, 99100.0, 99092.0,
+ 99087.0, 99090.0, 99087.0, 99090.0, 99097.0, 99102.0, 99115.0, 99125.0,
+ 99137.0, 99152.0, 99172.0, 99190.0, 99207.0, 99230.0, 99250.0, 99275.0,
+ 99292.0, 99315.0, 99337.0, 99355.0, 99375.0, 99395.0, 99410.0, 99427.0,
+ 99442.0, 99455.0, 99467.0, 99482.0, 99492.0, 99505.0, 99515.0, 99522.0,
+ 99535.0, 99542.0, 99555.0, 99565.0, 99577.0, 99587.0, 99600.0, 99612.0,
+ 99630.0, 99645.0, 99660.0, 99680.0, 99697.0, 99715.0, 99737.0, 99755.0,
+ 99777.0, 99797.0, 99817.0, 99835.0, 99852.0, 99867.0, 99882.0, 99897.0,
+ 99912.0, 99922.0, 99930.0, 99932.0, 99935.0, 99937.0, 99937.0, 99515.0,
+ 99515.0, 99515.0, 99515.0, 99515.0, 99515.0, 99515.0, 99515.0, 99515.0,
+ 99515.0, 99515.0, 99515.0, 99515.0, 99515.0, 99515.0, 99515.0, 99515.0,
+ 99515.0, 99515.0, 99515.0, 99515.0, 99515.0, 99515.0, 99515.0, 99515.0,
+ 99515.0, 99515.0, 99515.0, 99515.0, 99515.0, 99515.0, 99515.0, 99515.0,
+ 99515.0, 99515.0, 99515.0, 99515.0, 99515.0, 99515.0, 99515.0, 99515.0,
+ 99515.0, 99515.0, 99515.0, 99515.0, 99515.0, 99515.0, 99515.0, 99515.0,
+ 99515.0, 99515.0, 99515.0, 99515.0, 99515.0, 99515.0, 99515.0, 99515.0,
+ 99515.0, 99515.0, 99515.0, 99515.0, 99515.0, 99515.0, 99515.0, 99515.0,
+ 99515.0, 99515.0, 99515.0, 99515.0, 99515.0, 99515.0, 99515.0, 99515.0,
+ 99515.0, 99515.0, 99515.0, 99515.0, 99515.0, 99515.0, 99515.0, 99515.0,
+ 99515.0, 99515.0, 99515.0, 99515.0, 99515.0, 99515.0, 99515.0, 99515.0,
+ 99515.0, 99515.0, 99515.0, 99515.0, 99515.0, 99515.0, 99515.0, 99515.0,
+ 99515.0, 99515.0, 99515.0, 99515.0, 99515.0, 99515.0, 99515.0, 99515.0,
+ 99515.0, 99515.0, 99515.0, 99515.0, 99515.0, 99515.0, 99515.0, 99515.0,
+ 99515.0, 99515.0, 99515.0, 99515.0, 99515.0, 99515.0, 99515.0, 99515.0,
+ 99515.0, 99515.0, 99515.0, 99515.0, 99515.0, 99515.0, 99515.0, 99515.0,
+ 99515.0, 99515.0, 99515.0, 99515.0, 99515.0, 99515.0, 99515.0, 99515.0,
+ 99515.0, 99515.0, 99515.0, 99515.0, 99515.0, 99515.0, 99515.0])
# Convert SLP from Pa to hPa and fold the flat vector onto the (jm, im) grid.
field = numpy.multiply(slp,0.01)
field.shape = (jm,im)
# Reverse the latitude ordering (presumably so latitudes run the way the
# plotting routines expect -- TODO confirm against plotmap/plotmap_polar).
lats = lats[::-1]

## Create a numpy array of the correct size
#field = numpy.ones((jm,im))
## Make Checker Board of 0 and 1
#im_half = int(im*0.5)
#for j in range(jm):
#    if j % 2:
#        field[j,:] = numpy.array(([0,1]*im_half))
#    else:
#        field[j,:] = numpy.array(([1,0]*im_half))
## Instantiate matplotlib
#plot = plotmap(ptype='pcolor',color_scheme="bone")
## Start simple loop
#for step in range(1):
#    pname = "/Users/mbauer/Desktop/plot_%04d.png" % (step)
#    plot.create_fig()
#    plot.add_field(lons,lats,field)
#    plot.finish(pname)
#    print "Made",pname


## Instantiate matplotlib
#plot = plotmap(ptype='pcolor',color_scheme="bone")
#pname = "/Users/mbauer/Desktop/plot_%04d.png" % (0)
#plot.create_fig()
#plot.add_field(lons,lats,field)
#plot.finish(pname)
#print "Made",pname

# Instantiate matplotlib
# Contour spec: [min, max, step-count]; cints are the color-range bounds.
clevs=[950,1060,5]
cints=[950.0,1060.0]
missing=-999.0
# add missing longitude
#field[:,0] = missing
# use discrete colorbar
d = (clevs[1]-clevs[0])/clevs[2]
# NOTE(review): 'd' is immediately overwritten with '' below, so the
# discrete-colorbar branch (if d:) never runs -- confirm this toggle is
# intentional before removing either line.
d = ''
# Marker locations as (lon, lat) pairs, used to sanity-check the projection.
center_loc = ((180.0,-60.0),(180.0,-80.0),(0.0,-90.0))
for proj in ['laea']:
#for proj in ['laea','stere','aeqd','ortho']:
    if d:
        pplot = plotmap_polar(mproj=proj,hemi='sh',
                              bounding_lat=-30.0,color_scheme="jet",
                              clevs=clevs,cints=cints,missing=missing,
                              clabels=True,discrete=d)
    else:
        pplot = plotmap_polar(mproj=proj,hemi='sh',
                              bounding_lat=-30.0,color_scheme="jet",
                              clevs=clevs,cints=cints,missing=missing,
                              clabels=True)

    # Hard-coded developer output path (left from development).
    pname = "/Users/mbauer/Desktop/pplot_%s.png" % (proj)
    pplot.create_fig()
    pplot.add_pcolor(lons,lats,field)
    #pplot.add_contour(lons,lats,field)
    pplot.add_pnts(center_loc,lons,lats,marker='o',msize=4,
                   mfc='white',mec='black',lw=1.)
    pplot.add_extras()
    pplot.finish(pname)
    print ("Made",pname)
+
diff --git a/diagnostics/etc_composites/util/tracker/polar_fix_v4.py b/diagnostics/etc_composites/util/tracker/polar_fix_v4.py
new file mode 100755
index 000000000..276f01c6c
--- /dev/null
+++ b/diagnostics/etc_composites/util/tracker/polar_fix_v4.py
@@ -0,0 +1,136 @@
def c_partition(pp):
    """Collapse tied minimum-SLP grids into one "central" gridID per run.

    Parameters
    ----------
    pp : sequence of center tuples whose first element is a 1-d gridID
        (int) and whose second element is the center's SLP.

    Returns
    -------
    list of int
        One gridID per contiguous run of gridIDs tied at the minimum SLP.
        An odd-length run yields its middle gridID; an even-length run
        yields the floor-average of its two middle gridIDs (for a run of
        consecutive IDs this is the lower-middle ID, so the result is
        always a member of the run).
    """
    # Minimum SLP over all candidate centers (the original sorted the whole
    # pressure list just to take element 0; min() is the idiomatic O(n) form).
    low = min(x[1] for x in pp)

    # All gridIDs tied at that minimum, in ascending order.
    cmins = sorted(x[0] for x in pp if x[1] == low)

    # Partition the tied gridIDs into runs of consecutive IDs; any gap
    # (difference != 1) starts a new run.  A single tied gridID simply
    # becomes a one-element run, which removes the original's special case.
    runs = [[cmins[0]]]
    for gid in cmins[1:]:
        if gid - runs[-1][-1] == 1:
            runs[-1].append(gid)
        else:
            runs.append([gid])

    # Keep the median gridID of each run.
    new_pp = []
    for run in runs:
        n = len(run)
        if n & 1:  # odd length: exact middle element
            new_pp.append(run[n // 2])
        else:      # even length: floor-average of the two middle elements
            new_pp.append((run[n // 2 - 1] + run[n // 2]) // 2)
    return new_pp
+
def polar_fix(use_all_lons,kept_centers,row_end):
    """Fix problem of too many centers, due to ties, at poles.

    Parameters
    ----------
    use_all_lons : list of latitude-row indices treated as polar caps.
        The first half is used as the southern cap and the second half as
        the northern cap (NOTE(review): assumes SH rows come first --
        confirm against the caller).
    kept_centers : list of center tuples whose first element (g[0]) is a
        1-d gridID.
    row_end : list of the last gridID in each latitude row; used to map a
        center's gridID back to its row index.

    Returns
    -------
    (poleless, dumped) : the centers retained after polar de-duplication,
        and the centers that were discarded.
    """
    poleless = []
    s_polar = []
    n_polar = []
    # Bound-method caching (micro-optimization carried over from the
    # original code).
    poleless_append = poleless.append
    s_polar_append = s_polar.append
    n_polar_append = n_polar.append

    # Split use_all_lons into 2 equal parts (NH,SH)
    dd = len(use_all_lons)//2
    # NOTE(review): need_pole_row is populated but never read in this
    # function -- possibly a leftover; confirm before removing.
    need_pole_row = []
    need_pole_row_append = need_pole_row.append
    for row in use_all_lons: # keep track of rows used
        need_pole_row_append(1)
    for g in kept_centers:
        # Map the center's gridID to its latitude-row index.
        # NOTE(review): if g[0] exceeds every entry of row_end, 'row'
        # silently keeps its value from the previous iteration -- confirm
        # row_end always covers the full grid.
        for rowe in row_end:
            if g[0] <= rowe:
                row = row_end.index(rowe)
                break
        if row in use_all_lons[:dd]:
            s_polar_append(g)
        elif row in use_all_lons[dd:]:
            n_polar_append(g)
        elif g not in poleless:
            poleless_append(g)
        else:
            # A duplicate non-polar center is treated as fatal.
            import sys
            sys.exit("Error in polar filter")

    # For each pole retain the center with the lowest center
    # pressure... if ties then average location.
    if len(s_polar):
        new_polar = c_partition(s_polar)
        # Add any grids
        for p in new_polar:
            poleless_append([x for x in kept_centers if x[0] == p][0])

    if len(n_polar):
        new_polar = c_partition(n_polar)
        # Add any grids
        for p in new_polar:
            poleless_append([x for x in kept_centers if x[0] == p][0])

    # log discard for discards
    dumped = [x for x in kept_centers if x not in poleless]

    # Debug dump of kept/retained/discarded centers (left from development):
    # print "kept_centers"
    # for e in kept_centers:
    #     print e
    # print "poleless"
    # for e in poleless:
    #     print e
    # print "dumped"
    # for e in dumped:
    #     print e
    # sys.exit()

    # Check: every input center must end up either retained or dumped.
    if len(poleless)+len(dumped) != len(kept_centers):
        import sys
        sys.exit("Error in polar_fix")

    return poleless,dumped
+
+if __name__=='__main__':
+
+    # Demo driver: exercise polar_fix on a canned set of centers.
+    # Rows 0 and 72 (first and last latitude rows of a 73-row grid)
+    # are treated as polar.
+    use_all_lons = [0, 72]
+
+    # set with no issues
+#    kept_centers = [(542, 977300, 300, 11000, 977969, 1533, 0), (710, 977800, 400, 13400, 979031, 1733, 0), (804, 960200, 500, 8400, 961671, 1231, 0), (805, 960200, 500, 8500, 961828, 1204, 0), (1129, 965000, 700, 12100, 969734, 1667, 0), (1358, 974200, 900, 6200, 980428, 1616, 0), (1455, 956300, 1000, 1500, 965156, 1727, 0), (1514, 956700, 1000, 7400, 967460, 2640, 0), (1570, 968700, 1000, 13000, 973070, 766, 0), (1633, 967600, 1100, 4900, 975716, 1502, 0), (1761, 957300, 1200, 3300, 967217, 1845, 0), (3319, 1011300, 2300, 700, 1017794, 634, 0), (7317, 1005700, 5000, 11700, 1018754, 1422, 0), (7841, 1000600, 5400, 6500, 1012826, 1427, 0), (7970, 1017500, 5500, 5000, 1026146, 1021, 0), (7971, 1017500, 5500, 5100, 1025738, 1047, 0), (8115, 1017500, 5600, 5100, 1024903, 1295, 0), (8436, 971200, 5800, 8400, 987119, 2065, 0), (8713, 972900, 6000, 7300, 984857, 2638, 0), (8740, 991500, 6000, 10000, 998937, 1509, 0), (8916, 996200, 6100, 13200, 1007151, 1856, 0), (8932, 957000, 6200, 400, 969353, 2413, 0), (8933, 957000, 6200, 500, 967624, 1708, 0), (9237, 967000, 6400, 2100, 973361, 1596, 0), (9371, 966600, 6500, 1100, 969855, 814, 0), (9617, 1008200, 6600, 11300, 1010852, 867, 0), (9755, 1008700, 6700, 10700, 1010347, 605, 0), (10048, 1008500, 6900, 11200, 1009392, 636, 0), (10049, 1008500, 6900, 11300, 1009369, 1274, 0), (10050, 1008500, 6900, 11400, 1009446, 1188, 0), (10202, 1009500, 7000, 12200, 1010015, 1021, 0), (10203, 1009500, 7000, 12300, 1010000, 858, 0), (10204, 1009500, 7000, 12400, 1010015, 909, 0)]
+
+    # set with polar issues
+    # Contains several tied-pressure centers in the last row (gridIDs
+    # 10431-10507), which polar_fix should collapse to one per pole.
+    kept_centers = [(544, 978300, 300, 11200, 979292, 2191, 0), (654, 962100, 400, 7800, 963521, 743, 0), (655, 962100, 400, 7900, 963594, 803, 0), (709, 979400, 400, 13300, 980442, 1595, 0), (1129, 966800, 700, 12100, 971962, 1984, 0), (1214, 970500, 800, 6200, 975202, 404, 0), (1252, 973800, 800, 10000, 978482, 1055, 0), (1456, 960600, 1000, 1600, 968658, 1430, 0), (1491, 963800, 1000, 5100, 971319, 1491, 0), (1515, 956600, 1000, 7500, 967153, 2726, 0), (1717, 969100, 1100, 13300, 973865, 885, 0), (1718, 969100, 1100, 13400, 973560, 795, 0), (1764, 959200, 1200, 3600, 965986, 1030, 0), (3319, 1012200, 2300, 700, 1018100, 614, 0), (7462, 995000, 5100, 11800, 1015929, 2133, 0), (7973, 1015400, 5500, 5300, 1024335, 1067, 0), (7987, 991700, 5500, 6700, 1007912, 2069, 0), (8581, 965500, 5900, 8500, 981574, 2949, 0), (8713, 978300, 6000, 7300, 988702, 2292, 0), (8742, 989400, 6000, 10200, 997384, 1673, 0), (8794, 957000, 6100, 1000, 969593, 2269, 0), (8937, 957000, 6200, 900, 965602, 1109, 0), (9062, 993800, 6200, 13400, 1001912, 1888, 0), (9193, 1012800, 6300, 12100, 1017246, 893, 0), (9340, 1012600, 6400, 12400, 1019751, 3477, 0), (9372, 966500, 6500, 1200, 968831, 617, 0), (9373, 966500, 6500, 1300, 968472, 580, 0), (9378, 965600, 6500, 1800, 968110, 899, 0), (9379, 965600, 6500, 1900, 968444, 1098, 0), (9616, 1010000, 6600, 11200, 1012052, 648, 0), (9617, 1010000, 6600, 11300, 1012144, 684, 0), (9754, 1009300, 6700, 10600, 1011538, 1040, 0), (9755, 1009300, 6700, 10700, 1011128, 799, 0), (10049, 1011400, 6900, 11300, 1012046, 469, 0), (10431, 1008500, 7200, 6300, 1009033, 469, 0), (10432, 1008500, 7200, 6400, 1009033, 469, 0), (10433, 1008500, 7200, 6500, 1009066, 469, 0), (10434, 1008500, 7200, 6600, 1009100, 469, 0), (10435, 1008500, 7200, 6700, 1009133, 469, 0), (10503, 1008500, 7200, 13500, 1008866, 469, 0), (10504, 1008500, 7200, 13600, 1008766, 469, 0), (10505, 1008500, 7200, 13700, 1008700, 469, 0), (10506, 1008500, 7200, 13800, 1008633, 469, 0), (10507, 
1008500, 7200, 13900, 1008566, 469, 0)]
+
+    # Last gridID of each of the 73 latitude rows (144 cells per row).
+    row_end = [143, 287, 431, 575, 719, 863, 1007, 1151, 1295, 1439, 1583, 1727, 1871, 2015, 2159, 2303, 2447, 2591, 2735, 2879, 3023, 3167, 3311, 3455, 3599, 3743, 3887, 4031, 4175, 4319, 4463, 4607, 4751, 4895, 5039, 5183, 5327, 5471, 5615, 5759, 5903, 6047, 6191, 6335, 6479, 6623, 6767, 6911, 7055, 7199, 7343, 7487, 7631, 7775, 7919, 8063, 8207, 8351, 8495, 8639, 8783, 8927, 9071, 9215, 9359, 9503, 9647, 9791, 9935, 10079, 10223, 10367, 10511]
+
+    # Counts before/after show how many tied polar centers were dropped.
+    print ("Start",len(kept_centers))
+    kept_centers,discards = polar_fix(use_all_lons,kept_centers,row_end)
+    print ("End",len(kept_centers))
diff --git a/diagnostics/etc_composites/util/tracker/print_col_v4.py b/diagnostics/etc_composites/util/tracker/print_col_v4.py
new file mode 100755
index 000000000..d4a7e8c0d
--- /dev/null
+++ b/diagnostics/etc_composites/util/tracker/print_col_v4.py
@@ -0,0 +1,24 @@
+def print_col(the_list,indent_tag,fmt,cols,width,sort_me=1):
+ line = 0
+ if not the_list:
+ print (indent_tag+" None")
+ return
+ if sort_me:
+ the_list.sort()
+ wide = width*cols
+ end = len(the_list)
+ i = 0
+ for g in the_list:
+ i += 1
+ tag = ""
+ if not line:
+ tag = indent_tag
+ if i == end:
+ print (tag+fmt % (g))
+ elif line <= wide:
+ print (tag+fmt % (g),end='')
+ line += width
+ else:
+ print (tag+fmt % (g))
+ line = 0
+ return
diff --git a/diagnostics/etc_composites/util/tracker/pull_data_v4.py b/diagnostics/etc_composites/util/tracker/pull_data_v4.py
new file mode 100755
index 000000000..69e210b39
--- /dev/null
+++ b/diagnostics/etc_composites/util/tracker/pull_data_v4.py
@@ -0,0 +1,288 @@
+import os,sys
+
+def pull_data(NetCDF,numpy,in_path,file_seperator,read_year,
+ scale,var_slp,var_time,lat_flip,lon_shift):
+ """
+ This function takes an input directory (in_path) and finds all
+ the available files to be read. Then the requested file is
+ read and put into a single array.
+
+ Options/Arguments:
+ NetCDF -- module.
+ numpy -- module.
+ in_path -- path to the data files.
+ read_year -- year desired.
+ scale -- scales the variable as desired
+ (can be 1.0 to do nothing).
+ var_slp -- variable to be extracted.
+ var_time -- variable for time.
+ lat_flip -- Reverse latitude in arrays.
+ lon_shift -- Force longitude shift so 1st index
+ contains Greenwich Meridian.
+ Returns:
+ slp -- numpy array containing data.
+ times -- numpy array of time data.
+ the_time_units -- string of time units.
+
+ Examples:
+
+ Author: Mike Bauer
+
+ Log:
+ 2008/01 MB - File created.
+ 2008/10 MB - Added input checks, docstring.
+ 2008/10 MB - Fixed error where file_list was unordered...
+ 2009/11 MB - Updated to version 4.
+ """
+ # verbose = 0
+
+ read_year = int(read_year)
+
+ # Pull the list of available files, put in chronological order.
+ file_list = os.listdir(in_path)
+ file_list = [x for x in file_list if x.find(".nc") != -1]
+ file_list.sort()
+
+ # Loop over available files for correct year.
+ # found_years = {}
+ found_file = ""
+ for infile in file_list:
+ if infile.find(".nc") != -1:
+ #if verbose:
+ # print "Scanning File:",infile
+ # This works for filenames like slp.1998.nc
+ year = int(infile.split(file_seperator)[1])
+ if year == read_year:
+ found_file = infile
+ break
+ if not found_file:
+ msg = "WARNING: Cannot file for year %d in directory %s"
+ sys.exit(msg % (read_year,in_path))
+ #if verbose:
+ # print "\nFound File:",found_file
+
+ # Open file to read, use the netcdf 3 format
+ nc_in = NetCDF.Dataset(in_path+found_file,'r',format='NETCDF3_CLASSIC')
+
+ # Pull var_time
+ times = nc_in.variables[var_time][:]
+ the_time_units = nc_in.variables[var_time].units
+ times = times.astype(numpy.float32)
+
+ ## See if need to scale or add offset to var_slp
+ #if 'add_offset' in nc_in.variables[var_slp].ncattrs():
+ # add_offset = getattr(nc_in.variables[var_slp],'add_offset')
+ # #if verbose:
+ # # print "add_offset",add_offset
+ #else:
+ # add_offset = 0.0
+
+ #if 'scale_factor' in nc_in.variables[var_slp].ncattrs():
+ # scale_factor = getattr(nc_in.variables[var_slp],'scale_factor')
+ # #if verbose:
+ # # print "scale_factor",scale_factor
+ #else:
+ # scale_factor = 1.0
+
+##CUT
+ #lat_flip = lon_shift = 0
+ #lat_flip = 1; lon_shift = 72
+
+ # Pull slp_var, assume has dimensions of [time,lat,lon]
+ #slp = numpy.multiply(
+ # numpy.add(
+ # numpy.array(nc_in.variables[var_slp][:],dtype=numpy.float32,copy=1),add_offset),scale_factor*scale)
+
+ # Pull slp_var, assume has dimensions of [time,lat,lon]
+ slp = numpy.multiply(numpy.array(nc_in.variables[var_slp][:],dtype=numpy.float32,copy=1),scale)
+
+ if lat_flip:
+ slp = slp[:,::-1,:]
+ #if verbose:
+ # print "Lat Flipped"
+
+ if lon_shift:
+ slp = numpy.roll(slp,lon_shift,axis=2)
+ #if verbose:
+ # print "Lon Shifted"
+
+ # Close File
+ nc_in.close()
+
+ return slp,times,the_time_units
+
+if __name__=='__main__':
+
+ # import os,sys
+ import time
+
+ def pretty_filesize(bytes,msg=""):
+ if bytes >= 1073741824:
+ return msg+str(bytes / 1024 / 1024 / 1024) + ' GB'
+ elif bytes >= 1048576:
+ return msg+str(bytes / 1024 / 1024) + ' MB'
+ elif bytes >= 1024:
+ return msg+str(bytes / 1024) + ' KB'
+ elif bytes < 1024:
+ return msg+str(bytes) + ' bytes'
+
+    # Toggle plotting of example SLP maps (also pulls in plot_map below).
+    save_plot = 1
+
+    # Month number -> month name, used when titling example plots.
+    months = {1: 'January', 2: 'February', 3: 'March', 4: 'April', 5: 'May',
+              6: 'June', 7: 'July', 8: 'August', 9: 'September', 10: 'October',
+              11: 'November', 12: 'December'}
+
+    # Extract version number from this scripts name.
+    # e.g. "pull_data_v4.py" -> vnum == "_v4"; used to pick versioned modules.
+    tmp = sys.argv[0]
+    file_len = len(tmp.split("_"))
+    vnum = "_"+tmp.split("_")[file_len-1][:2]
+
+    # --------------------------------------------------------------------------
+    # Define all modules to be imported.
+    # --------------------------------------------------------------------------
+
+    # Basic standard Python modules to import.
+    imports = []
+    # Jeyavinoth: removed necdftime from line below
+    # system_imports = "import numpy,netcdftime,pickle"
+    system_imports = "import numpy,pickle"
+
+    imports.append(system_imports)
+    imports.append("import netCDF4 as NetCDF")
+
+    # My modules to import w/ version number appended.
+    my_base = ["defs"]
+    if save_plot:
+        my_base.append("plot_map")
+    for x in my_base:
+        tmp = "import %s%s as %s" % (x,vnum,x)
+        imports.append(tmp)
+
+    # Execute the collected import statements dynamically.
+    # NOTE(review): exec-based importing defeats static analysis; names
+    # like numpy/pickle/NetCDF/defs/plot_map only exist at runtime.
+    for i in imports:
+        exec(i)
+    # Overrides for the defs defaults (empty: accept all defaults).
+    defs_set = {}
+
+    # Fetch definitions and impose those set in defs_set.
+    defs = defs.defs(**defs_set)
+
+    # NOTE(review): hard-coded local path -- must exist and contain the
+    # setup pickle produced by setup_vx.py; adjust for other machines.
+    shared_path = "/Volumes/scratch/output/nra_files/"
+
+    # Get some definitions. Note must have run setup_vx.py already!
+    sf_file = "%ss_dat.p" % (shared_path)
+    try:
+        # fnc_out is a flat tuple; `inputs` names each slot so values can
+        # be fetched positionally via inputs.index(name).
+        fnc_out = pickle.load(open(sf_file, 'rb'))
+        inputs = ("im","jm","maxid","lats","lons","timestep","dx","dy","dlon","dlat",
+            "start_lat","start_lon","dlon_sq","dlat_sq","two_dlat","model_flag","eq_grid",
+            "tropical_n","tropical_s","bot","mid","top","row_start","row_end",
+            "tropical_n_alt","tropical_s_alt","bot_alt","top_alt","lon_shift","lat_flip",
+            "the_calendar","found_years","super_years","dim_lat","dim_lon","dim_time",
+            "var_lat","var_lon","var_time","var_slp","var_topo","var_land_sea_mask",
+            "file_seperator","no_topo","no_mask","slp_path","model","out_path",
+            "shared_path","lat_edges","lon_edges","land_gridids","troubled_centers")
+        # Extract only the fields this driver needs.
+        # NOTE(review): these names are defined only when save_plot is
+        # true, but the loop below uses several unconditionally -- works
+        # only because save_plot is hard-wired to 1 above.
+        if save_plot:
+            lats = fnc_out[inputs.index("lats")]
+            lons = fnc_out[inputs.index("lons")]
+            model = fnc_out[inputs.index("model")]
+            slp_path = fnc_out[inputs.index("slp_path")]
+            the_calendar = fnc_out[inputs.index("the_calendar")]
+            file_seperator = fnc_out[inputs.index("file_seperator")]
+            super_years = fnc_out[inputs.index("super_years")]
+            var_slp = fnc_out[inputs.index("var_slp")]
+            var_time = fnc_out[inputs.index("var_time")]
+            lat_flip = fnc_out[inputs.index("lat_flip")]
+            lon_shift = fnc_out[inputs.index("lon_shift")]
+            im = fnc_out[inputs.index("im")]
+            jm = fnc_out[inputs.index("jm")]
+
+        #(im,jm,maxid,lats,lons,timestep,dx,dy,dlon,dlat,start_lat,start_lon,
+        # dlon_sq,dlat_sq,two_dlat,model_flag,eq_grid,tropical_n,tropical_s,
+        # bot,mid,top,row_start,row_end,tropical_n_alt,tropical_s_alt,
+        # bot_alt,top_alt,lon_shift,lat_flip,the_calendar,found_years,
+        # super_years,dim_lat,dim_lon,dim_time,var_lat,var_lon,var_time,
+        # var_slp,var_topo,var_land_sea_mask,file_seperator,no_topo,
+        # no_mask,slp_path,model,out_path,shared_path,lat_edges,lon_edges,
+        # land_gridids,troubled_centers) = fnc_out
+    # except: # removed by JJ
+    except Exception as e:
+        # Any failure (missing file, bad pickle) aborts; `e` is unused.
+        sys.exit("\n\tWARNING: Error reading or finding %s." % (sf_file))
+    del fnc_out # reduces memory footprint by 83464 bytes
+    del pickle
+
+    # Inclusive range of years to process, from the setup pickle.
+    years = [x for x in range(int(super_years[0]),int(super_years[-1])+1)]
+
+    # Main loop: read one year of SLP data, report memory use, and
+    # (optionally) plot example maps. NOTE(review): this driver is a
+    # debugging harness -- it deliberately sys.exit()s partway through.
+    for loop_year in years:
+
+        loop_year = int(loop_year)
+
+        print ("\n=============%d=============" % (loop_year))
+
+        # Open data file, extract data
+        fnc = pull_data(NetCDF,numpy,slp_path,file_seperator,loop_year,
+            defs.read_scale,var_slp,var_time,lat_flip,lon_shift)
+        (slp,times,the_time_units) = fnc
+        del fnc
+
+        # Report approximate memory footprint of the arrays just read.
+        msg = "\nMemory Use: "
+        print (pretty_filesize(slp.size*slp.itemsize,msg),)
+        print (pretty_filesize(times.size*times.itemsize,"& "))
+        d = {'slp':slp,'times':times}
+        print (numpy.who(d))
+        del d
+
+        #print "Memory Usage:"
+        #for i in dir():
+        #    print type(i)
+        #    print "\tObject %s: %s " % (i, pretty_filesize(sys.getsizeof(i)))
+        time.sleep(6)
+        # Intentional early abort after the second year (debug harness).
+        if loop_year == years[1]:
+            import sys; sys.exit("Stop HERE")
+
+        # Rebuild the time-units string into a canonical
+        # "<units> since YYYY-MM-DD HH:MM:SS" form.
+        tsteps = len(times)
+        the_time_range = [times[0],times[tsteps-1]]
+        start = "%s" % (the_time_units)
+        tmp = start.split()
+        tmp1 = tmp[2].split("-")
+        tmp2 = tmp[3].split(":")
+        tmp3 = tmp2[2][0]
+        start = "%s %s %04d-%02d-%02d %02d:%02d:%02d" % \
+                (tmp[0],tmp[1],int(tmp1[0]),int(tmp1[1]),
+                 int(tmp1[2]),int(tmp2[0]),int(tmp2[1]),
+                 int(tmp3))
+        print (start)
+        # Everything below this exit is currently dead code.
+        import sys; sys.exit("Stop Here")
+        # # Jeyavinoth Start: the code below is commented out till Jeyavinoth: End
+        # # seems like this code never gets used, because the code is forced to crash above
+        # # right before these lines.
+        # # In anycase I change the code below to use my jj_calendar.py functions
+        # # Only date_stamps is getting used anyways
+        # cdftime = netcdftime.utime(start,calendar=the_calendar)
+        # get_datetime = cdftime.num2date
+        # dtimes = (get_datetime(times[step]) for step in range(0,tsteps))
+        # date_stamps = ["%4d%02d%02d%02d" % (d.year,d.month,d.day,d.hour) for d in dtimes]
+        # # Jeyavinoth: End
+        # NOTE(review): jjCal is never imported and dtimes is never
+        # defined (only in the commented block above), so this would
+        # raise NameError if the exits above were removed -- fix first.
+        _, date_stamps, _ = jjCal.get_time_info(start, times, calendar=the_calendar)
+        del times
+        del dtimes
+
+        print ("Start %s and End %s" % (date_stamps[0],date_stamps[-1]))
+
+        if save_plot:
+            # Plot an example to see if okay.
+            plot = plot_map.plotmap(clevs=[960,1040,2],cints=[960.0,1040.0],color_scheme="jet")
+            for step in range(tsteps):
+                msg = "State at %4d %s %02d %02d UTC" % (int(date_stamps[step][:4]),
+                    months[int(date_stamps[step][4:6])],
+                    int(date_stamps[step][6:8]),
+                    int(date_stamps[step][8:]))
+                pname = "%s%s_example_slp_%s.pdf" % (shared_path,model,date_stamps[step])
+                plot.create_fig()
+                # Flatten the 2D SLP field for plotting.
+                slp_step = slp[step,:,:].copy()
+                slp_step.shape = jm*im
+#                plot.add_field(lons,lats,slp_step,ptype='pcolor')
+                plot.add_field(lons,lats,slp_step,ptype='contour')
+                plot.finish(pname,title=msg)
+                print ("\tMade figure: %s" % (pname))
+                #continue # Exit early from this year
+                # Stop after the first plotted timestep (debug harness).
+                sys.exit("Stopped Early.")
+        del slp
+
diff --git a/diagnostics/etc_composites/util/tracker/read_mcms_v4.py b/diagnostics/etc_composites/util/tracker/read_mcms_v4.py
new file mode 100755
index 000000000..e6f149ead
--- /dev/null
+++ b/diagnostics/etc_composites/util/tracker/read_mcms_v4.py
@@ -0,0 +1,1300 @@
+import sys,os
+import defines
+
+class Read_MCMS:
+ """This Class contains the basic logic for reading and manipulating
+ data files from the MCMS project. It uses object orientation to maximize
+ the reuse of code and minimize the amount of code in general.
+
+ Options/Arguments:
+
+ Returns:
+
+ Examples:
+
+ Notes: This should work with any standard installation of python version
+ 2.4 or greater. I have tested it on Apple OS-X (10.5/10.6), Ubuntu
+ (8.04/9.04) and RedHat Enterprise 4.0 Linux distributions.
+
+ Author: Mike Bauer
+
+ Log:
+ 2008/07 MB - File created.
+ 2008/09 MB - Finished initial code base.
+ 2008/09 MB - Multiple bug fixes.
+ 2008/10 MB - Added input checks, docstring.
+ 2008/10 MB - Bug fix in fetch_center: skip saving -777 and -888
+ if place or time check enforced and nothing has
+ passed.
+ 2009/11 MB - Updated to v4, removed need for model def file.
+ """
+
+    def __init__(self,**kwargs):
+        """Set default values, with the option to alter them on
+        instantiation.
+
+        Two configuration paths: (1) a 'template' kwarg naming a Python
+        module whose top-level names are exec-imported and copied onto
+        this instance, or (2) explicit kwargs for every required field.
+        Either path ends with a parse_template() call and model-defs
+        provisioning.
+        """
+        # Name: template
+        # Purpose: Full path to template file to tell read what to do.
+        # Default: empty - raises error
+        if 'template' in kwargs:
+            self.template = kwargs['template']
+            # Import values as module (namespace local not self).
+            try:
+                print ("Importing '%s' ... " % (self.template),)
+                # Remove .py extension
+                self.template = os.path.splitext(self.template)[0]
+                cmd = 'from %s import *' % (self.template)
+                exec(cmd)
+                print ("Done")
+                # Import template data into instance: copy every local
+                # name the exec-import created onto self.
+                ignore_these = ('cmd','kwargs','self')
+                for value in dir():
+                    if value not in ignore_these:
+                        cmd = 'self.%s = %s' % (value,value)
+                        exec(cmd)
+                self.parse_template()
+                # Conflict check
+                if self.detail_tracks and self.as_tracks:
+                    warning = "WARNING: detail_tracks and as_tracks cannot"
+                    warning = warning + " both be non-empty ... Aborting."
+                    sys.exit(warning)
+            # NOTE(review): a failed 'from X import *' raises ImportError,
+            # not IOError -- confirm this handler ever fires as intended.
+            except IOError:
+                warning = "Oops! %s failed to open. Try again..."
+                sys.exit(warning % (self.template))
+
+        else:
+            # Use provided values
+            self.model = kwargs['model']
+            self.in_file = kwargs['in_file']
+            self.out_file = kwargs['out_file']
+            self.just_center_table = kwargs['just_center_table']
+            self.detail_tracks = kwargs['detail_tracks']
+            self.as_tracks = kwargs['as_tracks']
+            self.start_time = kwargs['start_time']
+            self.end_time = kwargs['end_time']
+            self.places = kwargs['places']
+            self.include_atts = kwargs['include_atts']
+            self.include_stormy = kwargs['include_stormy']
+            self.just_centers = kwargs['just_centers']
+            self.save_output = kwargs['save_output']
+            self.overwrite = kwargs['overwrite']
+            self.parse_template()
+
+        # JIMMY VERBOSE
+        '''print ("Jimmy reading template")
+        print model
+        print self.model
+        print self.detail_tracks
+        print kwargs.has_key('model')
+        print kwargs.has_key('template')
+        print kwargs
+        '''
+        # Import model defs: nra/nra2 have built-in provisioning; any
+        # other model must supply its defs (signalled here via 'maxID').
+        if 'model' in kwargs:
+            if kwargs['model'] in ["nra","nra2"]:
+                if 'maxID' in kwargs:
+                    # Use provided values already imported above.
+                    pass
+                else:
+                    self.provision_nra()
+            else:
+                if 'maxID' in kwargs:
+                    # Use provided values already imported above.
+                    pass
+                else:
+                    warning = "Oops! no model defs provided! Try again..."
+                    sys.exit(warning)
+        else:
+            warning = "Oops! no model defs provided! Try again..."
+            sys.exit(warning)
+
+        # Month number -> (3-month season, 6-month season) lookup.
+        self.mm2season = { 1 : ("DJF","NDJFMA"),
+                           2 : ("DJF","NDJFMA"),
+                           3 : ("MAM","NDJFMA"),
+                           4 : ("MAM","NDJFMA"),
+                           5 : ("MAM","MJJASO"),
+                           6 : ("JJA","MJJASO"),
+                           7 : ("JJA","MJJASO"),
+                           8 : ("JJA","MJJASO"),
+                           9 : ("SON","MJJASO"),
+                           10 : ("SON","MJJASO"),
+                           11 : ("SON","NDJFMA"),
+                           12 : ("DJF","NDJFMA")}
+        if 'verbose' in kwargs:
+            # if present print details
+            self.verbose = True
+        else:
+            self.verbose = False
+
+ def provision_nra(self):
+ """Defaults for NRA 1 and 2 circa 2009"""
+ self.tropical_start = 3456
+ self.tropical_end = 7055
+ self.maxID = 144*73
+ self.land_gridids = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15,
+ 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29,
+ 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43,
+ 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57,
+ 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71,
+ 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85,
+ 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99,
+ 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110,
+ 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121,
+ 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132,
+ 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143,
+ 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154,
+ 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165,
+ 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176,
+ 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187,
+ 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198,
+ 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209,
+ 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220,
+ 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231,
+ 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242,
+ 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253,
+ 254, 255, 256, 257, 258, 259, 260, 261, 262, 263, 264,
+ 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, 275,
+ 276, 277, 278, 279, 280, 281, 282, 283, 284, 285, 286,
+ 287, 288, 289, 290, 291, 292, 293, 294, 295, 296, 297,
+ 298, 299, 300, 301, 302, 303, 304, 305, 306, 307, 308,
+ 309, 310, 311, 312, 313, 314, 315, 316, 317, 318, 319,
+ 320, 321, 322, 323, 324, 325, 326, 327, 328, 329, 330,
+ 331, 332, 333, 334, 335, 336, 337, 338, 339, 340, 341,
+ 342, 343, 344, 345, 346, 347, 348, 349, 350, 351, 352,
+ 353, 354, 355, 356, 357, 358, 359, 360, 361, 362, 363,
+ 364, 372, 373, 374, 375, 376, 377, 378, 379, 380, 381,
+ 382, 383, 384, 385, 386, 387, 388, 389, 390, 391, 392,
+ 393, 394, 395, 396, 397, 398, 399, 400, 401, 402, 403,
+ 404, 405, 406, 407, 408, 409, 410, 411, 412, 413, 414,
+ 415, 416, 417, 418, 419, 420, 421, 422, 423, 424, 425,
+ 426, 427, 428, 429, 430, 431, 432, 433, 434, 435, 436,
+ 437, 438, 439, 440, 441, 442, 443, 444, 445, 446, 447,
+ 448, 449, 450, 451, 452, 453, 454, 455, 456, 457, 458,
+ 459, 460, 461, 462, 463, 464, 465, 466, 467, 468, 469,
+ 470, 471, 472, 473, 474, 475, 476, 477, 478, 479, 480,
+ 481, 482, 483, 484, 485, 486, 487, 488, 489, 490, 491,
+ 492, 493, 494, 495, 496, 497, 498, 516, 517, 518, 519,
+ 520, 521, 522, 523, 524, 525, 526, 527, 528, 529, 530,
+ 531, 532, 533, 534, 535, 536, 537, 538, 539, 540, 541,
+ 542, 543, 544, 545, 546, 547, 548, 549, 550, 551, 552,
+ 553, 554, 555, 556, 557, 558, 559, 560, 561, 562, 563,
+ 564, 565, 566, 567, 568, 569, 570, 571, 572, 573, 574,
+ 575, 576, 577, 578, 579, 580, 581, 582, 583, 584, 585,
+ 586, 587, 588, 589, 590, 591, 592, 593, 594, 595, 596,
+ 597, 598, 599, 600, 601, 602, 603, 604, 605, 606, 607,
+ 608, 609, 610, 611, 612, 613, 614, 615, 616, 617, 618,
+ 619, 620, 621, 622, 623, 624, 625, 626, 627, 628, 629,
+ 630, 631, 632, 633, 634, 635, 636, 637, 638, 639, 640,
+ 658, 659, 660, 661, 662, 663, 664, 665, 666, 667, 668,
+ 669, 670, 671, 672, 673, 674, 675, 676, 677, 678, 679,
+ 680, 681, 682, 683, 684, 685, 686, 687, 700, 701, 706,
+ 707, 708, 709, 710, 711, 712, 713, 714, 715, 716, 717,
+ 718, 719, 720, 721, 722, 723, 724, 725, 726, 727, 728,
+ 729, 730, 731, 732, 733, 734, 735, 736, 737, 738, 739,
+ 740, 741, 742, 743, 744, 745, 746, 747, 748, 749, 750,
+ 751, 752, 753, 754, 755, 756, 757, 758, 759, 760, 761,
+ 762, 763, 764, 765, 766, 767, 768, 769, 770, 771, 772,
+ 773, 774, 775, 776, 777, 778, 779, 780, 781, 782, 783,
+ 784, 785, 802, 805, 806, 807, 808, 809, 810, 811, 812,
+ 813, 814, 815, 816, 817, 818, 819, 820, 821, 822, 823,
+ 824, 825, 826, 827, 828, 829, 830, 831, 832, 833, 834,
+ 835, 836, 851, 852, 853, 854, 855, 856, 857, 858, 859,
+ 860, 861, 862, 863, 864, 865, 866, 867, 868, 869, 870,
+ 871, 872, 873, 874, 875, 876, 877, 878, 879, 880, 881,
+ 882, 883, 884, 885, 886, 887, 888, 889, 890, 891, 892,
+ 893, 894, 895, 896, 897, 898, 899, 900, 901, 902, 903,
+ 904, 905, 906, 907, 908, 909, 910, 911, 912, 913, 914,
+ 915, 916, 917, 918, 919, 920, 921, 922, 923, 924, 925,
+ 926, 927, 928, 929, 953, 954, 955, 956, 957, 958, 959,
+ 960, 961, 962, 963, 964, 965, 966, 967, 968, 969, 970,
+ 971, 972, 973, 974, 975, 976, 977, 978, 979, 980, 981,
+ 982, 983, 999, 1000, 1001, 1002, 1003, 1004, 1005, 1006,
+ 1007, 1008, 1009, 1010, 1011, 1012, 1013, 1014, 1015, 1016,
+ 1017, 1018, 1019, 1020, 1021, 1022, 1023, 1024, 1025, 1026,
+ 1027, 1028, 1029, 1030, 1031, 1032, 1033, 1034, 1035, 1036,
+ 1037, 1038, 1039, 1040, 1041, 1042, 1043, 1044, 1045, 1046,
+ 1047, 1048, 1049, 1050, 1051, 1052, 1053, 1054, 1055, 1056,
+ 1057, 1058, 1059, 1060, 1061, 1062, 1063, 1064, 1065, 1066,
+ 1067, 1068, 1069, 1070, 1071, 1072, 1073, 1074, 1075, 1113,
+ 1114, 1115, 1116, 1117, 1124, 1125, 1126, 1127, 1146, 1147,
+ 1148, 1149, 1150, 1151, 1165, 1166, 1168, 1169, 1170, 1171,
+ 1172, 1173, 1174, 1175, 1176, 1177, 1178, 1179, 1183, 1184,
+ 1185, 1186, 1187, 1188, 1189, 1190, 1191, 1192, 1193, 1194,
+ 1195, 1196, 1197, 1198, 1199, 1200, 1201, 1202, 1203, 1204,
+ 1205, 1206, 1207, 1208, 1209, 1210, 1211, 1212, 1213, 1214,
+ 1215, 1216, 1268, 1269, 1270, 1271, 1315, 1316, 1317, 1318,
+ 1319, 1320, 1321, 1329, 1330, 1331, 1332, 1333, 1334, 1335,
+ 1336, 1337, 1338, 1339, 1340, 1341, 1342, 1343, 1344, 1345,
+ 1346, 1347, 1348, 1349, 1350, 1351, 1352, 1353, 1354, 1413,
+ 1414, 2132, 2133, 2275, 2276, 2419, 2420, 2563, 2564, 2565,
+ 2660, 2707, 2708, 2709, 2805, 2851, 2852, 2853, 2854, 2950,
+ 2995, 2996, 2997, 2998, 2999, 3081, 3082, 3083, 3139, 3140,
+ 3141, 3142, 3143, 3144, 3223, 3224, 3225, 3226, 3227, 3228,
+ 3284, 3285, 3286, 3287, 3288, 3289, 3320, 3321, 3322, 3323,
+ 3359, 3360, 3361, 3362, 3366, 3367, 3368, 3369, 3370, 3371,
+ 3372, 3428, 3429, 3430, 3431, 3432, 3433, 3434, 3464, 3465,
+ 3466, 3467, 3468, 3503, 3504, 3505, 3506, 3507, 3508, 3509,
+ 3510, 3511, 3512, 3513, 3514, 3515, 3516, 3517, 3572, 3573,
+ 3574, 3575, 3576, 3577, 3578, 3579, 3607, 3608, 3609, 3610,
+ 3611, 3612, 3613, 3646, 3647, 3648, 3649, 3650, 3651, 3652,
+ 3653, 3654, 3655, 3656, 3657, 3658, 3659, 3660, 3661, 3716,
+ 3717, 3718, 3719, 3720, 3721, 3722, 3723, 3724, 3750, 3751,
+ 3752, 3753, 3754, 3755, 3756, 3757, 3790, 3791, 3792, 3793,
+ 3794, 3795, 3796, 3797, 3798, 3799, 3800, 3801, 3802, 3803,
+ 3804, 3860, 3861, 3862, 3863, 3864, 3865, 3866, 3867, 3868,
+ 3869, 3894, 3895, 3896, 3897, 3898, 3899, 3900, 3901, 3906,
+ 3907, 3934, 3935, 3936, 3937, 3938, 3939, 3940, 3941, 3942,
+ 3943, 3944, 3945, 3946, 3947, 4004, 4005, 4006, 4007, 4008,
+ 4009, 4010, 4011, 4012, 4013, 4014, 4037, 4038, 4039, 4040,
+ 4041, 4042, 4043, 4044, 4045, 4050, 4051, 4080, 4081, 4082,
+ 4083, 4084, 4085, 4086, 4087, 4088, 4089, 4090, 4091, 4148,
+ 4149, 4150, 4151, 4152, 4153, 4154, 4155, 4156, 4157, 4158,
+ 4159, 4181, 4182, 4183, 4184, 4185, 4186, 4187, 4188, 4189,
+ 4190, 4194, 4195, 4226, 4227, 4228, 4229, 4230, 4231, 4232,
+ 4233, 4234, 4292, 4293, 4294, 4295, 4296, 4297, 4298, 4299,
+ 4300, 4301, 4302, 4303, 4304, 4325, 4326, 4327, 4328, 4329,
+ 4330, 4331, 4332, 4333, 4334, 4335, 4336, 4339, 4372, 4373,
+ 4374, 4377, 4434, 4435, 4436, 4437, 4438, 4439, 4440, 4441,
+ 4442, 4443, 4444, 4445, 4446, 4447, 4448, 4470, 4471, 4472,
+ 4473, 4474, 4475, 4476, 4477, 4478, 4479, 4480, 4517, 4518,
+ 4521, 4578, 4579, 4580, 4581, 4582, 4583, 4584, 4585, 4586,
+ 4587, 4588, 4589, 4590, 4591, 4592, 4614, 4615, 4616, 4617,
+ 4618, 4619, 4620, 4621, 4622, 4623, 4624, 4721, 4722, 4723,
+ 4724, 4725, 4726, 4727, 4728, 4729, 4730, 4731, 4732, 4733,
+ 4734, 4735, 4736, 4737, 4758, 4759, 4760, 4761, 4762, 4763,
+ 4764, 4765, 4766, 4767, 4808, 4809, 4810, 4865, 4866, 4867,
+ 4868, 4869, 4870, 4871, 4872, 4873, 4874, 4875, 4876, 4877,
+ 4878, 4879, 4880, 4881, 4882, 4901, 4902, 4903, 4904, 4905,
+ 4906, 4907, 4908, 4909, 4910, 4911, 4938, 4951, 4952, 4953,
+ 4954, 5008, 5009, 5010, 5011, 5012, 5013, 5014, 5015, 5016,
+ 5017, 5018, 5019, 5020, 5021, 5022, 5023, 5024, 5025, 5044,
+ 5045, 5046, 5047, 5048, 5049, 5050, 5051, 5052, 5053, 5054,
+ 5055, 5056, 5081, 5082, 5085, 5086, 5088, 5093, 5095, 5096,
+ 5153, 5154, 5155, 5156, 5157, 5158, 5159, 5160, 5161, 5162,
+ 5163, 5164, 5165, 5166, 5188, 5189, 5190, 5191, 5192, 5193,
+ 5194, 5195, 5196, 5197, 5198, 5199, 5200, 5201, 5224, 5225,
+ 5228, 5229, 5230, 5231, 5297, 5298, 5299, 5300, 5301, 5302,
+ 5303, 5304, 5305, 5306, 5307, 5308, 5333, 5334, 5335, 5336,
+ 5337, 5338, 5339, 5340, 5341, 5342, 5343, 5344, 5345, 5346,
+ 5368, 5369, 5373, 5374, 5375, 5441, 5442, 5443, 5444, 5445,
+ 5446, 5447, 5448, 5449, 5450, 5451, 5475, 5476, 5477, 5478,
+ 5479, 5480, 5481, 5482, 5483, 5484, 5485, 5486, 5487, 5488,
+ 5489, 5490, 5491, 5511, 5513, 5519, 5585, 5586, 5587, 5588,
+ 5589, 5590, 5591, 5592, 5593, 5594, 5595, 5613, 5616, 5617,
+ 5618, 5619, 5620, 5621, 5622, 5623, 5624, 5625, 5626, 5627,
+ 5628, 5629, 5630, 5631, 5632, 5633, 5634, 5635, 5648, 5666,
+ 5729, 5730, 5731, 5732, 5733, 5734, 5735, 5736, 5756, 5757,
+ 5758, 5759, 5760, 5761, 5762, 5763, 5764, 5765, 5766, 5767,
+ 5768, 5769, 5770, 5771, 5772, 5773, 5774, 5775, 5776, 5777,
+ 5778, 5779, 5780, 5791, 5802, 5874, 5875, 5876, 5877, 5878,
+ 5879, 5899, 5900, 5901, 5902, 5903, 5904, 5905, 5906, 5907,
+ 5908, 5909, 5910, 5911, 5912, 5913, 5914, 5915, 5916, 5917,
+ 5918, 5919, 5920, 5921, 5935, 5944, 5945, 5946, 5947, 6014,
+ 6042, 6043, 6044, 6045, 6046, 6047, 6048, 6049, 6050, 6051,
+ 6052, 6053, 6054, 6055, 6056, 6057, 6058, 6059, 6060, 6061,
+ 6062, 6063, 6064, 6065, 6066, 6067, 6078, 6079, 6080, 6088,
+ 6089, 6090, 6091, 6156, 6157, 6158, 6186, 6187, 6188, 6189,
+ 6190, 6191, 6192, 6193, 6194, 6195, 6196, 6197, 6198, 6199,
+ 6200, 6201, 6202, 6203, 6204, 6205, 6206, 6207, 6209, 6210,
+ 6211, 6212, 6213, 6222, 6223, 6224, 6225, 6230, 6231, 6232,
+ 6233, 6234, 6296, 6297, 6298, 6299, 6300, 6330, 6331, 6332,
+ 6333, 6334, 6335, 6336, 6337, 6338, 6339, 6340, 6341, 6342,
+ 6343, 6344, 6345, 6346, 6347, 6348, 6349, 6350, 6353, 6354,
+ 6355, 6356, 6357, 6358, 6365, 6366, 6367, 6368, 6369, 6370,
+ 6374, 6375, 6376, 6377, 6378, 6438, 6439, 6440, 6441, 6444,
+ 6474, 6475, 6476, 6477, 6478, 6479, 6480, 6481, 6482, 6483,
+ 6484, 6485, 6486, 6487, 6488, 6489, 6490, 6491, 6492, 6493,
+ 6494, 6496, 6497, 6498, 6499, 6500, 6501, 6502, 6503, 6508,
+ 6509, 6510, 6511, 6512, 6513, 6514, 6515, 6516, 6517, 6518,
+ 6519, 6520, 6521, 6522, 6523, 6524, 6525, 6582, 6583, 6584,
+ 6618, 6619, 6620, 6621, 6622, 6623, 6624, 6625, 6626, 6627,
+ 6628, 6629, 6630, 6631, 6632, 6633, 6634, 6635, 6636, 6637,
+ 6639, 6640, 6641, 6642, 6643, 6644, 6647, 6648, 6649, 6650,
+ 6651, 6652, 6653, 6654, 6655, 6656, 6657, 6658, 6659, 6660,
+ 6661, 6662, 6663, 6664, 6665, 6666, 6667, 6668, 6669, 6670,
+ 6671, 6725, 6726, 6727, 6728, 6729, 6763, 6764, 6765, 6766,
+ 6767, 6768, 6769, 6770, 6771, 6772, 6773, 6774, 6775, 6776,
+ 6777, 6778, 6779, 6780, 6781, 6782, 6783, 6784, 6785, 6786,
+ 6787, 6789, 6790, 6791, 6792, 6793, 6794, 6795, 6796, 6797,
+ 6798, 6799, 6800, 6801, 6802, 6803, 6804, 6805, 6806, 6807,
+ 6808, 6809, 6810, 6811, 6812, 6813, 6814, 6815, 6816, 6867,
+ 6868, 6869, 6870, 6871, 6872, 6873, 6879, 6880, 6908, 6909,
+ 6910, 6911, 6912, 6913, 6914, 6915, 6916, 6917, 6918, 6919,
+ 6920, 6921, 6922, 6923, 6924, 6925, 6926, 6927, 6928, 6929,
+ 6930, 6931, 6932, 6933, 6934, 6935, 6936, 6937, 6938, 6939,
+ 6940, 6941, 6942, 6943, 6944, 6945, 6946, 6947, 6948, 6949,
+ 6950, 6951, 6952, 6953, 6954, 6955, 6956, 6957, 6958, 6959,
+ 6960, 7010, 7011, 7012, 7013, 7014, 7015, 7016, 7017, 7018,
+ 7023, 7052, 7053, 7054, 7055, 7056, 7057, 7058, 7059, 7060,
+ 7061, 7070, 7071, 7072, 7073, 7074, 7075, 7076, 7077, 7078,
+ 7079, 7080, 7081, 7082, 7083, 7084, 7085, 7086, 7087, 7088,
+ 7089, 7090, 7091, 7092, 7093, 7094, 7095, 7096, 7097, 7098,
+ 7099, 7100, 7101, 7102, 7103, 7104, 7154, 7155, 7156, 7157,
+ 7158, 7159, 7160, 7161, 7162, 7163, 7164, 7165, 7166, 7167,
+ 7197, 7198, 7199, 7200, 7201, 7202, 7203, 7204, 7215, 7216,
+ 7217, 7218, 7219, 7220, 7221, 7222, 7223, 7224, 7225, 7226,
+ 7227, 7228, 7229, 7230, 7231, 7232, 7233, 7234, 7235, 7236,
+ 7237, 7238, 7239, 7240, 7241, 7242, 7243, 7244, 7245, 7246,
+ 7247, 7251, 7253, 7254, 7255, 7296, 7297, 7298, 7299, 7300,
+ 7301, 7302, 7303, 7304, 7305, 7306, 7307, 7308, 7309, 7310,
+ 7311, 7312, 7313, 7342, 7353, 7355, 7356, 7357, 7358, 7359,
+ 7360, 7361, 7362, 7363, 7366, 7367, 7368, 7369, 7370, 7371,
+ 7372, 7373, 7374, 7375, 7376, 7377, 7378, 7379, 7380, 7381,
+ 7382, 7383, 7384, 7385, 7386, 7387, 7388, 7389, 7390, 7391,
+ 7392, 7395, 7400, 7439, 7440, 7441, 7442, 7443, 7444, 7445,
+ 7446, 7447, 7448, 7449, 7450, 7451, 7452, 7453, 7454, 7455,
+ 7456, 7457, 7485, 7486, 7487, 7496, 7497, 7499, 7500, 7501,
+ 7502, 7503, 7504, 7505, 7506, 7507, 7510, 7511, 7512, 7513,
+ 7514, 7515, 7516, 7517, 7518, 7519, 7520, 7521, 7522, 7523,
+ 7524, 7525, 7526, 7527, 7528, 7529, 7530, 7531, 7532, 7533,
+ 7534, 7535, 7536, 7538, 7539, 7544, 7583, 7584, 7585, 7586,
+ 7587, 7588, 7589, 7590, 7591, 7592, 7593, 7594, 7595, 7596,
+ 7597, 7598, 7599, 7600, 7601, 7602, 7629, 7630, 7631, 7632,
+ 7637, 7640, 7641, 7642, 7643, 7648, 7649, 7650, 7651, 7653,
+ 7654, 7655, 7656, 7657, 7658, 7659, 7660, 7661, 7662, 7663,
+ 7664, 7665, 7666, 7667, 7668, 7669, 7670, 7671, 7672, 7673,
+ 7674, 7675, 7676, 7677, 7678, 7679, 7680, 7681, 7682, 7683,
+ 7684, 7689, 7727, 7728, 7729, 7730, 7731, 7732, 7733, 7734,
+ 7735, 7736, 7737, 7738, 7739, 7740, 7741, 7742, 7743, 7744,
+ 7745, 7746, 7747, 7773, 7774, 7775, 7776, 7777, 7778, 7779,
+ 7780, 7781, 7782, 7783, 7784, 7785, 7786, 7787, 7792, 7793,
+ 7794, 7795, 7797, 7798, 7799, 7800, 7801, 7802, 7803, 7804,
+ 7805, 7806, 7807, 7808, 7809, 7810, 7811, 7812, 7813, 7814,
+ 7815, 7816, 7817, 7818, 7819, 7820, 7821, 7822, 7823, 7824,
+ 7825, 7826, 7827, 7828, 7829, 7830, 7833, 7871, 7872, 7873,
+ 7874, 7875, 7876, 7877, 7878, 7879, 7880, 7881, 7882, 7883,
+ 7884, 7885, 7886, 7887, 7888, 7889, 7890, 7891, 7892, 7893,
+ 7894, 7920, 7921, 7922, 7923, 7924, 7925, 7926, 7927, 7928,
+ 7929, 7930, 7931, 7932, 7933, 7934, 7935, 7936, 7937, 7938,
+ 7939, 7940, 7941, 7942, 7943, 7944, 7945, 7946, 7947, 7948,
+ 7949, 7950, 7951, 7952, 7953, 7954, 7955, 7956, 7957, 7958,
+ 7959, 7960, 7961, 7962, 7963, 7964, 7965, 7966, 7967, 7968,
+ 7969, 7970, 7971, 7972, 7973, 7974, 7975, 8015, 8016, 8017,
+ 8018, 8019, 8020, 8021, 8022, 8023, 8024, 8025, 8026, 8027,
+ 8028, 8029, 8030, 8031, 8032, 8033, 8034, 8035, 8036, 8037,
+ 8038, 8065, 8066, 8067, 8068, 8069, 8070, 8071, 8072, 8073,
+ 8074, 8075, 8076, 8077, 8078, 8079, 8080, 8081, 8082, 8083,
+ 8084, 8085, 8086, 8087, 8088, 8089, 8090, 8091, 8092, 8093,
+ 8094, 8095, 8096, 8097, 8098, 8099, 8100, 8101, 8102, 8103,
+ 8104, 8105, 8106, 8107, 8108, 8109, 8110, 8111, 8112, 8113,
+ 8114, 8115, 8116, 8117, 8118, 8119, 8121, 8158, 8159, 8160,
+ 8161, 8162, 8163, 8164, 8165, 8166, 8167, 8168, 8169, 8170,
+ 8171, 8172, 8173, 8174, 8175, 8176, 8177, 8178, 8179, 8180,
+ 8181, 8182, 8183, 8208, 8210, 8211, 8212, 8213, 8214, 8215,
+ 8216, 8217, 8218, 8219, 8220, 8221, 8222, 8223, 8224, 8225,
+ 8226, 8227, 8228, 8229, 8230, 8231, 8232, 8233, 8234, 8235,
+ 8236, 8237, 8238, 8239, 8240, 8241, 8242, 8243, 8244, 8245,
+ 8246, 8247, 8248, 8249, 8250, 8251, 8252, 8253, 8254, 8255,
+ 8256, 8257, 8258, 8259, 8260, 8261, 8262, 8263, 8264, 8265,
+ 8271, 8301, 8302, 8303, 8304, 8305, 8306, 8307, 8308, 8309,
+ 8310, 8311, 8312, 8313, 8314, 8315, 8316, 8317, 8318, 8319,
+ 8321, 8322, 8323, 8324, 8325, 8326, 8327, 8328, 8329, 8349,
+ 8351, 8356, 8357, 8359, 8360, 8361, 8362, 8363, 8364, 8365,
+ 8366, 8367, 8368, 8369, 8370, 8371, 8372, 8373, 8374, 8375,
+ 8376, 8377, 8378, 8379, 8380, 8381, 8382, 8383, 8384, 8385,
+ 8386, 8387, 8388, 8389, 8390, 8391, 8392, 8393, 8394, 8395,
+ 8396, 8397, 8398, 8399, 8400, 8401, 8402, 8403, 8404, 8405,
+ 8406, 8407, 8415, 8416, 8444, 8445, 8446, 8447, 8448, 8449,
+ 8450, 8451, 8452, 8453, 8454, 8455, 8456, 8457, 8458, 8459,
+ 8460, 8461, 8462, 8463, 8465, 8466, 8467, 8468, 8469, 8470,
+ 8471, 8472, 8493, 8495, 8501, 8502, 8506, 8507, 8508, 8509,
+ 8510, 8511, 8512, 8513, 8514, 8515, 8516, 8517, 8518, 8519,
+ 8520, 8521, 8522, 8523, 8524, 8525, 8526, 8527, 8528, 8529,
+ 8530, 8531, 8532, 8533, 8534, 8535, 8536, 8537, 8538, 8539,
+ 8540, 8541, 8542, 8543, 8544, 8545, 8546, 8547, 8548, 8549,
+ 8550, 8551, 8552, 8560, 8577, 8578, 8586, 8587, 8588, 8589,
+ 8590, 8591, 8592, 8593, 8594, 8595, 8596, 8597, 8598, 8599,
+ 8600, 8601, 8602, 8603, 8610, 8611, 8612, 8613, 8614, 8615,
+ 8638, 8643, 8644, 8645, 8646, 8647, 8650, 8651, 8652, 8653,
+ 8654, 8655, 8656, 8657, 8658, 8659, 8660, 8661, 8662, 8663,
+ 8664, 8665, 8666, 8667, 8668, 8669, 8670, 8671, 8672, 8673,
+ 8674, 8675, 8676, 8677, 8678, 8679, 8680, 8681, 8682, 8683,
+ 8684, 8685, 8686, 8687, 8688, 8689, 8690, 8691, 8692, 8693,
+ 8694, 8695, 8696, 8697, 8698, 8699, 8700, 8701, 8705, 8720,
+ 8721, 8722, 8724, 8728, 8729, 8730, 8731, 8732, 8733, 8734,
+ 8735, 8736, 8737, 8738, 8739, 8740, 8741, 8742, 8743, 8744,
+ 8745, 8746, 8753, 8754, 8755, 8787, 8788, 8789, 8790, 8791,
+ 8793, 8794, 8795, 8796, 8797, 8798, 8799, 8800, 8801, 8802,
+ 8803, 8804, 8805, 8806, 8807, 8808, 8809, 8810, 8811, 8812,
+ 8813, 8814, 8815, 8816, 8817, 8818, 8819, 8820, 8821, 8822,
+ 8823, 8824, 8825, 8826, 8827, 8828, 8829, 8830, 8831, 8832,
+ 8833, 8834, 8835, 8836, 8837, 8838, 8839, 8840, 8841, 8842,
+ 8843, 8844, 8845, 8846, 8847, 8848, 8849, 8850, 8851, 8852,
+ 8853, 8854, 8862, 8863, 8864, 8865, 8866, 8867, 8868, 8869,
+ 8870, 8871, 8872, 8873, 8874, 8875, 8876, 8877, 8878, 8879,
+ 8880, 8881, 8882, 8883, 8884, 8885, 8886, 8887, 8888, 8889,
+ 8890, 8891, 8898, 8899, 8908, 8909, 8910, 8911, 8933, 8934,
+ 8935, 8936, 8937, 8938, 8939, 8940, 8941, 8942, 8944, 8945,
+ 8946, 8947, 8948, 8949, 8950, 8951, 8952, 8953, 8954, 8955,
+ 8956, 8957, 8958, 8959, 8960, 8961, 8962, 8963, 8964, 8965,
+ 8966, 8967, 8968, 8969, 8970, 8971, 8972, 8973, 8974, 8975,
+ 8976, 8977, 8978, 8979, 8980, 8981, 8982, 8983, 8984, 8985,
+ 8986, 8987, 8988, 8989, 8990, 8991, 8992, 8993, 8994, 8995,
+ 8996, 8997, 8998, 8999, 9000, 9001, 9002, 9003, 9006, 9007,
+ 9008, 9009, 9010, 9011, 9012, 9013, 9014, 9015, 9016, 9017,
+ 9018, 9019, 9020, 9021, 9022, 9023, 9025, 9026, 9027, 9028,
+ 9029, 9030, 9031, 9032, 9033, 9034, 9035, 9036, 9038, 9043,
+ 9044, 9045, 9051, 9052, 9053, 9054, 9055, 9056, 9064, 9065,
+ 9066, 9078, 9079, 9080, 9081, 9082, 9083, 9084, 9085, 9086,
+ 9087, 9088, 9090, 9092, 9093, 9094, 9095, 9096, 9097, 9098,
+ 9099, 9100, 9102, 9103, 9104, 9105, 9106, 9107, 9108, 9109,
+ 9110, 9111, 9112, 9113, 9114, 9115, 9116, 9117, 9118, 9119,
+ 9120, 9121, 9122, 9123, 9124, 9125, 9126, 9127, 9128, 9129,
+ 9130, 9131, 9132, 9133, 9134, 9135, 9136, 9137, 9138, 9139,
+ 9140, 9141, 9142, 9143, 9144, 9145, 9151, 9152, 9153, 9154,
+ 9155, 9156, 9157, 9158, 9159, 9160, 9161, 9162, 9163, 9164,
+ 9165, 9166, 9167, 9168, 9169, 9170, 9171, 9172, 9173, 9174,
+ 9175, 9176, 9177, 9178, 9179, 9180, 9181, 9182, 9183, 9187,
+ 9188, 9189, 9190, 9195, 9196, 9197, 9198, 9199, 9200, 9201,
+ 9202, 9224, 9225, 9226, 9227, 9228, 9241, 9244, 9246, 9247,
+ 9248, 9249, 9250, 9251, 9252, 9253, 9254, 9255, 9256, 9257,
+ 9258, 9259, 9260, 9261, 9262, 9263, 9264, 9265, 9266, 9267,
+ 9268, 9269, 9270, 9271, 9272, 9273, 9274, 9275, 9276, 9277,
+ 9278, 9279, 9282, 9285, 9286, 9296, 9297, 9298, 9299, 9300,
+ 9301, 9302, 9303, 9308, 9309, 9311, 9314, 9315, 9316, 9317,
+ 9318, 9322, 9323, 9326, 9327, 9330, 9331, 9332, 9340, 9341,
+ 9342, 9343, 9344, 9345, 9346, 9347, 9348, 9349, 9350, 9382,
+ 9393, 9394, 9395, 9396, 9397, 9398, 9399, 9400, 9401, 9402,
+ 9403, 9404, 9405, 9406, 9407, 9408, 9409, 9410, 9411, 9455,
+ 9456, 9457, 9458, 9461, 9465, 9466, 9467, 9469, 9470, 9471,
+ 9482, 9483, 9484, 9485, 9486, 9487, 9488, 9489, 9490, 9491,
+ 9492, 9493, 9494, 9495, 9527, 9528, 9540, 9541, 9542, 9543,
+ 9544, 9545, 9546, 9547, 9548, 9549, 9560, 9602, 9603, 9605,
+ 9608, 9611, 9612, 9613, 9614, 9615, 9626, 9627, 9628, 9629,
+ 9630, 9631, 9632, 9633, 9634, 9635, 9636, 9637, 9638, 9639,
+ 9655, 9690, 9757, 9758, 9759, 9760, 9761, 9765, 9766, 9767,
+ 9768, 9769, 9770, 9771, 9772, 9773, 9774, 9775, 9776, 9777,
+ 9778, 9779, 9780, 9781, 9782, 9783, 9784, 9798, 9799, 9800,
+ 9830, 9831, 9899, 9900, 9901, 9902, 9903, 9904, 9905, 9906,
+ 9909, 9910, 9911, 9912, 9913, 9914, 9915, 9916, 9917, 9918,
+ 9919, 9920, 9921, 9922, 9923, 9924, 9925, 9926, 9927, 9928,
+ 10048, 10049, 10050, 10051, 10052, 10062, 10063, 10064,
+ 10065, 10066, 10067, 10068, 10069]
+
+ def parse_template(self):
+ """Split the start_time/end_time templates ("YYYY MM DD HH SEASON") into their component fields on self."""
+
+ (self.start_year,self.start_month,self.start_day,self.start_hour,
+ self.start_season) = self.start_time.split()
+
+ (self.end_year,self.end_month,self.end_day,self.end_hour,
+ self.end_season) = self.end_time.split()
+
+ def check_time(self):
+ """Build self.time_check from the parsed start/end fields; an empty list means no time screening is needed."""
+
+ self.time_filter = ['HH','DD','MM','YYYY']  # screening order: finest to coarsest field
+
+ # Layout: [start YYYY,MM,DD,HH,SEASON, end YYYY,MM,DD,HH,SEASON]; -1/10000 sentinels mean "ignore".
+ self.time_check = [-1,-1,-1,-1,-1,10000,10000,10000,10000,10000]
+
+ # Update time_check
+ if self.start_year != "YYYY":
+ self.time_check[0] = int(self.start_year)
+ if self.start_month != "MM":
+ self.time_check[1] = int(self.start_month)
+ if self.start_day != "DD":
+ self.time_check[2] = int(self.start_day)
+ if self.start_hour != "HH":
+ self.time_check[3] = int(self.start_hour)
+ if self.start_season != "SEASON":
+ self.time_check[4] = self.start_season
+ # SEASON is mutually exclusive with the other start-time limits.
+ lead = [x for x in self.time_check[:4] if x > 0]
+ if lead:
+ warning = "SEASON can't be used concurrently with other time"
+ warning = warning + " limits. Aborting."
+ sys.exit(warning)
+
+ if self.end_year != "YYYY":
+ self.time_check[5] = int(self.end_year)
+ if self.end_month != "MM":
+ self.time_check[6] = int(self.end_month)
+ if self.end_day != "DD":
+ self.time_check[7] = int(self.end_day)
+ if self.end_hour != "HH":
+ self.time_check[8] = int(self.end_hour)
+ # HH must be identical for start and end.
+ if self.end_hour != self.start_hour:
+ warning = "HH must be the same for start and end. Aborting."
+ sys.exit(warning)
+ if self.end_season != "SEASON":
+ self.time_check[9] = self.end_season
+ # SEASON must match between start and end.
+ if self.start_season != self.end_season:
+ warning = "SEASON start and end mismatched"
+ warning = warning + " limits. Aborting."
+ sys.exit(warning)
+
+ # If no start or end criterion was set, disable time screening entirely.
+ lead = [x for x in self.time_check[:5] if x > 0]
+ tails = [x for x in self.time_check[5:] if x != 10000]
+ if not lead and not tails:
+ self.time_check = []
+
+ def check_place(self):
+ """Build self.place_check (collection of allowed gridIDs) from self.places; an empty list means no place screening."""
+
+ if self.places[0] == "GLOBAL":
+ # Default ensures all place related criteria ignored
+ self.place_check = []
+ elif self.places[0] == "NH":
+ # Include only centers from the Northern Hemisphere.
+ self.place_check = range(self.tropical_end,self.maxID+1)
+ elif self.places[0] == "SH":
+ # Include only centers from the Southern Hemisphere.
+ self.place_check = range(0,self.tropical_start+1)
+ elif self.places[0] == "LAND":
+ self.place_check = self.land_gridids  # gridIDs flagged as land by setup
+ elif self.places[0] == "SEA":
+ self.place_check = [x for x in range(self.maxID)
+ if x not in self.land_gridids]
+ else:
+ # Otherwise self.places is taken as an explicit list of gridIDs.
+ self.place_check = [int(x) for x in self.places]
+
+ def start_IO(self):
+ """
+ Open self.in_file for reading and, when self.save_output is set, an output file for writing; returns (read_file, write_file).
+ """
+
+ # Open file for read
+ try:
+ read_file = open(self.in_file,"r")
+ except IOError:
+ sys.exit("The '%s' does not exist.\nStopping" % (self.in_file))
+
+ if self.save_output:
+ # Open output file for write
+ if not self.out_file:
+ # Derive out_file from in_file (".txt" -> "_new.txt").
+ try:
+ self.out_file = self.in_file.replace(".txt","_new.txt")
+ if os.path.exists(self.out_file) and not self.overwrite:
+ warning = "WARNING out_file: %s exists"
+ sys.exit(warning % (self.out_file))
+ write_file = open(self.out_file,"w")
+ except IOError:
+ warning = "Cannot create file '%s'.\nStopping"
+ sys.exit(warning % (self.out_file))
+ else:
+ # Open out_file as provided
+ if os.path.exists(self.out_file) and not self.overwrite:
+ warning = "WARNING out_file: %s exists"
+ sys.exit(warning % (self.out_file))
+ try:
+ write_file = open(self.out_file,"w")
+ except IOError:
+ warning = "Cannot create file '%s'.\nStopping"
+ sys.exit(warning % (self.out_file))
+ out_file = self.out_file  # NOTE(review): if this sits inside the else branch, out_file is unbound on the auto-named path above -- verify indentation
+ else:
+ # Not writing a file; use a sentinel string as the "write handle".
+ write_file = 'Saving to memory.'
+ out_file = write_file
+
+ if self.verbose:
+ print ("Reading: '%s'\nWriting: '%s'" % (self.in_file,out_file))
+
+ return read_file,write_file
+
+ def fill_tracks(self):
+ """Read the tracks database (self.as_tracks) into self.sorted_tracks: USI key -> list of member UCIs."""
+
+ # Open file for read
+ try:
+ read_file = open(self.as_tracks,"r")
+ except IOError:
+ warning = "The file '%s' does not exist" % (self.as_tracks)
+ warning = warning + " ... Aborting"
+ sys.exit(warning)
+
+ self.sorted_tracks = {}
+ for line in read_file:
+ entry = line.split()
+ usi = entry[0]  # first token is the track key (USI); remaining tokens are its UCIs
+ self.sorted_tracks[usi] = [x for x in entry if x != usi]
+ read_file.close()
+
+ def fetch_centers(self):
+ """Read read_file line by line, apply the time/place screens, and either write lines out or fill the in-memory holders."""
+
+ # Create holding dictionary if not writing to file.
+ if not self.save_output:
+ self.center_holder = {} # key by UCI
+ self.problematic = {} # key by JD
+ self.stormy = {} # key by JD
+
+ # Create holding dictionary
+ if self.detail_tracks:
+ self.save_output = False
+ self.sorted_tracks = {} # key by USI
+
+ # Deal with as_tracks
+ if self.as_tracks:
+ # Read tracks dbase and fill sorted_tracks.
+ self.save_output_original = self.save_output
+ self.save_output = False
+ self.fill_tracks()
+ self.track_holder = {}
+
+ # Start file I/O
+ read_file,write_file = self.start_IO()
+
+ exit_early = 0
+ started = 0
+
+ # Read one line at a time and process
+ for line in read_file:
+ too_early = 0
+
+ # Process line
+ fnc = self.strip_read(line)
+
+ if self.as_tracks:
+ # Need to store whole of record in memory to test
+ if fnc[0] == 0:
+ # if self.track_holder.has_key(fnc[1]['USI']):
+ if fnc[1]['USI'] in self.track_holder:
+ # Append to existing track
+ old = self.track_holder[fnc[1]['USI']]
+ old.append(line)
+ self.track_holder[fnc[1]['USI']] = old
+ else:
+ # Create a new track
+ self.track_holder[fnc[1]['USI']] = [line]
+ # time_check and places screening delayed
+ continue
+
+ # See if a center or empty center
+ if fnc[0] == 0 or fnc[0] == -999:
+
+ # See if passes extra search criteria
+ if self.time_check:
+ # See if Season screen
+ if self.time_check[4] != -1:
+ test = self.mm2season[fnc[1]['MM']]
+ if self.time_check[4] not in test:
+ # Get another
+ continue
+ else:
+ # See if pass start/end requirements
+ bot = 4
+ top = 9
+ for tf in self.time_filter:
+ bot -= 1
+ top -= 1
+ if self.time_check[bot] > fnc[1][tf]:
+ # Read another line until reach start
+ too_early = 1
+ break
+ elif fnc[1][tf] > self.time_check[top]:
+ # Passed end
+ exit_early = 1
+ # Only stops time_filter loop
+ break
+ if too_early:
+ continue
+ if exit_early:
+ # Stop reading altogether
+ break
+
+ if self.place_check:
+ # Add center
+ screen = [fnc[1]['GridID']]
+
+ if self.include_atts and fnc[3]:
+ screen.extend(fnc[3])
+
+ if self.include_stormy:
+ # Allow 'stormy' grids into place check to do this you
+ # need to pre-populate self.stormy_uci.
+ if fnc[1]['UCI'] in self.stormy_uci.keys():
+ screen.extend(self.stormy_uci[fnc[1]['UCI']])
+
+ # Check for intersection of place_passed and fnc[3]
+ place_passed = [x for x in screen if x in self.place_check]
+ if not place_passed:
+ # Read another line
+ continue
+
+ # Store most recently read julian date
+ jd = fnc[1]['JD']
+
+ # If I haven't started saving yet, pass on saving stormy or problematic
+ # lines as these are uncheckable for time or place.
+ if not started:
+ if self.time_check or self.place_check:
+ if fnc[0] == -888 or fnc[0] == -777:
+ continue
+ started = 1
+
+ # This center passed the time and place screens
+ if self.just_center_table:
+
+ if fnc[0] == 0 or fnc[0] == -999:
+
+# lon = fnc[1]['Lon']*0.01
+# if lon > 180.0:
+# lon = lon - 360.0
+# lat = 90.0 - (fnc[1]['CoLat']*0.01)
+# tmp = ("%(YYYY)d %(MM)d %(DD)d %(HH)d")
+
+ if self.save_output:  # NOTE(review): tmp/lon/lat are never defined (their setup above is commented out) -- this write would raise NameError
+ # Save just center info file
+ write_file.write(tmp % fnc[1] +
+ " %.2f %.2f\n" % (lon,lat))
+ else:
+
+ if self.detail_tracks:
+ if fnc[0] == 0:
+ if fnc[1]['USI'] == "00000000000000000000":
+ warning = "WARNING USI suggests non-tracking data!"
+ warning = warning + " Aborting detail_tracks."
+ sys.exit(warning)
+ # if self.sorted_tracks.has_key(fnc[1]['USI']):
+ if fnc[1]['USI'] in self.sorted_tracks:
+ # Append to existing track
+ old = self.sorted_tracks[fnc[1]['USI']]
+ old.append(fnc[1]['UCI'])
+ self.sorted_tracks[fnc[1]['USI']] = old
+ else:
+ # Create a new track
+ self.sorted_tracks[fnc[1]['USI']] = [fnc[1]['UCI']]
+ # Skip the rest and get another line
+ continue
+
+ # Need to pass what wanted so don't have to tweak this for
+ # different uses.
+ # Save to a dictionary/memory
+# # tmp = [fnc[1]['YYYY'],fnc[1]['MM'],fnc[1]['DD'],
+# # fnc[1]['HH'],lon,lat]]
+
+# tmp = [fnc[1]['YYYY'],fnc[1]['MM'],fnc[1]['DD'],
+# # fnc[1]['HH'],fnc[1]['JD'],lat,lon,fnc[1]['GridID'],
+# fnc[1]['HH'],fnc[1]['JD'],fnc[1]['CoLat'],fnc[1]['Lon'],fnc[1]['GridID'],
+# fnc[1]['GridSLP'],fnc[1]['RegSLP'],fnc[1]['GridLAP'],
+# fnc[1]['Flags'], fnc[1]['Intensity'], fnc[1][ 'Disimularity']]
+
+ # Use with check when using centers or tracking output pre-att.
+ tmp = [fnc[1]['YYYY'],fnc[1]['MM'],fnc[1]['DD'],
+ fnc[1]['HH'],fnc[1]['JD'],fnc[1]['CoLat'],fnc[1]['Lon'],fnc[1]['GridID'],
+ fnc[1]['GridSLP'],fnc[1]['RegSLP'],fnc[1]['GridLAP'],
+ fnc[1]['Flags'], fnc[1]['Intensity'], fnc[1]['Disimularity'],
+ fnc[1]['UCI'],fnc[1]['USI']]
+
+ self.center_holder[fnc[1]['UCI']] = tmp
+
+
+ else:
+
+ if self.detail_tracks:
+ if fnc[0] == 0 or fnc[0] == -999:
+
+ if fnc[1]['USI'] == "00000000000000000000":
+ warning = "WARNING USI suggests non-tracking data!"
+ warning = warning + " Aborting detail_tracks."
+ sys.exit(warning)
+ if fnc[1]['USI'] in self.sorted_tracks:
+ # Append to existing track
+ old = self.sorted_tracks[fnc[1]['USI']]
+ old.append(fnc[1]['UCI'])
+ self.sorted_tracks[fnc[1]['USI']] = old
+ else:
+ # Create a new track
+ self.sorted_tracks[fnc[1]['USI']] = [fnc[1]['UCI']]
+ # Skip the rest and get another line
+ continue
+
+ if self.save_output:
+ # Save everything to file
+ write_file.write(line)
+ else:
+ if fnc[0] == 0:
+ # Return basic center
+ tmp = [fnc[1]['YYYY'],fnc[1]['MM'],fnc[1]['DD'],
+ fnc[1]['HH'],fnc[1]['JD'],fnc[1]['CoLat'],
+ fnc[1]['Lon'],fnc[1]['GridID'],fnc[1]['GridSLP'],
+ fnc[1]['RegSLP'],fnc[1]['GridLAP'],
+ fnc[1]['Flags'],fnc[1]['Intensity'],
+ fnc[1]['Disimularity'],fnc[1]['UCI'],
+ fnc[1]['USI'],fnc[2]['NGrids'],fnc[2]['Area'],
+ fnc[2]['Depth'],fnc[2]['NearestCenterDist'],
+ fnc[2]['NearestCenterAngle'],
+ fnc[2]['MinOuterEdgeDist'],
+ fnc[2]['MaxOuterEdgeDist'],
+ fnc[2]['AveOuterEdgeDist'],[x for x in fnc[3]]]
+ self.center_holder[fnc[1]['UCI']] = tmp
+ elif fnc[0] == -999:
+ # Return empty center
+ tmp = [fnc[1]['YYYY'],fnc[1]['MM'],fnc[1]['DD'],
+ fnc[1]['HH'],fnc[1]['JD'],fnc[1]['CoLat'],
+ fnc[1]['Lon'],fnc[1]['GridID'],fnc[1]['GridSLP'],
+ fnc[1]['RegSLP'],fnc[1]['GridLAP'],
+ fnc[1]['Flags'],fnc[1]['Intensity'],
+ fnc[1]['Disimularity'],fnc[1]['UCI'],
+ fnc[1]['USI']]
+ self.center_holder[fnc[1]['UCI']] = tmp
+ elif(fnc[0] == -888):
+ # Stash near_stormy record keyed by the last center's JD
+ self.stormy[jd] = [fnc[1],fnc[2],fnc[3],fnc[4],fnc[5]]
+ elif(fnc[0] == -777):
+ # Stash problematic gridids keyed by the last center's JD
+ self.problematic[jd] = fnc[1]
+ read_file.close()
+ if self.save_output:
+ write_file.close()
+
+ def fetch_stormy(self):
+ """Read read_file, apply the time screen, and fill self.stormy_uci with stormy gridIDs keyed by primary-center UCI."""
+
+ # Open file for read
+ try:
+ read_file = open(self.in_file,"r")
+ except IOError:
+ sys.exit("The '%s' does not exist.\nStopping" % (self.in_file))
+
+ self.stormy_uci = {} # key by UCI
+ exit_early = 0
+
+ # Read one line at a time and process
+ for line in read_file:
+
+ too_early = 0
+
+ # Process line
+ fnc = self.strip_read(line)
+
+ # See if a center or empty center
+ if fnc[0] == 0 or fnc[0] == -999:
+
+ # See if passes extra search criteria
+ if self.time_check:
+ # See if Season screen
+ if self.time_check[4] != -1:
+ test = self.mm2season[fnc[1]['MM']]
+ if self.time_check[4] not in test:
+ # Get another
+ continue
+ else:
+ # See if pass start/end requirements
+ bot = 4
+ top = 9
+ for tf in self.time_filter:
+ bot -= 1
+ top -= 1
+ if self.time_check[bot] > fnc[1][tf]:
+ # Read another line until reach start
+ too_early = 1
+ break
+ elif fnc[1][tf] > self.time_check[top]:
+ # Passed end
+ exit_early = 1
+ # Only stops time_filter loop
+ break
+ if too_early:
+ continue
+ if exit_early:
+ # Stop reading altogether
+ break
+
+ # Skip place check as need to use these results for that
+
+ if(fnc[0] == -888):
+ # For a -888 record, fnc[1] is the primary-center UCI and fnc[5] its stormy gridIDs (see strip_read).
+ self.stormy_uci[fnc[1]] = fnc[5]
+
+ read_file.close()
+
+ def strip_read(self,line):
+ """
+ Parse one line of a centers/tracks file, dispatching on the leading flag
+ token; returns a tuple whose first element is 0, -888, -777 or -999.
+ """
+ parts = line.split()
+ flag = int(parts[0])
+
+ # print "Jimmy in strip_read"
+ # print parts
+ # print flag
+ # print self.just_center_table
+
+ if flag > 0:  # a positive "flag" is really the center's YYYY field
+ # Unpack a center
+ center_table = {'YYYY' : int(parts[0]),
+ 'MM' : int(parts[1]),
+ 'DD' : int(parts[2]),
+ 'HH' : int(parts[3]),
+ 'JD' : int(parts[4]),
+ 'CoLat' : int(parts[5]),
+ 'Lon' : int(parts[6]),
+ 'GridID' : int(parts[7]),
+ 'GridSLP' : int(parts[8]),
+ 'RegSLP' : int(parts[9]),
+ 'GridLAP' : int(parts[10]),
+ 'Flags' : int(parts[11]),
+ 'Intensity' : int(parts[12]),
+ 'Disimularity' : int(parts[13]),
+ 'UCI' : parts[14],
+ 'USI' : parts[15]
+ }
+ if self.just_center_table:
+ attribution_table = {'NGrids' : 1,
+ 'Area' : 1,
+ 'Depth' : 1,
+ 'NearestCenterDist' : 1,
+ 'NearestCenterAngle' : 1,
+ 'MinOuterEdgeDist' : 1,
+ 'MaxOuterEdgeDist' : 1,
+ 'AveOuterEdgeDist' : 1
+ }
+ else:
+ # Unpack attribution group
+ attribution_table = {'NGrids' : int(parts[16]),
+ 'Area' : int(parts[17]),
+ 'Depth' : int(parts[18]),
+ 'NearestCenterDist' : int(parts[19]),
+ 'NearestCenterAngle' : int(parts[20]),
+ 'MinOuterEdgeDist' : int(parts[21]),
+ 'MaxOuterEdgeDist' : int(parts[22]),
+ 'AveOuterEdgeDist' : int(parts[23])
+ }
+
+ # If needed extract and return attribution gridIDs
+ if self.include_atts:
+ natts = int(parts[16])
+ atts = parts[24:23+natts]  # NOTE(review): slice yields natts-1 items; parts[24:24+natts] looks intended -- confirm
+ atts = [int(x) for x in atts]
+ else:
+ atts = []
+
+ return (0,center_table,attribution_table,atts)
+
+ elif(flag == -888):
+ # Unpack near_stormy gridids
+ skip = int(parts[2])
+ jump = 2+skip+1
+ hop = jump + 2
+ nstormy = int(parts[jump])
+ stormy = parts[hop:hop+nstormy]
+ stormy = [int(x) for x in stormy]
+ if nstormy != len(stormy):
+ print ("Read Error Stormy",parts)
+ print (nstormy,len(stormy),stormy)
+ return (-888,parts[1],int(parts[2]),parts[3],int(parts[4]),stormy)
+
+ elif(flag == -777):
+ # Unpack problematic gridids
+ probs = [int(x) for x in parts[1:]]
+ return ((-777,probs))
+
+ elif(flag == -999):
+ # Unpack empty centers
+ center_table = {'YYYY' : int(parts[1]),
+ 'MM' : int(parts[2]),
+ 'DD' : int(parts[3]),
+ 'HH' : int(parts[4]),
+ 'JD' : int(parts[5]),
+ 'CoLat' : int(parts[6]),
+ 'Lon' : int(parts[7]),
+ 'GridID' : int(parts[8]),
+ 'GridSLP' : int(parts[9]),
+ 'RegSLP' : int(parts[10]),
+ 'GridLAP' : int(parts[11]),
+ 'Flags' : int(parts[12]),
+ 'Intensity' : int(parts[13]),
+ 'Disimularity' : int(parts[14]),
+ 'UCI' : parts[15],
+ 'USI' : parts[16]
+ }
+ return (-999,center_table,[],[])
+
+ else:
+ sys.exit("Yikes! Unrecognized flag: %s" %(repr(flag)))
+
+ def save_tracks(self):
+ """Write self.sorted_tracks to self.detail_tracks, one line per USI followed by its member UCIs."""
+
+ # Start file I/O
+ print ("JIMMY in save_tracks")
+ print (self.detail_tracks)
+ try:
+ write_file = open(self.detail_tracks,"w")
+ except IOError:
+ warning = "Cannot create file '%s'.\nStopping"
+ sys.exit(warning % (self.detail_tracks))
+
+ tracks = list(self.sorted_tracks.keys())
+ tracks.sort()
+ for usi in tracks:
+ uci = self.sorted_tracks[usi]
+ write_file.write("%s" % (usi))
+ for usi in uci:  # NOTE: shadows the outer loop variable; each item written here is a UCI
+ write_file.write(" %s" % (usi))
+ write_file.write("\n")
+ write_file.close()
+
+ def dump_tracks(self):
+ """Screen whole tracks in self.track_holder by time/place, then write tracks longer than 5 centers to the output file."""
+
+ # Reset save_output
+ self.save_output = self.save_output_original
+
+ # Start file I/O
+ read_file,write_file = self.start_IO()
+
+ # JIMMY ADDED TO SORT THE TRACKS
+ alltracks = self.track_holder
+ sortedtracks = sorted(alltracks)
+ # For some reason, it worked better to assign this later on
+ # though it may have been a different error that was creating
+ # problems.
+ # self.track_holder = sortedtracks
+ track_counter = -1
+ long_storm_count = -1
+ # JIMMY ALL OF THE ABOVE IS NEW
+
+ for usi in self.track_holder:
+ track_counter = track_counter+1
+ track_passed = 1 # Defaults to pass if no time_check needed
+
+ # See if any center passes extra search criteria
+ if self.time_check:
+ for line in self.track_holder[usi]:
+ track_passed = 0 # set to fail
+ # Process UCI
+ fnc = self.strip_read(line)
+
+ # See if Season screen
+ if self.time_check[4] != -1:
+ test = self.mm2season[fnc[1]['MM']]
+ if self.time_check[4] not in test:
+ track_passed = 1
+ # If even one passes whole thing passes
+ break
+ else:
+ # See if pass start/end requirements. NOTE(review): this only tests whether a bound was set, not the center's own date -- confirm intended.
+ bot = 4
+ top = 9
+ for tf in self.time_filter:
+ bot -= 1
+ top -= 1
+ if (self.time_check[top] < 10000 or
+ self.time_check[bot] > 0):
+ track_passed = 1
+ # If even one passes whole thing passes
+ break
+
+ if not self.place_check:
+ track_passed = 1 # ensure writes if not tested
+
+ if self.place_check and track_passed:
+ track_passed = 0 # Reset to False
+ for line in self.track_holder[usi]:
+ # Process UCI
+ fnc = self.strip_read(line)
+
+ # Add center
+ screen = [fnc[1]['GridID']]
+
+ if self.include_atts and fnc[3]:
+ screen.extend(fnc[3])
+
+ if self.include_stormy:
+ # Allow 'stormy' grids into place check to do this you
+ # need to pre-populate self.stormy_uci.
+ if fnc[1]['UCI'] in self.stormy_uci.keys():
+ screen.extend(self.stormy_uci[fnc[1]['UCI']])
+
+ # Check for intersection of place_passed and fnc[3]
+ place_passed = [x for x in screen if x in self.place_check]
+ if place_passed:
+ track_passed = 1
+ # If even one passes whole thing passes
+ break
+
+ if track_passed:
+ # JIMMY, test writing only the usi to file.
+ #write_file.write(usi)
+ #write_file.write(str(track_counter))
+ #print usi
+ #print self.track_holder[usi]
+ #print track_counter
+ #if track_counter > 10:
+ # sys.exit()
+
+ usi2 = sortedtracks[track_counter]  # NOTE(review): pairs sorted order with dict-iteration order -- only consistent if they match; confirm
+ # TEST THE LENGTH OF THE CENTER
+ center_duration = 0
+ for line in self.track_holder[usi2]:
+ center_duration = center_duration + 1
+
+ if center_duration > 5:
+ # Passed all filters write line to file
+ long_storm_count = long_storm_count + 1
+ # JIMMY MOST OF THE ABOVE IS NEW
+ for line in self.track_holder[usi2]:
+ write_file.write(line)
+ # write_file.write(usi)
+ # JIMMY, comment out -444 separator for matlab readable.
+ # write_file.write("-444\n")
+ write_file.close()
+ read_file.close()
+ # JIMMY: write the number of cyclones.
+ print (track_counter)
+ print (long_storm_count)
+
+#---Start of main code block. This show how to use the above code
+if __name__=='__main__':
+
+ import sys
+ import pickle
+
+ # Create a log file?
+ log = 0
+
+ # --------------------------------------------------------------------------
+ # Select options for this run.
+ # --------------------------------------------------------------------------
+
+ # This next set of lines should be copied from setup_vX.py
+ # Short names by which model will be labeled.
+ model = defines.model
+
+ # Length of file ending to replace if using year_loop
+ tails = ["_att.txt","_tracks.txt","_centers.txt","_dumped_centers.txt"]
+ # JIMMY must use _tracks.txt
+ # tail = tails[0]
+ tail = tails[1]
+
+ cut_tail = len(tail)
+
+ # Flags
+ # tracks: track info included in file
+ # atts: attribute info included in file
+ #
+ # Note tweaked self.just_center_table: in mcms_read for center/track pre-att read
+ # also watch detail_tracks names in template
+ tracks = ""
+ atts = ""
+ if tail.find("tracks") != -1:
+ tracks = 1
+ if tail.find("att") != -1:
+ atts = 1
+ # Note atts files can contain track info so if you want
+ # track statistics for an att file manually set tracks
+ # to 1 here.
+ #tracks = 1
+
+ # --------------------------------------------------------------------------
+ # Alter default behavior found in either defs_vX.py or setup_vX.py
+ # --------------------------------------------------------------------------
+
+ # This next set of lines should be copied from setup_vX.py
+ # Full path to the root directory where model specific output will be stored.
+
+ result_directory = defines.out_folder
+ if not os.path.exists(result_directory):
+ sys.exit("ERROR: result_directory not found.")
+
+ # Directory to be created for storing temporary model specific files.
+ shared_path = "%s%s_files/" % (result_directory,model)
+
+ # The default behavior is to run over all the
+ # years found by setup_vX.py. Here you can
+ # elect to override this behavior.
+ # over_write_years = []
+ #over_write_years = [2007,2008]
+ over_write_years = defines.over_write_years
+
+ # Get some definitions. Note must have run setup_vx.py already!
+ sf_file = "%ss_dat.p" % (shared_path)
+ try:
+ fnc_out = pickle.load(open(sf_file, 'rb'))
+ (im,jm,maxid,lats,lons,timestep,dx,dy,dlon,dlat,start_lat,start_lon,
+ dlon_sq,dlat_sq,two_dlat,model_flag,eq_grid,tropical_n,tropical_s,
+ bot,mid,top,row_start,row_end,tropical_n_alt,tropical_s_alt,
+ bot_alt,top_alt,lon_shift,lat_flip,the_calendar,found_years,
+ super_years,dim_lat,dim_lon,dim_time,var_lat,var_lon,var_time,
+ var_slp,var_topo,var_land_sea_mask,file_seperator,no_topo,
+ no_mask,slp_path,model,out_path,shared_path,lat_edges,lon_edges,
+ land_gridids,troubled_centers, faux_grids) = fnc_out
+ #
+ # JIMMY, IN THE LINE ABOVE, I ADD ,faux_grids TO MATCH CENTER_FINDER CALL TO s_dat.p
+ # Save memory
+ del troubled_centers
+ del lat_edges
+ del lon_edges
+ del fnc_out
+ # except: # removed by JJ
+ except Exception as e:
+ sys.exit("\n\tWARNING: Error reading or finding %s" % (sf_file))
+
+ header = "mcms_%s_%04d" % (model,int(super_years[0]))
+
+ in_file = "%s%s%s" % (out_path,header,tail)
+ # JIMMY, verbose.
+ #print header
+ #print in_file
+ #print "len(sys.argv), and sys.argv"
+ #print len(sys.argv)
+ #print sys.argv
+
+ if len(sys.argv) == 1:
+ # Set definitions and instantiate read_mcms w/out a template
+ what_do = {"model" : model,
+ "in_file" : in_file,
+ "out_file" : "",
+ # JIMMY "just_center_table" : False,
+ "just_center_table" : True,
+ # JIMMY "detail_tracks" : tracks,
+ "detail_tracks" : 'jimmy_tracks',
+ "as_tracks" : "",
+ "start_time" : "%04d MM DD HH SEASON" % (int(super_years[0])),
+ "end_time" : "%04d MM DD HH SEASON" % (int(super_years[0])),
+ "places" : ["GLOBAL"],
+ "include_atts" : atts,
+ "include_stormy" : atts,
+ "just_centers" : False,
+ "save_output" : False,
+ "overwrite" : True
+ }
+ # Pass in model definitions, if sf_file available this is simple.
+
+ # Provide values
+ what_do["tropical_end"] = row_end[tropical_n_alt]
+ what_do["tropical_start"] = row_start[tropical_s_alt]
+ what_do["maxID"] = maxid
+ what_do["land_gridids"] = list[land_gridids]
+ else:
+ # Use provided template
+ template = sys.argv[1]
+ # JIMMY I added the part after the ,
+ what_do = {"template":template, "model":'nra2'}
+
+ # Parse definitions.
+ readit = Read_MCMS(**what_do)
+
+
+ # See if request something other than everything.
+ readit.check_time()
+ readit.check_place()
+
+ if readit.include_stormy:
+ # Need to read in_file to extract stormy gridids
+ readit.fetch_stormy()
+
+ # Read center file
+ readit.fetch_centers()
+
+ if readit.detail_tracks:
+ # Issue Warning!
+ if tail in tails[2:]:
+ warning = ("WARNING: detail_tracks requires that",
+ "in_file contains tracked centers!")
+ import sys; sys.exit(warning)
+ # Save tracks dbase to out_file
+ readit.save_tracks()
+
+ if readit.as_tracks:
+ readit.dump_tracks()
diff --git a/diagnostics/etc_composites/util/tracker/resort_v4.py b/diagnostics/etc_composites/util/tracker/resort_v4.py
new file mode 100755
index 000000000..73d2c1e9f
--- /dev/null
+++ b/diagnostics/etc_composites/util/tracker/resort_v4.py
@@ -0,0 +1,21 @@
+def resort(infile,strip_read,jd_key,cf):
+ # Re-read the tracks/discards file and sort its records by Julian date,
+ # since tracking may have left them out of chronological order.
+ read_file = open(infile,"r")
+ centers = []
+ centers_append = centers.append  # bound-method lookup hoisted out of the loop
+ for line in read_file:
+ # Parse one record via the caller-supplied reader
+ fnc = strip_read(line)
+ centers_append(fnc)
+ read_file.close()
+ # Sort by Julian date
+ centers.sort(key=jd_key)
+ # Dump to a file, overwriting the original!
+ save_file = open(infile,"w")
+ for center in centers:
+ msg = cf % (center[0],center[1],center[2],center[3],center[4],
+ center[5],center[6],center[7],center[8],center[9],
+ center[10],center[11],center[12],center[13],center[14],center[15])
+ save_file.writelines(msg)
+ save_file.close()
diff --git a/diagnostics/etc_composites/util/tracker/rhumb_line_nav_v4.c b/diagnostics/etc_composites/util/tracker/rhumb_line_nav_v4.c
new file mode 100755
index 000000000..e7b0713ae
--- /dev/null
+++ b/diagnostics/etc_composites/util/tracker/rhumb_line_nav_v4.c
@@ -0,0 +1,3299 @@
+/* Generated by Cython 0.28.2 */
+
+/* BEGIN: Cython Metadata
+{
+ "distutils": {
+ "depends": [],
+ "name": "rhumb_line_nav_v4",
+ "sources": [
+ "rhumb_line_nav_v4.pyx"
+ ]
+ },
+ "module_name": "rhumb_line_nav_v4"
+}
+END: Cython Metadata */
+
+#define PY_SSIZE_T_CLEAN
+#include "Python.h"
+#ifndef Py_PYTHON_H
+ #error Python headers needed to compile C extensions, please install development version of Python.
+#elif PY_VERSION_HEX < 0x02060000 || (0x03000000 <= PY_VERSION_HEX && PY_VERSION_HEX < 0x03030000)
+ #error Cython requires Python 2.6+ or Python 3.3+.
+#else
+#define CYTHON_ABI "0_28_2"
+#define CYTHON_FUTURE_DIVISION 0
+#include
+#ifndef offsetof
+ #define offsetof(type, member) ( (size_t) & ((type*)0) -> member )
+#endif
+#if !defined(WIN32) && !defined(MS_WINDOWS)
+ #ifndef __stdcall
+ #define __stdcall
+ #endif
+ #ifndef __cdecl
+ #define __cdecl
+ #endif
+ #ifndef __fastcall
+ #define __fastcall
+ #endif
+#endif
+#ifndef DL_IMPORT
+ #define DL_IMPORT(t) t
+#endif
+#ifndef DL_EXPORT
+ #define DL_EXPORT(t) t
+#endif
+#define __PYX_COMMA ,
+#ifndef HAVE_LONG_LONG
+ #if PY_VERSION_HEX >= 0x02070000
+ #define HAVE_LONG_LONG
+ #endif
+#endif
+#ifndef PY_LONG_LONG
+ #define PY_LONG_LONG LONG_LONG
+#endif
+#ifndef Py_HUGE_VAL
+ #define Py_HUGE_VAL HUGE_VAL
+#endif
+#ifdef PYPY_VERSION
+ #define CYTHON_COMPILING_IN_PYPY 1
+ #define CYTHON_COMPILING_IN_PYSTON 0
+ #define CYTHON_COMPILING_IN_CPYTHON 0
+ #undef CYTHON_USE_TYPE_SLOTS
+ #define CYTHON_USE_TYPE_SLOTS 0
+ #undef CYTHON_USE_PYTYPE_LOOKUP
+ #define CYTHON_USE_PYTYPE_LOOKUP 0
+ #if PY_VERSION_HEX < 0x03050000
+ #undef CYTHON_USE_ASYNC_SLOTS
+ #define CYTHON_USE_ASYNC_SLOTS 0
+ #elif !defined(CYTHON_USE_ASYNC_SLOTS)
+ #define CYTHON_USE_ASYNC_SLOTS 1
+ #endif
+ #undef CYTHON_USE_PYLIST_INTERNALS
+ #define CYTHON_USE_PYLIST_INTERNALS 0
+ #undef CYTHON_USE_UNICODE_INTERNALS
+ #define CYTHON_USE_UNICODE_INTERNALS 0
+ #undef CYTHON_USE_UNICODE_WRITER
+ #define CYTHON_USE_UNICODE_WRITER 0
+ #undef CYTHON_USE_PYLONG_INTERNALS
+ #define CYTHON_USE_PYLONG_INTERNALS 0
+ #undef CYTHON_AVOID_BORROWED_REFS
+ #define CYTHON_AVOID_BORROWED_REFS 1
+ #undef CYTHON_ASSUME_SAFE_MACROS
+ #define CYTHON_ASSUME_SAFE_MACROS 0
+ #undef CYTHON_UNPACK_METHODS
+ #define CYTHON_UNPACK_METHODS 0
+ #undef CYTHON_FAST_THREAD_STATE
+ #define CYTHON_FAST_THREAD_STATE 0
+ #undef CYTHON_FAST_PYCALL
+ #define CYTHON_FAST_PYCALL 0
+ #undef CYTHON_PEP489_MULTI_PHASE_INIT
+ #define CYTHON_PEP489_MULTI_PHASE_INIT 0
+ #undef CYTHON_USE_TP_FINALIZE
+ #define CYTHON_USE_TP_FINALIZE 0
+#elif defined(PYSTON_VERSION)
+ #define CYTHON_COMPILING_IN_PYPY 0
+ #define CYTHON_COMPILING_IN_PYSTON 1
+ #define CYTHON_COMPILING_IN_CPYTHON 0
+ #ifndef CYTHON_USE_TYPE_SLOTS
+ #define CYTHON_USE_TYPE_SLOTS 1
+ #endif
+ #undef CYTHON_USE_PYTYPE_LOOKUP
+ #define CYTHON_USE_PYTYPE_LOOKUP 0
+ #undef CYTHON_USE_ASYNC_SLOTS
+ #define CYTHON_USE_ASYNC_SLOTS 0
+ #undef CYTHON_USE_PYLIST_INTERNALS
+ #define CYTHON_USE_PYLIST_INTERNALS 0
+ #ifndef CYTHON_USE_UNICODE_INTERNALS
+ #define CYTHON_USE_UNICODE_INTERNALS 1
+ #endif
+ #undef CYTHON_USE_UNICODE_WRITER
+ #define CYTHON_USE_UNICODE_WRITER 0
+ #undef CYTHON_USE_PYLONG_INTERNALS
+ #define CYTHON_USE_PYLONG_INTERNALS 0
+ #ifndef CYTHON_AVOID_BORROWED_REFS
+ #define CYTHON_AVOID_BORROWED_REFS 0
+ #endif
+ #ifndef CYTHON_ASSUME_SAFE_MACROS
+ #define CYTHON_ASSUME_SAFE_MACROS 1
+ #endif
+ #ifndef CYTHON_UNPACK_METHODS
+ #define CYTHON_UNPACK_METHODS 1
+ #endif
+ #undef CYTHON_FAST_THREAD_STATE
+ #define CYTHON_FAST_THREAD_STATE 0
+ #undef CYTHON_FAST_PYCALL
+ #define CYTHON_FAST_PYCALL 0
+ #undef CYTHON_PEP489_MULTI_PHASE_INIT
+ #define CYTHON_PEP489_MULTI_PHASE_INIT 0
+ #undef CYTHON_USE_TP_FINALIZE
+ #define CYTHON_USE_TP_FINALIZE 0
+#else
+ #define CYTHON_COMPILING_IN_PYPY 0
+ #define CYTHON_COMPILING_IN_PYSTON 0
+ #define CYTHON_COMPILING_IN_CPYTHON 1
+ #ifndef CYTHON_USE_TYPE_SLOTS
+ #define CYTHON_USE_TYPE_SLOTS 1
+ #endif
+ #if PY_VERSION_HEX < 0x02070000
+ #undef CYTHON_USE_PYTYPE_LOOKUP
+ #define CYTHON_USE_PYTYPE_LOOKUP 0
+ #elif !defined(CYTHON_USE_PYTYPE_LOOKUP)
+ #define CYTHON_USE_PYTYPE_LOOKUP 1
+ #endif
+ #if PY_MAJOR_VERSION < 3
+ #undef CYTHON_USE_ASYNC_SLOTS
+ #define CYTHON_USE_ASYNC_SLOTS 0
+ #elif !defined(CYTHON_USE_ASYNC_SLOTS)
+ #define CYTHON_USE_ASYNC_SLOTS 1
+ #endif
+ #if PY_VERSION_HEX < 0x02070000
+ #undef CYTHON_USE_PYLONG_INTERNALS
+ #define CYTHON_USE_PYLONG_INTERNALS 0
+ #elif !defined(CYTHON_USE_PYLONG_INTERNALS)
+ #define CYTHON_USE_PYLONG_INTERNALS 1
+ #endif
+ #ifndef CYTHON_USE_PYLIST_INTERNALS
+ #define CYTHON_USE_PYLIST_INTERNALS 1
+ #endif
+ #ifndef CYTHON_USE_UNICODE_INTERNALS
+ #define CYTHON_USE_UNICODE_INTERNALS 1
+ #endif
+ #if PY_VERSION_HEX < 0x030300F0
+ #undef CYTHON_USE_UNICODE_WRITER
+ #define CYTHON_USE_UNICODE_WRITER 0
+ #elif !defined(CYTHON_USE_UNICODE_WRITER)
+ #define CYTHON_USE_UNICODE_WRITER 1
+ #endif
+ #ifndef CYTHON_AVOID_BORROWED_REFS
+ #define CYTHON_AVOID_BORROWED_REFS 0
+ #endif
+ #ifndef CYTHON_ASSUME_SAFE_MACROS
+ #define CYTHON_ASSUME_SAFE_MACROS 1
+ #endif
+ #ifndef CYTHON_UNPACK_METHODS
+ #define CYTHON_UNPACK_METHODS 1
+ #endif
+ #ifndef CYTHON_FAST_THREAD_STATE
+ #define CYTHON_FAST_THREAD_STATE 1
+ #endif
+ #ifndef CYTHON_FAST_PYCALL
+ #define CYTHON_FAST_PYCALL 1
+ #endif
+ #ifndef CYTHON_PEP489_MULTI_PHASE_INIT
+ #define CYTHON_PEP489_MULTI_PHASE_INIT (0 && PY_VERSION_HEX >= 0x03050000)
+ #endif
+ #ifndef CYTHON_USE_TP_FINALIZE
+ #define CYTHON_USE_TP_FINALIZE (PY_VERSION_HEX >= 0x030400a1)
+ #endif
+#endif
+#if !defined(CYTHON_FAST_PYCCALL)
+#define CYTHON_FAST_PYCCALL (CYTHON_FAST_PYCALL && PY_VERSION_HEX >= 0x030600B1)
+#endif
+#if CYTHON_USE_PYLONG_INTERNALS
+ #include "longintrepr.h"
+ #undef SHIFT
+ #undef BASE
+ #undef MASK
+#endif
+#ifndef __has_attribute
+ #define __has_attribute(x) 0
+#endif
+#ifndef __has_cpp_attribute
+ #define __has_cpp_attribute(x) 0
+#endif
+#ifndef CYTHON_RESTRICT
+ #if defined(__GNUC__)
+ #define CYTHON_RESTRICT __restrict__
+ #elif defined(_MSC_VER) && _MSC_VER >= 1400
+ #define CYTHON_RESTRICT __restrict
+ #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L
+ #define CYTHON_RESTRICT restrict
+ #else
+ #define CYTHON_RESTRICT
+ #endif
+#endif
+#ifndef CYTHON_UNUSED
+# if defined(__GNUC__)
+# if !(defined(__cplusplus)) || (__GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ >= 4))
+# define CYTHON_UNUSED __attribute__ ((__unused__))
+# else
+# define CYTHON_UNUSED
+# endif
+# elif defined(__ICC) || (defined(__INTEL_COMPILER) && !defined(_MSC_VER))
+# define CYTHON_UNUSED __attribute__ ((__unused__))
+# else
+# define CYTHON_UNUSED
+# endif
+#endif
+#ifndef CYTHON_MAYBE_UNUSED_VAR
+# if defined(__cplusplus)
+ template void CYTHON_MAYBE_UNUSED_VAR( const T& ) { }
+# else
+# define CYTHON_MAYBE_UNUSED_VAR(x) (void)(x)
+# endif
+#endif
+#ifndef CYTHON_NCP_UNUSED
+# if CYTHON_COMPILING_IN_CPYTHON
+# define CYTHON_NCP_UNUSED
+# else
+# define CYTHON_NCP_UNUSED CYTHON_UNUSED
+# endif
+#endif
+#define __Pyx_void_to_None(void_result) ((void)(void_result), Py_INCREF(Py_None), Py_None)
+#ifdef _MSC_VER
+ #ifndef _MSC_STDINT_H_
+ #if _MSC_VER < 1300
+ typedef unsigned char uint8_t;
+ typedef unsigned int uint32_t;
+ #else
+ typedef unsigned __int8 uint8_t;
+ typedef unsigned __int32 uint32_t;
+ #endif
+ #endif
+#else
+ #include
+#endif
+#ifndef CYTHON_FALLTHROUGH
+ #if defined(__cplusplus) && __cplusplus >= 201103L
+ #if __has_cpp_attribute(fallthrough)
+ #define CYTHON_FALLTHROUGH [[fallthrough]]
+ #elif __has_cpp_attribute(clang::fallthrough)
+ #define CYTHON_FALLTHROUGH [[clang::fallthrough]]
+ #elif __has_cpp_attribute(gnu::fallthrough)
+ #define CYTHON_FALLTHROUGH [[gnu::fallthrough]]
+ #endif
+ #endif
+ #ifndef CYTHON_FALLTHROUGH
+ #if __has_attribute(fallthrough)
+ #define CYTHON_FALLTHROUGH __attribute__((fallthrough))
+ #else
+ #define CYTHON_FALLTHROUGH
+ #endif
+ #endif
+ #if defined(__clang__ ) && defined(__apple_build_version__)
+ #if __apple_build_version__ < 7000000
+ #undef CYTHON_FALLTHROUGH
+ #define CYTHON_FALLTHROUGH
+ #endif
+ #endif
+#endif
+
+#ifndef CYTHON_INLINE
+ #if defined(__clang__)
+ #define CYTHON_INLINE __inline__ __attribute__ ((__unused__))
+ #elif defined(__GNUC__)
+ #define CYTHON_INLINE __inline__
+ #elif defined(_MSC_VER)
+ #define CYTHON_INLINE __inline
+ #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L
+ #define CYTHON_INLINE inline
+ #else
+ #define CYTHON_INLINE
+ #endif
+#endif
+
+#if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX < 0x02070600 && !defined(Py_OptimizeFlag)
+ #define Py_OptimizeFlag 0
+#endif
+#define __PYX_BUILD_PY_SSIZE_T "n"
+#define CYTHON_FORMAT_SSIZE_T "z"
+#if PY_MAJOR_VERSION < 3
+ #define __Pyx_BUILTIN_MODULE_NAME "__builtin__"
+ #define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\
+ PyCode_New(a+k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)
+ #define __Pyx_DefaultClassType PyClass_Type
+#else
+ #define __Pyx_BUILTIN_MODULE_NAME "builtins"
+ #define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\
+ PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)
+ #define __Pyx_DefaultClassType PyType_Type
+#endif
+#ifndef Py_TPFLAGS_CHECKTYPES
+ #define Py_TPFLAGS_CHECKTYPES 0
+#endif
+#ifndef Py_TPFLAGS_HAVE_INDEX
+ #define Py_TPFLAGS_HAVE_INDEX 0
+#endif
+#ifndef Py_TPFLAGS_HAVE_NEWBUFFER
+ #define Py_TPFLAGS_HAVE_NEWBUFFER 0
+#endif
+#ifndef Py_TPFLAGS_HAVE_FINALIZE
+ #define Py_TPFLAGS_HAVE_FINALIZE 0
+#endif
+#if PY_VERSION_HEX <= 0x030700A3 || !defined(METH_FASTCALL)
+ #ifndef METH_FASTCALL
+ #define METH_FASTCALL 0x80
+ #endif
+ typedef PyObject *(*__Pyx_PyCFunctionFast) (PyObject *self, PyObject *const *args, Py_ssize_t nargs);
+ typedef PyObject *(*__Pyx_PyCFunctionFastWithKeywords) (PyObject *self, PyObject *const *args,
+ Py_ssize_t nargs, PyObject *kwnames);
+#else
+ #define __Pyx_PyCFunctionFast _PyCFunctionFast
+ #define __Pyx_PyCFunctionFastWithKeywords _PyCFunctionFastWithKeywords
+#endif
+#if CYTHON_FAST_PYCCALL
+#define __Pyx_PyFastCFunction_Check(func)\
+ ((PyCFunction_Check(func) && (METH_FASTCALL == (PyCFunction_GET_FLAGS(func) & ~(METH_CLASS | METH_STATIC | METH_COEXIST | METH_KEYWORDS)))))
+#else
+#define __Pyx_PyFastCFunction_Check(func) 0
+#endif
+#if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Malloc)
+ #define PyObject_Malloc(s) PyMem_Malloc(s)
+ #define PyObject_Free(p) PyMem_Free(p)
+ #define PyObject_Realloc(p) PyMem_Realloc(p)
+#endif
+#if CYTHON_COMPILING_IN_PYSTON
+ #define __Pyx_PyCode_HasFreeVars(co) PyCode_HasFreeVars(co)
+ #define __Pyx_PyFrame_SetLineNumber(frame, lineno) PyFrame_SetLineNumber(frame, lineno)
+#else
+ #define __Pyx_PyCode_HasFreeVars(co) (PyCode_GetNumFree(co) > 0)
+ #define __Pyx_PyFrame_SetLineNumber(frame, lineno) (frame)->f_lineno = (lineno)
+#endif
+#if !CYTHON_FAST_THREAD_STATE || PY_VERSION_HEX < 0x02070000
+ #define __Pyx_PyThreadState_Current PyThreadState_GET()
+#elif PY_VERSION_HEX >= 0x03060000
+ #define __Pyx_PyThreadState_Current _PyThreadState_UncheckedGet()
+#elif PY_VERSION_HEX >= 0x03000000
+ #define __Pyx_PyThreadState_Current PyThreadState_GET()
+#else
+ #define __Pyx_PyThreadState_Current _PyThreadState_Current
+#endif
+#if PY_VERSION_HEX < 0x030700A2 && !defined(PyThread_tss_create) && !defined(Py_tss_NEEDS_INIT)
+#include "pythread.h"
+#define Py_tss_NEEDS_INIT 0
+typedef int Py_tss_t;
+static CYTHON_INLINE int PyThread_tss_create(Py_tss_t *key) {
+ *key = PyThread_create_key();
+ return 0; // PyThread_create_key reports success always
+}
+static CYTHON_INLINE Py_tss_t * PyThread_tss_alloc(void) {
+ Py_tss_t *key = (Py_tss_t *)PyObject_Malloc(sizeof(Py_tss_t));
+ *key = Py_tss_NEEDS_INIT;
+ return key;
+}
+static CYTHON_INLINE void PyThread_tss_free(Py_tss_t *key) {
+ PyObject_Free(key);
+}
+static CYTHON_INLINE int PyThread_tss_is_created(Py_tss_t *key) {
+ return *key != Py_tss_NEEDS_INIT;
+}
+static CYTHON_INLINE void PyThread_tss_delete(Py_tss_t *key) {
+ PyThread_delete_key(*key);
+ *key = Py_tss_NEEDS_INIT;
+}
+static CYTHON_INLINE int PyThread_tss_set(Py_tss_t *key, void *value) {
+ return PyThread_set_key_value(*key, value);
+}
+static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) {
+ return PyThread_get_key_value(*key);
+}
+#endif // TSS (Thread Specific Storage) API
+#if CYTHON_COMPILING_IN_CPYTHON || defined(_PyDict_NewPresized)
+#define __Pyx_PyDict_NewPresized(n) ((n <= 8) ? PyDict_New() : _PyDict_NewPresized(n))
+#else
+#define __Pyx_PyDict_NewPresized(n) PyDict_New()
+#endif
+#if PY_MAJOR_VERSION >= 3 || CYTHON_FUTURE_DIVISION
+ #define __Pyx_PyNumber_Divide(x,y) PyNumber_TrueDivide(x,y)
+ #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceTrueDivide(x,y)
+#else
+ #define __Pyx_PyNumber_Divide(x,y) PyNumber_Divide(x,y)
+ #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceDivide(x,y)
+#endif
+#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030500A1 && CYTHON_USE_UNICODE_INTERNALS
+#define __Pyx_PyDict_GetItemStr(dict, name) _PyDict_GetItem_KnownHash(dict, name, ((PyASCIIObject *) name)->hash)
+#else
+#define __Pyx_PyDict_GetItemStr(dict, name) PyDict_GetItem(dict, name)
+#endif
+#if PY_VERSION_HEX > 0x03030000 && defined(PyUnicode_KIND)
+ #define CYTHON_PEP393_ENABLED 1
+ #define __Pyx_PyUnicode_READY(op) (likely(PyUnicode_IS_READY(op)) ?\
+ 0 : _PyUnicode_Ready((PyObject *)(op)))
+ #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_LENGTH(u)
+ #define __Pyx_PyUnicode_READ_CHAR(u, i) PyUnicode_READ_CHAR(u, i)
+ #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) PyUnicode_MAX_CHAR_VALUE(u)
+ #define __Pyx_PyUnicode_KIND(u) PyUnicode_KIND(u)
+ #define __Pyx_PyUnicode_DATA(u) PyUnicode_DATA(u)
+ #define __Pyx_PyUnicode_READ(k, d, i) PyUnicode_READ(k, d, i)
+ #define __Pyx_PyUnicode_WRITE(k, d, i, ch) PyUnicode_WRITE(k, d, i, ch)
+ #define __Pyx_PyUnicode_IS_TRUE(u) (0 != (likely(PyUnicode_IS_READY(u)) ? PyUnicode_GET_LENGTH(u) : PyUnicode_GET_SIZE(u)))
+#else
+ #define CYTHON_PEP393_ENABLED 0
+ #define PyUnicode_1BYTE_KIND 1
+ #define PyUnicode_2BYTE_KIND 2
+ #define PyUnicode_4BYTE_KIND 4
+ #define __Pyx_PyUnicode_READY(op) (0)
+ #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_SIZE(u)
+ #define __Pyx_PyUnicode_READ_CHAR(u, i) ((Py_UCS4)(PyUnicode_AS_UNICODE(u)[i]))
+ #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) ((sizeof(Py_UNICODE) == 2) ? 65535 : 1114111)
+ #define __Pyx_PyUnicode_KIND(u) (sizeof(Py_UNICODE))
+ #define __Pyx_PyUnicode_DATA(u) ((void*)PyUnicode_AS_UNICODE(u))
+ #define __Pyx_PyUnicode_READ(k, d, i) ((void)(k), (Py_UCS4)(((Py_UNICODE*)d)[i]))
+ #define __Pyx_PyUnicode_WRITE(k, d, i, ch) (((void)(k)), ((Py_UNICODE*)d)[i] = ch)
+ #define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GET_SIZE(u))
+#endif
+#if CYTHON_COMPILING_IN_PYPY
+ #define __Pyx_PyUnicode_Concat(a, b) PyNumber_Add(a, b)
+ #define __Pyx_PyUnicode_ConcatSafe(a, b) PyNumber_Add(a, b)
+#else
+ #define __Pyx_PyUnicode_Concat(a, b) PyUnicode_Concat(a, b)
+ #define __Pyx_PyUnicode_ConcatSafe(a, b) ((unlikely((a) == Py_None) || unlikely((b) == Py_None)) ?\
+ PyNumber_Add(a, b) : __Pyx_PyUnicode_Concat(a, b))
+#endif
+#if CYTHON_COMPILING_IN_PYPY && !defined(PyUnicode_Contains)
+ #define PyUnicode_Contains(u, s) PySequence_Contains(u, s)
+#endif
+#if CYTHON_COMPILING_IN_PYPY && !defined(PyByteArray_Check)
+ #define PyByteArray_Check(obj) PyObject_TypeCheck(obj, &PyByteArray_Type)
+#endif
+#if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Format)
+ #define PyObject_Format(obj, fmt) PyObject_CallMethod(obj, "__format__", "O", fmt)
+#endif
+#define __Pyx_PyString_FormatSafe(a, b) ((unlikely((a) == Py_None)) ? PyNumber_Remainder(a, b) : __Pyx_PyString_Format(a, b))
+#define __Pyx_PyUnicode_FormatSafe(a, b) ((unlikely((a) == Py_None)) ? PyNumber_Remainder(a, b) : PyUnicode_Format(a, b))
+#if PY_MAJOR_VERSION >= 3
+ #define __Pyx_PyString_Format(a, b) PyUnicode_Format(a, b)
+#else
+ #define __Pyx_PyString_Format(a, b) PyString_Format(a, b)
+#endif
+#if PY_MAJOR_VERSION < 3 && !defined(PyObject_ASCII)
+ #define PyObject_ASCII(o) PyObject_Repr(o)
+#endif
+#if PY_MAJOR_VERSION >= 3
+ #define PyBaseString_Type PyUnicode_Type
+ #define PyStringObject PyUnicodeObject
+ #define PyString_Type PyUnicode_Type
+ #define PyString_Check PyUnicode_Check
+ #define PyString_CheckExact PyUnicode_CheckExact
+#endif
+#if PY_MAJOR_VERSION >= 3
+ #define __Pyx_PyBaseString_Check(obj) PyUnicode_Check(obj)
+ #define __Pyx_PyBaseString_CheckExact(obj) PyUnicode_CheckExact(obj)
+#else
+ #define __Pyx_PyBaseString_Check(obj) (PyString_Check(obj) || PyUnicode_Check(obj))
+ #define __Pyx_PyBaseString_CheckExact(obj) (PyString_CheckExact(obj) || PyUnicode_CheckExact(obj))
+#endif
+#ifndef PySet_CheckExact
+ #define PySet_CheckExact(obj) (Py_TYPE(obj) == &PySet_Type)
+#endif
+#if CYTHON_ASSUME_SAFE_MACROS
+ #define __Pyx_PySequence_SIZE(seq) Py_SIZE(seq)
+#else
+ #define __Pyx_PySequence_SIZE(seq) PySequence_Size(seq)
+#endif
+#if PY_MAJOR_VERSION >= 3
+ #define PyIntObject PyLongObject
+ #define PyInt_Type PyLong_Type
+ #define PyInt_Check(op) PyLong_Check(op)
+ #define PyInt_CheckExact(op) PyLong_CheckExact(op)
+ #define PyInt_FromString PyLong_FromString
+ #define PyInt_FromUnicode PyLong_FromUnicode
+ #define PyInt_FromLong PyLong_FromLong
+ #define PyInt_FromSize_t PyLong_FromSize_t
+ #define PyInt_FromSsize_t PyLong_FromSsize_t
+ #define PyInt_AsLong PyLong_AsLong
+ #define PyInt_AS_LONG PyLong_AS_LONG
+ #define PyInt_AsSsize_t PyLong_AsSsize_t
+ #define PyInt_AsUnsignedLongMask PyLong_AsUnsignedLongMask
+ #define PyInt_AsUnsignedLongLongMask PyLong_AsUnsignedLongLongMask
+ #define PyNumber_Int PyNumber_Long
+#endif
+#if PY_MAJOR_VERSION >= 3
+ #define PyBoolObject PyLongObject
+#endif
+#if PY_MAJOR_VERSION >= 3 && CYTHON_COMPILING_IN_PYPY
+ #ifndef PyUnicode_InternFromString
+ #define PyUnicode_InternFromString(s) PyUnicode_FromString(s)
+ #endif
+#endif
+#if PY_VERSION_HEX < 0x030200A4
+ typedef long Py_hash_t;
+ #define __Pyx_PyInt_FromHash_t PyInt_FromLong
+ #define __Pyx_PyInt_AsHash_t PyInt_AsLong
+#else
+ #define __Pyx_PyInt_FromHash_t PyInt_FromSsize_t
+ #define __Pyx_PyInt_AsHash_t PyInt_AsSsize_t
+#endif
+#if PY_MAJOR_VERSION >= 3
+ #define __Pyx_PyMethod_New(func, self, klass) ((self) ? PyMethod_New(func, self) : (Py_INCREF(func), func))
+#else
+ #define __Pyx_PyMethod_New(func, self, klass) PyMethod_New(func, self, klass)
+#endif
+#if CYTHON_USE_ASYNC_SLOTS
+ #if PY_VERSION_HEX >= 0x030500B1
+ #define __Pyx_PyAsyncMethodsStruct PyAsyncMethods
+ #define __Pyx_PyType_AsAsync(obj) (Py_TYPE(obj)->tp_as_async)
+ #else
+ #define __Pyx_PyType_AsAsync(obj) ((__Pyx_PyAsyncMethodsStruct*) (Py_TYPE(obj)->tp_reserved))
+ #endif
+#else
+ #define __Pyx_PyType_AsAsync(obj) NULL
+#endif
+#ifndef __Pyx_PyAsyncMethodsStruct
+ typedef struct {
+ unaryfunc am_await;
+ unaryfunc am_aiter;
+ unaryfunc am_anext;
+ } __Pyx_PyAsyncMethodsStruct;
+#endif
+
+#if defined(WIN32) || defined(MS_WINDOWS)
+ #define _USE_MATH_DEFINES
+#endif
+#include
+#ifdef NAN
+#define __PYX_NAN() ((float) NAN)
+#else
+static CYTHON_INLINE float __PYX_NAN() {
+ float value;
+ memset(&value, 0xFF, sizeof(value));
+ return value;
+}
+#endif
+#if defined(__CYGWIN__) && defined(_LDBL_EQ_DBL)
+#define __Pyx_truncl trunc
+#else
+#define __Pyx_truncl truncl
+#endif
+
+
+#define __PYX_ERR(f_index, lineno, Ln_error) \
+{ \
+ __pyx_filename = __pyx_f[f_index]; __pyx_lineno = lineno; __pyx_clineno = __LINE__; goto Ln_error; \
+}
+
+#ifndef __PYX_EXTERN_C
+ #ifdef __cplusplus
+ #define __PYX_EXTERN_C extern "C"
+ #else
+ #define __PYX_EXTERN_C extern
+ #endif
+#endif
+
+#define __PYX_HAVE__rhumb_line_nav_v4
+#define __PYX_HAVE_API__rhumb_line_nav_v4
+/* Early includes */
+#include "math.h"
+#ifdef _OPENMP
+#include
+#endif /* _OPENMP */
+
+#if defined(PYREX_WITHOUT_ASSERTIONS) && !defined(CYTHON_WITHOUT_ASSERTIONS)
+#define CYTHON_WITHOUT_ASSERTIONS
+#endif
+
+typedef struct {PyObject **p; const char *s; const Py_ssize_t n; const char* encoding;
+ const char is_unicode; const char is_str; const char intern; } __Pyx_StringTabEntry;
+
+#define __PYX_DEFAULT_STRING_ENCODING_IS_ASCII 0
+#define __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT 0
+#define __PYX_DEFAULT_STRING_ENCODING ""
+#define __Pyx_PyObject_FromString __Pyx_PyBytes_FromString
+#define __Pyx_PyObject_FromStringAndSize __Pyx_PyBytes_FromStringAndSize
+#define __Pyx_uchar_cast(c) ((unsigned char)c)
+#define __Pyx_long_cast(x) ((long)x)
+#define __Pyx_fits_Py_ssize_t(v, type, is_signed) (\
+ (sizeof(type) < sizeof(Py_ssize_t)) ||\
+ (sizeof(type) > sizeof(Py_ssize_t) &&\
+ likely(v < (type)PY_SSIZE_T_MAX ||\
+ v == (type)PY_SSIZE_T_MAX) &&\
+ (!is_signed || likely(v > (type)PY_SSIZE_T_MIN ||\
+ v == (type)PY_SSIZE_T_MIN))) ||\
+ (sizeof(type) == sizeof(Py_ssize_t) &&\
+ (is_signed || likely(v < (type)PY_SSIZE_T_MAX ||\
+ v == (type)PY_SSIZE_T_MAX))) )
+#if defined (__cplusplus) && __cplusplus >= 201103L
+ #include
+ #define __Pyx_sst_abs(value) std::abs(value)
+#elif SIZEOF_INT >= SIZEOF_SIZE_T
+ #define __Pyx_sst_abs(value) abs(value)
+#elif SIZEOF_LONG >= SIZEOF_SIZE_T
+ #define __Pyx_sst_abs(value) labs(value)
+#elif defined (_MSC_VER)
+ #define __Pyx_sst_abs(value) ((Py_ssize_t)_abs64(value))
+#elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L
+ #define __Pyx_sst_abs(value) llabs(value)
+#elif defined (__GNUC__)
+ #define __Pyx_sst_abs(value) __builtin_llabs(value)
+#else
+ #define __Pyx_sst_abs(value) ((value<0) ? -value : value)
+#endif
+static CYTHON_INLINE const char* __Pyx_PyObject_AsString(PyObject*);
+static CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject*, Py_ssize_t* length);
+#define __Pyx_PyByteArray_FromString(s) PyByteArray_FromStringAndSize((const char*)s, strlen((const char*)s))
+#define __Pyx_PyByteArray_FromStringAndSize(s, l) PyByteArray_FromStringAndSize((const char*)s, l)
+#define __Pyx_PyBytes_FromString PyBytes_FromString
+#define __Pyx_PyBytes_FromStringAndSize PyBytes_FromStringAndSize
+static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char*);
+#if PY_MAJOR_VERSION < 3
+ #define __Pyx_PyStr_FromString __Pyx_PyBytes_FromString
+ #define __Pyx_PyStr_FromStringAndSize __Pyx_PyBytes_FromStringAndSize
+#else
+ #define __Pyx_PyStr_FromString __Pyx_PyUnicode_FromString
+ #define __Pyx_PyStr_FromStringAndSize __Pyx_PyUnicode_FromStringAndSize
+#endif
+#define __Pyx_PyBytes_AsWritableString(s) ((char*) PyBytes_AS_STRING(s))
+#define __Pyx_PyBytes_AsWritableSString(s) ((signed char*) PyBytes_AS_STRING(s))
+#define __Pyx_PyBytes_AsWritableUString(s) ((unsigned char*) PyBytes_AS_STRING(s))
+#define __Pyx_PyBytes_AsString(s) ((const char*) PyBytes_AS_STRING(s))
+#define __Pyx_PyBytes_AsSString(s) ((const signed char*) PyBytes_AS_STRING(s))
+#define __Pyx_PyBytes_AsUString(s) ((const unsigned char*) PyBytes_AS_STRING(s))
+#define __Pyx_PyObject_AsWritableString(s) ((char*) __Pyx_PyObject_AsString(s))
+#define __Pyx_PyObject_AsWritableSString(s) ((signed char*) __Pyx_PyObject_AsString(s))
+#define __Pyx_PyObject_AsWritableUString(s) ((unsigned char*) __Pyx_PyObject_AsString(s))
+#define __Pyx_PyObject_AsSString(s) ((const signed char*) __Pyx_PyObject_AsString(s))
+#define __Pyx_PyObject_AsUString(s) ((const unsigned char*) __Pyx_PyObject_AsString(s))
+#define __Pyx_PyObject_FromCString(s) __Pyx_PyObject_FromString((const char*)s)
+#define __Pyx_PyBytes_FromCString(s) __Pyx_PyBytes_FromString((const char*)s)
+#define __Pyx_PyByteArray_FromCString(s) __Pyx_PyByteArray_FromString((const char*)s)
+#define __Pyx_PyStr_FromCString(s) __Pyx_PyStr_FromString((const char*)s)
+#define __Pyx_PyUnicode_FromCString(s) __Pyx_PyUnicode_FromString((const char*)s)
+static CYTHON_INLINE size_t __Pyx_Py_UNICODE_strlen(const Py_UNICODE *u) {
+ const Py_UNICODE *u_end = u;
+ while (*u_end++) ;
+ return (size_t)(u_end - u - 1);
+}
+#define __Pyx_PyUnicode_FromUnicode(u) PyUnicode_FromUnicode(u, __Pyx_Py_UNICODE_strlen(u))
+#define __Pyx_PyUnicode_FromUnicodeAndLength PyUnicode_FromUnicode
+#define __Pyx_PyUnicode_AsUnicode PyUnicode_AsUnicode
+#define __Pyx_NewRef(obj) (Py_INCREF(obj), obj)
+#define __Pyx_Owned_Py_None(b) __Pyx_NewRef(Py_None)
+#define __Pyx_PyBool_FromLong(b) ((b) ? __Pyx_NewRef(Py_True) : __Pyx_NewRef(Py_False))
+static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject*);
+static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x);
+#define __Pyx_PySequence_Tuple(obj)\
+ (likely(PyTuple_CheckExact(obj)) ? __Pyx_NewRef(obj) : PySequence_Tuple(obj))
+static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject*);
+static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t);
+#if CYTHON_ASSUME_SAFE_MACROS
+#define __pyx_PyFloat_AsDouble(x) (PyFloat_CheckExact(x) ? PyFloat_AS_DOUBLE(x) : PyFloat_AsDouble(x))
+#else
+#define __pyx_PyFloat_AsDouble(x) PyFloat_AsDouble(x)
+#endif
+#define __pyx_PyFloat_AsFloat(x) ((float) __pyx_PyFloat_AsDouble(x))
+#if PY_MAJOR_VERSION >= 3
+#define __Pyx_PyNumber_Int(x) (PyLong_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Long(x))
+#else
+#define __Pyx_PyNumber_Int(x) (PyInt_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Int(x))
+#endif
+#define __Pyx_PyNumber_Float(x) (PyFloat_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Float(x))
+#if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII
+static int __Pyx_sys_getdefaultencoding_not_ascii;
+static int __Pyx_init_sys_getdefaultencoding_params(void) {
+ PyObject* sys;
+ PyObject* default_encoding = NULL;
+ PyObject* ascii_chars_u = NULL;
+ PyObject* ascii_chars_b = NULL;
+ const char* default_encoding_c;
+ sys = PyImport_ImportModule("sys");
+ if (!sys) goto bad;
+ default_encoding = PyObject_CallMethod(sys, (char*) "getdefaultencoding", NULL);
+ Py_DECREF(sys);
+ if (!default_encoding) goto bad;
+ default_encoding_c = PyBytes_AsString(default_encoding);
+ if (!default_encoding_c) goto bad;
+ if (strcmp(default_encoding_c, "ascii") == 0) {
+ __Pyx_sys_getdefaultencoding_not_ascii = 0;
+ } else {
+ char ascii_chars[128];
+ int c;
+ for (c = 0; c < 128; c++) {
+ ascii_chars[c] = c;
+ }
+ __Pyx_sys_getdefaultencoding_not_ascii = 1;
+ ascii_chars_u = PyUnicode_DecodeASCII(ascii_chars, 128, NULL);
+ if (!ascii_chars_u) goto bad;
+ ascii_chars_b = PyUnicode_AsEncodedString(ascii_chars_u, default_encoding_c, NULL);
+ if (!ascii_chars_b || !PyBytes_Check(ascii_chars_b) || memcmp(ascii_chars, PyBytes_AS_STRING(ascii_chars_b), 128) != 0) {
+ PyErr_Format(
+ PyExc_ValueError,
+ "This module compiled with c_string_encoding=ascii, but default encoding '%.200s' is not a superset of ascii.",
+ default_encoding_c);
+ goto bad;
+ }
+ Py_DECREF(ascii_chars_u);
+ Py_DECREF(ascii_chars_b);
+ }
+ Py_DECREF(default_encoding);
+ return 0;
+bad:
+ Py_XDECREF(default_encoding);
+ Py_XDECREF(ascii_chars_u);
+ Py_XDECREF(ascii_chars_b);
+ return -1;
+}
+#endif
+#if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT && PY_MAJOR_VERSION >= 3
+#define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_DecodeUTF8(c_str, size, NULL)
+#else
+#define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_Decode(c_str, size, __PYX_DEFAULT_STRING_ENCODING, NULL)
+#if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT
+static char* __PYX_DEFAULT_STRING_ENCODING;
+static int __Pyx_init_sys_getdefaultencoding_params(void) {
+ PyObject* sys;
+ PyObject* default_encoding = NULL;
+ char* default_encoding_c;
+ sys = PyImport_ImportModule("sys");
+ if (!sys) goto bad;
+ default_encoding = PyObject_CallMethod(sys, (char*) (const char*) "getdefaultencoding", NULL);
+ Py_DECREF(sys);
+ if (!default_encoding) goto bad;
+ default_encoding_c = PyBytes_AsString(default_encoding);
+ if (!default_encoding_c) goto bad;
+ __PYX_DEFAULT_STRING_ENCODING = (char*) malloc(strlen(default_encoding_c));
+ if (!__PYX_DEFAULT_STRING_ENCODING) goto bad;
+ strcpy(__PYX_DEFAULT_STRING_ENCODING, default_encoding_c);
+ Py_DECREF(default_encoding);
+ return 0;
+bad:
+ Py_XDECREF(default_encoding);
+ return -1;
+}
+#endif
+#endif
+
+
+/* Test for GCC > 2.95 */
+#if defined(__GNUC__) && (__GNUC__ > 2 || (__GNUC__ == 2 && (__GNUC_MINOR__ > 95)))
+ #define likely(x) __builtin_expect(!!(x), 1)
+ #define unlikely(x) __builtin_expect(!!(x), 0)
+#else /* !__GNUC__ or GCC < 2.95 */
+ #define likely(x) (x)
+ #define unlikely(x) (x)
+#endif /* __GNUC__ */
+static CYTHON_INLINE void __Pyx_pretend_to_initialize(void* ptr) { (void)ptr; }
+
+static PyObject *__pyx_m = NULL;
+static PyObject *__pyx_d;
+static PyObject *__pyx_b;
+static PyObject *__pyx_cython_runtime;
+static PyObject *__pyx_empty_tuple;
+static PyObject *__pyx_empty_bytes;
+static PyObject *__pyx_empty_unicode;
+static int __pyx_lineno;
+static int __pyx_clineno = 0;
+static const char * __pyx_cfilenm= __FILE__;
+static const char *__pyx_filename;
+
+
+static const char *__pyx_f[] = {
+ "rhumb_line_nav_v4.pyx",
+};
+
+/*--- Type declarations ---*/
+
+/* --- Runtime support code (head) --- */
+/* Refnanny.proto */
+#ifndef CYTHON_REFNANNY
+ #define CYTHON_REFNANNY 0
+#endif
+#if CYTHON_REFNANNY
+ typedef struct {
+ void (*INCREF)(void*, PyObject*, int);
+ void (*DECREF)(void*, PyObject*, int);
+ void (*GOTREF)(void*, PyObject*, int);
+ void (*GIVEREF)(void*, PyObject*, int);
+ void* (*SetupContext)(const char*, int, const char*);
+ void (*FinishContext)(void**);
+ } __Pyx_RefNannyAPIStruct;
+ static __Pyx_RefNannyAPIStruct *__Pyx_RefNanny = NULL;
+ static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname);
+ #define __Pyx_RefNannyDeclarations void *__pyx_refnanny = NULL;
+#ifdef WITH_THREAD
+ #define __Pyx_RefNannySetupContext(name, acquire_gil)\
+ if (acquire_gil) {\
+ PyGILState_STATE __pyx_gilstate_save = PyGILState_Ensure();\
+ __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__);\
+ PyGILState_Release(__pyx_gilstate_save);\
+ } else {\
+ __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__);\
+ }
+#else
+ #define __Pyx_RefNannySetupContext(name, acquire_gil)\
+ __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__)
+#endif
+ #define __Pyx_RefNannyFinishContext()\
+ __Pyx_RefNanny->FinishContext(&__pyx_refnanny)
+ #define __Pyx_INCREF(r) __Pyx_RefNanny->INCREF(__pyx_refnanny, (PyObject *)(r), __LINE__)
+ #define __Pyx_DECREF(r) __Pyx_RefNanny->DECREF(__pyx_refnanny, (PyObject *)(r), __LINE__)
+ #define __Pyx_GOTREF(r) __Pyx_RefNanny->GOTREF(__pyx_refnanny, (PyObject *)(r), __LINE__)
+ #define __Pyx_GIVEREF(r) __Pyx_RefNanny->GIVEREF(__pyx_refnanny, (PyObject *)(r), __LINE__)
+ #define __Pyx_XINCREF(r) do { if((r) != NULL) {__Pyx_INCREF(r); }} while(0)
+ #define __Pyx_XDECREF(r) do { if((r) != NULL) {__Pyx_DECREF(r); }} while(0)
+ #define __Pyx_XGOTREF(r) do { if((r) != NULL) {__Pyx_GOTREF(r); }} while(0)
+ #define __Pyx_XGIVEREF(r) do { if((r) != NULL) {__Pyx_GIVEREF(r);}} while(0)
+#else
+ #define __Pyx_RefNannyDeclarations
+ #define __Pyx_RefNannySetupContext(name, acquire_gil)
+ #define __Pyx_RefNannyFinishContext()
+ #define __Pyx_INCREF(r) Py_INCREF(r)
+ #define __Pyx_DECREF(r) Py_DECREF(r)
+ #define __Pyx_GOTREF(r)
+ #define __Pyx_GIVEREF(r)
+ #define __Pyx_XINCREF(r) Py_XINCREF(r)
+ #define __Pyx_XDECREF(r) Py_XDECREF(r)
+ #define __Pyx_XGOTREF(r)
+ #define __Pyx_XGIVEREF(r)
+#endif
+#define __Pyx_XDECREF_SET(r, v) do {\
+ PyObject *tmp = (PyObject *) r;\
+ r = v; __Pyx_XDECREF(tmp);\
+ } while (0)
+#define __Pyx_DECREF_SET(r, v) do {\
+ PyObject *tmp = (PyObject *) r;\
+ r = v; __Pyx_DECREF(tmp);\
+ } while (0)
+#define __Pyx_CLEAR(r) do { PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);} while(0)
+#define __Pyx_XCLEAR(r) do { if((r) != NULL) {PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);}} while(0)
+
+/* RaiseArgTupleInvalid.proto */
+static void __Pyx_RaiseArgtupleInvalid(const char* func_name, int exact,
+ Py_ssize_t num_min, Py_ssize_t num_max, Py_ssize_t num_found);
+
+/* RaiseDoubleKeywords.proto */
+static void __Pyx_RaiseDoubleKeywordsError(const char* func_name, PyObject* kw_name);
+
+/* ParseKeywords.proto */
+static int __Pyx_ParseOptionalKeywords(PyObject *kwds, PyObject **argnames[],\
+ PyObject *kwds2, PyObject *values[], Py_ssize_t num_pos_args,\
+ const char* function_name);
+
+/* PyFloatBinop.proto */
+#if !CYTHON_COMPILING_IN_PYPY
+static PyObject* __Pyx_PyFloat_SubtractObjC(PyObject *op1, PyObject *op2, double floatval, int inplace);
+#else
+#define __Pyx_PyFloat_SubtractObjC(op1, op2, floatval, inplace)\
+ (inplace ? PyNumber_InPlaceSubtract(op1, op2) : PyNumber_Subtract(op1, op2))
+#endif
+
+/* None.proto */
+static CYTHON_INLINE double __Pyx_mod_double(double, double);
+
+/* PyObjectGetAttrStr.proto */
+#if CYTHON_USE_TYPE_SLOTS
+static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStr(PyObject* obj, PyObject* attr_name);
+#else
+#define __Pyx_PyObject_GetAttrStr(o,n) PyObject_GetAttr(o,n)
+#endif
+
+/* PyThreadStateGet.proto */
+#if CYTHON_FAST_THREAD_STATE
+#define __Pyx_PyThreadState_declare PyThreadState *__pyx_tstate;
+#define __Pyx_PyThreadState_assign __pyx_tstate = __Pyx_PyThreadState_Current;
+#define __Pyx_PyErr_Occurred() __pyx_tstate->curexc_type
+#else
+#define __Pyx_PyThreadState_declare
+#define __Pyx_PyThreadState_assign
+#define __Pyx_PyErr_Occurred() PyErr_Occurred()
+#endif
+
+/* PyErrFetchRestore.proto */
+#if CYTHON_FAST_THREAD_STATE
+#define __Pyx_PyErr_Clear() __Pyx_ErrRestore(NULL, NULL, NULL)
+#define __Pyx_ErrRestoreWithState(type, value, tb) __Pyx_ErrRestoreInState(PyThreadState_GET(), type, value, tb)
+#define __Pyx_ErrFetchWithState(type, value, tb) __Pyx_ErrFetchInState(PyThreadState_GET(), type, value, tb)
+#define __Pyx_ErrRestore(type, value, tb) __Pyx_ErrRestoreInState(__pyx_tstate, type, value, tb)
+#define __Pyx_ErrFetch(type, value, tb) __Pyx_ErrFetchInState(__pyx_tstate, type, value, tb)
+static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb);
+static CYTHON_INLINE void __Pyx_ErrFetchInState(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb);
+#if CYTHON_COMPILING_IN_CPYTHON
+#define __Pyx_PyErr_SetNone(exc) (Py_INCREF(exc), __Pyx_ErrRestore((exc), NULL, NULL))
+#else
+#define __Pyx_PyErr_SetNone(exc) PyErr_SetNone(exc)
+#endif
+#else
+#define __Pyx_PyErr_Clear() PyErr_Clear()
+#define __Pyx_PyErr_SetNone(exc) PyErr_SetNone(exc)
+#define __Pyx_ErrRestoreWithState(type, value, tb) PyErr_Restore(type, value, tb)
+#define __Pyx_ErrFetchWithState(type, value, tb) PyErr_Fetch(type, value, tb)
+#define __Pyx_ErrRestoreInState(tstate, type, value, tb) PyErr_Restore(type, value, tb)
+#define __Pyx_ErrFetchInState(tstate, type, value, tb) PyErr_Fetch(type, value, tb)
+#define __Pyx_ErrRestore(type, value, tb) PyErr_Restore(type, value, tb)
+#define __Pyx_ErrFetch(type, value, tb) PyErr_Fetch(type, value, tb)
+#endif
+
+/* CLineInTraceback.proto */
+#ifdef CYTHON_CLINE_IN_TRACEBACK
+#define __Pyx_CLineForTraceback(tstate, c_line) (((CYTHON_CLINE_IN_TRACEBACK)) ? c_line : 0)
+#else
+static int __Pyx_CLineForTraceback(PyThreadState *tstate, int c_line);
+#endif
+
+/* CodeObjectCache.proto */
+typedef struct {
+ PyCodeObject* code_object;
+ int code_line;
+} __Pyx_CodeObjectCacheEntry;
+struct __Pyx_CodeObjectCache {
+ int count;
+ int max_count;
+ __Pyx_CodeObjectCacheEntry* entries;
+};
+static struct __Pyx_CodeObjectCache __pyx_code_cache = {0,0,NULL};
+static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line);
+static PyCodeObject *__pyx_find_code_object(int code_line);
+static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object);
+
+/* AddTraceback.proto */
+static void __Pyx_AddTraceback(const char *funcname, int c_line,
+ int py_line, const char *filename);
+
+/* CIntToPy.proto */
+static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value);
+
+/* CIntFromPy.proto */
+static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *);
+
+/* CIntFromPy.proto */
+static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *);
+
+/* FastTypeChecks.proto */
+#if CYTHON_COMPILING_IN_CPYTHON
+#define __Pyx_TypeCheck(obj, type) __Pyx_IsSubtype(Py_TYPE(obj), (PyTypeObject *)type)
+static CYTHON_INLINE int __Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b);
+static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches(PyObject *err, PyObject *type);
+static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches2(PyObject *err, PyObject *type1, PyObject *type2);
+#else
+#define __Pyx_TypeCheck(obj, type) PyObject_TypeCheck(obj, (PyTypeObject *)type)
+#define __Pyx_PyErr_GivenExceptionMatches(err, type) PyErr_GivenExceptionMatches(err, type)
+#define __Pyx_PyErr_GivenExceptionMatches2(err, type1, type2) (PyErr_GivenExceptionMatches(err, type1) || PyErr_GivenExceptionMatches(err, type2))
+#endif
+#define __Pyx_PyException_Check(obj) __Pyx_TypeCheck(obj, PyExc_Exception)
+
+/* CheckBinaryVersion.proto */
+static int __Pyx_check_binary_version(void);
+
+/* InitStrings.proto */
+static int __Pyx_InitStrings(__Pyx_StringTabEntry *t);
+
+
+/* Module declarations from 'rhumb_line_nav_v4' */
+#define __Pyx_MODULE_NAME "rhumb_line_nav_v4"
+extern int __pyx_module_is_main_rhumb_line_nav_v4;
+int __pyx_module_is_main_rhumb_line_nav_v4 = 0;
+
+/* Implementation of 'rhumb_line_nav_v4' */
+/* __pyx_k_* are raw C byte strings for every identifier the module touches;
+   the matching __pyx_n_s_* PyObject pointers below are filled in at module
+   init by __Pyx_InitStrings() from __pyx_string_tab (interned name objects). */
+static const char __pyx_k_d[] = "d";
+static const char __pyx_k_q[] = "q";
+static const char __pyx_k_de[] = "de";
+static const char __pyx_k_TOL[] = "TOL";
+static const char __pyx_k_tmp[] = "tmp";
+static const char __pyx_k_dphi[] = "dphi";
+static const char __pyx_k_lat1[] = "lat1";
+static const char __pyx_k_lat2[] = "lat2";
+static const char __pyx_k_lon1[] = "lon1";
+static const char __pyx_k_lon2[] = "lon2";
+static const char __pyx_k_main[] = "__main__";
+static const char __pyx_k_test[] = "__test__";
+static const char __pyx_k_lat1r[] = "lat1r";
+static const char __pyx_k_lat2r[] = "lat2r";
+static const char __pyx_k_lon1r[] = "lon1r";
+static const char __pyx_k_lon2r[] = "lon2r";
+static const char __pyx_k_dlon_e[] = "dlon_e";
+static const char __pyx_k_dlon_w[] = "dlon_w";
+static const char __pyx_k_bearing[] = "bearing";
+static const char __pyx_k_distance[] = "distance";
+static const char __pyx_k_rhumb_line_nav[] = "rhumb_line_nav";
+static const char __pyx_k_rhumb_line_nav_v4[] = "rhumb_line_nav_v4";
+static const char __pyx_k_cline_in_traceback[] = "cline_in_traceback";
+static const char __pyx_k_rhumb_line_nav_v4_pyx[] = "rhumb_line_nav_v4.pyx";
+static PyObject *__pyx_n_s_TOL;
+static PyObject *__pyx_n_s_bearing;
+static PyObject *__pyx_n_s_cline_in_traceback;
+static PyObject *__pyx_n_s_d;
+static PyObject *__pyx_n_s_de;
+static PyObject *__pyx_n_s_distance;
+static PyObject *__pyx_n_s_dlon_e;
+static PyObject *__pyx_n_s_dlon_w;
+static PyObject *__pyx_n_s_dphi;
+static PyObject *__pyx_n_s_lat1;
+static PyObject *__pyx_n_s_lat1r;
+static PyObject *__pyx_n_s_lat2;
+static PyObject *__pyx_n_s_lat2r;
+static PyObject *__pyx_n_s_lon1;
+static PyObject *__pyx_n_s_lon1r;
+static PyObject *__pyx_n_s_lon2;
+static PyObject *__pyx_n_s_lon2r;
+static PyObject *__pyx_n_s_main;
+static PyObject *__pyx_n_s_q;
+static PyObject *__pyx_n_s_rhumb_line_nav;
+static PyObject *__pyx_n_s_rhumb_line_nav_v4;
+static PyObject *__pyx_kp_s_rhumb_line_nav_v4_pyx;
+static PyObject *__pyx_n_s_test;
+static PyObject *__pyx_n_s_tmp;
+/* Forward declaration of the actual implementation; the __pyx_pw_ wrapper
+   below parses args/kwargs and delegates to this. */
+static PyObject *__pyx_pf_17rhumb_line_nav_v4_rhumb_line_nav(CYTHON_UNUSED PyObject *__pyx_self, double __pyx_v_lon2, PyObject *__pyx_v_lat2, PyObject *__pyx_v_lon1, PyObject *__pyx_v_lat1, PyObject *__pyx_v_distance); /* proto */
+/* Cached float constants (created in __Pyx_InitGlobals); 0.0174532925 is the
+   degrees-to-radians factor used in the pyx source. */
+static PyObject *__pyx_float_180_0;
+static PyObject *__pyx_float_360_0;
+static PyObject *__pyx_float_0_0174532925;
+static PyObject *__pyx_tuple_;
+static PyObject *__pyx_codeobj__2;
+/* Late includes */
+
+/* "rhumb_line_nav_v4.pyx":11
+ * double pow(double x, int y)
+ *
+ * def rhumb_line_nav(double lon2,lat2,lon1,lat1,distance=False): # <<<<<<<<<<<<<<
+ * """
+ * The true course between the points (lat1,lon1), (lat2,lon2)
+ */
+
+/* Python wrapper */
+/* Python-visible entry point for rhumb_line_nav(lon2, lat2, lon1, lat1,
+   distance=False). Unpacks positional and keyword arguments (min 4, max 5),
+   converts lon2 to a C double, leaves the rest as PyObject*, then delegates
+   to __pyx_pf_17rhumb_line_nav_v4_rhumb_line_nav. Generated by Cython from
+   rhumb_line_nav_v4.pyx:11 — change the .pyx, not this file. */
+static PyObject *__pyx_pw_17rhumb_line_nav_v4_1rhumb_line_nav(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/
+static char __pyx_doc_17rhumb_line_nav_v4_rhumb_line_nav[] = "\n    The true course between the points (lat1,lon1), (lat2,lon2)\n    is given by Rhumb Line Navigation.\n\n    true_course = mod(atan2(lon1-lon2,\n                  log(tan(lat2/2+pi/4)/tan(lat1/2+pi/4))),2*pi)\n    Angle is measured clockwise from north 0 degrees, east 90 degrees etc.\n\n    NOTES: Rhumb lines follow a spiral on the globe and are least simular\n    (i.e, longer) when two ends are co-latitude and most simular when the\n    two ends are co-longitude. Rhumb lines spiral very tightly at high\n    latitudes which can result in comparatively long distances between\n    points. Rhumb lines need special treatment if the coarse crosses the\n    dateline.\n    ";
+static PyMethodDef __pyx_mdef_17rhumb_line_nav_v4_1rhumb_line_nav = {"rhumb_line_nav", (PyCFunction)__pyx_pw_17rhumb_line_nav_v4_1rhumb_line_nav, METH_VARARGS|METH_KEYWORDS, __pyx_doc_17rhumb_line_nav_v4_rhumb_line_nav};
+static PyObject *__pyx_pw_17rhumb_line_nav_v4_1rhumb_line_nav(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) {
+  double __pyx_v_lon2;
+  PyObject *__pyx_v_lat2 = 0;
+  PyObject *__pyx_v_lon1 = 0;
+  PyObject *__pyx_v_lat1 = 0;
+  PyObject *__pyx_v_distance = 0;
+  PyObject *__pyx_r = 0;
+  __Pyx_RefNannyDeclarations
+  __Pyx_RefNannySetupContext("rhumb_line_nav (wrapper)", 0);
+  {
+    static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_lon2,&__pyx_n_s_lat2,&__pyx_n_s_lon1,&__pyx_n_s_lat1,&__pyx_n_s_distance,0};
+    /* values[0..4] collect the five parameters in declaration order;
+       values[4] defaults to Py_False (distance=False). */
+    PyObject* values[5] = {0,0,0,0,0};
+    values[4] = ((PyObject *)Py_False);
+    if (unlikely(__pyx_kwds)) {
+      Py_ssize_t kw_args;
+      const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args);
+      /* First sweep positional args into values[] (fallthrough fills from the
+         highest index down), then fill any gaps from keywords below. */
+      switch (pos_args) {
+        case  5: values[4] = PyTuple_GET_ITEM(__pyx_args, 4);
+        CYTHON_FALLTHROUGH;
+        case  4: values[3] = PyTuple_GET_ITEM(__pyx_args, 3);
+        CYTHON_FALLTHROUGH;
+        case  3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2);
+        CYTHON_FALLTHROUGH;
+        case  2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1);
+        CYTHON_FALLTHROUGH;
+        case  1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0);
+        CYTHON_FALLTHROUGH;
+        case  0: break;
+        default: goto __pyx_L5_argtuple_error;
+      }
+      kw_args = PyDict_Size(__pyx_kwds);
+      /* Fill remaining required params from keywords; a missing required
+         name raises the "takes 4 to 5 arguments" TypeError. */
+      switch (pos_args) {
+        case  0:
+        if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_lon2)) != 0)) kw_args--;
+        else goto __pyx_L5_argtuple_error;
+        CYTHON_FALLTHROUGH;
+        case  1:
+        if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_lat2)) != 0)) kw_args--;
+        else {
+          __Pyx_RaiseArgtupleInvalid("rhumb_line_nav", 0, 4, 5, 1); __PYX_ERR(0, 11, __pyx_L3_error)
+        }
+        CYTHON_FALLTHROUGH;
+        case  2:
+        if (likely((values[2] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_lon1)) != 0)) kw_args--;
+        else {
+          __Pyx_RaiseArgtupleInvalid("rhumb_line_nav", 0, 4, 5, 2); __PYX_ERR(0, 11, __pyx_L3_error)
+        }
+        CYTHON_FALLTHROUGH;
+        case  3:
+        if (likely((values[3] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_lat1)) != 0)) kw_args--;
+        else {
+          __Pyx_RaiseArgtupleInvalid("rhumb_line_nav", 0, 4, 5, 3); __PYX_ERR(0, 11, __pyx_L3_error)
+        }
+        CYTHON_FALLTHROUGH;
+        case  4:
+        if (kw_args > 0) {
+          PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_distance);
+          if (value) { values[4] = value; kw_args--; }
+        }
+      }
+      /* Any keywords left over are either duplicates or unknown names. */
+      if (unlikely(kw_args > 0)) {
+        if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "rhumb_line_nav") < 0)) __PYX_ERR(0, 11, __pyx_L3_error)
+      }
+    } else {
+      /* Fast path: no keywords — only 4 or 5 positional args are accepted. */
+      switch (PyTuple_GET_SIZE(__pyx_args)) {
+        case  5: values[4] = PyTuple_GET_ITEM(__pyx_args, 4);
+        CYTHON_FALLTHROUGH;
+        case  4: values[3] = PyTuple_GET_ITEM(__pyx_args, 3);
+        values[2] = PyTuple_GET_ITEM(__pyx_args, 2);
+        values[1] = PyTuple_GET_ITEM(__pyx_args, 1);
+        values[0] = PyTuple_GET_ITEM(__pyx_args, 0);
+        break;
+        default: goto __pyx_L5_argtuple_error;
+      }
+    }
+    /* Only lon2 is coerced to C double here; -1 doubles as the error
+       sentinel, so PyErr_Occurred() disambiguates a real -1.0 value. */
+    __pyx_v_lon2 = __pyx_PyFloat_AsDouble(values[0]); if (unlikely((__pyx_v_lon2 == (double)-1) && PyErr_Occurred())) __PYX_ERR(0, 11, __pyx_L3_error)
+    __pyx_v_lat2 = values[1];
+    __pyx_v_lon1 = values[2];
+    __pyx_v_lat1 = values[3];
+    __pyx_v_distance = values[4];
+  }
+  goto __pyx_L4_argument_unpacking_done;
+  __pyx_L5_argtuple_error:;
+  __Pyx_RaiseArgtupleInvalid("rhumb_line_nav", 0, 4, 5, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 11, __pyx_L3_error)
+  __pyx_L3_error:;
+  __Pyx_AddTraceback("rhumb_line_nav_v4.rhumb_line_nav", __pyx_clineno, __pyx_lineno, __pyx_filename);
+  __Pyx_RefNannyFinishContext();
+  return NULL;
+  __pyx_L4_argument_unpacking_done:;
+  __pyx_r = __pyx_pf_17rhumb_line_nav_v4_rhumb_line_nav(__pyx_self, __pyx_v_lon2, __pyx_v_lat2, __pyx_v_lon1, __pyx_v_lat1, __pyx_v_distance);
+
+  /* function exit code */
+  __Pyx_RefNannyFinishContext();
+  return __pyx_r;
+}
+
+/* Implementation of rhumb_line_nav: computes the rhumb-line bearing in
+   degrees between (lat1,lon1) and (lat2,lon2); when `distance` is truthy it
+   also computes the rhumb-line distance (converted to km via nautical miles)
+   and returns the tuple (bearing, d), otherwise just the bearing float.
+   lon2 arrives as a C double; lat1/lat2/lon1/distance are PyObject* and are
+   coerced per-operation (so lat1/lat2/lon1 may be any numeric objects).
+   Generated by Cython from rhumb_line_nav_v4.pyx — edit the .pyx instead.
+   The interleaved block comments echo the original .pyx source lines. */
+static PyObject *__pyx_pf_17rhumb_line_nav_v4_rhumb_line_nav(CYTHON_UNUSED PyObject *__pyx_self, double __pyx_v_lon2, PyObject *__pyx_v_lat2, PyObject *__pyx_v_lon1, PyObject *__pyx_v_lat1, PyObject *__pyx_v_distance) {
+  double __pyx_v_lon1r;
+  double __pyx_v_lat1r;
+  double __pyx_v_lon2r;
+  double __pyx_v_lat2r;
+  double __pyx_v_dphi;
+  double __pyx_v_dlon_w;
+  double __pyx_v_dlon_e;
+  double __pyx_v_d;
+  double __pyx_v_q;
+  double __pyx_v_TOL;
+  double __pyx_v_bearing;
+  double __pyx_v_tmp;
+  PyObject *__pyx_r = NULL;
+  __Pyx_RefNannyDeclarations
+  PyObject *__pyx_t_1 = NULL;
+  int __pyx_t_2;
+  double __pyx_t_3;
+  double __pyx_t_4;
+  PyObject *__pyx_t_5 = NULL;
+  PyObject *__pyx_t_6 = NULL;
+  __Pyx_RefNannySetupContext("rhumb_line_nav", 0);
+  /* lon1 may be rebound below via __Pyx_DECREF_SET, so own a reference
+     (the matching XDECREF is at the exit label). */
+  __Pyx_INCREF(__pyx_v_lon1);
+
+  /* "rhumb_line_nav_v4.pyx":30
+ *     cdef double lon1r,lat1r,lon2r,lat2r,dphi,dlon_w,dlon_e,d,q,TOL,bearing,de
+ * 
+ *     TOL = 1e-15  # small number to avoid 0/0 indeterminacies on E-W courses.             # <<<<<<<<<<<<<<
+ *     d = 0.0
+ *     q = 0.0
+ */
+  __pyx_v_TOL = 1e-15;
+
+  /* "rhumb_line_nav_v4.pyx":31
+ * 
+ *     TOL = 1e-15  # small number to avoid 0/0 indeterminacies on E-W courses.
+ *     d = 0.0             # <<<<<<<<<<<<<<
+ *     q = 0.0
+ * 
+ */
+  __pyx_v_d = 0.0;
+
+  /* "rhumb_line_nav_v4.pyx":32
+ *     TOL = 1e-15  # small number to avoid 0/0 indeterminacies on E-W courses.
+ *     d = 0.0
+ *     q = 0.0             # <<<<<<<<<<<<<<
+ * 
+ *     # Ensure that longitude in +-180 form
+ */
+  __pyx_v_q = 0.0;
+
+  /* "rhumb_line_nav_v4.pyx":35
+ * 
+ *     # Ensure that longitude in +-180 form
+ *     if lon1 > 180.0:             # <<<<<<<<<<<<<<
+ *         lon1 = lon1 - 360.0
+ *     if lon2 > 180.0:
+ */
+  /* lon1 is still a generic Python object here, so the comparison and the
+     subtraction below go through the object protocol (can raise). */
+  __pyx_t_1 = PyObject_RichCompare(__pyx_v_lon1, __pyx_float_180_0, Py_GT); __Pyx_XGOTREF(__pyx_t_1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 35, __pyx_L1_error)
+  __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 35, __pyx_L1_error)
+  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+  if (__pyx_t_2) {
+
+    /* "rhumb_line_nav_v4.pyx":36
+ *     # Ensure that longitude in +-180 form
+ *     if lon1 > 180.0:
+ *         lon1 = lon1 - 360.0             # <<<<<<<<<<<<<<
+ *     if lon2 > 180.0:
+ *         lon2 = lon2 - 360.0
+ */
+    __pyx_t_1 = __Pyx_PyFloat_SubtractObjC(__pyx_v_lon1, __pyx_float_360_0, 360.0, 0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 36, __pyx_L1_error)
+    __Pyx_GOTREF(__pyx_t_1);
+    __Pyx_DECREF_SET(__pyx_v_lon1, __pyx_t_1);
+    __pyx_t_1 = 0;
+
+    /* "rhumb_line_nav_v4.pyx":35
+ * 
+ *     # Ensure that longitude in +-180 form
+ *     if lon1 > 180.0:             # <<<<<<<<<<<<<<
+ *         lon1 = lon1 - 360.0
+ *     if lon2 > 180.0:
+ */
+  }
+
+  /* "rhumb_line_nav_v4.pyx":37
+ *     if lon1 > 180.0:
+ *         lon1 = lon1 - 360.0
+ *     if lon2 > 180.0:             # <<<<<<<<<<<<<<
+ *         lon2 = lon2 - 360.0
+ * ##     # Ensure that longitude in 360 form
+ */
+  /* lon2 is a typed C double, so the same normalization compiles to plain
+     C arithmetic (contrast with the object-protocol path for lon1 above). */
+  __pyx_t_2 = ((__pyx_v_lon2 > 180.0) != 0);
+  if (__pyx_t_2) {
+
+    /* "rhumb_line_nav_v4.pyx":38
+ *         lon1 = lon1 - 360.0
+ *     if lon2 > 180.0:
+ *         lon2 = lon2 - 360.0             # <<<<<<<<<<<<<<
+ * ##     # Ensure that longitude in 360 form
+ * ##     if lon1 < 0.0:
+ */
+    __pyx_v_lon2 = (__pyx_v_lon2 - 360.0);
+
+    /* "rhumb_line_nav_v4.pyx":37
+ *     if lon1 > 180.0:
+ *         lon1 = lon1 - 360.0
+ *     if lon2 > 180.0:             # <<<<<<<<<<<<<<
+ *         lon2 = lon2 - 360.0
+ * ##     # Ensure that longitude in 360 form
+ */
+  }
+
+  /* "rhumb_line_nav_v4.pyx":45
+ * ##         lon2 = lon2 + 360.0
+ * 
+ *     lon1r = lon1*0.0174532925             # <<<<<<<<<<<<<<
+ *     lat1r = lat1*0.0174532925
+ *     lon2r = lon2*0.0174532925
+ */
+  /* Degrees -> radians; object-typed inputs multiply via PyNumber_Multiply
+     and are then coerced to C double. */
+  __pyx_t_1 = PyNumber_Multiply(__pyx_v_lon1, __pyx_float_0_0174532925); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 45, __pyx_L1_error)
+  __Pyx_GOTREF(__pyx_t_1);
+  __pyx_t_3 = __pyx_PyFloat_AsDouble(__pyx_t_1); if (unlikely((__pyx_t_3 == (double)-1) && PyErr_Occurred())) __PYX_ERR(0, 45, __pyx_L1_error)
+  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+  __pyx_v_lon1r = __pyx_t_3;
+
+  /* "rhumb_line_nav_v4.pyx":46
+ * 
+ *     lon1r = lon1*0.0174532925
+ *     lat1r = lat1*0.0174532925             # <<<<<<<<<<<<<<
+ *     lon2r = lon2*0.0174532925
+ *     lat2r = lat2*0.0174532925
+ */
+  __pyx_t_1 = PyNumber_Multiply(__pyx_v_lat1, __pyx_float_0_0174532925); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 46, __pyx_L1_error)
+  __Pyx_GOTREF(__pyx_t_1);
+  __pyx_t_3 = __pyx_PyFloat_AsDouble(__pyx_t_1); if (unlikely((__pyx_t_3 == (double)-1) && PyErr_Occurred())) __PYX_ERR(0, 46, __pyx_L1_error)
+  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+  __pyx_v_lat1r = __pyx_t_3;
+
+  /* "rhumb_line_nav_v4.pyx":47
+ *     lon1r = lon1*0.0174532925
+ *     lat1r = lat1*0.0174532925
+ *     lon2r = lon2*0.0174532925             # <<<<<<<<<<<<<<
+ *     lat2r = lat2*0.0174532925
+ * 
+ */
+  __pyx_v_lon2r = (__pyx_v_lon2 * 0.0174532925);
+
+  /* "rhumb_line_nav_v4.pyx":48
+ *     lat1r = lat1*0.0174532925
+ *     lon2r = lon2*0.0174532925
+ *     lat2r = lat2*0.0174532925             # <<<<<<<<<<<<<<
+ * 
+ *     tmp = lon1r-lon2r
+ */
+  __pyx_t_1 = PyNumber_Multiply(__pyx_v_lat2, __pyx_float_0_0174532925); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 48, __pyx_L1_error)
+  __Pyx_GOTREF(__pyx_t_1);
+  __pyx_t_3 = __pyx_PyFloat_AsDouble(__pyx_t_1); if (unlikely((__pyx_t_3 == (double)-1) && PyErr_Occurred())) __PYX_ERR(0, 48, __pyx_L1_error)
+  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+  __pyx_v_lat2r = __pyx_t_3;
+
+  /* "rhumb_line_nav_v4.pyx":50
+ *     lat2r = lat2*0.0174532925
+ * 
+ *     tmp = lon1r-lon2r             # <<<<<<<<<<<<<<
+ *     dlon_w = tmp%6.28318531
+ *     tmp = lon2r-lon1r
+ */
+  __pyx_v_tmp = (__pyx_v_lon1r - __pyx_v_lon2r);
+
+  /* "rhumb_line_nav_v4.pyx":51
+ * 
+ *     tmp = lon1r-lon2r
+ *     dlon_w = tmp%6.28318531             # <<<<<<<<<<<<<<
+ *     tmp = lon2r-lon1r
+ *     dlon_e = tmp%6.28318531
+ */
+  /* __Pyx_mod_double (declared above, defined elsewhere) compiles the pyx
+     '%' operator for doubles; 6.28318531 is 2*pi. */
+  __pyx_v_dlon_w = __Pyx_mod_double(__pyx_v_tmp, 6.28318531);
+
+  /* "rhumb_line_nav_v4.pyx":52
+ *     tmp = lon1r-lon2r
+ *     dlon_w = tmp%6.28318531
+ *     tmp = lon2r-lon1r             # <<<<<<<<<<<<<<
+ *     dlon_e = tmp%6.28318531
+ * ##     # for some reason fmod won't return correct answer for dlon_w
+ */
+  __pyx_v_tmp = (__pyx_v_lon2r - __pyx_v_lon1r);
+
+  /* "rhumb_line_nav_v4.pyx":53
+ *     dlon_w = tmp%6.28318531
+ *     tmp = lon2r-lon1r
+ *     dlon_e = tmp%6.28318531             # <<<<<<<<<<<<<<
+ * ##     # for some reason fmod won't return correct answer for dlon_w
+ * ##     dlon_w = fmod(lon2r-lon1r,6.28318531)
+ */
+  __pyx_v_dlon_e = __Pyx_mod_double(__pyx_v_tmp, 6.28318531);
+
+  /* "rhumb_line_nav_v4.pyx":57
+ * ##     dlon_w = fmod(lon2r-lon1r,6.28318531)
+ * ##     dlon_e = fmod(lon1r-lon2r,6.28318531)
+ *     dphi = log(tan(lat2r*0.5 + 0.7853981625) /             # <<<<<<<<<<<<<<
+ *                tan(lat1r*0.5 + 0.7853981625))
+ * 
+ */
+  __pyx_t_3 = tan(((__pyx_v_lat2r * 0.5) + 0.7853981625));
+
+  /* "rhumb_line_nav_v4.pyx":58
+ * ##     dlon_e = fmod(lon1r-lon2r,6.28318531)
+ *     dphi = log(tan(lat2r*0.5 + 0.7853981625) /
+ *                tan(lat1r*0.5 + 0.7853981625))             # <<<<<<<<<<<<<<
+ * 
+ *     if distance:
+ */
+  __pyx_t_4 = tan(((__pyx_v_lat1r * 0.5) + 0.7853981625));
+
+  /* "rhumb_line_nav_v4.pyx":57
+ * ##     dlon_w = fmod(lon2r-lon1r,6.28318531)
+ * ##     dlon_e = fmod(lon1r-lon2r,6.28318531)
+ *     dphi = log(tan(lat2r*0.5 + 0.7853981625) /             # <<<<<<<<<<<<<<
+ *                tan(lat1r*0.5 + 0.7853981625))
+ * 
+ */
+  /* Cython emits an explicit zero-divisor check to mirror Python's
+     ZeroDivisionError semantics for the float division. */
+  if (unlikely(__pyx_t_4 == 0)) {
+    PyErr_SetString(PyExc_ZeroDivisionError, "float division");
+    __PYX_ERR(0, 57, __pyx_L1_error)
+  }
+  __pyx_v_dphi = log((__pyx_t_3 / __pyx_t_4));
+
+  /* "rhumb_line_nav_v4.pyx":60
+ *                tan(lat1r*0.5 + 0.7853981625))
+ * 
+ *     if distance:             # <<<<<<<<<<<<<<
+ *         if abs(lat2r-lat1r) < sqrt(TOL):
+ *             q = cos(lat1r)
+ */
+  __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_v_distance); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 60, __pyx_L1_error)
+  if (__pyx_t_2) {
+
+    /* "rhumb_line_nav_v4.pyx":61
+ * 
+ *     if distance:
+ *         if abs(lat2r-lat1r) < sqrt(TOL):             # <<<<<<<<<<<<<<
+ *             q = cos(lat1r)
+ *         else:
+ */
+    /* Near E-W course: lat delta below sqrt(TOL) would make q = dlat/dphi
+       a 0/0 indeterminacy, so fall back to cos(lat1r). */
+    __pyx_t_2 = ((fabs((__pyx_v_lat2r - __pyx_v_lat1r)) < sqrt(__pyx_v_TOL)) != 0);
+    if (__pyx_t_2) {
+
+      /* "rhumb_line_nav_v4.pyx":62
+ *     if distance:
+ *         if abs(lat2r-lat1r) < sqrt(TOL):
+ *             q = cos(lat1r)             # <<<<<<<<<<<<<<
+ *         else:
+ *             q = (lat2r-lat1r)/dphi
+ */
+      __pyx_v_q = cos(__pyx_v_lat1r);
+
+      /* "rhumb_line_nav_v4.pyx":61
+ * 
+ *     if distance:
+ *         if abs(lat2r-lat1r) < sqrt(TOL):             # <<<<<<<<<<<<<<
+ *             q = cos(lat1r)
+ *         else:
+ */
+      goto __pyx_L6;
+    }
+
+    /* "rhumb_line_nav_v4.pyx":64
+ *             q = cos(lat1r)
+ *         else:
+ *             q = (lat2r-lat1r)/dphi             # <<<<<<<<<<<<<<
+ * 
+ *     if dlon_w < dlon_e:
+ */
+    /*else*/ {
+      __pyx_t_4 = (__pyx_v_lat2r - __pyx_v_lat1r);
+      if (unlikely(__pyx_v_dphi == 0)) {
+        PyErr_SetString(PyExc_ZeroDivisionError, "float division");
+        __PYX_ERR(0, 64, __pyx_L1_error)
+      }
+      __pyx_v_q = (__pyx_t_4 / __pyx_v_dphi);
+    }
+    __pyx_L6:;
+
+    /* "rhumb_line_nav_v4.pyx":60
+ *                tan(lat1r*0.5 + 0.7853981625))
+ * 
+ *     if distance:             # <<<<<<<<<<<<<<
+ *         if abs(lat2r-lat1r) < sqrt(TOL):
+ *             q = cos(lat1r)
+ */
+  }
+
+  /* "rhumb_line_nav_v4.pyx":66
+ *             q = (lat2r-lat1r)/dphi
+ * 
+ *     if dlon_w < dlon_e:             # <<<<<<<<<<<<<<
+ *         # Westerly rhumb line is the shortest
+ *         tmp = atan2(-1.0*dlon_w,dphi)
+ */
+  /* Pick the shorter of the westward/eastward longitude spans; the chosen
+     branch fixes both the bearing sign and the dlon used for distance. */
+  __pyx_t_2 = ((__pyx_v_dlon_w < __pyx_v_dlon_e) != 0);
+  if (__pyx_t_2) {
+
+    /* "rhumb_line_nav_v4.pyx":68
+ *     if dlon_w < dlon_e:
+ *         # Westerly rhumb line is the shortest
+ *         tmp = atan2(-1.0*dlon_w,dphi)             # <<<<<<<<<<<<<<
+ *         bearing = tmp%6.28318531
+ *         if distance:
+ */
+    __pyx_v_tmp = atan2((-1.0 * __pyx_v_dlon_w), __pyx_v_dphi);
+
+    /* "rhumb_line_nav_v4.pyx":69
+ *         # Westerly rhumb line is the shortest
+ *         tmp = atan2(-1.0*dlon_w,dphi)
+ *         bearing = tmp%6.28318531             # <<<<<<<<<<<<<<
+ *         if distance:
+ *             d = sqrt(pow(q,2)*pow(dlon_w,2) + pow((lat2r-lat1r),2))
+ */
+    __pyx_v_bearing = __Pyx_mod_double(__pyx_v_tmp, 6.28318531);
+
+    /* "rhumb_line_nav_v4.pyx":70
+ *         tmp = atan2(-1.0*dlon_w,dphi)
+ *         bearing = tmp%6.28318531
+ *         if distance:             # <<<<<<<<<<<<<<
+ *             d = sqrt(pow(q,2)*pow(dlon_w,2) + pow((lat2r-lat1r),2))
+ *         else:
+ */
+    __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_v_distance); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 70, __pyx_L1_error)
+    if (__pyx_t_2) {
+
+      /* "rhumb_line_nav_v4.pyx":71
+ *         bearing = tmp%6.28318531
+ *         if distance:
+ *             d = sqrt(pow(q,2)*pow(dlon_w,2) + pow((lat2r-lat1r),2))             # <<<<<<<<<<<<<<
+ *         else:
+ *             tmp = atan2(dlon_e,dphi)
+ */
+      __pyx_v_d = sqrt(((pow(__pyx_v_q, 2) * pow(__pyx_v_dlon_w, 2)) + pow((__pyx_v_lat2r - __pyx_v_lat1r), 2)));
+
+      /* "rhumb_line_nav_v4.pyx":70
+ *         tmp = atan2(-1.0*dlon_w,dphi)
+ *         bearing = tmp%6.28318531
+ *         if distance:             # <<<<<<<<<<<<<<
+ *             d = sqrt(pow(q,2)*pow(dlon_w,2) + pow((lat2r-lat1r),2))
+ *         else:
+ */
+    }
+
+    /* "rhumb_line_nav_v4.pyx":66
+ *             q = (lat2r-lat1r)/dphi
+ * 
+ *     if dlon_w < dlon_e:             # <<<<<<<<<<<<<<
+ *         # Westerly rhumb line is the shortest
+ *         tmp = atan2(-1.0*dlon_w,dphi)
+ */
+    goto __pyx_L7;
+  }
+
+  /* "rhumb_line_nav_v4.pyx":73
+ *             d = sqrt(pow(q,2)*pow(dlon_w,2) + pow((lat2r-lat1r),2))
+ *     else:
+ *         tmp = atan2(dlon_e,dphi)             # <<<<<<<<<<<<<<
+ *         bearing = tmp%6.28318531
+ *         if distance:
+ */
+  /*else*/ {
+    __pyx_v_tmp = atan2(__pyx_v_dlon_e, __pyx_v_dphi);
+
+    /* "rhumb_line_nav_v4.pyx":74
+ *     else:
+ *         tmp = atan2(dlon_e,dphi)
+ *         bearing = tmp%6.28318531             # <<<<<<<<<<<<<<
+ *         if distance:
+ *             d = sqrt(pow(q,2)*pow(dlon_e,2) + pow((lat2r-lat1r),2))
+ */
+    __pyx_v_bearing = __Pyx_mod_double(__pyx_v_tmp, 6.28318531);
+
+    /* "rhumb_line_nav_v4.pyx":75
+ *         tmp = atan2(dlon_e,dphi)
+ *         bearing = tmp%6.28318531
+ *         if distance:             # <<<<<<<<<<<<<<
+ *             d = sqrt(pow(q,2)*pow(dlon_e,2) + pow((lat2r-lat1r),2))
+ * 
+ */
+    __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_v_distance); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 75, __pyx_L1_error)
+    if (__pyx_t_2) {
+
+      /* "rhumb_line_nav_v4.pyx":76
+ *         bearing = tmp%6.28318531
+ *         if distance:
+ *             d = sqrt(pow(q,2)*pow(dlon_e,2) + pow((lat2r-lat1r),2))             # <<<<<<<<<<<<<<
+ * 
+ *     # Convert to the +-2pi (0-360) system
+ */
+      __pyx_v_d = sqrt(((pow(__pyx_v_q, 2) * pow(__pyx_v_dlon_e, 2)) + pow((__pyx_v_lat2r - __pyx_v_lat1r), 2)));
+
+      /* "rhumb_line_nav_v4.pyx":75
+ *         tmp = atan2(dlon_e,dphi)
+ *         bearing = tmp%6.28318531
+ *         if distance:             # <<<<<<<<<<<<<<
+ *             d = sqrt(pow(q,2)*pow(dlon_e,2) + pow((lat2r-lat1r),2))
+ * 
+ */
+    }
+  }
+  __pyx_L7:;
+
+  /* "rhumb_line_nav_v4.pyx":79
+ * 
+ *     # Convert to the +-2pi (0-360) system
+ *     tmp = bearing+6.28318531             # <<<<<<<<<<<<<<
+ *     bearing = tmp%6.28318531
+ * 
+ */
+  /* Normalize bearing into [0, 2*pi) by adding 2*pi then taking mod 2*pi. */
+  __pyx_v_tmp = (__pyx_v_bearing + 6.28318531);
+
+  /* "rhumb_line_nav_v4.pyx":80
+ *     # Convert to the +-2pi (0-360) system
+ *     tmp = bearing+6.28318531
+ *     bearing = tmp%6.28318531             # <<<<<<<<<<<<<<
+ * 
+ *     # Convert to degrees
+ */
+  __pyx_v_bearing = __Pyx_mod_double(__pyx_v_tmp, 6.28318531);
+
+  /* "rhumb_line_nav_v4.pyx":83
+ * 
+ *     # Convert to degrees
+ *     bearing = bearing * 57.2957795             # <<<<<<<<<<<<<<
+ * 
+ * ##     # Convert to 0-360 format
+ */
+  /* 57.2957795 = 180/pi (radians -> degrees). */
+  __pyx_v_bearing = (__pyx_v_bearing * 57.2957795);
+
+  /* "rhumb_line_nav_v4.pyx":97
+ * ##         bearing = 0.0
+ * 
+ *     if distance:             # <<<<<<<<<<<<<<
+ *         # convert from distance in radians to nautical miles to km
+ *         d = d*((180.0*60.0)/3.14159265)
+ */
+  __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_v_distance); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 97, __pyx_L1_error)
+  if (__pyx_t_2) {
+
+    /* "rhumb_line_nav_v4.pyx":99
+ *     if distance:
+ *         # convert from distance in radians to nautical miles to km
+ *         d = d*((180.0*60.0)/3.14159265)             # <<<<<<<<<<<<<<
+ *         d = d*1.852
+ *         return bearing,d
+ */
+    /* radians -> arc-minutes (nautical miles), then nm -> km (1 nm = 1.852 km). */
+    __pyx_v_d = (__pyx_v_d * ((180.0 * 60.0) / 3.14159265));
+
+    /* "rhumb_line_nav_v4.pyx":100
+ *         # convert from distance in radians to nautical miles to km
+ *         d = d*((180.0*60.0)/3.14159265)
+ *         d = d*1.852             # <<<<<<<<<<<<<<
+ *         return bearing,d
+ *     else:
+ */
+    __pyx_v_d = (__pyx_v_d * 1.852);
+
+    /* "rhumb_line_nav_v4.pyx":101
+ *         d = d*((180.0*60.0)/3.14159265)
+ *         d = d*1.852
+ *         return bearing,d             # <<<<<<<<<<<<<<
+ *     else:
+ *         return bearing
+ */
+    /* Build and return the (bearing, d) tuple of Python floats. */
+    __Pyx_XDECREF(__pyx_r);
+    __pyx_t_1 = PyFloat_FromDouble(__pyx_v_bearing); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 101, __pyx_L1_error)
+    __Pyx_GOTREF(__pyx_t_1);
+    __pyx_t_5 = PyFloat_FromDouble(__pyx_v_d); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 101, __pyx_L1_error)
+    __Pyx_GOTREF(__pyx_t_5);
+    __pyx_t_6 = PyTuple_New(2); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 101, __pyx_L1_error)
+    __Pyx_GOTREF(__pyx_t_6);
+    __Pyx_GIVEREF(__pyx_t_1);
+    PyTuple_SET_ITEM(__pyx_t_6, 0, __pyx_t_1);
+    __Pyx_GIVEREF(__pyx_t_5);
+    PyTuple_SET_ITEM(__pyx_t_6, 1, __pyx_t_5);
+    __pyx_t_1 = 0;
+    __pyx_t_5 = 0;
+    __pyx_r = __pyx_t_6;
+    __pyx_t_6 = 0;
+    goto __pyx_L0;
+
+    /* "rhumb_line_nav_v4.pyx":97
+ * ##         bearing = 0.0
+ * 
+ *     if distance:             # <<<<<<<<<<<<<<
+ *         # convert from distance in radians to nautical miles to km
+ *         d = d*((180.0*60.0)/3.14159265)
+ */
+  }
+
+  /* "rhumb_line_nav_v4.pyx":103
+ *         return bearing,d
+ *     else:
+ *         return bearing             # <<<<<<<<<<<<<<
+ * 
+ * # tests
+ */
+  /*else*/ {
+    __Pyx_XDECREF(__pyx_r);
+    __pyx_t_6 = PyFloat_FromDouble(__pyx_v_bearing); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 103, __pyx_L1_error)
+    __Pyx_GOTREF(__pyx_t_6);
+    __pyx_r = __pyx_t_6;
+    __pyx_t_6 = 0;
+    goto __pyx_L0;
+  }
+
+  /* "rhumb_line_nav_v4.pyx":11
+ *     double pow(double x, int y)
+ * 
+ * def rhumb_line_nav(double lon2,lat2,lon1,lat1,distance=False):             # <<<<<<<<<<<<<<
+ *     """
+ *     The true course between the points (lat1,lon1), (lat2,lon2)
+ */
+
+  /* function exit code */
+  /* Error path releases any live temporaries and records the traceback;
+     the shared __pyx_L0 exit releases the owned lon1 reference. */
+  __pyx_L1_error:;
+  __Pyx_XDECREF(__pyx_t_1);
+  __Pyx_XDECREF(__pyx_t_5);
+  __Pyx_XDECREF(__pyx_t_6);
+  __Pyx_AddTraceback("rhumb_line_nav_v4.rhumb_line_nav", __pyx_clineno, __pyx_lineno, __pyx_filename);
+  __pyx_r = NULL;
+  __pyx_L0:;
+  __Pyx_XDECREF(__pyx_v_lon1);
+  __Pyx_XGIVEREF(__pyx_r);
+  __Pyx_RefNannyFinishContext();
+  return __pyx_r;
+}
+
+/* Sentinel-only module method table, referenced by __pyx_moduledef below
+   (the rhumb_line_nav function is registered via its own PyMethodDef). */
+static PyMethodDef __pyx_methods[] = {
+  {0, 0, 0, 0}
+};
+
+#if PY_MAJOR_VERSION >= 3
+/* Python 3 module definition. With CYTHON_PEP489_MULTI_PHASE_INIT the module
+   is created and executed via PEP 489 slots (create/exec below); otherwise
+   classic single-phase init is used with m_size = -1 (no per-module state). */
+#if CYTHON_PEP489_MULTI_PHASE_INIT
+static PyObject* __pyx_pymod_create(PyObject *spec, PyModuleDef *def); /*proto*/
+static int __pyx_pymod_exec_rhumb_line_nav_v4(PyObject* module); /*proto*/
+static PyModuleDef_Slot __pyx_moduledef_slots[] = {
+  {Py_mod_create, (void*)__pyx_pymod_create},
+  {Py_mod_exec, (void*)__pyx_pymod_exec_rhumb_line_nav_v4},
+  {0, NULL}
+};
+#endif
+
+static struct PyModuleDef __pyx_moduledef = {
+    PyModuleDef_HEAD_INIT,
+    "rhumb_line_nav_v4",
+    0, /* m_doc */
+  #if CYTHON_PEP489_MULTI_PHASE_INIT
+    0, /* m_size */
+  #else
+    -1, /* m_size */
+  #endif
+    __pyx_methods /* m_methods */,
+  #if CYTHON_PEP489_MULTI_PHASE_INIT
+    __pyx_moduledef_slots, /* m_slots */
+  #else
+    NULL, /* m_reload */
+  #endif
+    NULL, /* m_traverse */
+    NULL, /* m_clear */
+    NULL /* m_free */
+};
+#endif
+
+/* String-intern table consumed by __Pyx_InitStrings (see __Pyx_InitGlobals):
+   pairs each __pyx_n_s_* / __pyx_kp_s_* PyObject* slot with its C literal and
+   size; the trailing flag fields follow the __Pyx_StringTabEntry layout
+   declared elsewhere in this file. */
+static __Pyx_StringTabEntry __pyx_string_tab[] = {
+  {&__pyx_n_s_TOL, __pyx_k_TOL, sizeof(__pyx_k_TOL), 0, 0, 1, 1},
+  {&__pyx_n_s_bearing, __pyx_k_bearing, sizeof(__pyx_k_bearing), 0, 0, 1, 1},
+  {&__pyx_n_s_cline_in_traceback, __pyx_k_cline_in_traceback, sizeof(__pyx_k_cline_in_traceback), 0, 0, 1, 1},
+  {&__pyx_n_s_d, __pyx_k_d, sizeof(__pyx_k_d), 0, 0, 1, 1},
+  {&__pyx_n_s_de, __pyx_k_de, sizeof(__pyx_k_de), 0, 0, 1, 1},
+  {&__pyx_n_s_distance, __pyx_k_distance, sizeof(__pyx_k_distance), 0, 0, 1, 1},
+  {&__pyx_n_s_dlon_e, __pyx_k_dlon_e, sizeof(__pyx_k_dlon_e), 0, 0, 1, 1},
+  {&__pyx_n_s_dlon_w, __pyx_k_dlon_w, sizeof(__pyx_k_dlon_w), 0, 0, 1, 1},
+  {&__pyx_n_s_dphi, __pyx_k_dphi, sizeof(__pyx_k_dphi), 0, 0, 1, 1},
+  {&__pyx_n_s_lat1, __pyx_k_lat1, sizeof(__pyx_k_lat1), 0, 0, 1, 1},
+  {&__pyx_n_s_lat1r, __pyx_k_lat1r, sizeof(__pyx_k_lat1r), 0, 0, 1, 1},
+  {&__pyx_n_s_lat2, __pyx_k_lat2, sizeof(__pyx_k_lat2), 0, 0, 1, 1},
+  {&__pyx_n_s_lat2r, __pyx_k_lat2r, sizeof(__pyx_k_lat2r), 0, 0, 1, 1},
+  {&__pyx_n_s_lon1, __pyx_k_lon1, sizeof(__pyx_k_lon1), 0, 0, 1, 1},
+  {&__pyx_n_s_lon1r, __pyx_k_lon1r, sizeof(__pyx_k_lon1r), 0, 0, 1, 1},
+  {&__pyx_n_s_lon2, __pyx_k_lon2, sizeof(__pyx_k_lon2), 0, 0, 1, 1},
+  {&__pyx_n_s_lon2r, __pyx_k_lon2r, sizeof(__pyx_k_lon2r), 0, 0, 1, 1},
+  {&__pyx_n_s_main, __pyx_k_main, sizeof(__pyx_k_main), 0, 0, 1, 1},
+  {&__pyx_n_s_q, __pyx_k_q, sizeof(__pyx_k_q), 0, 0, 1, 1},
+  {&__pyx_n_s_rhumb_line_nav, __pyx_k_rhumb_line_nav, sizeof(__pyx_k_rhumb_line_nav), 0, 0, 1, 1},
+  {&__pyx_n_s_rhumb_line_nav_v4, __pyx_k_rhumb_line_nav_v4, sizeof(__pyx_k_rhumb_line_nav_v4), 0, 0, 1, 1},
+  {&__pyx_kp_s_rhumb_line_nav_v4_pyx, __pyx_k_rhumb_line_nav_v4_pyx, sizeof(__pyx_k_rhumb_line_nav_v4_pyx), 0, 0, 1, 0},
+  {&__pyx_n_s_test, __pyx_k_test, sizeof(__pyx_k_test), 0, 0, 1, 1},
+  {&__pyx_n_s_tmp, __pyx_k_tmp, sizeof(__pyx_k_tmp), 0, 0, 1, 1},
+  {0, 0, 0, 0, 0, 0, 0}
+};
+/* This module references no Python builtins, so there is nothing to cache. */
+static int __Pyx_InitCachedBuiltins(void) {
+  return 0;
+}
+
+/* Builds the constants cached at module init: the 18-name varnames tuple
+   (5 parameters + 13 locals of rhumb_line_nav) and the code object used for
+   tracebacks/introspection of the function defined at rhumb_line_nav_v4.pyx
+   line 11. Returns 0 on success, -1 with a Python exception set on failure. */
+static int __Pyx_InitCachedConstants(void) {
+  __Pyx_RefNannyDeclarations
+  __Pyx_RefNannySetupContext("__Pyx_InitCachedConstants", 0);
+
+  /* "rhumb_line_nav_v4.pyx":11
+ *     double pow(double x, int y)
+ * 
+ * def rhumb_line_nav(double lon2,lat2,lon1,lat1,distance=False):             # <<<<<<<<<<<<<<
+ *     """
+ *     The true course between the points (lat1,lon1), (lat2,lon2)
+ */
+  __pyx_tuple_ = PyTuple_Pack(18, __pyx_n_s_lon2, __pyx_n_s_lat2, __pyx_n_s_lon1, __pyx_n_s_lat1, __pyx_n_s_distance, __pyx_n_s_lon1r, __pyx_n_s_lat1r, __pyx_n_s_lon2r, __pyx_n_s_lat2r, __pyx_n_s_dphi, __pyx_n_s_dlon_w, __pyx_n_s_dlon_e, __pyx_n_s_d, __pyx_n_s_q, __pyx_n_s_TOL, __pyx_n_s_bearing, __pyx_n_s_de, __pyx_n_s_tmp); if (unlikely(!__pyx_tuple_)) __PYX_ERR(0, 11, __pyx_L1_error)
+  __Pyx_GOTREF(__pyx_tuple_);
+  __Pyx_GIVEREF(__pyx_tuple_);
+  /* 5 args, 18 locals, CO_OPTIMIZED|CO_NEWLOCALS, filename/name from the
+     interned strings, first line 11. */
+  __pyx_codeobj__2 = (PyObject*)__Pyx_PyCode_New(5, 0, 18, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple_, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_rhumb_line_nav_v4_pyx, __pyx_n_s_rhumb_line_nav, 11, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__2)) __PYX_ERR(0, 11, __pyx_L1_error)
+  __Pyx_RefNannyFinishContext();
+  return 0;
+  __pyx_L1_error:;
+  __Pyx_RefNannyFinishContext();
+  return -1;
+}
+
+static int __Pyx_InitGlobals(void) {
+ if (__Pyx_InitStrings(__pyx_string_tab) < 0) __PYX_ERR(0, 1, __pyx_L1_error);
+ __pyx_float_180_0 = PyFloat_FromDouble(180.0); if (unlikely(!__pyx_float_180_0)) __PYX_ERR(0, 1, __pyx_L1_error)
+ __pyx_float_360_0 = PyFloat_FromDouble(360.0); if (unlikely(!__pyx_float_360_0)) __PYX_ERR(0, 1, __pyx_L1_error)
+ __pyx_float_0_0174532925 = PyFloat_FromDouble(0.0174532925); if (unlikely(!__pyx_float_0_0174532925)) __PYX_ERR(0, 1, __pyx_L1_error)
+ return 0;
+ __pyx_L1_error:;
+ return -1;
+}
+
+static int __Pyx_modinit_global_init_code(void); /*proto*/
+static int __Pyx_modinit_variable_export_code(void); /*proto*/
+static int __Pyx_modinit_function_export_code(void); /*proto*/
+static int __Pyx_modinit_type_init_code(void); /*proto*/
+static int __Pyx_modinit_type_import_code(void); /*proto*/
+static int __Pyx_modinit_variable_import_code(void); /*proto*/
+static int __Pyx_modinit_function_import_code(void); /*proto*/
+
+static int __Pyx_modinit_global_init_code(void) {
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("__Pyx_modinit_global_init_code", 0);
+ /*--- Global init code ---*/
+ __Pyx_RefNannyFinishContext();
+ return 0;
+}
+
+static int __Pyx_modinit_variable_export_code(void) {
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("__Pyx_modinit_variable_export_code", 0);
+ /*--- Variable export code ---*/
+ __Pyx_RefNannyFinishContext();
+ return 0;
+}
+
+static int __Pyx_modinit_function_export_code(void) {
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("__Pyx_modinit_function_export_code", 0);
+ /*--- Function export code ---*/
+ __Pyx_RefNannyFinishContext();
+ return 0;
+}
+
+static int __Pyx_modinit_type_init_code(void) {
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("__Pyx_modinit_type_init_code", 0);
+ /*--- Type init code ---*/
+ __Pyx_RefNannyFinishContext();
+ return 0;
+}
+
+static int __Pyx_modinit_type_import_code(void) {
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("__Pyx_modinit_type_import_code", 0);
+ /*--- Type import code ---*/
+ __Pyx_RefNannyFinishContext();
+ return 0;
+}
+
+static int __Pyx_modinit_variable_import_code(void) {
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("__Pyx_modinit_variable_import_code", 0);
+ /*--- Variable import code ---*/
+ __Pyx_RefNannyFinishContext();
+ return 0;
+}
+
+static int __Pyx_modinit_function_import_code(void) {
+ __Pyx_RefNannyDeclarations
+ __Pyx_RefNannySetupContext("__Pyx_modinit_function_import_code", 0);
+ /*--- Function import code ---*/
+ __Pyx_RefNannyFinishContext();
+ return 0;
+}
+
+
+#if PY_MAJOR_VERSION < 3
+#ifdef CYTHON_NO_PYINIT_EXPORT
+#define __Pyx_PyMODINIT_FUNC void
+#else
+#define __Pyx_PyMODINIT_FUNC PyMODINIT_FUNC
+#endif
+#else
+#ifdef CYTHON_NO_PYINIT_EXPORT
+#define __Pyx_PyMODINIT_FUNC PyObject *
+#else
+#define __Pyx_PyMODINIT_FUNC PyMODINIT_FUNC
+#endif
+#endif
+#ifndef CYTHON_SMALL_CODE
+#if defined(__clang__)
+ #define CYTHON_SMALL_CODE
+#elif defined(__GNUC__)
+ #define CYTHON_SMALL_CODE __attribute__((optimize("Os")))
+#else
+ #define CYTHON_SMALL_CODE
+#endif
+#endif
+
+
+#if PY_MAJOR_VERSION < 3
+__Pyx_PyMODINIT_FUNC initrhumb_line_nav_v4(void) CYTHON_SMALL_CODE; /*proto*/
+__Pyx_PyMODINIT_FUNC initrhumb_line_nav_v4(void)
+#else
+__Pyx_PyMODINIT_FUNC PyInit_rhumb_line_nav_v4(void) CYTHON_SMALL_CODE; /*proto*/
+__Pyx_PyMODINIT_FUNC PyInit_rhumb_line_nav_v4(void)
+#if CYTHON_PEP489_MULTI_PHASE_INIT
+{
+ return PyModuleDef_Init(&__pyx_moduledef);
+}
+static int __Pyx_copy_spec_to_module(PyObject *spec, PyObject *moddict, const char* from_name, const char* to_name) {
+ PyObject *value = PyObject_GetAttrString(spec, from_name);
+ int result = 0;
+ if (likely(value)) {
+ result = PyDict_SetItemString(moddict, to_name, value);
+ Py_DECREF(value);
+ } else if (PyErr_ExceptionMatches(PyExc_AttributeError)) {
+ PyErr_Clear();
+ } else {
+ result = -1;
+ }
+ return result;
+}
+static PyObject* __pyx_pymod_create(PyObject *spec, CYTHON_UNUSED PyModuleDef *def) {
+ PyObject *module = NULL, *moddict, *modname;
+ if (__pyx_m)
+ return __Pyx_NewRef(__pyx_m);
+ modname = PyObject_GetAttrString(spec, "name");
+ if (unlikely(!modname)) goto bad;
+ module = PyModule_NewObject(modname);
+ Py_DECREF(modname);
+ if (unlikely(!module)) goto bad;
+ moddict = PyModule_GetDict(module);
+ if (unlikely(!moddict)) goto bad;
+ if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "loader", "__loader__") < 0)) goto bad;
+ if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "origin", "__file__") < 0)) goto bad;
+ if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "parent", "__package__") < 0)) goto bad;
+ if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "submodule_search_locations", "__path__") < 0)) goto bad;
+ return module;
+bad:
+ Py_XDECREF(module);
+ return NULL;
+}
+
+
+static int __pyx_pymod_exec_rhumb_line_nav_v4(PyObject *__pyx_pyinit_module)
+#endif
+#endif
+{
+ PyObject *__pyx_t_1 = NULL;
+ __Pyx_RefNannyDeclarations
+ #if CYTHON_PEP489_MULTI_PHASE_INIT
+ if (__pyx_m && __pyx_m == __pyx_pyinit_module) return 0;
+ #elif PY_MAJOR_VERSION >= 3
+ if (__pyx_m) return __Pyx_NewRef(__pyx_m);
+ #endif
+ #if CYTHON_REFNANNY
+__Pyx_RefNanny = __Pyx_RefNannyImportAPI("refnanny");
+if (!__Pyx_RefNanny) {
+ PyErr_Clear();
+ __Pyx_RefNanny = __Pyx_RefNannyImportAPI("Cython.Runtime.refnanny");
+ if (!__Pyx_RefNanny)
+ Py_FatalError("failed to import 'refnanny' module");
+}
+#endif
+ __Pyx_RefNannySetupContext("__Pyx_PyMODINIT_FUNC PyInit_rhumb_line_nav_v4(void)", 0);
+ if (__Pyx_check_binary_version() < 0) __PYX_ERR(0, 1, __pyx_L1_error)
+ __pyx_empty_tuple = PyTuple_New(0); if (unlikely(!__pyx_empty_tuple)) __PYX_ERR(0, 1, __pyx_L1_error)
+ __pyx_empty_bytes = PyBytes_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_bytes)) __PYX_ERR(0, 1, __pyx_L1_error)
+ __pyx_empty_unicode = PyUnicode_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_unicode)) __PYX_ERR(0, 1, __pyx_L1_error)
+ #ifdef __Pyx_CyFunction_USED
+ if (__pyx_CyFunction_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error)
+ #endif
+ #ifdef __Pyx_FusedFunction_USED
+ if (__pyx_FusedFunction_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error)
+ #endif
+ #ifdef __Pyx_Coroutine_USED
+ if (__pyx_Coroutine_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error)
+ #endif
+ #ifdef __Pyx_Generator_USED
+ if (__pyx_Generator_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error)
+ #endif
+ #ifdef __Pyx_AsyncGen_USED
+ if (__pyx_AsyncGen_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error)
+ #endif
+ #ifdef __Pyx_StopAsyncIteration_USED
+ if (__pyx_StopAsyncIteration_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error)
+ #endif
+ /*--- Library function declarations ---*/
+ /*--- Threads initialization code ---*/
+ #if defined(__PYX_FORCE_INIT_THREADS) && __PYX_FORCE_INIT_THREADS
+ #ifdef WITH_THREAD /* Python build with threading support? */
+ PyEval_InitThreads();
+ #endif
+ #endif
+ /*--- Module creation code ---*/
+ #if CYTHON_PEP489_MULTI_PHASE_INIT
+ __pyx_m = __pyx_pyinit_module;
+ Py_INCREF(__pyx_m);
+ #else
+ #if PY_MAJOR_VERSION < 3
+ __pyx_m = Py_InitModule4("rhumb_line_nav_v4", __pyx_methods, 0, 0, PYTHON_API_VERSION); Py_XINCREF(__pyx_m);
+ #else
+ __pyx_m = PyModule_Create(&__pyx_moduledef);
+ #endif
+ if (unlikely(!__pyx_m)) __PYX_ERR(0, 1, __pyx_L1_error)
+ #endif
+ __pyx_d = PyModule_GetDict(__pyx_m); if (unlikely(!__pyx_d)) __PYX_ERR(0, 1, __pyx_L1_error)
+ Py_INCREF(__pyx_d);
+ __pyx_b = PyImport_AddModule(__Pyx_BUILTIN_MODULE_NAME); if (unlikely(!__pyx_b)) __PYX_ERR(0, 1, __pyx_L1_error)
+ __pyx_cython_runtime = PyImport_AddModule((char *) "cython_runtime"); if (unlikely(!__pyx_cython_runtime)) __PYX_ERR(0, 1, __pyx_L1_error)
+ #if CYTHON_COMPILING_IN_PYPY
+ Py_INCREF(__pyx_b);
+ #endif
+ if (PyObject_SetAttrString(__pyx_m, "__builtins__", __pyx_b) < 0) __PYX_ERR(0, 1, __pyx_L1_error);
+ /*--- Initialize various global constants etc. ---*/
+ if (__Pyx_InitGlobals() < 0) __PYX_ERR(0, 1, __pyx_L1_error)
+ #if PY_MAJOR_VERSION < 3 && (__PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT)
+ if (__Pyx_init_sys_getdefaultencoding_params() < 0) __PYX_ERR(0, 1, __pyx_L1_error)
+ #endif
+ if (__pyx_module_is_main_rhumb_line_nav_v4) {
+ if (PyObject_SetAttrString(__pyx_m, "__name__", __pyx_n_s_main) < 0) __PYX_ERR(0, 1, __pyx_L1_error)
+ }
+ #if PY_MAJOR_VERSION >= 3
+ {
+ PyObject *modules = PyImport_GetModuleDict(); if (unlikely(!modules)) __PYX_ERR(0, 1, __pyx_L1_error)
+ if (!PyDict_GetItemString(modules, "rhumb_line_nav_v4")) {
+ if (unlikely(PyDict_SetItemString(modules, "rhumb_line_nav_v4", __pyx_m) < 0)) __PYX_ERR(0, 1, __pyx_L1_error)
+ }
+ }
+ #endif
+ /*--- Builtin init code ---*/
+ if (__Pyx_InitCachedBuiltins() < 0) __PYX_ERR(0, 1, __pyx_L1_error)
+ /*--- Constants init code ---*/
+ if (__Pyx_InitCachedConstants() < 0) __PYX_ERR(0, 1, __pyx_L1_error)
+ /*--- Global type/function init code ---*/
+ (void)__Pyx_modinit_global_init_code();
+ (void)__Pyx_modinit_variable_export_code();
+ (void)__Pyx_modinit_function_export_code();
+ (void)__Pyx_modinit_type_init_code();
+ (void)__Pyx_modinit_type_import_code();
+ (void)__Pyx_modinit_variable_import_code();
+ (void)__Pyx_modinit_function_import_code();
+ /*--- Execution code ---*/
+ #if defined(__Pyx_Generator_USED) || defined(__Pyx_Coroutine_USED)
+ if (__Pyx_patch_abc() < 0) __PYX_ERR(0, 1, __pyx_L1_error)
+ #endif
+
+ /* "rhumb_line_nav_v4.pyx":11
+ * double pow(double x, int y)
+ *
+ * def rhumb_line_nav(double lon2,lat2,lon1,lat1,distance=False): # <<<<<<<<<<<<<<
+ * """
+ * The true course between the points (lat1,lon1), (lat2,lon2)
+ */
+ __pyx_t_1 = PyCFunction_NewEx(&__pyx_mdef_17rhumb_line_nav_v4_1rhumb_line_nav, NULL, __pyx_n_s_rhumb_line_nav_v4); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 11, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ if (PyDict_SetItem(__pyx_d, __pyx_n_s_rhumb_line_nav, __pyx_t_1) < 0) __PYX_ERR(0, 11, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /* "rhumb_line_nav_v4.pyx":1
+ * # use python2.5 setup_rhumb_line_nav.py build_ext --inplace # <<<<<<<<<<<<<<
+ * cdef extern from "math.h":
+ * double sin(double x)
+ */
+ __pyx_t_1 = __Pyx_PyDict_NewPresized(0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1, __pyx_L1_error)
+ __Pyx_GOTREF(__pyx_t_1);
+ if (PyDict_SetItem(__pyx_d, __pyx_n_s_test, __pyx_t_1) < 0) __PYX_ERR(0, 1, __pyx_L1_error)
+ __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+ /*--- Wrapped vars code ---*/
+
+ goto __pyx_L0;
+ __pyx_L1_error:;
+ __Pyx_XDECREF(__pyx_t_1);
+ if (__pyx_m) {
+ if (__pyx_d) {
+ __Pyx_AddTraceback("init rhumb_line_nav_v4", 0, __pyx_lineno, __pyx_filename);
+ }
+ Py_DECREF(__pyx_m); __pyx_m = 0;
+ } else if (!PyErr_Occurred()) {
+ PyErr_SetString(PyExc_ImportError, "init rhumb_line_nav_v4");
+ }
+ __pyx_L0:;
+ __Pyx_RefNannyFinishContext();
+ #if CYTHON_PEP489_MULTI_PHASE_INIT
+ return (__pyx_m != NULL) ? 0 : -1;
+ #elif PY_MAJOR_VERSION >= 3
+ return __pyx_m;
+ #else
+ return;
+ #endif
+}
+
+/* --- Runtime support code --- */
+/* Refnanny */
+#if CYTHON_REFNANNY
+static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname) {
+ PyObject *m = NULL, *p = NULL;
+ void *r = NULL;
+ m = PyImport_ImportModule((char *)modname);
+ if (!m) goto end;
+ p = PyObject_GetAttrString(m, (char *)"RefNannyAPI");
+ if (!p) goto end;
+ r = PyLong_AsVoidPtr(p);
+end:
+ Py_XDECREF(p);
+ Py_XDECREF(m);
+ return (__Pyx_RefNannyAPIStruct *)r;
+}
+#endif
+
+/* RaiseArgTupleInvalid */
+static void __Pyx_RaiseArgtupleInvalid(
+ const char* func_name,
+ int exact,
+ Py_ssize_t num_min,
+ Py_ssize_t num_max,
+ Py_ssize_t num_found)
+{
+ Py_ssize_t num_expected;
+ const char *more_or_less;
+ if (num_found < num_min) {
+ num_expected = num_min;
+ more_or_less = "at least";
+ } else {
+ num_expected = num_max;
+ more_or_less = "at most";
+ }
+ if (exact) {
+ more_or_less = "exactly";
+ }
+ PyErr_Format(PyExc_TypeError,
+ "%.200s() takes %.8s %" CYTHON_FORMAT_SSIZE_T "d positional argument%.1s (%" CYTHON_FORMAT_SSIZE_T "d given)",
+ func_name, more_or_less, num_expected,
+ (num_expected == 1) ? "" : "s", num_found);
+}
+
+/* RaiseDoubleKeywords */
+static void __Pyx_RaiseDoubleKeywordsError(
+ const char* func_name,
+ PyObject* kw_name)
+{
+ PyErr_Format(PyExc_TypeError,
+ #if PY_MAJOR_VERSION >= 3
+ "%s() got multiple values for keyword argument '%U'", func_name, kw_name);
+ #else
+ "%s() got multiple values for keyword argument '%s'", func_name,
+ PyString_AsString(kw_name));
+ #endif
+}
+
+/* ParseKeywords */
+static int __Pyx_ParseOptionalKeywords(
+ PyObject *kwds,
+ PyObject **argnames[],
+ PyObject *kwds2,
+ PyObject *values[],
+ Py_ssize_t num_pos_args,
+ const char* function_name)
+{
+ PyObject *key = 0, *value = 0;
+ Py_ssize_t pos = 0;
+ PyObject*** name;
+ PyObject*** first_kw_arg = argnames + num_pos_args;
+ while (PyDict_Next(kwds, &pos, &key, &value)) {
+ name = first_kw_arg;
+ while (*name && (**name != key)) name++;
+ if (*name) {
+ values[name-argnames] = value;
+ continue;
+ }
+ name = first_kw_arg;
+ #if PY_MAJOR_VERSION < 3
+ if (likely(PyString_CheckExact(key)) || likely(PyString_Check(key))) {
+ while (*name) {
+ if ((CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**name) == PyString_GET_SIZE(key))
+ && _PyString_Eq(**name, key)) {
+ values[name-argnames] = value;
+ break;
+ }
+ name++;
+ }
+ if (*name) continue;
+ else {
+ PyObject*** argname = argnames;
+ while (argname != first_kw_arg) {
+ if ((**argname == key) || (
+ (CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**argname) == PyString_GET_SIZE(key))
+ && _PyString_Eq(**argname, key))) {
+ goto arg_passed_twice;
+ }
+ argname++;
+ }
+ }
+ } else
+ #endif
+ if (likely(PyUnicode_Check(key))) {
+ while (*name) {
+ int cmp = (**name == key) ? 0 :
+ #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3
+ (PyUnicode_GET_SIZE(**name) != PyUnicode_GET_SIZE(key)) ? 1 :
+ #endif
+ PyUnicode_Compare(**name, key);
+ if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad;
+ if (cmp == 0) {
+ values[name-argnames] = value;
+ break;
+ }
+ name++;
+ }
+ if (*name) continue;
+ else {
+ PyObject*** argname = argnames;
+ while (argname != first_kw_arg) {
+ int cmp = (**argname == key) ? 0 :
+ #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3
+ (PyUnicode_GET_SIZE(**argname) != PyUnicode_GET_SIZE(key)) ? 1 :
+ #endif
+ PyUnicode_Compare(**argname, key);
+ if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad;
+ if (cmp == 0) goto arg_passed_twice;
+ argname++;
+ }
+ }
+ } else
+ goto invalid_keyword_type;
+ if (kwds2) {
+ if (unlikely(PyDict_SetItem(kwds2, key, value))) goto bad;
+ } else {
+ goto invalid_keyword;
+ }
+ }
+ return 0;
+arg_passed_twice:
+ __Pyx_RaiseDoubleKeywordsError(function_name, key);
+ goto bad;
+invalid_keyword_type:
+ PyErr_Format(PyExc_TypeError,
+ "%.200s() keywords must be strings", function_name);
+ goto bad;
+invalid_keyword:
+ PyErr_Format(PyExc_TypeError,
+ #if PY_MAJOR_VERSION < 3
+ "%.200s() got an unexpected keyword argument '%.200s'",
+ function_name, PyString_AsString(key));
+ #else
+ "%s() got an unexpected keyword argument '%U'",
+ function_name, key);
+ #endif
+bad:
+ return -1;
+}
+
+/* PyFloatBinop */
+#if !CYTHON_COMPILING_IN_PYPY
+static PyObject* __Pyx_PyFloat_SubtractObjC(PyObject *op1, PyObject *op2, double floatval, CYTHON_UNUSED int inplace) {
+ const double b = floatval;
+ double a, result;
+ if (likely(PyFloat_CheckExact(op1))) {
+ a = PyFloat_AS_DOUBLE(op1);
+ } else
+ #if PY_MAJOR_VERSION < 3
+ if (likely(PyInt_CheckExact(op1))) {
+ a = (double) PyInt_AS_LONG(op1);
+ } else
+ #endif
+ if (likely(PyLong_CheckExact(op1))) {
+ #if CYTHON_USE_PYLONG_INTERNALS
+ const digit* digits = ((PyLongObject*)op1)->ob_digit;
+ const Py_ssize_t size = Py_SIZE(op1);
+ switch (size) {
+ case 0: a = 0.0; break;
+ case -1: a = -(double) digits[0]; break;
+ case 1: a = (double) digits[0]; break;
+ case -2:
+ case 2:
+ if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT && ((8 * sizeof(unsigned long) < 53) || (1 * PyLong_SHIFT < 53))) {
+ a = (double) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]));
+ if ((8 * sizeof(unsigned long) < 53) || (2 * PyLong_SHIFT < 53) || (a < (double) ((PY_LONG_LONG)1 << 53))) {
+ if (size == -2)
+ a = -a;
+ break;
+ }
+ }
+ CYTHON_FALLTHROUGH;
+ case -3:
+ case 3:
+ if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT && ((8 * sizeof(unsigned long) < 53) || (2 * PyLong_SHIFT < 53))) {
+ a = (double) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]));
+ if ((8 * sizeof(unsigned long) < 53) || (3 * PyLong_SHIFT < 53) || (a < (double) ((PY_LONG_LONG)1 << 53))) {
+ if (size == -3)
+ a = -a;
+ break;
+ }
+ }
+ CYTHON_FALLTHROUGH;
+ case -4:
+ case 4:
+ if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT && ((8 * sizeof(unsigned long) < 53) || (3 * PyLong_SHIFT < 53))) {
+ a = (double) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]));
+ if ((8 * sizeof(unsigned long) < 53) || (4 * PyLong_SHIFT < 53) || (a < (double) ((PY_LONG_LONG)1 << 53))) {
+ if (size == -4)
+ a = -a;
+ break;
+ }
+ }
+ CYTHON_FALLTHROUGH;
+ default:
+ #else
+ {
+ #endif
+ a = PyLong_AsDouble(op1);
+ if (unlikely(a == -1.0 && PyErr_Occurred())) return NULL;
+ }
+ } else {
+ return (inplace ? PyNumber_InPlaceSubtract : PyNumber_Subtract)(op1, op2);
+ }
+ PyFPE_START_PROTECT("subtract", return NULL)
+ result = a - b;
+ PyFPE_END_PROTECT(result)
+ return PyFloat_FromDouble(result);
+}
+#endif
+
+/* None */
+ static CYTHON_INLINE double __Pyx_mod_double(double a, double b) {
+ double r = fmod(a, b);
+ r += ((r != 0) & ((r < 0) ^ (b < 0))) * b;
+ return r;
+}
+
+/* PyObjectGetAttrStr */
+ #if CYTHON_USE_TYPE_SLOTS
+static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStr(PyObject* obj, PyObject* attr_name) {
+ PyTypeObject* tp = Py_TYPE(obj);
+ if (likely(tp->tp_getattro))
+ return tp->tp_getattro(obj, attr_name);
+#if PY_MAJOR_VERSION < 3
+ if (likely(tp->tp_getattr))
+ return tp->tp_getattr(obj, PyString_AS_STRING(attr_name));
+#endif
+ return PyObject_GetAttr(obj, attr_name);
+}
+#endif
+
+/* PyErrFetchRestore */
+ #if CYTHON_FAST_THREAD_STATE
+static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb) {
+ PyObject *tmp_type, *tmp_value, *tmp_tb;
+ tmp_type = tstate->curexc_type;
+ tmp_value = tstate->curexc_value;
+ tmp_tb = tstate->curexc_traceback;
+ tstate->curexc_type = type;
+ tstate->curexc_value = value;
+ tstate->curexc_traceback = tb;
+ Py_XDECREF(tmp_type);
+ Py_XDECREF(tmp_value);
+ Py_XDECREF(tmp_tb);
+}
+static CYTHON_INLINE void __Pyx_ErrFetchInState(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) {
+ *type = tstate->curexc_type;
+ *value = tstate->curexc_value;
+ *tb = tstate->curexc_traceback;
+ tstate->curexc_type = 0;
+ tstate->curexc_value = 0;
+ tstate->curexc_traceback = 0;
+}
+#endif
+
+/* CLineInTraceback */
+ #ifndef CYTHON_CLINE_IN_TRACEBACK
+static int __Pyx_CLineForTraceback(CYTHON_UNUSED PyThreadState *tstate, int c_line) {
+ PyObject *use_cline;
+ PyObject *ptype, *pvalue, *ptraceback;
+#if CYTHON_COMPILING_IN_CPYTHON
+ PyObject **cython_runtime_dict;
+#endif
+ __Pyx_ErrFetchInState(tstate, &ptype, &pvalue, &ptraceback);
+#if CYTHON_COMPILING_IN_CPYTHON
+ cython_runtime_dict = _PyObject_GetDictPtr(__pyx_cython_runtime);
+ if (likely(cython_runtime_dict)) {
+ use_cline = __Pyx_PyDict_GetItemStr(*cython_runtime_dict, __pyx_n_s_cline_in_traceback);
+ } else
+#endif
+ {
+ PyObject *use_cline_obj = __Pyx_PyObject_GetAttrStr(__pyx_cython_runtime, __pyx_n_s_cline_in_traceback);
+ if (use_cline_obj) {
+ use_cline = PyObject_Not(use_cline_obj) ? Py_False : Py_True;
+ Py_DECREF(use_cline_obj);
+ } else {
+ PyErr_Clear();
+ use_cline = NULL;
+ }
+ }
+ if (!use_cline) {
+ c_line = 0;
+ PyObject_SetAttr(__pyx_cython_runtime, __pyx_n_s_cline_in_traceback, Py_False);
+ }
+ else if (PyObject_Not(use_cline) != 0) {
+ c_line = 0;
+ }
+ __Pyx_ErrRestoreInState(tstate, ptype, pvalue, ptraceback);
+ return c_line;
+}
+#endif
+
+/* CodeObjectCache */
+ static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line) {
+ int start = 0, mid = 0, end = count - 1;
+ if (end >= 0 && code_line > entries[end].code_line) {
+ return count;
+ }
+ while (start < end) {
+ mid = start + (end - start) / 2;
+ if (code_line < entries[mid].code_line) {
+ end = mid;
+ } else if (code_line > entries[mid].code_line) {
+ start = mid + 1;
+ } else {
+ return mid;
+ }
+ }
+ if (code_line <= entries[mid].code_line) {
+ return mid;
+ } else {
+ return mid + 1;
+ }
+}
+static PyCodeObject *__pyx_find_code_object(int code_line) {
+ PyCodeObject* code_object;
+ int pos;
+ if (unlikely(!code_line) || unlikely(!__pyx_code_cache.entries)) {
+ return NULL;
+ }
+ pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line);
+ if (unlikely(pos >= __pyx_code_cache.count) || unlikely(__pyx_code_cache.entries[pos].code_line != code_line)) {
+ return NULL;
+ }
+ code_object = __pyx_code_cache.entries[pos].code_object;
+ Py_INCREF(code_object);
+ return code_object;
+}
+static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object) {
+ int pos, i;
+ __Pyx_CodeObjectCacheEntry* entries = __pyx_code_cache.entries;
+ if (unlikely(!code_line)) {
+ return;
+ }
+ if (unlikely(!entries)) {
+ entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Malloc(64*sizeof(__Pyx_CodeObjectCacheEntry));
+ if (likely(entries)) {
+ __pyx_code_cache.entries = entries;
+ __pyx_code_cache.max_count = 64;
+ __pyx_code_cache.count = 1;
+ entries[0].code_line = code_line;
+ entries[0].code_object = code_object;
+ Py_INCREF(code_object);
+ }
+ return;
+ }
+ pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line);
+ if ((pos < __pyx_code_cache.count) && unlikely(__pyx_code_cache.entries[pos].code_line == code_line)) {
+ PyCodeObject* tmp = entries[pos].code_object;
+ entries[pos].code_object = code_object;
+ Py_DECREF(tmp);
+ return;
+ }
+ if (__pyx_code_cache.count == __pyx_code_cache.max_count) {
+ int new_max = __pyx_code_cache.max_count + 64;
+ entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Realloc(
+ __pyx_code_cache.entries, (size_t)new_max*sizeof(__Pyx_CodeObjectCacheEntry));
+ if (unlikely(!entries)) {
+ return;
+ }
+ __pyx_code_cache.entries = entries;
+ __pyx_code_cache.max_count = new_max;
+ }
+ for (i=__pyx_code_cache.count; i>pos; i--) {
+ entries[i] = entries[i-1];
+ }
+ entries[pos].code_line = code_line;
+ entries[pos].code_object = code_object;
+ __pyx_code_cache.count++;
+ Py_INCREF(code_object);
+}
+
+/* AddTraceback */
+ #include "compile.h"
+#include "frameobject.h"
+#include "traceback.h"
+static PyCodeObject* __Pyx_CreateCodeObjectForTraceback(
+ const char *funcname, int c_line,
+ int py_line, const char *filename) {
+ PyCodeObject *py_code = 0;
+ PyObject *py_srcfile = 0;
+ PyObject *py_funcname = 0;
+ #if PY_MAJOR_VERSION < 3
+ py_srcfile = PyString_FromString(filename);
+ #else
+ py_srcfile = PyUnicode_FromString(filename);
+ #endif
+ if (!py_srcfile) goto bad;
+ if (c_line) {
+ #if PY_MAJOR_VERSION < 3
+ py_funcname = PyString_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line);
+ #else
+ py_funcname = PyUnicode_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line);
+ #endif
+ }
+ else {
+ #if PY_MAJOR_VERSION < 3
+ py_funcname = PyString_FromString(funcname);
+ #else
+ py_funcname = PyUnicode_FromString(funcname);
+ #endif
+ }
+ if (!py_funcname) goto bad;
+ py_code = __Pyx_PyCode_New(
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ __pyx_empty_bytes, /*PyObject *code,*/
+ __pyx_empty_tuple, /*PyObject *consts,*/
+ __pyx_empty_tuple, /*PyObject *names,*/
+ __pyx_empty_tuple, /*PyObject *varnames,*/
+ __pyx_empty_tuple, /*PyObject *freevars,*/
+ __pyx_empty_tuple, /*PyObject *cellvars,*/
+ py_srcfile, /*PyObject *filename,*/
+ py_funcname, /*PyObject *name,*/
+ py_line,
+ __pyx_empty_bytes /*PyObject *lnotab*/
+ );
+ Py_DECREF(py_srcfile);
+ Py_DECREF(py_funcname);
+ return py_code;
+bad:
+ Py_XDECREF(py_srcfile);
+ Py_XDECREF(py_funcname);
+ return NULL;
+}
+static void __Pyx_AddTraceback(const char *funcname, int c_line,
+ int py_line, const char *filename) {
+ PyCodeObject *py_code = 0;
+ PyFrameObject *py_frame = 0;
+ PyThreadState *tstate = __Pyx_PyThreadState_Current;
+ if (c_line) {
+ c_line = __Pyx_CLineForTraceback(tstate, c_line);
+ }
+ py_code = __pyx_find_code_object(c_line ? -c_line : py_line);
+ if (!py_code) {
+ py_code = __Pyx_CreateCodeObjectForTraceback(
+ funcname, c_line, py_line, filename);
+ if (!py_code) goto bad;
+ __pyx_insert_code_object(c_line ? -c_line : py_line, py_code);
+ }
+ py_frame = PyFrame_New(
+ tstate, /*PyThreadState *tstate,*/
+ py_code, /*PyCodeObject *code,*/
+ __pyx_d, /*PyObject *globals,*/
+ 0 /*PyObject *locals*/
+ );
+ if (!py_frame) goto bad;
+ __Pyx_PyFrame_SetLineNumber(py_frame, py_line);
+ PyTraceBack_Here(py_frame);
+bad:
+ Py_XDECREF(py_code);
+ Py_XDECREF(py_frame);
+}
+
+/* CIntToPy */
+ static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value) {
+ const long neg_one = (long) -1, const_zero = (long) 0;
+ const int is_unsigned = neg_one > const_zero;
+ if (is_unsigned) {
+ if (sizeof(long) < sizeof(long)) {
+ return PyInt_FromLong((long) value);
+ } else if (sizeof(long) <= sizeof(unsigned long)) {
+ return PyLong_FromUnsignedLong((unsigned long) value);
+#ifdef HAVE_LONG_LONG
+ } else if (sizeof(long) <= sizeof(unsigned PY_LONG_LONG)) {
+ return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value);
+#endif
+ }
+ } else {
+ if (sizeof(long) <= sizeof(long)) {
+ return PyInt_FromLong((long) value);
+#ifdef HAVE_LONG_LONG
+ } else if (sizeof(long) <= sizeof(PY_LONG_LONG)) {
+ return PyLong_FromLongLong((PY_LONG_LONG) value);
+#endif
+ }
+ }
+ {
+ int one = 1; int little = (int)*(unsigned char *)&one;
+ unsigned char *bytes = (unsigned char *)&value;
+ return _PyLong_FromByteArray(bytes, sizeof(long),
+ little, !is_unsigned);
+ }
+}
+
+/* CIntFromPyVerify */
+ #define __PYX_VERIFY_RETURN_INT(target_type, func_type, func_value)\
+ __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 0)
+#define __PYX_VERIFY_RETURN_INT_EXC(target_type, func_type, func_value)\
+ __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 1)
+#define __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, exc)\
+ {\
+ func_type value = func_value;\
+ if (sizeof(target_type) < sizeof(func_type)) {\
+ if (unlikely(value != (func_type) (target_type) value)) {\
+ func_type zero = 0;\
+ if (exc && unlikely(value == (func_type)-1 && PyErr_Occurred()))\
+ return (target_type) -1;\
+ if (is_unsigned && unlikely(value < zero))\
+ goto raise_neg_overflow;\
+ else\
+ goto raise_overflow;\
+ }\
+ }\
+ return (target_type) value;\
+ }
+
+/* CIntFromPy */
+ static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *x) {
+ const long neg_one = (long) -1, const_zero = (long) 0;
+ const int is_unsigned = neg_one > const_zero;
+#if PY_MAJOR_VERSION < 3
+ if (likely(PyInt_Check(x))) {
+ if (sizeof(long) < sizeof(long)) {
+ __PYX_VERIFY_RETURN_INT(long, long, PyInt_AS_LONG(x))
+ } else {
+ long val = PyInt_AS_LONG(x);
+ if (is_unsigned && unlikely(val < 0)) {
+ goto raise_neg_overflow;
+ }
+ return (long) val;
+ }
+ } else
+#endif
+ if (likely(PyLong_Check(x))) {
+ if (is_unsigned) {
+#if CYTHON_USE_PYLONG_INTERNALS
+ const digit* digits = ((PyLongObject*)x)->ob_digit;
+ switch (Py_SIZE(x)) {
+ case 0: return (long) 0;
+ case 1: __PYX_VERIFY_RETURN_INT(long, digit, digits[0])
+ case 2:
+ if (8 * sizeof(long) > 1 * PyLong_SHIFT) {
+ if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) {
+ __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
+ } else if (8 * sizeof(long) >= 2 * PyLong_SHIFT) {
+ return (long) (((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0]));
+ }
+ }
+ break;
+ case 3:
+ if (8 * sizeof(long) > 2 * PyLong_SHIFT) {
+ if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) {
+ __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
+ } else if (8 * sizeof(long) >= 3 * PyLong_SHIFT) {
+ return (long) (((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]));
+ }
+ }
+ break;
+ case 4:
+ if (8 * sizeof(long) > 3 * PyLong_SHIFT) {
+ if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) {
+ __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
+ } else if (8 * sizeof(long) >= 4 * PyLong_SHIFT) {
+ return (long) (((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]));
+ }
+ }
+ break;
+ }
+#endif
+#if CYTHON_COMPILING_IN_CPYTHON
+ if (unlikely(Py_SIZE(x) < 0)) {
+ goto raise_neg_overflow;
+ }
+#else
+ {
+ int result = PyObject_RichCompareBool(x, Py_False, Py_LT);
+ if (unlikely(result < 0))
+ return (long) -1;
+ if (unlikely(result == 1))
+ goto raise_neg_overflow;
+ }
+#endif
+ if (sizeof(long) <= sizeof(unsigned long)) {
+ __PYX_VERIFY_RETURN_INT_EXC(long, unsigned long, PyLong_AsUnsignedLong(x))
+#ifdef HAVE_LONG_LONG
+ } else if (sizeof(long) <= sizeof(unsigned PY_LONG_LONG)) {
+ __PYX_VERIFY_RETURN_INT_EXC(long, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x))
+#endif
+ }
+ } else {
+#if CYTHON_USE_PYLONG_INTERNALS
+ const digit* digits = ((PyLongObject*)x)->ob_digit;
+ switch (Py_SIZE(x)) {
+ case 0: return (long) 0;
+ case -1: __PYX_VERIFY_RETURN_INT(long, sdigit, (sdigit) (-(sdigit)digits[0]))
+ case 1: __PYX_VERIFY_RETURN_INT(long, digit, +digits[0])
+ case -2:
+ if (8 * sizeof(long) - 1 > 1 * PyLong_SHIFT) {
+ if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) {
+ __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
+ } else if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) {
+ return (long) (((long)-1)*(((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0])));
+ }
+ }
+ break;
+ case 2:
+ if (8 * sizeof(long) > 1 * PyLong_SHIFT) {
+ if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) {
+ __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
+ } else if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) {
+ return (long) ((((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0])));
+ }
+ }
+ break;
+ case -3:
+ if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) {
+ if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) {
+ __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
+ } else if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) {
+ return (long) (((long)-1)*(((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])));
+ }
+ }
+ break;
+ case 3:
+ if (8 * sizeof(long) > 2 * PyLong_SHIFT) {
+ if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) {
+ __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
+ } else if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) {
+ return (long) ((((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])));
+ }
+ }
+ break;
+ case -4:
+ if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) {
+ if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) {
+ __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
+ } else if (8 * sizeof(long) - 1 > 4 * PyLong_SHIFT) {
+ return (long) (((long)-1)*(((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])));
+ }
+ }
+ break;
+ case 4:
+ if (8 * sizeof(long) > 3 * PyLong_SHIFT) {
+ if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) {
+ __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
+ } else if (8 * sizeof(long) - 1 > 4 * PyLong_SHIFT) {
+ return (long) ((((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])));
+ }
+ }
+ break;
+ }
+#endif
+ if (sizeof(long) <= sizeof(long)) {
+ __PYX_VERIFY_RETURN_INT_EXC(long, long, PyLong_AsLong(x))
+#ifdef HAVE_LONG_LONG
+ } else if (sizeof(long) <= sizeof(PY_LONG_LONG)) {
+ __PYX_VERIFY_RETURN_INT_EXC(long, PY_LONG_LONG, PyLong_AsLongLong(x))
+#endif
+ }
+ }
+ {
+#if CYTHON_COMPILING_IN_PYPY && !defined(_PyLong_AsByteArray)
+ PyErr_SetString(PyExc_RuntimeError,
+ "_PyLong_AsByteArray() not available in PyPy, cannot convert large numbers");
+#else
+ long val;
+ PyObject *v = __Pyx_PyNumber_IntOrLong(x);
+ #if PY_MAJOR_VERSION < 3
+ if (likely(v) && !PyLong_Check(v)) {
+ PyObject *tmp = v;
+ v = PyNumber_Long(tmp);
+ Py_DECREF(tmp);
+ }
+ #endif
+ if (likely(v)) {
+ int one = 1; int is_little = (int)*(unsigned char *)&one;
+ unsigned char *bytes = (unsigned char *)&val;
+ int ret = _PyLong_AsByteArray((PyLongObject *)v,
+ bytes, sizeof(val),
+ is_little, !is_unsigned);
+ Py_DECREF(v);
+ if (likely(!ret))
+ return val;
+ }
+#endif
+ return (long) -1;
+ }
+ } else {
+ long val;
+ PyObject *tmp = __Pyx_PyNumber_IntOrLong(x);
+ if (!tmp) return (long) -1;
+ val = __Pyx_PyInt_As_long(tmp);
+ Py_DECREF(tmp);
+ return val;
+ }
+raise_overflow:
+ PyErr_SetString(PyExc_OverflowError,
+ "value too large to convert to long");
+ return (long) -1;
+raise_neg_overflow:
+ PyErr_SetString(PyExc_OverflowError,
+ "can't convert negative value to long");
+ return (long) -1;
+}
+
+/* CIntFromPy */
+ static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *x) {
+ const int neg_one = (int) -1, const_zero = (int) 0;
+ const int is_unsigned = neg_one > const_zero;
+#if PY_MAJOR_VERSION < 3
+ if (likely(PyInt_Check(x))) {
+ if (sizeof(int) < sizeof(long)) {
+ __PYX_VERIFY_RETURN_INT(int, long, PyInt_AS_LONG(x))
+ } else {
+ long val = PyInt_AS_LONG(x);
+ if (is_unsigned && unlikely(val < 0)) {
+ goto raise_neg_overflow;
+ }
+ return (int) val;
+ }
+ } else
+#endif
+ if (likely(PyLong_Check(x))) {
+ if (is_unsigned) {
+#if CYTHON_USE_PYLONG_INTERNALS
+ const digit* digits = ((PyLongObject*)x)->ob_digit;
+ switch (Py_SIZE(x)) {
+ case 0: return (int) 0;
+ case 1: __PYX_VERIFY_RETURN_INT(int, digit, digits[0])
+ case 2:
+ if (8 * sizeof(int) > 1 * PyLong_SHIFT) {
+ if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) {
+ __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
+ } else if (8 * sizeof(int) >= 2 * PyLong_SHIFT) {
+ return (int) (((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0]));
+ }
+ }
+ break;
+ case 3:
+ if (8 * sizeof(int) > 2 * PyLong_SHIFT) {
+ if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) {
+ __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
+ } else if (8 * sizeof(int) >= 3 * PyLong_SHIFT) {
+ return (int) (((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]));
+ }
+ }
+ break;
+ case 4:
+ if (8 * sizeof(int) > 3 * PyLong_SHIFT) {
+ if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) {
+ __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
+ } else if (8 * sizeof(int) >= 4 * PyLong_SHIFT) {
+ return (int) (((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]));
+ }
+ }
+ break;
+ }
+#endif
+#if CYTHON_COMPILING_IN_CPYTHON
+ if (unlikely(Py_SIZE(x) < 0)) {
+ goto raise_neg_overflow;
+ }
+#else
+ {
+ int result = PyObject_RichCompareBool(x, Py_False, Py_LT);
+ if (unlikely(result < 0))
+ return (int) -1;
+ if (unlikely(result == 1))
+ goto raise_neg_overflow;
+ }
+#endif
+ if (sizeof(int) <= sizeof(unsigned long)) {
+ __PYX_VERIFY_RETURN_INT_EXC(int, unsigned long, PyLong_AsUnsignedLong(x))
+#ifdef HAVE_LONG_LONG
+ } else if (sizeof(int) <= sizeof(unsigned PY_LONG_LONG)) {
+ __PYX_VERIFY_RETURN_INT_EXC(int, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x))
+#endif
+ }
+ } else {
+#if CYTHON_USE_PYLONG_INTERNALS
+ const digit* digits = ((PyLongObject*)x)->ob_digit;
+ switch (Py_SIZE(x)) {
+ case 0: return (int) 0;
+ case -1: __PYX_VERIFY_RETURN_INT(int, sdigit, (sdigit) (-(sdigit)digits[0]))
+ case 1: __PYX_VERIFY_RETURN_INT(int, digit, +digits[0])
+ case -2:
+ if (8 * sizeof(int) - 1 > 1 * PyLong_SHIFT) {
+ if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) {
+ __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
+ } else if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) {
+ return (int) (((int)-1)*(((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0])));
+ }
+ }
+ break;
+ case 2:
+ if (8 * sizeof(int) > 1 * PyLong_SHIFT) {
+ if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) {
+ __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
+ } else if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) {
+ return (int) ((((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0])));
+ }
+ }
+ break;
+ case -3:
+ if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) {
+ if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) {
+ __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
+ } else if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) {
+ return (int) (((int)-1)*(((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])));
+ }
+ }
+ break;
+ case 3:
+ if (8 * sizeof(int) > 2 * PyLong_SHIFT) {
+ if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) {
+ __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
+ } else if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) {
+ return (int) ((((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])));
+ }
+ }
+ break;
+ case -4:
+ if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) {
+ if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) {
+ __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
+ } else if (8 * sizeof(int) - 1 > 4 * PyLong_SHIFT) {
+ return (int) (((int)-1)*(((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])));
+ }
+ }
+ break;
+ case 4:
+ if (8 * sizeof(int) > 3 * PyLong_SHIFT) {
+ if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) {
+ __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
+ } else if (8 * sizeof(int) - 1 > 4 * PyLong_SHIFT) {
+ return (int) ((((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])));
+ }
+ }
+ break;
+ }
+#endif
+ if (sizeof(int) <= sizeof(long)) {
+ __PYX_VERIFY_RETURN_INT_EXC(int, long, PyLong_AsLong(x))
+#ifdef HAVE_LONG_LONG
+ } else if (sizeof(int) <= sizeof(PY_LONG_LONG)) {
+ __PYX_VERIFY_RETURN_INT_EXC(int, PY_LONG_LONG, PyLong_AsLongLong(x))
+#endif
+ }
+ }
+ {
+#if CYTHON_COMPILING_IN_PYPY && !defined(_PyLong_AsByteArray)
+ PyErr_SetString(PyExc_RuntimeError,
+ "_PyLong_AsByteArray() not available in PyPy, cannot convert large numbers");
+#else
+ int val;
+ PyObject *v = __Pyx_PyNumber_IntOrLong(x);
+ #if PY_MAJOR_VERSION < 3
+ if (likely(v) && !PyLong_Check(v)) {
+ PyObject *tmp = v;
+ v = PyNumber_Long(tmp);
+ Py_DECREF(tmp);
+ }
+ #endif
+ if (likely(v)) {
+ int one = 1; int is_little = (int)*(unsigned char *)&one;
+ unsigned char *bytes = (unsigned char *)&val;
+ int ret = _PyLong_AsByteArray((PyLongObject *)v,
+ bytes, sizeof(val),
+ is_little, !is_unsigned);
+ Py_DECREF(v);
+ if (likely(!ret))
+ return val;
+ }
+#endif
+ return (int) -1;
+ }
+ } else {
+ int val;
+ PyObject *tmp = __Pyx_PyNumber_IntOrLong(x);
+ if (!tmp) return (int) -1;
+ val = __Pyx_PyInt_As_int(tmp);
+ Py_DECREF(tmp);
+ return val;
+ }
+raise_overflow:
+ PyErr_SetString(PyExc_OverflowError,
+ "value too large to convert to int");
+ return (int) -1;
+raise_neg_overflow:
+ PyErr_SetString(PyExc_OverflowError,
+ "can't convert negative value to int");
+ return (int) -1;
+}
+
+/* FastTypeChecks */
+ #if CYTHON_COMPILING_IN_CPYTHON
+static int __Pyx_InBases(PyTypeObject *a, PyTypeObject *b) {
+ while (a) {
+ a = a->tp_base;
+ if (a == b)
+ return 1;
+ }
+ return b == &PyBaseObject_Type;
+}
+static CYTHON_INLINE int __Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b) {
+ PyObject *mro;
+ if (a == b) return 1;
+ mro = a->tp_mro;
+ if (likely(mro)) {
+ Py_ssize_t i, n;
+ n = PyTuple_GET_SIZE(mro);
+ for (i = 0; i < n; i++) {
+ if (PyTuple_GET_ITEM(mro, i) == (PyObject *)b)
+ return 1;
+ }
+ return 0;
+ }
+ return __Pyx_InBases(a, b);
+}
+#if PY_MAJOR_VERSION == 2
+static int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject* exc_type2) {
+ PyObject *exception, *value, *tb;
+ int res;
+ __Pyx_PyThreadState_declare
+ __Pyx_PyThreadState_assign
+ __Pyx_ErrFetch(&exception, &value, &tb);
+ res = exc_type1 ? PyObject_IsSubclass(err, exc_type1) : 0;
+ if (unlikely(res == -1)) {
+ PyErr_WriteUnraisable(err);
+ res = 0;
+ }
+ if (!res) {
+ res = PyObject_IsSubclass(err, exc_type2);
+ if (unlikely(res == -1)) {
+ PyErr_WriteUnraisable(err);
+ res = 0;
+ }
+ }
+ __Pyx_ErrRestore(exception, value, tb);
+ return res;
+}
+#else
+static CYTHON_INLINE int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject *exc_type2) {
+ int res = exc_type1 ? __Pyx_IsSubtype((PyTypeObject*)err, (PyTypeObject*)exc_type1) : 0;
+ if (!res) {
+ res = __Pyx_IsSubtype((PyTypeObject*)err, (PyTypeObject*)exc_type2);
+ }
+ return res;
+}
+#endif
+static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches(PyObject *err, PyObject* exc_type) {
+ if (likely(err == exc_type)) return 1;
+ if (likely(PyExceptionClass_Check(err))) {
+ return __Pyx_inner_PyErr_GivenExceptionMatches2(err, NULL, exc_type);
+ }
+ return PyErr_GivenExceptionMatches(err, exc_type);
+}
+static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches2(PyObject *err, PyObject *exc_type1, PyObject *exc_type2) {
+ if (likely(err == exc_type1 || err == exc_type2)) return 1;
+ if (likely(PyExceptionClass_Check(err))) {
+ return __Pyx_inner_PyErr_GivenExceptionMatches2(err, exc_type1, exc_type2);
+ }
+ return (PyErr_GivenExceptionMatches(err, exc_type1) || PyErr_GivenExceptionMatches(err, exc_type2));
+}
+#endif
+
+/* CheckBinaryVersion */
+ static int __Pyx_check_binary_version(void) {
+ char ctversion[4], rtversion[4];
+ PyOS_snprintf(ctversion, 4, "%d.%d", PY_MAJOR_VERSION, PY_MINOR_VERSION);
+ PyOS_snprintf(rtversion, 4, "%s", Py_GetVersion());
+ if (ctversion[0] != rtversion[0] || ctversion[2] != rtversion[2]) {
+ char message[200];
+ PyOS_snprintf(message, sizeof(message),
+ "compiletime version %s of module '%.100s' "
+ "does not match runtime version %s",
+ ctversion, __Pyx_MODULE_NAME, rtversion);
+ return PyErr_WarnEx(NULL, message, 1);
+ }
+ return 0;
+}
+
+/* InitStrings */
+ static int __Pyx_InitStrings(__Pyx_StringTabEntry *t) {
+ while (t->p) {
+ #if PY_MAJOR_VERSION < 3
+ if (t->is_unicode) {
+ *t->p = PyUnicode_DecodeUTF8(t->s, t->n - 1, NULL);
+ } else if (t->intern) {
+ *t->p = PyString_InternFromString(t->s);
+ } else {
+ *t->p = PyString_FromStringAndSize(t->s, t->n - 1);
+ }
+ #else
+ if (t->is_unicode | t->is_str) {
+ if (t->intern) {
+ *t->p = PyUnicode_InternFromString(t->s);
+ } else if (t->encoding) {
+ *t->p = PyUnicode_Decode(t->s, t->n - 1, t->encoding, NULL);
+ } else {
+ *t->p = PyUnicode_FromStringAndSize(t->s, t->n - 1);
+ }
+ } else {
+ *t->p = PyBytes_FromStringAndSize(t->s, t->n - 1);
+ }
+ #endif
+ if (!*t->p)
+ return -1;
+ if (PyObject_Hash(*t->p) == -1)
+ return -1;
+ ++t;
+ }
+ return 0;
+}
+
+static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char* c_str) {
+ return __Pyx_PyUnicode_FromStringAndSize(c_str, (Py_ssize_t)strlen(c_str));
+}
+static CYTHON_INLINE const char* __Pyx_PyObject_AsString(PyObject* o) {
+ Py_ssize_t ignore;
+ return __Pyx_PyObject_AsStringAndSize(o, &ignore);
+}
+#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT
+#if !CYTHON_PEP393_ENABLED
+static const char* __Pyx_PyUnicode_AsStringAndSize(PyObject* o, Py_ssize_t *length) {
+ char* defenc_c;
+ PyObject* defenc = _PyUnicode_AsDefaultEncodedString(o, NULL);
+ if (!defenc) return NULL;
+ defenc_c = PyBytes_AS_STRING(defenc);
+#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII
+ {
+ char* end = defenc_c + PyBytes_GET_SIZE(defenc);
+ char* c;
+ for (c = defenc_c; c < end; c++) {
+ if ((unsigned char) (*c) >= 128) {
+ PyUnicode_AsASCIIString(o);
+ return NULL;
+ }
+ }
+ }
+#endif
+ *length = PyBytes_GET_SIZE(defenc);
+ return defenc_c;
+}
+#else
+static CYTHON_INLINE const char* __Pyx_PyUnicode_AsStringAndSize(PyObject* o, Py_ssize_t *length) {
+ if (unlikely(__Pyx_PyUnicode_READY(o) == -1)) return NULL;
+#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII
+ if (likely(PyUnicode_IS_ASCII(o))) {
+ *length = PyUnicode_GET_LENGTH(o);
+ return PyUnicode_AsUTF8(o);
+ } else {
+ PyUnicode_AsASCIIString(o);
+ return NULL;
+ }
+#else
+ return PyUnicode_AsUTF8AndSize(o, length);
+#endif
+}
+#endif
+#endif
+static CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject* o, Py_ssize_t *length) {
+#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT
+ if (
+#if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII
+ __Pyx_sys_getdefaultencoding_not_ascii &&
+#endif
+ PyUnicode_Check(o)) {
+ return __Pyx_PyUnicode_AsStringAndSize(o, length);
+ } else
+#endif
+#if (!CYTHON_COMPILING_IN_PYPY) || (defined(PyByteArray_AS_STRING) && defined(PyByteArray_GET_SIZE))
+ if (PyByteArray_Check(o)) {
+ *length = PyByteArray_GET_SIZE(o);
+ return PyByteArray_AS_STRING(o);
+ } else
+#endif
+ {
+ char* result;
+ int r = PyBytes_AsStringAndSize(o, &result, length);
+ if (unlikely(r < 0)) {
+ return NULL;
+ } else {
+ return result;
+ }
+ }
+}
+static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject* x) {
+ int is_true = x == Py_True;
+ if (is_true | (x == Py_False) | (x == Py_None)) return is_true;
+ else return PyObject_IsTrue(x);
+}
+static PyObject* __Pyx_PyNumber_IntOrLongWrongResultType(PyObject* result, const char* type_name) {
+#if PY_MAJOR_VERSION >= 3
+ if (PyLong_Check(result)) {
+ if (PyErr_WarnFormat(PyExc_DeprecationWarning, 1,
+ "__int__ returned non-int (type %.200s). "
+ "The ability to return an instance of a strict subclass of int "
+ "is deprecated, and may be removed in a future version of Python.",
+ Py_TYPE(result)->tp_name)) {
+ Py_DECREF(result);
+ return NULL;
+ }
+ return result;
+ }
+#endif
+ PyErr_Format(PyExc_TypeError,
+ "__%.4s__ returned non-%.4s (type %.200s)",
+ type_name, type_name, Py_TYPE(result)->tp_name);
+ Py_DECREF(result);
+ return NULL;
+}
+static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x) {
+#if CYTHON_USE_TYPE_SLOTS
+ PyNumberMethods *m;
+#endif
+ const char *name = NULL;
+ PyObject *res = NULL;
+#if PY_MAJOR_VERSION < 3
+ if (likely(PyInt_Check(x) || PyLong_Check(x)))
+#else
+ if (likely(PyLong_Check(x)))
+#endif
+ return __Pyx_NewRef(x);
+#if CYTHON_USE_TYPE_SLOTS
+ m = Py_TYPE(x)->tp_as_number;
+ #if PY_MAJOR_VERSION < 3
+ if (m && m->nb_int) {
+ name = "int";
+ res = m->nb_int(x);
+ }
+ else if (m && m->nb_long) {
+ name = "long";
+ res = m->nb_long(x);
+ }
+ #else
+ if (likely(m && m->nb_int)) {
+ name = "int";
+ res = m->nb_int(x);
+ }
+ #endif
+#else
+ if (!PyBytes_CheckExact(x) && !PyUnicode_CheckExact(x)) {
+ res = PyNumber_Int(x);
+ }
+#endif
+ if (likely(res)) {
+#if PY_MAJOR_VERSION < 3
+ if (unlikely(!PyInt_Check(res) && !PyLong_Check(res))) {
+#else
+ if (unlikely(!PyLong_CheckExact(res))) {
+#endif
+ return __Pyx_PyNumber_IntOrLongWrongResultType(res, name);
+ }
+ }
+ else if (!PyErr_Occurred()) {
+ PyErr_SetString(PyExc_TypeError,
+ "an integer is required");
+ }
+ return res;
+}
+/* Convert an index-like Python object to Py_ssize_t.
+ * Fast paths: exact int (Py2) and exact long objects, including a
+ * digit-level path when CPython's PyLong internals are available;
+ * anything else goes through PyNumber_Index().  Returns -1 with an
+ * exception set on failure (callers check PyErr_Occurred() to
+ * disambiguate a legitimate -1 result). */
+static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject* b) {
+  Py_ssize_t ival;
+  PyObject *x;
+#if PY_MAJOR_VERSION < 3
+  if (likely(PyInt_CheckExact(b))) {
+    if (sizeof(Py_ssize_t) >= sizeof(long))
+      return PyInt_AS_LONG(b);
+    else
+      /* BUG FIX: was PyInt_AsSsize_t(x) -- 'x' is uninitialized at this
+       * point; the object being converted is 'b' (this matches the
+       * upstream Cython utility code this file was generated from). */
+      return PyInt_AsSsize_t(b);
+  }
+#endif
+  if (likely(PyLong_CheckExact(b))) {
+  #if CYTHON_USE_PYLONG_INTERNALS
+    const digit* digits = ((PyLongObject*)b)->ob_digit;
+    const Py_ssize_t size = Py_SIZE(b);
+    /* Common case: value fits in at most one digit. */
+    if (likely(__Pyx_sst_abs(size) <= 1)) {
+      ival = likely(size) ? digits[0] : 0;
+      if (size == -1) ival = -ival;
+      return ival;
+    } else {
+      /* 2-4 digit values: assemble inline when Py_ssize_t is wide enough;
+       * otherwise fall through to PyLong_AsSsize_t() below. */
+      switch (size) {
+        case 2:
+          if (8 * sizeof(Py_ssize_t) > 2 * PyLong_SHIFT) {
+            return (Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0]));
+          }
+          break;
+        case -2:
+          if (8 * sizeof(Py_ssize_t) > 2 * PyLong_SHIFT) {
+            return -(Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0]));
+          }
+          break;
+        case 3:
+          if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) {
+            return (Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0]));
+          }
+          break;
+        case -3:
+          if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) {
+            return -(Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0]));
+          }
+          break;
+        case 4:
+          if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) {
+            return (Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0]));
+          }
+          break;
+        case -4:
+          if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) {
+            return -(Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0]));
+          }
+          break;
+      }
+    }
+  #endif
+    return PyLong_AsSsize_t(b);
+  }
+  /* Generic path: ask the object for its __index__ value. */
+  x = PyNumber_Index(b);
+  if (!x) return -1;
+  ival = PyInt_AsSsize_t(x);
+  Py_DECREF(x);
+  return ival;
+}
+static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t ival) {
+ return PyInt_FromSize_t(ival);
+}
+
+
+#endif /* Py_PYTHON_H */
diff --git a/diagnostics/etc_composites/util/tracker/rhumb_line_nav_v4.pyx b/diagnostics/etc_composites/util/tracker/rhumb_line_nav_v4.pyx
new file mode 100644
index 000000000..821a614cf
--- /dev/null
+++ b/diagnostics/etc_composites/util/tracker/rhumb_line_nav_v4.pyx
@@ -0,0 +1,183 @@
+# use python2.5 setup_rhumb_line_nav.py build_ext --inplace
+cdef extern from "math.h":
+ double sin(double x)
+ double cos(double x)
+ double atan2(double y, double x)
+ double log(double x)
+ double tan(double x)
+ double sqrt(double x)
+ double pow(double x, int y)
+
+def rhumb_line_nav(double lon2, double lat2, double lon1, double lat1, distance=False):
+    """
+    The true course between the points (lat1,lon1), (lat2,lon2)
+    is given by Rhumb Line Navigation.
+
+    true_course = mod(atan2(lon1-lon2,
+                 log(tan(lat2/2+pi/4)/tan(lat1/2+pi/4))),2*pi)
+    Angle is measured clockwise from north 0 degrees, east 90 degrees etc.
+
+    Returns the bearing in degrees [0, 360); when `distance` is true,
+    returns a (bearing, distance_km) tuple instead.
+
+    NOTES: Rhumb lines follow a spiral on the globe and are least similar
+    (i.e., longer) when the two ends are co-latitude and most similar when
+    the two ends are co-longitude. Rhumb lines spiral very tightly at high
+    latitudes which can result in comparatively long distances between
+    points. Rhumb lines need special treatment if the course crosses the
+    dateline.
+    """
+
+    # All four coordinates are now typed as C doubles for consistency --
+    # the original typed only lon2, which looks accidental.  The unused
+    # 'de' was dropped from the cdef list below.
+    cdef double lon1r, lat1r, lon2r, lat2r, dphi, dlon_w, dlon_e, d, q, TOL, bearing
+
+    TOL = 1e-15  # small number to avoid 0/0 indeterminacies on E-W courses.
+    d = 0.0
+    q = 0.0
+
+    # Ensure that longitude is in +-180 form
+    if lon1 > 180.0:
+        lon1 = lon1 - 360.0
+    if lon2 > 180.0:
+        lon2 = lon2 - 360.0
+
+    # Degrees -> radians (pi/180 = 0.0174532925).
+    lon1r = lon1*0.0174532925
+    lat1r = lat1*0.0174532925
+    lon2r = lon2*0.0174532925
+    lat2r = lat2*0.0174532925
+
+    # Westward and eastward longitude differences wrapped into [0, 2*pi).
+    # Python's % (not C fmod) is used deliberately: the original author
+    # noted fmod gave the wrong answer for dlon_w (fmod keeps the sign of
+    # the dividend; Python % keeps the sign of the divisor).
+    tmp = lon1r-lon2r
+    dlon_w = tmp%6.28318531
+    tmp = lon2r-lon1r
+    dlon_e = tmp%6.28318531
+    # Meridional-part difference (stretched Mercator latitude);
+    # pi/4 = 0.7853981625.
+    dphi = log(tan(lat2r*0.5 + 0.7853981625) /
+               tan(lat1r*0.5 + 0.7853981625))
+
+    if distance:
+        if abs(lat2r-lat1r) < sqrt(TOL):
+            # E-W course: dphi ~ 0, so use cos(lat) directly.
+            q = cos(lat1r)
+        else:
+            q = (lat2r-lat1r)/dphi
+
+    if dlon_w < dlon_e:
+        # Westerly rhumb line is the shortest
+        tmp = atan2(-1.0*dlon_w, dphi)
+        bearing = tmp%6.28318531
+        if distance:
+            d = sqrt(pow(q,2)*pow(dlon_w,2) + pow((lat2r-lat1r),2))
+    else:
+        tmp = atan2(dlon_e, dphi)
+        bearing = tmp%6.28318531
+        if distance:
+            d = sqrt(pow(q,2)*pow(dlon_e,2) + pow((lat2r-lat1r),2))
+
+    # Normalize into [0, 2*pi), then convert to degrees (180/pi).
+    tmp = bearing+6.28318531
+    bearing = tmp%6.28318531
+    bearing = bearing * 57.2957795
+
+    if distance:
+        # convert from distance in radians to nautical miles, then to km
+        d = d*((180.0*60.0)/3.14159265)
+        d = d*1.852
+        return bearing, d
+    else:
+        return bearing
+
+
+# tests
+## lon1 = 0.0
+## lat1 = 0.0
+## lon2 = -100.0
+## lat2 = 0.0
+## print "lon,lat",lon1,lat1,"->",lon2,lat2
+## print "bearing",rln(lon2,lat2,lon1,lat1,True)
+## print ""
+
+## lon1 = 0.0
+## lat1 = 0.0
+## lon2 = 100.0
+## lat2 = 0.0
+## print "lon,lat",lon1,lat1,"->",lon2,lat2
+## print "bearing",rln(lon2,lat2,lon1,lat1,True)
+## print ""
+
+## lon1 = 0.0
+## lat1 = 0.0
+## lon2 = 0.0
+## lat2 = 80.0
+## print "lon,lat",lon1,lat1,"->",lon2,lat2
+## print "bearing",rln(lon2,lat2,lon1,lat1,True)
+## print ""
+
+## lon1 = 0.0
+## lat1 = 0.0
+## lon2 = 0.0
+## lat2 = -80.0
+## print "lon,lat",lon1,lat1,"->",lon2,lat2
+## print "bearing",rln(lon2,lat2,lon1,lat1,True)
+## print ""
+
+## lon1 = 0.0
+## lat1 = 0.0
+## lon2 = 179.9
+## lat2 = 0.0
+## print "lon,lat",lon1,lat1,"->",lon2,lat2
+## print "bearing",rln(lon2,lat2,lon1,lat1,True)
+## print ""
+
+## lon1 = 0.0
+## lat1 = 0.0
+## lon2 = -179.9
+## lat2 = 0.0
+## print "lon,lat",lon1,lat1,"->",lon2,lat2
+## print "bearing",rln(lon2,lat2,lon1,lat1,True)
+## print ""
+
+## lon1 = 0.0
+## lat1 = -90.0
+## lon2 = 0.0
+## lat2 = 90.0
+## print "lon,lat",lon1,lat1,"->",lon2,lat2
+## print "bearing",rln(lon2,lat2,lon1,lat1,True)
+## print ""
+
+## lon1 = 0.0
+## lat1 = -40.0
+## lon2 = -145.0+360
+## lat2 = -45.0
+## print "lon,lat",lon1,lat1,"->",lon2,lat2
+## print "bearing",rln(lon2,lat2,lon1,lat1,True)
+## print ""
+
+## ## Suppose point 1 is LAX: (33deg 57min N, 118deg 24min W)
+## ## Suppose point 2 is JFK: (40deg 38min N, 73deg 47min W)
+
+## lat1 = 0.592539*57.2957795
+## lon1 = -2.066470*57.2957795
+## lat2 = 0.709185*57.2957795
+## lon2 = -1.287762*57.2957795
+## print "lon,lat",lon1,lat1,"->",lon2,lat2
+## fnc = []
+## fnc = rln(lon2,lat2,lon1,lat1,True)
+## print fnc
+## print "bearing",round(fnc[0],2),round(fnc[1])
+## # tc 79.32 degrees
+## # 2164.6 nm
diff --git a/diagnostics/etc_composites/util/tracker/run_track_stats.py.bak b/diagnostics/etc_composites/util/tracker/run_track_stats.py.bak
new file mode 100644
index 000000000..e6305695c
--- /dev/null
+++ b/diagnostics/etc_composites/util/tracker/run_track_stats.py.bak
@@ -0,0 +1,259 @@
+#!/usr/bin/env python
+
+# suppressing warnings, because there are a lot of NaN value warnings
+# comment out the lines below when debugging
+# only suppress in production
+import sys
+if not sys.warnoptions:
+ import warnings
+ warnings.simplefilter("ignore")
+
+import numpy as np
+import scipy.io as sio
+import cartopy
+import matplotlib.ticker as mticker
+import matplotlib.pyplot as plt
+from cartopy.mpl.gridliner import LONGITUDE_FORMATTER, LATITUDE_FORMATTER
+import defines
+import os
+
+def hist_2d(lon, lat, val=None, bins=None):
+ '''
+ Given lat/lon values, we create a 2d histogram global map.
+ '''
+
+ if (bins is None):
+ # creating my bins
+ lat_div = 5.
+ lon_div = 5.
+ bins = (np.arange(-180, 180+lon_div, lon_div), np.arange(-90, 90+lat_div, lat_div))
+
+ # convert lat and lon into 2d array
+ lat = np.array(lat).flatten()
+ lon = np.array(lon).flatten()
+ lon[lon >= 180.] -= 360.
+
+ # make sure the lens equal each other
+ assert(len(lat) == len(lon))
+ if (val is not None):
+ val = np.array(val).flatten()
+ assert(len(lon) == len(val))
+
+ # bins for the latitude and longitude
+ lon_bins = bins[0]
+ lat_bins = bins[1]
+ lon_mids = lon_bins[:-1] + (lon_bins[1] - lon_bins[0])/2.
+ lat_mids = lat_bins[:-1] + (lat_bins[1] - lat_bins[0])/2.
+
+ H_cnts, x, y = np.histogram2d(lon, lat, bins=bins)
+ if (val is None):
+ H_sums = H_cnts
+ else:
+ H_sums, x, y = np.histogram2d(lon, lat, bins=bins, weights=val)
+
+ return {'cnts': H_cnts.T, 'sums': H_sums.T, 'lon': lon_mids, 'lat': lat_mids}
+
+def global_map(ax=None):
+ '''Create a global map for plotting the figures.'''
+ if (ax is None):
+ plt.style.use('seaborn-talk')
+ ax = plt.axes(projection=cartopy.crs.PlateCarree())
+ else:
+ ax.coastlines(lw=1.)
+ ax.set_extent([-180, 180, -90, 90])
+ gl = ax.gridlines(crs=cartopy.crs.PlateCarree(), draw_labels=True, lw=2., color='gray', alpha=0.5, linestyle='--')
+ gl.xlabels_top = False
+ gl.ylabels_right = False
+ gl.xlocator = mticker.FixedLocator([-180, -90, 0, 90, 180])
+ gl.ylocator = mticker.FixedLocator([-90, -45, 0, 45, 90])
+ gl.xformatter = LONGITUDE_FORMATTER
+ gl.yformatter = LATITUDE_FORMATTER
+
+ return ax
+
+def read_tracks(year):
+ '''Reading in tracks for a given year'''
+ in_file = os.path.join(defines.read_folder, f'{defines.model}_{year}.mat')
+ tracks = sio.loadmat(in_file)
+ return tracks['cyc'][0]
+
+def plot_2d(ax, x, y, z):
+ cf = ax.contourf(x, y, z)
+ cf = ax.colorbar(cax=cax)
+
+def get_data(tracks):
+
+ g_lat = []
+ g_lon = []
+ g_slp = []
+ l_lat = []
+ l_lon = []
+ l_slp = []
+ lat = []
+ lon = []
+ slp = []
+ for track in tracks:
+
+ # lysiss
+ l_lat.append(track['fulllat'][0][0])
+ l_lon.append(track['fulllon'][0][0])
+ l_slp.append(track['fullslp'][0][0])
+
+ # genesis
+ g_lat.append(track['fulllat'][0][-1])
+ g_lon.append(track['fulllon'][0][-1])
+ g_slp.append(track['fullslp'][0][-1])
+
+ # all
+ lat.extend(track['fulllat'][0].tolist())
+ lon.extend(track['fulllon'][0].tolist())
+ slp.extend(track['fullslp'][0].tolist())
+
+ return {'genesis': {'lat': g_lat, 'lon': g_lon, 'slp': g_slp}, \
+ 'lysis': {'lat': l_lat, 'lon': l_lon, 'slp': l_slp}, \
+ 'all': {'lat': lat, 'lon': lon, 'slp': slp}}
+
+def track_density_2d(lon, lat, ax=None):
+ H = hist_2d(lon, lat)
+ if (ax is not None):
+ # levels=np.arange(0, 0.004, 0.0001)
+ levels=10 # cuz I don't know the range of the colorbar
+ cf = ax.contourf(H['lon'], H['lat'], H['cnts']/np.sum(H['cnts']), cmap='jet', levels=levels, extend='max')
+ cb = plt.colorbar(cf, ax=ax, shrink=0.5, extend='max')
+ return H
+
+def track_feature_density_2d(lon, lat, ax=None):
+ H = hist_2d(lon, lat)
+ if (ax is not None):
+ levels=np.arange(0, 0.004, 0.0001)
+ cf = ax.contourf(H['lon'], H['lat'], H['cnts']/np.sum(H['cnts']), cmap='jet', levels=levels, extend='max')
+ cb = plt.colorbar(cf, ax=ax, shrink=0.5, extend='max')
+ return H
+
+############### main test code #################
+
+# check if mat file exists, if not run the mat file creator code
+mat_file = os.path.join(defines.read_folder, f'{defines.model}_{defines.over_write_years[0]}.mat')
+if (not os.path.exists(mat_file)):
+ os.system('python3 main_create_dict.py')
+
+# data = {'genesis': {'lat': [], 'lon': [], 'slp': []}, \
+# 'lysis': {'lat': [], 'lon': [], 'slp': []}, \
+# 'all': {'lat': [], 'lon': [], 'slp': []}}
+# # loop through all the years
+# for year in range(defines.over_write_years[0], defines.over_write_years[1]+1):
+# tracks = read_tracks(year)
+# tmp = get_data(tracks)
+# for key in data.keys():
+# for inner_key in data[key].keys():
+# data[key][inner_key].extend(tmp[key][inner_key])
+
+# -- new statistic
+# loop through all the years
+# this part of the code is where I have to keep adding to the histogram
+# because now we only have to count one occurence per grid, not all occurences
+
+# Defining the bins
+lat_div = 5.
+lon_div = 5.
+bins = (np.arange(-180, 180+lon_div, lon_div), np.arange(-90, 90+lat_div, lat_div))
+lon_mids = bins[0][:-1] + (bins[0][1] - bins[0][0])/2.
+lat_mids = bins[1][:-1] + (bins[1][1] - bins[1][0])/2.
+
+# initializing dict that I need
+init_shape = (len(lat_mids), len(lon_mids))
+stats = {}
+for stat_type in ['all', 'genesis', 'lysis']:
+ stats[stat_type] = {}
+ if (stat_type == 'all'):
+ stats[stat_type]['feature_density'] = np.zeros(init_shape)
+ stats[stat_type]['track_density'] = np.zeros(init_shape)
+ else:
+ stats[stat_type] = np.zeros(init_shape)
+
+g_lon = []
+g_lat = []
+l_lon = []
+l_lat = []
+# loop through all the years and save the tracks
+for year in range(defines.over_write_years[0], defines.over_write_years[1]+1):
+ tracks = read_tracks(year)
+ for track in tracks:
+ lon = np.squeeze(track['fulllon'])
+ lat = np.squeeze(track['fulllat'])
+
+ # considering only lat cases between -60 and 60
+ ind = (np.abs(lat) < 60)
+ if (not np.any(ind)):
+ continue
+ lon = lon[ind]
+ lat = lat[ind]
+
+ l_lon.append(lon[-1])
+ l_lat.append(lat[-1])
+ g_lon.append(lon[0])
+ g_lat.append(lat[0])
+
+ # feature density
+ H = hist_2d(lon, lat, bins=bins)
+
+ # feature density - count all occurences
+ stats['all']['feature_density'] += H['cnts']
+ stats['all']['track_density'] += np.double(H['cnts'] > 0)
+
+# lysis
+H = hist_2d(l_lon, l_lat)
+stats['lysis'] = H['cnts']
+
+# genesis
+H = hist_2d(g_lon, g_lat)
+stats['genesis'] = H['cnts']
+
+# normalizing all the global histograms
+stats['genesis'] /= np.nansum(stats['genesis'])
+stats['lysis'] /= np.nansum(stats['lysis'])
+stats['all']['feature_density'] /= np.nansum(stats['all']['feature_density'])
+stats['all']['track_density'] /= np.nansum(stats['all']['track_density'])
+
+# Creating the necessary plots
+# track density
+plt.close('all')
+
+out_file = os.path.join(defines.images_folder, f'{defines.model}_{defines.over_write_years[0]}_{defines.over_write_years[1]}_track_stats.png')
+cmap = 'jet'
+
+# creating the 2x2 plot
+fig, axes = plt.subplots(ncols=2, nrows=2, subplot_kw={'projection': cartopy.crs.PlateCarree()}, figsize=(12,8))
+
+ax = global_map(axes[0, 0])
+levels = np.linspace(0, 0.0025, 10)
+levels = np.linspace(0, 0.005, 10)
+ax.set_title(f'Feature Density')
+cf = ax.contourf(lon_mids, lat_mids, stats['all']['feature_density'], cmap=cmap, extend='max', levels=levels)
+plt.colorbar(cf, ax=ax, shrink=0.7)
+
+ax = global_map(axes[0, 1])
+ax.set_title(f'Track Density')
+levels = np.linspace(0, 0.0025, 10)
+levels = np.linspace(0, 0.005, 10)
+cf = ax.contourf(lon_mids, lat_mids, stats['all']['track_density'], cmap=cmap, extend='max', levels=levels)
+plt.colorbar(cf, ax=ax, shrink=0.7)
+
+ax = global_map(axes[1, 0])
+ax.set_title(f'Genesis')
+levels = np.linspace(0, 0.0025, 10)
+levels = 20
+cf = ax.contourf(lon_mids, lat_mids, stats['genesis'], cmap=cmap, extend='max', levels=levels)
+plt.colorbar(cf, ax=ax, shrink=0.7)
+
+ax = global_map(axes[1, 1])
+ax.set_title(f'Lysis')
+levels = np.linspace(0, 0.0025, 10)
+levels = 20
+cf = ax.contourf(lon_mids, lat_mids, stats['lysis'], cmap=cmap, extend='max', levels=levels)
+plt.colorbar(cf, ax=ax, shrink=0.7)
+
+plt.suptitle(f'{defines.model.upper()} ({defines.over_write_years[0]} - {defines.over_write_years[1]})')
+plt.tight_layout()
+plt.savefig(out_file, dpi=300.)
+plt.close('all')
diff --git a/diagnostics/etc_composites/util/tracker/run_tracker.py.bak b/diagnostics/etc_composites/util/tracker/run_tracker.py.bak
new file mode 100755
index 000000000..5d9a68aa6
--- /dev/null
+++ b/diagnostics/etc_composites/util/tracker/run_tracker.py.bak
@@ -0,0 +1,91 @@
+#!/usr/bin/env python
+################################# INSTRUCTIONS ##########################
+# Edit the defines.py with the folder and information
+# then run this code run_tracker.py
+
+import os
+import defines
+
+def init_setup():
+ '''
+ Creates the necessary directories and copies over the slp data into the folders
+ '''
+
+ # Create main folder specified in defines
+ if not os.path.exists(defines.main_folder):
+ print ("Making Directory!")
+ os.makedirs(defines.main_folder)
+ os.makedirs(defines.code_folder)
+ os.makedirs(defines.out_folder)
+ os.makedirs(defines.out_files_folder)
+ os.makedirs(defines.slp_folder)
+ os.makedirs(defines.read_folder)
+ os.makedirs(defines.images_folder)
+ print ("Completed making directories...")
+ else:
+ print ("Folder already exists!")
+
+ if not defines.slp_data_directory:
+ print ("SLP source directory not defined, copy slp data into the data folder!")
+ elif (defines.hard_copy):
+ sys_cmd = 'rsync %sslp*.nc %s'%(defines.slp_data_directory, defines.slp_folder)
+ os.system(sys_cmd)
+ print ("Loaded slp data files into the data folder...")
+ else:
+ for root, dirs, files in os.walk(defines.slp_data_directory):
+ for fn in files:
+ if (fn.endswith('.nc') & fn.startswith('slp')):
+ full_file = os.path.join(root, fn)
+ link_file = os.path.join(defines.slp_folder, fn)
+ sys_cmd = "ln -s %s %s"%(full_file, link_file)
+ os.system(sys_cmd)
+ print ("Soft linked slp data files into the data folder...")
+
+ # cd'ing into the CODE folder
+ os.system('cd %s'%(defines.code_folder))
+ print ("Cd'ing into the code folder...")
+
+def copy_code_over():
+ '''
+ Function to copy code over from the specified locations to the locations needed by the tracker.
+ '''
+ print ("Copying files over...")
+ sys_cmd = 'rsync -r --exclude ".git*" --exclude "*.mat" --exclude "*.nc" %s/ %s'%(os.path.join(defines.source_code_folder, 'tracker'), defines.code_folder)
+ os.system(sys_cmd)
+ if (defines.hard_copy):
+ sys_cmd = 'rsync --progress %s %s'%(defines.topo_file, os.path.join(defines.out_files_folder, '%s_hgt.nc'%(defines.model)))
+ os.system(sys_cmd)
+ print ("Copied code and topography file...")
+ else:
+ sys_cmd = 'ln -s %s %s'%(defines.topo_file, os.path.join(defines.out_files_folder, '%s_hgt.nc'%(defines.model)))
+ os.system(sys_cmd)
+ print ("Copied code and soft linked topography file...")
+
+
+
+################## MAIN CODE #################
+
+# Initially create the folders
+# then copy the codes over
+init_setup()
+copy_code_over()
+
+os.chdir(defines.code_folder)
+print ("Curently in folder: ", os.getcwd())
+
+####### running the code to track ###########
+os.system('python3 setup_v4.py')
+os.system('python3 center_finder_v4.py')
+os.system('python3 track_finder_v4.py')
+os.system('python3 read_mcms_v4.py template_temp_multi_1.py')
+os.system('python3 read_mcms_v4.py template_temp_multi_2.py')
+# if (defines.create_matlab_dictionaries):
+# os.system('python3 main_create_dict.py')
+
+
+####todo list
+# 1) lat and lon adjust and time
+# create a python dictionary for the identified tracks
+# and grab the necessary data for the tracks
+# move read folder to a backup with timestamp
+# and create the new folder
diff --git a/diagnostics/etc_composites/util/tracker/save_netcdf_v4.py b/diagnostics/etc_composites/util/tracker/save_netcdf_v4.py
new file mode 100755
index 000000000..f7a19a076
--- /dev/null
+++ b/diagnostics/etc_composites/util/tracker/save_netcdf_v4.py
@@ -0,0 +1,186 @@
+# import netcdftime # Jeyavinoth removed netcdftime
+import netCDF4 as NetCDF
+import jj_calendar as jjCal
+
+class Save_NetCDF:
+ """Save to basic NetCDF File"""
+
+ def __init__(self,*args,**kwargs):
+
+ self.z = args[0]
+ self.lons = args[1]
+ self.lats = args[2]
+ self.cdf_file = args[3]
+ self.missing = args[4]
+
+ if 'name' in kwargs:
+ self.zname = kwargs['name']
+ else:
+ self.zname = 'comp'
+
+ if 'long_name' in kwargs:
+ self.zlong_name= kwargs['long_name']
+ else:
+ self.zlong_name = ''
+
+ if 'units' in kwargs:
+ self.zunits= kwargs['units']
+ else:
+ self.zunits = ''
+
+ # # Calendar Date Stuff
+ # the_calendar = 'standard'
+ # unit_string = 'hours since 1-1-1 00:00:0.0'
+ # cdftime = netcdftime.utime('hours since 0001-01-01 00:00:00',calendar=the_calendar)
+ #
+ # times = []
+ # for year in range(1990,1991):
+ # d = datetime.datetime(year,1,1,0)
+ # t = cdftime.date2num(d)
+ # times.append(t)
+ #
+ # Create the file
+
+ rootgrp = NetCDF.Dataset(self.cdf_file, 'w', format='NETCDF3_CLASSIC')
+
+ # Create dimensions:
+ rootgrp.createDimension('lon', len(self.lons))
+ rootgrp.createDimension('lat', len(self.lats))
+
+ # Create Variables:
+ lat = rootgrp.createVariable('lat','f4',('lat',))
+ lon = rootgrp.createVariable('lon','f4',('lon',))
+ thedata = rootgrp.createVariable(self.zname,'f4',('lat','lon',))
+
+ # Attributes:
+ lat.units = 'degrees north'
+ lat.actual_range = [self.lats[-1],self.lats[0]]
+ lat.long_name = "Latitude"
+
+ lon.units = 'degrees east'
+ lon.actual_range = [self.lons[-1],self.lons[0]]
+ lon.long_name = "Longitude"
+
+ if self.zunits:
+ thedata.units = self.zunits
+ if self.zlong_name:
+ thedata.long_name = self.zlong_name
+
+ # missing_value:
+ thedata.missing_value = self.missing
+
+ # Populate lats
+ lat[:] = self.lats
+
+ # Populate lons
+ lon[:] = self.lons
+
+ # Populate data
+ thedata[:] = self.z
+
+ # Write to file
+ rootgrp.close()
+
+class Save_NetCDF_TimeSeries:
+ """Save to basic NetCDF File with multiple time steps"""
+
+ def __init__(self,*args,**kwargs):
+
+ self.z = args[0]
+ self.lons = args[1]
+ self.lats = args[2]
+ self.times = args[3]
+ self.cdf_file = args[4]
+
+ # # Jeyavinoth: here I am removing till "Jeyavinoth: End"
+ # # I try and get rid of any netcdftime dependencies
+ # # so I use my jj_calendar.py code here to get datetimes
+ # # What is needed is the self.the_time, which is datetime format given the times
+ # # This code doesn't look like it is getting used anywhere, but not sure
+ # # Calendar Date Stuff
+ # the_calendar = 'standard'
+ # unit_string = 'hours since 1-1-1 00:00:0.0'
+ # #cdftime = netcdftime.utime('hours since 0001-01-01 00:00:00',calendar=the_calendar)
+ # cdftime = netcdftime.utime('hours since 1800-1-1 00:00:0.0',calendar=the_calendar)
+ # self.the_times = []
+ # for dt in self.times:
+ # year = int(dt[:4])
+ # month = int(dt[4:6])
+ # day = int(dt[6:8])
+ # hour = int(dt[8:10])
+ # d = datetime.datetime(year,month,day,hour)
+ # t = cdftime.date2num(d)
+ # self.the_times.append(t)
+ # # Jeyavinoth:End
+
+ # above commented out code is replaced by the code below
+ the_calendar = 'standard'
+ unit_string = 'hours since 1800-1-1 00:00:0.0'
+ self.the_times, _, _ = jjCal.get_time_info(unit_string, self.times, calendar=the_calendar)
+
+ # Create the file
+ rootgrp = NetCDF.Dataset(self.cdf_file, 'w', format='NETCDF3_CLASSIC')
+
+ # Create dimensions:
+ rootgrp.createDimension('lon', len(self.lons))
+ rootgrp.createDimension('lat', len(self.lats))
+ rootgrp.createDimension('time', None)
+
+ # Create Variables:
+ lat = rootgrp.createVariable('lat','f4',('lat',))
+ lon = rootgrp.createVariable('lon','f4',('lon',))
+ times = rootgrp.createVariable('time','f8',('time',))
+ thedata = rootgrp.createVariable('comp','f4',('time','lat','lon',))
+
+ # Attributes:
+ lat.units = 'degrees north'
+ lat.actual_range = [self.lats[-1],self.lats[0]]
+ lat.long_name = "Latitude"
+
+ lon.units = 'degrees east'
+ lon.actual_range = [self.lons[-1],self.lons[0]]
+ lon.long_name = "Longitude"
+
+ times.units = 'hours since 1800-1-1 00:00:0.0'
+ times.calendar = the_calendar
+ times.long_name = "Time"
+ times.delta_t = "0000-00-00 06:00:00"
+ times.standard_name = "time";
+ times.axis = "t";
+ times.coordinate_defines = "point";
+ times._CoordinateAxisType = "Time";
+ times.actual_range = self.the_times[0],self.the_times[-1];
+
+ # Populate lats
+ lat[:] = self.lats
+
+ # Populate lons
+ lon[:] = self.lons
+
+ # Populate times
+ times[:] = self.the_times
+
+ # Populate data
+ thedata[:] = self.z
+
+ # Write to file
+ rootgrp.close()
+
+class Read_NetCDF:
+
+ def __init__(self,*args,**kwargs):
+ cdf_file = args[0]
+
+ # Open for read
+ rootgrp = NetCDF.Dataset(cdf_file, 'r', format='NETCDF3_CLASSIC')
+
+ #print rootgrp.dimensions
+ #print rootgrp.variables
+
+ # Extract data
+ self.lats = rootgrp.variables["lat"][:]
+ self.lons = rootgrp.variables["lon"][:]
+ self.thedata = rootgrp.variables["comp"][:]
+
+ # Close file
+ rootgrp.close()
diff --git a/diagnostics/etc_composites/util/tracker/setup_g2l_v4.py b/diagnostics/etc_composites/util/tracker/setup_g2l_v4.py
new file mode 100755
index 000000000..e6907026f
--- /dev/null
+++ b/diagnostics/etc_composites/util/tracker/setup_g2l_v4.py
@@ -0,0 +1,12 @@
+
+# use python setup_g2l_v4.py build_ext --inplace
+from distutils.core import setup
+from distutils.extension import Extension
+from Cython.Build import cythonize
+#from Pyrex.Distutils import build_ext
+
+setup(
+ name = 'g2l_v4',
+ # ext_modules=[Extension("g2l_v4",["g2l_v4.pyx"]),],cmdclass={'build_ext': build_ext}
+ ext_modules=cythonize([Extension("g2l_v4",["g2l_v4.pyx"]),]),
+)
diff --git a/diagnostics/etc_composites/util/tracker/setup_gcd_v4.py b/diagnostics/etc_composites/util/tracker/setup_gcd_v4.py
new file mode 100755
index 000000000..b8ef13694
--- /dev/null
+++ b/diagnostics/etc_composites/util/tracker/setup_gcd_v4.py
@@ -0,0 +1,12 @@
+
+# use python setup_gcd_v4.py build_ext --inplace
+from distutils.core import setup
+from distutils.extension import Extension
+from Cython.Build import cythonize
+# from Pyrex.Distutils import build_ext
+
+setup(
+ name = 'gcd_v4',
+ # ext_modules=[Extension("gcd_v4",["gcd_v4.pyx"]),],cmdclass={'build_ext': build_ext}
+ ext_modules=cythonize([Extension("gcd_v4",["gcd_v4.pyx"]),]),
+)
diff --git a/diagnostics/etc_composites/util/tracker/setup_rhumb_line_nav_v4.py b/diagnostics/etc_composites/util/tracker/setup_rhumb_line_nav_v4.py
new file mode 100755
index 000000000..26d5a2c4a
--- /dev/null
+++ b/diagnostics/etc_composites/util/tracker/setup_rhumb_line_nav_v4.py
@@ -0,0 +1,12 @@
+
+# use python setup_rhumb_line_nav_v4.py build_ext --inplace
+from distutils.core import setup
+from distutils.extension import Extension
+from Cython.Build import cythonize
+# from Pyrex.Distutils import build_ext
+
+setup(
+ name = 'rhumb_line_nav_v4',
+ # ext_modules=[Extension("rhumb_line_nav_v4",["rhumb_line_nav_v4.pyx"]),],cmdclass={'build_ext': build_ext}
+ ext_modules=cythonize([Extension("rhumb_line_nav_v4",["rhumb_line_nav_v4.pyx"]),]),
+)
diff --git a/diagnostics/etc_composites/util/tracker/setup_v4.py b/diagnostics/etc_composites/util/tracker/setup_v4.py
new file mode 100755
index 000000000..dc6da7882
--- /dev/null
+++ b/diagnostics/etc_composites/util/tracker/setup_v4.py
@@ -0,0 +1,1751 @@
+"""This module runs all the setup routines to pre-calculate things for
+the MCMS project.
+
+#!/usr/bin/env python -tt
+Options/Arguments:
+ defs_set -- directory of options.
+ imports -- list of modules to import.
+
+Returns/Creates:
+
+Examples:
+
+Notes:
+
+Author: Mike Bauer
+
+Log:
+ 2009/04 MB - File created.
+"""
+
+import sys,os
+import math, numpy
+import defines
+import jj_calendar as jjCal
+
+def setup_center_finder(defs,gcd,g2l,ij2grid,grid2ij,defs_grid):
+ """Setup stuff for center_finder"""
+
+ # Tunable parameters to warn that something might be wrong....
+ #
+ # min_centers_per_tstep: smallest number of center found per timestep
+ # before a warning or a halt of the center_finder is called. Generally
+ # 20-40 centers found at any given time on whole planet.
+ # max_centers_per_tstep: same as min_centers_per_tstep but for maximum number
+ # of centers.
+ # max_centers_per_tstep_change: Maximum allowable timestep to timestep change
+ # in total center count.
+ min_centers_per_tstep = 10
+ max_centers_per_tstep = 60
+ max_centers_per_tstep_change = 10
+
+ #
+ # Threshold for Laplacian Test below which center discarded as unlikely
+ # to be cyclone.
+ #
+ lapp_cutoff = 0.15#0.4
+
+ #
+ # Threshold for the peak horizontal pressure gradient.
+ # Holton says the horizontal pressure gradient is on the order of 0.01 hPa/km.
+ #
+ hpg_cutoff = 0.07
+
+ # Find latitudes where all longitudes fit with critical_radius.
+ use_all_lons = []
+ search_radius = []
+ regional_nys = []
+ for row in range(jm):
+ start = row_start[row]
+ starti,startj = grid2ij(start,im,jm)
+ startlon = g2l(starti,start_lon,start_lat,dlon,dlat,jm=jm,lat_lon_flag="lon",
+ edge_flag=False,center_flag="center",flag_360=True,
+ faux_grids = defs.faux_grids)
+ startlat = g2l(startj,start_lon,start_lat,dlon,dlat,jm=jm,lat_lon_flag="lat",
+ edge_flag=False,center_flag="center",
+ faux_grids = defs.faux_grids)
+ end = row_end[row]
+ endi,endj = grid2ij(end,im,jm)
+ endlon = g2l(endi,start_lon,start_lat,dlon,dlat,jm=jm,lat_lon_flag="lon",
+ edge_flag=False,center_flag="center",flag_360=True,
+ faux_grids = defs.faux_grids)
+ endlat = g2l(endj,start_lon,start_lat,dlon,dlat,jm=jm,lat_lon_flag="lat",
+ edge_flag=False,center_flag="center",
+ faux_grids = defs.faux_grids)
+
+ # Deal with cases of 89.99999
+ startlon = round(startlon,2)
+ startlat = round(startlat,2)
+ endlon = round(endlon,2)
+ endlat = round(endlat,2)
+
+ distx = round(gcd(startlon,startlat,endlon,endlat),2)
+ if defs.critical_radius > 0.0:
+ tmp = defs.critical_radius
+ search_radius.append(tmp)
+ else:
+ tmp = 0.5 * (2.0 * math.pi * defs.earth_radius *
+ math.cos(math.radians(startlat)) ) / float(defs.wavenumber)
+ tmp = round(tmp,2)
+ search_radius.append(tmp)
+
+ # Circumference less than *fixed* critical_radius or grid spacing
+ # effectively zero (i.e. centered on pole)
+ #print "row %02d, distx(dx) %8.2f, tmp(radius) %8.2f, distx*im(circum) %8.2f" % (row,distx,tmp,distx*im)
+ if distx*im <= tmp or distx <= 1.0:
+ use_all_lons.append(row)
+
+ # Maximum number of rows to check within critical_radius, minimum 1
+ temp = int(round(tmp/111.0)/math.degrees(dlat))
+ if temp < 1:
+ temp = 1
+ regional_nys.append(temp)
+
+ """
+ Define a gridid for every grid point on the entire
+ model grid and then makes a dictionary with the 8
+ grid points around gridid ... adapting for poles
+ grid nomenclature
+
+ upm upc upp 0 1 2
+ cnm cnt cnp 3 4 5
+ dnm dnc dnp 6 7 8
+
+ Check: Counts will show that each point on map
+ visited 9 times (once for each grid in the 9-pnt
+ cell). The exceptions to this are in the polar rows
+ because of wrap over more visits are possible.
+ Specifically, the upper/lower most rows are visited
+ only 6 times because 3 wrap over visits are put into
+ the next row (down/up) so that the row before/after
+ the upper/lower most rows is visited 12 times.
+
+ Checked as correct for NCEP reanalysis grid.
+
+ * For fixed critical_radius the pattern should be zonally symmetric
+ with the count increasing rapidly toward the poles because the number
+ grids within critical_radius increases with latitude and over-the
+ pole wrap-around leads to multiple hits.
+
+ * For wave-number the pattern should be the opposite as the number of
+ search grids decreases with latitude. Thus the peak is around the
+ equator.
+ """
+
+ gdict = {} # define dictionary of gridids
+ rdict = {} # define dictionary of regional grids
+ldict = {} # define dictionary for laplacian calculations
+ ijdict = {} # define dictionary of i,j lon,lat for gridid
+
+ for cnt in range(maxid):
+
+ i,j = grid2ij(cnt,im,jm)
+ lon = g2l(i,start_lon,start_lat,dlon,dlat,jm=jm,lat_lon_flag="lon",
+ edge_flag=False,center_flag="center",flag_360=True,
+ faux_grids = defs.faux_grids)
+ lat = g2l(j,start_lon,start_lat,dlon,dlat,jm=jm,lat_lon_flag="lat",
+ edge_flag=False,center_flag="center",faux_grids = defs.faux_grids)
+ ijdict[cnt] = [i,j,lon,lat]
+
+ lat = round(lat,2)
+ lon = round(lon,2)
+ # Laplacian in spherical coordinates (radians)
+ # LAP_P = 1/a^2 * d^2P/dlat^2 + 1/a^2sin^2(lat) * d^2P/dlon^2
+ # + cot(lat)/a^2 * dP/dlat
+ rlat = math.radians(lat)
+ temps = 0.5*(1.0+math.cos(rlat*2.0)) # equivalent sin^2(lat)
+ part1 = 1
+ # Note convert radius to m as pressure in Pa (kg/ms^2)
+ if temps == 0.0:
+ # Poles
+ part1 = part2 = 0.0
+ else:
+ # 1/sin^2(lat) * 1/a^2
+ part2 = 1.0/temps * (defs.inv_earth_radius_sq*0.0001)
+ temps = math.tan(rlat)
+ if temps == 0.0:
+ # Equator
+ part1 = part3 = 0.0
+ else:
+ # cot(lat)/a^2
+ part3 = 1.0/temps * (defs.inv_earth_radius_sq*0.0001)
+ # Pre-calculated values for each grid
+ # [1 if non-polar,1/sin^2(lat)*1/a^2,cot(lat)/a^2]
+
+ ldict[cnt] = [part1,part2,part3]
+
+ # Assign center grid offsets
+ cnp = cnt + 1
+ cnm = cnt - 1
+ if cnt in row_end: # wrap
+ cnp -= im
+ if cnt in row_start: # wrap
+ cnm += im
+
+ upc = cnt + im
+ dnc = cnt - im
+ # Deal with polar rows
+ if cnt < im:
+ dnc = cnt + (im//2) + im
+ if dnc > 2*im - 1:
+ dnc -= im
+ elif cnt > row_start[-1] - 1:
+ upc = cnt + im//2 - im
+ if upc > row_start[-1] - 1:
+ upc -= im
+
+ # Deal with Diagonals
+ upm = upc - 1
+ upp = upc + 1
+ dnm = dnc - 1
+ dnp = dnc + 1
+ # Deal with polar rows
+ if upc in row_start:
+ upm = upc + im - 1
+ if upc in row_end:
+ upp -= im
+ if dnc in row_start:
+ dnm += im
+ if dnc in row_end:
+ dnp -= im
+ # Push into dictionary
+ gdict[cnt] = [upm,upc,upp,cnm,cnt,cnp,dnm,dnc,dnp]
+
+ """
+ Define a gridid for every grid point on the entire
+ model grid and then makes a dictionary with all the
+ points within a great circle radius of (critical_radius).
+ """
+ bylat = []
+ bylat.append(int(cnt))
+
+ # Find candidate grid centers by latitude
+ goingy = 0
+ polarity = 1
+ check_here = cnt
+
+ # Which latitude row is center in?
+ for rowe in row_end:
+ if check_here <= rowe:
+ row = row_end.index(rowe)
+ break
+
+ # Search "up"
+ while goingy < regional_nys[row]:
+ cupc = check_here + polarity*im
+ if check_here > row_start[-1] - 1 and polarity > 0: # upper polar
+ polarity = -1 # wrap over
+ cupc = check_here - (im // 2) + im
+ if cupc > maxid - 1:
+ cupc -= im
+ elif cupc > row_start[-1]:
+ cupc -= im
+ bylat.append(int(cupc))
+ check_here = cupc # shift search
+ goingy += 1
+
+ # Search "down"
+ goingy = 0
+ polarity = -1
+ check_here = cnt
+ while goingy < regional_nys[row]:
+ dnpc = check_here + polarity*im
+ if check_here < im and polarity < 0: # lower polar
+ polarity *= -1 # wrap over
+ dnpc = check_here + im + (im // 2)
+ if dnpc > 2*im - 1:
+ dnpc -= im
+ if dnpc < 0:
+ dnpc = abs(dnpc)
+ bylat.append(int(dnpc))
+ check_here = dnpc # shift search
+ goingy += 1
+
+ bylon = []
+
+ for eachone in bylat:
+ # Which latitude row is center in?
+ for rowe in row_end:
+ if eachone <= rowe:
+ row = row_end.index(rowe)
+ break
+
+ if row in use_all_lons: # only keep 1st entry
+ continue
+ ## uncomment instead to keep all lons
+ #for nextgrid in range(row_start[row],row_end[row]+1):
+ # if nextgrid not in bylon:
+ # bylon.append(int(nextgrid))
+ else:
+ # Check along "plus" longitude at this latitude
+ nextgrid = eachone + 1
+ polarity = 1
+ newadds = 0
+ distx = 0.0
+ while distx < search_radius[row]:
+ if nextgrid > row_end[row]:
+ nextgrid -= im*polarity
+ nexti,nextj = grid2ij(nextgrid,im,jm)
+ tmplon = g2l(nexti,start_lon,start_lat,dlon,dlat,jm=jm,lat_lon_flag="lon",
+ edge_flag=False,center_flag="center",flag_360=True,
+ faux_grids = defs.faux_grids)
+ tmplat = g2l(nextj,start_lon,start_lat,dlon,dlat,jm=jm,lat_lon_flag="lat",
+ edge_flag=False,center_flag="center",
+ faux_grids = defs.faux_grids)
+ distx = gcd(lon,lat,tmplon,tmplat)
+ if nextgrid not in bylon:
+ bylon.append(int(nextgrid))
+ newadds += 1
+ nextgrid += 1
+ if newadds >= im: # stop inf loops
+ distx = search_radius[row]
+ bylon.pop()
+
+ # Check along "minus" longitude at this latitude
+ nextgrid = eachone - 1
+ polarity = -1
+ newadds = 0
+ distx = 0.0
+ while distx < search_radius[row]:
+ if nextgrid < row_start[row]:
+ nextgrid -= im*polarity
+ nexti,nextj = grid2ij(nextgrid,im,jm)
+ tmplon = g2l(nexti,start_lon,start_lat,dlon,dlat,jm=jm,lat_lon_flag="lon",
+ edge_flag=False,center_flag="center",flag_360=True,
+ faux_grids = defs.faux_grids)
+ tmplat = g2l(nextj,start_lon,start_lat,dlon,dlat,jm=jm,lat_lon_flag="lat",
+ edge_flag=False,center_flag="center",
+ faux_grids = defs.faux_grids)
+ distx = gcd(lon,lat,tmplon,tmplat)
+ if nextgrid not in bylon:
+ bylon.append(int(nextgrid))
+ newadds += 1
+ nextgrid -= 1
+ if newadds >= im: # stop inf loops
+ distx = search_radius[row]
+ bylon.pop()
+
+ # Merge center list
+ regional = bylat
+ for g in bylon:
+ if g not in regional:
+ regional.append(g)
+ rdict[cnt] = regional
+ #for row in range(jm):
+ # print row,lats[row],search_radius[row],regional_nys[row],len(rdict[row_start[row]])
+ #sys.exit()
+ return (use_all_lons,search_radius,regional_nys,gdict,rdict,ldict,ijdict,
+ min_centers_per_tstep,max_centers_per_tstep,max_centers_per_tstep_change,
+ lapp_cutoff,hpg_cutoff)
+
+def setup_tracking(defs,gdict,gcd,g2l,ij2grid,grid2ij,defs_grid):
+
+ """Setup stuff"""
+
+ travel_distance = defs.max_cyclone_speed*timestep
+ sin60 = math.sin(math.radians(60.0))
+
+ # Find Latitude weighting function
+ lwdict = {} # define dictionary of latitude weighting
+ gridid = -1
+ for each in lats:
+ weight = 100.0
+ # Deal with equator and division by zero.
+ if abs(each) > 2.0:
+ weight = sin60/(timestep*math.sin(math.radians(abs(each))))
+ for j in range(im):
+ gridid += 1
+ lwdict[gridid] = weight
+
+ # Calculate maximum absolute latitude for tracking... don't allow tracking
+ # above latitude where a center could cross-over pole at maximum assumed
+ # cyclone speed in 1 time step.
+ if defs.polar_filter:
+ polar_screen = [] # define list of polar screened gridids
+ plon = 0.0
+ polar_cross = defs.max_cyclone_speed*timestep
+ for each in row_start:
+ i,j = grid2ij(each,im,jm)
+ lon = g2l(i,start_lon,start_lat,dlon,dlat,jm=jm,lat_lon_flag="lon",
+ edge_flag=False,center_flag="center",flag_360=True,
+ faux_grids = defs.faux_grids)
+ lat = g2l(j,start_lon,start_lat,dlon,dlat,jm=jm,lat_lon_flag="lat",
+ edge_flag=False,center_flag="center",faux_grids = defs.faux_grids)
+ if lat < 0.0:
+ plat = -90.0
+ else:
+ plat = 90.0
+ distp = gcd(plon,plat,lon,lat)
+ if distp <= polar_cross:
+ gridid = each-1
+ for jj in range(im):
+ gridid += 1
+ polar_screen.append(gridid)
+
+ # Find latitudes where all longitudes fit within travel_distance. Not
+ # the same as for center_finder as this is for reasonable cyclone movement
+ # not reasonable low pressure proximity.
+ use_all_lons = []
+ regional_nys = []
+ for row in range(jm):
+ start = row_start[row]
+ starti,startj = grid2ij(start,im,jm)
+ startlon = g2l(starti,start_lon,start_lat,dlon,dlat,jm=jm,lat_lon_flag="lon",
+ edge_flag=False,center_flag="center",flag_360=True,
+ faux_grids = defs.faux_grids)
+ startlat = g2l(startj,start_lon,start_lat,dlon,dlat,jm=jm,lat_lon_flag="lat",
+ edge_flag=False,center_flag="center",faux_grids = defs.faux_grids)
+
+ end = row_end[row]
+ endi,endj = grid2ij(end,im,jm)
+ endlon = g2l(endi,start_lon,start_lat,dlon,dlat,jm=jm,lat_lon_flag="lon",
+ edge_flag=False,center_flag="center",flag_360=True,
+ faux_grids = defs.faux_grids)
+ endlat = g2l(endj,start_lon,start_lat,dlon,dlat,jm=jm,lat_lon_flag="lat",
+ edge_flag=False,center_flag="center",faux_grids = defs.faux_grids)
+
+ distx = gcd(startlon,startlat,endlon,endlat)
+
+ # Circumference less than *fixed* travel_distance or grid spacing
+ # effectively zero (i.e. centered on pole)
+ if distx*im <= travel_distance or distx <= 1.0:
+ use_all_lons.append(row)
+
+ # Maximum number of rows to check within travel_distance, minimum 1
+ temp = int(round(travel_distance/111.0)/math.degrees(dlat))
+ if temp < 1:
+ temp = 1
+ regional_nys.append(temp)
+
+ tdict = {} # define dictionary of travel_distance grids
+ for cnt in range(maxid):
+
+ upm,upc,upp,cnm,cnt,cnp,dnm,dnc,dnp = gdict[cnt]
+
+ if defs.polar_filter:
+ #if cnt in polar_screen: # total screen centers in polar
+ # tdict[cnt] = []
+ # continue
+ if cnt in polar_screen: # limit to local-9 in polar
+ tdict[cnt] = [cnp,cnm,cnt,upc,dnc,upm,upp,dnm,dnp]
+ continue
+
+ """
+ Define a gridid for every grid point on the entire
+ model grid and then makes a dictionary with all the
+ points within a great circle radius of (travel_distance).
+ """
+ bylat = []
+ bylat.append(int(cnt))
+
+ # Find candidate grid centers by latitude
+ goingy = 0
+ polarity = 1
+ check_here = cnt
+
+ # which latitude row is center in?
+ for rowe in row_end:
+ if check_here <= rowe:
+ row = row_end.index(rowe)
+ break
+
+ # Search "up"
+ while goingy < regional_nys[row]:
+ cupc = check_here + polarity*im
+ if check_here > row_start[-1] - 1 and polarity > 0: # upper polar
+ polarity = -1 # wrap over
+ cupc = check_here - (im // 2) + im
+ if cupc > maxid - 1:
+ cupc -= im
+ elif cupc > row_start[-1]:
+ cupc -= im
+ bylat.append(int(cupc))
+ check_here = cupc # shift search
+ goingy += 1
+
+ # Search "down"
+ goingy = 0
+ polarity = -1
+ check_here = cnt
+ while goingy < regional_nys[row]:
+ dnpc = check_here + polarity*im
+ if check_here < im and polarity < 0: # lower polar
+ polarity *= -1 # wrap over
+ dnpc = check_here + im + (im // 2)
+ if dnpc > 2*im - 1:
+ dnpc -= im
+ if dnpc < 0:
+ dnpc = abs(dnpc)
+ bylat.append(int(dnpc))
+ check_here = dnpc # shift search
+ goingy += 1
+
+ # Find candidate grid centers along longitude for each lat
+ i,j = grid2ij(cnt,im,jm)
+ lon = g2l(i,start_lon,start_lat,dlon,dlat,jm=jm,lat_lon_flag="lon",
+ edge_flag=False,center_flag="center",flag_360=True,
+ faux_grids = defs.faux_grids)
+ lat = g2l(j,start_lon,start_lat,dlon,dlat,jm=jm,lat_lon_flag="lat",
+ edge_flag=False,center_flag="center",faux_grids = defs.faux_grids)
+ bylon = []
+
+ for eachone in bylat:
+ # which latitude row is center in?
+ for rowe in row_end:
+ if eachone <= rowe:
+ row = row_end.index(rowe)
+ break
+
+ if row in use_all_lons: # only keep 1st entry
+ continue
+ ## uncomment instead to keep all lons
+ #for nextgrid in range(row_start[row],row_end[row]+1):
+ # if nextgrid not in bylon:
+ # bylon.append(int(nextgrid))
+ else:
+ # check along "plus" longitude at this latitude
+ nextgrid = eachone + 1
+ polarity = 1
+ newadds = 0
+ distx = 0.0
+ while distx < travel_distance:
+ if nextgrid > row_end[row]:
+ nextgrid -= im*polarity
+ nexti,nextj = grid2ij(nextgrid,im,jm)
+ tmplon = g2l(nexti,start_lon,start_lat,dlon,dlat,jm=jm,lat_lon_flag="lon",
+ edge_flag=False,center_flag="center",flag_360=True,
+ faux_grids = defs.faux_grids)
+ tmplat = g2l(nextj,start_lon,start_lat,dlon,dlat,jm=jm,lat_lon_flag="lat",
+ edge_flag=False,center_flag="center",faux_grids = defs.faux_grids)
+ distx = gcd(lon,lat,tmplon,tmplat)
+ if nextgrid not in bylon:
+ bylon.append(int(nextgrid))
+ newadds += 1
+ nextgrid += 1
+ if newadds >= im: # stop inf loops
+ distx = travel_distance
+ bylon.pop()
+
+ # check along "minus" longitude at this latitude
+ nextgrid = eachone - 1
+ polarity = -1
+ newadds = 0
+ distx = 0.0
+ while distx < travel_distance:
+ if nextgrid < row_start[row]:
+ nextgrid -= im*polarity
+ nexti,nextj = grid2ij(nextgrid,im,jm)
+ tmplon = g2l(nexti,start_lon,start_lat,dlon,dlat,jm,lat_lon_flag="lon",
+ edge_flag=False,center_flag="center",flag_360=True,
+ faux_grids = defs.faux_grids)
+ tmplat = g2l(nextj,start_lon,start_lat,dlon,dlat,jm,lat_lon_flag="lat",
+ edge_flag=False,center_flag="center",faux_grids = defs.faux_grids)
+ distx = gcd(lon,lat,tmplon,tmplat)
+
+ if nextgrid not in bylon:
+ bylon.append(int(nextgrid))
+ newadds += 1
+ nextgrid -= 1
+ if newadds >= im: # stop inf loops
+ distx = travel_distance
+ bylon.pop()
+
+ # Merge center list
+ regional = bylat
+ for g in bylon:
+ if g not in regional:
+ regional.append(g)
+ if defs.polar_filter:
+ #for check in regional[:]: # total screen of regional in polar
+ # if check in polar_screen:
+ # regional.remove(check)
+ temps = [cnp,cnm,cnt,upc,dnc,upm,upp,dnm,dnp]
+ for check in regional[:]: # limit to local-9 regional in polar
+ if check in polar_screen:
+ if check not in temps:
+ regional.remove(check)
+ # populate dictionary
+ tdict[cnt] = regional
+
+ return tdict,lwdict
+
+def setup_att(defs,gdict,gcd,g2l,ij2grid,grid2ij,
+ grid_area,make_screen,rhumb_line_nav,defs_grid):
+ """Setup stuff"""
+
+ # Pre-bind and define.
+ twopi = 2.0 * math.pi;cos = math.cos
+ radians = math.radians; degrees = math.degrees
+ twopier = twopi*defs.earth_radius
+ rln = rhumb_line_nav
+
+ gdict_new = {} # same as gdict but reordered.
+ ijdict = {} # define dictionary of i,j lon,lat for gridID
+ darea = {} # define dictionary of area for grid i
+
+ top_lat = defs.tropical_boundary
+ bot_lat = -1*top_lat
+
+ # Find the area of each grid
+ # NOTE only checked for regular grid!!
+ multiplier = defs.earth_radius * defs.earth_radius * dlon
+
+ distance_lookup = -1*numpy.ones((jm,maxid),numpy.int)
+ angle_lookup = -1*numpy.ones((jm,maxid),numpy.int)
+
+ # Apply a parabolic tropical penalty such that at ABS(30 lat) the
+ # distance from the center is exaggerated such that a wavenumber
+ # 4 search radius can't see equatorward of ABS(lat 12) degrees for a
+ # center located at ABS(30 lat).
+ force_constant = 0.0005
+ meridional_tropical_debt = []
+ distc = -1*dx*111.0
+
+ #
+ # Everything done relative to a single lon and all lats
+ #
+ for cnt in row_start:
+ i,j = grid2ij(cnt,im,jm)
+ lon = g2l(i,start_lon,start_lat,dlon,dlat,jm=jm,lat_lon_flag="lon",
+ edge_flag=False,center_flag="center",flag_360=True,
+ faux_grids = defs.faux_grids)
+ lat = g2l(j,start_lon,start_lat,dlon,dlat,jm=jm,lat_lon_flag="lat",
+ edge_flag=False,center_flag="center",faux_grids = defs.faux_grids)
+
+ # Loop over all grids finding distance/angle from central lon
+ for pnt in range(maxid):
+
+ # Skip current grid
+ if pnt == cnt:
+ distance_lookup[j][pnt] = 0
+ angle_lookup[j][pnt] = 0
+ ii,jj = grid2ij(pnt,im,jm)
+ llat = g2l(jj,start_lon,start_lat,dlon,dlat,jm=jm,lat_lon_flag="lat",
+ edge_flag=True,center_flag="center",faux_grids = defs.faux_grids)
+ darea[pnt] = grid_area(math,llat,multiplier)
+ continue
+
+ # Calculate the distance/angle
+ ii,jj = grid2ij(pnt,im,jm)
+ llat = g2l(jj,start_lon,start_lat,dlon,dlat,jm=jm,lat_lon_flag="lat",
+ edge_flag=True,center_flag="center",faux_grids = defs.faux_grids)
+ if cnt == 0:
+ darea[pnt] = grid_area(math,llat,multiplier)
+ llat = llat[1]
+ llon = g2l(ii,start_lon,start_lat,dlon,dlat,jm=jm,lat_lon_flag="lon",
+ edge_flag=False,center_flag="center",flag_360=True,
+ faux_grids = defs.faux_grids)
+
+ fnc = []
+ fnc = rln(llon,llat,lon,lat,True)
+ if defs.use_gcd:
+ dist = gcd(llon,llat,lon,lat)
+ distance_lookup[j][pnt] = int(round(dist))
+ else:
+ distance_lookup[j][pnt] = int(round(fnc[1]))
+ # angle_lookup[j][pnt] = int(round(fnc[0]))
+
+ ## JJJ fix angle lookup error that happens if the first value of fnc[0] is nan
+ if math.isnan(fnc[0]):
+ angle_lookup[j][pnt] = angle_lookup[j-1][pnt]
+ else:
+ angle_lookup[j][pnt] = int(round(fnc[0]))
+
+ if bot_lat <= round(lat):
+ if lat <= 0.0:
+ distc += dx*111.0
+ parab = force_constant*(distc*distc)
+ meridional_tropical_debt.append(round(parab))
+ elif round(lat) <= top_lat:
+ distc -= dx*111.0
+ parab = force_constant*(distc*distc)
+ meridional_tropical_debt.append(round(parab))
+ else:
+ meridional_tropical_debt.append(0.0)
+ else:
+ meridional_tropical_debt.append(0.0)
+
+ scale_b = 0.50
+ close_by = make_screen(jm,im,inv_wn,scale_b,row_start,row_end,dx,
+ bot,top,dlat,dlon,start_lon,start_lat,defs.faux_grids,
+ meridional_tropical_debt,twopier,cos,radians,
+ degrees,g2l,gcd)
+
+ scale_a = 0.25
+ if scale_a >= scale_b:
+ # The search logic depends on the opposite condition!
+ print ("Warning scale_a >= scale_b!")
+ wander_test = make_screen(jm,im,inv_wn,scale_a,row_start,row_end,dx,bot,
+ top,dlat,dlon,start_lon,start_lat,defs.faux_grids,
+ meridional_tropical_debt,twopier,cos,radians,
+ degrees,g2l,gcd)
+
+ scale_c = 0.25
+ # Used to say a center is a potential stormy neighbor for another grid.
+ # Set wavenumber at double input
+ inv_wn1 = inv_wn * 0.5
+ neighbor_test = make_screen(jm,im,inv_wn1,scale_c,row_start,row_end,dx,bot,
+ top,dlat,dlon,start_lon,start_lat,defs.faux_grids,
+ meridional_tropical_debt,twopier,cos,radians,
+ degrees,g2l,gcd)
+
+ #
+ # Fill Moore Neighbor Lookup Table
+ #
+ for cnt in range(maxid):
+ # upm upc upp 0 1 2
+ # cnm cnt cnp 3 4 5
+ # dnm dnc dnp 6 7 8
+ upm,upc,upp,cnm,cnt,cnp,dnm,dnc,dnp = gdict[cnt]
+ # push into dictionary in different order.
+ gdict_new[cnt] = [upm,upc,upp,cnp,dnp,dnc,dnm,cnm,cnt]
+
+ return(darea,distance_lookup,angle_lookup,close_by,wander_test,gdict_new,neighbor_test)
+
+#---Start of main code block.
+if __name__=='__main__':
+
+ months = {1: 'January', 2: 'February', 3: 'March', 4: 'April', 5: 'May',
+ 6: 'June', 7: 'July', 8: 'August', 9: 'September', 10: 'October',
+ 11: 'November', 12: 'December'}
+
+ # Set to 1 if plots wanted and matplotlib available.
+ save_plot = 1
+
+ # If setup_vX.py executed with a file, redirect output to that file.
+ # e.g. python setup_v2.py /Volumes/scratch/output/logfile.txt
+ keep_log = 0
+ if len(sys.argv) > 1:
+ keep_log = 1
+ if keep_log:
+ screenout = sys.stdout
+ log_file = open(sys.argv[1], 'w',0)
+ sys.stdout = log_file
+
+ # --------------------------------------------------------------------------
+ # Define all modules to be imported.
+ # --------------------------------------------------------------------------
+
+ # Extract version number from this script's name.
+ tmp = sys.argv[0]
+ file_len = len(tmp.split("_"))
+ vnum = "_"+tmp.split("_")[file_len-1][:2]
+
+ # Basic standard Python modules to import.
+ imports = []
+ system_imports = "import math,numpy, pickle"
+ imports.append(system_imports)
+ imports.append("import netCDF4 as NetCDF")
+
+ # My modules to import w/ version number appended.
+ my_base = ["defs","where_y","gcd","g2l","ij2grid","grid2ij",
+ "rhumb_line_nav","first_last_lons","grid_area",
+ "make_screen","print_col"]
+ if save_plot:
+ # Jeyavinoth: removed netcdftime from line below; in this case I just remove the line below
+ # imports.append("import netcdftime")
+ my_base.append("plot_map")
+ my_base.append("pull_data")
+ for x in my_base:
+ tmp = "import %s%s as %s" % (x,vnum,x)
+ imports.append(tmp)
+ for i in imports:
+ exec(i)
+
+ # Pre_bind
+ where_y = where_y.where_y
+ gcd = gcd.gcd
+ g2l = g2l.g2l
+ ij2grid = ij2grid.ij2grid
+ grid2ij = grid2ij.grid2ij
+ rhumb_line_nav = rhumb_line_nav.rhumb_line_nav
+ first_last_lons = first_last_lons.first_last_lons
+ grid_area = grid_area.grid_area
+ make_screen = make_screen.make_screen
+ print_col = print_col.print_col
+ if save_plot:
+ Plot_Map = plot_map.plotmap
+
+ # --------------------------------------------------------------------------
+ # Start basic definitions of what is to be done.
+ # --------------------------------------------------------------------------
+
+ # If set to 1, then topography information will not be used to determine
+ # potentially troublesome locations for SLP dependent analysis. For
+ # example, regions of very high or steep topography can result in erroneous
+ # SLP values that either mimic or obscure cyclones. Generally, the
+ # results are better with no_topo set to 0.
+ no_topo = 0
+
+ # If set to 1, then the land_sea mask is not used to separate results
+ # that occur over land or ocean grids. This info is not required for
+ # full analysis. Note: the topography field can be used for this in
+ # a pinch.
+ no_mask = 0
+
+ model=defines.model
+
+ # Full path to the directory where the SLP data files are stored.
+ slp_source_directories = defines.slp_folder
+
+ slp_path = slp_source_directories
+ if not os.path.exists(slp_path):
+ sys.exit("ERROR: slp_path not found.")
+
+ # Full path to the root directory where output will be stored.
+ # Note it's possible that all of these directories are identical.
+ result_directory = defines.out_folder
+ if not os.path.exists(result_directory):
+ sys.exit("ERROR: result_directory not found.")
+
+ # Directory to be created for storing temporary model specific files.
+ shared_path = "%s%s_files/" % (result_directory,model)
+
+ # Directory to be created for storing model specific results.
+ out_path = "%s%s/" % (result_directory,model)
+ if not os.path.exists(out_path):
+ dirs = list(map(os.makedirs, (out_path,
+ out_path+'comps/',
+ out_path+'pdfs/',
+ out_path+'netcdfs/',
+ out_path+'stats/',
+ out_path+'figs/pdfs/',
+ out_path+'figs/comps/')))
+ print ("Directory %s Created." % (out_path))
+
+
+ print ("Running Setup\n")
+ print ("Using:")
+ msg = "\tModel:\t\t %s\n\tOut_Path:\t%s\n\tShared_Dir:\t%s\n\tSLP_Path:\t%s"
+ print (msg % (model,out_path,shared_path,slp_path))
+ msg = "\tno_topo:\t%s\n\tno_mask:\t%s"
+ print (msg % (bool(no_topo),bool(no_mask)))
+
+ # --------------------------------------------------------------------------
+ # Define the definitions to be read in.
+ #
+ # Shows how to alter a parameter in defs without having to alter the file
+ # itself. Here I use setup_all as a flag to alter the defs so that all data
+ # that can be precalculated and saved are, rather than the default which is
+ # to read those data from file.
+ # --------------------------------------------------------------------------
+
+ # Select wavenumber for regional screens.
+ wavenumbers = [4.0,8.0,13.0,26.0,52.0]
+
+ # This value is used by center_finder_vX.py to screen for regional minima
+ # status. Generally, using too low wavenumbers means overly screening
+ # centers and too large values have little effect other than to
+ # make the analysis run longer. Good rule of thumb is 8-26.
+ wavenumber = wavenumbers[2]
+
+ # This value is used by attribute_vX.py to limit searches. This value
+ # should be lower wavenumber than wavenumber above. Basically this
+ # defines the largest system allowed. A good rule of thumb is 3-5.
+ wavenumber_a = wavenumbers[0]
+
+ # See defs_vX.py for what these variables represent.
+ defs_set = {'tropical_boundary':15, "tropical_boundary_alt":30,
+ "critical_radius":0.0,"polar_filter":False,
+ "skip_polars":True,"wavenumber":wavenumber, "use_gcd":True}
+ # match RAIBLE et. all 2007
+ #defs_set = {'max_cyclone_speed': 42.0,'age_limit':72.0}
+
+ # Fetch and update definitions. These are held in defs_vX.py
+ defs = defs.defs(**defs_set)
+ #######
+ #defs.read_scale = 1.0 # I have made this change in defs_v4.py
+
+ # Here we attempt to read a sample of the SLP data in order
+ # to auto-configure some things such as the grid geometry
+ # for this model/data source. It is assumed that these
+ # files are netCDF files following the so called
+ # Climate and Forecast (CF) Metadata Convention
+ # (http://cf-pcmdi.llnl.gov/).
+ #
+ # At a minimum this assumes that the files contain
+ # the latitude, longitude, time and SLP fields.
+ #
+ # Here is an example header from the NCEP-DOE Reanalysis 2
+ # showing the expected output of ncdump -c file.nc
+ # ncdump -c mslp.2008.nc
+ # netcdf mslp.2008 {
+ # dimensions:
+ # lon = 144 ;
+ # lat = 73 ;
+ # time = UNLIMITED ; // (1464 currently)
+ # variables:
+ # float lat(lat) ;
+ # lat:units = "degrees_north" ;
+ # lat:actual_range = 90.f, -90.f ;
+ # lat:long_name = "Latitude" ;
+ # lat:standard_name = "latitude_north" ;
+ # lat:axis = "y" ;
+ # lat:coordinate_defines = "point" ;
+ # float lon(lon) ;
+ # lon:units = "degrees_east" ;
+ # lon:long_name = "Longitude" ;
+ # lon:actual_range = 0.f, 357.5f ;
+ # lon:standard_name = "longitude_east" ;
+ # lon:axis = "x" ;
+ # lon:coordinate_defines = "point" ;
+ # double time(time) ;
+ # time:units = "hours since 1800-1-1 00:00:0.0" ;
+ # time:long_name = "Time" ;
+ # time:actual_range = 1823280., 1832058. ;
+ # time:delta_t = "0000-00-00 06:00:00" ;
+ # time:standard_name = "time" ;
+ # time:axis = "t" ;
+ # time:coordinate_defines = "point" ;
+ # short mslp(time, lat, lon) ;
+ # mslp:long_name = "6-Hourly Mean Sea Level Pressure" ;
+ # mslp:valid_range = -32765s, 15235s ;
+ # mslp:unpacked_valid_range = 77000.f, 125000.f ;
+ # mslp:actual_range = 92980.f, 107630.f ;
+ # mslp:units = "Pascals" ;
+ # mslp:add_offset = 109765.f ;
+ # mslp:scale_factor = 1.f ;
+ # mslp:missing_value = 32766s ;
+ # mslp:_FillValue = -32767s ;
+ # mslp:precision = 0s ;
+ # mslp:least_significant_digit = -1s ;
+ # mslp:GRIB_id = 2s ;
+ # mslp:GRIB_name = "PRMSL" ;
+ # mslp:var_desc = "Mean Sea Level Pressure" ;
+ # mslp:dataset = "NCEP/DOE AMIP-II Reanalysis (Reanalysis-2)" ;
+ # mslp:level_desc = "Sea Level" ;
+ # mslp:statistic = "Individual Obs" ;
+ # mslp:parent_stat = "Other" ;
+ # mslp:standard_name = "pressure" ;
+ #
+ # // global attributes:
+ # :Conventions = "CF-1.0" ;
+ # :title = "4x Daily NCEP/DOE Reanalysis 2" ;
+ # :history = "created 2009/03 by NOAA/ESRL/PSD" ;
+ # :comments = "Data is from \n",
+ # "NCEP/DOE AMIP-II Reanalysis (Reanalysis-2)\n",
+ # "(4x/day). Data interpolated from model (sigma) surfaces to\n",
+ # "a rectangular grid." ;
+ # :platform = "Model" ;
+ # :source = "NCEP/DOE AMIP-II Reanalysis (Reanalysis-2) Model" ;
+ # :institution = "National Centers for Environmental Prediction" ;
+ # :references = "http://www.cpc.ncep.noaa.gov/products/wesley/reanalysis2/\n",
+ # "http://www.cdc.noaa.gov/data/gridded/data.ncep.reanalysis2.html" ;
+ # data:
+ #
+ # lat = 90, 87.5, 85, 82.5, 80, 77.5, 75, 72.5, 70, 67.5, 65, 62.5, 60, 57.5,
+ # 55, 52.5, 50, 47.5, 45, 42.5, 40, 37.5, 35, 32.5, 30, 27.5, 25, 22.5, 20,
+ # 17.5, 15, 12.5, 10, 7.5, 5, 2.5, 0, -2.5, -5, -7.5, -10, -12.5, -15,
+ # -17.5, -20, -22.5, -25, -27.5, -30, -32.5, -35, -37.5, -40, -42.5, -45,
+ # -47.5, -50, -52.5, -55, -57.5, -60, -62.5, -65, -67.5, -70, -72.5, -75,
+ # -77.5, -80, -82.5, -85, -87.5, -90 ;
+ #
+ # lon = 0, 2.5, 5, 7.5, 10, 12.5, 15, 17.5, 20, 22.5, 25, 27.5, 30, 32.5, 35,
+ # 37.5, 40, 42.5, 45, 47.5, 50, 52.5, 55, 57.5, 60, 62.5, 65, 67.5, 70,
+ # 72.5, 75, 77.5, 80, 82.5, 85, 87.5, 90, 92.5, 95, 97.5, 100, 102.5, 105,
+ # 107.5, 110, 112.5, 115, 117.5, 120, 122.5, 125, 127.5, 130, 132.5, 135,
+ # 137.5, 140, 142.5, 145, 147.5, 150, 152.5, 155, 157.5, 160, 162.5, 165,
+ # 167.5, 170, 172.5, 175, 177.5, 180, 182.5, 185, 187.5, 190, 192.5, 195,
+ # 197.5, 200, 202.5, 205, 207.5, 210, 212.5, 215, 217.5, 220, 222.5, 225,
+ # 227.5, 230, 232.5, 235, 237.5, 240, 242.5, 245, 247.5, 250, 252.5, 255,
+ # 257.5, 260, 262.5, 265, 267.5, 270, 272.5, 275, 277.5, 280, 282.5, 285,
+ # 287.5, 290, 292.5, 295, 297.5, 300, 302.5, 305, 307.5, 310, 312.5, 315,
+ # 317.5, 320, 322.5, 325, 327.5, 330, 332.5, 335, 337.5, 340, 342.5, 345,
+ # 347.5, 350, 352.5, 355, 357.5 ;
+ #
+ # time = 1823280, 1823286, 1823292, 1823298, 1823304, 1823310,
+ # 1823316, ... 1832058 ;
+ # }
+ #
+ # It is not necessary that your data files be exactly like this.
+ # However, there are some basic requirements and also some
+ # conventions that will make your life easier if followed.
+ #
+ # If your files differ from what is required you can modify
+ # pull_data_vX.py to accommodate your data, completely replace
+ # pull_data_vX.py to suit your data or modify your data.
+ #
+ # Requirements:
+ # 1) Files need 3 dimensions named by default lon,lat and time.
+ # If your files use these dimensions with different names
+ # you can alter these assumptions below.
+ # 2) Files need 4 variables named by default lat, lon, time and slp.
+ # 3) The variables lat, lon, time and slp have units of
+ # "degrees_north","degrees_east", "hours since ", and "Pascals"
+ # (or other unit that can be scaled to hPa) respectively.
+ # 4) The time dimension supports 2 calendars; "standard" and "noleap".
+ #
+ # Conventions to make your work easier and faster:
+ # 1) The SLP files are assumed to contain a single year's worth
+ # of data with the file name something like slp.1990.nc. On
+ # a practical note be aware that parallel processing works by
+ # spawning versions of the code each with parts of the data
+ # (i.e., different files) so that a single large data file
+ # precludes multiprocessing.
+ # 2) The 1st latitude is assumed to start at the south pole, or
+ # at least to contain it. If not the case the code will auto
+ # flip the latitude and data arrays.
+ # 3) The 1st longitude is assumed to start at the prime meridian,
+ # or at least to contain it. If not the case the code will
+ # auto rotate the longitude and data arrays.
+ # 4) We assume regular lat-lon grids. Gaussian grids will work
+ # but with this assumption, or a version which accommodates
+ # Gaussian grids can be made (contact mbauer@giss.nasa.gov).
+ # 5) Longitude is assumed to be in the 0-360 degree format. If
+ # using the +-180 degree format, the code will auto convert
+ # for you.
+ # 6) It's best if the time variable has the attribute "delta_t".
+ # In any event, we assume constant time steps. Also, as a
+ # general rule cyclone tracking works best with time steps
+ # of 6 hours or less.
+
+ # Data can be stored in one of three ways:
+ #
+ # pixel-registered (i.e. nodes centered on the grid boxes).
+ # The lon/lat points represent the center of the grid.
+ # Number of longitudes = (lon[-1]-lon[0])/dx
+ # Number of latitudes = (lat[-1]-lat[0])/dy
+ # Sometimes people store pixel-registered data but
+ # give the lon/lat as a grid edge/corner. This is done so that
+ # it is easy to see the data covers a full range. For
+ # example, the first grid might be centered on 88.75N
+ # with a 2.5 spacing but the first longitude is 90N or the
+ # upper edge.
+ #
+ # gridline-registered (i.e. nodes are at gridline intersections).
+ # The lon/lat points represent the edge of the grid (often the
+ # upper right hand corner).
+ # Number of longitudes = (lon[-1]-lon[0])/(dx+1)
+ # Number of latitudes = (lat[-1]-lat[0])/(dy+1)
+ #
+ # point-registered (i.e. nodes are point measurements. There are no grids).
+ # Here the data is sampled at the lon/lat points but do not
+ # represent grid-values. In this case some sort of interpolation
+ # can be used to fill in the grids or one can assume faux-grids
+ # based on the point spacing.
+ #
+ pixel_registered = []
+ point_registered = defines.model
+ gridline_registered = []
+ all_registered = pixel_registered[:]
+ all_registered.extend(point_registered)
+ all_registered.extend(gridline_registered)
+
+ # Assess the slp_path. Get a list of files and from the file
+ # names get the time frame that the data cover; if this won't
+ # work allow user to manually set years.
+
+ # Determine years, if manually set use integers (i.e., 1989).
+ # If using model results without 'years' just enumerate them
+ # or if all data in a single large file use 1 (note no
+ # multiprocessing in this case).
+ #start_year = -1
+ #end_year = -1
+ start_year = defines.over_write_years[0]
+ end_year = defines.over_write_years[1]
+
+ # What symbol separates file name.
+ file_seperator = "."
+
+ # Pull the list of available files, put in chronological order.
+ print ("\nScanning %s for data files:" % (slp_path))
+ file_list = os.listdir(slp_path)
+ file_list = [x for x in file_list if x.find(".nc") != -1]
+ file_list.sort()
+ print_col(file_list,indent_tag="\t",fmt="%s",cols=6,width=10)
+
+ # Loop over available files for correct years
+ found_years = {}
+ for infile in file_list:
+ if infile.find(".nc") != -1:
+ #print "Scanning File:",infile,
+ # This works for filenames like slp.1998.nc
+ year = infile.split(file_seperator)[1]
+ found_years[year] = 1
+ found_years = list(found_years.keys())
+ found_years.sort()
+ print ("\nFound_years:")
+ print_col(found_years,indent_tag="\t",fmt="%s ",cols=10,width=10)
+
+ if start_year == end_year == -1:
+ start_year = found_years[0]
+ end_year = found_years[-1]
+ super_years =[start_year,end_year]
+
+ print ("\nYear Bounds: [%s,%s]" % (super_years[0],super_years[1]))
+
+ # Pull in the 1st data file to extract some information.
+ #
+ # Dimension & Variable Names: These can be changed if needed.
+ dim_lat = "lat"
+ dim_lon = "lon"
+ dim_time = "time"
+ var_lat = "lat"
+ var_lon = "lon"
+ var_time = "time"
+ var_slp = "slp"
+ var_topo = "hgt"
+ var_land_sea_mask = var_topo
+
+
+ # Test for extra files if requested.
+ if not os.path.exists(shared_path):
+ os.makedirs(shared_path)
+ print ("Directory %s Created." % (shared_path))
+ if no_topo and no_mask:
+ print ("WARNING: Proceeding without topography or land_sea mask.")
+ elif not no_topo and no_mask:
+ print ("WARNING: Proceeding without land_sea mask.")
+ print ("Copy topography file into %s and re-run setup." % (shared_path))
+ sys.exit()
+ elif no_topo and not no_mask:
+ print ("WARNING: Proceeding without topography.")
+ print ("Copy land_sea mask file into %s and re-run setup." % (shared_path))
+ sys.exit()
+ else:
+ msg = "Copy the topography and land_sea mask files into %s and re-run setup."
+ print (msg % (shared_path))
+ msg = "This step can be skipped by setting no_topo and no_mask to 1 and re-running."
+ sys.exit(msg)
+ else:
+ topo_file = "%s%s_%s.nc" % (shared_path,model,var_topo)
+ # If no specific land/sea mask available the topo_file can be
+ # used to separate land as all grids with topography > sea_level.
+ # Do this by setting var_land_sea_mask = var_topo.
+ mask_file = "%s%s_%s.nc" % (shared_path,model,var_land_sea_mask)
+ if no_topo and no_mask:
+ print ("WARNING: Proceeding without topography or land_sea mask.")
+ elif not no_topo and no_mask:
+ print ("WARNING: Proceeding without land_sea mask.")
+ if not os.path.exists(topo_file):
+ sys.exit("ERROR: %s not found." % (topo_file))
+ elif no_topo and not no_mask:
+ print ("WARNING: Proceeding without topography.")
+ if not os.path.exists(mask_file):
+ sys.exit("ERROR: %s not found." % (mask_file))
+ else:
+ if not os.path.exists(topo_file):
+ sys.exit("ERROR: %s not found." % (topo_file))
+ if not os.path.exists(mask_file):
+ sys.exit("ERROR: %s not found." % (mask_file))
+
+ # Open test file
+ test_file = "%s%s" % (slp_path,file_list[0])
+ # print ("JJJ")
+ print (test_file)
+ nc_in = NetCDF.Dataset(test_file,'r',format='NETCDF3_CLASSIC')
+
+ # Pull Time
+ time = nc_in.variables[var_time]
+ time = numpy.array(time[:],dtype=float,copy=1)
+ tsteps = len(time)
+ if 'calendar' in nc_in.variables[var_time].ncattrs():
+ the_calendar = nc_in.variables[var_time].calendar
+ the_calendar = the_calendar.lower()
+ else:
+ the_calendar = 'standard'
+ if 'delta_t' in nc_in.variables[var_time].ncattrs():
+ tmp = nc_in.variables[var_time].delta_t.split(':')
+ else:
+ print ("WARNING! netcdf file lacks delta_t', using 6hr default!")
+ tmp = "0000-00-00 06:00:00".split(':')
+ timestep = int(tmp[0][-2:])
+ print ("\nTime Information:")
+ print ("\tCalendar:\t%s" % (the_calendar))
+ print ("\tTime steps:\t%d" % (tsteps))
+ print ("\tTime step:\t%d" % (timestep))
+
+ # Pull in Latitude
+ lat = nc_in.variables[var_lat]
+ lats = numpy.array(lat[:],dtype=float,copy=1)
+ if lats[0] > lats[-1]:
+ lat_flip = 1
+ lats = lats[::-1]
+ else:
+ lat_flip = 0
+ dy = abs(lats[10]-lats[11])
+ jm = len(lats)
+ dlat = math.radians(dy)
+ start_lat = lats[0]
+ dlat_sq = dlat*dlat
+ two_dlat = 2.0*dlat
+ interval = dy*0.5
+ edges = dict([(i+interval,1) for i in lats])
+ edges.update([(i-interval,1) for i in lats])
+ edges = list(edges.keys())
+ edges.sort()
+ lat_edges = edges
+
+ # Make faux_grids based on point-registered data.
+ faux_grids = 1
+ defs.faux_grids = faux_grids
+ if lat_flip:
+ lats = lats[::-1]
+ # if faux_grids==1; % reminder JJJ
+ edges = dict([(round(i+interval),1) for i in lats])
+ edges.update([(round(i-interval),1) for i in lats])
+ edges = list(edges.keys())
+ edges.sort()
+ edges.reverse()
+ lat_edges = edges
+ if lat_flip:
+ lats = lats[::-1]
+
+ print ("\tLatitude Flipped: %s" % (bool(lat_flip)))
+ print ("\tLatitude Spacing: %g" % (dy))
+ if defines.verbose:
+ print ("\tLatitudes (jm = %d):" % (jm))
+ print_col(list(lats),indent_tag="\t\t",fmt="% 7.2f",cols=5,width=10)
+ print ("\tLatitudes Edges (%d):" % (len(lat_edges)))
+ print_col(lat_edges,indent_tag="\t\t",fmt="% 7.2f",cols=5,width=10)
+
+ # Pull in Longitude
+ lon = nc_in.variables[var_lon]
+ lons = numpy.array(lon[:],dtype=float,copy=1)
+ # If lons in +-180 format shift to 360
+ if numpy.any(numpy.less(lons,0.0)):
+ tmp = numpy.where(lons < 0.0,numpy.add(lons,360.0),lons)
+ lons = tmp
+ dx = abs(lons[0]-lons[1])
+ for lon_shift in range(len(lons)):
+ left_edge = lons[lon_shift] - dx*0.5
+ #if left_edge < 0.0:
+ # left_edge += 360.0
+ right_edge = lons[lon_shift] + dx*0.5
+ #if right_edge > 360.0:
+ # right_edge -= 360.0
+ # Test where prime meridian is located
+ if left_edge <= 0.0 < right_edge:
+ break
+ if lon_shift:
+ lons = numpy.roll(lons,lon_shift)
+ im = len(lons)
+ dlon = math.radians(dx)
+ start_lon = lons[0]
+ dlon_sq = dlon*dlon
+ interval = dx*0.5
+ edges = dict([(i+interval,1) for i in lons])
+ edges.update([(i-interval,1) for i in lons])
+ edges = list(edges.keys())
+ edges.sort()
+ lon_edges = edges
+ print ("\nLongitude Information:")
+ print ("\tLongitude Rotated: %s (%d)" % (bool(lon_shift),lon_shift))
+ print ("\tLongitude Spacing: %g degrees" % (dx))
+ if defines.verbose:
+ print ("\tLongitudes (im = %d):" % (im))
+ print_col(list(lons),indent_tag="\t\t",fmt="% 7.2f",cols=5,width=10)
+ print ("\tLongitude Edges (%d):" % (len(lon_edges)))
+ print_col(lon_edges,indent_tag="\t\t",fmt="% 7.2f",cols=5,width=10)
+
+ # Maximum number of gridIDs in model grid
+ maxid = jm*im
+
+ model_flag = "_%s_" % (model.upper())
+
+ # Set some boundaries (latitude row)
+ eq_grid = where_y(sys,list(lats),0.0,dy*0.5)
+ defs.tropical_boundary=15.0
+ tropical_s = where_y(sys,list(lats),-1*defs.tropical_boundary,dy*0.5)
+ tropical_n = where_y(sys,list(lats),defs.tropical_boundary,dy*0.5)
+ bot = ij2grid(tropical_s,0,im,jm)
+ mid = ij2grid(eq_grid,0,im,jm)
+ top = ij2grid(tropical_n,im-1,im,jm)
+ msg = "\nTropical_boundary:\t%d degrees\nTropical_n:\t\t%d %d (row,gridid)\nEQ_grid:\t\t%d %d (row,gridid)\nTropical_s:\t\t%d %d (row,gridid)"
+ print (msg % (defs.tropical_boundary,tropical_n,top,eq_grid,mid,tropical_s,bot))
+
+ defs.tropical_boundary_alt = -30.0
+ tropical_s_alt = where_y(sys,list(lats),-1*defs.tropical_boundary_alt,dy*0.5)
+ tropical_n_alt = where_y(sys,list(lats),defs.tropical_boundary_alt,dy*0.5)
+ bot_alt = ij2grid(tropical_s_alt,0,im,jm)
+ top_alt = ij2grid(tropical_n_alt,im-1,im,jm)
+ msg = "\nTropical_boundary Alt:\t%d degrees\nTropical_n_alt:\t\t%d %d (row,gridid)\nEQ_grid:\t\t%d %d (row,gridid)\nTropical_s_alt:\t\t%d %d (row,gridid)"
+ print (msg % (defs.tropical_boundary_alt,tropical_n_alt,top_alt,eq_grid,mid,tropical_s_alt,bot_alt))
+
+ # Get gridids for 1st and last lon of each lat row.
+ row_start,row_end = first_last_lons(jm,im)
+ if defines.verbose:
+ print ("\nRow_start:")
+ print_col(row_start,indent_tag="\t",fmt="% 6d",cols=10,width=10)
+ print ("Row_end:")
+ print_col(row_end,indent_tag="\t",fmt="% 6d",cols=10,width=10)
+
+ # Set up some default values for the next set of routines
+ defs_grid = {"im" : im, "jm" : jm, "maxid" : maxid, "lats" : lats,
+ "lons" : lons, "dx" : dx, "dy" : dy, "dlon" : dlon,
+ "dlat" : dlat, "dlon_sq" : dlon_sq, "dlat_sq" : dlat_sq,
+ "two_dlat" : two_dlat, "start_lat" : start_lat,
+ "start_lon" : start_lon, "model_flag" : model_flag,
+ "eq_grid" : eq_grid, "tropical_s" : tropical_s,
+ "tropical_n" : tropical_n, "bot" : bot,"top" : top,
+ "mid" : mid, "row_start" : row_start, "row_end" : row_end,
+ "timestep" : timestep, "tropical_s_alt" : tropical_s_alt,
+ "tropical_n_alt" : tropical_n_alt, "bot_alt" : bot_alt,
+ "top_alt" : top_alt, "lon_shift" : lon_shift,
+ "lat_flip" : lat_flip, "faux_grids" : faux_grids
+ }
+
+ # Purpose: Create various things needed for center_finder and others.
+ # Returns:
+ # use_all_lons - Latitude row where whole thing fits in critical_radius.
+ # search_radius - Defines Regional_Low radius by critical_radius or wavenumber.
+ # regional_nys - Maximum number or latitude rows to check based on critical_radius.
+ # gdict - Lookup table for local-8 grids by gridid.
+ # rdict - Lookup table for all grids fit within search_radius by gridid.
+ # ldict - Lookup table for Concavity/Laplacian Test.
+ # ijdict - Lookup table for i,j,lon,lat of a gridid.
+ # Stores: cf_dat.p
+ fnc_out = []
+ args = (defs,gcd,g2l,ij2grid,grid2ij,defs_grid)
+ print ("\nSetup Center_Finder...")
+ fnc_out = setup_center_finder(*args)
+ cf_file = "%scf_dat.p" % (shared_path)
+ pickle.dump(fnc_out, open(cf_file, "wb",-1))
+ print ("\tMade: %s\n" % (cf_file))
+ # Re-read/test.
+ #fnc_out = pickle.load(open(cf_file))
+ (use_all_lons,search_radius,regional_nys, gdict,rdict,ldict,ijdict,
+ min_centers_per_tstep,max_centers_per_tstep,max_centers_per_tstep_change,
+ lapp_cutoff,hpg_cutoff) = fnc_out
+ if save_plot:
+ pname = "%s/%s_regional_radius_by_latitude.pdf" % (shared_path,model)
+ plot = Plot_Map(missing=-1.0,color_scheme="gray_r",nocolorbar='nocolorbar',lon_0=0.0)
+ plot.create_fig()
+ blank = numpy.zeros(im*jm)
+ #print "Row Latitude, Search Radius (km) Approximate model Grids "
+ tmp = [abs(180.0-lons[x]) for x in range(im)]
+ center_lon = tmp.index(min(tmp))
+ # Note these plots may seem offset in longitude due to how lons
+ # breaks with a plot centered on 180.0 degrees. A similar problem
+ # occurs in latitude so the polar most grids may look offset.
+ for j in range(jm):
+ #print j,lats[j],search_radius[j],search_radius[j]/(dx*111.0)
+ for pnt in rdict[row_start[j]+center_lon]:
+ # Just values along this latitude
+ if pnt >= row_start[j] and pnt <= row_end[j]:
+ blank[pnt] = 1.0
+ #plot.add_field(lons,lats,blank,ptype='pcolor')
+ plot.add_field(lons,lats,blank,ptype='imshow')
+ title = "Regional Radius At Each Latitude"
+ plot.finish(pname,title=title)
+ print ("\tMade figure %s" % (pname))
+ pname = "%s/%s_regional_radius_full.pdf" % (shared_path,model)
+ plot = Plot_Map(missing=-1.0,color_scheme="gray_r",nocolorbar='nocolorbar')
+ plot.create_fig()
+ blank = numpy.zeros(im*jm)
+ # Note: the poleward and equatorward ends are not exactly
+ # symmetrical because the fixed length search radius can
+ # result in differing numbers of latitudes being reached as
+ # of latitude spacing changes with latitude.
+ for j in range(jm):
+ interval = dy*0.5
+ edge1 = lats[j]-interval
+ edge2 = lats[j]+interval
+ if edge1 <= -45.0 and edge2 >= -45.0:
+ for pnt in rdict[row_start[j]+center_lon]:
+ blank[pnt] = 1.0
+ break
+ for j in range(jm):
+ interval = dy*0.5
+ edge1 = lats[j]-interval
+ edge2 = lats[j]+interval
+ if edge1 <= 45.0 and edge2 >= 45.0:
+ for pnt in rdict[row_start[j]+center_lon]:
+ blank[pnt] = 1.0
+ break
+ #plot.add_field(lons,lats,blank,ptype='pcolor')
+ plot.add_field(lons,lats,blank,ptype='imshow')
+ title = u"Full Regional Search at 45\u00b0N/S"
+ plot.finish(pname,title=title)
+ print ("\tMade figure %s" % (pname))
+
+ # Purpose: Locate grids for land-sea mask
+ # Returns: lists of gridids used for optional screens.
+ # Stores:
+ if not no_mask:
+ if mask_file == topo_file:
+ if no_topo:
+ # Topography screen disabled, but want
+ # to use topo field to make mask.
+ tfile = NetCDF.Dataset(topo_file,'r',format='NETCDF3_CLASSIC')
+ topo = tfile.variables[var_topo]
+ topo = numpy.array(topo[:],dtype=float,copy=1)
+ if len(topo.shape) > 2:
+ # Assuming arranged as time,lat,long
+ topo = topo[0,:,:]
+ if lat_flip:
+ tmp = topo[::-1,:]
+ topo = numpy.array(tmp) # ensures contiguous
+ if lon_shift:
+ tmp = numpy.roll(topo,lon_shift,axis=1)
+ topo = numpy.array(tmp)
+ tfile.close()
+ topo.shape = im*jm
+ # Use topo file to make land-sea mask
+ #land_gridids = [x for x in range(maxid) if topo[x] > 0.0]
+ land_gridids = [x for x in range(maxid) if topo[x] > defines.thresh_landsea_hgt]
+ else:
+ # Mask created below
+ pass
+ else:
+ # Use provided mask file. Note it is assumed that land grids
+ # have a value of > 0. If not please make exception based
+ # on if pick:
+ tfile = NetCDF.Dataset(mask_file,'r',format='NETCDF3_CLASSIC')
+ mask = tfile.variables[var_land_sea_mask]
+ mask = numpy.array(mask[:],dtype=float,copy=1)
+ # Remove time dimension
+ if len(mask.shape) > 2:
+ # Assuming arranged as time,lat,long
+ mask = mask[0,:,:]
+ if lat_flip:
+ tmp = mask[::-1,:]
+ mask = numpy.array(tmp) # ensures contiguous
+ if lon_shift:
+ tmp = numpy.roll(mask,lon_shift,axis=1)
+ mask = numpy.array(tmp)
+ tfile.close()
+ mask.shape = im*jm
+ # Make land-sea mask
+ land_gridids = [x for x in range(maxid) if mask[x] > defines.thresh_landsea_lsm]
+ land_gridids.sort()
+
+ # Purpose: Locate grids that fail the topography screen.
+ # Returns: lists of gridids used for optional screens.
+ # Stores:
+ if not no_topo:
+ print ("\nMaking Masks")
+ tfile = NetCDF.Dataset(topo_file,'r',format='NETCDF3_CLASSIC')
+ topo = tfile.variables[var_topo]
+ topo = numpy.array(topo[:],dtype=float,copy=1)
+ # Remove time dimension
+ if len(topo.shape) > 2:
+ # Assuming arranged as time,lat,long
+ topo = topo[0,:,:]
+ if lat_flip:
+ tmp = topo[::-1,:]
+ topo = numpy.array(tmp) # ensures contiguous
+ if lon_shift:
+ tmp = numpy.roll(topo,lon_shift,axis=1)
+ topo = numpy.array(tmp)
+ if 'add_offset' in tfile.variables[var_topo].ncattrs():
+ offset = getattr(tfile.variables[var_topo],'add_offset')
+ #print (" JJJ OFFSET")
+ offset = 0.0
+ print (offset)
+ else:
+ offset = 0.0
+ if 'scale_factor' in tfile.variables[var_topo].ncattrs():
+ scale_factor = getattr(tfile.variables[var_topo],'scale_factor')
+ #print (" JJJ SCALE FACTOR HACK")
+ scale_factor = 1.0
+ print (scale_factor)
+ else:
+ scale_factor = 1.0
+ ## Apply offset and scale_factor
+ tmp = numpy.multiply(numpy.add(numpy.array(topo[:],dtype=float,copy=1)
+ ,offset),scale_factor)
+ topo = numpy.array(tmp)
+ tfile.close()
+ topo.shape = im*jm
+ if mask_file == topo_file:
+ # Use topo file to make land-sea mask
+ land_gridids = [x for x in range(maxid) if topo[x] > 0.0]
+ #JJJ - using non-zero for land-sea mask so that
+ land_gridids = [x for x in range(maxid) if topo[x] > defines.thresh_landsea_hgt]
+
+ # Troubled Grids: SLP away from sea level is a derived quantity.
+ # Certain conditions seem to allow for erroneous SLP values.
+ # These can hamper cyclone detection by mimicking or
+ # obscuring real cyclones. Here we create an optional list
+ # of grids that the scheme used to warn itself that the
+ # SLP values in these locations may not be reliable.
+ # Generally, we are skeptical of SLP values over high
+ # elevation (>= 1000m) or steep topography (average local
+ # relief > 150m) and in some cases land bound grids with
+ # sub-level elevations (deep basins). Note centers in these
+ # grids are not discarded out of hand, but rather, undergo
+ # extra tests during the analysis.
+
+ # List all land or near land grids
+ troubled_centers = []
+ for gridid in range(maxid):
+ # Skip grids where all 8 neighbors are ocean
+ near_8 = gdict[gridid]
+ land = [x for x in near_8 if x in land_gridids]
+ if len(land) < 1:
+ continue
+ relief = [topo[x]-topo[gridid] for x in gdict[gridid]]
+ ave_relief = sum(relief)/9.0
+ if ave_relief >= 150:
+ troubled_centers.append(gridid)
+ elif topo[gridid] >= 1000:
+ troubled_centers.append(gridid)
+
+ # Add gridids that you specifically want screened, for
+ # example if you find you have a hotspot of center
+ # activity that you feel is erroneous due to say SLP
+ # reduction errors that are not caught by the above
+ # screen. Or if you want to discard all centers from
+ # a certain locale.
+ grids_of_interest = []
+ for gridid in grids_of_interest:
+ if gridid not in troubled_centers:
+ troubled_centers.append(gridid)
+
+ troubled_centers.sort()
+
+ if save_plot:
+ pname = "%s/%s_topo.pdf" % (shared_path,model)
+ plot = Plot_Map(clevs=[-500,4000,200],cints=[-500.0,4000.0],color_scheme='bone_r')
+ # plot = Plot_Map(clevs=[30000,40000,200],cints=[30000.0,40000.0],color_scheme='bone_r')
+ center_loc = []
+ plot.create_fig()
+ plot.add_field(lons,lats,topo,ptype='pcolor')
+ plot.finish(pname,title="Topography")
+ print ("\tMade figure: %s" % (pname))
+
+ if land_gridids:
+ pname = "%s/%s_land_mask.pdf" % (shared_path,model)
+ plot = Plot_Map(missing=-1.0,color_scheme="gray_r",nocolorbar='nocolorbar')
+ plot.create_fig()
+ center_loc = []
+ for pnt in land_gridids:
+ i,j = grid2ij(pnt,im,jm)
+ latval = g2l(j,lons[0],lats[0],dlon,dlat,jm,"lat","center",False,False,defs.faux_grids)
+ lonval = g2l(i,lons[0],lats[0],dlon,dlat,jm,"lon","center",False,True,defs.faux_grids)
+ center_loc.append((lonval,latval))
+ blank = numpy.zeros(im*jm)
+ plot.add_field(lons,lats,blank,ptype='pcolor',)
+ plot.add_pnts(center_loc,marker='o',msize=4,mfc='black',mec='black',lw=1.)
+ title = "Land Mask"
+ plot.finish(pname,title=title)
+ print ("\tMade figure %s" % (pname))
+
+ if troubled_centers:
+ pname = "%s/%s_troubled_grids.pdf" % (shared_path,model)
+ plot = Plot_Map(missing=-1.0,color_scheme="gray_r",nocolorbar='nocolorbar')
+ plot.create_fig()
+ center_loc = []
+ for pnt in troubled_centers:
+ i,j = grid2ij(pnt,im,jm)
+ latval = g2l(j,lons[0],lats[0],dlon,dlat,jm,"lat","center",False,False,defs.faux_grids)
+ lonval = g2l(i,lons[0],lats[0],dlon,dlat,jm,"lon","center",False,True,defs.faux_grids)
+ center_loc.append((lonval,latval))
+ blank = numpy.zeros(im*jm)
+ plot.add_field(lons,lats,blank,ptype='pcolor',)
+ plot.add_pnts(center_loc,marker='o',msize=4,mfc='black',mec='black',lw=1.)
+ title = "Troubled Grids"
+ plot.finish(pname,title=title)
+ print ("\tMade figure %s" % (pname))
+ # ---------------------------------
+ # For Center_Finder, and others
+ # ---------------------------------
+
+ # Purpose: Store all the setup stuff I just imported.
+ # Returns:
+ # Stores: s_dat.p
+ print ("\nStoring Setup Values...")
+ fnc_out = (im,jm,maxid,lats,lons,timestep,dx,dy,dlon,dlat,start_lat,start_lon,
+ dlon_sq,dlat_sq,two_dlat,model_flag,eq_grid,tropical_n,tropical_s,
+ bot,mid,top,row_start,row_end,tropical_n_alt,tropical_s_alt,
+ bot_alt,top_alt,lon_shift,lat_flip,the_calendar,found_years,
+ super_years,dim_lat,dim_lon,dim_time,var_lat,var_lon,var_time,
+ var_slp,var_topo,var_land_sea_mask,file_seperator,no_topo,
+ no_mask,slp_path,model,out_path,shared_path,lat_edges,lon_edges,
+ land_gridids,troubled_centers,faux_grids)
+ sf_file = "%ss_dat.p" % (shared_path)
+ pickle.dump(fnc_out, open(sf_file, "wb",-1))
+ print ("\tMade: %s\n" % (sf_file))
+ del fnc_out
+ if save_plot:
+ print ("\nTesting Pull_Data...")
+ # Produce an example plot of the SLP field. Good idea to check
+ # correctness with other software as this is how the data will
+ # be read and used. Look for upside down or longitude shifts
+ # in output.
+ print (defs.read_scale)
+ fnc = pull_data.pull_data(NetCDF,numpy,slp_path,file_seperator,
+ int(super_years[0]),defs.read_scale,var_slp,var_time,
+ lat_flip,lon_shift)
+ (slp,times,the_time_units) = fnc
+ del fnc
+
+ print (slp[1])
+
+
+ # Jeyavinoth
+ # removed the way the original code computes date time
+ # wrote my own function for the calendar processing
+ # function returns dtimes, date_stamps & adates
+ # since the code below doesn't use adates or dtimes, I don't save those variables here
+ # if needed use
+ # dtimes, date_stamps, adates = jjCal.get_time_info(the_time_units, times, calendar=the_calendar)
+ # right now the code runs assuming it is "standard" calendar
+ # I started writing code for 'julian' calendar, but have to test this
+ # since adates is a different format output now, I change defs_v4.py to account for this (check comments in that file)
+ defs.start_date_str = jjCal.get_start_date(the_time_units)
+ _, date_stamps, _ = jjCal.get_time_info(the_time_units, times, calendar=the_calendar)
+
+ # # Jeyavinoth: Commented code from here till "Jeyavinoth: End"
+ # # Work with the time dimension a bit.
+ # # This is set in setup_vX.py
+ #
+ # jd_fake = 0
+ # if the_calendar != 'standard':
+ # # As no calendar detected assume non-standard
+ # jd_fake = 1
+ # elif the_calendar == 'proleptic_gregorian':
+ # jd_fake = False
+ # tsteps = len(times)
+ # print (tsteps)
+ # print (the_time_units)
+ # the_time_range = [times[0],times[tsteps-1]]
+ # start = "%s" % (the_time_units)
+ # tmp = start.split()
+ #
+ # print ('JIMMY, here is tmp:')
+ # print (tmp)
+ # tmp1 = tmp[2].split("-")
+ # tmp2 = tmp[3].split(":")
+ #
+ # print (tmp1 )
+ # print (tmp2)
+ # #tmp3 = tmp2[2][0]
+ # tmp3 = 0
+ # start = "%s %s %04d-%02d-%02d %02d:%02d:%02d" % \
+ # (tmp[0],tmp[1],int(tmp1[0]),int(tmp1[1]),\
+ # int(tmp1[2]),int(tmp2[0]),int(tmp2[1]),\
+ # int(tmp3))
+ # print (start)
+ # # Warning this could get weird for non-standard
+ # # calendars if not set correctly (say to noleap)
+ # # in setup_vX.py
+ # cdftime = netcdftime.utime(start,calendar=the_calendar)
+ # get_datetime = cdftime.num2date
+ # dtimes = [get_datetime(times[step]) for step in range(0,tsteps)]
+ #
+ # # Get Julian Days.. unless GCM uses non-standard calendar in which case
+ # # enumerate with timestep and use uci_stamps for datetime things.
+ # if jd_fake:
+ # # Use timesteps rather than dates
+ # # examples '000000000', '000000001'
+ # adates = ["%09d" % (x) for x in range(tsteps)]
+ # else:
+ # # Using regular date/times
+ # # examples 244460562, 244971850i
+ # date2jd = netcdftime.JulianDayFromDate
+ # adates = [int(100*date2jd(x,calendar='standard')) for x in dtimes]
+ # date_stamps = ["%4d %02d %02d %02d" % (d.year,d.month,d.day,d.hour) for d in dtimes]
+ # print ("Start %s and End %s" % (date_stamps[0],date_stamps[-1]))
+ # del times
+ # del dtimes
+ #
+ # # Jeyavinoth: End
+
+ # Plot an example to see if okay.
+ plot = plot_map.plotmap(clevs=[960,1040,2],
+ cints=[960.0,1040.0],color_scheme="jet")
+ for step in range(tsteps):
+ msg = "State at %s UTC" % (date_stamps[step])
+ pname = "%s%s_example_slp_%s.pdf" % (shared_path,model,
+ date_stamps[step].replace(" ",""))
+ plot.create_fig()
+ slp_step = slp[step,:,:].copy()
+ slp_step.shape = jm*im
+# plot.add_field(lons,lats,slp_step,ptype='pcolor')
+ plot.add_field(lons,lats,slp_step,ptype='contour')
+ plot.finish(pname,title=msg)
+ print ("\tMade figure: %s" % (pname))
+ # only make a single plot, for more comment break
+ break
+ del slp
+
+ # Purpose: Create various things needed for tracking and others.
+ # Returns:
+ # tdict - Lookup table for grids within a great circle radius of (travel_distance).
+ # lwdict - Lookup table for latitude weights to ?
+ # Stores: tf_dat.p
+ print ("\nSetup Track_Finder...")
+ args = (defs,gdict,gcd,g2l,ij2grid,grid2ij,defs_grid)
+ fnc_out = setup_tracking(*args)
+ tf_file = "%stf_dat.p" % (shared_path)
+ pickle.dump(fnc_out, open(tf_file, "wb",-1))
+ print ("\tMade: %s\n" % (tf_file))
+ # Uncomment to re-read/test.
+ #fnc_out = pickle.load(open(tf_file))
+ (tdict,lwdict) = fnc_out
+ if save_plot:
+ pname = "%s/%s_travel_distance_by_latitude.pdf" % (shared_path,model)
+ plot = Plot_Map(missing=-1.0,color_scheme="gray_r",nocolorbar='nocolorbar',lon_0=0.0)
+ plot.create_fig()
+ blank = numpy.zeros(im*jm)
+ tmp = [abs(180.0-lons[x]) for x in range(im)]
+ center_lon = tmp.index(min(tmp))
+ # Note these plots may seem offset in longitude due to how lons
+ # breaks with a plot centered on 180.0 degrees.
+ for j in range(jm):
+ for pnt in tdict[row_start[j]+center_lon]:
+ # Just values along this latitude
+ if pnt >= row_start[j] and pnt <= row_end[j]:
+ blank[pnt] = 1.0
+ #plot.add_field(lons,lats,blank,ptype='pcolor')
+ plot.add_field(lons,lats,blank,ptype='imshow')
+ title = "Travel Distance At Each Latitude"
+ plot.finish(pname,title=title)
+ print ("\tMade figure %s" % (pname))
+ pname = "%s/%s_travel_distance_full.pdf" % (shared_path,model)
+ plot = Plot_Map(missing=-1.0,color_scheme="gray_r",nocolorbar='nocolorbar')
+ plot.create_fig()
+ blank = numpy.zeros(im*jm)
+ # Note: the poleward and equatorward ends are not exactly
+ # symmetrical because the fixed length search radius can
+ # result in differing numbers of latitudes being reached as
+ # of latitude spacing changes with latitude.
+ for j in range(jm):
+ interval = dy*0.5
+ edge1 = lats[j]-interval
+ edge2 = lats[j]+interval
+ if edge1 <= -45.0 and edge2 >= -45.0:
+ for pnt in tdict[row_start[j]+center_lon]:
+ blank[pnt] = 1.0
+ break
+ for j in range(jm):
+ interval = dy*0.5
+ edge1 = lats[j]-interval
+ edge2 = lats[j]+interval
+ if edge1 <= 45.0 and edge2 >= 45.0:
+ for pnt in tdict[row_start[j]+center_lon]:
+ blank[pnt] = 1.0
+ break
+ #plot.add_field(lons,lats,blank,ptype='pcolor')
+ plot.add_field(lons,lats,blank,ptype='imshow')
+ title = u"Full Travel Distance at 45\u00b0N/S"
+ plot.finish(pname,title=title)
+ print ("\tMade figure %s" % (pname))
+
+ # Purpose: Create various things needed for attributing.
+ # Returns:
+ # darea - Lookup table for area of each grid.
+ # distance_lookup - Lookup table for distance from grid to any another.
+ # angle_lookup - Lookup table for angle from grid to any another.
+ # close_by - Lookup table based on zonal wavenumber to limit search area.
+ # wander_test - Like close_by by larger... to limit runaway searches.
+ # gdict_new - Same as gdict but reordered.
+ # Stores: af_dat.p
+ print ("\nSetup Attributed...")
+ inv_wn = 1.0/float(wavenumber_a)
+ args = (defs,gdict,gcd,g2l,ij2grid,grid2ij,
+ grid_area,make_screen,rhumb_line_nav,defs_grid)
+ fnc_out = setup_att(*args)
+ af_file = "%saf_dat.p" % (shared_path)
+ pickle.dump(fnc_out, open(af_file, "wb",-1))
+ print ("\tMade: %s\n" % (af_file))
+ # Uncomment to re-read/test.
+ #fnc_out = pickle.load(open(af_file))
+ (darea,distance_lookup,angle_lookup,close_by,wander_test,gdict_new,neighbor_test) = fnc_out
+
+ if keep_log:
+ # Redirect stdout back to screen
+ log_file.close()
+ sys.stdout = screenout
diff --git a/diagnostics/etc_composites/util/tracker/strip_read_v4.py b/diagnostics/etc_composites/util/tracker/strip_read_v4.py
new file mode 100755
index 000000000..159ca8893
--- /dev/null
+++ b/diagnostics/etc_composites/util/tracker/strip_read_v4.py
@@ -0,0 +1,14 @@
+def strip_read(line):
+ """
+ Reads and parses a line from the read_file for the purpose of
+ extracting certain info
+ """
+ parts = line.split()
+
+ center_table = [int(parts[0]),int(parts[1]),int(parts[2]),
+ int(parts[3]),int(parts[4]),int(parts[5]),
+ int(parts[6]),int(parts[7]),int(parts[8]),
+ int(parts[9]),int(parts[10]),int(parts[11]),
+ int(parts[12]),int(parts[13]),parts[14],
+ parts[15]]
+ return center_table
diff --git a/diagnostics/etc_composites/util/tracker/template_temp_multi_1.py b/diagnostics/etc_composites/util/tracker/template_temp_multi_1.py
new file mode 100755
index 000000000..b6b49bdda
--- /dev/null
+++ b/diagnostics/etc_composites/util/tracker/template_temp_multi_1.py
@@ -0,0 +1,166 @@
+import defines
+
+if (defines.over_write_years[0] == defines.over_write_years[1]):
+ snyear = '%04d'%(defines.over_write_years[0])
+else:
+ snyear = '%04d_%04d'%(defines.over_write_years[0], defines.over_write_years[1])
+
+model = defines.model
+rundir = 'out_%s'%(defines.model)
+startdir = defines.main_folder
+
+in_file = startdir+rundir+'/'+model+'/mcms_'+model+'_'+snyear+'_tracks.txt'
+out_file = startdir+'read_%s/'%(defines.model)+rundir+'_output_'+snyear+'.txt'
+save_output = True
+overwrite = False
+
+just_centers = False
+just_center_table = True
+
+start_time = 'YYYY MM DD HH SEASON'
+end_time = 'YYYY MM DD HH SEASON'
+
+places = ['GLOBAL']
+include_atts = False
+include_stormy = False
+
+detail_tracks= startdir+'read_%s/'%(defines.model)+rundir+'_track_out_'+snyear+'.txt'
+as_tracks = ''
+
+# Name: in_file
+# Description: Full path to the msmc data file you wish to read from.
+# Default: ''
+# Example: '/Volumes/scratch/in.txt'
+
+# Name: out_file
+# Description: Full path to the file you wish to save to. If left empty '' the
+# '.txt' in in_file will be replaced with with '_new.txt'.
+# Default: ''
+# Example: '/Volumes/scratch/in_new.txt'
+
+# Name: save_output
+# Description: Save results to out_file. If False/0 results saved in memory for
+# further analysis such as graphics/plot creation.
+# Default: True/1
+# Example: True or 1
+
+# Name: overwrite
+# Description: Overwrites out_file if it exists.
+# Default: False/0
+# Example: False or 0
+
+# Name: just_centers
+# Description: Saves only basic center info (YYYY MM DD HH LAT LON).
+# YYYY MM DD HH - Date Time Group (hour GMT/Z).
+# LAT - Latitude (degrees, [-90,90]).
+# LON - Longitude (degrees, [0,360]).
+# Default: False/0
+# Example: False or 0
+
+# Name: start_time
+# Description: Limits the output date-times on and after start_time. By default
+# all dates-times are returned.
+#
+# Flag : Description
+# YYYY : Include the year YYYY and all subsequent years until
+# the end of the record is reached or the ending year.
+# MM : Include all data from the month number MM and only data
+# from that month MM. Can be limited with YYYY, DD or HH.
+# DD : Include all data from the day DD and only data from the
+# day DD. Can be limited with YYYY, MM or HH.
+# HH : Include all data from the GMT hour HH and only data from
+# the GMT hour HH. Can be limited with YYYY, MM or DD.
+# SEASON : Include only data from a list of predefined seasons:
+# (DJF, MAM, JJA, SON, NDJFMA, MJJASO)
+#
+# NOTE: SEASON can't be used concurrently with other time limits.
+# If you want to do this run the filter more than once passing
+# the out_file of one as the in_file to the other.
+#
+# Default: 'YYYY MM DD HH SEASON'
+# Example: start_time = '2005 10 1 HH SEASON' -> start saving on Oct, 1st 2005
+# start_time = '2006 MM DD HH SEASON' -> start saving on and after 2005
+# start_time = 'YYYY MM DD 0 SEASON' -> save all 0Z data
+# start_time = 'YYYY MM DD HH DJF' -> save all DJF data.
+
+# Name: end_time
+# Description: Limits data extraction to a range when used with start_time. The
+# default sets the end as the final time record in the data set.
+#
+# NOTE: SEASON has to match one in start_time!
+#
+# Default: 'YYYY MM DD HH SEASON'
+# Example: end_time = '2006 5 13 HH SEASON' -> stop saving on May, 13th 2006
+# end_time = '2006 MM DD HH SEASON' -> stop saving at the end of 2005
+
+# Name: places
+# Description: Limits the output to specific locals. By default all data are
+# returned.
+#
+# Flag : Description
+# GLOBAL : Include centers from everywhere
+# NH : Include only centers from the Northern Hemisphere.
+# SH : Include only centers from the Southern Hemisphere.
+# LAND : Include only centers from over the land as defined by the
+# land/sea mask provided by the NCEP Reanalysis.
+# SEA : Include only centers from over the sea as defined by the
+# land/sea mask provided by the NCEP Reanalysis.
+# GridID : Include only centers that occupy this list of gridIDs
+# (can be only one, space separated, one line). A simple
+# script (l2g.py) will convert a list of lon, lat pairs
+# into a list of gridIDs.
+#
+# Note: The ability to have multiple places (e.g. ['LAND','NH']) is
+# not yet available. The work around is to run ['LAND'] and
+# use the out_file of that as input and rerun with ['NH'].
+#
+# Default: ['GLOBAL']
+# Example:
+# places = [5503, 3303, 2203] # limited to 3 gridids
+# places = ['NH'] # limited to the Northern Hemisphere
+# places = [7602]
+
+# Name: include_atts
+# Description: Include attributed grids in the places screen. That is, the
+# default behavior only uses the center gridID's to filter by place.
+# By uncommenting 'include_atts' attributed grids are also used to
+# filter by place. The difference is the default treats cyclones as
+# point objects and only gives the cyclones whose centers passed
+# directly over the grid containing say New York City. This option
+# treats cyclones as non-point objects and returns all cyclones for
+# which any of its attributed grids pass over the grid containing
+# New York City.
+# Default: False/0
+# Example: False or 0
+
+# Name: include_stormy
+# Description: Similar to include_atts but allows for stormy grids to be added
+# as well. In this case the stormy grids are assigned to the closest
+# of the n-centers that they are associated with on a grid by grid
+# basis.
+# Default: False/0
+# Example: False or 0
+
+# Name: detail_tracks
+# Description: A special case where we want to treat centers as tracks. To do
+# this we must first create detail_tracks which is a file storing
+# a list of each track and all of its centers. If left empty ''
+# nothing is done in this regard.
+#
+# WARNING: in_file must contain tracked centers rather than just
+# centers else problems will occur.
+# Default: ''
+# Example: '/Volumes/scratch/sorted_by_track.txt'
+
+# Name: as_tracks
+# Description: Full path to the file created by 'detail_tracks'. The output is
+# sorted by track which means time specificity is lost (i.e., the
+# records are not by time but by track). As a result problematic,
+# stormy, and empty centers are not returned as of now, but filters
+# using include_stormy and include_atts are working. Also, note
+# that track records are separated by -444 and are not stored in
+# strictly chronological order.
+#
+# NOTE: Results stored in out_file
+# Default: ''
+# Example: '/Volumes/scratch/sorted_by_track.txt'
diff --git a/diagnostics/etc_composites/util/tracker/template_temp_multi_2.py b/diagnostics/etc_composites/util/tracker/template_temp_multi_2.py
new file mode 100755
index 000000000..11da4bd5c
--- /dev/null
+++ b/diagnostics/etc_composites/util/tracker/template_temp_multi_2.py
@@ -0,0 +1,176 @@
+import defines
+
+# snyear = '2010'
+# snyear = '%d_%d'%(defines.over_write_years[0], defines.over_write_years[1])
+# snyear = '2000_2029'
+# model = 'temp'
+# rundir = 'out_temp'
+
+if (defines.over_write_years[0] == defines.over_write_years[1]):
+ snyear = '%04d'%(defines.over_write_years[0])
+else:
+ snyear = '%04d_%04d'%(defines.over_write_years[0], defines.over_write_years[1])
+# snyear = '%04d_%04d'%(defines.over_write_years[0], defines.over_write_years[1])
+model = defines.model
+rundir = 'out_%s'%(defines.model)
+startdir = defines.main_folder
+
+in_file = startdir+rundir+'/'+model+'/mcms_'+model+'_'+snyear+'_tracks.txt'
+out_file = startdir+'read_%s/'%(defines.model)+rundir+'_output_'+snyear+'.txt'
+
+save_output = True
+overwrite = False
+
+just_centers = False
+just_center_table = True
+
+start_time = 'YYYY MM DD HH SEASON'
+end_time = 'YYYY MM DD HH SEASON'
+
+places = ['GLOBAL']
+include_atts = False
+include_stormy = False
+
+#detail_tracks= startdir+'/READ_CAM6/'+rundir+'_track_out_'+snyear+'.txt'
+#as_tracks = ''
+detail_tracks = ''
+as_tracks = startdir+'read_%s/'%(defines.model)+rundir+'_track_out_'+snyear+'.txt'
+
+
+# Name: in_file
+# Description: Full path to the msmc data file you wish to read from.
+# Default: ''
+# Example: '/Volumes/scratch/in.txt'
+
+# Name: out_file
+# Description: Full path to the file you wish to save to. If left empty '' the
+# '.txt' in in_file will be replaced with with '_new.txt'.
+# Default: ''
+# Example: '/Volumes/scratch/in_new.txt'
+
+# Name: save_output
+# Description: Save results to out_file. If False/0 results saved in memory for
+# further analysis such as graphics/plot creation.
+# Default: True/1
+# Example: True or 1
+
+# Name: overwrite
+# Description: Overwrites out_file if it exists.
+# Default: False/0
+# Example: False or 0
+
+# Name: just_centers
+# Description: Saves only basic center info (YYYY MM DD HH LAT LON).
+# YYYY MM DD HH - Date Time Group (hour GMT/Z).
+# LAT - Latitude (degrees, [-90,90]).
+# LON - Longitude (degrees, [0,360]).
+# Default: False/0
+# Example: False or 0
+
+# Name: start_time
+# Description: Limits the output date-times on and after start_time. By default
+# all dates-times are returned.
+#
+# Flag : Description
+# YYYY : Include the year YYYY and all subsequent years until
+# the end of the record is reached or the ending year.
+# MM : Include all data from the month number MM and only data
+# from that month MM. Can be limited with YYYY, DD or HH.
+# DD : Include all data from the day DD and only data from the
+# day DD. Can be limited with YYYY, MM or HH.
+# HH : Include all data from the GMT hour HH and only data from
+# the GMT hour HH. Can be limited with YYYY, MM or DD.
+# SEASON : Include only data from a list of predefined seasons:
+# (DJF, MAM, JJA, SON, NDJFMA, MJJASO)
+#
+# NOTE: SEASON can't be used concurrently with other time limits.
+# If you want to do this run the filter more than once passing
+# the out_file of one as the in_file to the other.
+#
+# Default: 'YYYY MM DD HH SEASON'
+# Example: start_time = '2005 10 1 HH SEASON' -> start saving on Oct, 1st 2005
+# start_time = '2006 MM DD HH SEASON' -> start saving on and after 2005
+# start_time = 'YYYY MM DD 0 SEASON' -> save all 0Z data
+# start_time = 'YYYY MM DD HH DJF' -> save all DJF data.
+
+# Name: end_time
+# Description: Limits data extraction to a range when used with start_time. The
+# default sets the end as the final time record in the data set.
+#
+# NOTE: SEASON has to match one in start_time!
+#
+# Default: 'YYYY MM DD HH SEASON'
+# Example: end_time = '2006 5 13 HH SEASON' -> stop saving on May, 13th 2006
+# end_time = '2006 MM DD HH SEASON' -> stop saving at the end of 2005
+
+# Name: places
+# Description: Limits the output to specific locals. By default all data are
+# returned.
+#
+# Flag : Description
+# GLOBAL : Include centers from everywhere
+# NH : Include only centers from the Northern Hemisphere.
+# SH : Include only centers from the Southern Hemisphere.
+# LAND : Include only centers from over the land as defined by the
+# land/sea mask provided by the NCEP Reanalysis.
+# SEA : Include only centers from over the sea as defined by the
+# land/sea mask provided by the NCEP Reanalysis.
+# GridID : Include only centers that occupy this list of gridIDs
+# (can be only one, space separated, one line). A simple
+# script (l2g.py) will convert a list of lon, lat pairs
+# into a list of gridIDs.
+#
+# Note: The ability to have multiple places (e.g. ['LAND','NH']) is
+# not yet available. The work around is to run ['LAND'] and
+# use the out_file of that as input and rerun with ['NH'].
+#
+# Default: ['GLOBAL']
+# Example:
+# places = [5503, 3303, 2203] # limited to 3 gridids
+# places = ['NH'] # limited to the Northern Hemisphere
+# places = [7602]
+
+# Name: include_atts
+# Description: Include attributed grids in the places screen. That is, the
+# default behavior only uses the center gridID's to filter by place.
+# By uncommenting 'include_atts' attributed grids are also used to
+# filter by place. The difference is the default treats cyclones as
+# point objects and only gives the cyclones whose centers passed
+# directly over the grid containing say New York City. This option
+# treats cyclones as non-point objects and returns all cyclones for
+# which any of its attributed grids pass over the grid containing
+# New York City.
+# Default: False/0
+# Example: False or 0
+
+# Name: include_stormy
+# Description: Similar to include_atts but allows for stormy grids to be added
+# as well. In this case the stormy grids are assigned to the closest
+# of the n-centers that they are associated with on a grid by grid
+# basis.
+# Default: False/0
+# Example: False or 0
+
+# Name: detail_tracks
+# Description: A special case where we want to treat centers as tracks. To do
+# this we must first create detail_tracks which is a file storing
+# a list of each track and all of its centers. If left empty ''
+# nothing is done in this regard.
+#
+# WARNING: in_file must contain tracked centers rather than just
+# centers else problems will occur.
+# Default: ''
+# Example: '/Volumes/scratch/sorted_by_track.txt'
+
+# Name: as_tracks
+# Description: Full path to the file created by 'detail_tracks'. The output is
+# sorted by track which means time specificity is lost (i.e., the
+# records are not by time but by track). As a result problematic,
+# stormy, and empty centers are not returned as of now, but filters
+# using include_stormy and include_atts are working. Also, note
+# that track records are separated by -444 and are not stored in
+# strictly chronological order.
+#
+# NOTE: Results stored in out_file
+# Default: ''
+# Example: '/Volumes/scratch/sorted_by_track.txt'
diff --git a/diagnostics/etc_composites/util/tracker/track_finder_v4.py b/diagnostics/etc_composites/util/tracker/track_finder_v4.py
new file mode 100755
index 000000000..ec10e4c40
--- /dev/null
+++ b/diagnostics/etc_composites/util/tracker/track_finder_v4.py
@@ -0,0 +1,1686 @@
+""" This module extracts cyclone tracks from input lists of centers
+ from a series of sea level pressure fields. This module contains
+ the main logic of this process.
+#!/usr/bin/env python -tt
+
+ Options/Arguments:
+ defs_set -- directory of options.
+ imports -- list of modules to import.
+ start_tstep -- starting timestep.
+ end_tstep -- ending timestep.
+ out_path -- path to directory for storing results.
+ center_file -- output of center_finder.
+ dumped_file -- discards file from refine_centers and center_finder.
+ intensity_file -- file of cyclone intensity statistics from refine_centers.
+ shared_path -- path to directory storing info about the data source.
+ slp_path -- path to the data.
+ year_start -- first year of data.
+ year_end -- last year of data.
+ model -- designator of the source of the data.
+ get_var -- name of the variable to be extracted from the data.
+
+ Returns/Creates:
+ tracks_save -- ASCII file of candidate tracks found and kept.
+ dumped_centers_file -- ASCII file of candidate centers found and
+ discarded as untrackable.
+
+ Examples:
+
+ Notes: See bottom of this document for extended notes that are denoted
+ in the code.
+
+ Memory Use: Depending on resolution and options expect at least 300MB per
+ instance and up to 2 GB.
+
+ Run Time: With the 60 Year NCEP Reanalysis I takes roughly 1 hour and uses
+ ~2 GB of memory. This is on a Mac Pro with 2.26 GHz processors,
+ 12 GB of RAM, and a 3 disk RAID.
+
+ Author: Mike Bauer
+
+ Log:
+ 2006/12 MB - File created.
+ lost track of dates of updates and fixes... sorry
+ 2008/10 MB - Added input checks, docstring.
+"""
+import sys,os
+import defines
+
+def process_data(values,outfile,minval,interval,verbose=False,raw=False) :
+ """
+ Convert to fractional percentage, find peak value etc.
+ """
+
+ step = interval
+
+ binsum = sum(values)
+ i = 0
+ bin2gmt = []
+ lower_end = minval - step
+ peak_frac = 0.0
+ peak_y = -1000.0
+ peak_y_x = 0
+
+ for bin in values :
+ lower_end = lower_end + step
+ if i == 0 :
+ min_x = lower_end-0.5*step
+ if i == len(values)-1 :
+ max_x = lower_end-0.5*step
+ if binsum > 0:
+ frac = float(bin)/float(binsum)*100.0
+ else:
+ frac = 0.0
+ if frac > peak_frac :
+
+ peak_frac = frac
+ peak_y = bin
+ peak_y_x = i
+
+ if i == 0:
+ min_x = lower_end+(0.5*step)
+ if i == len(values)-1:
+ max_x = lower_end+(0.5*step)
+
+ if raw:
+ #bin2gmt.append("%f %f\n"%(lower_end+(0.5*step),bin))
+ # done so -D option makes a 0 wind centered on 0 in the plot.
+ bin2gmt.append("%f %f\n"%(lower_end,bin))
+ else:
+ bin2gmt.append("%f %f\n"%(lower_end+(0.5*step),frac))
+
+ if verbose :
+ if i == 0 :
+ llower_end = lower_end - step
+ print ( "Bin #%3d | X < %f\tCnt = %d" % (i,llower_end+step,bin))
+ elif i == len(values)-1 :
+ print ( "Bin #%3d | %f <= X\t--- Cnt = %d" % (i,lower_end,bin))
+ else :
+ print ( "Bin #%3d | %f <= X < %f\tCnt = %d" % (i,lower_end,lower_end+step,bin))
+ i+=1
+
+ if verbose:
+ print ("Peak Fraction %f for bin %d = %f" % (peak_frac,peak_y_x,peak_y))
+ print ("MinX_val %f" % (min_x))
+ print ("MaxX_val %f" % (max_x))
+
+ file1 = open(outfile, 'w')
+ file1.writelines(bin2gmt)
+ file1.close()
+
+ return peak_frac,min_x,max_x
+
+def rose_plot(alldat,plot_name,minbin,maxbin,incbin,verbose=False,fig_format='.eps',title="Bearing"):
+
+ """
+ Use GMT to plot a histogram.
+ """
+ import commands
+
+ if verbose :
+ print ("Number of bins : %d" % (len(alldat)))
+ print ("Minimum bin : %f" % (minbin))
+ print ("Maximum bin : %f" % (maxbin))
+ print ("Bin interval : %f" % (incbin))
+
+ # Determine bin boundaries and dump to a txt file for GMT. Also, info for
+ # fractional amount.
+ peak_frac,min_x,max_x = process_data(alldat,"tmp.txt",minbin,incbin,verbose=verbose,raw=1)
+ if verbose:
+ print ("Peak Scaling Fraction %f" % (peak_frac))
+
+ # Deal with various graphic output options
+ defaults = "gmtset HEADER_FONT_SIZE 14p LABEL_FONT_SIZE 12p \
+ ANOT_FONT_SIZE 10p PAPER_MEDIA letter DOTS_PR_INCH 600 \
+ PAGE_ORIENTATION PORTRAIT"
+ temp_ps = "temp.ps"
+ plot_name = plot_name + fig_format
+ if fig_format == '.png':
+ make_fig = "convert -trim -quality 90 -density 144 144 %s %s; rm -f %s" % (temp_ps,plot_name,temp_ps)
+ elif fig_format == '.pdf':
+ make_fig = "ps2eps --force --quiet --removepreview --rotate=+ --loose \
+ temp.ps; ps2pdf13 -dEPSCrop -r144 temp.eps %s; rm temp.eps" % (plot_name)
+ #make_pdf = "ps2pdf13 -dEPSCrop -r144 %s %s; rm -f %s" % (temp_ps,plot_name,temp_ps)
+ elif fig_format == '.eps':
+ make_fig = "ps2eps --force --quiet --removepreview \
+ --rotate=+ --loose temp.ps ; mv temp.eps %s" % (plot_name)
+ if verbose:
+ print ("Making ",plot_name)
+
+ # Determine intervals etc.
+ x_label = "'Bearing'"
+ y_label = '"Frequency Of Occurrence (%)"'
+
+ #draw_histo = "psrose %s -: -A%s -S1.8in -R0/1/0/360 -B0.2g0.2:'Relative Frequency Of Occurrence':/30g30:.'Bearing': -Gblack -W0.75p,black -D > %s" % \
+ #draw_histo = "psrose %s -: -A%s -S1.8in -R0/1/0/360 -B0.2g0.2:'Relative Frequency Of Occurrence':/30g30:.'Bearing': -G124 -W0.75p,black -D > %s" % \
+ # ("tmp.txt",repr(incbin),temp_ps)
+ draw_histo = "psrose %s -: -A%s -S1.8in -R0/1/0/360 -B0.2g0.2:'Relative Frequency Of Occurrence':/30g30:.'%s': -G124 -W0.75p,black -D > %s" % \
+ ("tmp.txt",repr(incbin),title,temp_ps)
+
+
+ #draw_histo = "psrose %s -: -A%s -S1.8in -R0/1/0/360 -B0.2g0.2/30g30 -G125 > %s" % \
+ # ("tmp.txt",repr(incbin),temp_ps)
+
+ todos = [defaults,draw_histo,"rm -f tmp.txt"]
+
+ todos.append(make_fig)
+
+ for CMD in todos :
+ # watch running things in background/threads as some operations
+ # must complete before others start and too use of common names
+ # could cause overrunning for slow processes...
+ if verbose :
+ os.system("touch %s" % (plot_name))
+ else :
+ status = commands.getstatusoutput(CMD)
+ #print (status,CMD)
+ if status[0] != 0 :
+ print ("plothisto ERROR\n",status)
+ sys.exit()
+
+def plot_hist(plt,numpy,x,y,width,stat_file,pname,title,xlab):
+ total = y.sum()
+ cumsum = [(float(z)/float(total))*100.0 for z in numpy.cumsum(y)]
+ stat_save = open(stat_file,"w")
+ msg = ''.join(["%8.3f" % (z) for z in x])
+ stat_save.writelines("Bins "+msg+"\n")
+ msg = ''.join(["%8.3f" % (z) for z in cumsum])
+ stat_save.writelines("CFrac "+msg+"\n")
+ hmean = float(x.sum())/float(len(x))
+ hmin = 0
+ for tmp in y:
+ if tmp > 0.0:
+ break
+ hmin += 1
+ hmax = -1
+ for tmp in y[::-1]:
+ if tmp > 0.0:
+ break
+ hmax -= 1
+ msg = "%7.3f" % (hmean)
+ stat_save.writelines("Mean "+msg+"\n")
+ msg = "%7.3f" % (x[hmin])
+ stat_save.writelines("Min "+msg+"\n")
+ msg = "%7.3f" % (x[hmax])
+ stat_save.writelines("Max "+msg+"\n")
+ stat_save.close()
+ fig = plt.figure()
+ ax = fig.add_subplot(111)
+ plt.bar(x,y,width=width,color='0.6',edgecolor='k')
+ # Add Labels and such
+ ax.set_xlabel(xlab)
+ ax.set_ylabel("Count")
+ # Add title
+ ax.set_title(title)
+ # ax.grid(True)
+ # Save to File
+ fig.savefig(pname,dpi=144,orientation='landscape')
+ plt.close('all')
+ return "Made %s" % (pname)
+
+def setup_bins(bins_left_edge):
+ """Set up bins for histograms"""
+ # Make a few extras
+ bins_width = abs(bins_left_edge[0]-bins_left_edge[1])
+ bins_centers = bins_left_edge + 0.5*bins_width
+ bins_right_edge = bins_left_edge + bins_width
+
+ # # To print out bins
+ # fmt = "Bin % 4d: % 7.2f <= % 7.2f < % 7.2f"
+ # for bin in range(len(bins_left_edge)):
+ # print fmt % (bin,bins_left_edge[bin],bins_centers[bin],
+ # bins_right_edge[bin])
+ # import sys;sys.exit()
+
+ return (bins_left_edge,bins_centers,bins_right_edge,bins_width)
+
+def get_bin_index(value,bins,bin_width):
+ """Find the correct bin to put value into"""
+ if value<=bins[0]:
+ bin_index = 0
+ elif value>=bins[-1]:
+ bin_index = len(bins) - 1
+ else:
+ bin_index = int((value-bins[0])/bin_width)
+
+ return bin_index
+
def rewrite(in_file,years,action="w",reorder=False):
    """Split a multi-year track/center file into one file per year.

    Parameters:
        in_file : path whose *basename* contains a "YYYY_YYYY" year-range
                  tag built from years[0] and years[-1]; each data line
                  begins with a 4-digit year.
        years   : list of the integer years covered by in_file.
        action  : mode used to open the per-year output files ("w" or "a").
        reorder : when True, re-sort each yearly file by its uci field
                  (15th whitespace-separated column) after splitting;
                  needed when action="a" appended out of order.

    Returns the list of per-year file paths created (or renamed to).
    """
    import os
    print ("\tRe-writing %s as yearly files..." % (in_file))
    did_files = []
    #---------------------------------------------------------------------------
    # Partition unified file by year
    #---------------------------------------------------------------------------

    if len(years) < 2:
        # Single year: nothing to split.  Replace the year-range tag in
        # the basename only (the directory path may also contain a year,
        # so we must not replace across the whole path) and rename.
        in_file_split = in_file.split('/')
        in_file_split[-1] = in_file_split[-1].replace(
            "%4d_%4d" % (years[0], years[-1]), "%4d" % (years[0]))
        out_file = '/'.join(in_file_split)
        os.rename(in_file, out_file)
        return [out_file]

    # Open one output handle per year, keyed by year, so the unified
    # file can be routed in a single pass.
    yrlydir = {}
    for nyear in years:
        # Again, swap the year-range tag for the single year in the
        # basename only.
        in_file_split = in_file.split('/')
        in_file_split[-1] = in_file_split[-1].replace(
            "%4d_%4d" % (years[0], years[-1]), "%4d" % (nyear))
        out_file = '/'.join(in_file_split)
        yrlydir[nyear] = open(out_file, action)
        did_files.append(out_file)

    # Route every line of the unified file to the handle for its year;
    # the first 4 characters of each line are the year.  A line whose
    # year is not in `years` raises KeyError (input assumed consistent
    # with `years` -- TODO confirm with callers).
    read_file = open(in_file, "r")
    for line in read_file:
        year_line = int(line[0:4])
        yrlydir[year_line].writelines(line)
    read_file.close()  # was leaked before this fix

    for handle in yrlydir.values():
        handle.close()

    # For detached files we need to reorder, as appending didn't tack
    # things on in chronological order.
    if reorder:
        print ("\t\tReordering....")
        # Seed rev_file with the year *before* years[0] so the replace()
        # at the top of the loop below lands on years[0] on the first
        # pass.  Only the basename is altered, never the directory path.
        out_file_split = out_file.split('/')
        out_file_split[-1] = out_file_split[-1].replace(str(years[-1]), str(years[0]-1))
        rev_file = '/'.join(out_file_split)
        print(rev_file)
        for loop_year in years:
            # Advance the year embedded in the basename to loop_year.
            rev_file_split = rev_file.split('/')
            rev_file_split[-1] = rev_file_split[-1].replace(str(loop_year-1), str(loop_year))
            rev_file = '/'.join(rev_file_split)

            # Read the yearly file and key each line by its uci (15th
            # whitespace field).  Note a duplicate uci silently keeps
            # only the last line -- presumably ucis are unique per file;
            # TODO confirm.
            read_f = open(rev_file, "r")
            ucis = {}
            for line in read_f:
                ucis[line.split()[14]] = line
            read_f.close()

            # Write the uci-sorted lines to a temp file *next to* the
            # target (same filesystem, so the replace below is atomic),
            # then swap it in.  Replaces the old os.system('mv ...'),
            # which was non-portable and broke on paths with spaces.
            tmp_path = rev_file + ".tmp"
            write_f = open(tmp_path, "w")
            for uci in sorted(ucis):
                write_f.writelines(ucis[uci])
            write_f.close()
            os.replace(tmp_path, rev_file)
            print ("\t\t\t%s" % (rev_file))
        print ("Done")

    return (did_files)
+
+
+def main(defs_set,imports,years,out_path,centers_file,shared_path,slp_path,
+ model,exit_on_error,save_plot,track_stats):
+
+ # --------------------------------------------------------------------------
+ # Setup Section
+ # --------------------------------------------------------------------------
+ print ("\tSetting up....",)
+
+
+ # Import needed modules.
+ for i in imports:
+ exec(i, globals())
+
+ defs_v4 = globals()['defs']
+ numpy = globals()['numpy']
+ strip_read = globals()['strip_read']
+ gcd = globals()['gcd']
+ ij2grid = globals()['ij2grid']
+ grid2ij = globals()['grid2ij']
+ rhumb_line_nav = globals()['rhumb_line_nav']
+ clean_dict = globals()['clean_dict']
+ jd_key = globals()['jd_key']
+ resort = globals()['resort']
+ try_to_connect = globals()['try_to_connect']
+
+ import pickle
+
+ # Fetch definitions.
+ defs = defs_v4.defs(**defs_set)
+
+ # What sort of figures
+ # fig_format = ".png"
+ # fig_format = ".eps"
+ fig_format = ".pdf"
+
+ # pre-bind for speed
+ gcd = gcd.gcd
+ ij2grid = ij2grid.ij2grid
+ grid2ij = grid2ij.grid2ij
+ rhumb_line_nav = rhumb_line_nav.rhumb_line_nav
+ strip_read = strip_read.strip_read
+ clean_dict = clean_dict.clean_dict
+ jd_key = jd_key.jd_key
+ resort = resort.resort
+ try_to_connect = try_to_connect.try_to_connect
+ cos = math.cos; sin = math.sin
+ d2r = math.radians; r2d = math.degrees
+ atan2 = math.atan2
+ if save_plot:
+ Save_NetCDF = save_netcdf.Save_NetCDF
+ if track_stats:
+ Plot_Map = plot_map.plotmap
+ Save_NetCDF = save_netcdf.Save_NetCDF
+
+ # Get some definitions. Note must have run setup_vx.py already!
+ sf_file = "%ss_dat.p" % (shared_path)
+ try:
+ fnc_out = pickle.load(open(sf_file, 'rb'))
+ (im,jm,maxid,lats,lons,timestep,dx,dy,dlon,dlat,start_lat,start_lon,
+ dlon_sq,dlat_sq,two_dlat,model_flag,eq_grid,tropical_n,tropical_s,
+ bot,mid,top,row_start,row_end,tropical_n_alt,tropical_s_alt,
+ bot_alt,top_alt,lon_shift,lat_flip,the_calendar,found_years,
+ super_years,dim_lat,dim_lon,dim_time,var_lat,var_lon,var_time,
+ var_slp,var_topo,var_land_sea_mask,file_seperator,no_topo,
+ no_mask,slp_path,model,out_path,shared_path,lat_edges,lon_edges,
+ land_gridids,troubled_centers,faux_grids) = fnc_out
+ except:
+ sys.exit("\n\tWARNING: Error reading or finding %s." % (sf_file))
+ # Save memory
+ if not defs.troubled_filter:
+ del troubled_centers
+ del land_gridids
+ del lat_edges
+ del lon_edges
+ del fnc_out
+
+ # Update over_write values
+ if slp_path:
+ slp_path = over_write_slp_path
+ if years:
+ print ("jimmy, hacker: ")
+ print (years)
+ super_years = years
+
+ # Create out_path if it doesn't exist.
+ if over_write_out_path:
+ out_path = over_write_out_path
+ if not os.path.exists(out_path):
+ dirs = list(map(os.makedirs, (out_path,
+ out_path+'/comps/',
+ out_path+'/pdfs/',
+ out_path+'/stats/',
+ out_path+'/netcdfs/',
+ out_path+'/figs/pdfs/',
+ out_path+'/figs/comps/')))
+ print ("Directory %s Created." % (out_path))
+
+ if defs.keep_log:
+ # Redirect stdout to file instead of screen (i.e. logfiles)
+ tmp = "%s/logfile" % (out_path)
+ lfile = make_unique_name(os,tmp,".txt")
+ screenout = sys.stdout
+ log_file = open(lfile, 'w')
+ sys.stdout = log_file
+
+ # timestep in julian days
+ # delta_jdate = int(((timestep/24.0)*100))
+ # Jeyavinoth: above line of code,
+ # computes the time step that is used adates
+ # this value is used to do a sanity check below, to make sure that the
+ # adates dont increment by more than 25
+ # hence I change this to hours, instead of days
+ delta_jdate = int(timestep)
+
+ fnc_out = []
+ tf_file = "%stf_dat.p" % (shared_path)
+ try:
+ fnc_out = pickle.load(open(tf_file, 'rb'))
+ (tdict,lwdict) = fnc_out
+ except:
+ sys.exit("\n\tWARNING: Error reading or finding %s." % (tf_file))
+ del lwdict
+ del fnc_out
+
+ # fnc_out some things
+ defs.maxdp = defs.maxdp*timestep
+ defs.travel_distance = defs.travel_distance*timestep
+
+ # Shortest allowable track length (based on time elapsed between pnts, such
+ # that 1 timestep spans 2 pnts).
+ min_trk_cnt = int(defs.age_limit/timestep) + 1
+
+ # Gives a bergero of 1 at 6 hPa per timestep at 60 degrees
+ sin60 = math.sin(d2r(60.0))/float(timestep)
+ ## Gives a bergero of 1 at 12 hPa per timestep at 60 degrees
+ # sin60 = math.sin(d2r(60.0))/float(2*timestep)
+ ## Gives a bergero of 1 at 10 hPa per timestep at 60 degrees
+ # sin60 = math.sin(d2r(60.0))/float(1.67*timestep)
+
+ # Skip high latitude for angle, separation, area
+ # if non-zero the value is the absolute latitude
+ # above which no values retained.
+ skip_high_lat = 85
+
+ # Limit bearing stats and StheC to centers with at least
+ # this must separation as likely to be large angle
+ # changes when centers are say 10 km apart due to
+ # a stalled/blocked cyclone.
+ # 2 deg lat equivalent separation
+ # Jeyavinoth: change following
+ # min_sep = 222.0
+ # with
+ min_sep = (222.0/6.0)*float(timestep)
+ # Jeyavinoth: End
+
+ inv_accuracy = 1.0/defs.accuracy
+
+ # Summary Stats: Checks on operation to report and catch oddities.
+ known_flags = {5 : "Failed trackable center",
+ 6 : "Failed track lifetime filter",
+ 7 : "Failed track travel filter",
+ 8 : "Failed track minimum SLP filter",
+ 10: "Failed extratropical track filter"}
+# 10 : "Troubled center"}
+ flag_files = {0 : "passed",
+ 5 : "trackable",
+ 6 : "lifetime",
+ 7 : "travel",
+ 8 : "minslp",
+ 10 : "tropical"}
+ nflags = 11
+ total_time_steps = 0
+ flags_used = [5,6,7,8,10] # used flags
+ super_total_centers_read = 0
+ super_total_tracks = 0
+ super_total_centers_cnt = [0]*nflags#*11 # make size of total flag count
+
+ if save_plot:
+ flag_cnt = numpy.zeros((jm*im,nflags),dtype=numpy.float)
+ # to see touch concerns
+ touch_concerns = numpy.zeros((jm*im),dtype=numpy.float)
+
+ if track_stats:
+ # insert quantity info:
+ # 0 - CisG
+ # 1 - StheC
+ # 2 - CisB
+ # 3 - Dissimilarity Scores
+ # 4 - Bearing
+ # 5 - Ccount
+ # 6 - Dscore used (selected) when dscore used at all
+ # 7 - Dscore not used (unselected values) when dscore used at all
+ stat_groups = ["CisG","StheC","CisB","Dscore","Bearing",
+ "Ccount","Dscore_Used","Dscore_Unused"]
+
+ # For frequency plots
+ data_index = [len(stat_groups)]
+ data_index.append(jm*im)
+ bucket_freq_sum = numpy.zeros(data_index,dtype=numpy.float)
+ bucket_freq_cnt = numpy.zeros(data_index,dtype=numpy.float)
+
+ big_bins = [x for x in range(len(stat_groups))]
+
+ # CisG bins for histograms
+ bin_width = 0.1
+ bins_left_edge = numpy.arange(0.0,5.0,bin_width)
+ tmp = setup_bins(bins_left_edge)
+ CisG_bins = [numpy.zeros(len(bins_left_edge),dtype=numpy.integer),
+ tmp[0],tmp[1],tmp[2],tmp[3]]
+ big_bins[0] = CisG_bins
+
+ # StheC bins for histograms
+ bin_width = 0.1
+ bins_left_edge = numpy.arange(0.0,2.0,bin_width)
+ tmp = setup_bins(bins_left_edge)
+ StheC_bins = [numpy.zeros(len(bins_left_edge),dtype=numpy.integer),
+ tmp[0],tmp[1],tmp[2],tmp[3]]
+ big_bins[1] = StheC_bins
+
+ # CisB bins for histograms
+ bin_width = 0.1
+ bins_left_edge = numpy.arange(0.0,2.0,bin_width)
+ tmp = setup_bins(bins_left_edge)
+ CisB_bins = [numpy.zeros(len(bins_left_edge),dtype=numpy.integer),
+ tmp[0],tmp[1],tmp[2],tmp[3]]
+ big_bins[2] = CisB_bins
+
+ # Dscore bins for histograms
+ bin_width = 0.1
+ bins_left_edge = numpy.arange(0.0,5.0,bin_width)
+ tmp = setup_bins(bins_left_edge)
+ Dscore_bins = [numpy.zeros(len(bins_left_edge),dtype=numpy.integer),
+ tmp[0],tmp[1],tmp[2],tmp[3]]
+ big_bins[3] = Dscore_bins
+
+ # Bearing bins for histograms
+ bin_width = 5.0#45.0
+ bins_left_edge = numpy.arange(0.0,360.0,bin_width)
+ tmp = setup_bins(bins_left_edge)
+ bearing_bins = [numpy.zeros(len(bins_left_edge),dtype=numpy.integer),
+ tmp[0],tmp[1],tmp[2],tmp[3]]
+ big_bins[4] = bearing_bins
+
+ # Ccount bins for histograms (number of potential connections)
+ bin_width = 1.0
+ bins_left_edge = numpy.arange(0.0,10.0,bin_width)
+ tmp = setup_bins(bins_left_edge)
+ Ccount_bins = [numpy.zeros(len(bins_left_edge),dtype=numpy.integer),
+ tmp[0],tmp[1],tmp[2],tmp[3]]
+ big_bins[5] = Ccount_bins
+
+ # Dscore_Used bins for histograms (these of the dscores used to make choice)
+ bin_width = 0.1
+ bins_left_edge = numpy.arange(0.0,5.0,bin_width)
+ tmp = setup_bins(bins_left_edge)
+ Dscore_Used_bins = [numpy.zeros(len(bins_left_edge),dtype=numpy.integer),
+ tmp[0],tmp[1],tmp[2],tmp[3]]
+ big_bins[6] = Dscore_Used_bins
+
+ # Dscore_Unused bins for histograms (these are the pool of dscores when a choice is made)
+ bin_width = 0.1
+ bins_left_edge = numpy.arange(0.0,5.0,bin_width)
+ tmp = setup_bins(bins_left_edge)
+ Dscore_Unused_bins = [numpy.zeros(len(bins_left_edge),dtype=numpy.integer),
+ tmp[0],tmp[1],tmp[2],tmp[3]]
+ big_bins[7] = Dscore_Unused_bins
+
+ # Quit on error else just send message to logfile?
+ if exit_on_error:
+ do_this = 'print (smsg); print (msg); sys.exit("\t\tDate_Stamp:"+date_stamp)'
+ else:
+ do_this = 'print (smsg); print (msg); print ("\t\tDate_Stamp:"+date_stamp)'
+
+ print ("Done\n\tReading Centers....",)
+
+ #---------------------------------------------------------------------------
+ # Read all centers into memory
+ #---------------------------------------------------------------------------
+
+ # Counters for report
+ centers = []
+ centers_append = centers.append
+ # Read all centers into memory
+ i = 0
+ years = [x for x in range(int(super_years[0]),int(super_years[-1])+1)]
+ for loop_year in years:
+ # Define some files
+ header = "mcms_%s_%04d_" % (model,loop_year)
+ centers_file = "%s%scenters.txt" % (out_path,header)
+ # Open files for read.
+ centers_read = open(centers_file,"r")
+ ingested = 0
+ for line in centers_read:
+ # Process line
+ fnc = strip_read(line)
+ centers_append(fnc)
+ ingested += 1
+ centers_read.close()
+ if i == 0:
+ print (" %d (%d)" % (loop_year,ingested),)
+ elif loop_year == years[-1]:
+ print ( "%d (%d)" % (loop_year,ingested))
+ elif i < 55555:
+ print ("%d (%d)" % (loop_year,ingested),)
+ else:
+ print ("%d (%d)" % (loop_year,ingested))
+ i = -1
+ i += 1
+ super_total_centers_read = len(centers)
+ print ("Jimmy: super_total_centers_read")
+ print (super_total_centers_read)
+
+ # Ensure sorted and put in inverse order so t=0 at end of array
+ centers.sort()
+ centers.reverse()
+ # Make copy for later use
+ center_orig = centers[:]
+
+ # # Jeyavinoth: Start
+ # # commented out till "Jeyavinoth: End"
+ # # Extract all unique Julian dates
+ # # JIMMY: the lack of unique julian dates might be an issue.
+ # alldates = sorted(dict((x,1) for x in [x[4] for x in centers]).keys())
+ # nsteps = len(alldates)
+ # # JB/JJ removed print here
+ # # print ("Jimmy version 1 of alldates" )
+ # # print (alldates)
+ #
+ # # This is set in setup_vX.py
+ # jd_fake = 0
+ # print ("Jimmy, the calendar is: "+the_calendar)
+ # if the_calendar != 'standard':
+ # # As no calendar detected assume non-standard
+ # jd_fake = 1
+ #
+ # if the_calendar == 'proleptic_gregorian':
+ # jd_fake = True
+ # # jd_fake = False #### JJ sets this to fake to make it work with leap years
+ # if jd_fake:
+ # date_stamps = sorted(dict((x,1) for x in ["%4d%02d%02d%02d" % (x[0],x[1],x[2],x[3]) for x in centers]).keys())
+ # else:
+ # dtimes = [netcdftime.DateFromJulianDay(adate*0.01) for adate in alldates]
+ # date_stamps = ["%4d%02d%02d%02d" % (d.year,d.month,d.day,d.hour) for d in dtimes]
+ # # Jeyavinoth: End
+
+ # Jeyavinoth
+ # replaced above commented out code with the following
+ # here the date stamps are the values saved in the center data
+ alldates = sorted(dict((x,1) for x in [x[4] for x in centers]).keys())
+ nsteps = len(alldates)
+ date_stamps = sorted(dict((x,1) for x in ["%4d%02d%02d%02d" % (x[0],x[1],x[2],x[3]) for x in centers]).keys())
+
+ # JB/JJ removed print here
+ # print (date_stamps)
+ # JIMMY, forcing the lenght of alldates to be to corrects does not solve the problem
+ #sorteddates=sorted(date_stamps)
+ #alldates=array.array('i',(i+0 for i in range(0,2920)))
+ #print ("Jimmy new alldates" )
+ #print (alldates)
+
+ print ("\tDone\n\tFinding Dissimilarity Scores....",)
+
+ #---------------------------------------------------------------------------
+ # Main Program Logic
+ #---------------------------------------------------------------------------
+ # Save track stuff to a master file for all times and breakup later
+
+ tracks_file = centers_file.replace("centers","tracks")
+ # JIMMY: issue is here for file-dir name. problem is my dir name has the year in it.
+
+ # JJ - made changes to only change the filename instead of the full directory
+ tracks_file_split = tracks_file.split('/')
+ tracks_file_split[-1] = tracks_file_split[-1].replace(str(loop_year),
+ "%4d_%4d" % (years[0],years[-1]))
+
+ tracks_file = '/'.join(tracks_file_split)
+
+ # JJ - removed the line below, and replaced it with the line above
+ # # simplest solution: dont use the year in the directory name.
+ # tracks_file = tracks_file.replace(str(loop_year),
+ # "%4d_%4d" % (years[0],years[-1]))
+
+ dumped_file = tracks_file.replace("tracks","dumped_centers")
+
+ # Open files for storage
+ if track_stats != 2:
+ tracks_save = open(tracks_file,"w")
+ if defs.keep_discards:
+ dumped_centers_save = open(dumped_file,"w")
+ else:
+ dumped_file = ""
+
+ # Containers
+ live_tracks = {}
+ dead_tracks = {}
+ current_centers = []
+ past_centers = []
+ dissimilar_score = 0.0
+
+ # Tracking Step 1: See Note 4
+ step = -1
+ adatelast = alldates[0]
+ print ("Jimmy adate last")
+ print (alldates[-1])
+
+ #Jeyavinoth:
+ jj_dlist = numpy.zeros((10, 1))
+ # Jeyavinoth: End
+ for adate in alldates:
+
+ # Check for gapless alldates
+ if adate > alldates[0]:
+ if adate-adatelast > delta_jdate:
+ err_num = 1
+ smsg = "\n\tFail Check %d: A timestep(s) skipped." % (err_num)
+ msg = "\t\tadate: %d adatelast: %d" % (adate,adatelast)
+ exec(do_this)
+ adatelast = adate
+ step += 1
+ # Tracking Step 2: See Note 5
+ if adate == alldates[0]:
+ # Add all current_centers as track starts
+ current = True
+ current_centers = []
+ while current:
+ try:
+ center = centers.pop()
+ except: # Hit end of list
+ break
+ if center[4] == adate:
+ current_centers.append(center)
+ else:
+ current = False
+ centers.append(center) # Overshoot, put back on
+ for x in current_centers:
+ live_tracks[x[14]] = [(x[14],"")]
+ continue # Get another date
+ else:
+ # Move last set of current_centers to past_centers
+ past_centers = current_centers # Set to past centers
+ current = True
+ current_centers = []
+ while current:
+ try:
+ center = centers.pop()
+ except: # Hit end of list
+ break
+ if center[4] == adate:
+ current_centers.append(center)
+ else:
+ current = False
+ centers.append(center) # Overshoot, put back on
+ deaddate = date_stamps[step]
+
+ #print "Doing",deaddate#,step
+ #print "\tFound current and past centers:",len(current_centers),len(past_centers)
+
+ # Tracking Step 3: See Note 6
+ past_scores = {}
+ for past in past_centers:
+ # All current_centers w/in search radius of past.
+ # Note that a center in past_centers might not
+ # fall into candidates at all
+ candidates = [x for x in current_centers
+ if x[7] in tdict[past[7]]]
+
+ # Selection based on minimizing the dissimilar_score
+ # between past and all candidates.
+ scores = {}
+
+ if track_stats:
+ # Accrue Freq plot
+ Ccount = len(candidates)
+ bucket_freq_sum[5,past[7]] += Ccount
+ bucket_freq_cnt[5,past[7]] += 1
+ # Bin for histogram
+ bin_index = get_bin_index(Ccount,Ccount_bins[1],float(Ccount_bins[4]))
+ Ccount_bins[0][bin_index] += 1
+
+ if candidates:
+
+ clon = past[6]*0.01
+ clat = 90.0 - past[5]*0.01
+
+ for pnt in candidates:
+
+ if pnt[7] == past[7]:
+ # skip same grid
+ dissimilar_score = 0.0
+ scores[pnt[14]] = dissimilar_score
+ continue
+
+ plon = pnt[6]*0.01
+ plat = 90.0 - pnt[5]*0.01
+
+ if defs.use_gcd:
+ distx = gcd(clon,clat,plon,plat)
+ else:
+ fnc = rhumb_line_nav(plon,plat,clon,clat)
+ distx = fnc[1]
+ bearing = fnc[0]
+
+ # Close is Best
+ CisB = distx/defs.travel_distance
+ if track_stats:
+ # Acrue Freq plot
+ bucket_freq_sum[2,pnt[7]] += CisB
+ bucket_freq_cnt[2,pnt[7]] += 1
+ # Bin for histogram
+ bin_index = get_bin_index(CisB,CisB_bins[1],float(CisB_bins[4]))
+ CisB_bins[0][bin_index] += 1
+
+ # Change is Gradual
+ mid_lat = 1.0/abs(sin(d2r((plat+clat)*0.5)))
+ CisG = abs(past[8]-pnt[8])*inv_accuracy*sin60*mid_lat
+ if track_stats:
+ # Accrue Freq plot
+ bucket_freq_sum[0,pnt[7]] += CisG
+ bucket_freq_cnt[0,pnt[7]] += 1
+ # Bin for histogram
+ bin_index = get_bin_index(CisG,CisG_bins[1],float(CisG_bins[4]))
+ CisG_bins[0][bin_index] += 1
+
+ # Because of the limitations of bearing finding at high latitudes
+ # and because a nearly stationary center (i.e., small distx) the
+ # apparent bearing could be large but not relevant.
+ if distx > min_sep and abs(clat) <= skip_high_lat:
+ # Bearing always from rhumb line, gcd can't be used!
+ fnc = rhumb_line_nav(
+ plon,plat,clon,clat,True)
+ bearing = fnc[0]
+ if track_stats:
+ # Acrue Freq plot
+ bucket_freq_sum[4,pnt[7]] += bearing
+ bucket_freq_cnt[4,pnt[7]] += 1
+ # Bin for histogram
+ bin_index = get_bin_index(bearing,bearing_bins[1],float(bearing_bins[4]))
+ bearing_bins[0][bin_index] += 1
+
+ # Stay the Coarse
+ # 1 at 0deg or 180deg
+ # 0 at 90deg
+ # if bearing greater than 180 discourage by
+ # increasing from 1 at 360deg and 180deg to 2 at 270deg
+ if bearing <= 180.0:
+ StheC = abs(bearing-90.0)/90.0
+ else:
+ StheC = (-1.0*abs(bearing-270.0)/90.0) + 2.0
+ if track_stats:
+ # Accrue Freq plot
+ bucket_freq_sum[1,pnt[7]] += StheC
+ bucket_freq_cnt[1,pnt[7]] += 1
+ # Bin for histogram
+ bin_index = get_bin_index(StheC,StheC_bins[1],float(StheC_bins[4]))
+ StheC_bins[0][bin_index] += 1
+ else:
+ bearing = 0.0
+ StheC = 0.0
+
+ # Weighting by CisB means that large coarse or pressure
+ # changes not as important if grids very close.
+ dissimilar_score = CisB*(StheC + CisG)
+ scores[pnt[14]] = dissimilar_score # key by current uci
+ if track_stats:
+ # Accrue Freq plot
+ bucket_freq_sum[3,pnt[7]] += dissimilar_score
+ bucket_freq_cnt[3,pnt[7]] += 1
+ # Bin for histogram
+ bin_index = get_bin_index(dissimilar_score,Dscore_bins[1],float(Dscore_bins[4]))
+ Dscore_bins[0][bin_index] += 1
+
+ # Example of past_scores nested dictionary:
+ # keyed on a prior center with dissimilar_scores for each
+ # candidate among all current_centers.
+ # {19960101001742823250:
+ # {19960101061741323000: 0.085061230289818587}}
+ past_scores[past[14]] = scores
+
+ if track_stats and len(scores) > 1:
+ # only when multiple choices possible
+ for peach in scores:
+ pnt = -1
+ for cc in candidates:
+ if cc[14] == peach:
+ pnt = cc[7]
+ break
+ if pnt < 0:
+ sys.exit("pnt Error: %s" % (pnt))
+ bucket_freq_sum[7,pnt] += scores[peach]
+ bucket_freq_cnt[7,pnt] += 1
+ bin_index = get_bin_index(scores[peach],Dscore_Unused_bins[1],float(Dscore_Unused_bins[4]))
+ Dscore_Unused_bins[0][bin_index] += 1
+
+ # Tracking Step 4: See Note 7
+ # FIX? sort order of past_centers has some effect
+ # see diff with v2 and v3 for usi 20080903001725023750 20080903121700018000
+ # with v3 see as one track and v2 as two tracks.
+ for past in past_centers:
+
+ # See if past can be connected to a current center
+ tfpick = try_to_connect(copy,past[14],past_scores)
+
+ if tfpick[1] == 1:
+ # A connection was made
+ # Remove past from further consideration
+ del past_scores[past[14]]
+
+ # Cull the current center from further consideration
+ past_scores = clean_dict(past_scores,tfpick[0])
+
+ if track_stats and scores:
+ for peach in past_scores:
+ pnt = past[7]
+ ss = list(past_scores[peach].values())
+ if not ss:
+ break
+ bucket_freq_sum[6,pnt] += ss[0]
+ bucket_freq_cnt[6,pnt] += 1
+ # Bin for histogram
+ bin_index = get_bin_index(ss[0],Dscore_Used_bins[1],float(Dscore_Used_bins[4]))
+ Dscore_Used_bins[0][bin_index] += 1
+
+ # Update live_tracks
+ new_track = True
+
+ for usi in live_tracks:
+ # Continuation of a existing track
+ if past[14] == live_tracks[usi][-1][0]:
+ live_tracks[usi].append((tfpick[0],tfpick[2]))
+ new_track = False
+ break
+ if new_track:
+ # Create a new track
+ live_tracks[past[14]] = [(past[14],""),(tfpick[0],tfpick[2])]
+ elif tfpick[1] == 0:
+ # No connection made
+ # Update past_scores to prevent duplication
+ del past_scores[past[14]]
+ else:
+ err_num = 2
+ smsg = "\n\tFail Check %d: No discard or use of a connection" % (err_num)
+ msg = "\t\tpast usi and score" % (past[14],past_scores)
+ exec(do_this)
+ #print "\tLive Tracks cnt:",len(live_tracks)
+
+ # Saved_tracks get large and slow to search so separate
+ # tracks into 'live' (last entry was current datetime),
+ # 'dead' (last entry was too old to possibly be used again).
+ test = list(live_tracks.keys())
+ test.sort()
+ for usi in test:
+ # See if last entry was today
+ lasttrack = live_tracks[usi][-1][0]
+ if lasttrack[:10] != deaddate:
+ # Dead track removed from live_tracks
+ dead_tracks[usi] = live_tracks.pop(usi)
+
+ #print "\tDead Tracks cnt:",len(dead_tracks)
+
+ # Move all remaining live tracks to dead... clearly these did
+ # not terminate in the normal way, just as tracks starting on
+ # t=0 might not be the full track.
+ for usi in list(live_tracks.keys()):
+ dead_tracks[usi] = live_tracks.pop(usi)
+
+ # Make a dictionary of centers to
+ # speed up recalls, note chronological
+ # nature of centers is lost.
+ centers = center_orig
+ hits = {}
+ for each in centers:
+ hits[each[14]] = each
+
+ print ("Done\n\tFiltering Tracks....",)
+ print ("\tNumber of original centers",len(hits))
+ #print "\tNumber of potential tracks",len(dead_tracks),
+
+ if track_stats:
+ for loopy in range(len(stat_groups)):
+
+ use_bins = big_bins[loopy]
+ x = use_bins[1]
+ y = use_bins[0]
+ # Jeyavinoth 1hr: I turn this off here
+ # if there are no cases found, I skip onto the next plot
+ if (y.sum() == 0):
+ print('\n****\nJeyavinoth: Skipping %s, %s\n****\n'%(title, xlab))
+ import pdb; pdb.set_trace()
+ continue
+
+ # Jeyavinoth 1hr end
+ width = use_bins[4]
+ title = "%s %4d-%4d" % (model,years[0],years[-1])
+ xlab = stat_groups[loopy]
+ stat_file = "%s%s_%s_%4d_%4d.txt" % (out_path,model,stat_groups[loopy],years[0],years[-1])
+ pname = "%sfigs/%s_hist_%s_%4d_%4d%s" % (out_path,model,stat_groups[loopy],years[0],years[-1],fig_format)
+ msg = plot_hist(plt,numpy,x,y,width,stat_file,pname,title,xlab)
+ print ("\t\t"+msg)
+
+ bave = numpy.zeros(im*jm,dtype=numpy.float)
+ for i in range(im*jm): # loop over each grid
+ if bucket_freq_cnt[loopy,i] >= 1:
+ bave[i] = numpy.divide(bucket_freq_sum[loopy,i],bucket_freq_cnt[loopy,i])
+ else:
+ bave[i] = 0.0
+ pname = "%sfigs/%s_freq_%s_%4d_%4d%s" % (out_path,model,stat_groups[loopy],years[0],years[-1],fig_format)
+ fplot = Plot_Map(missing=0.0,color_scheme="jet")
+ fplot.create_fig()
+ fplot.add_field(lons,lats,bave,ptype='pcolor',)
+ fplot.finish(pname)
+ pname = pname.replace("/figs","/netcdfs")
+ pname = pname.replace(fig_format,".nc")
+ save_it = Save_NetCDF(bave,lons,lats,pname,0)
+
+ #pname = "%sfigs/%s_hist_rose_%4d_%4d%s" % (out_path,model,years[0],years[-1],fig_format)
+ #use_bins = big_bins[4]
+ #x = use_bins[1]
+ #y = use_bins[0]
+ #rose_plot(y,pname,use_bins[1][0],use_bins[1][-1],big_bins[4][4],verbose=0,fig_format=fig_format)
+
+ if track_stats == 2:
+ return
+
+ # Refactor Tracks:
+ # 1) Update each center's usi, i.e., assign to a track
+ # 2) Apply post-tracking filters
+ # a) Tropical Filter
+ # b) Minimum Lifetime
+ # c) Minimum Lifetime SLP
+ # d) Minimum Lifetime Travel
+ used_usi = list(dead_tracks.keys())
+ used_usi.sort()
+
+ # Loop over tracks and find member centers
+ for usi in used_usi:
+
+ caught = []
+ # Centers found with this usi
+ caught = [hits[y[0]] for y in dead_tracks[usi]]
+
+ # Get dissimilarity scores
+ ds = [x[1] for x in dead_tracks[usi]]
+
+ # Trim for speed as search pool shrinks
+ for each in caught:
+ del hits[each[14]]
+ del dead_tracks[usi]
+
+ # Apply Tropical Test:
+ # Whole track discarded if track never leaves the tropics.
+ extra_tropical_system = [x[5] for x in caught if
+ abs(90-x[5]*0.01) >= defs.tropical_boundary_alt]
+ if not extra_tropical_system:
+ if defs.keep_discards:
+ for part in caught:
+ msg = defs.center_fmt2 % (part[0],part[1],part[2],part[3],
+ part[4],part[5],part[6],part[7],
+ part[8],part[9],part[10],10,
+ part[12],part[13],part[14],
+ part[15])
+ dumped_centers_save.writelines(msg)
+ super_total_centers_cnt[10] += 1
+ # Jeyavinoth
+ jj_dlist[0] += 1
+ if save_plot:
+ flag_cnt[part[7],10] += 1
+ continue
+
+ # Don't apply remaining filters is track starts in the very
+ # 1st timestep as it could have been truncated and so
+ # might incorrectly fail these tests. Same idea if last member
+ # of the track in the last timestep.
+ do_filter = 1
+ if usi.startswith(date_stamps[0]):
+ do_filter = 0
+ if caught[-1][14].startswith(date_stamps[-1]):
+ do_filter = 0
+
+ if do_filter:
+
+ if save_plot and defs.troubled_filter:
+ # To make map of tracks that touch troubled grids uncomment
+ # and comment minimum lifetime flag_cnt
+ in_trouble = [x for x in caught if x[7] in troubled_centers]
+ if in_trouble:
+ for part in caught:
+ touch_concerns[part[7]] += 1
+
+ # Apply minimum lifetime
+ if len(caught) < min_trk_cnt:
+
+ if defs.keep_discards:
+ for part in caught:
+ msg = defs.center_fmt2 % (part[0],part[1],part[2],part[3],
+ part[4],part[5],part[6],part[7],
+ part[8],part[9],part[10],6,
+ part[12],part[13],part[14],
+ part[15])
+ dumped_centers_save.writelines(msg)
+ # Jeyavinoth
+ jj_dlist[1] += 1
+ super_total_centers_cnt[6] += 1
+ if save_plot:
+ flag_cnt[part[7],6] += 1
+ continue
+ # Apply minimum lifetime SLP
+ min_val = min(x[8] for x in caught)
+
+ if min_val > defs.keep_slp:
+ if defs.keep_discards:
+ for part in caught:
+ msg = defs.center_fmt2 % (part[0],part[1],part[2],part[3],
+ part[4],part[5],part[6],part[7],
+ part[8],part[9],part[10],8,
+ part[12],part[13],part[14],
+ part[15])
+ dumped_centers_save.writelines(msg)
+ # Jeyavinoth
+ jj_dlist[2] += 1
+ super_total_centers_cnt[8] += 1
+ if save_plot:
+ flag_cnt[part[7],8] += 1
+ continue
+
+ # Apply Minimum Lifetime Travel:
+ # Methods:
+ # 1) total distance traveled between segments
+ # 2) distance between start and end centers
+ # 3) maximum displacement from start center
+ #
+ # Note option to discard tracks that never
+ # leave high topography regions... likely noise
+ #
+ first = True
+ total_travel = 0.0
+
+ ## Method 1
+ #for segment in caught:
+ # if first:
+ # first = False
+ # plon = segment[6]*0.01
+ # plat = 90.0 - segment[5]*0.01
+ # continue
+ # elon = segment[6]*0.01
+ # elat = 90.0 - segment[5]*0.01
+
+ # if defs.use_gcd:
+ # disty = gcd(plon,plat,elon,elat)
+ # else:
+ # fnc = rhumb_line_nav(elon,elat,plon,plat,True)
+ # disty = fnc[1]
+
+ # total_travel += disty
+ # plon = elon
+ # plat = elat
+
+ # Method 3
+ total_travel = 0.0
+ for segment in caught:
+ if first:
+ first = False
+ plon = segment[6]*0.01
+ plat = 90.0 - segment[5]*0.01
+ continue
+ elon = segment[6]*0.01
+ elat = 90.0 - segment[5]*0.01
+
+ if defs.use_gcd:
+ disty = gcd(plon,plat,elon,elat)
+ else:
+ fnc = rhumb_line_nav(elon,elat,plon,plat,True)
+ disty = fnc[1]
+
+ if disty > total_travel:
+ total_travel = disty
+
+ # Discard tracks that never leave troubled area
+ if defs.troubled_filter:
+ in_trouble = [x for x in caught if x[7] in troubled_centers]
+ if len(in_trouble) == len(caught):
+ total_travel = 0.0
+
+ if total_travel < defs.min_trk_travel:
+ if defs.keep_discards:
+ for part in caught:
+ msg = defs.center_fmt2 % (part[0],part[1],part[2],part[3],
+ part[4],part[5],part[6],part[7],
+ part[8],part[9],part[10],7,
+ part[12],part[13],part[14],
+ part[15])
+ dumped_centers_save.writelines(msg)
+ # Jeyavinoth
+ jj_dlist[3] += 1
+ super_total_centers_cnt[7] += 1
+ if save_plot:
+ flag_cnt[part[7],7] += 1
+ continue
+
+ # Extract dissimilarity scores for track and alter usi
+ ii = 0
+ for part in caught:
+ if ii == 0:
+ dissimilarity = 0
+ else:
+ dissimilarity = int(ds[ii]*100.0)
+ ii += 1
+ msg = defs.center_fmt2 % (part[0],part[1],part[2],part[3],part[4],
+ part[5],part[6],part[7],part[8],part[9],
+ part[10],part[11],part[12],dissimilarity
+ ,part[14],usi)
+ tracks_save.writelines(msg)
+ #Move and change
+ super_total_centers_cnt[0] += 1
+ if save_plot:
+ flag_cnt[part[7],0] += 1
+ super_total_tracks += 1
+ tracks_save.close()
+
+ if defs.keep_discards:
+ # Dump untrackable centers
+ for part in hits.values():
+ msg = defs.center_fmt2 % (part[0],part[1],part[2],part[3],part[4],
+ part[5],part[6],part[7],part[8],part[9],
+ part[10],5,part[12],part[13],part[14],
+ part[15])
+ dumped_centers_save.writelines(msg)
+ # Jeyavinoth
+ jj_dlist[4] += 1
+ super_total_centers_cnt[5] += 1
+ if save_plot:
+ flag_cnt[part[7],5] += 1
+ dumped_centers_save.close()
+ if track_stats:
+ print ("\tDone")
+ else:
+ print ("Done")
+
+ # import pdb; pdb.set_trace()
+ #-------------------------------------------------------------------------
+ # Clean up and Quit
+ #-------------------------------------------------------------------------
+ if defs.keep_discards:
+ dumped_centers_save.close()
+
+ # Final check to ensure order
+ resort(tracks_file,strip_read,jd_key,defs.center_fmt2)
+ if defs.keep_discards:
+ resort(dumped_file,strip_read,jd_key,defs.center_fmt2)
+
+ if defs.keep_log:
+ log_file.close()
+ sys.stdout = screenout # redirect stdout back to screen
+
+ #
+ # FINAL check to be sure all timesteps run and all centers accounted for.
+ #
+ report_file = tracks_file.replace("tracks.txt","tracks_report.txt")
+ report_file = report_file.replace(out_path,"%sstats/" % (out_path))
+ report_save = open(report_file,"w")
+
+ msg1 = "Total Centers Read: %d\nTotal Centers Saved: %d (%6.2f%%)\nTotal Tracks: %d\nDiscards:\n"
+ msg = msg1 % (super_total_centers_read,super_total_centers_cnt[0],
+ 100.0*(float(super_total_centers_cnt[0])/float(super_total_centers_read)),super_total_tracks)
+ report_save.writelines(msg)
+ msg1 = "\t% 6d\t(%6.2f%%)\t%s\n"
+ for e in flags_used:
+ msg = msg1 % (super_total_centers_cnt[e],
+ 100.0*(float(super_total_centers_cnt[e])/float(super_total_centers_read)),
+ known_flags[e])
+ report_save.writelines(msg)
+
+ # Final sanity check that everything is accounted for.
+ if super_total_centers_read != sum(super_total_centers_cnt):
+ msg = "Final Total Count Error:\n\tsuper_total_centers_read = %d\n\tsuper_total_centers_cnt = %s sum(%d)"
+ sys.exit(msg % (super_total_centers_read,repr(super_total_centers_cnt),
+ sum(super_total_centers_cnt)))
+ report_save.close()
+
+ if save_plot:
+ # Make frequency plot
+ for flag in flag_files:
+
+ # Just counts
+ # FIX error with numpy/matplot lib and missing so just set to zero now
+ #comp_out = numpy.where(flag_cnt[:,flag] < 1.,-10000000000.0,flag_cnt[:,flag])
+ comp_out = numpy.where(flag_cnt[:,flag] < 1.,0.0,flag_cnt[:,flag])
+ pname = "%sfigs/%s_freq_%s_%4d_%4d%s" % (out_path,model,flag_files[flag],
+ years[0],years[-1],fig_format)
+ fplot = Plot_Map(missing=0.0,color_scheme="jet")
+ fplot.create_fig()
+ fplot.add_field(lons,lats,comp_out,ptype='pcolor',)
+ fplot.finish(pname)
+ pname = pname.replace("/figs","/netcdfs")
+ pname = pname.replace(fig_format,".nc")
+# pname = "%sfreq_%s_%4d_%4d.nc" % (out_path,flag_files[flag],years[0],years[-1])
+ # JJ changed the below line, where I convert the flag_cnt[:,flag] to comp_out,
+ # because some where in fplot the comp_out changes from X*1 to lat_size*lon_size
+ save_it = Save_NetCDF(comp_out,lons,lats,pname,0)
+ # save_it = Save_NetCDF(flag_cnt[:,flag],lons,lats,pname,0)
+ print ("\t\tCreated flag %d: %s" % (flag,pname))
+ comp_out = numpy.where(touch_concerns < 1.,0.0,touch_concerns)
+ pname = "%sfigs/%s_touch_concerns_%4d_%4d%s" % (out_path,model,
+ years[0],years[-1],fig_format)
+ fplot = Plot_Map(missing=0.0,color_scheme="jet")
+ fplot.create_fig()
+ fplot.add_field(lons,lats,comp_out,ptype='pcolor',)
+ fplot.finish(pname)
+ pname = pname.replace("/figs","/netcdfs")
+ pname = pname.replace(fig_format,".nc")
+# pname = "%stouch_concerns_%4d_%4d.nc" % (out_path,years[0],years[-1])
+ # JJ editted this line below to change from touch_concerns, to comp_out to make sure that the single dim array gets converted to lat_size, lon_size
+ save_it = Save_NetCDF(comp_out,lons,lats,pname,0)
+ # save_it = Save_NetCDF(touch_concerns,lons,lats,pname,0)
+ print ("\tCreated: %s" % (pname))
+
+ return (tracks_file,dumped_file,years)
+
+#---Start of main code block
+if __name__=='__main__':
+
+ # --------------------------------------------------------------------------
+ # Select options for this run.
+ # --------------------------------------------------------------------------
+
+ # This next set of lines should be copied from setup_vX.py
+ # Short names by which model will be labeled.
+ model = defines.model
+
+ # Halt program on error or just warn?
+ exit_on_error = 0
+
+ # Save/plot Stats (debugging mostly)
+ save_plot = 1
+
+ # Store tracking stats and make plots
+ # Set track_stats = 2 to not save tracks to file... just stats and plots
+ track_stats = 1
+
+ # --------------------------------------------------------------------------
+ # Define all modules to be imported.
+ # --------------------------------------------------------------------------
+
+ # Extract version number from this scripts name.
+ tmp = sys.argv[0]
+ file_len = len(tmp.split("_"))
+ vnum = "_"+tmp.split("_")[file_len-1][:2]
+
+ # Basic standard Python modules to import.
+ imports = []
+ # Jeyavinoth: removed netcdftime from line below
+ # imports.append("import math,numpy,netcdftime")
+ imports.append("import math,numpy")
+
+ imports.append("import copy")
+ imports.append("import netCDF4 as NetCDF")
+ imports.append("import _pickle as cPicke")
+ if track_stats:
+ imports.append("import matplotlib.pyplot as plt")
+
+ # My modules to import w/ version number appended.
+ my_base = ["defs","make_unique_name","strip_read","gcd","ij2grid",
+ "grid2ij","clean_dict","jd_key","resort","rhumb_line_nav",
+ "try_to_connect"]
+ if save_plot or track_stats:
+ my_base.append("save_netcdf")
+ my_base.append("plot_map")
+ for x in my_base:
+ tmp = "import %s%s as %s" % (x,vnum,x)
+ imports.append(tmp)
+
+ # --------------------------------------------------------------------------
+ # Alter default behavior found in either defs_vX.py or setup_vX.py
+ # --------------------------------------------------------------------------
+
+ # The default behavior is to read SLP data from the
+ # directory slp_path defined in setup_vX.py.
+ # Here you can elect to override this behavior.
+ over_write_slp_path = ""
+
+ # The default behavior is to save results
+ # in the directory out_path defined in
+ # setup_vX.py. Here you can elect to override
+ # this behavior.
+ over_write_out_path = ""
+
+ # This next set of lines should be copied from setup_vX.py
+ # Full path to the root directory where model specific output will be stored.
+ result_directory = defines.out_folder
+ if not os.path.exists(result_directory):
+ sys.exit("ERROR: result_directory not found.")
+
+ # Directory to be created for storing temporary model specific files.
+ shared_path = "%s%s_files/" % (result_directory,model)
+
+ # The default behavior is to run over all the
+ # years found by setup_vX.py. Here you can
+ # elect to override this behavior.
+ over_write_years = defines.over_write_years
+
+ # Here you can alter the default behavior as determined
+ # by defs_vX.py and possibly setup_vX.py.
+ defs_set = {"keep_log":False,"polar_filter":False,
+ "troubled_filter":True}
+
+ # Define some files
+ centers_file = "centers.txt"
+ dumped_centers_file = "dumped_centers.txt"
+
+ # Shortcut to keep parameter list shorter.
+ specifics = {'years' : over_write_years,
+ 'out_path' : over_write_out_path,
+ 'centers_file' : centers_file,
+ 'shared_path' : shared_path,
+ 'slp_path' : over_write_slp_path,
+ 'model' : model,
+ 'exit_on_error' : exit_on_error,
+ 'save_plot' : save_plot,
+ 'track_stats' : track_stats
+ }
+
+ ### --------------------------------------------------------------------------
+ ## Run main()
+ ## --------------------------------------------------------------------------
+ msg = "\n\t====\tCenter Tracking\t===="
+ print (msg)
+ if over_write_slp_path:
+ print ("\tUsing over_write_slp_path: %s" % (over_write_slp_path))
+ else:
+ print ("\tUsing default slp_path")
+ if over_write_out_path:
+ print ("\tUsing over_write_out_path: %s" % (over_write_out_path))
+ else:
+ print ("\tUsing default out_path")
+ if not os.path.exists(shared_path):
+ sys.exit("\tCan't find shared_path!")
+ else:
+ print ("\tUsing shared_path: %s" % (shared_path))
+ if over_write_years:
+ print ("\tUsing over_write_years: %s" % (repr(over_write_years)))
+ else:
+ print ("\tUsing default years")
+ if defs_set:
+ print ("\tUsing modified defs for defs_vX.py:")
+ for d in defs_set:
+ print ("\t\t%20s:\t%s" % (d,defs_set[d]))
+ else:
+ print ("\tUsing defaults from defs_vX.py")
+
+ msg = main(defs_set,imports,**specifics)
+ print ("\tCreated:",msg[0],msg[1])
+ years = msg[2]
+
+ #print "JIMMY"
+ ## To test rewrite, comment out main call above and uncomment below
+ # super_years = [1989,1990]
+ # jimdir = '/Users/jfbooth/MCMS_DIR/CODE/TEST_TRACK_CENTER_REWRITE/'
+ # years = [x for x in range(int(super_years[0]),int(super_years[-1])+1)]
+ # msg = (jimdir+'%s/mcms_%s_%04d_%04d_tracks.txt' % (model,model,years[0],years[-1]),
+ # jimdir+'%s/mcms_%s_%04d_%04d_dumped_centers.txt' % (model,model,years[0],years[-1]))
+ # print msg
+
+ # Rewrite tracks
+ msg2 = rewrite(msg[0],years)
+
+ # Append dumped
+ msg3 = rewrite(msg[1],years,action="a",reorder=True)
+
+# ------------------------------------------------------------------------------
+# Notes: Rather than embed in code make reference here
+# ------------------------------------------------------------------------------
+Notes = """
+
+4): Loop over each datetime connecting the 'current' set of centers
+ to the 'past' set of centers via nearest neighbor and likeness
+ arguments.
+
+ Important objects:
+ adate: the current julian date being examined.
+ saved_tracks: dictionary with key value of the 1st uci
+ in the track. The values from each key are a list of
+ ucis making up that track.
+
+5) Find all current centers. If first adate then all centers
+ are new tracks and immediately get next adate. Otherwise,
+ move the previous adate's current_centers to past_centers
+ before updating the current_centers list.
+
+ What can happen to each candidate from current_centers?
+ A) If 1st datetime then all of current_centers goes to
+ saved_tracks as a new track.
+ B) The candidate center mismatches with all prior centers because:
+ B1) The candidate center is too isolated from prior centers.
+ B2) Other candidate centers are more more similar to all
+ prior centers than is this candidate center.
+ * Note the candidate center may be the start of a new
+ track. This will be dealt with in the next iteration.
+ C) Connects to a prior center and:
+ C1) The candidate center is appended to an existing track
+ of which the prior center is member.
+ C2) The candidate center is appended to an new track of
+ which the prior center becomes the first member.
+ Important object:
+ current_centers: list of the centers for adate.
+ past_centers: the current_centers for the previous adate.
+ saved_tracks: dictionary keyed on the 1st trackable point
+ containing list of all centers as tuples
+ (dislikeness_scores,choice_codes) in this track.
+
+6) Find the dislikeness_scores for current and past centers.
+
+ Important objects:
+ candidates: list of all current_centers w/in the pre-defined
+ search radius in tdict for each center in past_centers.
+ dissimilar_score: Weighted estimate of the dissimilarity b/t
+ a past_center and a current_center. Based on proximity,
+ SLP difference, and implied relative coarse between the
+ centers.
+ past_scores: dictionary keyed by past uci with
+ the dissimilar_score for each candidate current center.
+
+7) Loop over each prior center in past_scores and find the
+ current center with the lowest dissimilar_score and then loop
+ over all the other past_scores to ensure that other centers don't
+ claim that current_center as well as are maybe more similar.
+
+ If a conflict arises the following procedure is followed:
+ a) the original past center uses the current center only if
+ its dissimilar_score is lower than the current center's
+ dissimilar_score with any other past center.
+ b) if another past center has a lower dissimilar_score
+ with the current center, then the original past center
+ must try again with the next least dissimilar
+ current center (if one exists) or go unattached.
+
+ When a current center is associated with a past center into
+ a track. Then those centers are no longer used to judge other
+ centers for tracking purposes.
+
+ The dissimilar_score for each track segment is stored as a
+ rough measure of the uncertainty of that tracking choice.
+
+ Past centers without a associated current center are treated
+ as terminated tracks.
+
+ Current centers without a trackable past center are treated
+ as new tracks.
+
+ Important objects:
+ used_current_centers: list of current centers that have been
+ associated with a past center and should no longer be
+ considered for tracking.
+ used_past_centers: same as above but for past centers
+ tfpick: a tuple of the (uci,choice_flag).
+ If choice_flag = 1, then a connection is made, if 0 then no
+ connection found, if -1 something went wrong.
+
+8) Searching very large lists is very slow. There are two approaches
+ to this that are basically the same speed. Both depend on the list
+ to be sorted, in this case chronologically, so that we can use shortcuts
+ to limit the search. Both of these methods are about equally fast, both
+ being 500x faster than doing the search over the whole list.
+
+ Method 1: Reverse the list and remove items off and check to see if
+ one correct day. This is done because removing items off the end of
+ a list is much faster because the remaining list items don't have
+ to be renumbered. In this case the search is limited to testing
+ each item as if comes off until we've gone too far.
+
+ Method 2: Assume that there are only a limited number of items that
+ we want at any one search and limit how far forward we search... that
+ is a sliding search window. Not used because it is more complicated
+ than Method 1. Included here in case Method 1 ever proves problematic.
+ # cinc = sets how far forward I will search for
+ # centers, set to 3 times the average number
+ # of centers per day to accommodate variability
+ # and future looks.
+ # Basically this is a sliding search
+ cmax = len(centers)
+ cinc = (cmax/len(alldates))*3
+ cstart = 0
+ fstart = 0
+ cend = cstart + cinc
+ # don't overshoot array
+ if cend > cmax:
+ cend = cmax
+ for adate in alldates:
+ if adate == alldates[0]:
+ nextdate = alldates[alldates.index(adate)+1]
+ past_centers = []
+ # generator to extract subset based on julian date (pyhack)
+ current_centers = [x for x in
+ centers[cstart:cend] if x[4] == adate]
+ # search forward of last current_center
+ fstart = cstart + len(current_centers)
+ fend = fstart + cinc
+ # don't overshoot array
+ if fend > cmax:
+ fend = cmax
+ future_centers = [x for x in
+ centers[fstart:fend] if x[4] == nextdate]
+ # warn if the number of found centers is too close to the
+ # assumption of cinc
+ if len(current_centers) > cinc*0.8:
+ print 'Warning cinc = %d may be too small. Found %d centers' \
+ % (cinc,len(current_centers))
+ stop()
+ elif adate == alldates[-1]:
+ past_centers = current_centers
+ current_centers = future_centers
+ future_centers = []
+ else:
+ nextdate = alldates[alldates.index(adate)+1]
+ past_centers = current_centers
+ current_centers = future_centers
+ # search forward of last current_center
+ fstart = cstart + len(current_centers)
+ fend = fstart + cinc
+ # don't overshoot array
+ if fend > cmax:
+ fend = cmax
+ future_centers = [x for x in
+ centers[fstart:fend] if x[4] == nextdate]
+ # move search forward
+ cstart = cstart + len(past_centers)
+ cend = cstart + cinc
+ # don't overshoot array
+ if cend > cmax:
+ cend = cmax
+
+"""
diff --git a/diagnostics/etc_composites/util/tracker/tree_traversal_v4.py b/diagnostics/etc_composites/util/tracker/tree_traversal_v4.py
new file mode 100755
index 000000000..bc1c5dda9
--- /dev/null
+++ b/diagnostics/etc_composites/util/tracker/tree_traversal_v4.py
@@ -0,0 +1,19 @@
+def tree_traversal (parent,tree,harvest,level=0):  # level = recursion depth; informational only
+    '''Utility function for center_finder: depth-first walk of `tree` starting
+    at `parent`, recording each visited center's raw center mean SLP (x[1])
+    in the `harvest` dict (keyed by center id). `harvest` doubles as the
+    visited set, so each center is recorded exactly once.
+    '''
+    if parent not in harvest:
+        # if use x[4] using regional mean slp
+        # if use x[1] using raw center mean slp
+        #slp = [x[4] for x in tree[parent] if x[0] == parent]
+        slp = [x[1] for x in tree[parent] if x[0] == parent]  # the self-entry carries this center's SLP
+        harvest[parent] = slp[0]  # would raise IndexError if no self-entry exists in tree[parent]
+    children = [x[0] for x in tree[parent] if x[0] not in harvest]  # only not-yet-visited neighbors
+    if children:
+        for child in children:
+            if child not in harvest:  # re-check: an earlier sibling's recursion may have visited it
+                #slp = [x[4] for x in tree[child] if x[0] == child]
+                slp = [x[1] for x in tree[child] if x[0] == child]
+                harvest[child] = slp[0]
+                tree_traversal(child,tree,harvest,level+1)
+    else:
+        return
diff --git a/diagnostics/etc_composites/util/tracker/try_to_connect_v4.py b/diagnostics/etc_composites/util/tracker/try_to_connect_v4.py
new file mode 100755
index 000000000..061928d00
--- /dev/null
+++ b/diagnostics/etc_composites/util/tracker/try_to_connect_v4.py
@@ -0,0 +1,61 @@
+def try_to_connect(copy,new_past_uci,past_scores):#,verbose=False):
+    """Try to find the most similar current center to past center new_past_uci.
+
+    Arguments:
+        copy -- the `copy` module, passed in by the caller (used for deepcopy).
+        new_past_uci -- uci of the past center we are trying to connect.
+        past_scores -- dict keyed by past uci, each value a dict mapping
+            candidate current uci -> dissimilarity score (lower = more similar).
+
+    Returns a tuple (uci, choice_flag, score):
+        choice_flag  1 -> connected to current center `uci` with score `score`;
+        choice_flag  0 -> no connection (uci/score are defaults or the losing pick);
+        choice_flag -1 -> no possible choices at all.
+    """
+
+    pick = (new_past_uci,0,0) # default result: no connection found
+    if past_scores[new_past_uci] == {}:
+        return pick  # no candidate current centers for this past center
+
+    if past_scores[new_past_uci]:  # non-empty dict; always true after the guard above
+        min_val = min(past_scores[new_past_uci].values())  # lowest dissimilarity = most similar
+        low_keys = [k for k,v in past_scores[new_past_uci].items()
+                    if v == min_val]
+        most_same_current_uci = low_keys[0]  # ties broken by dict iteration order
+        #if verbose:
+        #    print "\nChecking Past Center",new_past_uci,past_scores[new_past_uci]
+        #    print "\tBest Current Center",most_same_current_uci,\
+        #          past_scores[new_past_uci][most_same_current_uci]
+
+        lowest = True
+        # Every past center that also scored this current center as a candidate.
+        conflicts = [rival_new_past_uci for rival_new_past_uci in past_scores
+                     if most_same_current_uci in
+                     past_scores[rival_new_past_uci]]
+
+        for each in conflicts: # always includes at least new_past_uci
+            # exact matches are ignored as extremely unlikely.
+            if past_scores[each][most_same_current_uci] < \
+               past_scores[new_past_uci][most_same_current_uci]:
+                lowest = False  # a rival past center claims this current center more strongly
+            #if verbose:
+            #    print "\t\tConflicts",each,\
+            #          past_scores[each][most_same_current_uci],lowest
+
+        if lowest:
+            pick = (most_same_current_uci,1,min_val)  # flag 1: connection made
+            #if verbose:
+            #    print "\tUse most_same_current_uci",most_same_current_uci
+        else:
+            #if verbose:
+            #    print "\tMost_same_current_uci Not the Best"
+            # remove the disqualified current_center and
+            # see if the past_center has any other choices
+            # if so recursively run this function on that.
+            if len(past_scores[new_past_uci]) > 1:
+                #if verbose:
+                #    print "\t\tMany choices"
+                # deepcopy so the recursive retry cannot mutate the caller's scores
+                new_past_scores = copy.deepcopy(past_scores)
+                temp = {}
+                for each in new_past_scores[new_past_uci]:
+                    if each != most_same_current_uci:
+                        temp[each] = new_past_scores[new_past_uci][each]
+                new_past_scores[new_past_uci] = temp
+                pick = try_to_connect(copy,new_past_uci,new_past_scores)
+            else:
+                pick = (most_same_current_uci,0,min_val)  # flag 0: lost the conflict, no alternatives left
+                #if verbose:
+                #    print "\tNo more choices for ",new_past_uci
+    else:
+        # Defensive branch: only reachable if past_scores[new_past_uci] is falsy
+        # but not == {} (e.g. None); flag -1 signals nothing could be attempted.
+        pick = (new_past_uci,-1,-1)
+        #if verbose:
+        #    print "\nChecking Past Center",new_past_uci,past_scores[new_past_uci]
+        #    print "\tNo possible choices for ",new_past_uci
+    return pick
diff --git a/diagnostics/etc_composites/util/tracker/where_y_v4.py b/diagnostics/etc_composites/util/tracker/where_y_v4.py
new file mode 100755
index 000000000..7227a21b1
--- /dev/null
+++ b/diagnostics/etc_composites/util/tracker/where_y_v4.py
@@ -0,0 +1,75 @@
+import defines
+
+def where_y(sys,source,target,interval):
+
+    """This function locates the y_index (integer) of a target latitude
+    by searching the source latitude array.
+
+    Options/Arguments:
+        sys -- the sys module (passed in so sys.exit is available).
+        source -- list of latitudes (grid midpoints).
+        target -- latitude to find in source.
+        interval -- half the grid spacing of source.
+
+    Returns:
+        y -- the y_index of source holding target.
+
+    Examples:
+
+    Notes: If the target falls on a boundary between grids, then
+        we opt to assign y to the equatorward grid.
+
+    Author: Mike Bauer
+
+    Log:
+        2008/05 MB - File created.
+        2008/10 MB - Added input checks, docstring.
+        2008/10 MB - Fixed bug in the 'find it' check.
+    """
+
+    # Given that source are the midpoints of the bin and interval is half
+    # the grid/bin spacing, then define the edges as so.
+    edges = dict([(round(i+interval),1) for i in source])  # dict keys dedupe edges shared by adjacent bins
+    edges.update([(round(i-interval),1) for i in source])
+    edges = list(edges.keys())
+    edges.sort()  # ascending latitude order, so index order matches source order
+
+    # Find the "bin" that target falls into. If target is on an edge then
+    # choose the equatorward grid. If target is a grid midpoint use that.
+    if defines.verbose:
+        print ("target value: "+str(target))
+        print (edges)
+    if target in source:
+        # target in source (exact midpoint match)
+        y = source.index(target)
+        return y
+
+    if target in edges:
+        # target on an edge
+        y = edges.index(target)
+        if target > 0.0:
+            y -= 1  # northern hemisphere: step back to the equatorward grid
+        return y
+    for j in range(len(edges)-1):
+        # target between edges
+        if edges[j] <= target <= edges[j+1]:
+##FIX
+
+            ### ADDED BY JJ #########
+            ###### We pick the lower end of the edges if target is positive and higher end if target is negative
+            ### if target is negative then we have to pick j+1, else pick j
+            if (target > 0):
+                y = j
+            elif (target < 0):
+                y = j+1
+            # NOTE(review): if target == 0.0 reaches this branch, neither case
+            # assigns y and the return below raises UnboundLocalError -- confirm
+            # that 0.0 is always a grid midpoint or an edge for the grids used.
+
+            #print "hmmm, I forgot to do this case"
+            #print edges[j]
+            #sys.exit("Stop HERE")
+            return y
+
+    # Error check
+    # NOTE(review): 'hit' is undefined in this scope, so this error path itself
+    # raises NameError instead of printing the intended diagnostic message.
+    sys.exit("Error in where_y %s %s %s %s" % (repr(source),
+                                               repr(target),
+                                               repr(interval),
+                                               repr(hit)))
diff --git a/diagnostics/etc_composites/util/tracker_readme.md b/diagnostics/etc_composites/util/tracker_readme.md
new file mode 100755
index 000000000..d6647e46a
--- /dev/null
+++ b/diagnostics/etc_composites/util/tracker_readme.md
@@ -0,0 +1,205 @@
+# MCMS TRACKER
+
+***Created by: Jeyavinoth Jeyaratnam***
+
+***Last Modified: March 16th, 2022***
+
+***Branched off from Mike Bauer's MCMS Tracking Algorithm***
+
+This code is a modified version of Mike Bauer's MCMS Tracking Algorithm, using Python.
+
+The "tracker" folder contains the MCMS Cyclone Tracker.
+The "datacyc" folder contains the code to grab data around the tracked cyclones.
+
+#### Notes:
+1) Current version of the code runs on Python 3
+This code was tested on v3.6, because netcdftime is run on this version under the conda environment. This might cause issues with the basemap library (check below).
+
+
+
+#### Changes to Mike's code
+* necessary integer division was changed in the code
+* cPickle was changed to pickle (should not be an issue)
+* hard coded in some imports, cuz of the way "exec" command works in python3
+* dictionaries in python2 are not ordered dicts, so the keys were sorted in python2 code and python3 code to compare consistently
+* tree\_traversal\_v4.py code had issues with the list not being ordered in python2, so I changed the python2 code to have an ordered dict for fair comparison with the python3 tracker
+
+
+## Installation of necessary libraries
+
+You can setup conda to run python on your machine.
+
+Then create a new conda environment with Python version 3.6.
+
+* conda create -n tracker python=3.6
+
+Activate the conda environment in your terminal using:
+
+* conda activate tracker
+
+Then install the following libraries:
+
+* conda install scipy
+
+* conda install matplotlib
+
+* conda install numpy
+
+* conda install basemap
+
+* conda install proj4
+
+* conda install netcdf4
+
+* conda install cython
+  - this is needed to create the \*.so files below
+
+* conda install -c conda-forge netcdftime
+
+
+Then you have to run the following (make sure to cd into your tracker folder):
+
+* python3 setup\_g2l\_v4.py build\_ext --inplace
+
+* python3 setup\_gcd\_v4.py build\_ext --inplace
+
+* python3 setup\_rhumb\_line\_nav\_v4.py build\_ext --inplace
+
+These commands create 3 \*.so (shared objects) operators in the current directory. You have to rename the appropriate \*.so file.
+
+* g2l\_v4.cpython......so -> g2l\_v4.so
+
+* gcd\_v4.cpython.....so -> gcd\_v4.so
+
+* rhumb\_line\_nav\_v4.cpython......so -> rhumb\_line\_nav\_v4.so
+
+Note that in the current directory, these \*.so files already exist, just to show an example of what they look like.
+
+# Additional issues faced when setting up and running the code
+
+netcdftime library requires 3.6, so conda will downgrade your version of python to 3.6
+
+In python3.6 right now, basemap has an issue that is not fixed, so you will have to edit line 5111 from /lib/python3.6/site-packages/mpl\_toolkits/basemap/\_\_init\_\_.py file, where for example the environment\_base\_folder=/home/USER/anaconda3/envs/tracker
+
+from:
+
+ return list(map(\_addcyclic,arr[:-1]) + [\_addcyclic\_lon(arr[-1])])
+
+to:
+
+ return list(map(\_addcyclic,arr[:-1])) + [\_addcyclic\_lon(arr[-1])]
+
+Also, you will have to edit line 5096 in the above file (if not the code will print out repeated warnings on this issue).
+
+from:
+
+ return npsel.concatenate((a,a[slicer]),axis=axis)
+
+to:
+
+ return npsel.concatenate((a,a[tuple(slicer)]),axis=axis)
+
+
+
+# MAC OSX specific issues
+
+In order to run this tracker code, you will have to run it using:
+
+You will have to update GCC libraries by installing xcode from the terminal
+
+"xcode-select --install"
+
+and installing the SDK headers from the terminal
+
+"open /Library/Developer/CommandLineTools/Packages/macOS\_SDK\_headers\_for\_macOS\_10.14.pkg"
+
+In MAC OSX, you might face an issue with the plots created using matplotlib library in python.
+
+You have to add the following lines in setup\_cam6\_v4.py file, make sure the following lines are in the correct order:
+
+import matplotlib
+
+matplotlib.use('TKAgg')
+
+import matplotlib.pyplot as plt
+
+
+## Pre-Setup of Sea Level Pressure data files
+
+Convert all the SLP files into the appropriate 6 hourly slp.YEAR.nc files (where YEAR varies). This has to be done by the user, and the slp files should be in the correct format for the tracker.
+
+SLP netcdf files should have the following variables: lat (degrees\_north), lon (degrees\_east), time(hours since start\_year/01/01 00:00:00), slp (3d array with dimensions [time, lat, lon], with units "mb").
+
+In the netcdf the "calendar" type has to be set for the time variables.
+The time calendar must be set as "365\_day" or "proleptic\_gregorian," depending on your data.
+
+
+***Internal Note:***
+
+*Some of the SLP convert code I created is in the folder /slp_converts/*
+
+*For Vee’s data I created a file that converts year 11 to 41, given a start year to the appropriate slp files.
+This file is in the folder, called “convert\_vee.py”*
+
+*In this file, you have to change the output folder location, input folder location, start\_year (the year to start labelling the slp files from) and the model year range (model years go from 00 to 40).*
+
+*Additionally adjust in\_file\_format variable to indicate the format in which the file is organized. The %04d in the file\_format will be replaced by the model years within the range given in the model\_year\_range variable.*
+
+*out\_file\_format can be changed as well, but for this tracker it should be kept as slp.YEAR.nc.*
+
+*After setting up the variables on the top of convert\_vee.py, run the python code using
+“Python convert\_vee.py” → this should create slp.YEAR.nc files in the output folder.*
+
+
+## Setting up the Tracker
+
+Edit the defines.py file, to make sure that you point to the correct folders.
+This file contains all the variables that need to be setup to run the MCMS tracker.
+
+***Setup defines.py***
+
+* source\_code\_folder -> ‘/mnt/drive1/jj/MCMS/V1/tracker’ the main source code location for the tracker, this will be the folder in which you clone this repository into.
+
+* slp\_data\_direcotry -> directory containing the slp data in the format needed by the tracker
+
+* topo\_file → path to the topographic file that is in the format of the slp data
+
+* model → model name that is provided to the tracker code, all folders created will be related to this
+
+* main\_folder\_location -> location in which to create a directory to setup the tracker code
+
+* over\_write\_years -> years to which to run the tracker code
+
+
+***Explanation of the additional directories:***
+
+The remaining locations are auto calculated by the defines.py code.
+
+Source code folder is the directory extension of the code location given above (/mnt/drive1/jj/MCMS/V1/tracker).
+
+The code in this folder will be copied into the new directory specified by main\_folder\_location and model.
+
+In the main\_folder\_location, the code will create a folder with the name “model”, and copy over the contents of the code into this folder.
+
+For now: over\_write\_years should be specified to indicate the range of years to track cyclones for.
+Future: over\_write\_years can be left as an empty array, the code will find the min and max years for slp and track the data for all the available years in the folder.
+
+The necessary folder path needed by the tracker are auto computed by defines.py.
+
+**Additional Options**
+
+create\_matlab\_dictionaries is a flag that is set to convert the tracked cyclones into matlab dictionaries. This is needed if you want to run the grab datacycs code.
+
+### Running the Tracker
+
+Run the tracker code after setting up the defines.py file. “python3 run\_tracker.py.”
+
+This will create the necessary folders and run the tracker code.
+
+The output files are then converted to readable formats.
+
+You have to run read\_mcms\_v4.py with template\_temp\_multi\_1.py first, followed by template\_temp\_multi\_2.py. This is automatically done by run\_tracker.py.
+
+Finally the outputs are converted to matlab dictionaries using main\_create\_dicts.py. This step can be controlled using the create\_matlab\_dictionaries flag in defines.py.
+
+
+
diff --git a/src/cmip6-cmor-tables b/src/cmip6-cmor-tables
new file mode 160000
index 000000000..c4a8446ba
--- /dev/null
+++ b/src/cmip6-cmor-tables
@@ -0,0 +1 @@
+Subproject commit c4a8446bae0000ab3ccd0b4871bfdb7241421cc4
diff --git a/src/conda/env_etc_composites.yml b/src/conda/env_etc_composites.yml
new file mode 100644
index 000000000..da0402daa
--- /dev/null
+++ b/src/conda/env_etc_composites.yml
@@ -0,0 +1,14 @@
+name: _MDTF_etc_composites
+channels:
+ - anaconda
+ - defaults
+dependencies:
+ - python=3.7
+ - matplotlib=3.1
+ - basemap
+ - netCDF4
+ - xarray
+ - numpy
+ - cartopy
+ - pandas
+ - Cython
diff --git a/src/conda/env_eulerian_storm_track.yml b/src/conda/env_eulerian_storm_track.yml
new file mode 100644
index 000000000..a40ce8248
--- /dev/null
+++ b/src/conda/env_eulerian_storm_track.yml
@@ -0,0 +1,16 @@
+name: _MDTF_eulerian_storm_track
+channels:
+ - anaconda
+ - conda-forge
+ - defaults
+dependencies:
+ - python=3.7
+ - matplotlib=3.2.2
+ - cartopy
+ - basemap
+ - numpy
+ - netCDF4
+ - nco
+ - xarray
+ - mock
+ - ghostscript
diff --git a/src/default_jj.jsonc b/src/default_jj.jsonc
new file mode 100644
index 000000000..3d374ae7f
--- /dev/null
+++ b/src/default_jj.jsonc
@@ -0,0 +1,137 @@
+// Configuration for MDTF-diagnostics driver script self-test.
+//
+// Copy this file and customize the settings as needed to run the framework on
+// your own model output without repeating command-line options. Pass it to the
+// framework at the end of the command line (positionally) or with the
+// -f/--input-file flag. Any other explicit command line options will override
+// what's listed here.
+//
+// All text to the right of an unquoted "//" is a comment and ignored, as well
+// as blank lines (JSONC quasi-standard.)
+{
+ "case_list" : [
+ // The cases below correspond to the different sample model data sets. Note
+ // that the MDTF package does not currently support analyzing multiple
+ // models in a single invocation. Comment out or delete the first entry and
+ // uncomment the second to run NOAA-GFDL-AM4 only for the MJO_prop_amp POD,
+ // and likewise for the SM_ET_coupling POD.
+ {
+ // "CASENAME" : "QBOi.EXP1.AMIP.001",
+ // "CASENAME" : "LEOC.EXP1.MPM.001",
+ // "CASENAME" : "GFDL.EXP1.2PM.001",
+ // "CASENAME" : "ERAI.EXP1.TEST.001",
+ // "CASENAME" : "ERA5.MTPR.DEG10.001",
+ // "CASENAME" : "ERA5.MTPR.DEG15.001",
+ // "CASENAME" : "ERA5.TP.DEG10.001",
+ // "CASENAME" : "ERA5.TP.DEG15.001",
+ // "CASENAME" : "ERA5.ALL.DEG15.001",
+ // "CASENAME" : "ERA5.NEW.DEG10.001",
+ "CASENAME" : "ERA5.NEW.DEG15.001",
+ "model" : "GFDL",
+ "convention" : "GFDL",
+ "FIRSTYR" : 2020,
+ "LASTYR" : 2021,
+ "pod_list": [
+ // Optional: PODs to run for this model only (defaults to all)
+ // "Wheeler_Kiladis",
+ // "MJO_suite"
+ // "MJO_teleconnection"
+ // "convective_transition_diag",
+ // "precip_diurnal_cycle"
+ // "eulerian_storm_track"
+ "etc_composites"
+ ]
+ }
+ // {
+ // "CASENAME" : "GFDL.CM4.c96L32.am4g10r8",
+ // "model" : "AM4",
+ // "convention" : "AM4",
+ // "FIRSTYR" : 1977,
+ // "LASTYR" : 1981,
+ // "pod_list" : ["MJO_prop_amp"]
+ // }
+ // {
+ // "CASENAME" : "Lmon_GISS-E2-H_historical_r1i1p1",
+ // "model" : "CMIP",
+ // "convention" : "CMIP",
+ // "FIRSTYR" : 1951,
+ // "LASTYR" : 2005,
+ // "pod_list" : ["SM_ET_coupling"]
+ // }
+ // {
+ // "CASENAME" : "NCAR-CAM5.timeslice",
+ // "model" : "CESM",
+ // "convention" : "CMIP",
+ // "FIRSTYR" : 2000,
+ // "LASTYR" : 2004,
+ // "pod_list": ["example"]
+ // }
+ ],
+ // PATHS ---------------------------------------------------------------------
+ // Location of supporting data downloaded when the framework was installed.
+
+ // If a relative path is given, it's resolved relative to the MDTF-diagnostics
+ // code directory. Environment variables (eg, $HOME) can be referenced with a
+ // "$" and will be expended to their current values when the framework runs.
+
+ // Parent directory containing observational data used by individual PODs.
+ "OBS_DATA_ROOT": "../inputdata/obs_data",
+
+ // Parent directory containing results from different models.
+ "MODEL_DATA_ROOT": "../inputdata/model/",
+
+ // Working directory. Defaults to working directory if blank.
+ "WORKING_DIR": "../wkdir",
+
+ // Directory to write output. The results of each run of the framework will be
+ // put in a subdirectory of this directory.
+ "OUTPUT_DIR": "../wkdir",
+
+ // Location of the Anaconda/miniconda installation to use for managing
+ // dependencies (path returned by running `conda info --base`.) If empty,
+ // framework will attempt to determine location of system's conda installation.
+ "conda_root": "/home/jj/anaconda3",
+
+ // Directory containing the framework-specific conda environments. This should
+ // be equal to the "--env_dir" flag passed to conda_env_setup.sh. If left
+ // blank, the framework will look for its environments in the system default
+ // location.
+ "conda_env_root": "/home/jj/anaconda3/envs",
+
+ // SETTINGS ------------------------------------------------------------------
+ // Any command-line option recognized by the mdtf script (type `mdtf --help`)
+ // can be set here, in the form "flag name": "desired setting".
+
+ // Method used to fetch model data.
+ "data_manager": "Local_File",
+
+ // Method used to manage dependencies.
+ "environment_manager": "Conda",
+
+ // Settings affecting what output is generated:
+
+ // Set to true to have PODs save postscript figures in addition to bitmaps.
+ "save_ps": false,
+
+ // Set to true to have PODs save netCDF files of processed data.
+ "save_nc": false,
+
+ // Set to true to save HTML and bitmap plots in a .tar file.
+ "make_variab_tar": true,
+
+ // Set to true to overwrite results in OUTPUT_DIR; otherwise results saved
+ // under a unique name.
+ "overwrite": false,
+
+ // Settings used in debugging:
+
+ // Log verbosity level.
+ "verbose": 1,
+
+ // Set to true for framework test. Data is fetched but PODs are not run.
+ "test_mode": false,
+
+ // Set to true for framework test. No external commands are run and no remote
+ // data is copied. Implies test_mode.
+ "dry_run": false
+}
diff --git a/src/default_tests.jsonc b/src/default_tests.jsonc
index 1fb9c22d5..561bee71d 100644
--- a/src/default_tests.jsonc
+++ b/src/default_tests.jsonc
@@ -23,12 +23,13 @@
"LASTYR" : 1981,
"pod_list": [
// Optional: PODs to run for this model only (defaults to all)
- "Wheeler_Kiladis",
- "EOF_500hPa",
- "MJO_suite",
- "MJO_teleconnection"
+ // "Wheeler_Kiladis",
+ // "EOF_500hPa",
+ // "MJO_suite",
+ // "MJO_teleconnection"
// "convective_transition_diag"
// "precip_diurnal_cycle"
+ "etc_composites"
]
}
// {
diff --git a/src/mdtf_settings.json b/src/mdtf_settings.json
new file mode 100644
index 000000000..a232606c5
--- /dev/null
+++ b/src/mdtf_settings.json
@@ -0,0 +1,142 @@
+{
+ "# Configuration for MDTF-diagnostics driver script. ":"",
+ "# All entries starting with '#' are treated as comments and ignored. ":"",
+ "# (The JSON format does not support comments.) ":"",
+
+ "# MODEL INPUT ----------------------------------------------------------":"",
+ "# ":"",
+ "# List all cases below with CASE tag at front of line ":"",
+ "# CASE casename model-type startyr endyr ":"",
+ "# Note that the mdtf package does not yet handle multiple cases. ":"",
+ "## ":"",
+ "# The cases below correspond to the different test data sets. ":"",
+ "# Uncomment the first entry to run NCAR-CESM and the second ":"",
+ "# entry to run NOAA-GFDL-AM4 only for the MJO_prop_amp POD. ":"",
+
+ "case_list" : [
+ {
+ "CASENAME" : "QBOi.EXP1.AMIP.001",
+ "model" : "CESM",
+ "variable_convention" : "CESM",
+ "FIRSTYR" : 1977,
+ "LASTYR" : 1981,
+ "pod_list": [
+ "# Optional: PODs to run for this model only",
+ "# (otherwise use pod_list below)",
+ "Wheeler_Kiladis",
+ "eulerian_storm_track",
+ "#EOF_500hPa",
+ "#cionvective_transition_diag",
+ "#MJO_suite",
+ "#MJO_teleconnection",
+ "#precip_diurnal_cycle #- high memory requirement"
+ ]
+ },
+ {
+ "CASENAME" : "GFDL.CM4.c96L32.am4g10r8",
+ "model" : "AM4",
+ "variable_convention" : "AM4",
+ "FIRSTYR" : 1977,
+ "LASTYR" : 1981,
+ "pod_list" : ["eulerian-storm-track"]
+ },
+ {
+ "CASENAME" : "Lmon_GISS-E2-H_historical_r1i1p1",
+ "model" : "CMIP",
+ "variable_convention" : "CMIP",
+ "FIRSTYR" : 1951,
+ "LASTYR" : 2005,
+ "pod_list" : ["#eulerian-storm-track"]
+ }
+ ],
+
+ "# DIAGNOSTICS ----------------------------------------------------------":"",
+ "# List of PODs to run by default, if not specified in caselist above. ":"",
+ "# Comment out any with # at start of line. ":"",
+
+ "pod_list" : [
+ "Wheeler_Kiladis",
+ "EOF_500hPa",
+ "convective_transition_diag",
+ "MJO_suite",
+ "MJO_teleconnection",
+ "MJO_prop_amp",
+ "precip_diurnal_cycle",
+ "SM_ET_coupling",
+ "eulerian_storm_track"
+ ],
+
+ "# PATHS ----------------------------------------------------------------":"",
+ "# Relative paths are resolved relative to $CODE_ROOT ":"",
+ "# (= parent directory of the directory this file is in.) ":"",
+
+ "paths" : {
+ "# Code installation directory. ":"",
+ "CODE_ROOT" : ".",
+ "# Parent directory containing results from different models. ":"",
+ "MODEL_DATA_ROOT" : "../inputdata/model/",
+ "# Parent directory containing observational data used by individual PODs.":"",
+ "OBS_DATA_ROOT" : "../inputdata/obs_data/",
+ "# Working directory. ":"",
+ "WORKING_DIR" : "../wkdir",
+ "# Directory to write output files. Defaults to working directory. ":"",
+ "OUTPUT_DIR" : "../wkdir"
+ },
+
+ "# SETTINGS -------------------------------------------------------------":"",
+
+ "settings" : {
+ "# Verbosity levels: # 0-minimal,1-normal,2-copious,3-debug ":"",
+ "verbose" : 2,
+ "# Set to true for framework test: ":"",
+ "# (script just reports what it would do, doesn't call actual packages)":"",
+ "test_mode": false,
+ "# Set to true for dry run: No data is copied. Implies test_mode. ":"",
+ "dry_run": false,
+
+ "# DATA FETCHING ......................................................":"",
+ "# Specify the method the code uses to fetch model data. ":"",
+ "# Currently supported options are ":"",
+ "# - 'Localfile': Do not attempt to fetch data; read from pre-existing ":"",
+ "# files at $MODEL_DATA_ROOT (set above). ":"",
+ "data_manager": "Local_File",
+ "# Amount of time (in seconds) to wait before giving up on ":"",
+ "# transferring a data file to the local filesystem. ":"",
+ "# Set to zero to disable. ":"",
+ "file_transfer_timeout": 300,
+
+ "# RUNTIME ENVIRONMENTS................................................":"",
+ "# Specify the method the code uses to manage POD's dependencies. ":"",
+ "# Supported options are ":"",
+ "# - 'None' (use whatever modules are found in system) ":"",
+ "# - 'Conda' (use the Anaconda package manager) ":"",
+ "environment_manager" : "None",
+ "# Sets the Anaconda installation. This is only used if ":"",
+ "# environment_manager is set to 'Conda'. ":"",
+ "# Set equal to '' to use conda from your system's $PATH. ":"",
+ "conda_root" : "",
+ "# Sets the root directory for Anaconda environment installs. This ":"",
+ "# is only used if environment_manager is set to 'Conda'. ":"",
+ "# Set this equal to '' to install in your system's default location. ":"",
+ "conda_env_root" : "./envs/conda",
+ "# Sets the root directory for python virtual environments. This is ":"",
+ "# only used if environment_manager = 'Virtualenv'. ":"",
+ "venv_root" : "./envs/venv",
+ "# Sets the root directory for R packages requested by PODs. This ":"",
+ "# is only used if environment_manager = 'Virtualenv'. ":"",
+ "# Set this equal to '' to install in your system library. ":"",
+ "r_lib_root" : "./envs/r_libs",
+
+ "# OUTPUT SETTINGS ....................................................":"",
+ "# Set to true to retain output netcdf files: ":"",
+ "save_nc" : true,
+ "# Set to true to retain output .ps plots: ":"",
+ "save_ps" : false,
+ "# default flags to pass to PS -> bitmap figure conversion: ":"",
+ "convert_flags": "-crop 0x0+5+5",
+ "# default bitmap figure output (for html) ":"",
+ "convert_output_fmt": "png",
+ "# Set to true to save output directory in .tar file: ":"",
+ "make_variab_tar": true
+ }
+}