Repository: climate
Updated Branches:
  refs/heads/master 9f3482714 -> bff23ffe3


2nd pass at CLIMATE-854 Ensure that ocw runs with Python 3.X


Project: http://git-wip-us.apache.org/repos/asf/climate/repo
Commit: http://git-wip-us.apache.org/repos/asf/climate/commit/e050ad55
Tree: http://git-wip-us.apache.org/repos/asf/climate/tree/e050ad55
Diff: http://git-wip-us.apache.org/repos/asf/climate/diff/e050ad55

Branch: refs/heads/master
Commit: e050ad55f1e23bed65752faddfb99ed485306ebd
Parents: 3683b86
Author: Lewis John McGibbney <lewis.mcgibb...@gmail.com>
Authored: Wed Sep 14 21:20:24 2016 -0700
Committer: Lewis John McGibbney <lewis.mcgibb...@gmail.com>
Committed: Wed Sep 14 21:20:24 2016 -0700

----------------------------------------------------------------------
 ocw/data_source/local.py | 12 ++++++------
 ocw/dataset_processor.py | 16 +++++++---------
 ocw/esgf/download.py     |  4 ++--
 ocw/esgf/main.py         | 40 ++++++++++++++++++++--------------------
 ocw/esgf/search.py       |  6 +++---
 ocw/tests/test_local.py  | 20 ++++++++++----------
 6 files changed, 48 insertions(+), 50 deletions(-)
----------------------------------------------------------------------
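
Nearly every hunk below is a mechanical Python 2 -> 3 conversion: print
statements become calls to the built-in print() function, and zero-padded
integer literals such as 01 (octal syntax in Python 2, a SyntaxError in
Python 3) become plain decimals. A minimal sketch of the pattern, using
values echoed from the hunks rather than anything new to the commit:

    # Sketch only, not part of the commit; runs unmodified on 2.7 and 3.x.
    from __future__ import print_function  # makes print() a function on 2.7
    import datetime

    msg = ('Warning: length of time dimension must '
           'be a multiple of nt_average')
    print(msg)  # the Python 2 statement form was: print msg

    # Python 3 rejects zero-padded literals such as 01 outright; write 1.
    t = datetime.datetime(2001, 1, 1)
    print('first timestep=%s' % t)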


http://git-wip-us.apache.org/repos/asf/climate/blob/e050ad55/ocw/data_source/local.py
----------------------------------------------------------------------
diff --git a/ocw/data_source/local.py b/ocw/data_source/local.py
index d32cf1b..5df0624 100644
--- a/ocw/data_source/local.py
+++ b/ocw/data_source/local.py
@@ -160,7 +160,7 @@ def load_WRF_2d_files(file_path=None,
     times = []
     nfile = len(WRF_files)
     for ifile, file in enumerate(WRF_files):
-        print 'Reading file '+str(ifile+1)+'/'+str(nfile), file
+        print('Reading file '+str(ifile+1)+'/'+str(nfile), file)
         file_object = netCDF4.Dataset(file)
         time_struct_parsed = strptime(file[-19:],"%Y-%m-%d_%H:%M:%S")
         for ihour in numpy.arange(24):
@@ -391,7 +391,7 @@ def load_WRF_2d_files_RAIN(file_path=None,
     times = []
     nfile = len(WRF_files)
     for ifile, file in enumerate(WRF_files):
-        print 'Reading file '+str(ifile+1)+'/'+str(nfile), file
+        print('Reading file '+str(ifile+1)+'/'+str(nfile), file)
         file_object = netCDF4.Dataset(file)
         time_struct_parsed = strptime(file[-19:],"%Y-%m-%d_%H:%M:%S")
         for ihour in range(24):
@@ -404,7 +404,7 @@ def load_WRF_2d_files_RAIN(file_path=None,
     times= numpy.array(times)
     years = numpy.array([d.year for d in times])
     ncycle = numpy.unique(years).size
-    print 'ncycle=',ncycle
+    print('ncycle=',ncycle)
     nt, ny, nx = values0.shape
     values = numpy.zeros([nt-ncycle*24, ny, nx])
     times2 = []
@@ -496,7 +496,7 @@ def load_dataset_from_multiple_netcdf_files(variable_name,
     times = []
     nfile = len(nc_files)
     for ifile, file in enumerate(nc_files):
-        print 'NC file '+str(ifile+1)+'/'+str(nfile), file
+        print('NC file '+str(ifile+1)+'/'+str(nfile), file)
         file_object0= load_file(file, variable_name, lat_name=lat_name,
                                 lon_name=lon_name, time_name=time_name)
         values0= file_object0.values
@@ -558,7 +558,7 @@ def load_NLDAS_forcingA_files(file_path=None,
     times = []
     nfile = len(NLDAS_files)
     for ifile, file in enumerate(NLDAS_files):
-        print 'Reading file '+str(ifile+1)+'/'+str(nfile), file
+        print('Reading file '+str(ifile+1)+'/'+str(nfile), file)
         file_object = netCDF4.Dataset(file)
         time_struct_parsed = strptime(file[-20:-7],"%Y%m%d.%H%M")
         times.append(datetime(*time_struct_parsed[:6]))
@@ -624,7 +624,7 @@ def load_GPM_IMERG_files(file_path=None,
     times = []
     nfile = len(GPM_files)
     for ifile, file in enumerate(GPM_files):
-        print 'Reading file '+str(ifile+1)+'/'+str(nfile), file
+        print('Reading file '+str(ifile+1)+'/'+str(nfile), file)
         file_object = h5py.File(file)
         time_struct_parsed = strptime(file[-39:-23],"%Y%m%d-S%H%M%S")
         times.append(datetime(*time_struct_parsed[:6]))

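A subtlety in the local.py conversions above: if the module does not also
gain `from __future__ import print_function` (the hunks here do not show
one), Python 2 parses the two-argument form as a print statement followed
by a parenthesized tuple, so the converted lines emit a tuple repr under
2.x. A small sketch of the difference, with a hypothetical file name:

    # Without the __future__ import, Python 2 prints the tuple repr:
    #   ('Reading file 1/3', '/tmp/a.nc')
    # With it (or on Python 3), the fields print space-separated:
    #   Reading file 1/3 /tmp/a.nc
    from __future__ import print_function
    print('Reading file 1/3', '/tmp/a.nc')  # hypothetical file name
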
http://git-wip-us.apache.org/repos/asf/climate/blob/e050ad55/ocw/dataset_processor.py
----------------------------------------------------------------------
diff --git a/ocw/dataset_processor.py b/ocw/dataset_processor.py
index 36e337a..9641181 100755
--- a/ocw/dataset_processor.py
+++ b/ocw/dataset_processor.py
@@ -148,7 +148,7 @@ def temporal_rebin_with_time_index(target_dataset, nt_average):
     if nt % nt_average != 0:
         msg = ('Warning: length of time dimension must '
                'be a multiple of nt_average')
-        print msg
+        print(msg)
     # nt2 is the length of time dimension in the rebinned dataset
     nt2 = nt / nt_average
     binned_dates = target_dataset.times[np.arange(nt2) * nt_average]
@@ -904,11 +904,9 @@ def _rcmes_spatial_regrid(spatial_values, lat, lon, lat2, lon2, order=1):
 
     nlat = spatial_values.shape[0]
     nlon = spatial_values.shape[1]
-    # print nlat, nlon, "lats, lons - incoming dataset"
 
     nlat2 = lat2.shape[0]
     nlon2 = lon2.shape[1]
-    # print nlat2, nlon2, "NEW lats, lons - for the new grid output"
 
     # To make our lives easier down the road, let's
     # turn these into arrays of x & y coords
@@ -1050,7 +1048,7 @@ def _rcmes_calc_average_on_new_time_unit(data, dates, unit):
     acceptable = ((unit == 'full') | (unit == 'annual') |
                   (unit == 'monthly') | (unit == 'daily'))
     if not acceptable:
-        print 'Error: unknown unit type selected for time averaging: EXIT'
+        print('Error: unknown unit type selected for time averaging: EXIT')
         return - 1, - 1, - 1, - 1
 
     nt, ny, nx = data.shape
@@ -1124,7 +1122,7 @@ def _rcmes_calc_average_on_new_time_unit_K(data, dates, unit):
     acceptable = ((unit == 'full') | (unit == 'annual') |
                   (unit == 'monthly') | (unit == 'daily'))
     if not acceptable:
-        print 'Error: unknown unit type selected for time averaging: EXIT'
+        print('Error: unknown unit type selected for time averaging: EXIT')
         return - 1, - 1, - 1, - 1
 
     # Calculate arrays of: annual timeseries: year (2007,2007),
@@ -1296,9 +1294,9 @@ def _congrid(a, newdims, method='linear', centre=False, minusone=False):
     old = np.array(a.shape)
     ndims = len(a.shape)
     if len(newdims) != ndims:
-        print "[congrid] dimensions error. " \
+        print("[congrid] dimensions error. " \
               "This routine currently only supports " \
-              "rebinning to the same number of dimensions."
+              "rebinning to the same number of dimensions.")
         return None
     newdims = np.asarray(newdims, dtype=float)
     dimlist = []
@@ -1351,9 +1349,9 @@ def _congrid(a, newdims, method='linear', centre=False, minusone=False):
         newa = scipy.ndimage.map_coordinates(a, newcoords)
         return newa
     else:
-        print "Congrid error: Unrecognized interpolation type.\n", \
+        print("Congrid error: Unrecognized interpolation type.\n", \
               "Currently only \'neighbour\', \'nearest\',\'linear\',", \
-              "and \'spline\' are supported."
+              "and \'spline\' are supported.")
         return None
 
 

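One unchanged context line in temporal_rebin_with_time_index above is worth
flagging: nt2 = nt / nt_average is integer division on Python 2 but true
division on Python 3, so nt2 becomes a float and np.arange(nt2) yields a
float array that recent numpy rejects as an index. A sketch of the
floor-division form, offered as an observation rather than a change this
commit makes:

    # Sketch: floor division keeps nt2 an integer on both interpreters.
    nt, nt_average = 24, 6
    nt2 = nt // nt_average  # 4 everywhere; plain nt / nt_average is 4.0 on 3.x
    assert isinstance(nt2, int)
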
http://git-wip-us.apache.org/repos/asf/climate/blob/e050ad55/ocw/esgf/download.py
----------------------------------------------------------------------
diff --git a/ocw/esgf/download.py b/ocw/esgf/download.py
index d643de8..f322e1d 100644
--- a/ocw/esgf/download.py
+++ b/ocw/esgf/download.py
@@ -55,7 +55,7 @@ def download(url, toDirectory="/tmp"):
     
     # download file
     localFilePath = join(toDirectory,url.split('/')[-1])
-    print "\nDownloading url: %s to local path: %s ..." % (url, localFilePath)
+    print("\nDownloading url: %s to local path: %s ..." % (url, localFilePath))
     localFile=open( localFilePath, 'w')
     webFile=opener.open(url)
     localFile.write(webFile.read())
@@ -64,4 +64,4 @@ def download(url, toDirectory="/tmp"):
     localFile.close()
     webFile.close()
     opener.close()
-    print "... done"
+    print("... done")

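The unchanged lines of download() above carry a related Python 3 hazard:
the opener's response body is bytes under 3.x, while localFile is opened in
text mode ('w'), so the write raises TypeError. A bytes-safe sketch, with a
hypothetical URL and destination path standing in for the real arguments:

    import shutil
    try:
        from urllib.request import urlopen   # Python 3
    except ImportError:
        from urllib2 import urlopen          # Python 2

    url = 'http://example.com/data.nc'       # hypothetical URL
    webFile = urlopen(url)
    # 'wb', not 'w': the response body is bytes on Python 3, and writing
    # bytes to a text-mode file raises TypeError there.
    with open('/tmp/example.nc', 'wb') as localFile:
        shutil.copyfileobj(webFile, localFile)
    webFile.close()
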
http://git-wip-us.apache.org/repos/asf/climate/blob/e050ad55/ocw/esgf/main.py
----------------------------------------------------------------------
diff --git a/ocw/esgf/main.py b/ocw/esgf/main.py
index 54ddf62..799ad38 100644
--- a/ocw/esgf/main.py
+++ b/ocw/esgf/main.py
@@ -35,10 +35,10 @@ def main():
     password = raw_input('Enter your ESGF Password:\n')
 
     # step 1: obtain short-term certificate
-    print 'Retrieving ESGF certificate...'
+    print('Retrieving ESGF certificate...')
     # logon using client-side MyProxy libraries
     if logon(username, password):
-        print "...done."
+        print("...done.")
 
     # step 2: execute faceted search for files
     urls = main_obs4mips()
@@ -58,25 +58,25 @@ def main_cmip5():
     
     searchClient = SearchClient(searchServiceUrl="http://pcmdi9.llnl.gov/esg-search/search", distrib=False)
     
-    print '\nAvailable projects=%s' % searchClient.getFacets('project')
+    print('\nAvailable projects=%s' % searchClient.getFacets('project'))
     searchClient.setConstraint(project='CMIP5')
-    print "Number of Datasets=%d" % searchClient.getNumberOfDatasets()
+    print("Number of Datasets=%d" % searchClient.getNumberOfDatasets())
     
-    print '\nAvailable models=%s' % searchClient.getFacets('model')
+    print('\nAvailable models=%s' % searchClient.getFacets('model'))
     searchClient.setConstraint(model='INM-CM4')
-    print "Number of Datasets=%d" % searchClient.getNumberOfDatasets()
+    print("Number of Datasets=%d" % searchClient.getNumberOfDatasets())
     
-    print '\nAvailable experiments=%s' % searchClient.getFacets('experiment')
+    print('\nAvailable experiments=%s' % searchClient.getFacets('experiment'))
     searchClient.setConstraint(experiment='historical')
-    print "Number of Datasets=%d" % searchClient.getNumberOfDatasets()
+    print("Number of Datasets=%d" % searchClient.getNumberOfDatasets())
     
-    print '\nAvailable time frequencies=%s' % searchClient.getFacets('time_frequency')
+    print('\nAvailable time frequencies=%s' % searchClient.getFacets('time_frequency'))
     searchClient.setConstraint(time_frequency='mon')
-    print "Number of Datasets=%d" % searchClient.getNumberOfDatasets()
+    print("Number of Datasets=%d" % searchClient.getNumberOfDatasets())
 
-    print '\nAvailable CF standard names=%s' % searchClient.getFacets('cf_standard_name')
+    print('\nAvailable CF standard names=%s' % searchClient.getFacets('cf_standard_name'))
     searchClient.setConstraint(cf_standard_name='air_temperature')
-    print "Number of Datasets=%d" % searchClient.getNumberOfDatasets()
+    print("Number of Datasets=%d" % searchClient.getNumberOfDatasets())
     
     urls = searchClient.getFiles()
     return urls
@@ -90,21 +90,21 @@ def main_obs4mips():
     searchClient = SearchClient(distrib=False)
     
     # obs4MIPs
-    print '\nAvailable projects=%s' % searchClient.getFacets('project')
+    print('\nAvailable projects=%s' % searchClient.getFacets('project'))
     searchClient.setConstraint(project='obs4MIPs')
-    print "Number of Datasets=%d" % searchClient.getNumberOfDatasets()
+    print("Number of Datasets=%d" % searchClient.getNumberOfDatasets())
     
-    print '\nAvailable variables=%s' % searchClient.getFacets('variable')
+    print('\nAvailable variables=%s' % searchClient.getFacets('variable'))
     searchClient.setConstraint(variable='hus')
-    print "Number of Datasets=%d" % searchClient.getNumberOfDatasets()
+    print("Number of Datasets=%d" % searchClient.getNumberOfDatasets())
     
-    print '\nAvailable time frequencies=%s' % searchClient.getFacets('time_frequency')
+    print('\nAvailable time frequencies=%s' % searchClient.getFacets('time_frequency'))
     searchClient.setConstraint(time_frequency='mon')
-    print "Number of Datasets=%d" % searchClient.getNumberOfDatasets()
+    print("Number of Datasets=%d" % searchClient.getNumberOfDatasets())
     
-    print '\nAvailable models=%s' % searchClient.getFacets('model')
+    print('\nAvailable models=%s' % searchClient.getFacets('model'))
     searchClient.setConstraint(model='Obs-MLS')
-    print "Number of Datasets=%d" % searchClient.getNumberOfDatasets()
+    print("Number of Datasets=%d" % searchClient.getNumberOfDatasetsi())
     
     urls = searchClient.getFiles()
     return urls

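Note that main() above still calls raw_input(), which Python 3 renamed to
input(), so the login prompt fails on 3.x before any of the converted
print() calls run. A common compatibility shim, shown as a sketch rather
than as part of this commit:

    # Sketch: bind one name that reads a line on either interpreter.
    try:
        read_input = raw_input   # Python 2
    except NameError:            # raw_input is gone on Python 3
        read_input = input
    username = read_input('Enter your ESGF Username:\n')
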
http://git-wip-us.apache.org/repos/asf/climate/blob/e050ad55/ocw/esgf/search.py
----------------------------------------------------------------------
diff --git a/ocw/esgf/search.py b/ocw/esgf/search.py
index a8790b3..a6b527a 100644
--- a/ocw/esgf/search.py
+++ b/ocw/esgf/search.py
@@ -53,7 +53,7 @@ class SearchClient():
         :param constraints: dictionary of (facet name, facet value) constraints.
         """
         for key in constraints:
-            print 'Setting constraint: %s=%s' % (key, constraints[key])
+            print('Setting constraint: %s=%s' % (key, constraints[key]))
             self.constraints[key] = constraints[key]
         self.context = self.context.constrain(**constraints)
         
@@ -78,10 +78,10 @@ class SearchClient():
         datasets = self.context.search()
         urls = []
         for dataset in datasets:
-            print "\nSearching files for dataset=%s with constraints: %s" % 
(dataset.dataset_id, self.constraints)
+            print("\nSearching files for dataset=%s with constraints: %s" % 
(dataset.dataset_id, self.constraints))
             files = dataset.file_context().search(**self.constraints)
             for file in files:
-                print 'Found file=%s' % file.download_url
+                print('Found file=%s' % file.download_url)
                 urls.append(file.download_url)
         return urls
         

http://git-wip-us.apache.org/repos/asf/climate/blob/e050ad55/ocw/tests/test_local.py
----------------------------------------------------------------------
diff --git a/ocw/tests/test_local.py b/ocw/tests/test_local.py
index 5f40466..7eff214 100644
--- a/ocw/tests/test_local.py
+++ b/ocw/tests/test_local.py
@@ -60,8 +60,8 @@ class test_load_file(unittest.TestCase):
 
     def test_function_load_file_times(self):
         """To test load_file function for times"""
-        newTimes = datetime.datetime(2001, 01, 01), datetime.datetime(
-            2001, 02, 01), datetime.datetime(2001, 03, 01)
+        newTimes = datetime.datetime(2001, 1, 1), datetime.datetime(
+            2001, 2, 1), datetime.datetime(2001, 3, 1)
         self.assertItemsEqual(local.load_file(
             self.file_path, "value").times, newTimes)
 
@@ -77,8 +77,8 @@ class test_load_file(unittest.TestCase):
 
     def test_function_load_file_alt_times(self):
         """To test load_file function for times with different variable 
names"""
-        newTimes = datetime.datetime(2001, 04, 01), datetime.datetime(
-            2001, 05, 01), datetime.datetime(2001, 06, 01)
+        newTimes = datetime.datetime(2001, 4, 1), datetime.datetime(
+            2001, 5, 1), datetime.datetime(2001, 6, 1)
         self.assertItemsEqual(local.load_file(
             self.file_path, "value", time_name="alt_time").times, newTimes)
 
@@ -129,8 +129,8 @@ class TestLoadMultipleFiles(unittest.TestCase):
         """To test load_multiple_files function for times"""
         dataset = local.load_multiple_files(self.file_path, "value")
 
-        newTimes = datetime.datetime(2001, 01, 01), datetime.datetime(
-            2001, 02, 01), datetime.datetime(2001, 03, 01)
+        newTimes = datetime.datetime(2001, 1, 1), datetime.datetime(
+            2001, 2, 1), datetime.datetime(2001, 3, 1)
         self.assertItemsEqual(dataset[0].times, newTimes)
 
     def test_function_load_multiple_files_values(self):
@@ -199,8 +199,8 @@ class TestLoadDatasetFromMultipleNetcdfFiles(unittest.TestCase):
 
     def test_function_load_dataset_from_multiple_netcdf_files_times(self):
         """To test load_multiple_files function for times"""
-        newTimes = datetime.datetime(2001, 01, 01), datetime.datetime(
-            2001, 02, 01), datetime.datetime(2001, 03, 01)
+        newTimes = datetime.datetime(2001, 1, 1), datetime.datetime(
+            2001, 2, 1), datetime.datetime(2001, 3, 1)
         self.assertItemsEqual(self.dataset.times, newTimes)
 
     def test_function_load_dataset_from_multiple_netcdf_files_alt_lats(self):
@@ -215,8 +215,8 @@ class TestLoadDatasetFromMultipleNetcdfFiles(unittest.TestCase):
 
     def test_function_load_dataset_from_multiple_netcdf_files_alt_times(self):
         """To test load_multiple_files function for non-default times"""
-        newTimes = datetime.datetime(2001, 04, 01), datetime.datetime(
-            2001, 05, 01), datetime.datetime(2001, 06, 01)
+        newTimes = datetime.datetime(2001, 4, 1), datetime.datetime(
+            2001, 5, 1), datetime.datetime(2001, 6, 1)
         self.assertItemsEqual(self.alt_dataset.times, newTimes)
 
     def test_function_load_dataset_from_multiple_netcdf_files_values(self):

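The octal-literal fixes above let these test files parse under Python 3,
but the assertions still call assertItemsEqual, which Python 3's unittest
removed in favour of assertCountEqual, so the tests raise AttributeError on
3.x. One common shim, sketched with hypothetical data:

    import unittest

    class CompatTestCase(unittest.TestCase):
        # Python 3 removed assertItemsEqual; alias the renamed method so
        # test bodies written for 2.7 keep working unchanged.
        if not hasattr(unittest.TestCase, 'assertItemsEqual'):
            assertItemsEqual = unittest.TestCase.assertCountEqual

    class ExampleTest(CompatTestCase):
        def test_order_insensitive(self):
            self.assertItemsEqual([3, 1, 2], [1, 2, 3])  # hypothetical data

    if __name__ == '__main__':
        unittest.main()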