jjacob7734 closed pull request #18: SDAP-116 Fix bug in TimeAvgMapSpark that results in row of zeros for …
URL: https://github.com/apache/incubator-sdap-nexus/pull/18
 
 
   

This is a PR merged from a forked repository.
As GitHub hides the original diff on merge, it is displayed below for
the sake of provenance:


diff --git a/analysis/webservice/NexusHandler.py b/analysis/webservice/NexusHandler.py
index b51c77c..50caef6 100644
--- a/analysis/webservice/NexusHandler.py
+++ b/analysis/webservice/NexusHandler.py
@@ -384,6 +384,10 @@ def _find_global_tile_set(self):
                     self._maxLatCent = np.max(lats_agg)
                     self._minLonCent = np.min(lons_agg)
                     self._maxLonCent = np.max(lons_agg)
+                    self._nlats = int((self._maxLatCent - self._minLatCent) /
+                                      self._latRes + 0.5) + 1
+                    self._nlons = int((self._maxLonCent - self._minLonCent) /
+                                      self._lonRes + 0.5) + 1
             t -= t_incr
         return nexus_tiles
 
@@ -506,10 +510,10 @@ def _prune_tiles(nexus_tiles):
             del nexus_tiles[i]
 
     def _lat2ind(self, lat):
-        return int((lat - self._minLatCent) / self._latRes)
+        return int((lat - self._minLatCent) / self._latRes + 0.5)
 
     def _lon2ind(self, lon):
-        return int((lon - self._minLonCent) / self._lonRes)
+        return int((lon - self._minLonCent) / self._lonRes + 0.5)
 
     def _ind2lat(self, y):
         return self._minLatCent + y * self._latRes
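
For illustration only (not part of the PR): a minimal standalone sketch of why the old truncating index math in _lat2ind / _lon2ind could map a grid-cell center to the wrong row, while the round-half-up used in the change above does not. The grid values below are hypothetical.

    # Hypothetical grid: 0.1-degree resolution, first center latitude at 0.0
    lat_res = 0.1
    min_lat_cent = 0.0

    def lat2ind_old(lat):
        # Old behavior: plain truncation. 0.3 / 0.1 evaluates to
        # 2.9999999999999996 in floating point, so the center at 0.3 degrees
        # lands in row 2 instead of row 3.
        return int((lat - min_lat_cent) / lat_res)

    def lat2ind_new(lat):
        # Patched behavior: add 0.5 before truncating (round half up),
        # matching the new _lat2ind / _lon2ind above.
        return int((lat - min_lat_cent) / lat_res + 0.5)

    print(lat2ind_old(0.3))  # 2 -> row 3 never receives data (row of zeros)
    print(lat2ind_new(0.3))  # 3 -> correct row
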
diff --git a/analysis/webservice/algorithms_spark/ClimMapSpark.py b/analysis/webservice/algorithms_spark/ClimMapSpark.py
index 7a744f0..eb567f5 100644
--- a/analysis/webservice/algorithms_spark/ClimMapSpark.py
+++ b/analysis/webservice/algorithms_spark/ClimMapSpark.py
@@ -156,9 +156,7 @@ def calc(self, computeOptions, **args):
         #    print 'lats: ', tile.latitudes.compressed()
         #    print 'lons: ', tile.longitudes.compressed()
         self.log.debug('Using Native resolution: lat_res={0}, lon_res={1}'.format(self._latRes, self._lonRes))
-        nlats = int((self._maxLat - self._minLatCent) / self._latRes) + 1
-        nlons = int((self._maxLon - self._minLonCent) / self._lonRes) + 1
-        self.log.debug('nlats={0}, nlons={1}'.format(nlats, nlons))
+        self.log.debug('nlats={0}, nlons={1}'.format(self._nlats, self._nlons))
         self.log.debug('center lat range = {0} to {1}'.format(self._minLatCent,
                                                               self._maxLatCent))
         self.log.debug('center lon range = {0} to {1}'.format(self._minLonCent,
@@ -222,8 +220,8 @@ def calc(self, computeOptions, **args):
         #
         # The tiles below are NOT Nexus objects.  They are tuples
         # with the time avg map data and lat-lon bounding box.
-        a = np.zeros((nlats, nlons), dtype=np.float64, order='C')
-        n = np.zeros((nlats, nlons), dtype=np.uint32, order='C')
+        a = np.zeros((self._nlats, self._nlons), dtype=np.float64, order='C')
+        n = np.zeros((self._nlats, self._nlons), dtype=np.uint32, order='C')
         for tile in avg_tiles:
             if tile is not None:
                 ((tile_min_lat, tile_max_lat, tile_min_lon, tile_max_lon),
diff --git a/analysis/webservice/algorithms_spark/CorrMapSpark.py b/analysis/webservice/algorithms_spark/CorrMapSpark.py
index e74f835..c6b0c99 100644
--- a/analysis/webservice/algorithms_spark/CorrMapSpark.py
+++ b/analysis/webservice/algorithms_spark/CorrMapSpark.py
@@ -198,9 +198,7 @@ def calc(self, computeOptions, **args):
 
         self.log.debug('Found {0} tiles'.format(len(nexus_tiles)))
         self.log.debug('Using Native resolution: lat_res={0}, lon_res={1}'.format(self._latRes, self._lonRes))
-        nlats = int((self._maxLat - self._minLatCent) / self._latRes) + 1
-        nlons = int((self._maxLon - self._minLonCent) / self._lonRes) + 1
-        self.log.debug('nlats={0}, nlons={1}'.format(nlats, nlons))
+        self.log.debug('nlats={0}, nlons={1}'.format(self._nlats, self._nlons))
 
         # Create array of tuples to pass to Spark map function
         nexus_tiles_spark = [[self._find_tile_bounds(t),
@@ -282,8 +280,8 @@ def calc(self, computeOptions, **args):
                                        mask=~(n.astype(bool))),
                            n)).collect()
 
-        r = np.zeros((nlats, nlons), dtype=np.float64, order='C')
-        n = np.zeros((nlats, nlons), dtype=np.uint32, order='C')
+        r = np.zeros((self._nlats, self._nlons), dtype=np.float64, order='C')
+        n = np.zeros((self._nlats, self._nlons), dtype=np.uint32, order='C')
 
         # The tiles below are NOT Nexus objects.  They are tuples
         # with the following for each correlation map subset:
diff --git a/analysis/webservice/algorithms_spark/TimeAvgMapSpark.py b/analysis/webservice/algorithms_spark/TimeAvgMapSpark.py
index 3e5191b..19de786 100644
--- a/analysis/webservice/algorithms_spark/TimeAvgMapSpark.py
+++ b/analysis/webservice/algorithms_spark/TimeAvgMapSpark.py
@@ -146,9 +146,7 @@ def calc(self, computeOptions, **args):
         self.log.debug('Found {0} tiles'.format(len(nexus_tiles)))
 
         self.log.debug('Using Native resolution: lat_res={0}, lon_res={1}'.format(self._latRes, self._lonRes))
-        nlats = int((self._maxLat - self._minLatCent) / self._latRes) + 1
-        nlons = int((self._maxLon - self._minLonCent) / self._lonRes) + 1
-        self.log.debug('nlats={0}, nlons={1}'.format(nlats, nlons))
+        self.log.debug('nlats={0}, nlons={1}'.format(self._nlats, self._nlons))
         self.log.debug('center lat range = {0} to {1}'.format(self._minLatCent,
                                                               self._maxLatCent))
         self.log.debug('center lon range = {0} to {1}'.format(self._minLonCent,
@@ -213,8 +211,8 @@ def calc(self, computeOptions, **args):
         #
         # The tiles below are NOT Nexus objects.  They are tuples
         # with the time avg map data and lat-lon bounding box.
-        a = np.zeros((nlats, nlons), dtype=np.float64, order='C')
-        n = np.zeros((nlats, nlons), dtype=np.uint32, order='C')
+        a = np.zeros((self._nlats, self._nlons), dtype=np.float64, order='C')
+        n = np.zeros((self._nlats, self._nlons), dtype=np.uint32, order='C')
         for tile in avg_tiles:
             if tile is not None:
                 ((tile_min_lat, tile_max_lat, tile_min_lon, tile_max_lon),
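
Again for illustration only (not part of the PR): a hedged sketch of how computing self._nlats / self._nlons once in _find_global_tile_set, with the same round-half-up, keeps the accumulator shape consistent with _lat2ind / _ind2lat, so every row index produced by the index functions is in range and receives data. Grid values are hypothetical.

    import numpy as np

    # Hypothetical 0.1-degree grid of cell centers from -89.95 to 89.95 degrees
    lat_res = 0.1
    min_lat_cent, max_lat_cent = -89.95, 89.95

    # Same formula as the patched _find_global_tile_set
    nlats = int((max_lat_cent - min_lat_cent) / lat_res + 0.5) + 1   # 1800

    def lat2ind(lat):
        # Same formula as the patched _lat2ind
        return int((lat - min_lat_cent) / lat_res + 0.5)

    a = np.zeros(nlats, dtype=np.float64)

    # Every center latitude produced by _ind2lat maps back to its own row,
    # so no row of the accumulator is left as zeros.
    for i in range(nlats):
        lat = min_lat_cent + i * lat_res        # _ind2lat
        assert lat2ind(lat) == i
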


 

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
[email protected]


With regards,
Apache Git Services
