This is an automated email from the ASF dual-hosted git repository.

sebb pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/whimsy.git


The following commit(s) were added to refs/heads/master by this push:
     new 2bd89240 Better error handling and reporting
2bd89240 is described below

commit 2bd892406a893f2567968237993e6fb643c608ce
Author: Sebb <[email protected]>
AuthorDate: Mon Nov 25 17:31:01 2024 +0000

    Better error handling and reporting
---
 lib/whimsy/sitestandards.rb | 23 ++++++++++++-----------
 www/site_or_pod.rb          |  6 +++++-
 2 files changed, 17 insertions(+), 12 deletions(-)

diff --git a/lib/whimsy/sitestandards.rb b/lib/whimsy/sitestandards.rb
index e9b33173..f8913b98 100644
--- a/lib/whimsy/sitestandards.rb
+++ b/lib/whimsy/sitestandards.rb
@@ -191,21 +191,22 @@ module SiteStandards
   # @return [hash of site data, crawl_time]
   def get_sites(tlp = true)
     local_copy = File.expand_path("#{get_url(true)}#{get_filename(tlp)}", __FILE__)
-    if File.exist? local_copy
-      crawl_time = File.mtime(local_copy).httpdate # show time in same format as last-mod
-      begin
+    begin
+      if File.exist? local_copy
+        crawl_time = File.mtime(local_copy).httpdate # show time in same format as last-mod
         sites = JSON.parse(File.read(local_copy, :encoding => 'utf-8'))
-      rescue StandardError => e
+      else
         require 'wunderbar'
-        Wunderbar.warn "Failed to read #{local_copy}: #{e.inspect}"
-          sites = {} # TODO temporary fix
+        Wunderbar.warn "Failed to find local copy #{local_copy}"
+        local_copy = "#{get_url(false)}#{get_filename(tlp)}"
+        response = Net::HTTP.get_response(URI(local_copy))
+        crawl_time = response['last-modified']
+        sites = JSON.parse(response.body)
       end
-    else
+    rescue StandardError => e
       require 'wunderbar'
-      Wunderbar.warn "Failed to find #{local_copy}"
-      response = Net::HTTP.get_response(URI("#{get_url(false)}#{get_filename(tlp)}"))
-      crawl_time = response['last-modified']
-      sites = JSON.parse(response.body)
+      Wunderbar.warn "Failed to parse #{local_copy}: #{e.inspect} #{e.backtrace.join("\n\t")}"
+        sites = {}
     end
     return sites, crawl_time
   end
diff --git a/www/site_or_pod.rb b/www/site_or_pod.rb
index 3407f665..46481fc4 100644
--- a/www/site_or_pod.rb
+++ b/www/site_or_pod.rb
@@ -87,7 +87,11 @@ _html do
       }
     ) do
       # Encapsulate data display (same for projects and podlings)
-      display_application(path_info, sites, analysis, checks_performed, cgi_for_tlps?)
+      if sites.size > 0
+        display_application(path_info, sites, analysis, checks_performed, cgi_for_tlps?)        
+      else 
+        _h3 'Could not parse the site data. Please check the error log for details.'
+      end
     end
 
     _script %{

Reply via email to