Script 'mail_helper' called by obssrc
Hello community,

here is the log from the commit of package rpm2docserv for openSUSE:Factory 
checked in at 2022-11-25 14:56:16
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/rpm2docserv (Old)
 and      /work/SRC/openSUSE:Factory/.rpm2docserv.new.1597 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Package is "rpm2docserv"

Fri Nov 25 14:56:16 2022 rev:9 rq:1038209 version:20221125.c82d2b0

Changes:
--------
--- /work/SRC/openSUSE:Factory/rpm2docserv/rpm2docserv.changes  2022-11-25 13:23:07.847671962 +0100
+++ /work/SRC/openSUSE:Factory/.rpm2docserv.new.1597/rpm2docserv.changes        2022-11-25 14:56:17.972193958 +0100
@@ -1,0 +2,6 @@
+Fri Nov 25 13:13:25 UTC 2022 - ku...@suse.com
+
+- Update to version 20221125.c82d2b0:
+  * Create sitemap.xml for root directory, too
+
+-------------------------------------------------------------------

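For context, this update teaches docserv-sitemap to also index HTML files that
sit directly in the serving root, not only those below the per-suite
subdirectories. A minimal, self-contained sketch of that collection step follows;
the function name collectHTML and the path /srv/docserv are illustrative only,
not the exact upstream API:

package main

import (
	"fmt"
	"os"
	"path/filepath"
	"strings"
	"time"
)

// collectHTML is an illustrative stand-in for the upstream collectFiles():
// it records every *.html / *.html.gz file directly below dir, keyed by the
// URL path to put into the sitemap. An empty prefix means the files live in
// the serving root, which is the case this update adds.
func collectHTML(dir, prefix string, entries map[string]time.Time) error {
	fis, err := os.ReadDir(dir)
	if err != nil {
		return fmt.Errorf("cannot open %v: %v", dir, err)
	}
	for _, fi := range fis {
		name := fi.Name()
		// Skip subdirectories and previously generated sitemap*.xml.gz files.
		if fi.IsDir() || (strings.HasPrefix(name, "sitemap") && strings.HasSuffix(name, ".xml.gz")) {
			continue
		}
		n := strings.TrimSuffix(name, ".gz")
		if filepath.Ext(n) != ".html" {
			continue
		}
		info, err := fi.Info()
		if err != nil {
			continue
		}
		entries[prefix+n] = info.ModTime()
	}
	return nil
}

func main() {
	entries := make(map[string]time.Time)
	// Root directory: empty prefix, mirroring the new fp handling in the patch.
	if err := collectHTML("/srv/docserv", "", entries); err != nil {
		fmt.Fprintln(os.Stderr, err)
	}
	fmt.Println(len(entries), "root entries")
}
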
Old:
----
  rpm2docserv-20221125.be8d83b.tar.xz

New:
----
  rpm2docserv-20221125.c82d2b0.tar.xz

++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Other differences:
------------------
++++++ rpm2docserv.spec ++++++
--- /var/tmp/diff_new_pack.Lr25Yb/_old  2022-11-25 14:56:18.772198168 +0100
+++ /var/tmp/diff_new_pack.Lr25Yb/_new  2022-11-25 14:56:18.784198231 +0100
@@ -17,7 +17,7 @@
 
 
 Name:           rpm2docserv
-Version:        20221125.be8d83b
+Version:        20221125.c82d2b0
 Release:        0
 Summary:        Make manpages from RPMs accessible in a web browser
 License:        Apache-2.0

++++++ _servicedata ++++++
--- /var/tmp/diff_new_pack.Lr25Yb/_old  2022-11-25 14:56:18.860198631 +0100
+++ /var/tmp/diff_new_pack.Lr25Yb/_new  2022-11-25 14:56:18.860198631 +0100
@@ -1,7 +1,7 @@
 <servicedata>
   <service name="tar_scm">
     <param name="url">https://github.com/thkukuk/rpm2docserv.git</param>
-  <param name="changesrevision">be8d83b7cad07b59db5563cc3cc29e392dd45e2d</param></service>
+  <param name="changesrevision">c82d2b05eb35ed36ea189bace117a34aa199bb9e</param></service>
 </servicedata>
 (No newline at EOF)
 

++++++ rpm2docserv-20221125.be8d83b.tar.xz -> rpm2docserv-20221125.c82d2b0.tar.xz ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/rpm2docserv-20221125.be8d83b/cmd/docserv-sitemap/sitemap.go new/rpm2docserv-20221125.c82d2b0/cmd/docserv-sitemap/sitemap.go
--- old/rpm2docserv-20221125.be8d83b/cmd/docserv-sitemap/sitemap.go     2022-11-25 09:47:26.000000000 +0100
+++ new/rpm2docserv-20221125.c82d2b0/cmd/docserv-sitemap/sitemap.go     2022-11-25 14:12:46.000000000 +0100
@@ -116,7 +116,16 @@
 
 func collectFiles(basedir string, dir string, sitemapEntries map[string]time.Time) error {
 
-       fn := filepath.Join(basedir, dir)
+       var fn string
+       var fp string // prefix with directory and "/" if dir is not empty
+
+       if len(dir) > 0 {
+               fn = filepath.Join(basedir, dir)
+               fp = dir + "/"
+       } else {
+               fn = basedir
+               fp = ""
+       }
        entries, err := ioutil.ReadDir (fn)
        if err != nil {
                return fmt.Errorf("Cannot open %v: %v", fn, err)
@@ -124,28 +133,101 @@
 
        for _, bfn := range entries {
                if bfn.IsDir() ||
-                       bfn.Name() == "sitemap.xml.gz" {
+                       (strings.HasPrefix(bfn.Name(), "sitemap") &&
+                       strings.HasSuffix(bfn.Name(), ".xml.gz")) {
                        continue
                }
 
                n := strings.TrimSuffix(bfn.Name(), ".gz")
 
                if filepath.Ext(n) == ".html" && !bfn.ModTime().IsZero() {
-                       sitemapEntries[dir + "/" + n] = bfn.ModTime()
+                       sitemapEntries[fp + n] = bfn.ModTime()
                }
        }
        return nil
 }
 
+func writeSitemap(basedir string, suite string, baseUrl string,
+                 sitemapEntries map[string]time.Time, sitemaps map[string]time.Time) error {
+
+       escapedUrlPath := &url.URL{Path: suite}
+       if *verbose {
+               log.Printf("Found %d entries for %s/%s", len(sitemapEntries), basedir, escapedUrlPath)
+       }
+
+       // Split sitemapEntries in smaller chunks
+       // Google has a limit of 50.000 entries per file
+       count := 0
+       chunkSize := 45000
+       batchKeys := make([]string, 0, chunkSize)
+       saveChunks := func() error {
+               chunk := make(map[string]time.Time, len(batchKeys))
+               for _, v := range batchKeys {
+                       chunk[v] = sitemapEntries[v]
+               }
+               batchKeys = batchKeys[:0]
+
+               sitemapPath := filepath.Join(basedir, suite, "sitemap" + strconv.Itoa(count) + ".xml.gz")
+               if *verbose {
+                       log.Printf("Writing %d entries to %s", len(chunk), sitemapPath)
+               }
+
+               urlPrefix := baseUrl
+               if len(escapedUrlPath.String()) > 0 {
+                       urlPrefix = urlPrefix + "/" + escapedUrlPath.String()
+               }
+               if err := write.Atomically(sitemapPath, true, func(w io.Writer) error {
+                       return sitemap.WriteTo(w, urlPrefix, chunk)
+               }); err != nil {
+                       return fmt.Errorf("Write sitemap for %v failed: %v", suite, err)
+               }
+               st, err := os.Stat(sitemapPath)
+               if err == nil {
+                       sitemaps[escapedUrlPath.String() + "/sitemap" + strconv.Itoa(count) + ".xml"] = st.ModTime()
+               }
+               count++
+
+               return nil
+       }
+
+       for k := range sitemapEntries {
+               batchKeys = append(batchKeys, k)
+               if len(batchKeys) == chunkSize {
+                       err := saveChunks()
+                       if err != nil {
+                               return err
+                       }
+               }
+       }
+       // Process last, potentially incomplete batch
+       if len(batchKeys) > 0 {
+               err := saveChunks()
+               if err != nil {
+                       return err
+               }
+       }
+
+       return nil
+}
+
 func walkDirs(dir string, baseURL string) error {
        sitemaps := make(map[string]time.Time)
 
+       /* Collect files in root directory */
+       sitemapRootEntries := make(map[string]time.Time, 10)
+       collectFiles(dir, "", sitemapRootEntries)
+       err := writeSitemap(dir, "", baseURL, sitemapRootEntries, sitemaps)
+       if err != nil {
+               return err
+       }
+
        suitedirs, err := ioutil.ReadDir(dir)
        if err != nil {
                return fmt.Errorf("Reading %v failed: %v", dir, err)
        }
        for _, sfi := range suitedirs {
                if !sfi.IsDir() {
+
                        continue
                }
 
@@ -153,9 +235,9 @@
                        log.Printf("Searching in \"%v\"...", sfi.Name())
                }
 
-               // openSUSE Tumbleweed has ~11000 package entries, 120000 should
+               // openSUSE Tumbleweed has ~11000 package entries, 140000 should
                // be good enough as start
-               sitemapEntries := make(map[string]time.Time, 120000)
+               sitemapEntries := make(map[string]time.Time, 140000)
 
                fn := filepath.Join(*servingDir, sfi.Name())
                entrydirs, err := ioutil.ReadDir (fn)
@@ -164,7 +246,9 @@
                }
 
                for _, bfn := range entrydirs {
-                       if bfn.Name() == "sitemap.xml.gz" {
+                       // Ignore all sitemap*.xml.gz files
+                       if strings.HasPrefix(bfn.Name(), "sitemap") &&
+                               strings.HasSuffix(bfn.Name(), ".xml.gz") {
                                continue
                        }
 
@@ -178,58 +262,11 @@
 
                }
 
-
-               escapedUrlPath := &url.URL{Path: sfi.Name()}
-               if *verbose {
-                       log.Printf("Writing %d entries to %s/%s", len(sitemapEntries), dir, escapedUrlPath)
-               }
-
-               // Split sitemapEntries in smaller chunks
-               // Google has a limit of 50.000 entries per file
-               count := 0
-               chunkSize := 45000
-               batchKeys := make([]string, 0, chunkSize)
-               saveChunks := func() error {
-                       chunk := make(map[string]time.Time, len(batchKeys))
-                       for _, v := range batchKeys {
-                               chunk[v] = sitemapEntries[v]
-                       }
-                       batchKeys = batchKeys[:0]
-
-                       sitemapPath := filepath.Join(dir, sfi.Name(), "sitemap" + strconv.Itoa(count) + ".xml.gz")
-                       if *verbose {
-                               log.Printf("Writing %d entries to %s", len(chunk), sitemapPath)
-                       }
-                       if err := write.Atomically(sitemapPath, true, func(w io.Writer) error {
-                               return sitemap.WriteTo(w, baseURL+"/" + escapedUrlPath.String(), chunk)
-                       }); err != nil {
-                               return fmt.Errorf("Write sitemap for %v failed: %v", sfi.Name(), err)
-                       }
-                       st, err := os.Stat(sitemapPath)
-                       if err == nil {
-                               sitemaps[escapedUrlPath.String() + "/sitemap" + strconv.Itoa(count) + ".xml"] = st.ModTime()
-                       }
-                       count++
-
-                       return nil
+               err = writeSitemap(dir, sfi.Name(), baseURL, sitemapEntries, sitemaps)
+               if err != nil {
+                       return err
                }
 
-               for k := range sitemapEntries {
-                       batchKeys = append(batchKeys, k)
-                       if len(batchKeys) == chunkSize {
-                               err = saveChunks()
-                               if err != nil {
-                                       return err
-                               }
-                       }
-               }
-               // Process last, potentially incomplete batch
-               if len(batchKeys) > 0 {
-                       err = saveChunks()
-                       if err != nil {
-                               return err
-                       }
-               }
        }
 
        if *verbose {

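The refactoring above moves the chunk handling into the new writeSitemap():
entries are flushed in batches of 45,000 keys because a single sitemap file may
not exceed 50,000 URLs. A small sketch of that batching pattern is given below,
with a placeholder writer callback standing in for the real atomic
sitemapN.xml.gz writer; the name splitIntoChunks is illustrative, not the
upstream API:

package main

import (
	"fmt"
	"time"
)

// splitIntoChunks shows the batching pattern used by writeSitemap() in the
// patch: iterate over the entry map, flush every chunkSize keys, and flush
// the final, possibly incomplete batch at the end.
func splitIntoChunks(entries map[string]time.Time, chunkSize int,
	write func(n int, chunk map[string]time.Time) error) error {

	count := 0
	batch := make([]string, 0, chunkSize)
	flush := func() error {
		chunk := make(map[string]time.Time, len(batch))
		for _, k := range batch {
			chunk[k] = entries[k]
		}
		batch = batch[:0]
		if err := write(count, chunk); err != nil {
			return err
		}
		count++
		return nil
	}

	for k := range entries {
		batch = append(batch, k)
		if len(batch) == chunkSize {
			if err := flush(); err != nil {
				return err
			}
		}
	}
	// Last, possibly incomplete batch.
	if len(batch) > 0 {
		return flush()
	}
	return nil
}

func main() {
	entries := map[string]time.Time{"a.html": time.Now(), "b.html": time.Now(), "c.html": time.Now()}
	_ = splitIntoChunks(entries, 2, func(n int, chunk map[string]time.Time) error {
		fmt.Printf("sitemap%d.xml.gz: %d entries\n", n, len(chunk))
		return nil
	})
}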