Hello community,

Here is the log from the commit of package rubygem-fluentd for openSUSE:Factory 
checked in at 2018-10-25 08:20:36
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/rubygem-fluentd (Old)
 and      /work/SRC/openSUSE:Factory/.rubygem-fluentd.new (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Package is "rubygem-fluentd"

Thu Oct 25 08:20:36 2018 rev:8 rq:643120 version:1.2.5

Changes:
--------
--- /work/SRC/openSUSE:Factory/rubygem-fluentd/rubygem-fluentd.changes  
2018-07-18 22:53:49.554983587 +0200
+++ /work/SRC/openSUSE:Factory/.rubygem-fluentd.new/rubygem-fluentd.changes     
2018-10-25 08:20:59.691947683 +0200
@@ -1,0 +2,25 @@
+Wed Sep  5 10:12:02 UTC 2018 - co...@suse.com
+
+- updated to version 1.2.5
+ see installed CHANGELOG.md
+
+  ## Release v1.2.5 - 2018/08/22
+  
+  ### Bug fixes
+  
+  * in_tail: Fix resource leak by file rotation
+    https://github.com/fluent/fluentd/pull/2105
+  * fix typos
+  
+  ## Release v1.2.4 - 2018/08/01
+  
+  ### Bug fixes
+  
+  * output: Consider timezone when calculate timekey
+    https://github.com/fluent/fluentd/pull/2054
+  * output: Fix bug in suppress_emit_error_log_interval
+    https://github.com/fluent/fluentd/pull/2069
+  * server-helper: Fix connection leak by close timing issue.
+    https://github.com/fluent/fluentd/pull/2087
+
+-------------------------------------------------------------------

Old:
----
  fluentd-1.2.3.gem

New:
----
  fluentd-1.2.5.gem

++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Other differences:
------------------
++++++ rubygem-fluentd.spec ++++++
--- /var/tmp/diff_new_pack.0Ftha3/_old  2018-10-25 08:21:00.195947458 +0200
+++ /var/tmp/diff_new_pack.0Ftha3/_new  2018-10-25 08:21:00.199947456 +0200
@@ -24,7 +24,7 @@
 #
 
 Name:           rubygem-fluentd
-Version:        1.2.3
+Version:        1.2.5
 Release:        0
 %define mod_name fluentd
 %define mod_full_name %{mod_name}-%{version}

++++++ fluentd-1.2.3.gem -> fluentd-1.2.5.gem ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/CHANGELOG.md new/CHANGELOG.md
--- old/CHANGELOG.md    2018-07-11 04:57:37.000000000 +0200
+++ new/CHANGELOG.md    2018-08-23 03:04:46.000000000 +0200
@@ -1,5 +1,24 @@
 # v1.2
 
+## Release v1.2.5 - 2018/08/22
+
+### Bug fixes
+
+* in_tail: Fix resource leak by file rotation
+  https://github.com/fluent/fluentd/pull/2105
+* fix typos
+
+## Release v1.2.4 - 2018/08/01
+
+### Bug fixes
+
+* output: Consider timezone when calculate timekey
+  https://github.com/fluent/fluentd/pull/2054
+* output: Fix bug in suppress_emit_error_log_interval
+  https://github.com/fluent/fluentd/pull/2069
+* server-helper: Fix connection leak by close timing issue.
+  https://github.com/fluent/fluentd/pull/2087
+
 ## Release v1.2.3 - 2018/07/10
 
 ### Enhancements
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/GOVERNANCE.md new/GOVERNANCE.md
--- old/GOVERNANCE.md   2018-07-11 04:57:37.000000000 +0200
+++ new/GOVERNANCE.md   2018-08-23 03:04:46.000000000 +0200
@@ -35,10 +35,10 @@
 
 ## Projects
 
-The fluent organization is open to receive new sub-projects under it umbrella. 
To apply a project as part of the __fluent__ organization, it have to met the 
following criteria:
+The fluent organization is open to receive new sub-projects under it umbrella. 
To apply a project as part of the __fluent__ organization, it has to met the 
following criteria:
 
 - Licensed under the terms of the Apache License v2.0
-- Project have been active for at least one year since it inception
+- Project has been active for at least one year since it inception
 - More than 2 contributors
 - Related to one or more scopes of Fluentd ecosystem:
   - Data collection
Binary files old/checksums.yaml.gz and new/checksums.yaml.gz differ
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/lib/fluent/agent.rb new/lib/fluent/agent.rb
--- old/lib/fluent/agent.rb     2018-07-11 04:57:37.000000000 +0200
+++ new/lib/fluent/agent.rb     2018-08-23 03:04:46.000000000 +0200
@@ -41,7 +41,7 @@
       # from plugins which DOES emit, then DOESN'T emit
       # (input -> output w/ router -> filter -> output w/o router)
       # for start: use this order DESC
-      #   (because plugins which appears later in configurations will receive 
events from plugins which appears ealier)
+      #   (because plugins which appears later in configurations will receive 
events from plugins which appears earlier)
       # for stop/before_shutdown/shutdown/after_shutdown/close/terminate: use 
this order ASC
       @lifecycle_cache = nil
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/lib/fluent/command/fluentd.rb 
new/lib/fluent/command/fluentd.rb
--- old/lib/fluent/command/fluentd.rb   2018-07-11 04:57:37.000000000 +0200
+++ new/lib/fluent/command/fluentd.rb   2018-08-23 03:04:46.000000000 +0200
@@ -177,7 +177,7 @@
     opts[:regwinsvcautostart] = s
   }
 
-  op.on('--reg-winsvc-fluentdopt OPTION', "specify fluentd option paramters 
for Windows Service. (Windows only)") {|s|
+  op.on('--reg-winsvc-fluentdopt OPTION', "specify fluentd option parameters 
for Windows Service. (Windows only)") {|s|
     opts[:fluentdopt] = s
   }
   
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/lib/fluent/config/literal_parser.rb 
new/lib/fluent/config/literal_parser.rb
--- old/lib/fluent/config/literal_parser.rb     2018-07-11 04:57:37.000000000 
+0200
+++ new/lib/fluent/config/literal_parser.rb     2018-08-23 03:04:46.000000000 
+0200
@@ -198,7 +198,7 @@
 
       def scan_json(is_array)
         result = nil
-        # Yajl does not raise ParseError for imcomplete json string, like 
'[1', '{"h"', '{"h":' or '{"h1":1'
+        # Yajl does not raise ParseError for incomplete json string, like 
'[1', '{"h"', '{"h":' or '{"h1":1'
         # This is the reason to use JSON module.
 
         buffer = (is_array ? "[" : "{")
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/lib/fluent/event_router.rb 
new/lib/fluent/event_router.rb
--- old/lib/fluent/event_router.rb      2018-07-11 04:57:37.000000000 +0200
+++ new/lib/fluent/event_router.rb      2018-08-23 03:04:46.000000000 +0200
@@ -217,7 +217,7 @@
           @optimizable = if fs_filters.empty?
                            true
                          else
-                           # skip log message when filter is only 1, because 
its performace is same as non optimized chain.
+                           # skip log message when filter is only 1, because 
its performance is same as non optimized chain.
                            if @filters.size > 1 && fs_filters.size >= 1
                              $log.info "disable filter chain optimization 
because #{fs_filters.map(&:class)} uses `#filter_stream` method."
                            end
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/lib/fluent/plugin/in_tail.rb 
new/lib/fluent/plugin/in_tail.rb
--- old/lib/fluent/plugin/in_tail.rb    2018-07-11 04:57:37.000000000 +0200
+++ new/lib/fluent/plugin/in_tail.rb    2018-08-23 03:04:47.000000000 +0200
@@ -266,13 +266,16 @@
       line_buffer_timer_flusher = (@multiline_mode && 
@multiline_flush_interval) ? TailWatcher::LineBufferTimerFlusher.new(log, 
@multiline_flush_interval, &method(:flush_buffer)) : nil
       tw = TailWatcher.new(path, @rotate_wait, pe, log, @read_from_head, 
@enable_watch_timer, @enable_stat_watcher, @read_lines_limit, 
method(:update_watcher), line_buffer_timer_flusher, @from_encoding, @encoding, 
open_on_every_update, &method(:receive_lines))
       tw.attach do |watcher|
-        watcher.timer_trigger = timer_execute(:in_tail_timer_trigger, 1, 
&watcher.method(:on_notify)) if watcher.enable_watch_timer
-        event_loop_attach(watcher.stat_trigger) if watcher.enable_stat_watcher
+        event_loop_attach(watcher.timer_trigger) if watcher.timer_trigger
+        event_loop_attach(watcher.stat_trigger) if watcher.stat_trigger
       end
       tw
     rescue => e
       if tw
-        tw.detach
+        tw.detach { |watcher|
+          event_loop_detach(watcher.timer_trigger) if watcher.timer_trigger
+          event_loop_detach(watcher.stat_trigger) if watcher.stat_trigger
+        }
         tw.close
       end
       raise e
@@ -343,7 +346,10 @@
     # so adding close_io argument to avoid this problem.
     # At shutdown, IOHandler's io will be released automatically after 
detached the event loop
     def detach_watcher(tw, close_io = true)
-      tw.detach
+      tw.detach { |watcher|
+        event_loop_detach(watcher.timer_trigger) if watcher.timer_trigger
+        event_loop_detach(watcher.stat_trigger) if watcher.stat_trigger
+      }
       tw.close if close_io
       flush_buffer(tw)
       if tw.unwatched && @pf
@@ -352,6 +358,8 @@
     end
 
     def detach_watcher_after_rotate_wait(tw)
+      # Call event_loop_attach/event_loop_detach is high-cost for short-live 
object.
+      # If this has a problem with large number of files, use @_event_loop 
directly instead of timer_execute.
       timer_execute(:in_tail_close_watcher, @rotate_wait, repeat: false) do
         detach_watcher(tw)
       end
@@ -479,7 +487,7 @@
         @update_watcher = update_watcher
 
         @stat_trigger = @enable_stat_watcher ? StatWatcher.new(self, 
&method(:on_notify)) : nil
-        @timer_trigger = nil
+        @timer_trigger = @enable_watch_timer ? TimerTrigger.new(1, log, 
&method(:on_notify)) : nil
 
         @rotate_handler = RotateHandler.new(self, &method(:on_rotate))
         @io_handler = nil
@@ -513,8 +521,7 @@
       end
 
       def detach
-        @timer_trigger.detach if @enable_watch_timer && @timer_trigger && 
@timer_trigger.attached?
-        @stat_trigger.detach if @enable_stat_watcher && @stat_trigger && 
@stat_trigger.attached?
+        yield self
         @io_handler.on_notify if @io_handler
       end
 
@@ -613,6 +620,21 @@
         pe # This pe will be updated in on_rotate after TailWatcher is 
initialized
       end
 
+      class TimerTrigger < Coolio::TimerWatcher
+        def initialize(interval, log, &callback)
+          @callback = callback
+          @log = log
+          super(interval, true)
+        end
+
+        def on_timer
+          @callback.call
+        rescue => e
+          @log.error e.to_s
+          @log.error_backtrace
+        end
+      end
+
       class StatWatcher < Coolio::StatWatcher
         def initialize(watcher, &callback)
           @watcher = watcher
@@ -629,7 +651,6 @@
         end
       end
 
-
       class FIFO
         def initialize(from_encoding, encoding)
           @from_encoding = from_encoding
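
The in_tail change above (https://github.com/fluent/fluentd/pull/2105) reworks the watcher
lifecycle so that TailWatcher#detach only yields itself, and the caller that attached the
timer/stat triggers to the event loop is also the one that detaches them on rotation. A
minimal, self-contained sketch of that pattern, using stand-in classes rather than
fluentd's real ones:

class FakeEventLoop
  def initialize
    @attached = []
  end

  def attach(trigger)
    @attached << trigger
  end

  def detach(trigger)
    @attached.delete(trigger)
  end

  def attached_count
    @attached.size
  end
end

class FakeTailWatcher
  attr_reader :timer_trigger, :stat_trigger

  def initialize(timer_trigger, stat_trigger)
    @timer_trigger = timer_trigger
    @stat_trigger  = stat_trigger
  end

  # As in the patched in_tail, detach only yields; the owner of the event
  # loop decides which triggers to remove.
  def detach
    yield self
  end
end

event_loop = FakeEventLoop.new
tw = FakeTailWatcher.new(:timer, :stat)
event_loop.attach(tw.timer_trigger)
event_loop.attach(tw.stat_trigger)

# On rotation the caller detaches exactly what it attached, so no trigger is
# left registered in the event loop once the watcher is closed.
tw.detach do |watcher|
  event_loop.detach(watcher.timer_trigger) if watcher.timer_trigger
  event_loop.detach(watcher.stat_trigger)  if watcher.stat_trigger
end

puts event_loop.attached_count   # => 0
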
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/lib/fluent/plugin/out_forward.rb 
new/lib/fluent/plugin/out_forward.rb
--- old/lib/fluent/plugin/out_forward.rb        2018-07-11 04:57:37.000000000 
+0200
+++ new/lib/fluent/plugin/out_forward.rb        2018-08-23 03:04:47.000000000 
+0200
@@ -442,7 +442,7 @@
         res = unpacker.read
         log.trace "getting response from destination", host: info.node.host, 
port: info.node.port, chunk_id: dump_unique_id_hex(info.chunk_id), response: res
         if res['ack'] != info.chunk_id_base64
-          # Some errors may have occured when ack and chunk id is different, 
so send the chunk again.
+          # Some errors may have occurred when ack and chunk id is different, 
so send the chunk again.
           log.warn "ack in response and chunk id in sent data are different", 
chunk_id: dump_unique_id_hex(info.chunk_id), ack: res['ack']
           rollback_write(info.chunk_id, update_retry: false)
           return nil
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/lib/fluent/plugin/output.rb 
new/lib/fluent/plugin/output.rb
--- old/lib/fluent/plugin/output.rb     2018-07-11 04:57:37.000000000 +0200
+++ new/lib/fluent/plugin/output.rb     2018-08-23 03:04:47.000000000 +0200
@@ -92,8 +92,8 @@
         # k: times
         # total retry time: c + c * b^1 + (...) + c*b^k = c*b^(k+1) - 1
         config_param :retry_wait, :time, default: 1, desc: 'Seconds to wait 
before next retry to flush, or constant factor of exponential backoff.'
-        config_param :retry_exponential_backoff_base, :float, default: 2, 
desc: 'The base number of exponencial backoff for retries.'
-        config_param :retry_max_interval, :time, default: nil, desc: 'The 
maximum interval seconds for exponencial backoff between retries while failing.'
+        config_param :retry_exponential_backoff_base, :float, default: 2, 
desc: 'The base number of exponential backoff for retries.'
+        config_param :retry_max_interval, :time, default: nil, desc: 'The 
maximum interval seconds for exponential backoff between retries while failing.'
 
         config_param :retry_randomize, :bool, default: true, desc: 'If true, 
output plugin will retry after randomized interval not to do burst retries.'
       end
@@ -304,6 +304,10 @@
             raise Fluent::ConfigError, "<buffer ...> argument includes 'time', 
but timekey is not configured" unless @buffer_config.timekey
             Fluent::Timezone.validate!(@buffer_config.timekey_zone)
             @timekey_zone = @buffer_config.timekey_use_utc ? '+0000' : 
@buffer_config.timekey_zone
+            @timekey = @buffer_config.timekey
+            @timekey_use_utc = @buffer_config.timekey_use_utc
+            @offset = Fluent::Timezone.utc_offset(@timekey_zone)
+            @calculate_offset = @offset.respond_to?(:call) ? @offset : nil
             @output_time_formatter_cache = {}
           end
 
@@ -803,20 +807,17 @@
           if !@chunk_key_time && !@chunk_key_tag
             @buffer.metadata()
           elsif @chunk_key_time && @chunk_key_tag
-            time_int = time.to_i
-            timekey = (time_int - (time_int % @buffer_config.timekey)).to_i
+            timekey = calculate_timekey(time)
             @buffer.metadata(timekey: timekey, tag: tag)
           elsif @chunk_key_time
-            time_int = time.to_i
-            timekey = (time_int - (time_int % @buffer_config.timekey)).to_i
+            timekey = calculate_timekey(time)
             @buffer.metadata(timekey: timekey)
           else
             @buffer.metadata(tag: tag)
           end
         else
           timekey = if @chunk_key_time
-                      time_int = time.to_i
-                      (time_int - (time_int % @buffer_config.timekey)).to_i
+                      calculate_timekey(time)
                     else
                       nil
                     end
@@ -825,6 +826,16 @@
         end
       end
 
+      def calculate_timekey(time)
+        time_int = time.to_i
+        if @timekey_use_utc
+          (time_int - (time_int % @timekey)).to_i
+        else
+          offset = @calculate_offset ? @calculate_offset.call(time) : @offset
+          (time_int - ((time_int + offset)% @timekey)).to_i
+        end
+      end
+
       def chunk_for_test(tag, time, record)
         require 'fluent/plugin/buffer/memory_chunk'
 
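
The calculate_timekey change above (https://github.com/fluent/fluentd/pull/2054) makes the
chunk timekey respect the configured timezone instead of always cutting buckets at UTC
boundaries. A standalone sketch of the arithmetic, simplified to a fixed numeric offset
(the real method can also take a callable offset for named zones):

require 'time'

# Bucket start = event time minus the seconds elapsed since the last
# local-time boundary of size `timekey`.
def calculate_timekey(time_int, timekey, offset)
  time_int - ((time_int + offset) % timekey)
end

timekey = 86_400                                   # daily buckets
t = Time.parse('2011-01-02 17:14:15 +02:00').to_i  # 15:14:15 UTC

utc_bucket = t - (t % timekey)                     # old, UTC-only behaviour
jst_bucket = calculate_timekey(t, timekey, 9 * 3600)

puts Time.at(utc_bucket).utc  # 2011-01-02 00:00:00 UTC
puts Time.at(jst_bucket).utc  # 2011-01-02 15:00:00 UTC, i.e. 2011-01-03 00:00 +0900

With a +0900 timekey_zone the event lands in the 2011-01-03 local-day bucket, which is what
the new out_file test further down expects for the rotated file name.
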
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/lib/fluent/plugin_helper/server.rb 
new/lib/fluent/plugin_helper/server.rb
--- old/lib/fluent/plugin_helper/server.rb      2018-07-11 04:57:37.000000000 
+0200
+++ new/lib/fluent/plugin_helper/server.rb      2018-08-23 03:04:47.000000000 
+0200
@@ -213,8 +213,10 @@
         socket_option_setter.call(sock)
         close_callback = ->(conn){ @_server_mutex.synchronize{ 
@_server_connections.delete(conn) } }
         server = Coolio::TCPServer.new(sock, nil, EventHandler::TCPServer, 
socket_option_setter, close_callback, @log, @under_plugin_development, block) 
do |conn|
-          @_server_mutex.synchronize do
-            @_server_connections << conn
+          unless conn.closing
+            @_server_mutex.synchronize do
+              @_server_connections << conn
+            end
           end
         end
         server.listen(backlog) if backlog
@@ -227,8 +229,10 @@
         socket_option_setter.call(sock)
         close_callback = ->(conn){ @_server_mutex.synchronize{ 
@_server_connections.delete(conn) } }
         server = Coolio::TCPServer.new(sock, nil, EventHandler::TLSServer, 
context, socket_option_setter, close_callback, @log, @under_plugin_development, 
block) do |conn|
-          @_server_mutex.synchronize do
-            @_server_connections << conn
+          unless conn.closing
+            @_server_mutex.synchronize do
+              @_server_connections << conn
+            end
           end
         end
         server.listen(backlog) if backlog
@@ -538,6 +542,8 @@
         end
 
         class TCPServer < Coolio::TCPSocket
+          attr_reader :closing
+
           def initialize(sock, socket_option_setter, close_callback, log, 
under_plugin_development, connect_callback)
             raise ArgumentError, "socket must be a TCPSocket: sock=#{sock}" 
unless sock.is_a?(TCPSocket)
 
@@ -594,7 +600,7 @@
           rescue => e
             @log.error "unexpected error on reading data", host: 
@callback_connection.remote_host, port: @callback_connection.remote_port, 
error: e
             @log.error_backtrace
-            close(true) rescue nil
+            close rescue nil
             raise if @under_plugin_development
           end
 
@@ -603,7 +609,7 @@
           rescue => e
             @log.error "unexpected error on reading data", host: 
@callback_connection.remote_host, port: @callback_connection.remote_port, 
error: e
             @log.error_backtrace
-            close(true) rescue nil
+            close rescue nil
             raise if @under_plugin_development
           end
 
@@ -618,6 +624,8 @@
         end
 
         class TLSServer < Coolio::Socket
+          attr_reader :closing
+
           # It can't use Coolio::TCPSocket, because Coolio::TCPSocket checks 
that underlying socket (1st argument of super) is TCPSocket.
           def initialize(sock, context, socket_option_setter, close_callback, 
log, under_plugin_development, connect_callback)
             raise ArgumentError, "socket must be a TCPSocket: sock=#{sock}" 
unless sock.is_a?(TCPSocket)
@@ -748,7 +756,7 @@
           rescue => e
             @log.error "unexpected error on reading data", host: 
@callback_connection.remote_host, port: @callback_connection.remote_port, 
error: e
             @log.error_backtrace
-            close(true) rescue nil
+            close rescue nil
             raise if @under_plugin_development
           end
 
@@ -757,7 +765,7 @@
           rescue => e
             @log.error "unexpected error on reading data", host: 
@callback_connection.remote_host, port: @callback_connection.remote_port, 
error: e
             @log.error_backtrace
-            close(true) rescue nil
+            close rescue nil
             raise if @under_plugin_development
           end
 
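
The server-helper change above (https://github.com/fluent/fluentd/pull/2087) exposes a
`closing` flag and skips registering connections that were already closed inside the
connect callback, so they can no longer linger in the tracked connection list. A small
sketch of that guard, again with stand-in classes:

require 'set'

class FakeConn
  attr_reader :closing

  def initialize
    @closing = false
  end

  def close
    @closing = true
  end
end

connections = Set.new
register = lambda do |conn|
  # The guard added by the patch: never track a connection that is
  # already shutting down.
  connections << conn unless conn.closing
end

live = FakeConn.new
register.call(live)

rejected = FakeConn.new
rejected.close          # e.g. closed by the plugin's connect callback
register.call(rejected)

puts connections.size   # => 1, only the live connection is tracked
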
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/lib/fluent/root_agent.rb new/lib/fluent/root_agent.rb
--- old/lib/fluent/root_agent.rb        2018-07-11 04:57:37.000000000 +0200
+++ new/lib/fluent/root_agent.rb        2018-08-23 03:04:47.000000000 +0200
@@ -318,7 +318,7 @@
         log.warn "send an error event stream to @ERROR:", error_info
         @error_collector.emit_stream(tag, es)
       else
-        now = Time.now
+        now = Time.now.to_i
         if @suppress_emit_error_log_interval.zero? || now > 
@next_emit_error_log_time
           log.warn "emit transaction failed:", error_info
           log.warn_backtrace
@@ -347,7 +347,7 @@
       end
 
       def handle_emits_error(tag, es, e)
-        now = EventTime.now
+        now = EventTime.now.to_i
         if @suppress_emit_error_log_interval.zero? || now > 
@next_emit_error_log_time
           log.warn "emit transaction failed in @ERROR:", error: e, tag: tag
           log.warn_backtrace
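
The root_agent change above (the suppress_emit_error_log_interval fix noted in the
changelog) standardises the comparison on plain integer epoch seconds. A minimal sketch of
that throttling logic, with illustrative names rather than fluentd's API:

class ErrorLogSuppressor
  def initialize(interval)
    @interval = interval
    @next_time = 0
  end

  # Returns true when a warning should actually be written; repeated errors
  # within the interval are suppressed.
  def emit?(now = Time.now.to_i)
    return false unless now > @next_time
    @next_time = now + @interval
    true
  end
end

s = ErrorLogSuppressor.new(30)
t = Time.now.to_i
puts s.emit?(t)        # => true,  first error is logged
puts s.emit?(t + 10)   # => false, suppressed inside the 30s window
puts s.emit?(t + 40)   # => true,  logged again after the interval
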
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/lib/fluent/timezone.rb new/lib/fluent/timezone.rb
--- old/lib/fluent/timezone.rb  2018-07-11 04:57:37.000000000 +0200
+++ new/lib/fluent/timezone.rb  2018-08-23 03:04:47.000000000 +0200
@@ -139,5 +139,19 @@
 
       return nil
     end
+
+    def self.utc_offset(timezone)
+      return 0 if timezone.nil?
+
+      case timezone
+      when NUMERIC_PATTERN
+        Time.zone_offset(timezone)
+      when NAME_PATTERN
+        tz = TZInfo::Timezone.get(timezone)
+        ->(time) {
+          tz.period_for_utc(time).utc_total_offset
+        }
+      end
+    end
   end
 end
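
The new Timezone.utc_offset above returns an Integer for numeric zones and a callable for
named zones, so daylight-saving transitions can be resolved per event time. A rough
standalone approximation (the patterns below are assumptions and simpler than fluentd's
NUMERIC_PATTERN/NAME_PATTERN; the tzinfo gem must be available):

require 'time'
require 'tzinfo'

def utc_offset_sketch(timezone)
  case timezone
  when /\A[+-]\d{2}:?\d{2}\z/   # numeric zone such as "+0900"
    Time.zone_offset(timezone)
  else                          # assume a tzdata name such as "Asia/Tokyo"
    tz = TZInfo::Timezone.get(timezone)
    ->(time) { tz.period_for_utc(time).utc_total_offset }
  end
end

puts utc_offset_sketch('+0900')                 # => 32400
offset = utc_offset_sketch('America/New_York')
puts offset.call(Time.utc(2018, 1, 15))         # => -18000 (EST)
puts offset.call(Time.utc(2018, 7, 15))         # => -14400 (EDT)
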
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/lib/fluent/version.rb new/lib/fluent/version.rb
--- old/lib/fluent/version.rb   2018-07-11 04:57:37.000000000 +0200
+++ new/lib/fluent/version.rb   2018-08-23 03:04:47.000000000 +0200
@@ -16,6 +16,6 @@
 
 module Fluent
 
-  VERSION = '1.2.3'
+  VERSION = '1.2.5'
 
 end
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/metadata new/metadata
--- old/metadata        2018-07-11 04:57:37.000000000 +0200
+++ new/metadata        2018-08-23 03:04:46.000000000 +0200
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: fluentd
 version: !ruby/object:Gem::Version
-  version: 1.2.3
+  version: 1.2.5
 platform: ruby
 authors:
 - Sadayuki Furuhashi
 autorequire: 
 bindir: bin
 cert_chain: []
-date: 2018-07-11 00:00:00.000000000 Z
+date: 2018-08-23 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: msgpack
@@ -769,7 +769,7 @@
       version: '0'
 requirements: []
 rubyforge_project: 
-rubygems_version: 2.7.6
+rubygems_version: 2.6.14.1
 signing_key: 
 specification_version: 4
 summary: Fluentd event collector
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/test/config/test_config_parser.rb 
new/test/config/test_config_parser.rb
--- old/test/config/test_config_parser.rb       2018-07-11 04:57:37.000000000 
+0200
+++ new/test/config/test_config_parser.rb       2018-08-23 03:04:47.000000000 
+0200
@@ -374,7 +374,7 @@
         </elem2>
       ]
         write_config "#{TMP_DIR}/dir/config_test_9.conf", %[
-        k9 embeded
+        k9 embedded
         <elem3 name>
           nested nested_value
           include hoge
@@ -418,7 +418,7 @@
         elem2 = c.elements.find { |e| e.name == 'elem2' }
         assert(elem2)
         assert_equal('name', elem2.arg)
-        assert_equal('embeded', elem2['k9'])
+        assert_equal('embedded', elem2['k9'])
         assert_not_include(elem2, 'include')
 
         elem3 = elem2.elements.find { |e| e.name == 'elem3' }
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/test/config/test_configurable.rb 
new/test/config/test_configurable.rb
--- old/test/config/test_configurable.rb        2018-07-11 04:57:37.000000000 
+0200
+++ new/test/config/test_configurable.rb        2018-08-23 03:04:47.000000000 
+0200
@@ -1032,12 +1032,12 @@
           detail_base = base.class.merged_configure_proxy.sections[:detail]
           detail_sub = sub.class.merged_configure_proxy.sections[:detail]
           detail_base_attributes = {
-            requried: detail_base.required,
+            required: detail_base.required,
             multi: detail_base.multi,
             alias: detail_base.alias,
           }
           detail_sub_attributes = {
-            requried: detail_sub.required,
+            required: detail_sub.required,
             multi: detail_sub.multi,
             alias: detail_sub.alias,
           }
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/test/config/test_dsl.rb new/test/config/test_dsl.rb
--- old/test/config/test_dsl.rb 2018-07-11 04:57:37.000000000 +0200
+++ new/test/config/test_dsl.rb 2018-08-23 03:04:47.000000000 +0200
@@ -44,7 +44,7 @@
   </elem2>
 ]
   write_config "#{TMP_DIR}/dir/config_test_9.conf", %[
-  k9 embeded
+  k9 embedded
   <elem3 name>
     nested nested_value
     include hoge
@@ -262,7 +262,7 @@
         elem2 = @root.elements.find { |e| e.name == 'elem2' }
         assert(elem2)
         assert_equal('name', elem2.arg)
-        assert_equal('embeded', elem2['k9'])
+        assert_equal('embedded', elem2['k9'])
         assert_not_include(elem2, 'include')
 
         elem3 = elem2.elements.find { |e| e.name == 'elem3' }
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/test/config/test_system_config.rb 
new/test/config/test_system_config.rb
--- old/test/config/test_system_config.rb       2018-07-11 04:57:37.000000000 
+0200
+++ new/test/config/test_system_config.rb       2018-08-23 03:04:47.000000000 
+0200
@@ -7,7 +7,7 @@
 module Fluent::Config
   class FakeLoggerInitializer
     attr_accessor :level
-    def initalize
+    def initialize
       @level = nil
     end
   end
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/test/plugin/test_in_syslog.rb 
new/test/plugin/test_in_syslog.rb
--- old/test/plugin/test_in_syslog.rb   2018-07-11 04:57:37.000000000 +0200
+++ new/test/plugin/test_in_syslog.rb   2018-08-23 03:04:47.000000000 +0200
@@ -49,7 +49,7 @@
 
     data('resolve_hostname' => 'resolve_hostname true',
          'source_hostname_key' => 'source_hostname_key source_host')
-    def test_configure_reslove_hostname(param)
+    def test_configure_resolve_hostname(param)
       d = create_driver([CONFIG, param].join("\n"))
       assert_true d.instance.resolve_hostname
     end
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/test/plugin/test_in_tail.rb 
new/test/plugin/test_in_tail.rb
--- old/test/plugin/test_in_tail.rb     2018-07-11 04:57:37.000000000 +0200
+++ new/test/plugin/test_in_tail.rb     2018-08-23 03:04:47.000000000 +0200
@@ -969,7 +969,7 @@
     end
 
     # For https://github.com/fluent/fluentd/issues/1455
-    # This test is fragile because test content depends on internal 
implementaion.
+    # This test is fragile because test content depends on internal 
implementation.
     # So if you modify in_tail internal, this test may break.
     def test_unwatched_files_should_be_removed
       config = config_element("", "", {
@@ -990,7 +990,7 @@
       waiting(20) { sleep 0.1 until Dir.glob("#{TMP_DIR}/*.txt").size == 0 } # 
Ensure file is deleted on Windows
       waiting(5) { sleep 0.1 until 
d.instance.instance_variable_get(:@tails).keys.size == 0 }
 
-      # Previous implementaion has an infinite watcher creation bug.
+      # Previous implementation has an infinite watcher creation bug.
       # Following code checks such unexpected bug by couting  actual object 
allocation.
       base_num = count_timer_object
       2.times {
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/test/plugin/test_out_file.rb 
new/test/plugin/test_out_file.rb
--- old/test/plugin/test_out_file.rb    2018-07-11 04:57:37.000000000 +0200
+++ new/test/plugin/test_out_file.rb    2018-08-23 03:04:47.000000000 +0200
@@ -561,6 +561,79 @@
     check_gzipped_result(path, formatted_lines * 3)
   end
 
+  test 'append when JST' do
+    with_timezone(Fluent.windows? ? "JST-9" : "Asia/Tokyo") do
+      time = event_time("2011-01-02 03:14:15+09:00")
+      formatted_lines = %[2011-01-02T03:14:15+09:00\ttest\t{"a":1}\n] + 
%[2011-01-02T03:14:15+09:00\ttest\t{"a":2}\n]
+
+      write_once = ->(){
+        d = create_driver %[
+          path #{TMP_DIR}/out_file_test
+          compress gz
+          append true
+          <buffer>
+            timekey_use_utc false
+            timekey_zone Asia/Tokyo
+          </buffer>
+        ]
+        d.run(default_tag: 'test'){
+          d.feed(time, {"a"=>1})
+          d.feed(time, {"a"=>2})
+        }
+        d.instance.last_written_path
+      }
+
+      path = write_once.call
+      assert_equal "#{TMP_DIR}/out_file_test.20110102.log.gz", path
+      check_gzipped_result(path, formatted_lines)
+
+      path = write_once.call
+      assert_equal "#{TMP_DIR}/out_file_test.20110102.log.gz", path
+      check_gzipped_result(path, formatted_lines * 2)
+
+      path = write_once.call
+      assert_equal "#{TMP_DIR}/out_file_test.20110102.log.gz", path
+      check_gzipped_result(path, formatted_lines * 3)
+    end
+  end
+
+  test 'append when UTC-02 but timekey_zone is +0900' do
+    with_timezone("UTC-02") do # +0200
+      time = event_time("2011-01-02 17:14:15+02:00")
+      formatted_lines = %[2011-01-02T17:14:15+02:00\ttest\t{"a":1}\n] + 
%[2011-01-02T17:14:15+02:00\ttest\t{"a":2}\n]
+
+      write_once = ->(){
+        d = create_driver %[
+          path #{TMP_DIR}/out_file_test
+          compress gz
+          append true
+          <buffer>
+            timekey_use_utc false
+            timekey_zone +0900
+          </buffer>
+        ]
+        d.run(default_tag: 'test'){
+          d.feed(time, {"a"=>1})
+          d.feed(time, {"a"=>2})
+        }
+        d.instance.last_written_path
+      }
+
+      path = write_once.call
+      # Rotated at 2011-01-02 17:00:00+02:00
+      assert_equal "#{TMP_DIR}/out_file_test.20110103.log.gz", path
+      check_gzipped_result(path, formatted_lines)
+
+      path = write_once.call
+      assert_equal "#{TMP_DIR}/out_file_test.20110103.log.gz", path
+      check_gzipped_result(path, formatted_lines * 2)
+
+      path = write_once.call
+      assert_equal "#{TMP_DIR}/out_file_test.20110103.log.gz", path
+      check_gzipped_result(path, formatted_lines * 3)
+    end
+  end
+
   test '${chunk_id}' do
     time = event_time("2011-01-02 13:14:15 UTC")
     formatted_lines = %[2011-01-02T13:14:15Z\ttest\t{"a":1}\n] + 
%[2011-01-02T13:14:15Z\ttest\t{"a":2}\n]
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/test/test_config.rb new/test/test_config.rb
--- old/test/test_config.rb     2018-07-11 04:57:37.000000000 +0200
+++ new/test/test_config.rb     2018-08-23 03:04:47.000000000 +0200
@@ -53,7 +53,7 @@
       </elem2>
     ]
     write_config "#{TMP_DIR}/dir/config_test_9.conf", %[
-      k9 embeded
+      k9 embedded
       <elem3 name>
         nested nested_value
         include hoge
@@ -98,7 +98,7 @@
     elem2 = c.elements.find { |e| e.name == 'elem2' }
     assert_not_nil elem2
     assert_equal 'name', elem2.arg
-    assert_equal 'embeded', elem2['k9']
+    assert_equal 'embedded', elem2['k9']
     assert !elem2.has_key?('include')
 
     elem3 = elem2.elements.find { |e| e.name == 'elem3' }
@@ -145,7 +145,7 @@
     not_fetched = []; rule_conf.check_not_fetched {|key, e| not_fetched << key 
}
     assert_equal %w[pattern replace], not_fetched
 
-    # repeateadly accessing should not grow memory usage
+    # repeatedly accessing should not grow memory usage
     before_size = match_conf.unused.size
     10.times { match_conf['type'] }
     assert_equal before_size, match_conf.unused.size
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/test/test_log.rb new/test/test_log.rb
--- old/test/test_log.rb        2018-07-11 04:57:37.000000000 +0200
+++ new/test/test_log.rb        2018-08-23 03:04:47.000000000 +0200
@@ -493,7 +493,7 @@
     end
   end
 
-  def test_log_rotates_specifed_size_with_logdevio
+  def test_log_rotates_specified_size_with_logdevio
     with_timezone('utc') do
       rotate_age = 2
       rotate_size = 100
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/test/test_root_agent.rb new/test/test_root_agent.rb
--- old/test/test_root_agent.rb 2018-07-11 04:57:37.000000000 +0200
+++ new/test/test_root_agent.rb 2018-08-23 03:04:47.000000000 +0200
@@ -1,6 +1,7 @@
 require_relative 'helper'
 require 'fluent/event_router'
 require 'fluent/system_config'
+require 'timecop'
 require_relative 'test_plugin_classes'
 
 class RootAgentTest < ::Test::Unit::TestCase
@@ -609,6 +610,38 @@
     end
   end
 
+  sub_test_case 'configure emit_error_interval' do
+    setup do
+      system_config = SystemConfig.new
+      system_config.emit_error_log_interval = 30
+      @ra = RootAgent.new(log: $log, system_config: system_config)
+      stub(Engine).root_agent { @ra }
+      @ra.log.out.reset
+      one_minute_ago = Time.now.to_i - 60
+      Timecop.freeze(one_minute_ago)
+    end
+
+    teardown do
+      Timecop.return
+    end
+
+    test 'suppresses errors' do
+      mock(@ra.log).warn_backtrace()
+      e = StandardError.new('standard error')
+      begin
+        @ra.handle_emits_error("tag", nil, e)
+      rescue
+      end
+
+      begin
+      @ra.handle_emits_error("tag", nil, e)
+      rescue
+      end
+
+      assert_equal 1, @ra.log.out.logs.size
+    end
+  end
+
   sub_test_case 'configured at worker2 with 4 workers environment' do
     setup do
       ENV['SERVERENGINE_WORKER_ID'] = '2'

