@@ -164,6 +164,8 @@ class Fluent::Plugin::Sumologic < Fluent::Plugin::Output
164164 config_param :retry_min_interval , :time , :default => 1 # 1s
165165 config_param :retry_max_interval , :time , :default => 5 *60 # 5m
166166
167+ config_param :max_request_size , :size , :default => 0
168+
167169 # https://help.sumologic.com/Manage/Fields
168170 desc 'Fields string (eg "cluster=payment, service=credit_card") which is going to be added to every log record.'
169171 config_param :custom_fields , :string , :default => nil
@@ -252,6 +254,10 @@ def configure(conf)
252254 conf [ 'compress_encoding' ] ,
253255 log ,
254256 )
257+
258+ if !conf [ 'max_request_size' ] . nil? && conf [ 'max_request_size' ] . to_i <= 0
259+ conf [ 'max_request_size' ] = '0'
260+ end
255261 super
256262 end
257263
@@ -405,50 +411,75 @@ def write(chunk)
405411 fields = [ fields , @custom_fields ] . compact . join ( "," )
406412 end
407413
408- retries = 0
409- start_time = Time . now
410- sleep_time = @retry_min_interval
411-
412- while true
413- common_log_part = "#{ @data_type } records with source category '#{ source_category } ', source host '#{ source_host } ', source name '#{ source_name } ', chunk #{ chunk_id } , try #{ retries } "
414- begin
415- @log . debug { "Sending #{ messages . count } ; #{ common_log_part } " }
416-
417- @sumo_conn . publish (
418- messages . join ( "\n " ) ,
419- source_host = source_host ,
420- source_category = source_category ,
421- source_name = source_name ,
422- data_type = @data_type ,
423- metric_data_format = @metric_data_format ,
424- collected_fields = fields ,
425- dimensions = @custom_dimensions
426- )
427- break
428- rescue => e
429- if !@use_internal_retry
430- raise e
414+ if @max_request_size <= 0
415+ messages_to_send = [ messages ]
416+ else
417+ messages_to_send = [ ]
418+ current_message = [ ]
419+ current_length = 0
420+ messages . each do |message |
421+ current_message . push message
422+ current_length += message . length
423+
424+ if current_length > @max_request_size
425+ messages_to_send . push ( current_message )
426+ current_message = [ ]
427+ current_length = 0
431428 end
432- # increment retries
433- retries = retries + 1
434-
435- log . warn "error while sending request to sumo: #{ e } ; #{ common_log_part } "
436- log . warn_backtrace e . backtrace
437-
438- # drop data if
439- # - we reached out the @retry_max_times retries
440- # - or we exceeded @retry_timeout
441- if ( retries >= @retry_max_times && @retry_max_times > 0 ) || ( Time . now > start_time + @retry_timeout && @retry_timeout > 0 )
442- log . warn "dropping records; #{ common_log_part } "
429+ current_length += 1 # this is for newline
430+ end
431+ if current_message . length > 0
432+ messages_to_send . push ( current_message )
433+ end
434+ end
435+
436+ messages_to_send . each_with_index do |message , i |
437+ retries = 0
438+ start_time = Time . now
439+ sleep_time = @retry_min_interval
440+
441+ while true
442+ common_log_part = "#{ @data_type } records with source category '#{ source_category } ', source host '#{ source_host } ', source name '#{ source_name } ', chunk #{ chunk_id } , try #{ retries } , batch #{ i } "
443+
444+ begin
445+ @log . debug { "Sending #{ message . count } ; #{ common_log_part } " }
446+
447+ @sumo_conn . publish (
448+ message . join ( "\n " ) ,
449+ source_host = source_host ,
450+ source_category = source_category ,
451+ source_name = source_name ,
452+ data_type = @data_type ,
453+ metric_data_format = @metric_data_format ,
454+ collected_fields = fields ,
455+ dimensions = @custom_dimensions
456+ )
443457 break
444- end
445-
446- log . info "going to retry to send data at #{ Time . now + sleep_time } ; #{ common_log_part } "
447- sleep sleep_time
448-
449- sleep_time = sleep_time * 2
450- if sleep_time > @retry_max_interval
451- sleep_time = @retry_max_interval
458+ rescue => e
459+ if !@use_internal_retry
460+ raise e
461+ end
462+ # increment retries
463+ retries += 1
464+
465+ log . warn "error while sending request to sumo: #{ e } ; #{ common_log_part } "
466+ log . warn_backtrace e . backtrace
467+
468+ # drop data if
469+ # - we reached out the @retry_max_times retries
470+ # - or we exceeded @retry_timeout
471+ if ( retries >= @retry_max_times && @retry_max_times > 0 ) || ( Time . now > start_time + @retry_timeout && @retry_timeout > 0 )
472+ log . warn "dropping records; #{ common_log_part } "
473+ break
474+ end
475+
476+ log . info "going to retry to send data at #{ Time . now + sleep_time } ; #{ common_log_part } "
477+ sleep sleep_time
478+
479+ sleep_time *= 2
480+ if sleep_time > @retry_max_interval
481+ sleep_time = @retry_max_interval
482+ end
452483 end
453484 end
454485 end