# encoding: utf-8
require "logstash/namespace"
require "logstash/outputs/base"
require "logstash/errors"
require "zlib"
# This output will write events to files on disk. You can use fields
# from the event as parts of the filename and/or path.
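#
# A minimal configuration sketch (the `host` and `message` fields, the paths,
# and the option values below are illustrative, not defaults):
#
#     output {
#       file {
#         path => "/var/log/logstash/%{host}/app-%{+YYYY-MM-dd}.log.gz"
#         message_format => "%{message}"
#         flush_interval => 5
#         gzip => true
#       }
#     }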
class LogStash::Outputs::File < LogStash::Outputs::Base
FIELD_REF = /%\{[^}]+\}/
config_name "file"
attr_reader :failure_path
# The path to the file to write. Event fields can be used here,
# like `/var/log/logstash/%{host}/%{application}`
# One may also utilize the path option for date-based log
# rotation via the joda time format. This will use the event
# timestamp.
# E.g.: `path => "./test-%{+YYYY-MM-dd}.txt"` to create
# `./test-2013-05-29.txt`
#
# If you use an absolute path, it cannot start with a dynamic string.
# E.g.: `/%{myfield}/` and `/test-%{myfield}/` are not valid paths.
config :path, :validate => :string, :required => true
# The format to use when writing events to the file. This value
# supports any string and can include `%{name}` and other dynamic
# strings.
#
# If this setting is omitted, the full JSON representation of the
# event will be written as a single line.
config :message_format, :validate => :string
# Flush interval (in seconds) for flushing writes to log files.
# 0 will flush on every message.
config :flush_interval, :validate => :number, :default => 2
# Gzip the output stream before writing to disk.
config :gzip, :validate => :boolean, :default => false
# If the generated path is invalid, the events will instead be saved
# into this file, inside the root of the defined path.
config :filename_failure, :validate => :string, :default => '_filepath_failures'
# If a file is deleted, but an event arrives that needs to be stored
# in that file, the plugin will create the file again. Default => true
config :create_if_deleted, :validate => :boolean, :default => true
public
def register
require "fileutils" # For mkdir_p
workers_not_supported
@files = {}
@path = File.expand_path(path)
validate_path
if path_with_field_ref?
@file_root = extract_file_root
else
@file_root = File.dirname(path)
end
@failure_path = File.join(@file_root, @filename_failure)
now = Time.now
@last_flush_cycle = now
@last_stale_cleanup_cycle = now
@flush_interval = @flush_interval.to_i
@stale_cleanup_interval = 10
end # def register
private
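# Rejects paths whose first component is a field reference, e.g.
# `/%{myfield}/out.log`; a path with a static prefix such as
# `/var/log/%{host}.log` is accepted. (Example paths are illustrative only.)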
def validate_path
if (root_directory =~ FIELD_REF) != nil
@logger.error("File: The starting part of the path should not be dynamic.", :path => @path)
raise LogStash::ConfigurationError.new("The starting part of the path should not be dynamic.")
end
end
private
def root_directory
parts = @path.split(File::SEPARATOR).select { |item| !item.empty? }
if Gem.win_platform?
# First part is the drive letter
parts[1]
else
parts.first
end
end
public
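# Resolve the output path for this event; events whose dynamic path would
# escape the file root (or whose file was deleted while create_if_deleted
# is false) are redirected to the failure file instead.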
def receive(event)
return unless output?(event)
file_output_path = generate_filepath(event)
if path_with_field_ref? && !inside_file_root?(file_output_path)
@logger.warn("File: the event tried to write outside the files root, writing the event to the failure file", :event => event, :filename => @failure_path)
file_output_path = @failure_path
elsif !@create_if_deleted && deleted?(file_output_path)
file_output_path = @failure_path
end
output = format_message(event)
write_event(file_output_path, output)
end # def receive
public
def teardown
@logger.debug("Teardown: closing files")
@files.each do |path, fd|
begin
fd.close
@logger.debug("Closed file #{path}", :fd => fd)
rescue Exception => e
@logger.error("Exception while flushing and closing files.", :exception => e)
end
end
finished
end
private
def inside_file_root?(log_path)
target_file = File.expand_path(log_path)
return target_file.start_with?("#{@file_root.to_s}/")
end
private
def write_event(log_path, event)
@logger.debug("File, writing event to file.", :filename => log_path)
fd = open(log_path)
# TODO(sissel): Check if we should rotate the file.
fd.write(event)
fd.write("\n")
flush(fd)
close_stale_files
end
private
def generate_filepath(event)
event.sprintf(@path)
end
private
def path_with_field_ref?
path =~ FIELD_REF
end
private
def format_message(event)
if @message_format
event.sprintf(@message_format)
else
event.to_json
end
end
private
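# Returns the static part of the path that precedes the first field
# reference, e.g. `/var/log/logstash` for `/var/log/logstash/%{host}.log`
# (example path is illustrative).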
def extract_file_root
parts = File.expand_path(path).split(File::SEPARATOR)
parts.take_while { |part| part !~ FIELD_REF }.join(File::SEPARATOR)
end
private
def flush(fd)
if flush_interval > 0
flush_pending_files
else
fd.flush
end
end
# every flush_interval seconds or so (triggered by events, but if there are no events there's no point flushing files anyway)
private
def flush_pending_files
return unless Time.now - @last_flush_cycle >= flush_interval
@logger.debug("Starting flush cycle")
@files.each do |path, fd|
@logger.debug("Flushing file", :path => path, :fd => fd)
fd.flush
end
@last_flush_cycle = Time.now
end
# every 10 seconds or so (triggered by events, but if there are no events there's no point closing files anyway)
private
def close_stale_files
now = Time.now
return unless now - @last_stale_cleanup_cycle >= @stale_cleanup_interval
@logger.info("Starting stale files cleanup cycle", :files => @files)
inactive_files = @files.select { |path, fd| not fd.active }
@logger.debug("%d stale files found" % inactive_files.count, :inactive_files => inactive_files)
inactive_files.each do |path, fd|
@logger.info("Closing file %s" % path)
fd.close
@files.delete(path)
end
# Mark all files as inactive; a call to write will mark them as active again
@files.each { |path, fd| fd.active = false }
@last_stale_cleanup_cycle = now
end
private
def cached?(path)
@files.include?(path) && !@files[path].nil?
end
private
def deleted?(path)
!File.exist?(path)
end
private
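# Returns a cached IOWriter for the path, reopening the file (and creating
# any missing parent directories) when it is not cached or has been deleted
# on disk.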
def open(path)
if !deleted?(path) && cached?(path)
return @files[path]
elsif deleted?(path)
if @create_if_deleted
@logger.debug("Required path was deleted, creating the file again", :path => path)
@files.delete(path)
else
return @files[path] if cached?(path)
end
end
@logger.info("Opening file", :path => path)
dir = File.dirname(path)
if !Dir.exist?(dir)
@logger.info("Creating directory", :directory => dir)
FileUtils.mkdir_p(dir)
end
# work around a bug opening fifos (bug JRUBY-6280)
stat = File.stat(path) rescue nil
if stat && stat.ftype == "fifo" && LogStash::Environment.jruby?
fd = java.io.FileWriter.new(java.io.File.new(path))
else
fd = File.new(path, "a+")
end
if gzip
fd = Zlib::GzipWriter.new(fd)
end
@files[path] = IOWriter.new(fd)
end
end # class LogStash::Outputs::File
# Wrapper class that tracks whether a file handle has been written to since
# the last stale-file sweep, so idle handles can be closed.
class IOWriter
def initialize(io)
@io = io
end
def write(*args)
@io.write(*args)
@active = true
end
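# Flush the wrapped IO; for a GzipWriter, also flush the underlying file
# handle so the compressed bytes actually reach disk.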
def flush
@io.flush
if @io.class == Zlib::GzipWriter
@io.to_io.flush
end
end
def method_missing(method_name, *args, &block)
if @io.respond_to?(method_name)
@io.send(method_name, *args, &block)
else
super
end
end
attr_accessor :active
end