Я использую этот фильтр для анализа некоторых данных CSV, которые я генерирую из файла php. Я беру вывод из инструмента мониторинга GPU под названием MSI Afterburner, который выводит файл .hml. Есть тонна пробелов и нерелевантный заголовок, который мой php-файл удаляет и выводит через запятую значение.
filter
{
csv
{
columns => ["somename","@timestamp","cpu.avg.temp","gpu.temp","fan.speed","gpu.usage","bus.usage","fan.tachometer","clock.core","framerate.hz","framerate.ms","cpu.temp.1","cpu.temp.2","cpu.temp.3","cpu.temp.4"]
separator => ","
skip_empty_columns => "true"
}
mutate
{
convert => ["somename","integer"]
convert => ["cpu.avg.temp","float"]
convert => ["gpu.temp","float"]
convert => ["fan.speed","float"]
convert => ["gpu.usage","float"]
convert => ["bus.usage","float"]
convert => ["fan.tachometer","float"]
convert => ["clock.core", "float"]
convert => ["framerate.hz","float"]
convert => ["framerate.ms","float"]
convert => ["cpu.temp.1","float"]
convert => ["cpu.temp.2","float"]
convert => ["cpu.temp.3","float"]
convert => ["cpu.temp.4","float"]
}
date
{
match => ["@timestamp", "dd-MM-yyyyHH:mm:ss"]
}
}
Это вывод, который выдаёт logstash. Мне интересно, связано ли это с тем, что мой формат даты неверен, или с тем, что в конце моего сообщения появляется специальный символ ‘\r’. Также интересно, умеет ли logstash читать формат dd-MM-yyyyHH:mm:ss, где год и час слипаются, — если нет, то у меня могут возникнуть проблемы.
{
"path" => "C:\\Users\\Public\\Documents\\gpumetrics.csv",
"somename" => 80,
"@timestamp" => 2017-02-20T02:33:10.764Z,
"@version" => "1",
"host" => "DESKTOP-Q8UEATO",
"message" => "80,19-02-201721:33:10,32.000,41.000,0.000,0.000,0.000,0.000,215.000,0.000,0.000,31.000,32.000,30.000,31.000\r",
"type" => "csv",
"tags" => [
[0] "_csvparsefailure",
[1] "_dateparsefailure"]
}
Вот несколько примеров строк из моего файла журнала. Как вы можете заметить, перед отметкой времени есть поле. Мне интересно, разрешено ли это.
80,19-02-201713:20:32,44.000,43.000,0.000,0.000,0.000,0.000,215.000,,,37.000,42.000,41.000,38.000
80,19-02-201713:20:33,47.000,43.000,0.000,0.000,0.000,0.000,215.000,,,46.000,47.000,45.000,44.000
80,19-02-201713:20:34,53.000,43.000,0.000,0.000,0.000,0.000,215.000,,,35.000,50.000,36.000,37.000
80,19-02-201713:20:35,37.000,43.000,0.000,0.000,0.000,0.000,215.000,,,37.000,37.000,37.000,34.000
80,19-02-201713:20:36,34.000,44.000,0.000,0.000,0.000,0.000,1582.000,0.000,0.000,39.000,34.000,33.000,36.000
80,19-02-201713:20:37,46.000,44.000,0.000,0.000,0.000,0.000,1582.000,0.000,0.000,45.000,37.000,43.000,37.000
Ваша проблема решается очень просто: переименуйте столбец с временной меткой, поскольку поле @timestamp создаётся самим Logstash ещё до разбора вашей строки, и поэтому CSV-фильтр не может записать в него значение из вашего файла.
filter
{
csv
{
# уберите символ @ из имени столбца в строке ниже:
columns => ["somename","timestamp","cpu.avg.temp","gpu.temp","fan.speed","gpu.usage","bus.usage","fan.tachometer","clock.core","framerate.hz","framerate.ms","cpu.temp.1","cpu.temp.2","cpu.temp.3","cpu.temp.4"]
separator => ","
skip_empty_columns => "true"
}
...
date
{
match => ["timestamp", "dd-MM-yyyyHH:mm:ss"]
# символ @ убран и здесь
}
}
Других решений пока нет …