node.js - 用fluentd和winston解析json
问题描述
我的应用程序生成 apache 日志以及类似这样的 JSON 数据
{ TableName: 'myTable', CapacityUnits: 0.5 }
我正在使用 winston(3.2.1) 作为我的记录器。在我的 Kibana 中,我将 JSON 的每一行视为不同的条目,而不是单个 json。知道如何解决这个问题吗?
我的 winston 代码看起来像这样
const winston = require('winston');
const { format } = winston;
// Final console line: "<level>: <message>".
// Object payloads are stringified on ONE line: the original used
// JSON.stringify(..., null, 2), and the 2-space indent produced multi-line
// JSON that line-based collectors (fluentd) split into one event per line —
// exactly the Kibana symptom described above.
const prettyJson = format.printf((info) => {
  // Null-safe check: `info.message.constructor` would throw a TypeError
  // when the message is null or undefined.
  if (info.message !== null && typeof info.message === 'object') {
    info.message = JSON.stringify(info.message);
  }
  return `${info.level}: ${info.message}`;
});
// Minimum level is configurable via the LOG_LEVEL env var; default to the
// most verbose setting.
const logLevel = process.env.LOG_LEVEL || 'debug';
// Human-readable timestamp for console output.
function tsFormat() {
  return new Date().toLocaleTimeString();
}
// Console logger for the whole app; this module's public export.
const Logger = winston.createLogger({
  level: logLevel,
  transports: [
    new winston.transports.Console({
      format: format.combine(
        // winston 3 ignores the legacy transport-level `timestamp` option;
        // the timestamp must be injected through the format pipeline.
        format.timestamp({ format: tsFormat }),
        format.colorize(),
        format.splat(),
        // prettyJson's printf produces the final string, so the original
        // prettyPrint()/simple() steps were dead code (each would have been
        // overwritten by the formats after it) — removed.
        prettyJson,
      ),
    }),
  ],
});
module.exports = Logger;
我的 fluentd 配置看起来像这样
# Receive events over http from port 9880
<source>
@type http
port 9880
bind 0.0.0.0
@log_level debug
</source>
# Receive events from 24224/tcp
<source>
@type forward
port 24224
bind 0.0.0.0
</source>
# We need to massage the data before it goes into ES
<filter **>
# We parse the input with key "log" (https://docs.fluentd.org/filter/parser)
@type parser
key_name log
# Keep the original key value pair in the result
reserve_data true
<parse>
# Use apache2 parser plugin to parse the data
@type multi_format
<pattern>
format json
</pattern>
<pattern>
format apache2
</pattern>
<pattern>
format none
</pattern>
</parse>
</filter>
# Fluentd will decide what to do here if the event is matched
# In our case, we want all the data to be matched hence **
<match **>
# We want all the data to be copied to elasticsearch using inbuilt
# copy output plugin https://docs.fluentd.org/output/copy
@type copy
<store>
# We want to store our data to elastic search using out_elasticsearch plugin
# https://docs.fluentd.org/output/elasticsearch. See Dockerfile for installation
@type elasticsearch
time_key timestamp_ms
host hostip
port 9200
with_transporter_log true
@log_level debug
log_es_400_reason true
# Use conventional index name format (logstash-%Y.%m.%d)
logstash_format true
# We will use this when kibana reads logs from ES
logstash_prefix fluentd
logstash_dateformat %Y-%m-%d
flush_interval 1s
reload_connections false
reconnect_on_error true
reload_on_failure true
</store>
</match>
解决方案
问题出在 `JSON.stringify(info.message, null, 2)`:缩进参数 2 会生成多行 JSON,而 fluentd 按行采集日志,于是每一行都被当作一条独立的记录送入 Elasticsearch。去掉缩进参数(改为 `JSON.stringify(info.message)`),让整个 JSON 输出在同一行,fluentd 的 json 解析器就能把它作为单条日志解析。
推荐阅读
- java - Java - 在 Super 之前设置类属性
- android - Android - 在方向更改时恢复动态添加的视图状态
- eclipse - 项目资源管理器中没有包含目录
- java - *已解决*我可以将 Spring Boot 作为具有 Digest 身份验证请求的客户端运行吗?
- mongodb - 如何使用 mongodb compass 恢复数据库?
- r - 如何为上调基因和下调基因赋予两种不同的颜色?
- python - Google Colab 错误:无法加载 Qt 平台插件“xcb”
- java - DownloadManager 在再次连接到互联网时进行多次相同的下载
- c# - 解决了!如何动态添加 onClick 侦听器和带有变量的方法调用到预制按钮?
- python - 创建子类时覆盖类定义中使用的全局变量