首页 > 解决方案 > Logstash 不使用 docker-compose.yaml 文件读取文件输入

问题描述

logstash.conf 文件:

 input {

 file {
    type => "java"
    path => "/elk/spring-boot-elk.log"
    start_position => "beginning"
  }
}


filter {
  #If log line contains tab character followed by 'at' then we will tag that entry as 
stacktrace
  if [message] =~ "\tat" {
    grok {
      match => ["message", "^(\tat)"]
      add_tag => ["stacktrace"]
    }
  }

}

output { 
  stdout {
    codec => rubydebug
  }

  # Sending properly parsed log events to elasticsearch
  elasticsearch {
    hosts => ["elasticsearch:9200"]
  }
}

docker-compose.yaml文件:

version: "3"
services:
  elasticsearch:
    image: elasticsearch:7.5.2
    ports:
      # Quoted to avoid YAML 1.1 sexagesimal parsing of host:container pairs.
      - "9200:9200"
      - "9300:9300"
    environment:
      - discovery.type=single-node
  kibana:
    image: kibana:7.5.2
    ports:
      - "5601:5601"
    # `links` is deprecated in Compose v3; services on the default network
    # already resolve each other by service name.
    depends_on:
      - elasticsearch
  logstash:
    image: logstash:7.5.2
    volumes:
      - ./:/config-dir
      # Mount the host log file so the file input can actually read it.
      # Without this mount /elk/spring-boot-elk.log does not exist inside
      # the container, so no events ever reach Elasticsearch/Kibana.
      - ./spring-boot-elk.log:/elk/spring-boot-elk.log
    command: logstash -f /config-dir/logstash.conf
    depends_on:
      - elasticsearch

所有容器都在运行,但 Kibana 中没有任何数据。

我认为问题在于logstash.

[2020-04-26T16:37:44,502][WARN ][logstash.outputs.elasticsearch] You are using a deprecated config setting "document_type" set in elasticsearch. Deprecated settings will continue to work, but are scheduled for removal from logstash in the future. Document types are being deprecated in Elasticsearch 6.0, and removed entirely in 7.0. You should avoid this feature If you have any questions about this, please visit the #logstash channel on freenode irc. {:name=>"document_type", :plugin=><LogStash::Outputs::ElasticSearch bulk_path=>"/_monitoring/bulk?system_id=logstash&system_api_version=7&interval=1s", hosts=>[http://elasticsearch:9200], sniffing=>false, manage_template=>false, id=>"7d7dfa0f023f65240aeb31ebb353da5a42dc782979a2bd7e26e28b7cbd509bb3", document_type=>"%{[@metadata][document_type]}", enable_metric=>true, codec=><LogStash::Codecs::Plain id=>"plain_1a08e50c-ae97-4f38-a5b7-7aa70df94f4a", enable_metric=>true, charset=>"UTF-8">, workers=>1, template_name=>"logstash", template_overwrite=>false, doc_as_upsert=>false, script_type=>"inline", script_lang=>"painless", script_var_name=>"event", scripted_upsert=>false, retry_initial_interval=>2, retry_max_interval=>64, retry_on_conflict=>1, ilm_enabled=>"auto", ilm_rollover_alias=>"logstash", ilm_pattern=>"{now/d}-000001", ilm_policy=>"logstash-policy", action=>"index", ssl_certificate_verification=>true, sniffing_delay=>5, timeout=>60, pool_max=>1000, pool_max_per_route=>100, resurrect_delay=>5, validate_after_inactivity=>10000, http_compression=>false>}
[2020-04-26T16:37:44,544][INFO ][logstash.outputs.elasticsearch] Elasticsearch pool URLs updated {:changes=>{:removed=>[], :added=>[http://elasticsearch:9200/]}}
[2020-04-26T16:37:44,550][WARN ][logstash.outputs.elasticsearch] Restored connection to ES instance {:url=>"http://elasticsearch:9200/"}
[2020-04-26T16:37:44,555][INFO ][logstash.outputs.elasticsearch] ES Output version determined {:es_version=>7}
[2020-04-26T16:37:44,555][WARN ][logstash.outputs.elasticsearch] Detected a 6.x and above cluster: the `type` event field won't be used to determine the document _type {:es_version=>7}
[2020-04-26T16:37:44,586][INFO ][logstash.outputs.elasticsearch] New Elasticsearch output {:class=>"LogStash::Outputs::ElasticSearch", :hosts=>["http://elasticsearch:9200"]}
[2020-04-26T16:37:44,597][INFO ][logstash.javapipeline    ] Starting pipeline {:pipeline_id=>".monitoring-logstash", "pipeline.workers"=>1, "pipeline.batch.size"=>2, "pipeline.batch.delay"=>50, "pipeline.max_inflight"=>2, "pipeline.sources"=>["monitoring pipeline"], :thread=>"#<Thread:0x61a1055 run>"}
[2020-04-26T16:37:44,636][INFO ][logstash.javapipeline    ] Pipeline started {"pipeline.id"=>".monitoring-logstash"}
[2020-04-26T16:37:44,654][INFO ][logstash.agent           ] Pipelines running {:count=>2, :running_pipelines=>[:main, :".monitoring-logstash"], :non_running_pipelines=>[]}
[2020-04-26T16:37:44,899][INFO ][logstash.agent           ] Successfully started Logstash API endpoint {:port=>9600}

标签: elasticsearch, docker-compose, logstash, kibana

解决方案


推荐阅读