sanfrareau

Reputation: 113

Logstash index error: [logstash-*] IndexNotFoundException[no such index]

I am new to ELK. I am using:

- elasticsearch-2.1.0
- logstash-2.1.1
- kibana-4.3.0-windows

I tried to configure ELK to monitor my application logs. I followed different tutorials and tried different Logstash configurations, but I get this error when I start Kibana and it sends its request to Elasticsearch:

[logstash-*] IndexNotFoundException[no such index]

This is my logstash config:

input {
  file {
    path => "/var/logs/*.log"
    type => "syslog"
  }
}

filter {
  grok {
    match => [ "message", "%{COMBINEDAPACHELOG}" ]
  }
}

output {
  elasticsearch { hosts => localhost }
  stdout { codec => rubydebug }
}

I tried deleting the whole folder, re-installing, and following this tutorial step by step: https://www.elastic.co/guide/en/logstash/current/advanced-pipeline.html

But no index was created, and I got the same index error from Kibana against Elasticsearch again.

Any help?

Regards.

Debug logs:

C:\Users\xxx\Desktop\LOGS\logstash-2.1.1\bin>logstash -f first-pipeline.conf --debug
io/console not supported; tty will not be manipulated
Reading config file {:config_file=>"C:/Users/xxx/Desktop/LOGS/logstash-2.1.1/bin/first-pipeline.conf", :level=>:debug, :file=>"/Users/xxx/Desktop/LOGS/logstash-2.1.1/vendor/bundle/jruby/1.9/gems/logstash-core-2.1.1-java/lib/logstash/agent.rb", :line=>"325", :method=>"local_config"}
Compiled pipeline code:
        @inputs = []
        @filters = []
        @outputs = []
        @periodic_flushers = []
        @shutdown_flushers = []
        @input_file_1 = plugin("input", "file", LogStash::Util.hash_merge_many({ "path" => ("/var/logs/logstash-tutorial-dataset") }, { "start_position" => ("beginning") }))
        @inputs << @input_file_1
        @filter_grok_2 = plugin("filter", "grok", LogStash::Util.hash_merge_many({ "match" => {("message") => ("%{COMBINEDAPACHELOG}")} }))
        @filters << @filter_grok_2
            @filter_grok_2_flush = lambda do |options, &block|
              @logger.debug? && @logger.debug("Flushing", :plugin => @filter_grok_2)
              events = @filter_grok_2.flush(options)
              return if events.nil? || events.empty?
              @logger.debug? && @logger.debug("Flushing", :plugin => @filter_grok_2, :events => events)
                          events = @filter_geoip_3.multi_filter(events)
              events.each{|e| block.call(e)}
            end
            if @filter_grok_2.respond_to?(:flush)
              @periodic_flushers << @filter_grok_2_flush if @filter_grok_2.periodic_flush
              @shutdown_flushers << @filter_grok_2_flush
            end
          @filter_geoip_3 = plugin("filter", "geoip", LogStash::Util.hash_merge_many({ "source" => ("clientip") }))
          @filters << @filter_geoip_3
            @filter_geoip_3_flush = lambda do |options, &block|
              @logger.debug? && @logger.debug("Flushing", :plugin => @filter_geoip_3)
              events = @filter_geoip_3.flush(options)
              return if events.nil? || events.empty?
              @logger.debug? && @logger.debug("Flushing", :plugin => @filter_geoip_3, :events => events)
              events.each{|e| block.call(e)}
            end
            if @filter_geoip_3.respond_to?(:flush)
              @periodic_flushers << @filter_geoip_3_flush if @filter_geoip_3.periodic_flush
              @shutdown_flushers << @filter_geoip_3_flush
            end
          @output_elasticsearch_4 = plugin("output", "elasticsearch", LogStash::Util.hash_merge_many({ "hosts" => [("localhost")] }))
          @outputs << @output_elasticsearch_4
  def filter_func(event)
    events = [event]
    @logger.debug? && @logger.debug("filter received", :event => event.to_hash)
              events = @filter_grok_2.multi_filter(events)
              events = @filter_geoip_3.multi_filter(events)
    events
  end
  def output_func(event)
    @logger.debug? && @logger.debug("output received", :event => event.to_hash)
    @output_elasticsearch_4.handle(event)
  end {:level=>:debug, :file=>"/Users/xxx/Desktop/LOGS/logstash-2.1.1/vendor/bundle/jruby/1.9/gems/logstash-core-2.1.1-java/lib/logstash/pipeline.rb", :line=>"38", :method=>"initialize"}
Plugin not defined in namespace, checking for plugin file {:type=>"input", :name=>"file", :path=>"logstash/inputs/file", :level=>:debug, :file=>"/Users/xxx/Desktop/LOGS/logstash-2.1.1/vendor/bundle/jruby/1.9/gems/logstash-core-2.1.1-java/lib/logstash/plugin.rb", :line=>"76", :method=>"lookup"}
[...]
Logstash startup completed
Flushing buffer at interval {:instance=>"#<LogStash::Outputs::ElasticSearch::Buffer:0x75375e77@stopping=#<Concurrent::AtomicBoolean:0x61b12c0>, @last_flush=2015-12-29 15:45:27 +0000, @flush_thread=#<Thread:0x7008acbf run>, @max_size=500, @operations_lock=#<Java::JavaUtilConcurrentLocks::ReentrantLock:0x4985690f>, @submit_proc=#<Proc:0x3c9b0727@C:/Users/xxx/Desktop/LOGS/logstash-2.1.1/vendor/bundle/jruby/1.9/gems/logstash-output-elasticsearch-2.2.0-java/lib/logstash/outputs/elasticsearch/common.rb:55>, @flush_interval=1, @logger=#<Cabin::Channel:0x65f2b086 @subscriber_lock=#<Mutex:0x202361b4>, @data={}, @metrics=#<Cabin::Metrics:0x72e380e7 @channel=#<Cabin::Channel:0x65f2b086 ...>, @metrics={}, @metrics_lock=#<Mutex:0x3623f89e>>, @subscribers={12592=>#<Cabin::Outputs::IO:0x316290ee @lock=#<Mutex:0x3e191296>, @io=#<IO:fd 1>>}, @level=:debug>, @buffer=[], @operations_mutex=#<Mutex:0x601355b3>>", :interval=>1, :level=>:info, :file=>"/Users/xxx/Desktop/LOGS/logstash-2.1.1/vendor/bundle/jruby/1.9/gems/logstash-output-elasticsearch-2.2.0-java/lib/logstash/outputs/elasticsearch/buffer.rb", :line=>"90", :method=>"interval_flush"}
_globbed_files: /var/logs/logstash-tutorial-dataset: glob is: ["/var/logs/logstash-tutorial-dataset"] {:level=>:debug, :file=>"/Users/xxx/Desktop/LOGS/logstash-2.1.1/vendor/bundle/jruby/1.9/gems/filewatch-0.6.7/lib/filewatch/watch.rb", :line=>"190", :method=>"_globbed_files"}

elasticsearch.log:

[2015-12-29 15:15:01,702][WARN ][bootstrap                ] unable to install syscall filter: syscall filtering not supported for OS: 'Windows 8.1'
[2015-12-29 15:15:01,879][INFO ][node                     ] [Blue Marvel] version[2.1.1], pid[10152], build[40e2c53/2015-12-15T13:05:55Z]
[2015-12-29 15:15:01,880][INFO ][node                     ] [Blue Marvel] initializing ...
[2015-12-29 15:15:01,923][INFO ][plugins                  ] [Blue Marvel] loaded [], sites []
[2015-12-29 15:15:01,941][INFO ][env                      ] [Blue Marvel] using [1] data paths, mounts [[OS (C:)]], net usable_space [242.8gb], net total_space [458.4gb], spins? [unknown], types [NTFS]
[2015-12-29 15:15:03,135][INFO ][node                     ] [Blue Marvel] initialized
[2015-12-29 15:15:03,135][INFO ][node                     ] [Blue Marvel] starting ...
[2015-12-29 15:15:03,249][INFO ][transport                ] [Blue Marvel] publish_address {127.0.0.1:9300}, bound_addresses {127.0.0.1:9300}, {[::1]:9300}
[2015-12-29 15:15:03,255][INFO ][discovery                ] [Blue Marvel] elasticsearch/3DpYKTroSke4ruP21QefmA
[2015-12-29 15:15:07,287][INFO ][cluster.service          ] [Blue Marvel] new_master {Blue Marvel}{3DpYKTroSke4ruP21QefmA}{127.0.0.1}{127.0.0.1:9300}, reason: zen-disco-join(elected_as_master, [0] joins received)
[2015-12-29 15:15:07,377][INFO ][http                     ] [Blue Marvel] publish_address {127.0.0.1:9200}, bound_addresses {127.0.0.1:9200}, {[::1]:9200}
[2015-12-29 15:15:07,382][INFO ][node                     ] [Blue Marvel] started
[2015-12-29 15:15:07,399][INFO ][gateway                  ] [Blue Marvel] recovered [1] indices into cluster_state
[2015-12-29 16:33:00,715][INFO ][rest.suppressed          ] /logstash-$DATE/_search Params: {index=logstash-$DATE, q=response=200}
[logstash-$DATE] IndexNotFoundException[no such index]
    at org.elasticsearch.cluster.metadata.IndexNameExpressionResolver$WildcardExpressionResolver.resolve(IndexNameExpressionResolver.java:566)

Upvotes: 3

Views: 4217

Answers (3)

Sasin Wijayanga

Reputation: 9

This issue will be fixed by the below change to the Logstash config file.

input {
  file {
    path => "/path/to/logfile.log"
    start_position => beginning
  }
}

filter {

}

output {
  elasticsearch {
    hosts => ["localhost:9200"]
    index => "logstash-%{+YYYY.MM.dd}"
  }
  stdout { codec => rubydebug }
}
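
The filter block is left empty here; if you still want the Apache parsing from the question, the grok filter from the original config can go back in. A minimal sketch reusing the question's own pattern:

filter {
  grok {
    # Parse Apache combined-log-format lines into structured fields
    match => { "message" => "%{COMBINEDAPACHELOG}" }
  }
}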

Upvotes: 0

sanfrareau

Reputation: 113

I fixed the problem by adding this:

input {
  file {
    path => "/path/to/logstash-tutorial.log"
    start_position => beginning
    sincedb_path => "/dev/null"
  }
}

Now Logstash is creating the index in Elasticsearch.
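
One caveat (an assumption based on the Windows paths in the debug logs above): /dev/null does not exist on Windows, where the usual stand-in for throwing away the sincedb is the NUL device. A sketch of the same fix on a Windows host:

input {
  file {
    path => "C:/path/to/logstash-tutorial.log"
    start_position => "beginning"
    sincedb_path => "NUL"    # Windows stand-in for /dev/null
  }
}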

Upvotes: 0

manishrw

Reputation: 429

From my observation, it seems that you have not provided a port number in the Logstash output config. The port generally used for Elasticsearch is 9200 (the default), as instructed by most of the tutorials out there. Try changing the output part of your Logstash config to the following and let me know if it works:

output {
      elasticsearch { hosts => ["localhost:9200"] }
      stdout { codec => rubydebug }
}
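
For what it's worth, the hosts array also accepts multiple host:port entries if Elasticsearch runs somewhere other than the local machine (the second host name below is a placeholder for illustration):

output {
  elasticsearch {
    # Each entry may carry its own port; "es-node-2" is a hypothetical host
    hosts => ["localhost:9200", "es-node-2:9200"]
  }
  stdout { codec => rubydebug }
}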

Upvotes: 0
