Open · carrotshub opened this issue 4 years ago
input {
  kafka {
    bootstrap_servers => ["xxxx:9093,xxx:9093,xxx:9093"]
    auto_offset_reset => "latest"
    consumer_threads => 5
    decorate_events => true
    topics => ["xxx-capture-package"]
    type => "middle-end"
    security_protocol => "SASL_PLAINTEXT"
    sasl_mechanism => "PLAIN"
    jaas_path => "/usr/share/logstash/kafka_client_jaas.conf"
    ssl_truststore_location => "/usr/share/logstash/kafka.server.truststore.jks"
    ssl_truststore_password => "xxx"
  }
}
Running Logstash in Docker with logstash-integration-kafka 10.4.0-java.
[ERROR] 2020-09-11 05:56:29.485 [[main]<kafka] javapipeline - A plugin had an unrecoverable error. Will restart this plugin.
  Pipeline_id: main
  Plugin: <LogStash::Inputs::Kafka auto_offset_reset=>"latest", topics=>["witness-capture-package"], ssl_truststore_location=>"/usr/share/logstash/kafka.server.truststore.jks", ssl_truststore_password=><password>, sasl_mechanism=>"PLAIN", consumer_threads=>5, security_protocol=>"SASL_PLAINTEXT", jaas_path=>"/usr/share/logstash/kafka_client_jaas.conf", id=>"fe7e3ef00c1cf9d26b5680b7d7a6b5c9bc88e4fbb9f67dd3aa82dca301917bab", type=>"middle-end", bootstrap_servers=>"xxxxx:9093", decorate_events=>true, enable_metric=>true, codec=><LogStash::Codecs::Plain id=>"plain_5fbe3ebc-e4d4-4338-9bd9-85d602a63f5c", enable_metric=>true, charset=>"UTF-8">, auto_commit_interval_ms=>5000, check_crcs=>true, client_dns_lookup=>"default", client_id=>"logstash", connections_max_idle_ms=>540000, enable_auto_commit=>true, fetch_max_bytes=>52428800, fetch_max_wait_ms=>500, group_id=>"logstash", heartbeat_interval_ms=>3000, isolation_level=>"read_uncommitted", key_deserializer_class=>"org.apache.kafka.common.serialization.StringDeserializer", max_poll_interval_ms=>300000, max_partition_fetch_bytes=>1048576, max_poll_records=>500, metadata_max_age_ms=>300000, receive_buffer_bytes=>32768, reconnect_backoff_ms=>50, request_timeout_ms=>40000, retry_backoff_ms=>100, send_buffer_bytes=>131072, session_timeout_ms=>10000, value_deserializer_class=>"org.apache.kafka.common.serialization.StringDeserializer", poll_timeout_ms=>100, ssl_endpoint_identification_algorithm=>"https">
  Error: Failed to construct kafka consumer
  Exception: Java::OrgApacheKafkaCommon::KafkaException
  Stack:
    org.apache.kafka.clients.consumer.KafkaConsumer.<init>(org/apache/kafka/clients/consumer/KafkaConsumer.java:820)
    org.apache.kafka.clients.consumer.KafkaConsumer.<init>(org/apache/kafka/clients/consumer/KafkaConsumer.java:666)
    org.apache.kafka.clients.consumer.KafkaConsumer.<init>(org/apache/kafka/clients/consumer/KafkaConsumer.java:646)
    jdk.internal.reflect.GeneratedConstructorAccessor42.newInstance(jdk/internal/reflect/GeneratedConstructorAccessor42)
    jdk.internal.reflect.DelegatingConstructorAccessorImpl.newInstance(jdk/internal/reflect/DelegatingConstructorAccessorImpl.java:45)
    java.lang.reflect.Constructor.newInstance(java/lang/reflect/Constructor.java:490)
    org.jruby.javasupport.JavaConstructor.newInstanceDirect(org/jruby/javasupport/JavaConstructor.java:285)
    org.jruby.RubyClass.newInstance(org/jruby/RubyClass.java:918)
    org.jruby.RubyClass$INVOKER$i$newInstance.call(org/jruby/RubyClass$INVOKER$i$newInstance.gen)
    usr.share.logstash.vendor.bundle.jruby.$2_dot_5_dot_0.gems.logstash_minus_integration_minus_kafka_minus_10_dot_4_dot_0_minus_java.lib.logstash.inputs.kafka.invokeOther152:new(/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/logstash-integration-kafka-10.4.0-java/lib/logstash/inputs/kafka.rb:346)
    usr.share.logstash.vendor.bundle.jruby.$2_dot_5_dot_0.gems.logstash_minus_integration_minus_kafka_minus_10_dot_4_dot_0_minus_java.lib.logstash.inputs.kafka.create_consumer(/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/logstash-integration-kafka-10.4.0-java/lib/logstash/inputs/kafka.rb:346)
    usr.share.logstash.vendor.bundle.jruby.$2_dot_5_dot_0.gems.logstash_minus_integration_minus_kafka_minus_10_dot_4_dot_0_minus_java.lib.logstash.inputs.kafka.invokeOther1:create_consumer(/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/logstash-integration-kafka-10.4.0-java/lib/logstash/inputs/kafka.rb:243)
    usr.share.logstash.vendor.bundle.jruby.$2_dot_5_dot_0.gems.logstash_minus_integration_minus_kafka_minus_10_dot_4_dot_0_minus_java.lib.logstash.inputs.kafka.run(/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/logstash-integration-kafka-10.4.0-java/lib/logstash/inputs/kafka.rb:243)
    org.jruby.RubyEnumerable$22.call(org/jruby/RubyEnumerable.java:902)
    org.jruby.RubyEnumerator$2.call(org/jruby/RubyEnumerator.java:404)
    org.jruby.RubyFixnum.times(org/jruby/RubyFixnum.java:291)
    org.jruby.RubyInteger$INVOKER$i$0$0$times.call(org/jruby/RubyInteger$INVOKER$i$0$0$times.gen)
    org.jruby.RubyClass.finvokeWithRefinements(org/jruby/RubyClass.java:514)
    org.jruby.RubyClass.finvoke(org/jruby/RubyClass.java:502)
    org.jruby.RubyBasicObject.callMethod(org/jruby/RubyBasicObject.java:393)
    org.jruby.RubyEnumerator.__each__(org/jruby/RubyEnumerator.java:400)
    org.jruby.RubyEnumerator.each(org/jruby/RubyEnumerator.java:396)
    org.jruby.RubyEnumerator$INVOKER$i$each.call(org/jruby/RubyEnumerator$INVOKER$i$each.gen)
    org.jruby.RubyClass.finvokeWithRefinements(org/jruby/RubyClass.java:497)
    org.jruby.RubyClass.finvoke(org/jruby/RubyClass.java:487)
    org.jruby.RubyEnumerable.callEach19(org/jruby/RubyEnumerable.java:119)
    org.jruby.RubyEnumerable.collectCommon(org/jruby/RubyEnumerable.java:894)
    org.jruby.RubyEnumerable.map(org/jruby/RubyEnumerable.java:886)
    org.jruby.RubyEnumerable$INVOKER$s$0$0$map.call(org/jruby/RubyEnumerable$INVOKER$s$0$0$map.gen)
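For reference, when jaas_path is set the Kafka client looks up a KafkaClient login context in that file. A minimal kafka_client_jaas.conf for the PLAIN mechanism looks roughly like the sketch below; the username and password are placeholders, not values from this report.

KafkaClient {
  org.apache.kafka.common.security.plain.PlainLoginModule required
  username="<kafka-username>"
  password="<kafka-password>";
};

If the file given in jaas_path is unreadable or does not contain a KafkaClient entry, the consumer cannot be constructed, which is consistent with (though not proven to be) the "Failed to construct kafka consumer" error above.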
Config File (sensitive info removed):

input {
  kafka {
    bootstrap_servers => ["IP:port"]
    client_id => "aggs-log-001"
    group_id => "aggs-log-001"
    auto_offset_reset => "latest"
    consumer_threads => 3
    decorate_events => false
    topics => ["cdn-live-streams"]
    enable_auto_commit => "true"
    sasl_jaas_config => "org.apache.kafka.common.security.plain.PlainLoginModule required username='username' password='password' serviceName='kafka';"
    sasl_mechanism => "PLAIN"
    security_protocol => "SASL_SSL"
    ssl_truststore_location => '/usr/share/logstash/pipeline/client.truststore.jks'
    ssl_truststore_password => "remove"
    codec => json { charset => "UTF-8" }
  }
}

filter {
  mutate {
    remove_field => ["msg", "topics", "level"]
  }
}

output {
  elasticsearch {
    hosts => ["http://ip:port"]
    index => "test_index"
  }
}
read'", "/usr/share/logstash/logstash-core/lib/logstash/config/source/local.rb:94:in
read'", "/usr/share/logstash/logstash-core/lib/logstash/config/source/local.rb:190:inlocal_pipeline_configs'", "/usr/share/logstash/logstash-core/lib/logstash/config/source/multi_local.rb:26:in
block in pipeline_configs'", "org/jruby/RubyArray.java:2577:inmap'", "/usr/share/logstash/logstash-core/lib/logstash/config/source/multi_local.rb:21:in
pipeline_configs'", "/usr/share/logstash/logstash-core/lib/logstash/config/source_loader.rb:61:inblock in fetch'", "org/jruby/RubyArray.java:2572:in
collect'", "/usr/share/logstash/logstash-core/lib/logstash/config/source_loader.rb:60:infetch'", "/usr/share/logstash/logstash-core/lib/logstash/agent.rb:148:in
converge_state_and_update'", "/usr/share/logstash/logstash-core/lib/logstash/agent.rb:96:inexecute'", "/usr/share/logstash/logstash-core/lib/logstash/runner.rb:362:in
block in execute'", "/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/stud-0.0.23/lib/stud/task.rb:24:in `block in initialize'"]}
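For comparison, a minimal SASL_SSL kafka input that declares the codec only once would look roughly like the sketch below. It is not a confirmed fix for the errors above; the broker addresses, topic, group, truststore path, and credentials are placeholders.

input {
  kafka {
    # Placeholder broker list; SASL_SSL listeners are commonly exposed on 9093
    bootstrap_servers => "broker1:9093,broker2:9093"
    topics => ["example-topic"]
    group_id => "example-group"
    security_protocol => "SASL_SSL"
    sasl_mechanism => "PLAIN"
    # Credentials inline via sasl_jaas_config instead of a separate jaas_path file
    sasl_jaas_config => "org.apache.kafka.common.security.plain.PlainLoginModule required username='<user>' password='<password>';"
    ssl_truststore_location => "/path/to/client.truststore.jks"
    ssl_truststore_password => "<truststore-password>"
    # Declare the codec exactly once per input
    codec => json { charset => "UTF-8" }
  }
}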