Open · sanpichen opened this issue 6 years ago
You can specify a remote ES in System Settings -> Main (tab) -> "Use a custom Elasticsearch server".
Then you also need to adjust the Logstash configuration accordingly (under /etc/logstash/conf.d).
Thanks @regit / @biolds for the heads up!
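For example, the elasticsearch output in /etc/logstash/conf.d/logstash.conf would need to point at the remote host, roughly like this (the host, port and template path below are placeholders based on this thread, not a definitive configuration):

output {
  elasticsearch {
    # remote ES host:port - placeholder values, adjust to your environment
    hosts => ["10.194.13.18:19200"]
    index => "logstash-%{+YYYY.MM.dd}"
    template => "/etc/logstash/elasticsearch5-template.json"
  }
}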
I am afraid that is not enough.
[2018-06-01T21:20:42,196][INFO ][logstash.outputs.elasticsearch] Attempting to install template {:manage_template=>{"template"=>"logstash-", "version"=>50001, "settings"=>{"number_of_replicas"=>0, "index.refresh_interval"=>"5s"}, "mappings"=>{"default"=>{"_all"=>{"enabled"=>true, "norms"=>false}, "dynamic_templates"=>[{"message_field"=>{"path_match"=>"message", "match_mapping_type"=>"string", "mapping"=>{"type"=>"text", "norms"=>false}}}, {"string_fields"=>{"match"=>"", "match_mapping_type"=>"string", "mapping"=>{"type"=>"text", "norms"=>false, "fields"=>{"keyword"=>{"type"=>"keyword", "index"=>"not_analyzed", "ignore_above"=>256}, "raw"=>{"type"=>"keyword", "index"=>"not_analyzed", "ignore_above"=>256}}}}}], "properties"=>{"@timestamp"=>{"type"=>"date", "include_in_all"=>false}, "@version"=>{"type"=>"keyword", "include_in_all"=>false}, "geoip"=>{"dynamic"=>true, "properties"=>{"ip"=>{"type"=>"ip"}, "location"=>{"type"=>"geo_point"}, "latitude"=>{"type"=>"half_float"}, "longitude"=>{"type"=>"half_float"}}}, "dest_ip"=>{"type"=>"ip", "fields"=>{"raw"=>{"index"=>"not_analyzed", "type"=>"keyword"}, "keyword"=>{"index"=>"not_analyzed", "type"=>"keyword"}}}, "src_ip"=>{"type"=>"ip", "fields"=>{"raw"=>{"index"=>"not_analyzed", "type"=>"keyword"}, "keyword"=>{"index"=>"not_analyzed", "type"=>"keyword"}}}}}}}}
[2018-06-01T21:20:42,230][INFO ][logstash.outputs.elasticsearch] New Elasticsearch output {:class=>"LogStash::Outputs::ElasticSearch", :hosts=>["//10.194.xxx.xxx:19200"]}
[2018-06-01T21:20:42,405][INFO ][logstash.outputs.elasticsearch] Elasticsearch pool URLs updated {:changes=>{:removed=>[], :added=>[http://10.194.xxx.xxx:19200/]}}
[2018-06-01T21:20:42,417][INFO ][logstash.outputs.elasticsearch] Running health check to see if an Elasticsearch connection is working {:healthcheck_url=>http://10.194.xxx.xxx:19200/, :path=>"/"}
[2018-06-01T21:20:42,486][WARN ][logstash.outputs.elasticsearch] Restored connection to ES instance {:url=>"http://10.194.xxx.xxx:19200/"}
[2018-06-01T21:20:42,515][INFO ][logstash.outputs.elasticsearch] Using mapping template from {:path=>"/etc/logstash/elasticsearch5-template.json"}
[2018-06-01T21:20:42,521][INFO ][logstash.outputs.elasticsearch] Attempting to install template {:manage_template=>{"template"=>"logstash-", "version"=>50001, "settings"=>{"number_of_replicas"=>0, "index.refresh_interval"=>"5s"}, "mappings"=>{"default"=>{"_all"=>{"enabled"=>true, "norms"=>false}, "dynamic_templates"=>[{"message_field"=>{"path_match"=>"message", "match_mapping_type"=>"string", "mapping"=>{"type"=>"text", "norms"=>false}}}, {"string_fields"=>{"match"=>"", "match_mapping_type"=>"string", "mapping"=>{"type"=>"text", "norms"=>false, "fields"=>{"keyword"=>{"type"=>"keyword", "index"=>"not_analyzed", "ignore_above"=>256}, "raw"=>{"type"=>"keyword", "index"=>"not_analyzed", "ignore_above"=>256}}}}}], "properties"=>{"@timestamp"=>{"type"=>"date", "include_in_all"=>false}, "@version"=>{"type"=>"keyword", "include_in_all"=>false}, "geoip"=>{"dynamic"=>true, "properties"=>{"ip"=>{"type"=>"ip"}, "location"=>{"type"=>"geo_point"}, "latitude"=>{"type"=>"half_float"}, "longitude"=>{"type"=>"half_float"}}}, "dest_ip"=>{"type"=>"ip", "fields"=>{"raw"=>{"index"=>"not_analyzed", "type"=>"keyword"}, "keyword"=>{"index"=>"not_analyzed", "type"=>"keyword"}}}, "src_ip"=>{"type"=>"ip", "fields"=>{"raw"=>{"index"=>"not_analyzed", "type"=>"keyword"}, "keyword"=>{"index"=>"not_analyzed", "type"=>"keyword"}}}}}}}}
[2018-06-01T21:20:42,570][INFO ][logstash.outputs.elasticsearch] New Elasticsearch output {:class=>"LogStash::Outputs::ElasticSearch", :hosts=>["//10.194.xxx.xxx:19200"]}
[2018-06-01T21:20:42,697][INFO ][logstash.outputs.elasticsearch] Elasticsearch pool URLs updated {:changes=>{:removed=>[], :added=>[http://127.0.0.1:9200/]}}
[2018-06-01T21:20:42,728][INFO ][logstash.outputs.elasticsearch] Running health check to see if an Elasticsearch connection is working {:healthcheck_url=>http://127.0.0.1:9200/, :path=>"/"}
[2018-06-01T21:20:43,477][WARN ][logstash.outputs.elasticsearch] Restored connection to ES instance {:url=>"http://127.0.0.1:9200/"}
[2018-06-01T21:20:43,563][INFO ][logstash.outputs.elasticsearch] Using mapping template from {:path=>"/etc/logstash/elasticsearch5-template.json"}
[2018-06-01T21:20:43,569][INFO ][logstash.outputs.elasticsearch] Attempting to install template {:manage_template=>{"template"=>"logstash-", "version"=>50001, "settings"=>{"number_of_replicas"=>0, "index.refresh_interval"=>"5s"}, "mappings"=>{"default"=>{"_all"=>{"enabled"=>true, "norms"=>false}, "dynamic_templates"=>[{"message_field"=>{"path_match"=>"message", "match_mapping_type"=>"string", "mapping"=>{"type"=>"text", "norms"=>false}}}, {"string_fields"=>{"match"=>"", "match_mapping_type"=>"string", "mapping"=>{"type"=>"text", "norms"=>false, "fields"=>{"keyword"=>{"type"=>"keyword", "index"=>"not_analyzed", "ignore_above"=>256}, "raw"=>{"type"=>"keyword", "index"=>"not_analyzed", "ignore_above"=>256}}}}}], "properties"=>{"@timestamp"=>{"type"=>"date", "include_in_all"=>false}, "@version"=>{"type"=>"keyword", "include_in_all"=>false}, "geoip"=>{"dynamic"=>true, "properties"=>{"ip"=>{"type"=>"ip"}, "location"=>{"type"=>"geo_point"}, "latitude"=>{"type"=>"half_float"}, "longitude"=>{"type"=>"half_float"}}}, "dest_ip"=>{"type"=>"ip", "fields"=>{"raw"=>{"index"=>"not_analyzed", "type"=>"keyword"}, "keyword"=>{"index"=>"not_analyzed", "type"=>"keyword"}}}, "src_ip"=>{"type"=>"ip", "fields"=>{"raw"=>{"index"=>"not_analyzed", "type"=>"keyword"}, "keyword"=>{"index"=>"not_analyzed", "type"=>"keyword"}}}}}}}}
[2018-06-01T21:20:43,626][INFO ][logstash.outputs.elasticsearch] New Elasticsearch output {:class=>"LogStash::Outputs::ElasticSearch", :hosts=>["//127.0.0.1"]}
[2018-06-01T21:20:43,641][INFO ][logstash.outputs.elasticsearch] Elasticsearch pool URLs updated {:changes=>{:removed=>[], :added=>[http://127.0.0.1:9200/]}}
[2018-06-01T21:20:43,648][INFO ][logstash.outputs.elasticsearch] Running health check to see if an Elasticsearch connection is working {:healthcheck_url=>http://127.0.0.1:9200/, :path=>"/"}
[2018-06-01T21:20:44,485][WARN ][logstash.outputs.elasticsearch] Restored connection to ES instance {:url=>"http://127.0.0.1:9200/"}
Also, is there something I should do about EveBox and Kibana? They don't seem to work correctly.
Can you please post your Logstash config? Also your /etc/scirius/local_settings.py?
-- Regards, Peter Manev
root@SELKS131:/etc/systemd/system/multi-user.target.wants# cat /etc/logstash/conf.d/logstash.conf

input {
  file {
    path => ["/var/log/suricata/*.json"]
    sincedb_path => ["/var/cache/logstash/sincedbs/since.db"]
    codec => json
    type => "SELKS"
  }
}

filter {
  if [type] == "SELKS" {
    date {
      match => [ "timestamp", "ISO8601" ]
    }
    ruby {
      code => "
        if event.get('[event_type]') == 'fileinfo'
          event.set('[fileinfo][type]', event.get('[fileinfo][magic]').to_s.split(',')[0])
        end
      "
    }
    ruby {
      code => "
        if event.get('[event_type]') == 'alert'
          sp = event.get('[alert][signature]').to_s.split(' group ')
          if (sp.length == 2) and /\A\d+\z/.match(sp[1])
            event.set('[alert][signature]', sp[0])
          end
        end
      "
    }
    metrics {
      meter => [ "eve_insert" ]
      add_tag => "metric"
      flush_interval => 60
    }
  }

  if [http] {
    useragent {
      source => "[http][http_user_agent]"
      target => "[http][user_agent]"
    }
  }

  if [src_ip] {
    geoip {
      source => "src_ip"
      target => "geoip"
      add_field => [ "[geoip][coordinates]", "%{[geoip][longitude]}" ]
      add_field => [ "[geoip][coordinates]", "%{[geoip][latitude]}" ]
    }
    mutate {
      convert => [ "[geoip][coordinates]", "float" ]
    }
    if ![geoip.ip] {
      if [dest_ip] {
        geoip {
          source => "dest_ip"
          target => "geoip"
          #database => "/opt/logstash/vendor/geoip/GeoLiteCity.dat"
          add_field => [ "[geoip][coordinates]", "%{[geoip][longitude]}" ]
          add_field => [ "[geoip][coordinates]", "%{[geoip][latitude]}" ]
        }
        mutate {
          convert => [ "[geoip][coordinates]", "float" ]
        }
      }
    }
  }
}

output {
  if [event_type] and [event_type] != 'stats' {
    elasticsearch {
      hosts => "10.194.13.18:19200"
      index => "logstash-%{event_type}-%{+YYYY.MM.dd}"
      template => "/etc/logstash/elasticsearch5-template.json"
    }
  } else {
    elasticsearch {
      hosts => "10.194.13.18:19200"
      index => "logstash-%{+YYYY.MM.dd}"
      template => "/etc/logstash/elasticsearch5-template.json"
    }
  }
}
root@SELKS131:/var/log/logstash# cat /etc/scirius/local_settings.py

import os

BASE_DIR = "/var/lib/scirius/"
GIT_SOURCES_BASE_DIRECTORY = os.path.join(BASE_DIR, 'git-sources/')

SECRET_KEY = 'p7o6%vq))7h3li08c%k3id(wwo*u(^dbdmx2tv#t(tb2pr9@n-'

USE_ELASTICSEARCH = True
ELASTICSEARCH_ADDRESS = "10.194.13.18:19200"
ELASTICSEARCH_VERSION = 5
KIBANA_VERSION = 4
KIBANA_INDEX = ".kibana"
KIBANA_URL = "http://localhost:5601"
KIBANA_DASHBOARDS_PATH = "/opt/selks/kibana5-dashboards/"
USE_KIBANA = True
KIBANA_PROXY = True

USE_EVEBOX = True
EVEBOX_ADDRESS = "localhost:5636"

USE_SURICATA_STATS = True
USE_LOGSTASH_STATS = True
STATIC_ROOT = "/var/lib/scirius/static/"

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db', 'db.sqlite3'),
    }
}
DBBACKUP_STORAGE_OPTIONS = {'location': '/var/backups/'}

ELASTICSEARCH_LOGSTASH_ALERT_INDEX = "logstash-alert-"
SURICATA_NAME_IS_HOSTNAME = True
ALLOWED_HOSTS = ["*"]
You may want to adjust the KIBANA_URL in the settings file. You have already pointed (from the web interface) to the remote ES, right? Can you check whether you receive/ingest records in the remote ES to begin with?
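For example, you can check directly on the remote ES (the host:port here is the one mentioned earlier in this thread, adjust to your setup):

# list indices and their document counts on the remote ES
curl -s 'http://10.194.13.18:19200/_cat/indices?v'

# and check the cluster health
curl -s 'http://10.194.13.18:19200/_cluster/health?pretty'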
Can you give more details about the "KIBANA_URL" setting, please? As for checking the remote ES, do you mean like this? The "docs.count" is small and does not increase at all:
| health | status | index                        | uuid                   | pri | rep | docs.count | docs.deleted | store.size | pri.store.size |
|--------|--------|------------------------------|------------------------|-----|-----|------------|--------------|------------|----------------|
| green  | open   | .kibana                      | vkyqzmtMQXGBezis6_f_0w | 5   | 0   | 248        | 0            | 470.4kb    | 470.4kb        |
| green  | open   | logstash-fileinfo-2018.06.02 | sREEtQo0Q36De7jigPRbNA | 5   | 0   | 171        | 0            | 1.1mb      | 1.1mb          |
| green  | open   | logstash-flow-2018.06.02     | 7bpF9hM9TsGM06SiKL9dwA | 5   | 0   | 43         | 0            | 199kb      | 199kb          |
| green  | open   | logstash-http-2018.06.02     | BYYi96YGTTaAecLiGIQzJw | 5   | 0   | 84         | 0            | 832.6kb    | 832.6kb        |
For Kibana I meant this setting:
https://github.com/StamusNetworks/scirius/blob/master/scirius/settings.py#L201
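For illustration, the Kibana-related lines in /etc/scirius/local_settings.py could look roughly like this when only Elasticsearch is remote and Kibana still runs locally on the SELKS box (the values below are taken from this thread as examples, adjust to your environment):

# /etc/scirius/local_settings.py (excerpt) - example values only
USE_KIBANA = True
KIBANA_PROXY = True                            # Scirius proxies requests to Kibana
KIBANA_URL = "http://localhost:5601"           # where the Kibana HTTP service listens
ELASTICSEARCH_ADDRESS = "10.194.13.18:19200"   # the remote ES used by Scirius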
If you don't have any data being ingested into ES, there may be two possibilities that are worth exploring:
eve.json
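As a rough sketch of the eve.json side of that check, assuming the paths from the Logstash config above:

# is Suricata actually writing events? (path taken from the Logstash config above)
ls -lh /var/log/suricata/*.json
tail -n 1 /var/log/suricata/eve.json

# is Logstash reporting errors? (log file name may differ by version)
tail -n 50 /var/log/logstash/logstash-plain.log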
What is the guide if I have to configure my SELKS to use an ES on another server?