ChrisCalzaretta opened 5 years ago
Here is some more info.
Here is a sample of the JSON from the flow log:
{
  "records": [
    {
      "time": "2019-03-20T15:00:56.6983518Z",
      "systemId": "2fbb240e-356f-4c3f-abaf-d72a1bb0c00x",
      "category": "NetworkSecurityGroupFlowEvent",
      "resourceId": "/SUBSCRIPTIONS/{SUBSCRIPTIONID}/RESOURCEGROUPS/MSAWEST-PROD-APP-RG01/PROVIDERS/MICROSOFT.NETWORK/NETWORKSECURITYGROUPS/{NSGNAME}",
      "operationName": "NetworkSecurityGroupFlowEvents",
      "properties": {
        "Version": 2,
        "flows": [
          {
            "rule": "DefaultRule_DenyAllInBound",
            "flows": [
              {
                "mac": "000D3A3128xx",
                "flowTuples": [
                  "1553094020,185.xx.xx.xx,10.xx.xx.xx,48724,12576,T,I,D,B,,,,",
                  "1553094026,185.xx.xx.xx,xx.xx.xx.xx,41388,2939,T,I,D,B,,,,"
                ]
              }
            ]
          },
          {
            "rule": "DefaultRule_AllowVnetOutBound",
            "flows": [
              {
                "mac": "000D3A3128xx",
                "flowTuples": [
                  "1553093995,10.xx.xx.xx,10.xx.xx.xx,52973,1433,T,O,A,B,,,,"
                ]
              }
            ]
          }
        ]
      }
    }
  ]
}
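For reference, each string in flowTuples is a comma-separated record: Unix timestamp, source IP, destination IP, source port, destination port, protocol (T/U), traffic direction (I/O), traffic decision (A/D), and, in Version 2 logs, a flow state (B/C/E) followed by four packet/byte counters (empty on B records, as above). Here is a stand-alone sketch of parsing one tuple with a csv filter — the [flowTuple] field name is hypothetical, and it assumes the tuple string has already been split out into its own event:

filter {
  # Hypothetical: assumes each event carries one tuple string in [flowTuple].
  csv {
    source    => "flowTuple"
    separator => ","
    columns   => [
      "unixtimestamp", "srcIp", "destIp", "srcPort", "destPort",
      "protocol", "direction", "decision", "flowState",
      "packetsSrcToDest", "bytesSrcToDest",
      "packetsDestToSrc", "bytesDestToSrc"
    ]
  }
}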
Did you ever find a solution to this? Struggling with the same thing.
Hi Chris, was that the entire error or did you cut it off halfway? I had a similar problem where the strings from the NSG flow block blob weren't getting assembled correctly into JSON, and I had to switch to the plain codec in the input plugin, clean up the string in the filter, and THEN convert it to JSON before letting the rest of the filter pipeline do its thing...
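If it helps, a minimal sketch of that approach looks like the below. The gsub pattern is a placeholder — the exact cleanup depends on how your blob blocks come out, so treat it as an assumption to adapt rather than a drop-in fix:

input {
  azureblob {
    storage_account_name => ""
    storage_access_key   => ""
    container            => "insights-logs-networksecuritygroupflowevent"
    # Read the blob as raw text instead of letting the input decode JSON.
    codec => "plain"
  }
}

filter {
  mutate {
    # Placeholder cleanup: repair whatever the block assembly broke,
    # e.g. back-to-back objects; adjust the pattern to your actual data.
    gsub => [ "message", "\\}\\s*\\{", "},{" ]
  }
  json {
    # Only now parse the repaired string into structured fields.
    source       => "message"
    remove_field => [ "message" ]
  }
}

Once the json filter succeeds, the rest of your split/mutate pipeline can run unchanged.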
I am looking at the NSG config here: https://github.com/Azure/azure-diagnostics-tools/tree/master/Logstash/logstash-input-azureblob. I copied it and updated it with my keys. This is what I am using:

input {
  azureblob {
    storage_account_name => ""
    storage_access_key   => ""
    container            => "insights-logs-networksecuritygroupflowevent"
    codec                => "json"
    # Refer https://docs.microsoft.com/en-us/azure/network-watcher/network-watcher-read-nsg-flow-logs
  }
}

filter {
  split { field => "[records]" }
  split { field => "[records][properties][flows]" }
  split { field => "[records][properties][flows][flows]" }
  split { field => "[records][properties][flows][flows][flowTuples]" }

  mutate {
    split => { "[records][resourceId]" => "/" }
    add_field => {
      "Subscription"         => "%{[records][resourceId][2]}"
      "ResourceGroup"        => "%{[records][resourceId][4]}"
      "NetworkSecurityGroup" => "%{[records][resourceId][8]}"
    }
    convert => { "Subscription" => "string" }
    convert => { "ResourceGroup" => "string" }
    convert => { "NetworkSecurityGroup" => "string" }
    split => { "[records][properties][flows][flows][flowTuples]" => "," }
    add_field => {
      "unixtimestamp" => "%{[records][properties][flows][flows][flowTuples][0]}"
      "srcIp"         => "%{[records][properties][flows][flows][flowTuples][1]}"
      "destIp"        => "%{[records][properties][flows][flows][flowTuples][2]}"
      "srcPort"       => "%{[records][properties][flows][flows][flowTuples][3]}"
      "destPort"      => "%{[records][properties][flows][flows][flowTuples][4]}"
      "protocol"      => "%{[records][properties][flows][flows][flowTuples][5]}"
      "trafficflow"   => "%{[records][properties][flows][flows][flowTuples][6]}"
      "traffic"       => "%{[records][properties][flows][flows][flowTuples][7]}"
    }
    convert => { "unixtimestamp" => "integer" }
    convert => { "srcPort" => "integer" }
    convert => { "destPort" => "integer" }
  }

  date {
    match => [ "unixtimestamp", "UNIX" ]
  }
}

output {
  stdout { codec => rubydebug }
}
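One thing I noticed: if I remember the README example right, it also sets two byte-trimming options on the input for this container, which I left out above. I am not sure whether they matter here, but including them for completeness:

input {
  azureblob {
    storage_account_name => ""
    storage_access_key   => ""
    container            => "insights-logs-networksecuritygroupflowevent"
    codec                => "json"
    # From the README sample (as I recall): bytes of JSON envelope to strip
    # when the plugin stitches appended blocks back together.
    file_head_bytes => 12
    file_tail_bytes => 2
  }
}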
When Logstash tries to parse the JSON I get errors like these (lines truncated at the $):

JSON parse error, original data now in message field {:error=>#<LogStash::Json::ParserError: Unexpected character ('V' (code 86)$ at [Source: (String)"{"records":[{"time":"{"Version":1,"flows":[{"rule":"UserRule_Allow_SQL_Traffic_Inbound","flows":[{"mac":"000D3A59EE5C","flowTuples":["1553036181,10.190.40.36,10.190.4$

[2019-03-19T23:01:14,346][ERROR][logstash.codecs.json ] JSON parse error, original data now in message field {:error=>#<LogStash::Json::ParserError: Unexpected character ('t' (code 116$ at [Source: (String)"{"records":[{"time":"{"time":"2019-03-19T22:58:08.7849961Z","systemId":"f601256d-e07c-4c32-926f-bf4421944f63","category":"NetworkSecurityGroupFlowEvent","resourceId":$
Can you give me a hand with getting past this error?
Thank you
Chris