index="_internal" source="*metrics.log*" group=tcpin_connections NOT eventType=* | eval Source=if(isnull(hostname), sourceHost, hostname) | eval connectionType=case(fwdType=="uf","Universal Forwarder", fwdType=="lwf","Lightweight Forwarder", fwdType=="full","Heavy Forwarder", connectionType=="cooked" OR connectionType=="cookedSSL","Splunk Forwarder", connectionType=="raw" OR connectionType=="rawSSL","Legacy Forwarder") | eval lastReceived=if(kb>0, _time, null()) | stats first(connectionType) as connectionType max(lastReceived) as lastReceived by Source | eval LastReceived=strftime(lastReceived, "%Y/%m/%d %H:%M:%S") | fields Source, connectionType, LastReceived | sort + LastReceived
0 comments
index=_internal sourcetype=splunkd group=tcpin_connections NOT eventType=* | eval Host=coalesce(hostname, sourceHost), version=coalesce(version, "< 4.2"), architecture=coalesce(arch, "unknown") | stats count by Host version architecture | sort version
0 comments
| metadata type=hosts | table * | append [| metadata type=sourcetypes | table *] | eval time=now()-lastTime, Source=if(isnull(host), sourcetype, host) | table Source type time lastTime totalCount | sort - lastTime | convert timeformat="%m/%d/%Y %H:%M:%S %z" ctime(lastTime) | rename time as "Time Since Last Event (sec)", lastTime as "Time of Last Event"
0 comments
index=_internal earliest=-48h latest=-24h | bucket _time span=15m | stats count by _time | eval window="Yesterday" | append [search index=_internal earliest=-24h | bucket _time span=15m | stats count by _time | eval window="Today", _time=_time-86400] | timechart span=15m sum(count) by window
0 comments
| eventcount summarize=false index=* | dedup index | table index
0 comments
index=_internal source=*license_usage.log type="RolloverSummary" earliest=-30d@d | timechart span=1d sum(b) AS "License Usage" | eval "License Usage"=round('License Usage'/1024/1024/1024, 2)
0 comments
| rest splunk_server=local /services/licenser/pools | rename title AS Pool | search [ rest splunk_server=local /services/licenser/groups | search is_active=1 | eval stack_id=stack_ids | fields stack_id ] | join type=outer stack_id [rest splunk_server=local /services/licenser/stacks | eval stack_id=title, stack_quota=quota | fields stack_id stack_quota] | stats sum(used_bytes) as used_bytes max(stack_quota) as total_quota | eval "Used (GB)"=round(used_bytes/1024/1024/1024,3), "Quota (GB)"=round(total_quota/1024/1024/1024,3)
0 comments
index=_internal sourcetype=splunkd group=queue (name=parsingQueue OR name=indexqueue OR name=tcpin_queue OR name=aggqueue) | eval blocked=if(blocked=="true",1,0), queued_host=host." - ".name | stats sparkline sum(blocked) as blocked,count by queued_host | eval block_ratio=round(blocked/count*100,2) | sort - block_ratio | eval Finding=case(block_ratio>50.0,"Critical",block_ratio>40.0,"Warning",block_ratio>20.0,"Low",1=1,"Healthy")
0 comments
| tstats count where index=* by _time span=1d index | fields - _time | sort - count
0 comments
| tstats count values(host) AS host values(source) AS source values(sourcetype) AS sourcetype by index
0 comments