Splunk Search

Comparing the last 2 weeks' average against yesterday's events

dpatiladobe
Explorer

I am trying to get the last 2 weeks of data, average it over the weekdays, and compare that against yesterday's event count to detect any issues.

index=xxxxx host="xxxxx" earliest=-2w@w latest=@w date_wday!=saturday date_wday!=sunday
| bin span=1d _time
| eval marker=if(_time<relative_time(now(),"-w@w"), "LastWeek_Weekdays", "ThisWeek_Weekdays")
| eval _time=if(marker=="LastWeek_Weekdays", _time + 7*24*60*60, _time)
| rex "Current Partition:(?<PartitionNumber>\d+),Offset:(?<Offset>\d+),triggerID:(?<TriggerId>\S+),Outputsystem:\d+,IsprodTrigger:\S+,triggerTimeStamp:\d+-\d+-\d+\W+\d+:\d+:\d+.\d+,StartTime:\d+-\d+-\d+\W+\d+:\d+:\d+.\d+,StartDelay:(?<DelayLag>\d+),batchCount:(?<batchCount>\d+),timeGT:(?<createtime>\d+-\d+-\d+\W+\d+:\d+:\d+.\d+)"
| dedup PartitionNumber, Offset, TriggerId
| chart limit=0 count by TriggerId marker
| eval LastWeek_Weekdays_Avg=round(LastWeek_Weekdays/5,0)
| append
    [ search host="xxxx" earliest=-1d@d latest=@d
    | bin span=1d _time
    | eval marker=if(_time<relative_time(now(),"-1d@d"), "Yesterday", "Today")
    | rex "Current Partition:(?<PartitionNumber>\d+),Offset:(?<Offset>\d+),triggerID:(?<TriggerId>\S+),Outputsystem:\d+,IsprodTrigger:\S+,triggerTimeStamp:\d+-\d+-\d+\W+\d+:\d+:\d+.\d+,StartTime:\d+-\d+-\d+\W+\d+:\d+:\d+.\d+,StartDelay:(?<DelayLag>\d+),batchCount:(?<batchCount>\d+),timeGT:(?<createtime>\d+-\d+-\d+\W+\d+:\d+:\d+.\d+)"
    | dedup PartitionNumber, Offset, TriggerId
    | chart count by TriggerId, marker ]
| eval diff_In_Percentage=((Yesterday-LastWeek_Weekdays_Avg)/LastWeek_Weekdays_Avg)*100

And the output should look like this:

    TriggerId   LastWeek_Weekdays   LastWeek_Weekdays_Avg   ThisWeek_Weekdays   Yesterday   diff_In_Percentage
    xyz         32301               6460                    26118               xx          xx%
    abc         2146                429                     1876                xx          xx%
    abc123      4                   1                       5                   xx          xx%

somesoni2
Revered Legend

Give this a try

index=xxxxx host="xxxxx" earliest=-2w@w latest=@w date_wday!=saturday date_wday!=sunday
| bin span=1d _time
| rex "Current Partition:(?<PartitionNumber>\d+),Offset:(?<Offset>\d+),triggerID:(?<TriggerId>\S+),Outputsystem:\d+,IsprodTrigger:\S+,triggerTimeStamp:\d+-\d+-\d+\W+\d+:\d+:\d+.\d+,StartTime:\d+-\d+-\d+\W+\d+:\d+:\d+.\d+,StartDelay:(?<DelayLag>\d+),batchCount:(?<batchCount>\d+),timeGT:(?<createtime>\d+-\d+-\d+\W+\d+:\d+:\d+.\d+)"
| dedup PartitionNumber, Offset, TriggerId
| stats count by TriggerId _time
| stats avg(count) as LastWeek_Weekdays_Avg by TriggerId
| eval LastWeek_Weekdays_Avg=round(LastWeek_Weekdays_Avg,0)
| append
    [ search host="xxxx" earliest=-1d@d latest=@d
    | bin span=1d _time
    | rex "Current Partition:(?<PartitionNumber>\d+),Offset:(?<Offset>\d+),triggerID:(?<TriggerId>\S+),Outputsystem:\d+,IsprodTrigger:\S+,triggerTimeStamp:\d+-\d+-\d+\W+\d+:\d+:\d+.\d+,StartTime:\d+-\d+-\d+\W+\d+:\d+:\d+.\d+,StartDelay:(?<DelayLag>\d+),batchCount:(?<batchCount>\d+),timeGT:(?<createtime>\d+-\d+-\d+\W+\d+:\d+:\d+.\d+)"
    | dedup PartitionNumber, Offset, TriggerId
    | stats count as Yesterday by TriggerId ]
| stats values(LastWeek_Weekdays_Avg) as LastWeek_Weekdays_Avg values(Yesterday) as Yesterday by TriggerId
| eval diff_In_Percentage=((Yesterday-LastWeek_Weekdays_Avg)/LastWeek_Weekdays_Avg)*100

dpatiladobe
Explorer

I was able to get a workaround with the query below, but if a TriggerId is not present in the last 2 weeks and is present in yesterday's data, it messes up the whole calculation.

index=xxx host="xxxx" source="xxx*.log*" earliest=-2w@w latest=@w date_wday!=saturday date_wday!=sunday
| bin span=1d _time
| eval marker=if(_time<relative_time(now(),"-w@w"), "Last2Week_Weekdays", "LastWeek_Weekdays")
| eval _time=if(marker=="Last2Week_Weekdays", _time + 7*24*60*60, _time)
| rex "Current Partition:(?<PartitionNumber>\d+),Offset:(?<Offset>\d+),triggerID:(?<TriggerId>\S+),Outputsystem:\d+,IsprodTrigger:\S+,triggerTimeStamp:\d+-\d+-\d+\W+\d+:\d+:\d+.\d+,StartTime:\d+-\d+-\d+\W+\d+:\d+:\d+.\d+,StartDelay:(?<DelayLag>\d+),batchCount:(?<batchCount>\d+),timeGT:(?<createtime>\d+-\d+-\d+\W+\d+:\d+:\d+.\d+)"
| dedup PartitionNumber, Offset, TriggerId
| chart limit=0 count by TriggerId marker
| eval Last2Week_Weekdays_Avg=round(Last2Week_Weekdays/5,0)
| eval LastWeek_Weekdays_Avg=round(LastWeek_Weekdays/5,0)
| appendcols
    [ search host="xxxx" source="xxx*.log*" earliest=-1d@d latest=@d date_wday!=saturday date_wday!=sunday
    | bin span=1d _time
    | eval marker=if(_time<relative_time(now(),"-0d@d"), "Yesterday", "ThisWeek_Weekdays")
    | eval _time=if(marker=="Yesterday", _time + 1*24*60*60, _time)
    | rex "Current Partition:(?<PartitionNumber>\d+),Offset:(?<Offset>\d+),triggerID:(?<TriggerId>\S+),Outputsystem:\d+,IsprodTrigger:\S+,triggerTimeStamp:\d+-\d+-\d+\W+\d+:\d+:\d+.\d+,StartTime:\d+-\d+-\d+\W+\d+:\d+:\d+.\d+,StartDelay:(?<DelayLag>\d+),batchCount:(?<batchCount>\d+),timeGT:(?<createtime>\d+-\d+-\d+\W+\d+:\d+:\d+.\d+)"
    | dedup PartitionNumber, Offset, TriggerId
    | chart count by TriggerId, marker ]
| eval diff_In_Percentage_Week_2=((Yesterday-Last2Week_Weekdays_Avg)/Last2Week_Weekdays_Avg)*100
| eval diff_In_Percentage_Week_Last=((Yesterday-LastWeek_Weekdays_Avg)/LastWeek_Weekdays_Avg)*100

somesoni2
Revered Legend

You can assign a default value, say 0, to the LastWeek_Weekdays_Avg field if it's null (TriggerId present yesterday but not in the last 2 weeks), and vice versa. Or you can just put NA where either Yesterday or LastWeek_Weekdays_Avg is null, so that no calculation is done.
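
Both options come down to a couple of eval lines added before the percentage calculation. For example (untested sketch, using the field names from the searches above):

| eval LastWeek_Weekdays_Avg=coalesce(LastWeek_Weekdays_Avg, 0)
| eval Yesterday=coalesce(Yesterday, 0)
| eval diff_In_Percentage=if(LastWeek_Weekdays_Avg==0, "NA", round(((Yesterday-LastWeek_Weekdays_Avg)/LastWeek_Weekdays_Avg)*100, 2))

The coalesce calls cover the default-to-0 option, and the if guard prints NA instead of dividing by zero. If you prefer the NA-only option, drop the coalesce lines and test isnull(Yesterday) OR isnull(LastWeek_Weekdays_Avg) in the if condition instead.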


dpatiladobe
Explorer

Thanks. Instead of append I used join and it works perfectly.
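
For anyone finding this later, a join-based variant of the search above would look roughly like this (untested sketch; the exact query isn't posted in the thread):

index=xxxxx host="xxxxx" earliest=-2w@w latest=@w date_wday!=saturday date_wday!=sunday
| bin span=1d _time
| rex "Current Partition:(?<PartitionNumber>\d+),Offset:(?<Offset>\d+),triggerID:(?<TriggerId>\S+),Outputsystem:\d+,IsprodTrigger:\S+,triggerTimeStamp:\d+-\d+-\d+\W+\d+:\d+:\d+.\d+,StartTime:\d+-\d+-\d+\W+\d+:\d+:\d+.\d+,StartDelay:(?<DelayLag>\d+),batchCount:(?<batchCount>\d+),timeGT:(?<createtime>\d+-\d+-\d+\W+\d+:\d+:\d+.\d+)"
| dedup PartitionNumber, Offset, TriggerId
| stats count by TriggerId _time
| stats avg(count) as LastWeek_Weekdays_Avg by TriggerId
| join type=left TriggerId
    [ search host="xxxx" earliest=-1d@d latest=@d
    | rex "Current Partition:(?<PartitionNumber>\d+),Offset:(?<Offset>\d+),triggerID:(?<TriggerId>\S+),Outputsystem:\d+,IsprodTrigger:\S+,triggerTimeStamp:\d+-\d+-\d+\W+\d+:\d+:\d+.\d+,StartTime:\d+-\d+-\d+\W+\d+:\d+:\d+.\d+,StartDelay:(?<DelayLag>\d+),batchCount:(?<batchCount>\d+),timeGT:(?<createtime>\d+-\d+-\d+\W+\d+:\d+:\d+.\d+)"
    | dedup PartitionNumber, Offset, TriggerId
    | stats count as Yesterday by TriggerId ]
| eval diff_In_Percentage=if(isnull(Yesterday) OR LastWeek_Weekdays_Avg==0, "NA", round(((Yesterday-LastWeek_Weekdays_Avg)/LastWeek_Weekdays_Avg)*100, 2))

Keep in mind that join relies on a subsearch, which is subject to subsearch result and runtime limits, so the append + stats approach generally scales better for large result sets.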


anjambha
Communicator

dpatiladobe
Explorer

I have updated the question.
