Splunk Search

Convert from table to Line Chart

fsda
New Member

Hello! I apologize in advance for the rough request and a possibly basic question, as well as for my limited English. I have only been working with Splunk for a week. I have a search that collects data from the log and predicts the build-up of the queue at the checkout. The formulas may not be right, but my problem is different: how do I turn the end result into a graph? A simple conversion in the interface gives only individual points, and timechart does not work for these values (perhaps too many joins?). I need the chart to update over time, adding a new point at each moment and connecting the points with lines. Can you suggest something?

``` Queueing-theory report: estimates checkout load, idle probability and
    expected queue build-up from POS logs (hand-expanded M/M/c formulas). ```
``` NOTE(review): every join below merges a single-row stats subsearch. The
    stats commands discard _time, so the final result is one row with no time
    axis - this is why timechart/line charts show only isolated points. ```
index="bo2558" ("[PlBelegFactory] Bonnr" AND "belegType = 1") OR "[PlPrepinSession] validatePrepinSession - bon closed -> notification not necessary"
| sort -_time
| transaction source startswith="Bonnr" endswith="bon closed"
``` X = completed receipts; T1 = average service time per receipt (seconds) ```
| stats count as X avg(duration) as T1
``` A = assumed arrival rate; the /15 divisor is a hard-coded interval -
    TODO confirm the intended time window ```
| eval A=X/15
``` T2 = average pause between closing one receipt and opening the next ```
| join
[ search index="bo2558" ("[PlBelegFactory] Bonnr" AND "belegType = 1") OR "[PlPrepinSession] validatePrepinSession - bon closed -> notification not necessary"
| sort -_time
| transaction startswith="bon closed" endswith="Bonnr"
| stats avg(duration) as T2]
``` Tob = total minutes per receipt; p = offered load; n0 = minimum number
    of registers, rounded up ```
| eval T=T1+T2
| eval Tob=T/60
| eval p=A*Tob
| eval pk=round(p)
| eval n0=if(pk>p,pk,pk+1)
``` Y = total item registrations; C = average items per receipt ```
| join
[ search "PFM popPosFunction - gk.std_xpos.function.item_registration.PosX_ItemRegistration_xrg"
| stats count as Y]
| eval C=Y/X
``` Per-register activity: KasN = "Work" when register N produced at least
    one completed payment transaction; UK = count of working registers ```
| join
[ search index="bo2558" source="/x5/splunk/2558/pos01-bo-2558/std_pos.log" "new bon" OR "PAYMENT_END_DONE"
| sort -_time
| transaction source startswith="new bon" endswith="PAYMENT_END_DONE=true"
| stats count as Q1]
| eval Kas1=if(Q1>0,"Work","Not Work")
| eval UK=0
| eval UK=if(Q1>0,UK+1,UK)
| join
[ search index="bo2558" source="/x5/splunk/2558/pos02-bo-2558/std_pos.log" "new bon" OR "PAYMENT_END_DONE"
| sort -_time
| transaction source startswith="new bon" endswith="PAYMENT_END_DONE=true"
| stats count as Q2]
| eval Kas2=if(Q2>0,"Work","Not Work")
| eval UK=if(Q2>0,UK+1,UK)
| join
[ search index="bo2558" source="/x5/splunk/2558/pos03-bo-2558/std_pos.log" "new bon" OR "PAYMENT_END_DONE"
| sort -_time
| transaction source startswith="new bon" endswith="PAYMENT_END_DONE=true"
| stats count as Q3]
| eval Kas3=if(Q3>0,"Work","Not Work")
| eval UK=if(Q3>0,UK+1,UK)
| join
[ search index="bo2558" source="/x5/splunk/2558/pos04-bo-2558/std_pos.log" "new bon" OR "PAYMENT_END_DONE"
| sort -_time
| transaction source startswith="new bon" endswith="PAYMENT_END_DONE=true"
| stats count as Q4]
| eval Kas4=if(Q4>0,"Work","Not Work")
| eval UK=if(Q4>0,UK+1,UK)
| join
[ search index="bo2558" source="/x5/splunk/2558/pos05-bo-2558/std_pos.log" "new bon" OR "PAYMENT_END_DONE"
| sort -_time
| transaction source startswith="new bon" endswith="PAYMENT_END_DONE=true"
| stats count as Q5]
| eval Kas5=if(Q5>0,"Work","Not Work")
| eval UK=if(Q5>0,UK+1,UK)
| eval QQ=X/UK
``` p0 = probability the system is empty for the actual register count UK;
    p01..p05 = same probability for hypothetical counts 1..5 (the factorial
    denominators 1, 2, 6, 24, 120 are written out by hand) ```
| eval p0=if(UK=1,pow(1+p+(pow(p,2)/(1-p)),-1),0)
| eval p0=if(UK=2,pow(1+p+(pow(p,2)/2)+(pow(p,3)/(2*(2-p))),-1),p0)
| eval p0=if(UK=3,pow(1+p+(pow(p,2)/2)+(pow(p,3)/6)+(pow(p,4)/(6*(3-p))),-1),p0)
| eval p0=if(UK=4,pow(1+p+(pow(p,2)/2)+(pow(p,3)/6)+(pow(p,4)/24)+(pow(p,5)/(24*(4-p))),-1),p0)
| eval p0=if(UK=5,pow(1+p+(pow(p,2)/2)+(pow(p,3)/6)+(pow(p,4)/24)+(pow(p,5)/120)+(pow(p,6)/(120*(5-p))),-1),p0)
| eval p01=pow(1+p+(pow(p,2)/(1-p)),-1)
| eval p02=pow(1+p+(pow(p,2)/2)+(pow(p,3)/(2*(2-p))),-1)
| eval p03=pow(1+p+(pow(p,2)/2)+(pow(p,3)/6)+(pow(p,4)/(6*(3-p))),-1)
| eval p04=pow(1+p+(pow(p,2)/2)+(pow(p,3)/6)+(pow(p,4)/24)+(pow(p,5)/(24*(4-p))),-1)
| eval p05=pow(1+p+(pow(p,2)/2)+(pow(p,3)/6)+(pow(p,4)/24)+(pow(p,5)/120)+(pow(p,6)/(120*(5-p))),-1)
| eval prest=p0*100
``` Poch = probability a queue forms; Pochf = the same as a percentage ```
| eval Poch=if(UK=1, (pow(p,UK+1)/(1*(UK-p)))*p0,0)
| eval Poch=if(UK=2, (pow(p,UK+1)/(2*(UK-p)))*p0,Poch)
| eval Poch=if(UK=3, (pow(p,UK+1)/(6*(UK-p)))*p0,Poch)
| eval Poch=if(UK=4, (pow(p,UK+1)/(24*(UK-p)))*p0,Poch)
| eval Poch=if(UK=5, (pow(p,UK+1)/(120*(UK-p)))*p0,Poch)
| eval Pochf=Poch*100
``` Loch = expected queue length for UK registers; Loch1..Loch5 = the same
    for hypothetical register counts 1..5 ```
| eval Loch=if(UK=1,(pow(p,UK+1)*p0)/(UK*1)* pow((1-(p/UK)) ,-2), 0)
| eval Loch=if(UK=2,(pow(p,UK+1)*p0)/(UK*2)* pow((1-(p/UK)) ,-2), Loch)
| eval Loch=if(UK=3, (pow(p,UK+1)*p0)/(UK*6)* pow((1-(p/UK)) ,-2), Loch)
| eval Loch=if(UK=4, (pow(p,UK+1)*p0)/(UK*24)* pow((1-(p/UK)) ,-2), Loch)
| eval Loch=if(UK=5, (pow(p,UK+1)*p0)/(UK*120)* pow((1-(p/UK)) ,-2), Loch)
| eval Lochf=round(Loch)
``` Toch = expected waiting time in the queue; CotnN = combined cost of
    running N registers; Cotnf = register count that minimises that cost ```
| eval Toch= 1/A*Loch
| eval Toch1= 1/A*Loch1
| eval Toch2= 1/A*Loch2
| eval Toch3= 1/A*Loch3
| eval Toch4= 1/A*Loch4
| eval Toch5= 1/A*Loch5
| eval Cotn1=(1/A)+(3*Toch1)
| eval Cotn2=(2/A)+(3*Toch2)
| eval Cotn3=(3/A)+(3*Toch3)
| eval Cotn4=(4/A)+(3*Toch4)
| eval Cotn5=(5/A)+(3*Toch5)
| eval Cotn =min(Cotn1,Cotn2,Cotn3,Cotn4,Cotn5)
| eval Cotnf=if(Cotn=Cotn1,1,Cotnf)
| eval Cotnf=if(Cotn=Cotn2,2,Cotnf)
| eval Cotnf=if(Cotn=Cotn3,3,Cotnf)
| eval Cotnf=if(Cotn=Cotn4,4,Cotnf)
| eval Cotnf=if(Cotn=Cotn5,5,Cotnf)
``` Alert flag plus human-readable labels; intermediate fields are dropped ```
| eval GALYA=if((Lochf/UK)>4 OR Pochf>50,"Вызывайте Галю!","Галя не нужна")
| rename C as "Средняя наполненность чека" prest as "Время простоя кассира, %" X as "Всего чеков" QQ as "Чеков на 1 кассу" n0 as "Необходимый минимум касс" Cotnf as "Касс оптимально(альфа)"
| rename UK as "Касс работает" Tob as "Среднее время на 1 чек" Y as "Позиций в чеках" Pochf as "Вероятность скопления очереди,%" Lochf as "Покупателей в очереди" Toch as "Время ожидания в очереди,минут"
| fields - count, Q1, Q2, Q3, Q4 ,Q5,T,p0, A,pk,p,Loch,Poch, Cotn, Cotn1, Cotn2, Cotn3, Cotn4, Cotn5, Loch1, Loch2, Loch3, Loch4, Loch5,Toch1,Toch2,Toch3,Toch4,Toch5,p01,p02,p03,p04,p05,T1,T2

Thank you!

Tags (2)
0 Karma

richgalloway
SplunkTrust
SplunkTrust

It's not the number of joins, it's the joins themselves. Each join appears to end with a stats command, which causes two problems: 1) they don't return a time field so there's nothing for timechart to work with; 2) each stats field is different so there's no common field on which to perform a join.

---
If this reply helps you, Karma would be appreciated.
0 Karma
Get Updates on the Splunk Community!

Splunk App for Anomaly Detection End of Life Announcement

Q: What is happening to the Splunk App for Anomaly Detection? A: Splunk is officially announcing the ...

Aligning Observability Costs with Business Value: Practical Strategies

 Join us for an engaging Tech Talk on Aligning Observability Costs with Business Value: Practical ...

Mastering Data Pipelines: Unlocking Value with Splunk

 In today's AI-driven world, organizations must balance the challenges of managing the explosion of data with ...