I ended up solving this with a custom search command. Praveen's answer above would work perfectly if the data were perfect — that is, if you had an OPEN and a CLOSED event stored for each ticket. Unfortunately we don't have that.
Below is the script burndown.py for anyone who is interested. It runs slowly in Search but I think making it a saved search that runs daily will do the trick 🙂
'''
Burndown Chart custom search command for Splunk
Charles Hack, Accenture
<charles.hack@accenture.com>
April 2016
This script takes in a list of deduplicated ALM defect events (TODO: generalize)
and returns a table of days and the count of defects that were open on that day.
This can be timecharted to yield a picture of the ebb and flow of open tickets over time.
Input format:
```
[{'date_created':'02-06-1991 06:00:00', 'status':'Closed', 'date_closed':'02-07-1991 12:15:00', ...}, ...]
```
Output format:
```
[{'_time':'02-01-1991', 'defects_open':22}, {'_time':'02-02-1991', 'defects_open':19}, ...]
```
'''
import json
import logging
from datetime import datetime
from datetime import timedelta
from splunk.Intersplunk import readResults, outputResults
DATETIME_FORMAT = "%Y-%m-%d %H:%M:%S"
DATE_FORMAT = "%Y-%m-%d"
# Set up logging suitable for splunkd consumption: plain "LEVEL message"
# lines on stderr, which splunkd captures into search.log.
# (Removed a stray no-op `logging.root` bare expression.)
logging.root.setLevel(logging.INFO)
formatter = logging.Formatter('%(levelname)s %(message)s')
handler = logging.StreamHandler()
handler.setFormatter(formatter)
logging.root.addHandler(handler)
# helper functions
def get_date_created(e):
    """Return the creation date of event *e* as a datetime.date.

    Parses the first 10 chars of the 'fields.creation-time' field with
    DATE_FORMAT. Falls back to today's date when the field is missing or
    malformed, so downstream min()/len()/comparisons never crash on the
    dirty data this command was written to tolerate.
    """
    try:
        return datetime.strptime(e['fields.creation-time'][:10], DATE_FORMAT).date()
    except (KeyError, TypeError, ValueError):
        # Narrowed from a bare `except:` — only swallow lookup/parse
        # failures, not KeyboardInterrupt/SystemExit.
        return datetime.now().date()
def get_date_modified(e):
    """Return the last-modified date of event *e* as a datetime.date.

    Parses the first 19 chars of the 'last-modified' field with
    DATETIME_FORMAT. Falls back to today's date when the field is missing
    or malformed (deliberate best-effort on dirty data).
    """
    try:
        return datetime.strptime(e['last-modified'][:19], DATETIME_FORMAT).date()
    except (KeyError, TypeError, ValueError):
        # Narrowed from a bare `except:` — only swallow lookup/parse
        # failures, not KeyboardInterrupt/SystemExit.
        return datetime.now().date()
def get_date_closed(e):
    """Return the closing date of event *e* as a datetime.date.

    Parses the first 10 chars of the 'fields.closing-date' field with
    DATE_FORMAT. Falls back to today's date when the field is missing or
    malformed — which also makes a still-open ticket count as open up to
    and including today.
    """
    try:
        return datetime.strptime(e['fields.closing-date'][:10], DATE_FORMAT).date()
    except (KeyError, TypeError, ValueError):
        # Narrowed from a bare `except:` — only swallow lookup/parse
        # failures, not KeyboardInterrupt/SystemExit.
        return datetime.now().date()
def get_dates(events, date_fieldname):
    """Return a list with one datetime.date per event.

    date_fieldname selects the extractor: 'created', 'modified' or
    'closed'. Returns a real list so callers can take len() and index it.

    Raises ValueError for an unknown field name — the original silently
    fell off the end and returned None, and also had no 'closed' branch
    even though get_date_closed exists.
    """
    extractors = {
        'created': get_date_created,
        'modified': get_date_modified,
        'closed': get_date_closed,
    }
    try:
        extract = extractors[date_fieldname]
    except KeyError:
        raise ValueError("unknown date_fieldname: {!r}".format(date_fieldname))
    return [extract(e) for e in events]
if __name__ == '__main__':
    # Read the deduplicated defect events piped in from the Splunk search.
    events = readResults()
    logging.info("Read in {} results.".format(len(events)))

    # Extract per-event dates. (Removed the hard-coded debug probes at
    # events[100], created_dates[5] and created_dates[1400], which crashed
    # on any dataset smaller than those indices.)
    created_dates = get_dates(events, 'created')
    logging.info('got {} created dates'.format(len(created_dates)))
    modified_dates = get_dates(events, 'modified')

    output = []
    if events:
        # Overall span of the chart: earliest creation to latest modification.
        min_date = min(created_dates)
        logging.info('Min date is {}'.format(min_date.isoformat()))
        max_date = max(modified_dates)
        logging.info('Max date is {}'.format(max_date.isoformat()))
        n_days = (max_date - min_date).days
        logging.info('{} days represented total.'.format(n_days))

        # Inclusive day range: n_days + 1 so max_date itself gets a row
        # (the original range(n_days) silently dropped the final day).
        date_range = [min_date + timedelta(days=n) for n in range(n_days + 1)]
        for d in date_range:
            # A defect counts as open on day d if it was created on or
            # before d and not closed before d. sum() over a generator
            # works on both Python 2 and 3, unlike len(filter(...)).
            defects_open = sum(
                1 for e in events
                if get_date_created(e) <= d <= get_date_closed(e))
            output.append({'_time': d.strftime(DATE_FORMAT),
                           'defects_open': defects_open})

    logging.info('Generated output table with {} results.'.format(len(output)))
    # Hand the table back to Splunk for timecharting.
    outputResults(output)
... View more