You should use the service's jobs endpoint, as follows:
A function to perform a Splunk search:
def execute_query(searchquery_normal,
                  kwargs_normalsearch=None,
                  kwargs_options=None):
    """Run a Splunk search job, wait for it to finish, and save the results.

    Parameters
    ----------
    searchquery_normal : str
        The Splunk search string to execute.
    kwargs_normalsearch : dict, optional
        Keyword arguments for ``service.jobs.create``.
        Defaults to ``{"exec_mode": "normal"}``.
    kwargs_options : dict, optional
        Keyword arguments for ``job.results``.
        Defaults to ``{"output_mode": "csv", "count": 1000000}``.

    Returns
    -------
    pandas.DataFrame or None
        The search results, or ``None`` if the job produced no output.

    Notes
    -----
    Relies on the module-level ``service`` (an authenticated splunklib
    Service) and ``filename_new`` (the output CSV path) — both are
    defined elsewhere in the project; confirm before reuse.
    """
    import io  # local import so this fix is self-contained

    # Build the option dicts per call — mutable default arguments are
    # shared across calls and must not be used for dicts.
    if kwargs_normalsearch is None:
        kwargs_normalsearch = {"exec_mode": "normal"}
    if kwargs_options is None:
        kwargs_options = {"output_mode": "csv", "count": 1000000}

    # Execute the search. A normal-mode search returns the job's SID
    # right away, so we must poll for completion.
    job = service.jobs.create(searchquery_normal, **kwargs_normalsearch)
    while True:
        # Sleep briefly instead of busy-spinning while the job spins up.
        while not job.is_ready():
            time.sleep(0.1)
        stats = {
            "isDone": job["isDone"],
            "doneProgress": float(job["doneProgress"]) * 100,
            "scanCount": int(job["scanCount"]),
            "eventCount": int(job["eventCount"]),
            "resultCount": int(job["resultCount"]),
        }
        status = ("\r%(doneProgress)03.1f%% %(scanCount)d scanned "
                  "%(eventCount)d matched %(resultCount)d results") % stats
        sys.stdout.write(status + '\n')
        sys.stdout.flush()
        # Splunk reports isDone as the string "1" when the job completes.
        if stats["isDone"] == "1":
            sys.stdout.write("\nDone!")
            break
        time.sleep(0.5)

    # Fetch the finished results as a single CSV blob, then release the job.
    csv_results = job.results(**kwargs_options).read()
    job.cancel()

    # NOTE: .read() returns one CSV string/bytes blob, not an iterable of
    # rows — the original `for row in csv_results` iterated characters.
    # An emptiness check is what was actually intended.
    if not csv_results:
        return None
    if isinstance(csv_results, bytes):
        csv_results = csv_results.decode('utf-8')

    # io.StringIO is the Python 3 replacement for the Python-2-only
    # StringIO.StringIO; pandas ignores `encoding` for in-memory text.
    df = pd.read_csv(io.StringIO(csv_results), sep=',', low_memory=False)
    df.to_csv(filename_new, sep=',', encoding='utf-8')
    return df
You can find the whole project at the following link:
https://github.com/selcukozer/splunk_python
... View more