Thanks again for your help, woodcock — but this still failed.
Here are all the details in case you can reproduce the issue.
The Data
root@testbox:/opt/data# cat myfile_201510210345.txt
IP|10.0.0.1/32|proxy|65|||2015/10/19|proxy server
root@testbox:/opt/data#
props.conf and datetime.xml
root@testbox:/opt/splunk/etc/apps/myapp/local# cat props.conf
[mysource]
DATETIME_CONFIG = /etc/apps/myapp/local/datetime.xml
FIELD_DELIMITER = |
FIELD_NAMES = F1,F2,F3,F4,F5,F6,F7,F8
TZ = UTC
NO_BINARY_CHECK = true
SHOULD_LINEMERGE = false
root@testbox:/opt/splunk/etc/apps/myapp/local#
root@testbox:/opt/splunk/etc/apps/myapp/local# cat datetime.xml
<!-- Version 4.0 -->
<!-- datetime.xml -->
<!-- This file contains the general formulas for parsing date/time formats. -->
<datetime>
<define name="_mydatetime" extract="year, month, day, hour, minute">
<text><![CDATA[source::.*?_(\d{4})(\d{2})(\d{2})(\d{2})(\d{2}).txt]]></text>
</define>
<timePatterns>
<use name="_mydatetime"/>
</timePatterns>
<datePatterns>
<use name="_mydatetime"/>
</datePatterns>
</datetime>
root@testbox:/opt/splunk/etc/apps/myapp/local#
btool output showing full config
root@ashlubuntu:/opt/splunk/etc/apps/myapp/local# /opt/splunk/bin/splunk btool props list mysource
[mysource]
ANNOTATE_PUNCT = True
AUTO_KV_JSON = true
BREAK_ONLY_BEFORE =
BREAK_ONLY_BEFORE_DATE = True
CHARSET = UTF-8
DATETIME_CONFIG = /etc/apps/myapp/local/datetime.xml
FIELD_DELIMITER = |
FIELD_NAMES = F1,F2,F3,F4,F5,F6,F7,F8
HEADER_MODE =
LEARN_SOURCETYPE = true
LINE_BREAKER_LOOKBEHIND = 100
MAX_DAYS_AGO = 2000
MAX_DAYS_HENCE = 2
MAX_DIFF_SECS_AGO = 3600
MAX_DIFF_SECS_HENCE = 604800
MAX_EVENTS = 256
MAX_TIMESTAMP_LOOKAHEAD = 128
MUST_BREAK_AFTER =
MUST_NOT_BREAK_AFTER =
MUST_NOT_BREAK_BEFORE =
NO_BINARY_CHECK = true
SEGMENTATION = indexing
SEGMENTATION-all = full
SEGMENTATION-inner = inner
SEGMENTATION-outer = outer
SEGMENTATION-raw = none
SEGMENTATION-standard = standard
SHOULD_LINEMERGE = false
TRANSFORMS =
TRUNCATE = 10000
TZ = UTC
detect_trailing_nulls = false
maxDist = 100
priority =
sourcetype =
root@ashlubuntu:/opt/splunk/etc/apps/myapp/local#
oneshot import
root@ashlubuntu:/opt/data# /opt/splunk/bin/splunk add oneshot myfile_201510210345.txt -sourcetype "mysource" -index "testindex" -host "myhost"
Your session is invalid. Please login.
Splunk username: admin
Password:
Oneshot '/opt/data/myfile_201510210345.txt' added
root@ashlubuntu:/opt/data#
splunkd.log DEBUG output - showing it recognises the config, but fails to parse the timestamp
10-28-2015 22:56:39.543 DEBUG REST_Calls - app=search POST data/inputs/oneshot/ id=/opt/data/myfile_201510210345.txt: host -> [myhost], index -> [testindex], sourcetype -> [mysource]
10-28-2015 22:56:39.543 DEBUG AdminManager - Validating argument values...
10-28-2015 22:56:39.543 DEBUG AdminManager - Validating rule='validate(len(name) < 1024, 'Parameter "name" must be less than 1024 characters.')' for arg='name'.
10-28-2015 22:56:39.589 DEBUG FilesystemFilter - Testing path=/opt/data/myfile_201510210345.txt(real=/opt/data/myfile_201510210345.txt) with global blacklisted paths
10-28-2015 22:56:39.590 INFO AdminManager - feedName=oneshotinput, atomUrl=services
10-28-2015 22:56:39.590 INFO UserManager - Unwound user context: admin -> NULL
10-28-2015 22:56:39.590 DEBUG InThreadActor - this=0x7f8454016b50 waitForActorToComplete start actor=0x7f844a3fcdf0
10-28-2015 22:56:39.592 DEBUG InThreadActor - this=0x7f8454016b50 waitForActorToComplete end actor=0x7f844a3fcdf0
10-28-2015 22:56:39.593 DEBUG ArchiveContext - /opt/data/myfile_201510210345.txt is NOT an archive file.
10-28-2015 22:56:39.593 DEBUG PropertiesMapConfig - Performing pattern matching for: source::/opt/data/myfile_201510210345.txt
10-28-2015 22:56:39.593 DEBUG OneShotWriter - Got new entry in the archive: /opt/data/myfile_201510210345.txt
10-28-2015 22:56:39.593 DEBUG OneShotWriter - Will call classifier with given_type="mysource".
10-28-2015 22:56:39.593 DEBUG FileClassifierManager - Finding type for file: /opt/data/myfile_201510210345.txt
10-28-2015 22:56:39.593 DEBUG PropertiesMapConfig - Performing pattern matching for: source::/opt/data/myfile_201510210345.txt
10-28-2015 22:56:39.593 DEBUG PropertiesMapConfig - Performing pattern matching for: source::/opt/data/myfile_201510210345.txt|mysource
10-28-2015 22:56:39.593 DEBUG PropertiesMapConfig - Pattern 'mysource' matches with priority 100
10-28-2015 22:56:39.593 DEBUG PropertiesMapConfig - Pattern 'mysource' matches with priority 100
10-28-2015 22:56:39.593 DEBUG PropertiesMapConfig - Performing pattern matching for: source::/opt/data/myfile_201510210345.txt|host::myhost|mysource|
10-28-2015 22:56:39.594 DEBUG PropertiesMapConfig - Pattern 'mysource' matches with priority 100
10-28-2015 22:56:39.594 DEBUG OneShotWriter - Setting sourcetype="sourcetype::mysource"
10-28-2015 22:56:39.594 DEBUG OneShotWriter - Setting channelKey="2"
10-28-2015 22:56:39.594 DEBUG PropertiesMapConfig - Performing pattern matching for: source::/opt/data/myfile_201510210345.txt|host::myhost|mysource|2
10-28-2015 22:56:39.594 DEBUG PropertiesMapConfig - Pattern 'mysource' matches with priority 100
10-28-2015 22:56:39.594 DEBUG StructuredDataHeaderExtractor - Read configuration: configured=1 mode=6 HEADER_FIELD_LINE_NUMBER=0 HEADER_FIELD_DELIMITER='|' HEADER_FIELD_QUOTE='"' FIELD_DELIMITER='|' FIELD_QUOTE='"'.
10-28-2015 22:56:39.594 DEBUG OneShotWriter - Structured data configurations loaded
10-28-2015 22:56:39.594 INFO UTF8Processor - Converting using CHARSET="UTF-8" for conf "source::/opt/data/myfile_201510210345.txt|host::myhost|mysource|2"
10-28-2015 22:56:39.594 INFO LineBreakingProcessor - Using truncation length 10000 for conf "source::/opt/data/myfile_201510210345.txt|host::myhost|mysource|2"
10-28-2015 22:56:39.594 INFO LineBreakingProcessor - Using lookbehind 100 for conf "source::/opt/data/myfile_201510210345.txt|host::myhost|mysource|2"
10-28-2015 22:56:39.594 DEBUG StructuredDataHeaderExtractor - Read configuration: configured=1 mode=6 HEADER_FIELD_LINE_NUMBER=0 HEADER_FIELD_DELIMITER='|' HEADER_FIELD_QUOTE='"' FIELD_DELIMITER='|' FIELD_QUOTE='"'.
10-28-2015 22:56:39.594 INFO AggregatorMiningProcessor - Setting up line merging apparatus for: source::/opt/data/myfile_201510210345.txt|host::myhost|mysource|2
10-28-2015 22:56:39.595 DEBUG LoadDateParserRegexes - put _mydatetime regex=source::.*?_(\d{4})(\d{2})(\d{2})(\d{2})(\d{2}).txt
10-28-2015 22:56:39.595 DEBUG LoadDateParserRegexes - * year
10-28-2015 22:56:39.595 DEBUG LoadDateParserRegexes - * month
10-28-2015 22:56:39.595 DEBUG LoadDateParserRegexes - * day
10-28-2015 22:56:39.595 DEBUG LoadDateParserRegexes - * hour
10-28-2015 22:56:39.595 DEBUG LoadDateParserRegexes - * minute
10-28-2015 22:56:39.595 INFO DateParser - Set timezone to: UTC
10-28-2015 22:56:39.595 DEBUG AggregatorMiningProcessor - Failed to parse timestamp. Defaulting to time specified by data input. - data_source="/opt/data/myfile_201510210345.txt", data_host="myhost", data_sourcetype="mysource"
Here is the data in Splunk — you can see the same time for _time and _indextime. Splunk didn't even use the file's modification time; it reverted to the index time.
I'm wondering if this needs to be a support case, since I cannot seem to extract the timestamp from a filename.
hxxps://www.dropbox.com/s/f40xx78v5zctpon/mysource.PNG?dl=0
... View more