Hi @rdagan ,
I have the user splunkd1@TS.fitco.com on the Splunk node and the user splunkd1@RT.rtp.com on the Hadoop cluster.
I created a keytab file for splunkd1@TS.fitco.com and referenced it in indexes.conf, and I get the following error while executing Hadoop commands on the Splunk host:
ls: Failed on local exception: java.io.IOException: javax.security.sasl.SaslException: GSS initiate failed [Caused by GSSException: No valid credentials provided (Mechanism level: Failed to find any Kerberos tgt)]; Host Details : local host is: "splunkdev@TS.company2.COM/xx.xx.xx.xxx"; destination host is: "SLPP02.HADOOP.company.COM:8020;
# Local Splunk index whose aged buckets are archived to HDFS
# (see the [splunk_index_archive] stanza below).
[hadoopidx]
coldPath = $SPLUNK_DB/hadoopidx/colddb
# 0 = disable event hashing for data-integrity checks
enableDataIntegrityControl = 0
# 0 = keep full tsidx files (no reduction of aged buckets)
enableTsidxReduction = 0
homePath = $SPLUNK_DB/hadoopidx/db
# maximum total size of this index, in MB (20 GB)
maxTotalDataSizeMB = 20480
thawedPath = $SPLUNK_DB/hadoopidx/thaweddb
# Hadoop provider used for virtual indexes / index archiving.
# Keys kept in alphabetical order (key order is not significant in
# Splunk .conf files; this only restores the stanza's own convention).
[provider:eihadoop]
vix.command.arg.3 = $SPLUNK_HOME/bin/jars/SplunkMR-hy2.jar
# Service principal of the HDFS NameNode; _HOST is substituted per node.
vix.dfs.namenode.kerberos.principal = hdfs/_HOST@HADOOP.company.COM
vix.env.HADOOP_HOME = /opt/local/hadoop-2.6.0-cdh5.9.1
vix.env.HUNK_THIRDPARTY_JARS = $SPLUNK_HOME/bin/jars/thirdparty/common/avro-1.7.7.jar,$SPLUNK_HOME/bin/jars/thirdparty/common/avro-mapred-1.7.7.jar,$SPLUNK_HOME/bin/jars/thirdparty/common/commons-compress-1.10.jar,$SPLUNK_HOME/bin/jars/thirdparty/common/commons-io-2.4.jar,$SPLUNK_HOME/bin/jars/thirdparty/common/libfb303-0.9.2.jar,$SPLUNK_HOME/bin/jars/thirdparty/common/parquet-hive-bundle-1.6.0.jar,$SPLUNK_HOME/bin/jars/thirdparty/common/snappy-java-1.1.1.7.jar,$SPLUNK_HOME/bin/jars/thirdparty/hive_1_2/hive-exec-1.2.1.jar,$SPLUNK_HOME/bin/jars/thirdparty/hive_1_2/hive-metastore-1.2.1.jar,$SPLUNK_HOME/bin/jars/thirdparty/hive_1_2/hive-serde-1.2.1.jar
vix.env.JAVA_HOME = /usr/java/jdk1.8.0_102
vix.family = hadoop
# Default HDFS filesystem (NameNode RPC endpoint).
vix.fs.default.name = hdfs://SLPP02.HADOOP.company.COM:8020
vix.hadoop.security.authentication = kerberos
# NOTE(review): Hadoop parses hadoop.security.authorization as a boolean
# and only recognizes "true"/"false"; "1" may silently evaluate to false.
# Confirm whether "true" is required here.
vix.hadoop.security.authorization = 1
# KDC and realm of the Hadoop cluster, passed to the JVM as
# java.security.krb5.kdc / java.security.krb5.realm.
vix.javaprops.java.security.krb5.kdc = SLP013.HADOOP.company.COM
vix.javaprops.java.security.krb5.realm = HADOOP.company.COM
# Keytab + principal Splunk uses to authenticate to the cluster.
# NOTE(review): the principal's realm (TS.company2.COM) differs from the
# cluster realm (HADOOP.company.COM) configured above. Unless cross-realm
# trust exists and the keytab actually contains this principal's keys,
# kinit will fail — this is the most likely cause of the reported
# "Failed to find any Kerberos tgt" GSSException. Verify with:
#   klist -kt /home/splunkd1/splunkd1.keytab
vix.kerberos.keytab = /home/splunkd1/splunkd1.keytab
vix.kerberos.principal = splunkdev@TS.company2.COM
vix.mapreduce.framework.name = yarn
vix.mapreduce.jobtracker.kerberos.principal = mapred/_HOST@HADOOP.company.COM
# 0 = no bandwidth throttling when copying buckets to HDFS.
vix.output.buckets.max.network.bandwidth = 0
# HDFS working directory for Splunk Analytics for Hadoop.
vix.splunk.home.hdfs = /user/splunkdev/hadoopanalytics/
vix.yarn.nodemanager.principal = yarn/_HOST@HADOOP.company.COM
# NOTE(review): yarn.resourcemanager.address and ...scheduler.address
# expect a host:port RPC endpoint (defaults 8032 / 8030), not an https://
# web-UI URL. These values look like the ResourceManager web console —
# confirm the RPC addresses from the cluster's yarn-site.xml.
vix.yarn.resourcemanager.address = https://SLPP08.HADOOP.company.COM:8090/cluster
vix.yarn.resourcemanager.principal = yarn/_HOST@HADOOP.company.COM
vix.yarn.resourcemanager.scheduler.address = https://SLPP015.HADOOP.company.COM:8090/cluster/scheduler
# Virtual index that archives aged buckets of [hadoopidx] into HDFS
# via the provider stanza above.
[splunk_index_archive]
# Local index (or indexes) whose buckets are copied to HDFS.
vix.output.buckets.from.indexes = hadoopidx
# Archive buckets older than this many seconds (172800 s = 48 h).
vix.output.buckets.older.than = 172800
# Destination path in HDFS for archived buckets.
vix.output.buckets.path = /user/splunkdev/splunk_index_archive
# Provider stanza ([provider:eihadoop]) supplying the Hadoop connection.
vix.provider = eihadoop
... View more