<?xml version="1.0" encoding="UTF-8"?>
<rss xmlns:content="http://purl.org/rss/1.0/modules/content/" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:taxo="http://purl.org/rss/1.0/modules/taxonomy/" version="2.0">
  <channel>
    <title>Re: Introspection in Splunk Search</title>
    <link>https://community.splunk.com/t5/Splunk-Search/Introspection/m-p/742584#M240876</link>
    <description>&lt;P&gt;&lt;a href="https://community.splunk.com/t5/user/viewprofilepage/user-id/274807"&gt;@SN1&lt;/a&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;If you run this search, how many peers return a count?&lt;/P&gt;&lt;P&gt;&lt;EM&gt;&lt;STRONG&gt;index=_internal earliest=-5m@m | stats count by splunk_server&lt;/STRONG&gt;&lt;/EM&gt;&lt;/P&gt;&lt;P&gt;This should return responses from all of your indexers, and, if your search heads and other components are configured to forward their internal logs, from those as well.&lt;/P&gt;</description>
    <pubDate>Tue, 25 Mar 2025 06:11:02 GMT</pubDate>
    <dc:creator>kiran_panchavat</dc:creator>
    <dc:date>2025-03-25T06:11:02Z</dc:date>
    <item>
      <title>Introspection</title>
      <link>https://community.splunk.com/t5/Splunk-Search/Introspection/m-p/742579#M240873</link>
      <description>&lt;P&gt;Hello, I am running the search:&lt;BR /&gt;index=_introspection&lt;BR /&gt;| dedup host&lt;BR /&gt;| table host&lt;BR /&gt;&lt;BR /&gt;In the results I am not able to see one indexer and one search head, while the other indexers and search heads are visible.&lt;/P&gt;</description>
      <pubDate>Tue, 25 Mar 2025 05:04:47 GMT</pubDate>
      <guid>https://community.splunk.com/t5/Splunk-Search/Introspection/m-p/742579#M240873</guid>
      <dc:creator>SN1</dc:creator>
      <dc:date>2025-03-25T05:04:47Z</dc:date>
    </item>
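    <!-- A possible alternative sketch: the metadata command returns the same host
         inventory without scanning raw events, and its lastTime/recentTime columns
         show when each host last reported, so a silent host stands out immediately:

         | metadata type=hosts index=_introspection
         | eval lastTime=strftime(lastTime, "%F %T"), recentTime=strftime(recentTime, "%F %T")
    -->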
    <item>
      <title>Re: Introspection</title>
      <link>https://community.splunk.com/t5/Splunk-Search/Introspection/m-p/742582#M240874</link>
      <description>&lt;P&gt;&lt;a href="https://community.splunk.com/t5/user/viewprofilepage/user-id/274807"&gt;@SN1&lt;/a&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;Check whether the missing indexer and search head are online and Splunk is running on them. You can SSH into those servers and run &lt;STRONG&gt;splunk status&lt;/STRONG&gt; to verify.&lt;/P&gt;&lt;P&gt;Are you able to see all the instances in the Monitoring Console?&lt;/P&gt;&lt;P&gt;This could happen if:&lt;/P&gt;&lt;UL&gt;&lt;LI&gt;The hosts are down or disconnected.&lt;/LI&gt;&lt;LI&gt;The Splunk instance on those hosts is not running.&lt;/LI&gt;&lt;LI&gt;There’s a network issue preventing data from being forwarded.&lt;/LI&gt;&lt;/UL&gt;</description>
      <pubDate>Tue, 25 Mar 2025 06:06:58 GMT</pubDate>
      <guid>https://community.splunk.com/t5/Splunk-Search/Introspection/m-p/742582#M240874</guid>
      <dc:creator>kiran_panchavat</dc:creator>
      <dc:date>2025-03-25T06:06:58Z</dc:date>
    </item>
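    <!-- A short sketch of the check suggested above; the path assumes a default
         Linux install, and output wording can vary by Splunk version:

         $SPLUNK_HOME/bin/splunk status
         # healthy output looks roughly like:
         #   splunkd is running (PID: ...).
         #   splunk helpers are running (PIDs: ...).
    -->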
    <item>
      <title>Re: Introspection</title>
      <link>https://community.splunk.com/t5/Splunk-Search/Introspection/m-p/742583#M240875</link>
      <description>&lt;P&gt;I am getting this error on the health check:&lt;/P&gt;&lt;UL&gt;&lt;LI&gt;&lt;STRONG&gt;Root Cause(s): &lt;/STRONG&gt;&lt;UL&gt;&lt;LI&gt;&lt;SPAN&gt;Events from tracker.log have not been seen for the last 238401 seconds, which is more than the red threshold (210 seconds). This typically occurs when indexing or forwarding are falling behind or are blocked.&lt;/SPAN&gt;&lt;/LI&gt;&lt;/UL&gt;&lt;/LI&gt;&lt;/UL&gt;</description>
      <pubDate>Tue, 25 Mar 2025 06:10:54 GMT</pubDate>
      <guid>https://community.splunk.com/t5/Splunk-Search/Introspection/m-p/742583#M240875</guid>
      <dc:creator>SN1</dc:creator>
      <dc:date>2025-03-25T06:10:54Z</dc:date>
    </item>
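    <!-- A hedged sketch for chasing this health warning: blocked queues on the
         affected host usually surface in metrics.log (field names as in standard
         metrics.log queue events), which narrows down where ingestion is stuck:

         index=_internal source=*metrics.log* group=queue blocked=true
         | stats count by host, name
    -->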
    <item>
      <title>Re: Introspection</title>
      <link>https://community.splunk.com/t5/Splunk-Search/Introspection/m-p/742584#M240876</link>
      <description>&lt;P&gt;&lt;a href="https://community.splunk.com/t5/user/viewprofilepage/user-id/274807"&gt;@SN1&lt;/a&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;If you run this search, how many peers return a count?&lt;/P&gt;&lt;P&gt;&lt;EM&gt;&lt;STRONG&gt;index=_internal earliest=-5m@m | stats count by splunk_server&lt;/STRONG&gt;&lt;/EM&gt;&lt;/P&gt;&lt;P&gt;This should return responses from all of your indexers, and, if your search heads and other components are configured to forward their internal logs, from those as well.&lt;/P&gt;</description>
      <pubDate>Tue, 25 Mar 2025 06:11:02 GMT</pubDate>
      <guid>https://community.splunk.com/t5/Splunk-Search/Introspection/m-p/742584#M240876</guid>
      <dc:creator>kiran_panchavat</dc:creator>
      <dc:date>2025-03-25T06:11:02Z</dc:date>
    </item>
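    <!-- A possible follow-up sketch: splitting the count by both fields separates
         which search peer returned the events (splunk_server) from which machine
         generated them (host), i.e. "peer not searchable" versus "host not
         forwarding":

         index=_internal earliest=-5m@m
         | stats count by splunk_server, host
    -->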
    <item>
      <title>Re: Introspection</title>
      <link>https://community.splunk.com/t5/Splunk-Search/Introspection/m-p/742586#M240877</link>
      <description>&lt;P&gt;&lt;a href="https://community.splunk.com/t5/user/viewprofilepage/user-id/274807"&gt;@SN1&lt;/a&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;&lt;SPAN&gt;The&amp;nbsp;_introspection&amp;nbsp;index in Splunk is part of the "Platform Instrumentation" features, which collect information about your systems running Splunk to help diagnose performance issues.&lt;/SPAN&gt;&lt;/P&gt;&lt;P&gt;&lt;SPAN&gt;&lt;A href="https://docs.splunk.com/Documentation/Splunk/9.3.2/Troubleshooting/Whatdatagetslogged" target="_blank" rel="noopener"&gt;What does platform instrumentation log? - Splunk Documentation&lt;/A&gt;&lt;/SPAN&gt;&lt;/P&gt;&lt;P&gt;&lt;SPAN&gt;&lt;A href="https://docs.splunk.com/Documentation/Splunk/9.3.2/RESTREF/RESTintrospect" target="_blank" rel="noopener"&gt;Introspection endpoint descriptions - Splunk Documentation&lt;/A&gt;&lt;/SPAN&gt;&lt;/P&gt;</description>
      <pubDate>Tue, 25 Mar 2025 06:17:51 GMT</pubDate>
      <guid>https://community.splunk.com/t5/Splunk-Search/Introspection/m-p/742586#M240877</guid>
      <dc:creator>kiran_panchavat</dc:creator>
      <dc:date>2025-03-25T06:17:51Z</dc:date>
    </item>
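    <!-- A hedged sketch tied to the REST reference above: the rest command can
         query an introspection endpoint straight from the search bar (endpoint
         path per the RESTintrospect docs linked in the post; field availability
         varies by platform):

         | rest /services/server/status/resource-usage/hostwide
         | fields splunk_server, cpu_count, mem, normalized_load_avg_1min
    -->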
    <item>
      <title>Re: Introspection</title>
      <link>https://community.splunk.com/t5/Splunk-Search/Introspection/m-p/742589#M240878</link>
      <description>&lt;P&gt;&lt;a href="https://community.splunk.com/t5/user/viewprofilepage/user-id/274807"&gt;@SN1&lt;/a&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;There should be a message in splunkd.log explaining the problem:&lt;/P&gt;&lt;P&gt;&lt;STRONG&gt;index=_internal source=*splunkd.log&lt;/STRONG&gt;&lt;/P&gt;&lt;P&gt;Check that there is enough storage on the volume containing the introspection index. Also, confirm that no one turned off introspection. See&amp;nbsp;&lt;A href="https://docs.splunk.com/Documentation/Splunk/latest/Troubleshooting/ConfigurePIF#Disable_logging" target="_blank" rel="noopener"&gt;https://docs.splunk.com/Documentation/Splunk/latest/Troubleshooting/ConfigurePIF#Disable_logging&lt;/A&gt;&lt;/P&gt;&lt;P&gt;If the missing hosts haven’t reported data recently, they might not appear within the default time range (e.g., last 24 hours). Expand the time range in the UI or add &lt;STRONG&gt;earliest=-30d&lt;/STRONG&gt; (or further back) to your search.&lt;/P&gt;</description>
      <pubDate>Tue, 25 Mar 2025 06:40:27 GMT</pubDate>
      <guid>https://community.splunk.com/t5/Splunk-Search/Introspection/m-p/742589#M240878</guid>
      <dc:creator>kiran_panchavat</dc:creator>
      <dc:date>2025-03-25T06:40:27Z</dc:date>
    </item>
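    <!-- A hedged sketch for the storage check suggested above: dbinspect reports
         per-bucket disk usage, so summing it gives a quick read on how much space
         the introspection index consumes on each peer:

         | dbinspect index=_introspection
         | stats sum(sizeOnDiskMB) as total_mb by splunk_server
    -->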
    <item>
      <title>Re: Introspection</title>
      <link>https://community.splunk.com/t5/Splunk-Search/Introspection/m-p/742596#M240879</link>
      <description>&lt;P&gt;Hi&amp;nbsp;&lt;a href="https://community.splunk.com/t5/user/viewprofilepage/user-id/274807"&gt;@SN1&lt;/a&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;If you look further back, when was the last event?&lt;BR /&gt;Have a look using this search, going back at least to the time of the last event from the missing servers.&lt;/P&gt;&lt;LI-CODE lang="markup"&gt;| tstats latest(_time) as _time where index=_introspection by host&lt;/LI-CODE&gt;&lt;P&gt;Then run the search again 5-10 minutes later. Are the times of the last events different for the missing hosts? If so, this would suggest that they are having issues sending logs and are delayed, rather than not sending at all.&lt;/P&gt;&lt;P&gt;In addition, it would be worth checking the Splunk log on the missing host directly. Look at $SPLUNK_HOME/var/log/splunk/splunkd.log - are there any references to blocking or output errors?&lt;/P&gt;&lt;P&gt;Please let me know how you get on, and consider adding karma to this or any other answer if it has helped.&lt;BR /&gt;Regards&lt;BR /&gt;&lt;BR /&gt;Will&lt;/P&gt;</description>
      <pubDate>Tue, 25 Mar 2025 07:21:14 GMT</pubDate>
      <guid>https://community.splunk.com/t5/Splunk-Search/Introspection/m-p/742596#M240879</guid>
      <dc:creator>livehybrid</dc:creator>
      <dc:date>2025-03-25T07:21:14Z</dc:date>
    </item>
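    <!-- A hedged extension of the tstats search above: an explicit lag column
         makes a delayed-but-still-sending host obvious between two runs:

         | tstats latest(_time) as last_event where index=_introspection by host
         | eval lag_seconds = now() - last_event
         | sort - lag_seconds
    -->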
  </channel>
</rss>

