All Posts

Find Answers
Ask questions. Get answers. Find technical product solutions from passionate members of the Splunk community.

All Posts

I am testing this data by uploading it in Splunk Cloud but I am not getting events in the proper format when I am selecting sourcetype as json in settings; given screenshot [ { "check_error": ... See more...
i am testing this data by uploading in splunk cloud but i am not getting events in proper format when i am selectin sourcetype as json in settings given screen shot [ { "check_error": "", "check_key_1": "C2N48617", "check_key_2": "", "check_key_3": "", "check_key_4": "", "check_key_5": "", "check_key_6": "", "check_name": "General_parameters", "check_status": "OK", "database_major_version": "19", "database_minor_version": "0", "database_name": "C2N48617", "database_version": "19.0.0.0.0", "extract_date": "30/07/2024 08:09:06", "host_name": "flosclnrhv03.pharma.aventis.com", "instance_name": "C2N48617", "script_version": "1.0" }, { "check_error": "", "check_key_1": "C2N48617", "check_key_2": "O1NN8944", "check_key_3": "LIVE2459_VAL", "check_key_4": "AQ$_Q_TASKREPORTWORKTASK_TAB_E", "check_key_5": "", "check_key_6": "", "check_name": "queue_mem_check", "check_status": "OK", "extract_date": "30/07/2024 08:09:06", "queue_name": "AQ$_Q_TASKREPORTWORKTASK_TAB_E", "queue_owner": "LIVE2459_VAL", "queue_sharable_mem": "4072", "script_version": "1.0" }, { "check_error": "", "check_key_1": "C2N48617", "check_key_2": "O1NN8944", "check_key_3": "LIVE2459_VAL", "check_key_4": "AQ$_Q_PIWORKTASK_TAB_E", "check_key_5": "", "check_key_6": "", "check_name": "queue_mem_check", "check_status": "OK", "extract_date": "30/07/2024 08:09:06", "queue_name": "AQ$_Q_PIWORKTASK_TAB_E", "queue_owner": "LIVE2459_VAL", "queue_sharable_mem": "4072", "script_version": "1.0" }, { "check_error": "", "check_key_1": "C2N48617", "check_key_2": "O1NN8944", "check_key_3": "LIVE2459_VAL", "check_key_4": "AQ$_Q_LABELWORKTASK_TAB_E", "check_key_5": "", "check_key_6": "", "check_name": "queue_mem_check", "check_status": "OK", "extract_date": "30/07/2024 08:09:06", "queue_name": "AQ$_Q_LABELWORKTASK_TAB_E", "queue_owner": "LIVE2459_VAL", "queue_sharable_mem": "4072", "script_version": "1.0" }, { "check_error": "", "check_key_1": "C2N48617", "check_key_2": "O1NN8944", "check_key_3": "LIVE2459_VAL", 
"check_key_4": "AQ$_Q_PIPROCESS_TAB_E", "check_key_5": "", "check_key_6": "", "check_name": "queue_mem_check", "check_status": "OK", "extract_date": "30/07/2024 08:09:06", "queue_name": "AQ$_Q_PIPROCESS_TAB_E", "queue_owner": "LIVE2459_VAL", "queue_sharable_mem": "4072", "script_version": "1.0" }, { "check_error": "", "check_key_1": "C2N48617", "check_key_2": "", "check_key_3": "SYS", "check_key_4": "ALERT_QUE", "check_key_5": "", "check_key_6": "", "check_name": "queue_mem_check", "check_status": "OK", "extract_date": "30/07/2024 08:09:06", "queue_name": "ALERT_QUE", "queue_owner": "SYS", "queue_sharable_mem": "0", "script_version": "1.0" }, { "check_error": "", "check_key_1": "C2N48617", "check_key_2": "", "check_key_3": "SYS", "check_key_4": "AQ$_ALERT_QT_E", "check_key_5": "", "check_key_6": "", "check_name": "queue_mem_check", "check_status": "OK", "extract_date": "30/07/2024 08:09:06", "queue_name": "AQ$_ALERT_QT_E", "queue_owner": "SYS", "queue_sharable_mem": "0", "script_version": "1.0" }, { "check_error": "", "check_key_1": "C2N48617", "check_key_2": "", "check_key_3": "", "check_key_4": "", "check_key_5": "", "check_key_6": "", "check_name": "fra_check", "check_status": "OK", "extract_date": "30/07/2024 08:09:06", "flash_in_gb": "40", "flash_reclaimable_gb": "0", "flash_used_in_gb": ".47", "percent_of_space_used": "1.17", "script_version": "1.0" }, { "check_error": "", "check_key_1": "C2N48617", "check_key_2": "", "check_key_3": "", "check_key_4": "", "check_key_5": "", "check_key_6": "", "check_name": "processes", "check_status": "OK", "extract_date": "30/07/2024 08:09:06", "process_current_value": "299", "process_limit": "1000", "process_percent": "29.9", "script_version": "1.0" }, { "check_error": "", "check_key_1": "C2N48617", "check_key_2": "", "check_key_3": "", "check_key_4": "", "check_key_5": "", "check_key_6": "", "check_name": "sessions", "check_status": "OK", "extract_date": "30/07/2024 08:09:06", "script_version": "1.0", 
"sessions_current_value": "299", "sessions_limit": "1536", "sessions_percent": "19.47" }, { "check_error": "", "check_key_1": "C2N48617", "check_key_2": "SYSTEM", "check_key_3": "", "check_key_4": "", "check_key_5": "", "check_key_6": "", "check_name": "cdb_tbs_check", "check_status": "OK", "current_use_mb": "1355", "extract_date": "30/07/2024 08:09:06", "percent_used": "2", "script_version": "1.0", "tablespace_name": "SYSTEM", "total_physical_all_mb": "65536" }, { "check_error": "", "check_key_1": "C2N48617", "check_key_2": "SYSAUX", "check_key_3": "", "check_key_4": "", "check_key_5": "", "check_key_6": "", "check_name": "cdb_tbs_check", "check_status": "OK", "current_use_mb": "23635", "extract_date": "30/07/2024 08:09:06", "percent_used": "36", "script_version": "1.0", "tablespace_name": "SYSAUX", "total_physical_all_mb": "65536" }, { "check_error": "", "check_key_1": "C2N48617", "check_key_2": "UNDOTBS1", "check_key_3": "", "check_key_4": "", "check_key_5": "", "check_key_6": "", "check_name": "cdb_tbs_check", "check_status": "OK", "current_use_mb": "22", "extract_date": "30/07/2024 08:09:06", "percent_used": "0", "script_version": "1.0", "tablespace_name": "UNDOTBS1", "total_physical_all_mb": "65536" }, { "check_error": "", "check_key_1": "C2N48617", "check_key_2": "USERS", "check_key_3": "", "check_key_4": "", "check_key_5": "", "check_key_6": "", "check_name": "cdb_tbs_check", "check_status": "OK", "current_use_mb": "4", "extract_date": "30/07/2024 08:09:06", "percent_used": "0", "script_version": "1.0", "tablespace_name": "USERS", "total_physical_all_mb": "65536" }, { "check_error": "", "check_key_1": "C2N48617", "check_key_2": "O1NN8944", "check_key_3": "USERS", "check_key_4": "", "check_key_5": "", "check_key_6": "", "check_name": "pdb_tbs_check", "check_status": "OK", "current_use_mb": "1176", "extract_date": "30/07/2024 08:09:06", "pdb_name": "O1NN8944", "percent_used": "4", "script_version": "1.0", "tablespace_name": "USERS", "total_physical_all_mb": 
"32767" }, { "check_error": "", "check_key_1": "C2N48617", "check_key_2": "O1S48633", "check_key_3": "SYSTEM", "check_key_4": "", "check_key_5": "", "check_key_6": "", "check_name": "pdb_tbs_check", "check_status": "OK", "current_use_mb": "784", "extract_date": "30/07/2024 08:09:06", "pdb_name": "O1S48633", "percent_used": "1", "script_version": "1.0", "tablespace_name": "SYSTEM", "total_physical_all_mb": "65536" }, { "check_error": "", "check_key_1": "C2N48617", "check_key_2": "O1NN8944", "check_key_3": "SYSAUX", "check_key_4": "", "check_key_5": "", "check_key_6": "", "check_name": "pdb_tbs_check", "check_status": "OK", "current_use_mb": "1549", "extract_date": "30/07/2024 08:09:06", "pdb_name": "O1NN8944", "percent_used": "2", "script_version": "1.0", "tablespace_name": "SYSAUX", "total_physical_all_mb": "65536" }, { "check_error": "", "check_key_1": "C2N48617", "check_key_2": "O1S48633", "check_key_3": "USERS", "check_key_4": "", "check_key_5": "", "check_key_6": "", "check_name": "pdb_tbs_check", "check_status": "OK", "current_use_mb": "1149", "extract_date": "30/07/2024 08:09:06", "pdb_name": "O1S48633", "percent_used": "2", "script_version": "1.0", "tablespace_name": "USERS", "total_physical_all_mb": "65536" }, { "check_error": "", "check_key_1": "C2N48617", "check_key_2": "O1NN8944", "check_key_3": "UNDOTBS1", "check_key_4": "", "check_key_5": "", "check_key_6": "", "check_name": "pdb_tbs_check", "check_status": "OK", "current_use_mb": "60", "extract_date": "30/07/2024 08:09:06", "pdb_name": "O1NN8944", "percent_used": "0", "script_version": "1.0", "tablespace_name": "UNDOTBS1", "total_physical_all_mb": "65536" }, { "check_error": "", "check_key_1": "C2N48617", "check_key_2": "O1S48633", "check_key_3": "SYSAUX", "check_key_4": "", "check_key_5": "", "check_key_6": "", "check_name": "pdb_tbs_check", "check_status": "OK", "current_use_mb": "7803", "extract_date": "30/07/2024 08:09:06", "pdb_name": "O1S48633", "percent_used": "12", "script_version": "1.0", 
"tablespace_name": "SYSAUX", "total_physical_all_mb": "65536" }, { "check_error": "", "check_key_1": "C2N48617", "check_key_2": "O1NN8944", "check_key_3": "SYSTEM", "check_key_4": "", "check_key_5": "", "check_key_6": "", "check_name": "pdb_tbs_check", "check_status": "OK", "current_use_mb": "705", "extract_date": "30/07/2024 08:09:06", "pdb_name": "O1NN8944", "percent_used": "1", "script_version": "1.0", "tablespace_name": "SYSTEM", "total_physical_all_mb": "65536" }, { "check_error": "", "check_key_1": "C2N48617", "check_key_2": "O1NN8944", "check_key_3": "INDX", "check_key_4": "", "check_key_5": "", "check_key_6": "", "check_name": "pdb_tbs_check", "check_status": "OK", "current_use_mb": "378", "extract_date": "30/07/2024 08:09:06", "pdb_name": "O1NN8944", "percent_used": "1", "script_version": "1.0", "tablespace_name": "INDX", "total_physical_all_mb": "32767" }, { "check_error": "", "check_key_1": "C2N48617", "check_key_2": "O1NN2467", "check_key_3": "SYSTEM", "check_key_4": "", "check_key_5": "", "check_key_6": "", "check_name": "pdb_tbs_check", "check_status": "OK", "current_use_mb": "623", "extract_date": "30/07/2024 08:09:06", "pdb_name": "O1NN2467", "percent_used": "1", "script_version": "1.0", "tablespace_name": "SYSTEM", "total_physical_all_mb": "65536" }, { "check_error": "", "check_key_1": "C2N48617", "check_key_2": "O1S48633", "check_key_3": "AUDIT_TBS", "check_key_4": "", "check_key_5": "", "check_key_6": "", "check_name": "pdb_tbs_check", "check_status": "OK", "current_use_mb": "3", "extract_date": "30/07/2024 08:09:06", "pdb_name": "O1S48633", "percent_used": "0", "script_version": "1.0", "tablespace_name": "AUDIT_TBS", "total_physical_all_mb": "8192" }, { "check_error": "", "check_key_1": "C2N48617", "check_key_2": "O1S48633", "check_key_3": "USRINDEX", "check_key_4": "", "check_key_5": "", "check_key_6": "", "check_name": "pdb_tbs_check", "check_status": "OK", "current_use_mb": "128", "extract_date": "30/07/2024 08:09:06", "pdb_name": "O1S48633", 
"percent_used": "0", "script_version": "1.0", "tablespace_name": "USRINDEX", "total_physical_all_mb": "65536" }, { "check_error": "", "check_key_1": "C2N48617", "check_key_2": "O1S48633", "check_key_3": "UNDOTBS1", "check_key_4": "", "check_key_5": "", "check_key_6": "", "check_name": "pdb_tbs_check", "check_status": "OK", "current_use_mb": "77", "extract_date": "30/07/2024 08:09:06", "pdb_name": "O1S48633", "percent_used": "0", "script_version": "1.0", "tablespace_name": "UNDOTBS1", "total_physical_all_mb": "65536" }, { "check_error": "", "check_key_1": "C2N48617", "check_key_2": "O1S48633", "check_key_3": "TOOLS", "check_key_4": "", "check_key_5": "", "check_key_6": "", "check_name": "pdb_tbs_check", "check_status": "OK", "current_use_mb": "5", "extract_date": "30/07/2024 08:09:06", "pdb_name": "O1S48633", "percent_used": "0", "script_version": "1.0", "tablespace_name": "TOOLS", "total_physical_all_mb": "65536" }, { "check_error": "", "check_key_1": "C2N48617", "check_key_2": "O1NN2467", "check_key_3": "UNDOTBS1", "check_key_4": "", "check_key_5": "", "check_key_6": "", "check_name": "pdb_tbs_check", "check_status": "OK", "current_use_mb": "24", "extract_date": "30/07/2024 08:09:06", "pdb_name": "O1NN2467", "percent_used": "0", "script_version": "1.0", "tablespace_name": "UNDOTBS1", "total_physical_all_mb": "65536" }, { "check_error": "", "check_key_1": "C2N48617", "check_key_2": "O1NN2467", "check_key_3": "SYSAUX", "check_key_4": "", "check_key_5": "", "check_key_6": "", "check_name": "pdb_tbs_check", "check_status": "OK", "current_use_mb": "628", "extract_date": "30/07/2024 08:09:06", "pdb_name": "O1NN2467", "percent_used": "1", "script_version": "1.0", "tablespace_name": "SYSAUX", "total_physical_all_mb": "65536" }, { "check_error": "", "check_key_1": "C2N48617", "check_key_2": "", "check_key_3": "", "check_key_4": "", "check_key_5": "", "check_key_6": "", "check_name": "monitoring_package", "check_status": "OK", "extract_date": "30/07/2024 08:09:06", 
"script_version": "1.0" } ]
Hi @Nawab , to use ComputerName instead of host you cannot use tstats and the search is slower, so try this: with perimeter.csv lookup index=* | stats count BY sourcetype ComputerName | append [ ... See more...
Hi @Nawab , to use ComputerName instead of host you cannot use tstats and the search is slower, so try this: with perimeter.csv lookup index=* | stats count BY sourcetype ComputerName | append [ | inputlookup perimeter.csv | eval count=0 | fields ComputerName sourcetype count ] | stats sum(count) AS total BY sourcetype ComputerName | where total=0 without lookup: index=* | stats count latest(_time) AS _time BY sourcetype ComputerName | eval period=if(_time<now()-3600,"previous","latest") | stats dc(period) AS period_count values(period) AS period BY sourcetype ComputerName | where period_count=1 AND period="previous" Ciao. Giuseppe
My data has a tables{}.values{} containing a list of lists. Within each list there is data. Sample data below. When I try to extract this search to csv via job id, it's not containing tables{}.values... See more...
My data has a tables{}.values{} containing a list of lists. Within each list there is data. Sample data below. When I try to extract this search to csv via job id, it's not containing tables{}.values{} data within a single cell and instead treating it as it's own field as it's comma delimited. How can I keep all the data within a singular field when exporting to CSV?   Sample data: test@email.com The following was found, on website: google.com, by test user, with id:testuser, extracted from test.txt, on date testdate,another test field. 
The issue in my case is that the field I am looking at is ComputerName instead of host. Below is the deployment.   All windows servers ----> forwarder server ----> splunk in splunk host will be forwarde... See more...
The issue in my case is that the field I am looking at is ComputerName instead of host. Below is the deployment.   All windows servers ----> forwarder server ----> splunk In Splunk, host will be the forwarder server, i.e. 1, instead of the backend servers sending data. These queries work on the host, source, sourcetype and index fields.
@KendallW, thanks for your tips. But when I search index=card in the search app, the result is nothing.
Yes, if Windows offer the option to renderXml, using it is better than plain text.  Either way,, you need to parse with search command. As to this event, you do need to use semantics to present such... See more...
Yes, if Windows offers the option to renderXml, using it is better than plain text.  Either way, you need to parse with a search command. As to this event, you do need to use semantics to present such data.  When you say the message is in French, do you mean you have difficulty understanding the language?  If so, seek assistance on that.  This is a security failure during an account login.  The account of significance is Albert.  Maybe set up an extraction after the verb, like   | rex "Compte pour lequel l’ouverture de session .+ : ID de sécurité : (?<securityID>\S+)\s+Nom du compte : (?<accountName>\S+)\s+Domaine du compte : (?<accountDomain>\S+)"   However, if you see two separate events in Splunk when the original event is one, there may be a line breaker problem.  Fix that first. (XML can make line breaking more robust.)
Hi @silverKi  To classify logs into multiple indexes based on one sourcetype: props.conf: [test] TRANSFORMS-routing = bankRouting,cardRouting,errorRouting Note: -the plural form TRANSFORMS-routi... See more...
Hi @silverKi  To classify logs into multiple indexes based on one sourcetype: props.conf: [test] TRANSFORMS-routing = bankRouting,cardRouting,errorRouting Note: -the plural form TRANSFORMS-routing instead of TRANSFORM-routing. transforms.conf: [bankRouting] REGEX = (?i)bank DEST_KEY = _MetaData:Index FORMAT = bank [cardRouting] REGEX = (?i)card DEST_KEY = _MetaData:Index FORMAT = card [errorRouting] REGEX = (?i)error DEST_KEY = _MetaData:Index FORMAT = error Note: -Use (?i) for case-insensitive matching -Change DEST_KEY to _MetaData:Index -FORMAT should be the exact index name outputs.conf: [tcpout] defaultGroup = defaultGroup [tcpout:defaultGroup] server = 192.168.111.153:9997 [tcpout-server://192.168.111.151:9997] index = card [tcpout-server://192.168.111.152:9997] index = error      
Hi @kc_prane , what's the difference with your previous question? Anyway, the solution hinted by @KendallW is similar with my previous one. Ciao. Giuseppe
Thank you , Good to hear that we are not the only one What Linux version are you running?  
Hi @kc_prane , you shared only a part of your search, so I cannot check it. anyway, does it solves your requirement? Ciao. Giuseppe
Hi @harishsplunk7 , using my original search, you are checking if the users defined on your Splunk did a login in the last 30 days are present, if not (count=0) they are listed, in other words: the ... See more...
Hi @harishsplunk7 , using my original search, you are checking if the users defined on your Splunk who did a login in the last 30 days are present; if not (count=0) they are listed — in other words: the users not logged in to Splunk in the last 30 days. Why doesn't my search run for you? The only check that you can perform is if (or when) users did their last login; there isn't a "not logged in" trace. Adding timeSinceLastSeen, the added list of users isn't considered in the count, so you cannot check them. Ciao. Giuseppe
This is my test.log  [07-30-2024 02:19:22] +0900 INFO LMTracker [14307 MainThread] username=fIg-Jvkf, Visa, cardtype=credit, cardnumber=7085-5579-5664-8197, cvc=794, expireday=05/26, user-phone=852-... See more...
This is my test.log  [07-30-2024 02:19:22] +0900 INFO LMTracker [14307 MainThread] username=fIg-Jvkf, Visa, cardtype=credit, cardnumber=7085-5579-5664-8197, cvc=794, expireday=05/26, user-phone=852-9765-3539, comapny=IBK, com-tel=02-885-8485, address=7547 0c2F1YA76CHEkgw Street, city=Seoul, Country=Korea, status=500 Internal Server Error, Server error. Please try again later card.
Currently, my sourcetype contains a mix of bank logs and card logs. I would like to categorize this into `index=bank` and `index=card` respectively. Currently, the search is done with index=main, ... See more...
Currently, my sourcetype contains a mix of bank logs and card logs. I would like to categorize this into `index=bank` and `index=card` respectively. Currently, the search is done with index=main, and all data is displayed. If index=bank, I want only bank-related logs to be output. We set the forwarder as follows and created bank, card, and error indexes on the server that will receive the data. This is the code I have written so far... I need help,,,,,   splunk@heavy-forwarder:/opt/splunk/etc/apps/search/local:> cat inputs.conf [monitor:///opt/splunk/var/log/splunk/test.log] disabled = false host = heavy-forwarder sourcetype = test crcSalt = <SOURCE>   splunk@heavy-forwarder:/opt/splunk/etc/system/local:> cat props.conf [test] TRANSFORM-routing=bankRouting,cardRouting,errorRouting splunk@heavy-forwarder:/opt/splunk/etc/system/local:> cat transform.conf [bankRouting] REGEX=bank DEST_KEY =_INDEX FORMAT = bankGroup [cardRouting] REGEX=card DEST_KEY =_INDEX FORMAT = cardGroup [errorGroup] REGEX=error DEST_KEY =_INDEX FORMAT = errorGroup splunk@heavy-forwarder:/opt/splunk/etc/system/local:> cat outputs.conf [tcpout:bankGroup] server = 192.168.111.153:9997 [tcpout:cardGroup] server = 192.168.111.151:9997 [tcpout:errorGroup] server = 192.168.111.152:9997  
Or with JSON: { "type": "splunk.table", "dataSources": { "primary": "ds_5ds4f5" }, "title": "Device Inventory", "eventHandlers": [ { "type": "drilldow... See more...
Or with JSON: { "type": "splunk.table", "dataSources": { "primary": "ds_5ds4f5" }, "title": "Device Inventory", "eventHandlers": [ { "type": "drilldown.customUrl", "options": { "url": "{{row.target_url}}", "newTab": true } } ] }
Hi @SplunkerNoob, first create a field in your search which contains the URLs, e.g.  ... | eval target_url=case( device_type=="type1", "https://device1.com", device_type=="type2", "https://d... See more...
Hi @SplunkerNoob, first create a field in your search which contains the URLs, e.g.  ... | eval target_url=case( device_type=="type1", "https://device1.com", device_type=="type2", "https://device2.com", device_type=="type3", "https://device3.com", 1=1, "https://default.com" )   Then in your dashboard: <drilldown> <link target="_blank">{{row.target_url}}</link> </drilldown>
Hi @Gauri you can use "|eventstats" instead of "|stats" to keep the data in the pipeline for the later "|stats" command:     | eval totalResponseTime=round(requestTimeinSec*1000) | convert num("r... See more...
Hi @Gauri you can use "|eventstats" instead of "|stats" to keep the data in the pipeline for the later "|stats" command:     | eval totalResponseTime=round(requestTimeinSec*1000) | convert num("requestTimeinSec") | rangemap field="totalResponseTime" "totalResponseTime"=0-3000 | rename range as RangetotalResponseTime | eval totalResponseTimeabv3sec=round(requestTimeinSec*1000) | rangemap field="totalResponseTimeabv3sec" "totalResponseTimeabv3sec"=3001-60000 | rename range as RangetotalResponseTimeabv3sec | eval Product=case( (like(proxyUri,"URI1") AND like(methodName,"POST")) OR (like(proxyUri,"URI2") AND like(methodName,"GET")) OR (like(proxyUri,"URI3") AND like(methodName,"GET")), "ABC") | bin span=5m _time | stats count(totalResponseTime) as TotalTrans by Product URI methodName _time | eventstats sum(eval(RangetotalResponseTime="totalResponseTime")) as TS<3S by Product URI methodName | eventstats sum(eval(RangetotalResponseTimeabv3sec="totalResponseTimeabv3sec")) as TS>3S by Product URI methodName | eval SLI=case(Product="ABC", round('TS<3S'/TotalTrans*100,4)) | rename methodName AS Method | where (Product="ABC") and (SLI<99) | stats sum(TS>3S) as AvgImpact count(URI) as DataOutage by Product URI Method | fields Product URI Method TotalTrans SLI AvgImpact DataOutage | sort Product URI Method    
Hi ALL, After revisiting the installation document, I found it should start the Enterprise Console first. Now the EC is successfully started. But it can not be accessed via browser GUI via "http://<... See more...
Hi ALL, After revisiting the installation document, I found it should start the Enterprise Console first. Now the EC is successfully started. But it can not be accessed via browser GUI via "http://<server-name>:9191" because of permission issue. bin/platform-admin.sh start-platform-admin Starting Enterprise Console Database .... ***** Enterprise Console Database started ***** Starting Enterprise Console application Waiting for the Enterprise Console application to start......... ***** Enterprise Console application started on port 9191 *****
@kaede_oogami  はい、Splunkでソースタイプを設定する際のCHARSET(文字エンコーディング)オプションについて説明いたします。 Shift-JIS関連の文字エンコーディングには確かに複数の選択肢がありますが、主な違いは以下の通りです: 1. SHIFT-JIS: - 標準的なShift-JISエンコーディングです。 - JIS X 0208で定義された文字セットを... See more...
@kaede_oogami  はい、Splunkでソースタイプを設定する際のCHARSET(文字エンコーディング)オプションについて説明いたします。 Shift-JIS関連の文字エンコーディングには確かに複数の選択肢がありますが、主な違いは以下の通りです: 1. SHIFT-JIS: - 標準的なShift-JISエンコーディングです。 - JIS X 0208で定義された文字セットをカバーしています。 2. SJIS: - SHIFT-JISの別名として使われることが多いです。 - 多くの場合、SHIFT-JISと同じ意味で使用されます。 3. MS932: - Microsoftによる拡張Shift-JISエンコーディングです。 - SHIFT-JISを基にしていますが、追加の文字(NEC特殊文字、IBM拡張文字など)をサポートしています。 - Windowsで一般的に使用される日本語エンコーディングです。 4. CP932: - MS932の別名です。「Code Page 932」の略称です。 5. Windows-31J: - MS932のIANA登録名です。 - 技術的にはMS932と同じですが、より正式な名称として使用されることがあります。 実際の使用においては: - 標準的なShift-JIS文書の場合、SHIFT-JISまたはSJISを選択すれば問題ありません。 - Windows環境で作成された文書や、拡張文字を含む可能性がある場合は、MS932やWindows-31Jを選択するのが安全です。 Splunkがこれらの選択肢を提供しているのは、異なるシステムや環境から来るデータに対応するためです。適切なエンコーディングを選択することで、日本語テキストを正確に解析し、インデックスすることができます。 特定のデータソースに対してどのエンコーディングを選択すべきか迷う場合は、データの出所やそれを生成したシステムの特性を考慮して判断するのが良いでしょう。
Hi @MediumToast  If you only specify netfw,index,site1_netfw, it will not apply to all events from sources that are configured to be sent to the netfw index. It will only apply to events with the ex... See more...
Hi @MediumToast  If you only specify netfw,index,site1_netfw, it will not apply to all events from sources that are configured to be sent to the netfw index. It will only apply to events with the exact key netfw. Also, SC4S does not support wildcards in the splunk_metadata.csv file, so each sourcetype must be explicitly defined. If you have multiple Cisco devices (or any other types) that you want to redirect to site1_netfw, you will need to list each one individually.  You could get around this by updating the compliance_meta_by_source.conf and compliance_meta_by_source.csv files, e.g. like this (please test) compliance_meta_by_source.conf:   filter f_netfw_sources { program("cisco_asa" type(string)) or program("cisco_ios" type(string)) or program("cisco_nexus" type(string)) or program("juniper_netscreen" type(string)) # Add other relevant network firewall source types here };   compliance_meta_by_source.csv   f_netfw_sources,.splunk.index,site1_netfw      
Hi @Team,   Could you please help me on looping over inputs in splunk soar. my requirement: I am having input like this , input=['a','b','c','d'] I need to run query on each value from input li... See more...
Hi @Team,   Could you please help me on looping over inputs in splunk soar. my requirement: I am having input like this , input=['a','b','c','d'] I need to run query on each value from input like first it must take 'a' value and run query then from run query result i need to take sys id and pass it to create ticket. Note: we are using 6.1.1(On-prem) Please help me on this    Regards, Harish