# Version 9.2.2.20240415
# DO NOT EDIT THIS FILE!
# Changes to default files will be lost on update and are difficult to
# manage and support.
#
# Please make any changes to system defaults by overriding them in
# apps or $SPLUNK_HOME/etc/system/local
# (See "Configuration file precedence" in the web documentation).
#
# To override a specific setting, copy the name of the stanza and
# setting to the file where you wish to override it.
#
# This file contains possible attribute/value pairs for configuring
# Splunk's processing properties.
#

[default]
CHARSET = UTF-8
LINE_BREAKER_LOOKBEHIND = 100
TRUNCATE = 10000
LB_CHUNK_BREAKER_TRUNCATE = 2000000
DATETIME_CONFIG = /etc/datetime.xml
ADD_EXTRA_TIME_FIELDS = True
ANNOTATE_PUNCT = True
HEADER_MODE =
MATCH_LIMIT = 100000
DEPTH_LIMIT = 1000
MAX_DAYS_HENCE=2
MAX_DAYS_AGO=2000
MAX_DIFF_SECS_AGO=3600
MAX_DIFF_SECS_HENCE=604800
MAX_TIMESTAMP_LOOKAHEAD = 128
DETERMINE_TIMESTAMP_DATE_WITH_SYSTEM_TIME = false
SHOULD_LINEMERGE = True
BREAK_ONLY_BEFORE =
BREAK_ONLY_BEFORE_DATE = True
MAX_EVENTS = 256
MUST_BREAK_AFTER =
MUST_NOT_BREAK_AFTER =
MUST_NOT_BREAK_BEFORE =
TRANSFORMS =
SEGMENTATION = indexing
SEGMENTATION-all = full
SEGMENTATION-inner = inner
SEGMENTATION-outer = outer
SEGMENTATION-raw = none
SEGMENTATION-standard = standard
LEARN_SOURCETYPE = true
LEARN_MODEL = true
termFrequencyWeightedDist = false
maxDist = 100
AUTO_KV_JSON = true
detect_trailing_nulls = false
sourcetype =
priority =
unarchive_cmd_start_mode = shell

########## APPLICATION SERVERS ##########

[log4j]
BREAK_ONLY_BEFORE = \d\d?:\d\d:\d\d
pulldown_type = true
maxDist = 75
category = Application
description = Output produced by any Java 2 Enterprise Edition (J2EE) application server using log4j

[log4php]
pulldown_type = true
BREAK_ONLY_BEFORE = ^\w{3} \w{3}
category = Application
description = Output produced by a machine that runs the log4php logging utility

[weblogic_stdout]
pulldown_type = true
maxDist = 60
MAX_TIMESTAMP_LOOKAHEAD = 34
MAX_EVENTS = 2048
REPORT-st = weblogic-code
category = Application
description = Output produced by the Oracle WebLogic Java EE application server

[websphere_activity]
pulldown_type = true
BREAK_ONLY_BEFORE = ^-----
MAX_TIMESTAMP_LOOKAHEAD = 500
REPORT-st = colon-line
category = Application
description = Activity (service) logs produced by the IBM WebSphere application server

[websphere_core]
pulldown_type = true
maxDist = 70
BREAK_ONLY_BEFORE = ^NULL\s
category = Application
description = Output produced by the IBM WebSphere application server

[websphere_trlog]
pulldown_type = true
REPORT-st = was-trlog-code
category = Application
description = Trace output produced by the IBM WebSphere application server

[log4net_xml]
maxDist = 75
NO_BINARY_CHECK = 1
SHOULD_LINEMERGE = true
BREAK_ONLY_BEFORE = (<log4net:event.*)$

[splunkd]
EXTRACT-fields = (?i)^(?:[^ ]* ){2}(?:[+\-]\d+ )?(?P<log_level>[^ ]*)\s+(?P<component>[^ ]+) -

[splunkd_remote_searches]
MAX_TIMESTAMP_LOOKAHEAD = 40
TIME_FORMAT = %m-%d-%Y %H:%M:%S.%l %z
SHOULD_LINEMERGE = false
REPORT-fields = remote_searches_extractions_starting,remote_searches_extractions_terminated, remote_searches_extractions_starting_fallback
KV_MODE = none
TRUNCATE = 20000

[searches]
SHOULD_LINEMERGE = False

[splunkd_access]
maxDist = 28
MAX_TIMESTAMP_LOOKAHEAD = 128
REPORT-access = access-extractions, extract_spent
SHOULD_LINEMERGE = False
TIME_PREFIX = \[
TRUNCATE = 20000

[wlm_monitor]
MAX_TIMESTAMP_LOOKAHEAD = 40
TIME_FORMAT = %m-%d-%Y %H:%M:%S.%l %z
SHOULD_LINEMERGE = False
TRUNCATE = 20000

[splunkd_ui_access]
maxDist = 28
MAX_TIMESTAMP_LOOKAHEAD = 128
REPORT-access = access-extractions, extract_spent
SHOULD_LINEMERGE = False
TIME_PREFIX = \[
TRUNCATE = 20000
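# Example (hypothetical, commented out): per the note at the top of this
# file, a default such as TRUNCATE for [splunkd_ui_access] above is changed
# by copying the stanza name and setting into
# $SPLUNK_HOME/etc/system/local/props.conf, never by editing this file:
#
# [splunkd_ui_access]
# TRUNCATE = 40000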
[splunk_web_access]
maxDist = 28
MAX_TIMESTAMP_LOOKAHEAD = 128
REPORT-access = access-extractions
SHOULD_LINEMERGE = False
TIME_PREFIX = \[
EXTRACT-extract_spent = \s(?<spent>\d+(\.\d+)?)ms$
TRUNCATE = 75000

[splunk_web_service]
MAX_TIMESTAMP_LOOKAHEAD = 40
REPORT-fields = splunk-service-extractions
TRUNCATE = 20000

[splunkd_conf]
SHOULD_LINEMERGE = false
TIMESTAMP_FIELDS = datetime
TIME_FORMAT = %m-%d-%Y %H:%M:%S.%l %z
INDEXED_EXTRACTIONS = json
KV_MODE = none
TRUNCATE = 20000

[splunk_help]
BREAK_ONLY_BEFORE = gooblygook
MAX_EVENTS = 200000
TRANSFORMS-help = splunk_help

[mongod]
TIME_FORMAT = %Y-%m-%dT%H:%M:%S.%3N%Z
SHOULD_LINEMERGE = False
MAX_TIMESTAMP_LOOKAHEAD = 40

[splunk_version]
DATETIME_CONFIG = CURRENT
MUST_NOT_BREAK_AFTER = .*

[source::.../var/log/splunk/searchhistory.log(.\d+)?]
TRANSFORMS = splunk_index_history
sourcetype = splunk_search_history

[source::.../var/log/splunk/(web|report)_access(-\d+)?.log(.\d+)?]
sourcetype = splunk_web_access

[source::.../var/log/splunk/(web|report)_service(-\d+)?.log(.\d+)?]
sourcetype = splunk_web_service

[source::.../var/log/splunk/metrics.log(.\d+)?]
sourcetype = splunkd

[source::.../var/log/splunk/license_usage(|_summary).log(.\d+)?]
sourcetype = splunkd

[source::.../splunkd.log(.\d+)?]
sourcetype = splunkd

[source::.../mergebuckets.log(.\d+)?]
sourcetype = splunkd

[source::.../var/log/splunk/configuration_change.log(.\d+)?]
sourcetype = splunk_configuration_change
TRUNCATE = 0

[source::.../var/log/splunk/splunkd-utility.log(.\d+)?]
sourcetype = splunkd

[source::.../var/log/splunk/scheduler.log(.\d+)?]
sourcetype = scheduler

[source::.../var/log/splunk/audit.log(.\d+)?]
TRANSFORMS = send_to_nullqueue
sourcetype = splunk_audit

[source::.../var/log/splunk/btool.log(.\d+)?]
sourcetype = splunk_btool

[source::.../var/log/splunk/intentions.log(.\d+)?]
sourcetype = splunk_intentions

[source::.../var/log/splunk/python.log(.\d+)?]
sourcetype = splunk_python

[source::.../var/log/splunk/pdfgen.log(.\d+)?]
sourcetype = splunk_pdfgen

[source::.../var/log/splunk/searches.log]
sourcetype = searches

[source::.../var/log/splunk/splunkd_stdout.log(.\d+)?]
sourcetype = splunkd_stdout

[source::.../var/log/splunk/splunkd_stderr.log(.\d+)?]
sourcetype = splunkd_stderr

[source::.../var/log/splunk/*crash-*.log]
sourcetype = splunkd_crash_log

[source::.../var/log/splunk/migration.log.*]
sourcetype = splunk_migration

[source::.../var/log/splunk/remote_searches.log(.\d+)?]
sourcetype = splunkd_remote_searches

[source::.../splunkd_access.log(.\d+)?]
sourcetype = splunkd_access

[source::.../wlm_monitor.log(.\d+)?]
sourcetype = wlm_monitor

[source::.../splunkd_ui_access.log(.\d+)?]
sourcetype = splunkd_ui_access

[source::.../var/log/splunk/conf.log(.\d+)?]
sourcetype = splunkd_conf

[source::.../var/log/splunk/mongod.log(.\d+)?]
sourcetype = mongod

[source::.../var/log/splunk/health.log(.\d+)?]
sourcetype = splunkd

[source::.../var/log/watchdog/watchdog.log(.\d+)?]
sourcetype = splunkd

[source::.../var/log/splunk/search_messages.log(.\d+)?]
sourcetype = splunk_search_messages
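# Example (hypothetical, commented out): a custom log can be routed to its
# own sourcetype with the same source-pattern convention used above; "..."
# matches any number of path segments and "(.\d+)?" also catches rotated
# copies such as myapp.log.1:
#
# [source::.../var/log/myapp/myapp.log(.\d+)?]
# sourcetype = myapp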
[source::.../etc/splunk.version]
sourcetype = splunk_version

########## SPECIAL ##########

[__singleline]
SHOULD_LINEMERGE = False

[too_small]
maxDist = 9999
BREAK_ONLY_BEFORE_DATE = True
PREFIX_SOURCETYPE = True

# same as too_small but for larger text that has special characters
[breakable_text]
BREAK_ONLY_BEFORE = (^(?:---|===|\*\*\*|___|=+=))|^\s*$
LEARN_MODEL = false

[lastlog]
invalid_cause = binary
LEARN_MODEL = false

[wtmp]
invalid_cause = binary
LEARN_MODEL = false

[known_binary]
is_valid = False
invalid_cause = binary
LEARN_MODEL = false

[ignored_type]
is_valid = False
invalid_cause = ignored_type
LEARN_MODEL = false

[stash]
TRUNCATE = 0
# only look for ***SPLUNK*** on the first line
HEADER_MODE = firstline
# we can summary index past data, but rarely future data
MAX_DAYS_HENCE = 2
MAX_DAYS_AGO = 10000
# 5 years difference between two events
MAX_DIFF_SECS_AGO = 155520000
MAX_DIFF_SECS_HENCE = 155520000
MAX_TIMESTAMP_LOOKAHEAD = 64
LEARN_MODEL = false
# search time extractions
KV_MODE = none
REPORT-1 = stash_extract

[stash_new]
TRUNCATE = 0
# only look for ***SPLUNK*** on the first line
HEADER_MODE = firstline
# we can summary index past data, but rarely future data
MAX_DAYS_HENCE = 2
MAX_DAYS_AGO = 10000
# 5 years difference between two events
MAX_DIFF_SECS_AGO = 155520000
MAX_DIFF_SECS_HENCE = 155520000
MAX_TIMESTAMP_LOOKAHEAD = 64
LEARN_MODEL = false
# break .stash_new custom format into events
SHOULD_LINEMERGE = false
BREAK_ONLY_BEFORE_DATE = false
LINE_BREAKER = (\r?\n==##~~##~~ 1E8N3D4E6V5E7N2T9 ~~##~~##==\r?\n)
# change sourcetype to stash before indexing/forwarding this data (these
# events are fed to the stashparsing pipeline)
TRANSFORMS-sourcetype = set_sourcetype_to_stash

[stash_hec]
SHOULD_LINEMERGE = False
pulldown_type = false
INDEXED_EXTRACTIONS = hec
# we can summary index past data, but rarely future data
MAX_DAYS_HENCE = 2
MAX_DAYS_AGO = 10000
# 5 years difference between two events
MAX_DIFF_SECS_AGO = 155520000
MAX_DIFF_SECS_HENCE = 155520000

[mcollect_stash]
SHOULD_LINEMERGE = False
pulldown_type = true
INDEXED_EXTRACTIONS = csv
ADD_EXTRA_TIME_FIELDS = subseconds
KV_MODE = none
TIMESTAMP_FIELDS = metric_timestamp
TIME_FORMAT = %s.%Q

########## NON-LOG FILES ##########

# settings copied from zip
[source_archive]
invalid_cause = needs_preprocess
is_valid = False
LEARN_MODEL = false

[web]
BREAK_ONLY_BEFORE=goblygook
MAX_EVENTS=200000
DATETIME_CONFIG = NONE
CHECK_METHOD = modtime
LEARN_MODEL = false

[backup_file]
BREAK_ONLY_BEFORE=goblygook
MAX_EVENTS=10000
LEARN_MODEL = false

[manpage]
BREAK_ONLY_BEFORE = gooblygook
MAX_EVENTS = 200000
DATETIME_CONFIG = NONE
CHECK_METHOD = modtime
LEARN_MODEL = false

[misc_text]
BREAK_ONLY_BEFORE=goblygook
MAX_EVENTS=200000
DATETIME_CONFIG = NONE
CHECK_METHOD = modtime
pulldown_type = false
LEARN_MODEL = false

[csv]
SHOULD_LINEMERGE = False
pulldown_type = true
INDEXED_EXTRACTIONS = csv
KV_MODE = none
category = Structured
description = Comma-separated value format. Set header and other settings in "Delimited Settings"

[psv]
SHOULD_LINEMERGE = False
pulldown_type = true
INDEXED_EXTRACTIONS = psv
FIELD_DELIMITER=|
HEADER_FIELD_DELIMITER=|
KV_MODE = none
category = Structured
description = Pipe-separated value format. Set header and other settings in "Delimited Settings"

[tsv]
SHOULD_LINEMERGE = False
pulldown_type = true
INDEXED_EXTRACTIONS = tsv
FIELD_DELIMITER=tab
HEADER_FIELD_DELIMITER=tab
KV_MODE = none
category = Structured
description = Tab-separated value format. Set header and other settings in "Delimited Settings"
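# Example (hypothetical, commented out): other delimited formats follow the
# same pattern as [psv] and [tsv] above, reusing the csv extraction engine
# and overriding only the delimiters (here, semicolons):
#
# [ssv]
# SHOULD_LINEMERGE = False
# INDEXED_EXTRACTIONS = csv
# FIELD_DELIMITER=;
# HEADER_FIELD_DELIMITER=;
# KV_MODE = none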
[_json]
pulldown_type = true
INDEXED_EXTRACTIONS = json
KV_MODE = none
category = Structured
description = JavaScript Object Notation format. For more information, visit http://json.org/

[json_no_timestamp]
BREAK_ONLY_BEFORE = ^{
DATETIME_CONFIG = CURRENT
MAX_TIMESTAMP_LOOKAHEAD = 800
pulldown_type = 1
category = Structured
description = A variant of the JSON source type for events that have no timestamp

[fs_notification]
SHOULD_LINEMERGE=false

[exchange]
INDEXED_EXTRACTIONS = w3c
KV_MODE = none

[generic_single_line]
TIME_FORMAT = %Y-%m-%dT%H:%M:%S.%3N %Z
SHOULD_LINEMERGE = false
pulldown_type = 1
category = Miscellaneous
description = A common log format with a predefined timestamp. Customize timestamp in "Timestamp" options

########## RULE BASED CONDITIONS ##########

[rule::snort]
sourcetype = snort
# IF MORE THAN 5% OF LINES MATCH REGEX, MUST BE THIS TYPE
MORE_THAN_1 = (=\+)+
MORE_THAN_10 = (?:[0-9A-F]{2} ){16}

[rule::exim_main]
sourcetype = exim_main
# MORE THAN 2% HAVE <=, =>, 'queue'
MORE_THANA_2 = <=
MORE_THANB_2 = =>
MORE_THANC_2 = queue

[rule::postfix_syslog]
sourcetype = postfix_syslog
# IF 75% OF LINES MATCH REGEX, MUST BE THIS TYPE
MORE_THAN_75 = ^\w{3} +\d+ \d\d:\d\d:\d\d .* postfix(/\w+)?\[\d+\]:

[rule::sendmail_syslog]
sourcetype = sendmail_syslog
# IF 75% OF LINES MATCH REGEX, MUST BE THIS TYPE
MORE_THAN_75 = ^\w{3} +\d+ \d\d:\d\d:\d\d .* (sendmail|imapd|ipop3d)\[\d+\]:

[rule::access_common]
sourcetype = access_common
MORE_THAN_75 = ^\S+ \S+ \S+ \[[^\]]+\] "[^"]+" \S+ \S+$

[rule::access_combined]
sourcetype = access_combined
MORE_THAN_75 = ^\S+ \S+ \S+ \S* ?\[[^\]]+\] "[^"]*" \S+ \S+ \S+ "[^"]*"$

[rule::access_combined_wcookie]
sourcetype = access_combined_wcookie
# more restrictive version = ^\S+ \S+ \S+ \S* ?\[[^\]]+\] "[^"]*" \S+ \S+ \S+ "[^"]*" "[^"]*"$
MORE_THAN_75 = ^\S+ \S+ \S+ \S* ?\[[^\]]+\] "[^"]*" \S+ \S+(?: \S+)? "[^"]*" "[^"]*"

### DELAYED RULE BASED CONDITIONS. RUN AS LAST DITCH EFFORT BEFORE MAKING A NEW SOURCETYPE ###

# break text on ascii art and blanklines if more than 10% of lines
# have ascii art or blanklines, and less than 10% have timestamps
[delayedrule::breakable_text]
MORE_THAN_10 = (^(?:---|===|\*\*\*|___|=+=))|^\s*$
LESS_THAN_10 = [: ][012]?[0-9]:[0-5][0-9]
sourcetype = breakable_text

[delayedrule::syslog]
sourcetype = syslog
# IF MORE THAN 80% OF LINES MATCH REGEX, MUST BE THIS TYPE
MORE_THAN_80 = ^\w{3} +\d+ \d\d:\d\d:\d\d (?!AM|PM)[\w\-.]+ [\w\-/.]+(\[\d+\])?:

########## FILE MATCH CONDITIONS ##########

[source::.../var/log/anaconda.syslog(.\d+)?]
sourcetype = anaconda_syslog

[source::.../var/log/anaconda.log(.\d+)?]
sourcetype = anaconda

[source::.../var/log/httpd/error_log(.\d+)?]
sourcetype = apache_error

[source::.../var/log/cups/access_log(.\d+)?]
sourcetype = cups_access

[source::.../var/log/cups/error_log(.\d+)?]
sourcetype = cups_error

[source::.../var/log/dmesg(.\d+)?]
sourcetype = dmesg

[source::.../var/log/ftp.log(.\d+)?]
sourcetype = ftp

[source::.../(u_|)ex(tend|\d{4,8})*?.log]
sourcetype = iis

[source::.../var/log/lastlog(.\d+)?]
sourcetype = lastlog

[source::.../var/log/audit/audit.log(.\d+)?]
sourcetype = linux_audit

[source::.../var/log/boot.log(.\d+)?]
sourcetype = linux_bootlog

[source::.../var/log/secure(.\d+)?]
sourcetype = linux_secure

[source::.../man/man\d+/*.\d+]
sourcetype = manpage

[source::.../var/log/asl.log(.\d+)?]
sourcetype = osx_asl

[source::.../var/log/crashreporter.log(.\d+)?]
sourcetype = osx_crashreporter

[source::....crash.log(.\d+)?]
sourcetype = osx_crash_log
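# Example (hypothetical, commented out): new rule-based conditions follow
# the [rule::...] stanzas above; the numeric suffix of MORE_THAN_<N> is the
# percentage of lines that must match the regex for the type to be chosen:
#
# [rule::myapp_log]
# sourcetype = myapp
# MORE_THAN_80 = ^\[\d{4}-\d{2}-\d{2}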
[source::.../var/log/install.log(.\d+)?]
sourcetype = osx_install

[source::.../var/log/secure.log(.\d+)?]
sourcetype = osx_secure

[source::.../var/log/daily.out(.\d+)?]
sourcetype = osx_daily

[source::.../var/log/weekly.out(.\d+)?]
sourcetype = osx_weekly

[source::.../var/log/monthly.out(.\d+)?]
sourcetype = osx_monthly

[source::.../private/var/log/windowserver.log(.\d+)?]
sourcetype = osx_window_server

[source::....Z(.\d+)?]
unarchive_cmd = gzip -cd -
sourcetype = preprocess-Z
NO_BINARY_CHECK = true

[source::....(tbz|tbz2)(.\d+)?]
unarchive_cmd = _auto
sourcetype = preprocess-bzip
NO_BINARY_CHECK = true

[source::....bz2?(.\d+)?]
unarchive_cmd = bzip2 -cd -
sourcetype = preprocess-bzip
NO_BINARY_CHECK = true

[source::....(?\S+)::(?<_VAL_1>\S+)

# MySQL example.
# See the Splunker's Guide for Splunk.com
# for the myunbinit script and sample MySQL setup
# This example is commented out.
#
# [mysql]
# match_filename1 = *.bin
# invalid_cause = needs_preprocess
# is_valid = False
#

# Dealing with all windows type data, even when we're a unix
# platform, in case these types of data are forwarded by a windows
# lightweight forwarder

[ActiveDirectory]
SHOULD_LINEMERGE = false
LINE_BREAKER = ([\r\n]+---splunk-admon-end-of-event---\r\n[\r\n]*)
EXTRACT-GUID = (?i)(?!=\w)(?:objectguid|guid)\s*=\s*(?<GUID>[\w\-]+)
EXTRACT-SID = objectSid\s*=\s*(?<SID>\S+)
REPORT-MESSAGE = ad-kv
# some schema AD events may be very long
MAX_EVENTS = 10000
TRUNCATE = 100000

[WinRegistry]
DATETIME_CONFIG=NONE
LINE_BREAKER = ([\r\n]+---splunk-regmon-end-of-event---\r\n[\r\n]*)

[WinWinHostMon]
DATETIME_CONFIG=NONE
SHOULD_LINEMERGE = false

[WinPrintMon]
DATETIME_CONFIG=NONE
SHOULD_LINEMERGE = false

[wmi]
SHOULD_LINEMERGE = false
LINE_BREAKER = ([\r\n]+---splunk-wmi-end-of-event---\r\n[\r\n]*)
CHARSET = UTF-8

[source::WMI...]
REPORT-MESSAGE = wel-message, wel-eq-kv, wel-col-kv
TRANSFORMS-FIELDS = wmi-host, wmi-override-host
SHOULD_LINEMERGE = false

[source::WinEventLog...]
REPORT-MESSAGE = wel-message, wel-eq-kv, wel-col-kv
KV_MODE=none
# Note the below settings are effectively legacy, in place here to handle
# data coming from much much older forwarders (3.x & 4.x)
SHOULD_LINEMERGE = false
MAX_TIMESTAMP_LOOKAHEAD=30
LINE_BREAKER = ([\r\n](?=\d{2}/\d{2}/\d{2,4} \d{2}:\d{2}:\d{2} [aApPmM]{2}))
TRANSFORMS-FIELDS = strip-winevt-linebreaker

[PerformanceMonitor]
SHOULD_LINEMERGE = false
LINE_BREAKER = ([\r\n]+---splunk-perfmon-end-of-event---\r\n[\r\n]*)
REPORT-MESSAGE = perfmon-kv

[source::....(?i)(evt|evtx)(.\d+)?]
sourcetype = preprocess-winevt
NO_BINARY_CHECK = true
SHOULD_LINEMERGE = false
MAX_TIMESTAMP_LOOKAHEAD=30
LINE_BREAKER = ([\r\n](?=\d{2}/\d{2}/\d{4} \d{2}:\d{2}:\d{2} [aApPmM]{2}))
REPORT-MESSAGE = wel-message, wel-eq-kv, wel-col-kv

[preprocess-winevt]
invalid_cause = winevt
is_valid = False
LEARN_MODEL = false

[source::PerfmonMk...]
EXTRACT-collection,category,object = collection=\"?(?P<collection>[^\"\n]+)\"?\ncategory=\"?(?P<category>[^\"\n]+)\"?\nobject=\"?(?P<object>[^\"\n]+)\"?\n
KV_MODE = multi_PerfmonMk
NO_BINARY_CHECK = 1
pulldown_type = 1

[WinNetMonMk]
KV_MODE = multi_WinNetMonMk
NO_BINARY_CHECK = 1
pulldown_type = 0

[source::.../disk_objects.log(.\d+)?]
sourcetype = splunk_disk_objects

[source::.../resource_usage.log(.\d+)?]
sourcetype = splunk_resource_usage

[source::.../kvstore.log(.\d+)?]
sourcetype = kvstore

[source::.../token_input_metrics.log(.\d+)?]
sourcetype = token_endpoint_metrics

[source::.../http_event_collector_metrics.log(.\d+)?]
sourcetype = http_event_collector_metrics
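# Example (hypothetical, commented out): a structured JSON log with its own
# timestamp field can be configured like the introspection sourcetypes that
# follow; INDEXED_EXTRACTIONS parses the JSON at index time and
# TIMESTAMP_FIELDS names the JSON field that carries the event time:
#
# [myapp_json]
# SHOULD_LINEMERGE = false
# INDEXED_EXTRACTIONS = json
# TIMESTAMP_FIELDS = ts
# TIME_FORMAT = %Y-%m-%dT%H:%M:%S.%3N%z
# KV_MODE = none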
[splunk_disk_objects]
SHOULD_LINEMERGE = false
TIMESTAMP_FIELDS = datetime
TIME_FORMAT = %m-%d-%Y %H:%M:%S.%l %z
INDEXED_EXTRACTIONS = json
KV_MODE = none
JSON_TRIM_BRACES_IN_ARRAY_NAMES = true

[splunk_resource_usage]
SHOULD_LINEMERGE = false
TIMESTAMP_FIELDS = datetime
TIME_FORMAT = %m-%d-%Y %H:%M:%S.%l %z
INDEXED_EXTRACTIONS = json
KV_MODE = none
JSON_TRIM_BRACES_IN_ARRAY_NAMES = true

[kvstore]
SHOULD_LINEMERGE = false
TIMESTAMP_FIELDS = datetime
TIME_FORMAT = %m-%d-%Y %H:%M:%S.%l %z
INDEXED_EXTRACTIONS = json
KV_MODE = none
TRUNCATE = 1000000
JSON_TRIM_BRACES_IN_ARRAY_NAMES = true

[token_input_metrics]
SHOULD_LINEMERGE = false
TIMESTAMP_FIELDS = datetime
TIME_FORMAT = %m-%d-%Y %H:%M:%S.%l %z
INDEXED_EXTRACTIONS = json
KV_MODE = none
JSON_TRIM_BRACES_IN_ARRAY_NAMES = true

[collectd_http]
METRICS_PROTOCOL = collectd_http
NO_BINARY_CHECK = true
SHOULD_LINEMERGE = false
ADD_EXTRA_TIME_FIELDS = false
ANNOTATE_PUNCT = false
pulldown_type = true
TIMESTAMP_FIELDS = time
KV_MODE=none
category = Metrics
description = Collectd daemon format. Uses the write_http plugin to send metrics data to a Splunk platform data input via the HTTP Event Collector.

[http_event_collector_metrics]
SHOULD_LINEMERGE = false
TIMESTAMP_FIELDS = datetime
TIME_FORMAT = %m-%d-%Y %H:%M:%S.%l %z
INDEXED_EXTRACTIONS = json
KV_MODE = none
JSON_TRIM_BRACES_IN_ARRAY_NAMES = true

[statsd]
METRICS_PROTOCOL = statsd
NO_BINARY_CHECK = true
SHOULD_LINEMERGE = false
DATETIME_CONFIG = CURRENT
# remove indextime fields that aren't super useful.
ADD_EXTRA_TIME_FIELDS = false
ANNOTATE_PUNCT = false
pulldown_type = true
category = Metrics
description = Statsd daemon output format. Accepts the plain StatsD line metric protocol or the StatsD line metric protocol with dimensions extension.

[metrics_csv]
SHOULD_LINEMERGE = False
pulldown_type = true
INDEXED_EXTRACTIONS = csv
ADD_EXTRA_TIME_FIELDS = subseconds
KV_MODE = none
TIMESTAMP_FIELDS = metric_timestamp
TIME_FORMAT = %s.%Q
category = Metrics
description = Comma-separated value format for metrics. Must have metric_timestamp, metric_name, and _value fields.

[search_telemetry]
SHOULD_LINEMERGE = false
INDEXED_EXTRACTIONS = json
TRUNCATE = 1000000
KV_MODE = none
description = JSON-formatted file containing search related telemetry.

[splunk_cloud_telemetry]
SHOULD_LINEMERGE = false
TIMESTAMP_FIELDS = datetime
TIME_FORMAT = %m-%d-%Y %H:%M:%S.%l %z
INDEXED_EXTRACTIONS = json
KV_MODE = none
JSON_TRIM_BRACES_IN_ARRAY_NAMES = true

[splunkd_latency_tracker]
SHOULD_LINEMERGE = false
TIMESTAMP_FIELDS = datetime
TIME_FORMAT = %s.%l
INDEXED_EXTRACTIONS = json
KV_MODE = none
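# Example (hypothetical, commented out): a custom metrics feed can reuse the
# settings of the metrics sourcetypes above; here a StatsD-style input keeps
# the [statsd] defaults and only pins the protocol and timestamping:
#
# [myapp_statsd]
# METRICS_PROTOCOL = statsd
# DATETIME_CONFIG = CURRENT
# SHOULD_LINEMERGE = false
# NO_BINARY_CHECK = true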