@@ -0,0 +1,60 @@
# SA-metricator-for-nmon

Copyright 2017 Octamis limited - Copyright 2017 Guilhem Marchand

All rights reserved.

Sample indexes.conf:

# indexes.conf

########################
# default Indexes scheme
########################

# The default indexing scheme uses a combination of 4 indexes:

# - metrics ingested with the metric store
# - nmon metric data ingested as regular events
# - nmon configuration data ingested as regular events
# - application internal data ingested as regular events

# CUSTOMIZATION:

# If you need more segmentation, for example because you are indexing data from several data centers, we suggest
# that you keep the same naming convention across indexes so that you can easily customize eventtypes and macros.
# A short illustrative example follows.
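# Illustrative example (not part of the default configuration): with two data centers you could keep the
# "os-unix-nmon-" prefix and suffix each index, so that the default "index=os-unix-nmon-...*" searches used
# by the eventtypes still match:
#
# [os-unix-nmon-metrics-dc1]
# [os-unix-nmon-events-dc1]
# [os-unix-nmon-metrics-dc2]
# [os-unix-nmon-events-dc2]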
# nmon data ingested as metrics
[os-unix-nmon-metrics]
disabled = false
coldPath = $SPLUNK_DB/os-unix-nmon-metrics/colddb
datatype = metric
homePath = $SPLUNK_DB/os-unix-nmon-metrics/db
splitByIndexKeys = metric_name,host
thawedPath = $SPLUNK_DB/os-unix-nmon-metrics/thaweddb
repFactor = auto

# nmon data ingested as regular events
[os-unix-nmon-events]
disabled = false
coldPath = $SPLUNK_DB/os-unix-nmon-events/colddb
homePath = $SPLUNK_DB/os-unix-nmon-events/db
thawedPath = $SPLUNK_DB/os-unix-nmon-events/thaweddb
repFactor = auto

# nmon config ingested as regular events
[os-unix-nmon-config]
disabled = false
coldPath = $SPLUNK_DB/os-unix-nmon-config/colddb
homePath = $SPLUNK_DB/os-unix-nmon-config/db
thawedPath = $SPLUNK_DB/os-unix-nmon-config/thaweddb
repFactor = auto

# nmon internal data
[os-unix-nmon-internal]
disabled = false
coldPath = $SPLUNK_DB/os-unix-nmon-internal/colddb
homePath = $SPLUNK_DB/os-unix-nmon-internal/db
thawedPath = $SPLUNK_DB/os-unix-nmon-internal/thaweddb
repFactor = auto
@@ -0,0 +1,19 @@
#
# Splunk app configuration file
#

[install]
is_configured = 0

[package]
id = SA-metricator-for-nmon
check_for_updates = true

[ui]
is_visible = 0
label = SA-metricator-for-nmon

[launcher]
author = Guilhem Marchand for Octamis
description = SA-metricator-for-nmon
version = 1.0.4
@@ -0,0 +1,47 @@
# eventtypes.conf

##### Customization #####
# In case of index name customization, or when splitting data across multiple indexes, you should
# copy this eventtypes.conf to local/eventtypes.conf and adapt the eventtype definitions to your needs,
# for example as sketched below.
########################
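# Illustrative local/eventtypes.conf override (hypothetical custom index names, not part of the default app):
# [nmon]
# search = (index=nmon-events-dc* OR index=nmon-internal-dc* OR index=nmon-config-dc*)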

####################
# All data #
####################

[nmon]
search = (index=os-unix-nmon-events* OR index=os-unix-nmon-internal* OR index=os-unix-nmon-config*)

####################
# Performance data #
####################

[nmon:events]
search = index=os-unix-nmon-events* sourcetype=nmon_data

#####################
# Other types of data
#####################

[nmon:config]
search = index=os-unix-nmon-config* sourcetype=nmon_config

[nmon:collect]
search = index=os-unix-nmon-internal* sourcetype=nmon_collect

[nmon:processing]
search = index=os-unix-nmon-internal* sourcetype=nmon_processing

[nmon:clean]
search = index=os-unix-nmon-internal* sourcetype=nmon_clean

###################
# CIM normalization
###################

# CIM - Uptime stdout (nmon external)
[uptime]
search = index=os-unix-nmon-events* sourcetype=nmon_data type=UPTIME

[inventory]
search = index=os-unix-nmon-config* sourcetype=nmon_config
@@ -0,0 +1,171 @@
# props.conf

###############################
# nmon metrics for metric store
###############################

# Introduced with Splunk 7, metrics are now natively supported
# Nmon uses its own copy of the default metrics_csv sourcetype

[nmon_metrics_csv]
SHOULD_LINEMERGE = False
pulldown_type = true
INDEXED_EXTRACTIONS = csv
ADD_EXTRA_TIME_FIELDS = False
KV_MODE = none
TIMESTAMP_FIELDS = metric_timestamp
TIME_FORMAT = %s.%Q
category = Metrics
description = Comma-separated value format for metrics. Nmon implementation.

# Overriding the default host field based on event data for the nmon_metrics_csv sourcetype (useful when managing Nmon central shares)
TRANSFORMS-hostfield=nmon_metrics_csv_hostoverride

# Metrics can be sent over HTTP using the Splunk HTTP Event Collector (HEC)
[nmon_metrics_http]
TIME_PREFIX = metric_timestamp=\"(\d+)\"
TIME_FORMAT = %s
TRANSFORMS-nmon_metrics_http = nmon_metrics_http_host, nmon_metrics_http_metric_name, nmon_metrics_http_metric_value, nmon_metrics_http_dims, nmon_metrics_http_OStype, nmon_metrics_http_serialnum
NO_BINARY_CHECK = true
SHOULD_LINEMERGE = false
pulldown_type = 1
category = Metrics
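# Illustrative HEC metric event (hypothetical values; field names are taken from the transforms referenced
# above, the exact payload generated by the add-on may differ):
#
# metric_timestamp="1510000000" metric_name="cpu_all.pct_user" _value="12.5" hostname="server01"
# OStype="Linux" serialnum="server01" dimension_logical_cpus="4"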
########################
# nmon metrics as events
########################

# This sourcetype stanza will be used to index nmon csv converted data
# Every generated csv file contains a CSV header used by Splunk to identify fields

[nmon_data]
FIELD_DELIMITER=,
FIELD_QUOTE="
HEADER_FIELD_LINE_NUMBER=1

# your settings
INDEXED_EXTRACTIONS=csv
NO_BINARY_CHECK=1
SHOULD_LINEMERGE=false
TIMESTAMP_FIELDS=ZZZZ
TIME_FORMAT=%d-%m-%Y %H:%M:%S

# set by detected source type
KV_MODE=none
pulldown_type=true

# Leaving PUNCT enabled can impact indexing performance and uses extra disk space
# For structured data it is of little interest and should be deactivated
ANNOTATE_PUNCT=false

# Overriding the default host field based on event data for the nmon_data sourcetype (useful when managing Nmon central shares)
TRANSFORMS-hostfield=nmon_data_hostoverride
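# Illustrative nmon_data csv event (hypothetical values; the exact header is produced by the nmon parsers
# and may differ). The third column is the hostname captured by the nmon_data_hostoverride transform, and
# the ZZZZ column carries the timestamp matching the TIME_FORMAT above:
#
# type,serialnum,hostname,interval,snapshots,ZZZZ,value
# CPU_ALL,0000DEADBEEF,server01,60,1440,01-12-2017 14:00:00,12.5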
# nmon_data sent over http using the Splunk HTTP Event Collector (HEC)
# This sourcetype will be automatically renamed to nmon_data

[nmon_data_http]
SHOULD_LINEMERGE=false
NO_BINARY_CHECK=true
CHARSET=UTF-8
TIME_FORMAT=%s
TIME_PREFIX=timestamp="
MAX_TIMESTAMP_LOOKAHEAD=26
KV_MODE=auto

# Apply index-time parsing configuration
TRANSFORMS-nmon_data_http = nmon_data_http_host, nmon_data_http_OStype, nmon_data_http_type, nmon_data_http_sourcetype

# For search time extractions, activate KV_MODE auto for that source
[source::nmon_data:http]
KV_MODE=auto

########################
# nmon processing events
########################

[nmon_processing]
SHOULD_LINEMERGE=false
NO_BINARY_CHECK=true
CHARSET=UTF-8
TIME_PREFIX=^
TIME_FORMAT=%d-%m-%Y %H:%M:%S
MAX_TIMESTAMP_LOOKAHEAD=19
LINE_BREAKER=([\n\r]+)\d{2}-\d{2}-\d{4}\s\d{2}:\d{2}:\d{2}
TRUNCATE=999999

# Deactivate KV
KV_MODE=none

####################
# nmon config events
####################

[nmon_config]
SHOULD_LINEMERGE=false
NO_BINARY_CHECK=true
CHARSET=UTF-8
TIME_PREFIX=^CONFIG,
TIME_FORMAT=%d-%b-%Y:%H:%M.%S
LINE_BREAKER=([\r\n]+)CONFIG,\d{2}-\w{3}-\d{4}:\d{2}:\d{2}\.\d{2},
TRUNCATE=0
MAX_EVENTS=100000
MAX_TIMESTAMP_LOOKAHEAD=30

# Deactivate KV
KV_MODE = none

# Overriding the default host field based on event data for the nmon_config sourcetype (useful when managing Nmon central shares)
TRANSFORMS-hostfield=nmon_config_hostoverride

# nmon_config sent over http
[nmon_config:http]
SHOULD_LINEMERGE=false
NO_BINARY_CHECK=true
CHARSET=UTF-8
LINE_BREAKER=([\r\n]+)timestamp=\"
MAX_EVENTS=100000
TIME_FORMAT=%s
TIME_PREFIX=timestamp="
TRUNCATE=0

# Rewrite the source metadata to manage search time extraction
TRANSFORMS-nmon_config_http = nmon_config_http_rewrite_host, nmon_config_http_rewrite_sourcetype

# For search heads
[source::nmon_config:http]
KV_MODE=none

#####################
# nmon collect events
#####################

[nmon_collect]
SHOULD_LINEMERGE=false
NO_BINARY_CHECK=true
CHARSET=UTF-8
TIME_PREFIX=^
TIME_FORMAT=%d-%m-%Y %H:%M:%S
MAX_TIMESTAMP_LOOKAHEAD=19
LINE_BREAKER=([\n\r]+)\d{2}-\d{2}-\d{4}\s\d{2}:\d{2}:\d{2}
TRUNCATE=999999

# Deactivate KV
KV_MODE = none

###################
# nmon clean events
###################

[nmon_clean]
SHOULD_LINEMERGE=false
NO_BINARY_CHECK=true
CHARSET=UTF-8
TIME_PREFIX=^
TIME_FORMAT=%d-%m-%Y %H:%M:%S
MAX_TIMESTAMP_LOOKAHEAD=19
LINE_BREAKER=([\n\r]+)\d{2}-\d{2}-\d{4}\s\d{2}:\d{2}:\d{2}
TRUNCATE=999999

# Deactivate KV
KV_MODE = none
@@ -0,0 +1,114 @@
# transforms.conf

##############
# nmon metrics
##############

# host metadata overridden with the 5th column
[nmon_metrics_csv_hostoverride]
DEST_KEY = MetaData:Host
REGEX = ^\d*,\"{0,1}[^\"\,]*\"{0,1},\"{0,1}[^\"\,]*\"{0,1}[^\"\,]*\"{0,1},\"{0,1}[^\"\,]*\"{0,1},\"{0,1}([^\"\,]*)\"{0,1}
FORMAT = host::$1

# Metrics sent over http - host
[nmon_metrics_http_host]
DEST_KEY = MetaData:Host
REGEX = hostname=\"([^\"]*)\"
FORMAT = host::$1

# Metrics sent over http - metric_name
[nmon_metrics_http_metric_name]
REGEX = metric_name=\"([^\"]*)\"
FORMAT = metric_name::$1
WRITE_META = true

# Metrics sent over http - metric value
[nmon_metrics_http_metric_value]
REGEX = _value=\"([\d|\.]*)\"
FORMAT = _value::$1
WRITE_META = true

# Metrics sent over http - dimensions
[nmon_metrics_http_dims]
REGEX = (dimension\_\w*)=\"([^\"]*)\"
FORMAT = $1::$2
WRITE_META = true
REPEAT_MATCH = true

# Metrics sent over http - OStype
[nmon_metrics_http_OStype]
REGEX = OStype=\"([^\"]*)\"
FORMAT = OStype::$1
WRITE_META = true

# Metrics sent over http - serialnum
[nmon_metrics_http_serialnum]
REGEX = serialnum=\"([^\"]*)\"
FORMAT = serialnum::$1
WRITE_META = true

###########
# nmon data
###########

# Host override based on event data from the nmon_data sourcetype

[nmon_data_hostoverride]
DEST_KEY = MetaData:Host
REGEX = ^\"{0,1}[a-zA-Z0-9\_]+\"{0,1},\"{0,1}[a-zA-Z0-9\-\_\.]+\"{0,1},\"{0,1}([a-zA-Z0-9\-\_\.]+)\"{0,1},.+
FORMAT = host::$1

# nmon data as events sent over http - host indexed field
[nmon_data_http_host]
DEST_KEY = MetaData:Host
REGEX = hostname=\"([^\"]*)\"
FORMAT = host::$1

# nmon data as events sent over http - OStype indexed field
[nmon_data_http_OStype]
REGEX = \sOStype=\"([^\"]*)\"
WRITE_META = true
FORMAT = OStype::$1
DEFAULT_VALUE = NULL

# nmon data as events sent over http - type indexed field
[nmon_data_http_type]
REGEX = \stype=\"([^\"]*)\"
WRITE_META = true
FORMAT = type::$1
DEFAULT_VALUE = NULL

# nmon data as events sent over http - rewrite sourcetype
[nmon_data_http_sourcetype]
DEST_KEY = MetaData:Sourcetype
REGEX = .*
FORMAT = sourcetype::nmon_data

#############
# nmon config
#############

# Host override based on event data from the nmon_config sourcetype

[nmon_config_hostoverride]
DEST_KEY = MetaData:Host
REGEX = CONFIG\,[a-zA-Z0-9\-\:\.]+\,([a-zA-Z0-9\-\_\.]+)\,[a-zA-Z0-9\-\_\.]+
FORMAT = host::$1
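# Illustrative start of an nmon_config event (hypothetical host and serial values) matching the regex above
# and the nmon_config LINE_BREAKER / TIME_FORMAT defined in props.conf:
#
# CONFIG,01-Dec-2017:14:00.00,server01,0000DEADBEEF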
# nmon_config sent over http

[nmon_config_http_rewrite_host]
DEST_KEY = MetaData:Host
REGEX = host=\"{0,}([a-zA-Z0-9\-\_\.]+)\"{0,}
FORMAT = host::$1

# nmon_config source
[nmon_config_http_rewrite_source]
DEST_KEY = MetaData:Source
REGEX = .*
FORMAT = source::configdata:http

# nmon_config sourcetype
[nmon_config_http_rewrite_sourcetype]
DEST_KEY = MetaData:Sourcetype
REGEX = .*
FORMAT = sourcetype::nmon_config
@@ -0,0 +1,7 @@

# Application-level permissions

[]
owner = admin
access = read : [ * ], write : [ admin ]
export = system
@ -0,0 +1,115 @@
|
||||
{
|
||||
"version": "1.0",
|
||||
"date": "2022-11-14T10:00:27.298956691Z",
|
||||
"hashAlgorithm": "SHA-256",
|
||||
"app": {
|
||||
"id": 3949,
|
||||
"version": "1.0.4",
|
||||
"files": [
|
||||
{
|
||||
"path": "license.txt",
|
||||
"hash": "16b42e565a723507298adfca58a3e970f87f1fdde7dc638a02fa320daf918979"
|
||||
},
|
||||
{
|
||||
"path": "metadata/default.meta",
|
||||
"hash": "6b6c91fc18940aeb1580da6c06f92810beef8af632f73714070bae9e4a777af2"
|
||||
},
|
||||
{
|
||||
"path": "default/app.conf",
|
||||
"hash": "4e9ee565f1d95ec2e0edba40945ed3dbd21a8973eca5db675c5777fb11245b00"
|
||||
},
|
||||
{
|
||||
"path": "default/props.conf",
|
||||
"hash": "865bce58bea6b7ae664d6b1f4d95457dfb41ab34e4588af9efa2625ac36325eb"
|
||||
},
|
||||
{
|
||||
"path": "default/transforms.conf",
|
||||
"hash": "4c575ce58234455c879370ab8f69ed4ea6b9bd7901b6bceb53ac1049c9ad1b82"
|
||||
},
|
||||
{
|
||||
"path": "default/eventtypes.conf",
|
||||
"hash": "66cfa9ca9fe2fecce3d1a15692a7fa2dad36df96773050c14316d0977b4ee72b"
|
||||
},
|
||||
{
|
||||
"path": "static/appLogo_2x.png",
|
||||
"hash": "845f9bcdcd947b60e7c6d110f03debad96fee327b13a1bda2457788e069c350e"
|
||||
},
|
||||
{
|
||||
"path": "static/appIconAlt.png",
|
||||
"hash": "e0611349e349b6cee55d123f85ed286a4ac2c0f1bbdbbedcbf230207bc2404ee"
|
||||
},
|
||||
{
|
||||
"path": "static/appIconAlt_2x.png",
|
||||
"hash": "5434fede7130f1bacc4d8e3ec48f2b3bd67367f55658b6d07d5b232b8f60f522"
|
||||
},
|
||||
{
|
||||
"path": "static/appLogo.png",
|
||||
"hash": "0736204483f4205c90d49c1f212e70d4e15c4d79aee19d43a0ab8247455118d1"
|
||||
},
|
||||
{
|
||||
"path": "static/appIcon.png",
|
||||
"hash": "e0611349e349b6cee55d123f85ed286a4ac2c0f1bbdbbedcbf230207bc2404ee"
|
||||
},
|
||||
{
|
||||
"path": "static/appIcon_2x.png",
|
||||
"hash": "5434fede7130f1bacc4d8e3ec48f2b3bd67367f55658b6d07d5b232b8f60f522"
|
||||
},
|
||||
{
|
||||
"path": "README.md",
|
||||
"hash": "4cb28100eb177f1de9a173a7861e6e9a9b847ef55d819cc42ce41b2dc2b9f414"
|
||||
}
|
||||
]
|
||||
},
|
||||
"products": [
|
||||
{
|
||||
"platform": "splunk",
|
||||
"product": "enterprise",
|
||||
"versions": [
|
||||
"7.0",
|
||||
"7.1",
|
||||
"7.2",
|
||||
"7.3",
|
||||
"8.0",
|
||||
"8.1",
|
||||
"8.2",
|
||||
"9.0"
|
||||
],
|
||||
"architectures": [
|
||||
"x86_64"
|
||||
],
|
||||
"operatingSystems": [
|
||||
"windows",
|
||||
"linux",
|
||||
"macos",
|
||||
"freebsd",
|
||||
"solaris",
|
||||
"aix"
|
||||
]
|
||||
},
|
||||
{
|
||||
"platform": "splunk",
|
||||
"product": "cloud",
|
||||
"versions": [
|
||||
"7.0",
|
||||
"7.1",
|
||||
"7.2",
|
||||
"7.3",
|
||||
"8.0",
|
||||
"8.1",
|
||||
"8.2",
|
||||
"9.0"
|
||||
],
|
||||
"architectures": [
|
||||
"x86_64"
|
||||
],
|
||||
"operatingSystems": [
|
||||
"windows",
|
||||
"linux",
|
||||
"macos",
|
||||
"freebsd",
|
||||
"solaris",
|
||||
"aix"
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
(binary image changes: app icon and logo PNG files added under static/)
@@ -0,0 +1,5 @@
# TA-metricator-for-nmon

Copyright 2017 Octamis - Copyright 2017 Guilhem Marchand

All rights reserved.
@@ -0,0 +1,238 @@
# nmon.conf.spec

# This file contains the possible attributes and values you can use to configure nmon process generation.

# There is an nmon.conf in $SPLUNK_HOME/etc/[nmon|TA-nmon|PA-nmon]/default/. To set custom configurations,
# place an nmon.conf in $SPLUNK_HOME/etc/[nmon|TA-nmon|PA-nmon]/local/.

# *** FILE ENCODING: UTF-8 ! ***
# When creating a local/nmon.conf, pay attention to the file encoding, especially when working under Windows.
# The file must be UTF-8 encoded or you may run into trouble.

# *** DON'T MODIFY THIS FILE ***

########################################################################################################################
### NMON COLLECT OPTIONS ###
########################################################################################################################

# The metricator_helper.sh input script is set by default to run every 60 seconds
# If Nmon is not running, the script will start Nmon using the configuration defined in this file

###
### FIFO options:
###

# FIFO files (named pipes) are now used to minimize the CPU footprint of the technical add-ons
# As such, it is no longer required to use short Nmon run cycles to reduce CPU usage

# You may still want to manage the volume of data generated by adjusting the interval and snapshot values
# As a best practice recommendation, the time to live of nmon processes writing to the FIFO should be 24 hours

# value for interval: time in seconds between two performance measures
fifo_interval=<value>

# value for snapshot: number of measures to perform
fifo_snapshot=<value>

########################################################################################################################
### VARIOUS COMMON OPTIONS ###
########################################################################################################################

# Margin in seconds applied before running a new iteration of the Nmon process, to prevent data gaps between 2 iterations of Nmon
# The metricator_helper.sh script will spawn a new Nmon process when the age in seconds of the current process exceeds the computed endtime

# The endtime is evaluated the following way:
# endtime=$(( ${interval} * ${snapshot} - ${endtime_margin} ))

# When the age of the current process gets higher than this endtime, a new Nmon process will be spawned
# The default value of 240 seconds will start a new process 4 minutes before the current process ends

# Setting this value to "0" will completely disable this feature

# Default value:
# endtime_margin="240"

endtime_margin=<value>
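# Worked example (illustrative values): with fifo_interval=60 and fifo_snapshot=1440, a single nmon
# process covers 60 * 1440 = 86400 seconds (24 hours); with the default endtime_margin of 240 seconds,
# endtime = 60 * 1440 - 240 = 86160 seconds, so the replacement process is spawned 4 minutes before
# the current one terminates.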
### NFS OPTIONS ###

# Change to "1" to activate NFS V2 / V3 (option -N) for AIX hosts
# Default value:
# AIX_NFS23="0"

AIX_NFS23=<string>

# Change to "1" to activate NFS V4 (option -NN) for AIX hosts
# Default value:
# AIX_NFS4="0"

AIX_NFS4=<string>

# Change to "1" to activate NFS V2 / V3 / V4 (option -N) for Linux hosts
# Note: Some versions of Nmon introduced a bug that makes Nmon core dump when activating NFS; ensure your version is not outdated
# Default value:
# Linux_NFS="0"

Linux_NFS=<string>

########################################################################################################################
### LINUX OPTIONS ###
########################################################################################################################

# Change the priority applied when looking for the nmon binary
# By default, the metricator_helper.sh script will use any nmon binary found in PATH
# Set to "1" to give priority to the embedded nmon binaries
# Note: Since release 1.6.07, priority is given by default to embedded binaries

# Default value:
# Linux_embedded_nmon_priority="1"

Linux_embedded_nmon_priority=<string>

# Change the limit for process and disk capture by nmon on Linux
# In the default configuration, nmon captures most of the process table by capturing the main consuming processes
# This limit is a percentage of CPU time, with a default of 0.01
# Changing this value influences the volume of data generated and the associated CPU overhead required to parse it

# Possible values are:
# Linux_unlimited_capture="0" --> Default nmon behavior, capture main processes (no -I option)
# Linux_unlimited_capture="-1" --> Set the capture mode to unlimited (-I -1)
# Linux_unlimited_capture="x.xx" --> Set the percentage limit to a custom value, ex: "0.01" will set "-I 0.01"
Linux_unlimited_capture=<value>

# Set the maximum number of devices collected by Nmon, the default is 1500 devices
# Increase this value if you have systems with more devices
# Up to 3000 devices are supported by the application (hard limit in nmonparser.py / nmonparser.pl)

# Default value:
# Linux_devices="1500"

Linux_devices=<value>

# Enable extended disk statistics (DG*)
# Default is true, which activates and generates DG statistics
Linux_disk_dg_enable=<string>

# Name of the User Defined Disk Groups file, "auto" generates this for you
Linux_disk_dg_group=<value>

########################################################################################################################
### SOLARIS OPTIONS ###
########################################################################################################################

# Change to "1" to activate VxVM volume IO statistics
# Default value:

# Solaris_VxVM="0"

Solaris_VxVM=<string>

# UARG collection (new in Version 1.11): change to "0" to deactivate, "1" to activate (default is activated)
# Default value:

# Solaris_UARG="1"

Solaris_UARG=<string>

########################################################################################################################
### AIX OPTIONS ###
########################################################################################################################

# CAUTION: Since release 1.3.0, we use fifo files, which requires the option "-yoverwrite=1"

# Change this line if you add or remove common options for AIX, do not change NFS options here (see NFS options)
# The -p option is mandatory as it is used at launch time to save the instance pid

# Default value:
# AIX_options="-f -T -A -d -K -L -M -P -^ -p -yoverwrite=1"

AIX_options=<string>

#############################
# Application related options
#############################


######################
# hostname definition:
######################

# This option can be used to force the technical add-on to use the Splunk configured value of the server hostname
# If for some reason you need to use the Splunk host value instead of the real system hostname, set this value to "1"

# We will search for the value of host=<value> in $SPLUNK_HOME/etc/system/local/inputs.conf
# If no value can be found, or if the file does not exist, we will fall back to the normal behavior

# Default is to use the system hostname

# FQDN management in nmonparser: the --fqdn option is not compatible with the hostname override; if override_sys_hostname
# is activated, the --fqdn argument will have no effect

override_sys_hostname=<string>
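# Illustrative sketch (hypothetical hostname): when override_sys_hostname="1", the value read is the host
# attribute from $SPLUNK_HOME/etc/system/local/inputs.conf, typically defined as:
#
# [default]
# host = splunk-visible-hostname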
#####################
# frameID definition:
#####################

# The frameID definition is an enrichment mechanism used within the application to associate a given host with a given frame identifier
# By default, the mapping is performed against the value of "serialnum", which is defined at the raw data level by the nmon binaries

# On AIX systems, the serialnum value is equal to the serial number of the frame hosting the partition
# On Linux and Solaris systems, the serialnum is equal to the value of the hostname

# Using this option allows you to override the serialnum value with a static value defined in the nmon.conf configuration file
# nmon.conf precedence allows defining the serialnum value on a per-deployment basis (local/nmon.conf) or on a per-server basis (/etc/nmon.conf)

# Default is:
# override_sys_serialnum="0"
# which lets nmon set the serialnum value

# Set this value to:
# override_sys_serialnum="1"
# to activate the serialnum override based on the value defined in:

# override_sys_serialnum_value="<string>"
# Acceptable values for <string> are letters (lower and upper case), numbers, and "-" / "_"

override_sys_serialnum=<string>
override_sys_serialnum_value=<string>
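# Illustrative local/nmon.conf override (hypothetical frame identifier):
#
# override_sys_serialnum="1"
# override_sys_serialnum_value="frame-001"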
########################
# nmon external metrics:
########################

# nmon external generation management

# This option manages the activation or deactivation of the nmon external data generation at the lowest level, before the data reaches the parsers
# Default is activated (value=1); set to "0" to deactivate

nmon_external_generation=<string>

###############
# fifo options:
###############

# The realtime mode, which corresponds to the old mechanism, is now deprecated
# fifo mode is mandatory

# Default is "1", which means write to the fifo

mode_fifo=<string>

#######################
# nmon parsers options:
#######################

# Consult the documentation for the full list of available options

# --mode fifo|colddata --> explicitly manage data in fifo/colddata
# --use_fqdn --> use the host fully qualified domain name (default)
# --silent --> minimize the processing output to save data volume (deactivated by default)
# --show_zero_values --> allows generating metrics with 0 values (by default, any metric with a zero value is removed before ingestion)

# In fifo mode, options are sent by metricator_consumer.sh
# In file mode, options are sent by Splunk via the nmon_processing stanza in props.conf

nmonparser_options=<string>
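# Illustrative example (values to adapt to your deployment):
# nmonparser_options="--mode fifo --use_fqdn --silent"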
@@ -0,0 +1 @@
This is where you put any scripts you want to add to this app.
@@ -0,0 +1,199 @@
#!/usr/bin/env python

# Program name: create_agent.py
# Compatibility: Python 2x
# Purpose - Create a customized version of the TA-metricator-for-nmon
# Licence:

# Copyright 2018 Guilhem Marchand

import sys
import os
import tarfile
import glob
import fnmatch
import argparse
import shutil

version = '2.0.0'

####################################################################
############# Arguments Parser
####################################################################

# Define Arguments

parser = argparse.ArgumentParser()

parser.add_argument('-f', action='store', dest='INFILE',
                    help='Name of the tgz archive file')

parser.add_argument('--agentname', action='store', dest='TARGET',
                    help='Define the TA Agent name and root directory')

parser.add_argument('--version', action='version', version='%(prog)s ' + version)

parser.add_argument('--debug', dest='debug', action='store_true')

parser.set_defaults(debug=False)

args = parser.parse_args()

# Set debug
debug = args.debug

####################################################################
############# Functions
####################################################################

# String replacement function
# Can be called by:
# findreplace(path, string_to_search, replace_by, file_extension)

def findreplace(directory, find, replace, filepattern):
    for path, dirs, files in os.walk(os.path.abspath(directory)):
        for filename in fnmatch.filter(files, filepattern):
            filepath = os.path.join(path, filename)

            # Prevent modification of binaries
            if "bin/linux" in filepath:
                if debug:
                    print("file " + str(filename) + " is binary or binary related")
            elif "bin/sarmon" in filepath:
                if debug:
                    print("file " + str(filename) + " is binary or binary related")
            else:
                with open(filepath) as f:
                    s = f.read()
                s = s.replace(find, replace)
                with open(filepath, "w") as f:
                    f.write(s)
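# Usage sketch (hypothetical values, not executed by this script as-is): renaming the default package
# name inside every .conf file of an extracted agent directory would look like:
#
#   findreplace("TA-metricator-for-nmon-custom", "TA-metricator-for-nmon",
#               "TA-metricator-for-nmon-custom", "*.conf")
#
# The first argument is the directory to walk, the next two are the search and replacement strings,
# and the last one is the fnmatch file pattern.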
####################################################################
############# Main Program
####################################################################

# Check Arguments
if len(sys.argv) < 2:
    print "\n%s" % os.path.basename(sys.argv[0])
    print "\nThis utility has been designed to allow creating customized agents for the TA-metricator-for-nmon," \
          " please follow these instructions:\n"
    print "- Download the current release of the technical add-on"
    print "- Ensure this Python script and the TGZ archive are in the same directory"
    print "- Run the tool: ./create_agent.py and check for available options"
    print "- After the execution, a new agent package will have been created in the resources directory"
    print "- Extract its content to your Splunk deployment server, configure the server class, associated clients and" \
          " deploy the agent"
    print "- Don't forget to set the application to restart splunkd after deployment\n"
    print "\nRun this tool as follows:\n"
    print "./create_agent.py -f TA-metricator-for-nmon_xxx.tgz --agentname TA-metricator-for-nmon-custom \n"

    sys.exit(0)

# The first argument is expected to be the name of the tgz archive of the application, as downloaded from Splunkbase
if not args.INFILE:
    print "\nERROR: Please provide the tgz archive file with the -f option\n"
    sys.exit(1)
else:
    infile = args.INFILE

# If the root directory of the agent is not defined, exit and show a message
if not args.TARGET:
    print "ERROR: You must specify the name of the agent package you want to create, and it must be different from" \
          " the default package: TA-metricator-for-nmon"
    sys.exit(0)
else:
    ta_root_dir = args.TARGET

# Avoid naming the TA like the core application
if "TA-" not in ta_root_dir:
    print "ERROR: The TA package name should always start with TA- as good Splunk practice."
    sys.exit(1)

# Verify the tgz archive file exists
if not os.path.exists(infile):
    print ('ERROR: invalid file, could not find: ' + infile)
    sys.exit(1)

# Ensure the same package name does not already exist in the current directory
if os.path.exists(ta_root_dir):
    print ('ERROR: A directory named ' + ta_root_dir + ' already exists in the current directory, please remove it and'
           ' restart')
    sys.exit(1)
elif os.path.exists(ta_root_dir + ".tgz"):
    print ('ERROR: A tgz archive named ' + ta_root_dir + ".tgz" + ' already exists in the current directory, please'
           ' remove it and restart')
    sys.exit(1)

# Extract Archive
tar = tarfile.open(infile)
msg = 'Extracting tgz Archive: ' + infile
print (msg)
tar.extractall()
tar.close()

# Operate

# Get current directory
curdir = os.getcwd()

# Extract the TA-metricator-for-nmon default package in the current directory

print ('INFO: Extracting Agent tgz resource archives')

tgz_files = 'TA-metricator-for-nmon*.tgz'
for tgz in glob.glob(str(tgz_files)):
    tar = tarfile.open(tgz)
    tar.extractall()
    tar.close()

# Copy the TA directory to a new directory matching the agent name

msg = 'INFO: Renaming TA-metricator-for-nmon default agent to ' + ta_root_dir
print (msg)

shutil.copytree('TA-metricator-for-nmon', ta_root_dir)

################# STRING REPLACEMENTS #################

# Replace the old agent name in files

# Perform string replacements

print ('Performing file transformations...')

search = 'TA-metricator-for-nmon'
replace = ta_root_dir
findreplace(ta_root_dir, search, replace, "*.sh")
findreplace(ta_root_dir, search, replace, "*.py")
findreplace(ta_root_dir, search, replace, "*.pl")
findreplace(ta_root_dir, search, replace, "*.conf")

print ('Done.')

# Don't use the "with" statement in tar creation for Python 2.6 backward compatibility
tar_file = ta_root_dir + '.tgz'
out = tarfile.open(tar_file, mode='w:gz')

try:
    out.add(ta_root_dir)
finally:
    msg = 'INFO: ************* Tar creation done of: ' + tar_file + ' *************'
    print (msg)
    out.close()

# Remove the agent directory
if os.path.isdir(ta_root_dir):
    shutil.rmtree(ta_root_dir)

print ('\n*** Agent creation terminated: To install the agent: ***\n')
print (' - Upload the tgz archive ' + tar_file + ' to your Splunk deployment server')
print (' - Extract the content of the TA package in $SPLUNK_HOME/etc/deployment-apps/')
print (' - Configure the application (set splunkd to restart), server class and associated clients to push the new'
       ' package to your clients\n')

# END
print ('Operation terminated.\n')
sys.exit(0)
@@ -0,0 +1,3 @@
Text-CSV-1.95: http://search.cpan.org/~ishigaki/Text-CSV-1.95/lib/Text/CSV.pm

Compiled on AIX 7.1, certified under AIX 7.1 and 7.2
@ -0,0 +1,745 @@
|
||||
package Text::Diff;
|
||||
|
||||
use 5.006;
|
||||
use strict;
|
||||
use warnings;
|
||||
use Carp qw/ croak confess /;
|
||||
use Exporter ();
|
||||
use Algorithm::Diff ();
|
||||
|
||||
our $VERSION = '1.45';
|
||||
our @ISA = qw/ Exporter /;
|
||||
our @EXPORT = qw/ diff /;
|
||||
|
||||
## Hunks are made of ops. An op is the starting index for each
|
||||
## sequence and the opcode:
|
||||
use constant A => 0; # Array index before match/discard
|
||||
use constant B => 1;
|
||||
use constant OPCODE => 2; # "-", " ", "+"
|
||||
use constant FLAG => 3; # What to display if not OPCODE "!"
|
||||
|
||||
my %internal_styles = (
|
||||
Unified => undef,
|
||||
Context => undef,
|
||||
OldStyle => undef,
|
||||
Table => undef, ## "internal", but in another module
|
||||
);
|
||||
|
||||
sub diff {
|
||||
my @seqs = ( shift, shift );
|
||||
my $options = shift || {};
|
||||
|
||||
for my $i ( 0 .. 1 ) {
|
||||
my $seq = $seqs[$i];
|
||||
my $type = ref $seq;
|
||||
|
||||
while ( $type eq "CODE" ) {
|
||||
$seqs[$i] = $seq = $seq->( $options );
|
||||
$type = ref $seq;
|
||||
}
|
||||
|
||||
my $AorB = !$i ? "A" : "B";
|
||||
|
||||
if ( $type eq "ARRAY" ) {
|
||||
## This is most efficient :)
|
||||
$options->{"OFFSET_$AorB"} = 0
|
||||
unless defined $options->{"OFFSET_$AorB"};
|
||||
}
|
||||
elsif ( $type eq "SCALAR" ) {
|
||||
$seqs[$i] = [split( /^/m, $$seq )];
|
||||
$options->{"OFFSET_$AorB"} = 1
|
||||
unless defined $options->{"OFFSET_$AorB"};
|
||||
}
|
||||
elsif ( ! $type ) {
|
||||
$options->{"OFFSET_$AorB"} = 1
|
||||
unless defined $options->{"OFFSET_$AorB"};
|
||||
$options->{"FILENAME_$AorB"} = $seq
|
||||
unless defined $options->{"FILENAME_$AorB"};
|
||||
$options->{"MTIME_$AorB"} = (stat($seq))[9]
|
||||
unless defined $options->{"MTIME_$AorB"};
|
||||
|
||||
local $/ = "\n";
|
||||
open F, "<$seq" or croak "$!: $seq";
|
||||
$seqs[$i] = [<F>];
|
||||
close F;
|
||||
|
||||
}
|
||||
elsif ( $type eq "GLOB" || UNIVERSAL::isa( $seq, "IO::Handle" ) ) {
|
||||
$options->{"OFFSET_$AorB"} = 1
|
||||
unless defined $options->{"OFFSET_$AorB"};
|
||||
local $/ = "\n";
|
||||
$seqs[$i] = [<$seq>];
|
||||
}
|
||||
else {
|
||||
confess "Can't handle input of type ", ref;
|
||||
}
|
||||
}
|
||||
|
||||
## Config vars
|
||||
my $output;
|
||||
my $output_handler = $options->{OUTPUT};
|
||||
my $type = ref $output_handler ;
|
||||
if ( ! defined $output_handler ) {
|
||||
$output = "";
|
||||
$output_handler = sub { $output .= shift };
|
||||
}
|
||||
elsif ( $type eq "CODE" ) {
|
||||
## No problems, mate.
|
||||
}
|
||||
elsif ( $type eq "SCALAR" ) {
|
||||
my $out_ref = $output_handler;
|
||||
$output_handler = sub { $$out_ref .= shift };
|
||||
}
|
||||
elsif ( $type eq "ARRAY" ) {
|
||||
my $out_ref = $output_handler;
|
||||
$output_handler = sub { push @$out_ref, shift };
|
||||
}
|
||||
elsif ( $type eq "GLOB" || UNIVERSAL::isa $output_handler, "IO::Handle" ) {
|
||||
my $output_handle = $output_handler;
|
||||
$output_handler = sub { print $output_handle shift };
|
||||
}
|
||||
else {
|
||||
croak "Unrecognized output type: $type";
|
||||
}
|
||||
|
||||
my $style = $options->{STYLE};
|
||||
$style = "Unified" unless defined $options->{STYLE};
|
||||
$style = "Text::Diff::$style" if exists $internal_styles{$style};
|
||||
|
||||
if ( ! $style->can( "hunk" ) ) {
|
||||
eval "require $style; 1" or die $@;
|
||||
}
|
||||
|
||||
$style = $style->new if ! ref $style && $style->can( "new" );
|
||||
|
||||
my $ctx_lines = $options->{CONTEXT};
|
||||
$ctx_lines = 3 unless defined $ctx_lines;
|
||||
$ctx_lines = 0 if $style->isa( "Text::Diff::OldStyle" );
|
||||
|
||||
my @keygen_args = $options->{KEYGEN_ARGS}
|
||||
? @{$options->{KEYGEN_ARGS}}
|
||||
: ();
|
||||
|
||||
## State vars
|
||||
my $diffs = 0; ## Number of discards this hunk
|
||||
my $ctx = 0; ## Number of " " (ctx_lines) ops pushed after last diff.
|
||||
my @ops; ## ops (" ", +, -) in this hunk
|
||||
my $hunks = 0; ## Number of hunks
|
||||
|
||||
my $emit_ops = sub {
|
||||
$output_handler->( $style->file_header( @seqs, $options ) )
|
||||
unless $hunks++;
|
||||
$output_handler->( $style->hunk_header( @seqs, @_, $options ) );
|
||||
$output_handler->( $style->hunk ( @seqs, @_, $options ) );
|
||||
$output_handler->( $style->hunk_footer( @seqs, @_, $options ) );
|
||||
};
|
||||
|
||||
## We keep 2*ctx_lines so that if a diff occurs
|
||||
## at 2*ctx_lines we continue to grow the hunk instead
|
||||
## of emitting diffs and context as we go. We
|
||||
## need to know the total length of both of the two
|
||||
## subsequences so the line count can be printed in the
|
||||
## header.
|
||||
my $dis_a = sub {push @ops, [@_[0,1],"-"]; ++$diffs ; $ctx = 0 };
|
||||
my $dis_b = sub {push @ops, [@_[0,1],"+"]; ++$diffs ; $ctx = 0 };
|
||||
|
||||
Algorithm::Diff::traverse_sequences(
|
||||
@seqs,
|
||||
{
|
||||
MATCH => sub {
|
||||
push @ops, [@_[0,1]," "];
|
||||
|
||||
if ( $diffs && ++$ctx > $ctx_lines * 2 ) {
|
||||
$emit_ops->( [ splice @ops, 0, $#ops - $ctx_lines ] );
|
||||
$ctx = $diffs = 0;
|
||||
}
|
||||
|
||||
## throw away context lines that aren't needed any more
|
||||
shift @ops if ! $diffs && @ops > $ctx_lines;
|
||||
},
|
||||
DISCARD_A => $dis_a,
|
||||
DISCARD_B => $dis_b,
|
||||
},
|
||||
$options->{KEYGEN}, # pass in user arguments for key gen function
|
||||
@keygen_args,
|
||||
);
|
||||
|
||||
if ( $diffs ) {
|
||||
$#ops -= $ctx - $ctx_lines if $ctx > $ctx_lines;
|
||||
$emit_ops->( \@ops );
|
||||
}
|
||||
|
||||
$output_handler->( $style->file_footer( @seqs, $options ) ) if $hunks;
|
||||
|
||||
return defined $output ? $output : $hunks;
|
||||
}
|
||||
|
||||
sub _header {
|
||||
my ( $h ) = @_;
|
||||
my ( $p1, $fn1, $t1, $p2, $fn2, $t2 ) = @{$h}{
|
||||
"FILENAME_PREFIX_A",
|
||||
"FILENAME_A",
|
||||
"MTIME_A",
|
||||
"FILENAME_PREFIX_B",
|
||||
"FILENAME_B",
|
||||
"MTIME_B"
|
||||
};
|
||||
|
||||
## remember to change Text::Diff::Table if this logic is tweaked.
|
||||
return "" unless defined $fn1 && defined $fn2;
|
||||
|
||||
return join( "",
|
||||
$p1, " ", $fn1, defined $t1 ? "\t" . localtime $t1 : (), "\n",
|
||||
$p2, " ", $fn2, defined $t2 ? "\t" . localtime $t2 : (), "\n",
|
||||
);
|
||||
}
|
||||
|
||||
## _range encapsulates the building of, well, ranges. Turns out there are
|
||||
## a few nuances.
|
||||
sub _range {
|
||||
my ( $ops, $a_or_b, $format ) = @_;
|
||||
|
||||
my $start = $ops->[ 0]->[$a_or_b];
|
||||
my $after = $ops->[-1]->[$a_or_b];
|
||||
|
||||
## The sequence indexes in the lines are from *before* the OPCODE is
|
||||
## executed, so we bump the last index up unless the OP indicates
|
||||
## it didn't change.
|
||||
++$after
|
||||
unless $ops->[-1]->[OPCODE] eq ( $a_or_b == A ? "+" : "-" );
|
||||
|
||||
## convert from 0..n index to 1..(n+1) line number. The unless modifier
|
||||
## handles diffs with no context, where only one file is affected. In this
|
||||
## case $start == $after indicates an empty range, and the $start must
|
||||
## not be incremented.
|
||||
my $empty_range = $start == $after;
|
||||
++$start unless $empty_range;
|
||||
|
||||
return
|
||||
$start == $after
|
||||
? $format eq "unified" && $empty_range
|
||||
? "$start,0"
|
||||
: $start
|
||||
: $format eq "unified"
|
||||
? "$start,".($after-$start+1)
|
||||
: "$start,$after";
|
||||
}
|
||||
|
||||
sub _op_to_line {
|
||||
my ( $seqs, $op, $a_or_b, $op_prefixes ) = @_;
|
||||
|
||||
my $opcode = $op->[OPCODE];
|
||||
return () unless defined $op_prefixes->{$opcode};
|
||||
|
||||
my $op_sym = defined $op->[FLAG] ? $op->[FLAG] : $opcode;
|
||||
$op_sym = $op_prefixes->{$op_sym};
|
||||
return () unless defined $op_sym;
|
||||
|
||||
$a_or_b = $op->[OPCODE] ne "+" ? 0 : 1 unless defined $a_or_b;
|
||||
my @line = ( $op_sym, $seqs->[$a_or_b][$op->[$a_or_b]] );
|
||||
unless ( $line[1] =~ /(?:\n|\r\n)$/ ) {
|
||||
$line[1] .= "\n\\ No newline at end of file\n";
|
||||
}
|
||||
return @line;
|
||||
}
|
||||
|
||||
SCOPE: {
|
||||
package Text::Diff::Base;
|
||||
|
||||
sub new {
|
||||
my $proto = shift;
|
||||
return bless { @_ }, ref $proto || $proto;
|
||||
}
|
||||
|
||||
sub file_header { return "" }
|
||||
|
||||
sub hunk_header { return "" }
|
||||
|
||||
sub hunk { return "" }
|
||||
|
||||
sub hunk_footer { return "" }
|
||||
|
||||
sub file_footer { return "" }
|
||||
}
|
||||
|
||||
@Text::Diff::Unified::ISA = qw( Text::Diff::Base );
|
||||
|
||||
sub Text::Diff::Unified::file_header {
|
||||
shift; ## No instance data
|
||||
my $options = pop ;
|
||||
|
||||
_header(
|
||||
{ FILENAME_PREFIX_A => "---", FILENAME_PREFIX_B => "+++", %$options }
|
||||
);
|
||||
}
|
||||
|
||||
sub Text::Diff::Unified::hunk_header {
|
||||
shift; ## No instance data
|
||||
pop; ## Ignore options
|
||||
my $ops = pop;
|
||||
|
||||
return join( "",
|
||||
"@@ -",
|
||||
_range( $ops, A, "unified" ),
|
||||
" +",
|
||||
_range( $ops, B, "unified" ),
|
||||
" @@\n",
|
||||
);
|
||||
}
|
||||
|
||||
sub Text::Diff::Unified::hunk {
|
||||
shift; ## No instance data
|
||||
pop; ## Ignore options
|
||||
my $ops = pop;
|
||||
|
||||
my $prefixes = { "+" => "+", " " => " ", "-" => "-" };
|
||||
|
||||
return join "", map _op_to_line( \@_, $_, undef, $prefixes ), @$ops
|
||||
}
|
||||
|
||||
@Text::Diff::Context::ISA = qw( Text::Diff::Base );
|
||||
|
||||
sub Text::Diff::Context::file_header {
|
||||
_header { FILENAME_PREFIX_A=>"***", FILENAME_PREFIX_B=>"---", %{$_[-1]} };
|
||||
}
|
||||
|
||||
sub Text::Diff::Context::hunk_header {
|
||||
return "***************\n";
|
||||
}
|
||||
|
||||
sub Text::Diff::Context::hunk {
|
||||
shift; ## No instance data
|
||||
pop; ## Ignore options
|
||||
my $ops = pop;
|
||||
## Leave the sequences in @_[0,1]
|
||||
|
||||
my $a_range = _range( $ops, A, "" );
|
||||
my $b_range = _range( $ops, B, "" );
|
||||
|
||||
## Sigh. Gotta make sure that differences that aren't adds/deletions
|
||||
## get prefixed with "!", and that the old opcodes are removed.
|
||||
my $after;
|
||||
for ( my $start = 0; $start <= $#$ops ; $start = $after ) {
|
||||
## Scan until next difference
|
||||
$after = $start + 1;
|
||||
my $opcode = $ops->[$start]->[OPCODE];
|
||||
next if $opcode eq " ";
|
||||
|
||||
my $bang_it;
|
||||
while ( $after <= $#$ops && $ops->[$after]->[OPCODE] ne " " ) {
|
||||
$bang_it ||= $ops->[$after]->[OPCODE] ne $opcode;
|
||||
++$after;
|
||||
}
|
||||
|
||||
if ( $bang_it ) {
|
||||
for my $i ( $start..($after-1) ) {
|
||||
$ops->[$i]->[FLAG] = "!";
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
my $b_prefixes = { "+" => "+ ", " " => " ", "-" => undef, "!" => "! " };
|
||||
my $a_prefixes = { "+" => undef, " " => " ", "-" => "- ", "!" => "! " };
|
||||
|
||||
return join( "",
|
||||
"*** ", $a_range, " ****\n",
|
||||
map( _op_to_line( \@_, $_, A, $a_prefixes ), @$ops ),
|
||||
"--- ", $b_range, " ----\n",
|
||||
map( _op_to_line( \@_, $_, B, $b_prefixes ), @$ops ),
|
||||
);
|
||||
}
|
||||
|
||||
@Text::Diff::OldStyle::ISA = qw( Text::Diff::Base );
|
||||
|
||||
sub _op {
|
||||
my $ops = shift;
|
||||
my $op = $ops->[0]->[OPCODE];
|
||||
$op = "c" if grep $_->[OPCODE] ne $op, @$ops;
|
||||
$op = "a" if $op eq "+";
|
||||
$op = "d" if $op eq "-";
|
||||
return $op;
|
||||
}
|
||||
|
||||
sub Text::Diff::OldStyle::hunk_header {
|
||||
shift; ## No instance data
|
||||
pop; ## ignore options
|
||||
my $ops = pop;
|
||||
|
||||
my $op = _op $ops;
|
||||
|
||||
return join "", _range( $ops, A, "" ), $op, _range( $ops, B, "" ), "\n";
|
||||
}
|
||||
|
||||
sub Text::Diff::OldStyle::hunk {
|
||||
shift; ## No instance data
|
||||
pop; ## ignore options
|
||||
my $ops = pop;
|
||||
## Leave the sequences in @_[0,1]
|
||||
|
||||
my $a_prefixes = { "+" => undef, " " => undef, "-" => "< " };
|
||||
my $b_prefixes = { "+" => "> ", " " => undef, "-" => undef };
|
||||
|
||||
my $op = _op $ops;
|
||||
|
||||
return join( "",
|
||||
map( _op_to_line( \@_, $_, A, $a_prefixes ), @$ops ),
|
||||
$op eq "c" ? "---\n" : (),
|
||||
map( _op_to_line( \@_, $_, B, $b_prefixes ), @$ops ),
|
||||
);
|
||||
}
|
||||
|
||||
1;
|
||||
|
||||
__END__
|
||||
|
||||
=head1 NAME
|
||||
|
||||
Text::Diff - Perform diffs on files and record sets
|
||||
|
||||
=head1 SYNOPSIS
|
||||
|
||||
use Text::Diff;
|
||||
|
||||
## Mix and match filenames, strings, file handles, producer subs,
|
||||
## or arrays of records; returns diff in a string.
|
||||
## WARNING: can return B<large> diffs for large files.
|
||||
my $diff = diff "file1.txt", "file2.txt", { STYLE => "Context" };
|
||||
my $diff = diff \$string1, \$string2, \%options;
|
||||
my $diff = diff \*FH1, \*FH2;
|
||||
my $diff = diff \&reader1, \&reader2;
|
||||
my $diff = diff \@records1, \@records2;
|
||||
|
||||
## May also mix input types:
|
||||
my $diff = diff \@records1, "file_B.txt";
|
||||
|
||||
=head1 DESCRIPTION
|
||||
|
||||
C<diff()> provides a basic set of services akin to the GNU C<diff> utility. It
|
||||
is not anywhere near as feature complete as GNU C<diff>, but it is better
|
||||
integrated with Perl and available on all platforms. It is often faster than
|
||||
shelling out to a system's C<diff> executable for small files, and generally
|
||||
slower on larger files.
|
||||
|
||||
Relies on L<Algorithm::Diff> for, well, the algorithm. This may not produce
|
||||
the same exact diff as a system's local C<diff> executable, but it will be a
|
||||
valid diff and comprehensible by C<patch>. We haven't seen any differences
|
||||
between L<Algorithm::Diff>'s logic and GNU C<diff>'s, but we have not examined
|
||||
them to make sure they are indeed identical.
|
||||
|
||||
B<Note>: If you don't want to import the C<diff> function, do one of the
|
||||
following:
|
||||
|
||||
use Text::Diff ();
|
||||
|
||||
require Text::Diff;
|
||||
|
||||
That's a pretty rare occurrence,
|
||||
so C<diff()> is exported by default.
|
||||
|
||||
If you pass a filename, but the file can't be read,
|
||||
then C<diff()> will C<croak>.
|
||||
|
||||
=head1 OPTIONS
|
||||
|
||||
C<diff()> takes two parameters from which to draw input and a set of
|
||||
options to control its output. The options are:
|
||||
|
||||
=over
|
||||
|
||||
=item FILENAME_A, MTIME_A, FILENAME_B, MTIME_B
|
||||
|
||||
The name of the file and the modification time "files".
|
||||
|
||||
These are filled in automatically for each file when C<diff()> is passed a
|
||||
filename, unless a defined value is passed in.
|
||||
|
||||
If a filename is not passed in and FILENAME_A and FILENAME_B are not provided
|
||||
or are C<undef>, the header will not be printed.
|
||||
|
||||
Unused on C<OldStyle> diffs.
|
||||
|
||||
=item OFFSET_A, OFFSET_B
|
||||
|
||||
The index of the first line / element. These default to 1 for all
|
||||
parameter types except ARRAY references, for which the default is 0. This
|
||||
is because ARRAY references are presumed to be data structures, while the
|
||||
others are line-oriented text.
|
||||
|
||||
=item STYLE
|
||||
|
||||
"Unified", "Context", "OldStyle", or an object or class reference for a class
|
||||
providing C<file_header()>, C<hunk_header()>, C<hunk()>, C<hunk_footer()> and
|
||||
C<file_footer()> methods. The two footer() methods are provided for
|
||||
overloading only; none of the formats provide them.
|
||||
|
||||
Defaults to "Unified" (unlike standard C<diff>, but Unified is what's most
|
||||
often used in submitting patches and is the most human readable of the three.
|
||||
|
||||
If the package indicated by the STYLE has no C<hunk()> method, C<diff()> will
|
||||
load it automatically (lazy loading). Since all such packages should inherit
|
||||
from C<Text::Diff::Base>, this should be marvy.
|
||||
|
||||
Styles may be specified as class names (C<STYLE =E<gt> 'Foo'>),
|
||||
in which case they will be C<new()>ed with no parameters,
|
||||
or as objects (C<STYLE =E<gt> Foo-E<gt>new>).
|
||||
|
||||
=item CONTEXT
|
||||
|
||||
How many lines before and after each diff to display. Ignored on old-style
|
||||
diffs. Defaults to 3.
|
||||
|
||||
=item OUTPUT
|
||||
|
||||
Examples and their equivalent subroutines:
|
||||
|
||||
OUTPUT => \*FOOHANDLE, # like: sub { print FOOHANDLE shift() }
|
||||
OUTPUT => \$output, # like: sub { $output .= shift }
|
||||
OUTPUT => \@output, # like: sub { push @output, shift }
|
||||
OUTPUT => sub { $output .= shift },
|
||||
|
||||
If no C<OUTPUT> is supplied, returns the diffs in a string. If
|
||||
C<OUTPUT> is a C<CODE> ref, it will be called once with the (optional)
|
||||
file header, and once for each hunk body with the text to emit. If
|
||||
C<OUTPUT> is an L<IO::Handle>, output will be emitted to that handle.
|
||||
|
||||
=item FILENAME_PREFIX_A, FILENAME_PREFIX_B
|
||||
|
||||
The string to print before the filename in the header. Unused on C<OldStyle>
|
||||
diffs. Defaults are C<"---">, C<"+++"> for Unified and C<"***">, C<"+++"> for
|
||||
Context.
|
||||
|
||||
=item KEYGEN, KEYGEN_ARGS
|
||||
|
||||
These are passed to L<Algorithm::Diff/traverse_sequences>.
|
||||
|
||||
=back
|
||||
|
||||
B<Note>: if neither C<FILENAME_> option is defined, the header will not be
|
||||
printed. If at least one is present, the other and both C<MTIME_> options must
|
||||
be present or "Use of undefined variable" warnings will be generated (except
|
||||
on C<OldStyle> diffs, which ignores these options).
|
||||
|
||||
=head1 Formatting Classes
|
||||
|
||||
These functions implement the output formats. They are grouped in to classes
|
||||
so C<diff()> can use class names to call the correct set of output routines and
|
||||
so that you may inherit from them easily. There are no constructors or
|
||||
instance methods for these classes, though subclasses may provide them if need
|
||||
be.
|
||||
|
||||
Each class has C<file_header()>, C<hunk_header()>, C<hunk()>, and C<footer()>
|
||||
methods identical to those documented in the C<Text::Diff::Unified> section.
|
||||
C<header()> is called before the C<hunk()> is first called, C<footer()>
|
||||
afterwards. The default footer function is an empty method provided for
|
||||
overloading:
|
||||
|
||||
sub footer { return "End of patch\n" }
|
||||
|
||||
Some output formats are provided by external modules (which are loaded
|
||||
automatically), such as L<Text::Diff::Table>. These are
|
||||
are documented here to keep the documentation simple.
|
||||
|
||||
=head2 Text::Diff::Base
|
||||
|
||||
Returns "" for all methods (other than C<new()>).
|
||||
|
||||
=head2 Text::Diff::Unified
|
||||
|
||||
--- A Mon Nov 12 23:49:30 2001
|
||||
+++ B Mon Nov 12 23:49:30 2001
|
||||
@@ -2,13 +2,13 @@
|
||||
2
|
||||
3
|
||||
4
|
||||
-5d
|
||||
+5a
|
||||
6
|
||||
7
|
||||
8
|
||||
9
|
||||
+9a
|
||||
10
|
||||
11
|
||||
-11d
|
||||
12
|
||||
13
|
||||
|
||||
=over
|
||||
|
||||
=item Text::Diff::Unified::file_header
|
||||
|
||||
$s = Text::Diff::Unified->file_header( $options );
|
||||
|
||||
Returns a string containing a unified header. The sole parameter is the
|
||||
C<options> hash passed in to C<diff()>, containing at least:
|
||||
|
||||
FILENAME_A => $fn1,
|
||||
MTIME_A => $mtime1,
|
||||
FILENAME_B => $fn2,
|
||||
MTIME_B => $mtime2
|
||||
|
||||
May also contain
|
||||
|
||||
FILENAME_PREFIX_A => "---",
|
||||
FILENAME_PREFIX_B => "+++",
|
||||
|
||||
to override the default prefixes (default values shown).
|
||||
|
||||
=item Text::Diff::Unified::hunk_header
|
||||
|
||||
Text::Diff::Unified->hunk_header( \@ops, $options );
|
||||
|
||||
Returns a string containing the heading of one hunk of unified diff.
|
||||
|
||||
=item Text::Diff::Unified::hunk
|
||||
|
||||
Text::Diff::Unified->hunk( \@seq_a, \@seq_b, \@ops, $options );
|
||||
|
||||
Returns a string containing the output of one hunk of unified diff.
|
||||
|
||||
=back
|
||||
|
||||
=head2 Text::Diff::Table
|
||||
|
||||
+--+----------------------------------+--+------------------------------+
|
||||
| |../Test-Differences-0.2/MANIFEST | |../Test-Differences/MANIFEST |
|
||||
| |Thu Dec 13 15:38:49 2001 | |Sat Dec 15 02:09:44 2001 |
|
||||
+--+----------------------------------+--+------------------------------+
|
||||
| | * 1|Changes *
|
||||
| 1|Differences.pm | 2|Differences.pm |
|
||||
| 2|MANIFEST | 3|MANIFEST |
|
||||
| | * 4|MANIFEST.SKIP *
|
||||
| 3|Makefile.PL | 5|Makefile.PL |
|
||||
| | * 6|t/00escape.t *
|
||||
| 4|t/00flatten.t | 7|t/00flatten.t |
|
||||
| 5|t/01text_vs_data.t | 8|t/01text_vs_data.t |
|
||||
| 6|t/10test.t | 9|t/10test.t |
|
||||
+--+----------------------------------+--+------------------------------+
|
||||
|
||||
This format also goes to some pains to highlight "invisible" characters on
|
||||
differing elements by selectively escaping whitespace:
|
||||
|
||||
+--+--------------------------+--------------------------+
|
||||
| |demo_ws_A.txt |demo_ws_B.txt |
|
||||
| |Fri Dec 21 08:36:32 2001 |Fri Dec 21 08:36:50 2001 |
|
||||
+--+--------------------------+--------------------------+
|
||||
| 1|identical |identical |
|
||||
* 2| spaced in | also spaced in *
|
||||
* 3|embedded space |embedded tab *
|
||||
| 4|identical |identical |
|
||||
* 5| spaced in |\ttabbed in *
|
||||
* 6|trailing spaces\s\s\n |trailing tabs\t\t\n *
|
||||
| 7|identical |identical |
|
||||
* 8|lf line\n |crlf line\r\n *
|
||||
* 9|embedded ws |embedded\tws *
|
||||
+--+--------------------------+--------------------------+
|
||||
|
||||
See L<Text::Diff::Table> for more details, including how the whitespace
|
||||
escaping works.
|
||||
|
||||
=head2 Text::Diff::Context
|
||||
|
||||
*** A Mon Nov 12 23:49:30 2001
|
||||
--- B Mon Nov 12 23:49:30 2001
|
||||
***************
|
||||
*** 2,14 ****
|
||||
2
|
||||
3
|
||||
4
|
||||
! 5d
|
||||
6
|
||||
7
|
||||
8
|
||||
9
|
||||
10
|
||||
11
|
||||
- 11d
|
||||
12
|
||||
13
|
||||
--- 2,14 ----
|
||||
2
|
||||
3
|
||||
4
|
||||
! 5a
|
||||
6
|
||||
7
|
||||
8
|
||||
9
|
||||
+ 9a
|
||||
10
|
||||
11
|
||||
12
|
||||
13
|
||||
|
||||
Note: C<hunk_header()> returns only "***************\n".
|
||||
|
||||
=head2 Text::Diff::OldStyle
|
||||
|
||||
5c5
|
||||
< 5d
|
||||
---
|
||||
> 5a
|
||||
9a10
|
||||
> 9a
|
||||
12d12
|
||||
< 11d
|
||||
|
||||
Note: no C<file_header()>.
|
||||
|
||||
=head1 LIMITATIONS
|
||||
|
||||
Must suck both input files entirely into memory and store them with a normal
|
||||
amount of Perlish overhead (one array location) per record. This is implied by
|
||||
the implementation of L<Algorithm::Diff>, which takes two arrays. If
|
||||
L<Algorithm::Diff> ever offers an incremental mode, this can be changed
|
||||
(contact the maintainers of L<Algorithm::Diff> and C<Text::Diff> if you need
|
||||
this; it shouldn't be too terribly hard to tie arrays in this fashion).
|
||||
|
||||
Does not provide most of the more refined GNU C<diff> options: recursive
|
||||
directory tree scanning, ignoring blank lines / whitespace, etc. These
can all be added as time permits and need arises; many are rather easy, and
patches are quite welcome.
|
||||
|
||||
Uses closures internally; this may lead to leaks on Perl versions 5.6.1 and
prior if used many times over a process' life time.
|
||||
|
||||
=head1 SEE ALSO
|
||||
|
||||
L<Algorithm::Diff> - the underlying implementation of the diff algorithm
|
||||
used by C<Text::Diff>.
|
||||
|
||||
L<YAML::Diff> - find difference between two YAML documents.
|
||||
|
||||
L<HTML::Differences> - find difference between two HTML documents.
|
||||
This uses a more sane approach than L<HTML::Diff>.
|
||||
|
||||
L<XML::Diff> - find difference between two XML documents.
|
||||
|
||||
L<Array::Diff> - find the differences between two Perl arrays.
|
||||
|
||||
L<Hash::Diff> - find the differences between two Perl hashes.
|
||||
|
||||
L<Data::Diff> - find difference between two arbitrary data structures.
|
||||
|
||||
=head1 REPOSITORY
|
||||
|
||||
L<https://github.com/neilbowers/Text-Diff>
|
||||
|
||||
=head1 AUTHOR
|
||||
|
||||
Adam Kennedy E<lt>adamk@cpan.orgE<gt>
|
||||
|
||||
Barrie Slaymaker E<lt>barries@slaysys.comE<gt>
|
||||
|
||||
=head1 COPYRIGHT
|
||||
|
||||
Some parts copyright 2009 Adam Kennedy.
|
||||
|
||||
Copyright 2001 Barrie Slaymaker. All Rights Reserved.
|
||||
|
||||
You may use this under the terms of either the Artistic License or GNU Public
|
||||
License v 2.0 or greater.
|
||||
|
||||
=cut
|
||||
|
||||
1;
|
||||
@ -0,0 +1,142 @@
|
||||
package Text::Diff::Config;
|
||||
|
||||
use 5.006;
|
||||
use strict;
|
||||
use warnings;
|
||||
|
||||
our $VERSION = '1.44';
|
||||
our $Output_Unicode;
|
||||
|
||||
BEGIN
|
||||
{
|
||||
$Output_Unicode = $ENV{'DIFF_OUTPUT_UNICODE'};
|
||||
}
|
||||
|
||||
1;
|
||||
|
||||
__END__
|
||||
|
||||
=pod
|
||||
|
||||
=head1 NAME
|
||||
|
||||
Text::Diff::Config - global configuration for Text::Diff (as a
|
||||
separate module).
|
||||
|
||||
=head1 SYNOPSIS
|
||||
|
||||
use Text::Diff::Config;
|
||||
|
||||
$Text::Diff::Config::Output_Unicode = 1;
|
||||
|
||||
=head1 DESCRIPTION
|
||||
|
||||
This module configures Text::Diff and its related modules. Currently it contains
|
||||
only one global variable, $Text::Diff::Config::Output_Unicode, which is a boolean
flag that, if set, outputs unicode characters as themselves without escaping them
as C< \x{HHHH} > first.
|
||||
|
||||
It is initialized to the value of C< $ENV{DIFF_OUTPUT_UNICODE} >, but can be
|
||||
set to a different value at run-time, including using local.
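A short sketch of scoping the flag with C<local> (the C<$old>/C<$new> strings
are illustrative):

    use Text::Diff;
    use Text::Diff::Config;

    {
        local $Text::Diff::Config::Output_Unicode = 1;
        # Table-style diffs produced here print wide characters as themselves
        print diff( \$old, \$new, { STYLE => "Table" } );
    }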
|
||||
|
||||
=head1 AUTHOR
|
||||
|
||||
Shlomi Fish, L<http://www.shlomifish.org/> .
|
||||
|
||||
=head1 LICENSE
|
||||
|
||||
Copyright 2010, Shlomi Fish.
|
||||
|
||||
This file is licensed under the MIT/X11 License:
|
||||
L<http://www.opensource.org/licenses/mit-license.php>.
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
THE SOFTWARE.
|
||||
|
||||
=cut
|
||||
@ -0,0 +1,429 @@
|
||||
package Text::Diff::Table;
|
||||
|
||||
use 5.006;
|
||||
use strict;
|
||||
use warnings;
|
||||
use Carp;
|
||||
use Text::Diff::Config;
|
||||
|
||||
our $VERSION = '1.44';
|
||||
our @ISA = qw( Text::Diff::Base Exporter );
|
||||
our @EXPORT_OK = qw( expand_tabs );
|
||||
|
||||
my %escapes = map {
|
||||
my $c =
|
||||
$_ eq '"' || $_ eq '$' ? qq{'$_'}
|
||||
: $_ eq "\\" ? qq{"\\\\"}
|
||||
: qq{"$_"};
|
||||
( ord eval $c => $_ )
|
||||
} (
|
||||
map( chr, 32..126),
|
||||
map( sprintf( "\\x%02x", $_ ), ( 0..31, 127..255 ) ),
|
||||
# map( "\\c$_", "A".."Z"),
|
||||
"\\t", "\\n", "\\r", "\\f", "\\b", "\\a", "\\e"
|
||||
## NOTE: "\\\\" is not here because some things are explicitly
|
||||
## escaped before escape() is called and we don't want to
|
||||
## double-escape "\". Also, in most texts, leaving "\" more
|
||||
## readable makes sense.
|
||||
);
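# At this point %escapes maps a character's ord() value to the printable form
# used by escape(): the character itself for printable ASCII, "\t"/"\n"/... for
# the common control characters, and "\xNN" style hex escapes for everything else.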
|
||||
|
||||
sub expand_tabs($) {
|
||||
my $s = shift;
|
||||
my $count = 0;
|
||||
$s =~ s{(\t)(\t*)|([^\t]+)}{
|
||||
if ( $1 ) {
|
||||
my $spaces = " " x ( 8 - $count % 8 + 8 * length $2 );
|
||||
$count = 0;
|
||||
$spaces;
|
||||
}
|
||||
else {
|
||||
$count += length $3;
|
||||
$3;
|
||||
}
|
||||
}ge;
|
||||
|
||||
return $s;
|
||||
}
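# For example, expand_tabs("a\tb") returns "a       b": the tab is replaced by
# enough spaces to reach the next 8-column tab stop.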
|
||||
|
||||
sub trim_trailing_line_ends($) {
|
||||
my $s = shift;
|
||||
$s =~ s/[\r\n]+(?!\n)$//;
|
||||
return $s;
|
||||
}
|
||||
|
||||
sub escape($);
|
||||
|
||||
SCOPE: {
|
||||
## use utf8 if available. don't if not.
|
||||
my $escaper = <<'EOCODE';
|
||||
sub escape($) {
|
||||
use utf8;
|
||||
join "", map {
|
||||
my $c = $_;
|
||||
$_ = ord;
|
||||
exists $escapes{$_}
|
||||
? $escapes{$_}
|
||||
: $Text::Diff::Config::Output_Unicode
|
||||
? $c
|
||||
: sprintf( "\\x{%04x}", $_ );
|
||||
} split //, shift;
|
||||
}
|
||||
|
||||
1;
|
||||
EOCODE
|
||||
unless ( eval $escaper ) {
|
||||
$escaper =~ s/ *use *utf8 *;\n// or die "Can't drop use utf8;";
|
||||
eval $escaper or die $@;
|
||||
}
|
||||
}
|
||||
|
||||
sub new {
|
||||
my $proto = shift;
|
||||
return bless { @_ }, $proto
|
||||
}
|
||||
|
||||
my $missing_elt = [ "", "" ];
|
||||
|
||||
sub hunk {
|
||||
my $self = shift;
|
||||
my @seqs = ( shift, shift );
|
||||
my $ops = shift; ## Leave sequences in @_[0,1]
|
||||
my $options = shift;
|
||||
|
||||
my ( @A, @B );
|
||||
for ( @$ops ) {
|
||||
my $opcode = $_->[Text::Diff::OPCODE()];
|
||||
if ( $opcode eq " " ) {
|
||||
push @A, $missing_elt while @A < @B;
|
||||
push @B, $missing_elt while @B < @A;
|
||||
}
|
||||
push @A, [ $_->[0] + ( $options->{OFFSET_A} || 0), $seqs[0][$_->[0]] ]
|
||||
if $opcode eq " " || $opcode eq "-";
|
||||
push @B, [ $_->[1] + ( $options->{OFFSET_B} || 0), $seqs[1][$_->[1]] ]
|
||||
if $opcode eq " " || $opcode eq "+";
|
||||
}
|
||||
|
||||
push @A, $missing_elt while @A < @B;
|
||||
push @B, $missing_elt while @B < @A;
|
||||
my @elts;
|
||||
for ( 0..$#A ) {
|
||||
my ( $A, $B ) = (shift @A, shift @B );
|
||||
|
||||
## Do minimal cleaning on identical elts so these look "normal":
|
||||
## tabs are expanded, trailing newelts removed, etc. For differing
|
||||
## elts, make invisible characters visible if the invisible characters
|
||||
## differ.
|
||||
my $elt_type = $B == $missing_elt ? "A" :
|
||||
$A == $missing_elt ? "B" :
|
||||
$A->[1] eq $B->[1] ? "="
|
||||
: "*";
|
||||
if ( $elt_type ne "*" ) {
|
||||
if ( $elt_type eq "=" || $A->[1] =~ /\S/ || $B->[1] =~ /\S/ ) {
|
||||
$A->[1] = escape trim_trailing_line_ends expand_tabs $A->[1];
|
||||
$B->[1] = escape trim_trailing_line_ends expand_tabs $B->[1];
|
||||
}
|
||||
else {
|
||||
$A->[1] = escape $A->[1];
|
||||
$B->[1] = escape $B->[1];
|
||||
}
|
||||
}
|
||||
else {
|
||||
## not using \z here for backcompat reasons.
|
||||
$A->[1] =~ /^(\s*?)([^ \t].*?)?(\s*)(?![\n\r])$/s;
|
||||
my ( $l_ws_A, $body_A, $t_ws_A ) = ( $1, $2, $3 );
|
||||
$body_A = "" unless defined $body_A;
|
||||
$B->[1] =~ /^(\s*?)([^ \t].*?)?(\s*)(?![\n\r])$/s;
|
||||
my ( $l_ws_B, $body_B, $t_ws_B ) = ( $1, $2, $3 );
|
||||
$body_B = "" unless defined $body_B;
|
||||
|
||||
my $added_escapes;
|
||||
|
||||
if ( $l_ws_A ne $l_ws_B ) {
|
||||
## Make leading tabs visible. Other non-' ' chars
|
||||
## will be dealt with in escape(), but this prevents
|
||||
## tab expansion from hiding tabs by making them
|
||||
## look like ' '.
|
||||
$added_escapes = 1 if $l_ws_A =~ s/\t/\\t/g;
|
||||
$added_escapes = 1 if $l_ws_B =~ s/\t/\\t/g;
|
||||
}
|
||||
|
||||
if ( $t_ws_A ne $t_ws_B ) {
|
||||
## Only trailing whitespace gets the \s treatment
|
||||
## to make it obvious what's going on.
|
||||
$added_escapes = 1 if $t_ws_A =~ s/ /\\s/g;
|
||||
$added_escapes = 1 if $t_ws_B =~ s/ /\\s/g;
|
||||
$added_escapes = 1 if $t_ws_A =~ s/\t/\\t/g;
|
||||
$added_escapes = 1 if $t_ws_B =~ s/\t/\\t/g;
|
||||
}
|
||||
else {
|
||||
$t_ws_A = $t_ws_B = "";
|
||||
}
|
||||
|
||||
my $do_tab_escape = $added_escapes || do {
|
||||
my $expanded_A = expand_tabs join( $body_A, $l_ws_A, $t_ws_A );
|
||||
my $expanded_B = expand_tabs join( $body_B, $l_ws_B, $t_ws_B );
|
||||
$expanded_A eq $expanded_B;
|
||||
};
|
||||
|
||||
my $do_back_escape = $do_tab_escape || do {
|
||||
my ( $unescaped_A, $escaped_A,
|
||||
$unescaped_B, $escaped_B
|
||||
) =
|
||||
map
|
||||
join( "", /(\\.)/g ),
|
||||
map {
|
||||
( $_, escape $_ )
|
||||
}
|
||||
expand_tabs join( $body_A, $l_ws_A, $t_ws_A ),
|
||||
expand_tabs join( $body_B, $l_ws_B, $t_ws_B );
|
||||
$unescaped_A ne $unescaped_B && $escaped_A eq $escaped_B;
|
||||
};
|
||||
|
||||
if ( $do_back_escape ) {
|
||||
$body_A =~ s/\\/\\\\/g;
|
||||
$body_B =~ s/\\/\\\\/g;
|
||||
}
|
||||
|
||||
my $line_A = join $body_A, $l_ws_A, $t_ws_A;
|
||||
my $line_B = join $body_B, $l_ws_B, $t_ws_B;
|
||||
|
||||
unless ( $do_tab_escape ) {
|
||||
$line_A = expand_tabs $line_A;
|
||||
$line_B = expand_tabs $line_B;
|
||||
}
|
||||
|
||||
$A->[1] = escape $line_A;
|
||||
$B->[1] = escape $line_B;
|
||||
}
|
||||
|
||||
push @elts, [ @$A, @$B, $elt_type ];
|
||||
}
|
||||
|
||||
push @{$self->{ELTS}}, @elts, ["bar"];
|
||||
return "";
|
||||
}
|
||||
|
||||
sub _glean_formats {
|
||||
my $self = shift;
|
||||
}
|
||||
|
||||
sub file_footer {
|
||||
my $self = shift;
|
||||
my @seqs = (shift,shift);
|
||||
my $options = pop;
|
||||
|
||||
my @heading_lines;
|
||||
|
||||
if ( defined $options->{FILENAME_A} || defined $options->{FILENAME_B} ) {
|
||||
push @heading_lines, [
|
||||
map(
|
||||
{
|
||||
( "", escape( defined $_ ? $_ : "<undef>" ) );
|
||||
}
|
||||
( @{$options}{qw( FILENAME_A FILENAME_B)} )
|
||||
),
|
||||
"=",
|
||||
];
|
||||
}
|
||||
|
||||
if ( defined $options->{MTIME_A} || defined $options->{MTIME_B} ) {
|
||||
push @heading_lines, [
|
||||
map( {
|
||||
( "",
|
||||
escape(
|
||||
( defined $_ && length $_ )
|
||||
? localtime $_
|
||||
: ""
|
||||
)
|
||||
);
|
||||
}
|
||||
@{$options}{qw( MTIME_A MTIME_B )}
|
||||
),
|
||||
"=",
|
||||
];
|
||||
}
|
||||
|
||||
if ( defined $options->{INDEX_LABEL} ) {
|
||||
push @heading_lines, [ "", "", "", "", "=" ] unless @heading_lines;
|
||||
$heading_lines[-1]->[0] = $heading_lines[-1]->[2] =
|
||||
$options->{INDEX_LABEL};
|
||||
}
|
||||
|
||||
    ## Not unshifting on to @{$self->{ELTS}} in case it's really big. Want
|
||||
## to avoid the overhead.
|
||||
|
||||
my $four_column_mode = 0;
|
||||
for my $cols ( @heading_lines, @{$self->{ELTS}} ) {
|
||||
next if $cols->[-1] eq "bar";
|
||||
if ( $cols->[0] ne $cols->[2] ) {
|
||||
$four_column_mode = 1;
|
||||
last;
|
||||
}
|
||||
}
|
||||
|
||||
unless ( $four_column_mode ) {
|
||||
for my $cols ( @heading_lines, @{$self->{ELTS}} ) {
|
||||
next if $cols->[-1] eq "bar";
|
||||
splice @$cols, 2, 1;
|
||||
}
|
||||
}
|
||||
|
||||
my @w = (0,0,0,0);
|
||||
for my $cols ( @heading_lines, @{$self->{ELTS}} ) {
|
||||
next if $cols->[-1] eq "bar";
|
||||
for my $i (0..($#$cols-1)) {
|
||||
$w[$i] = length $cols->[$i]
|
||||
if defined $cols->[$i] && length $cols->[$i] > $w[$i];
|
||||
}
|
||||
}
|
||||
|
||||
my %fmts = $four_column_mode
|
||||
? (
|
||||
"=" => "| %$w[0]s|%-$w[1]s | %$w[2]s|%-$w[3]s |\n",
|
||||
"A" => "* %$w[0]s|%-$w[1]s * %$w[2]s|%-$w[3]s |\n",
|
||||
"B" => "| %$w[0]s|%-$w[1]s * %$w[2]s|%-$w[3]s *\n",
|
||||
"*" => "* %$w[0]s|%-$w[1]s * %$w[2]s|%-$w[3]s *\n",
|
||||
)
|
||||
: (
|
||||
"=" => "| %$w[0]s|%-$w[1]s |%-$w[2]s |\n",
|
||||
"A" => "* %$w[0]s|%-$w[1]s |%-$w[2]s |\n",
|
||||
"B" => "| %$w[0]s|%-$w[1]s |%-$w[2]s *\n",
|
||||
"*" => "* %$w[0]s|%-$w[1]s |%-$w[2]s *\n",
|
||||
);
|
||||
|
||||
my @args = ('', '', '');
|
||||
push(@args, '') if $four_column_mode;
|
||||
$fmts{bar} = sprintf $fmts{"="}, @args;
|
||||
$fmts{bar} =~ s/\S/+/g;
|
||||
$fmts{bar} =~ s/ /-/g;
|
||||
|
||||
# Sometimes the sprintf has too many arguments,
|
||||
# which results in a warning on Perl 5.021+
|
||||
# I really wanted to write:
|
||||
# no warnings 'redundant';
|
||||
# but that causes a compilation error on older versions of perl
|
||||
# where the warnings pragma doesn't know about 'redundant'
|
||||
no warnings;
|
||||
|
||||
return join( "",
|
||||
map {
|
||||
sprintf( $fmts{$_->[-1]}, @$_ );
|
||||
} (
|
||||
["bar"],
|
||||
@heading_lines,
|
||||
@heading_lines ? ["bar"] : (),
|
||||
@{$self->{ELTS}},
|
||||
),
|
||||
);
|
||||
|
||||
@{$self->{ELTS}} = [];
|
||||
}
|
||||
|
||||
1;
|
||||
|
||||
__END__
|
||||
|
||||
=pod
|
||||
|
||||
=head1 NAME
|
||||
|
||||
Text::Diff::Table - Text::Diff plugin to generate "table" format output
|
||||
|
||||
=head1 SYNOPSIS
|
||||
|
||||
use Text::Diff;
|
||||
|
||||
diff \@a, $b, { STYLE => "Table" };
|
||||
|
||||
=head1 DESCRIPTION
|
||||
|
||||
This is a plugin output formatter for Text::Diff that generates "table" style
|
||||
diffs:
|
||||
|
||||
+--+----------------------------------+--+------------------------------+
|
||||
| |../Test-Differences-0.2/MANIFEST | |../Test-Differences/MANIFEST |
|
||||
| |Thu Dec 13 15:38:49 2001 | |Sat Dec 15 02:09:44 2001 |
|
||||
+--+----------------------------------+--+------------------------------+
|
||||
| | * 1|Changes *
|
||||
| 1|Differences.pm | 2|Differences.pm |
|
||||
| 2|MANIFEST | 3|MANIFEST |
|
||||
| | * 4|MANIFEST.SKIP *
|
||||
| 3|Makefile.PL | 5|Makefile.PL |
|
||||
| | * 6|t/00escape.t *
|
||||
| 4|t/00flatten.t | 7|t/00flatten.t |
|
||||
| 5|t/01text_vs_data.t | 8|t/01text_vs_data.t |
|
||||
| 6|t/10test.t | 9|t/10test.t |
|
||||
+--+----------------------------------+--+------------------------------+
|
||||
|
||||
This format also goes to some pains to highlight "invisible" characters on
|
||||
differing elements by selectively escaping whitespace. Each element is split
into three segments (leading whitespace, body, trailing whitespace). If
whitespace differs in a segment, that segment is whitespace escaped.
|
||||
|
||||
Here is an example of the selective whitespace.
|
||||
|
||||
+--+--------------------------+--------------------------+
|
||||
| |demo_ws_A.txt |demo_ws_B.txt |
|
||||
| |Fri Dec 21 08:36:32 2001 |Fri Dec 21 08:36:50 2001 |
|
||||
+--+--------------------------+--------------------------+
|
||||
| 1|identical |identical |
|
||||
* 2| spaced in | also spaced in *
|
||||
* 3|embedded space |embedded tab *
|
||||
| 4|identical |identical |
|
||||
* 5| spaced in |\ttabbed in *
|
||||
* 6|trailing spaces\s\s\n |trailing tabs\t\t\n *
|
||||
| 7|identical |identical |
|
||||
* 8|lf line\n |crlf line\r\n *
|
||||
* 9|embedded ws |embedded\tws *
|
||||
+--+--------------------------+--------------------------+
|
||||
|
||||
Here's why the lines do or do not have whitespace escaped:
|
||||
|
||||
=over
|
||||
|
||||
=item lines 1, 4, 7 don't differ, no need.
|
||||
|
||||
=item lines 2, 3 differ in non-whitespace, no need.
|
||||
|
||||
=item lines 5, 6, 8, 9 all have subtle ws changes.
|
||||
|
||||
=back
|
||||
|
||||
Whether or not line 3 should have that tab character escaped is a judgement
|
||||
call; so far I'm choosing not to.
|
||||
|
||||
=head1 UNICODE
|
||||
|
||||
To output raw unicode characters, consult the documentation of
|
||||
L<Text::Diff::Config>. You can set the C<DIFF_OUTPUT_UNICODE> environment
|
||||
variable to 1 to output it from the command line. For more information,
|
||||
consult this bug: L<https://rt.cpan.org/Ticket/Display.html?id=54214> .
|
||||
|
||||
=head1 LIMITATIONS
|
||||
|
||||
Table formatting requires buffering the entire diff in memory in order to
|
||||
calculate column widths. This format should only be used for smaller
|
||||
diffs.
|
||||
|
||||
Assumes tab stops every 8 characters, as $DEITY intended.
|
||||
|
||||
Assumes all character codes >= 127 need to be escaped as hex codes, i.e. that
the user's terminal is only ASCII capable, not even "high bit ASCII" capable.
This can be
|
||||
made an option when the need arises.
|
||||
|
||||
Assumes that control codes (character codes 0..31) that don't have slash-letter
|
||||
escapes ("\n", "\r", etc) in Perl are best presented as hex escapes ("\x01")
|
||||
instead of octal ("\001") or control-code ("\cA") escapes.
|
||||
|
||||
=head1 AUTHOR
|
||||
|
||||
Barrie Slaymaker E<lt>barries@slaysys.comE<gt>
|
||||
|
||||
=head1 LICENSE
|
||||
|
||||
Copyright 2001 Barrie Slaymaker, All Rights Reserved.
|
||||
|
||||
You may use this software under the terms of the GNU public license, any
|
||||
version, or the Artistic license.
|
||||
|
||||
=cut
|
||||
@ -0,0 +1,3 @@
|
||||
# The Linux tgz archive contains compiled nmon binaries for various Linux distributions on various types of processors: x86 / Power / arm / s390x
|
||||
|
||||
The source code of nmon for Linux is available on the nmon for Linux site: http://nmon.sourceforge.net
|
||||
@ -0,0 +1,221 @@
|
||||
#!/usr/bin/perl
|
||||
|
||||
# Program name: metricator_cleaner.pl
|
||||
# Compatibility: Perl x
|
||||
# Purpose - Clean nmon and csv files when retention expired
|
||||
# Author - Guilhem Marchand
|
||||
|
||||
$version = "2.0.0";
|
||||
|
||||
use Time::Local;
|
||||
use Time::HiRes;
|
||||
use Getopt::Long;
|
||||
use POSIX 'strftime';
|
||||
use File::stat; # use the object-oriented interface to stat
|
||||
|
||||
# LOGGING INFORMATION:
|
||||
# - The program prints important messages to standard output so that they appear in Splunk logs
|
||||
# - Every message of the script will be indexed and accessible within Splunk splunkd logs
|
||||
|
||||
#################################################
|
||||
## Arguments Parser
|
||||
#################################################
|
||||
|
||||
# Default values
|
||||
my $CSV_REPOSITORY = "csv_repository";
|
||||
my $APP = "";
|
||||
my $CONFIG_REPOSITORY = "config_repository";
|
||||
my $MAXSECONDS = "";
|
||||
my $verbose;
|
||||
|
||||
$result = GetOptions(
|
||||
"csv_repository=s" => \$CSV_REPOSITORY, # string
|
||||
"config_repository=s" => \$CONFIG_REPOSITORY, # string
|
||||
"cleancsv" => \$CLEANCSV, # string
|
||||
"approot=s" => \$APP, # string
|
||||
"maxseconds_csv=s" => \$MAXSECONDS_CSV, # string
|
||||
"version" => \$VERSION, # flag
|
||||
"help" => \$help # flag
|
||||
);
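# Example invocation (values are illustrative), typically run through the Splunk
# command wrapper:
#   splunk cmd $SPLUNK_HOME/etc/apps/TA-metricator-for-nmon/bin/metricator_cleaner.pl --cleancsv --maxseconds_csv 3600
# would permanently remove csv files older than one hour from the csv and config
# repositories.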
|
||||
|
||||
# Show version
|
||||
if ($VERSION) {
|
||||
print("nmon_clean.pl version $version \n");
|
||||
|
||||
exit 0;
|
||||
}
|
||||
|
||||
# Show help
|
||||
if ($help) {
|
||||
|
||||
print( "
|
||||
|
||||
Help for metricator_cleaner.pl:
|
||||
|
||||
In default configuration (eg. no options specified) the script will purge any nmon file (*.nmon) in default nmon_repository
|
||||
|
||||
Available options are:
|
||||
|
||||
--cleancsv :Activate the purge of csv files from csv repository and config repository (see also options above)
|
||||
--maxseconds_csv <value> :Set the maximum file retention in seconds for csv data, every file older than this value will be permanently removed
|
||||
--approot <value> :Set a custom value for the Application root directory (default are: nmon / TA-metricator-for-nmon / PA-nmon)
|
||||
--csv_repository <value> :Set a custom location for directory containing csv data (default: csv_repository)
|
||||
--config_repository <value> :Set a custom location for directory containing config data (default: config_repository)
|
||||
--version :Show current program version \n
|
||||
"
|
||||
);
|
||||
|
||||
exit 0;
|
||||
}
|
||||
|
||||
#################################################
|
||||
## Parameters
|
||||
#################################################
|
||||
|
||||
# Default value for CSV retention (24 hours)
|
||||
my $MAXSECONDS_CSV_DEFAULT = 86400;
|
||||
|
||||
#################################################
|
||||
## Functions
|
||||
#################################################
|
||||
|
||||
#################################################
|
||||
## Program
|
||||
#################################################
|
||||
|
||||
# Processing starting time
|
||||
my $t_start = [Time::HiRes::gettimeofday];
|
||||
|
||||
# Local time
|
||||
my $time = strftime "%d-%m-%Y %H:%M:%S", localtime;
|
||||
|
||||
# Default environment variable SPLUNK_HOME, this shall be automatically defined as the script is expected to be launched by Splunk
|
||||
my $SPLUNK_HOME = $ENV{SPLUNK_HOME};
|
||||
|
||||
# Verify SPLUNK_HOME definition
|
||||
if ( not $SPLUNK_HOME ) {
|
||||
print(
|
||||
"\n$time ERROR: The environment variable SPLUNK_HOME could not be verified, if you want to run this script manually you need to export it before processing \n"
|
||||
);
|
||||
die;
|
||||
}
|
||||
|
||||
# Discover TA-metricator-for-nmon path
|
||||
if ( length($APP) == 0 ) {
|
||||
|
||||
if ( -d "$SPLUNK_HOME/etc/apps/TA-metricator-for-nmon" ) {
|
||||
$APP = "$SPLUNK_HOME/etc/apps/TA-metricator-for-nmon";
|
||||
}
|
||||
elsif ( -d "$SPLUNK_HOME/etc/slave-apps/TA-metricator-for-nmon" ) {
|
||||
$APP = "$SPLUNK_HOME/etc/slave-apps/TA-metricator-for-nmon";
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
else {
|
||||
|
||||
if ( !-d "$APP" ) {
|
||||
print(
|
||||
"\n$time ERROR: The Application root directory could be verified using your custom setting: $APP \n"
|
||||
);
|
||||
die;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
# Verify existence of APP
|
||||
if ( !-d "$APP" ) {
|
||||
print(
|
||||
"\n$time ERROR: The Application root directory could not be found, is the TA-metricator-for-nmon installed ?\n"
|
||||
);
|
||||
die;
|
||||
}
|
||||
|
||||
# var directories
|
||||
my $APP_MAINVAR = "$SPLUNK_HOME/var/log/metricator";
|
||||
my $APP_VAR = "$APP_MAINVAR/var";
|
||||
|
||||
if ( !-d "$APP_MAINVAR" ) {
|
||||
print(
|
||||
"\n$time INFO: main var directory not found ($APP_MAINVAR), no need to run.\n"
|
||||
);
|
||||
exit 0;
|
||||
}
|
||||
|
||||
|
||||
####################################################################
|
||||
############# Main Program
|
||||
####################################################################
|
||||
|
||||
# check retention
|
||||
if ( not "$MAXSECONDS_CSV" ) {
|
||||
$MAXSECONDS_CSV = $MAXSECONDS_CSV_DEFAULT;
|
||||
}
|
||||
|
||||
# Print starting message
|
||||
print("$time Starting nmon cleaning:\n");
|
||||
print("Splunk Root Directory $SPLUNK_HOME nmon_cleaner version: $version Perl version: $] \n");
|
||||
|
||||
# Set current epoch time
|
||||
$epoc = time();
|
||||
|
||||
# If the csv switch is on, purge csv data
|
||||
|
||||
if ($CLEANCSV) {
|
||||
|
||||
# Counter
|
||||
$count = 0;
|
||||
|
||||
# CSV Items to clean
|
||||
@cleaning =
|
||||
( "$APP_VAR/$CSV_REPOSITORY/*.csv", "$APP_VAR/$CONFIG_REPOSITORY/*.csv" );
|
||||
|
||||
# Enter loop
|
||||
foreach $key (@cleaning) {
|
||||
|
||||
@files = glob($key);
|
||||
|
||||
foreach $file (@files) {
|
||||
if ( -f $file ) {
|
||||
|
||||
# Get file timestamp
|
||||
my $file_timestamp = stat($file)->mtime;
|
||||
|
||||
# Get difference
|
||||
my $timediff = $epoc - $file_timestamp;
|
||||
|
||||
# If retention has expired
|
||||
if ( $timediff > $MAXSECONDS_CSV ) {
|
||||
|
||||
# information
|
||||
print ("Max set retention of $MAXSECONDS_CSV seconds seconds expired for file: $file \n");
|
||||
|
||||
# purge file
|
||||
unlink $file;
|
||||
|
||||
# Increment counter
|
||||
$count++;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if ( $count eq 0 ) {
|
||||
print ("$key, no action required. \n");
|
||||
}
|
||||
else {
|
||||
print("INFO: $count files were permanently removed from $key \n");
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
#############################################
|
||||
############# Main Program End ############
|
||||
#############################################
|
||||
|
||||
# Show elapsed time
|
||||
my $t_end = [Time::HiRes::gettimeofday];
|
||||
print "Elapsed time was: ",
|
||||
Time::HiRes::tv_interval( $t_start, $t_end ) . " seconds \n";
|
||||
|
||||
exit(0);
|
||||
@ -0,0 +1,281 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
# Program name: metricator_cleaner.py
|
||||
# Compatibility: Python 3.x
|
||||
# Purpose - Clean csv files when retention expires, tuned for the Coke Company
|
||||
# Author - Guilhem Marchand
|
||||
|
||||
# Load libs
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
import sys
|
||||
import os
|
||||
import glob
|
||||
import time
|
||||
import logging
|
||||
import platform
|
||||
import re
|
||||
import argparse
|
||||
|
||||
# Converter version
|
||||
version = '3.0.0'
|
||||
|
||||
# LOGGING INFORMATION:
|
||||
# - The program uses the standard logging Python module to display important messages in Splunk logs
|
||||
# - Every message of the script will be indexed and accessible within Splunk splunkd logs
|
||||
|
||||
#################################################
|
||||
# Functions
|
||||
#################################################
|
||||
|
||||
# Disallow negative value in parser
|
||||
|
||||
def check_negative(value):
|
||||
|
||||
ivalue = int(value)
|
||||
if ivalue < 0:
|
||||
raise argparse.ArgumentTypeError("%s is an invalid positive int value" % value)
|
||||
return ivalue
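# For example, check_negative("3600") returns 3600, while check_negative("-1")
# raises argparse.ArgumentTypeError so that argparse rejects the option value.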
|
||||
|
||||
#################################################
|
||||
# Arguments Parser
|
||||
#################################################
|
||||
|
||||
# Define Arguments
|
||||
parser = argparse.ArgumentParser()
|
||||
|
||||
parser.add_argument('--cleancsv', action='store_true', default=False, dest='cleancsv',
|
||||
help='Activate the purge of csv files from csv repository and config repository '
|
||||
'(see also options above)')
|
||||
|
||||
parser.add_argument('--maxseconds_csv', action='store', dest='MAXSECONDS_CSV', type=check_negative,
|
||||
                    help='Set the maximum file retention in seconds for csv data, every file older'
|
||||
' than this value will be permanently removed')
|
||||
|
||||
parser.add_argument('--approot', action='store', dest='APP',
|
||||
help='Set a custom value for the Application root directory '
|
||||
'(default are: nmon / TA-metricator-for-nmon / PA-nmon)')
|
||||
|
||||
parser.add_argument('--csv_repository', action='store', dest='CSV_REPOSITORY',
|
||||
help='Set a custom location for directory containing csv data (default: csv_repository)')
|
||||
|
||||
parser.add_argument('--config_repository', action='store', dest='CONFIG_REPOSITORY',
|
||||
help='Set a custom location for directory containing config data (default: config_repository)')
|
||||
|
||||
parser.add_argument('--version', action='version', version='%(prog)s ' + version)
|
||||
|
||||
args = parser.parse_args()
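# Example invocation (values are illustrative), typically run through the Splunk
# command wrapper:
#   splunk cmd python3 $SPLUNK_HOME/etc/apps/TA-metricator-for-nmon/bin/metricator_cleaner.py --cleancsv --maxseconds_csv 3600
# would permanently remove csv files older than one hour from the csv and config
# repositories.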
|
||||
|
||||
#################################################
|
||||
# Variables
|
||||
#################################################
|
||||
|
||||
# Set logging format
|
||||
logging.root.setLevel(logging.DEBUG)
|
||||
formatter = logging.Formatter('%(levelname)s %(message)s')
|
||||
handler = logging.StreamHandler()
|
||||
handler.setFormatter(formatter)
|
||||
logging.root.addHandler(handler)
|
||||
|
||||
# Current date
|
||||
now = time.strftime("%d-%m-%Y %H:%M:%S")
|
||||
|
||||
# Set maxseconds
|
||||
maxseconds_csv = args.MAXSECONDS_CSV
|
||||
|
||||
# Set cleancsv
|
||||
cleancsv = args.cleancsv
|
||||
|
||||
# If the root directory App is not defined, use empty value (will be set later)
|
||||
if not args.APP:
|
||||
APP = ''
|
||||
else:
|
||||
APP = args.APP
|
||||
|
||||
# If the csv_repository is not defined, apply default 'csv_repository' value
|
||||
if not args.CSV_REPOSITORY:
|
||||
csv_repository = "csv_repository"
|
||||
else:
|
||||
csv_repository = args.CSV_REPOSITORY
|
||||
|
||||
# If the config_repository is not defined, apply default 'config_repository' value
|
||||
if not args.CONFIG_REPOSITORY:
|
||||
config_repository = "config_repository"
|
||||
else:
|
||||
config_repository = args.CONFIG_REPOSITORY
|
||||
|
||||
# Guest Operation System type
|
||||
ostype = platform.system().lower()
|
||||
|
||||
# If running Windows OS (used for directory identification)
|
||||
is_windows = re.match(r'^win\w+', (platform.system().lower()))
|
||||
|
||||
# Python version
|
||||
python_version = platform.python_version()
|
||||
|
||||
# Verify the SPLUNK_HOME environment variable is available; the script is expected to be launched by Splunk which
# will set it. For debugging or manual runs, please set this variable manually
|
||||
try:
|
||||
os.environ["SPLUNK_HOME"]
|
||||
except KeyError:
|
||||
logging.error('The environment variable SPLUNK_HOME could not be verified, if you want to run this script '
|
||||
'manually you need to export it before processing')
|
||||
sys.exit(1)
|
||||
|
||||
# SPLUNK_HOME environment variable
|
||||
SPLUNK_HOME = os.environ['SPLUNK_HOME']
|
||||
|
||||
# Set APP root directory
|
||||
if not APP:
|
||||
|
||||
# Discover TA-metricator-for-nmon path
|
||||
|
||||
if is_windows:
|
||||
TA_NMON_APP = SPLUNK_HOME + '\\etc\\apps\\TA-metricator-for-nmon'
|
||||
else:
|
||||
TA_NMON_APP = SPLUNK_HOME + '/etc/apps/TA-metricator-for-nmon'
|
||||
|
||||
if is_windows:
|
||||
TA_NMON_APP_CLUSTERED = SPLUNK_HOME + '\\etc\\slave-apps\\TA-metricator-for-nmon'
|
||||
else:
|
||||
TA_NMON_APP_CLUSTERED = SPLUNK_HOME + '/etc/slave-apps/TA-metricator-for-nmon'
|
||||
|
||||
# Verify APP exist
|
||||
if os.path.exists(TA_NMON_APP):
|
||||
APP = TA_NMON_APP
|
||||
|
||||
elif os.path.exists(TA_NMON_APP_CLUSTERED):
|
||||
APP = TA_NMON_APP_CLUSTERED
|
||||
|
||||
else:
|
||||
        msg = 'The Application root directory could not be found, is the TA-metricator-for-nmon installed ? We tried: '\
|
||||
+ str(TA_NMON_APP) + ' ' + str(TA_NMON_APP_CLUSTERED)
|
||||
logging.error(msg)
|
||||
sys.exit(1)
|
||||
|
||||
else:
|
||||
|
||||
if is_windows:
|
||||
NMON_APP = SPLUNK_HOME + '\\etc\\apps\\' + APP
|
||||
else:
|
||||
NMON_APP = SPLUNK_HOME + '/etc/apps/' + APP
|
||||
|
||||
# Verify APP exist
|
||||
if os.path.exists(NMON_APP):
|
||||
APP = NMON_APP
|
||||
else:
|
||||
msg = 'The Application root directory could not be found, is the TA-metricator-for-nmon installed ? We tried: '\
|
||||
+ str(NMON_APP)
|
||||
logging.error(msg)
|
||||
sys.exit(1)
|
||||
|
||||
# APP_MAINVAR and APP_VAR directories
|
||||
if is_windows:
|
||||
APP_MAINVAR = SPLUNK_HOME + '\\var\\log\\nmon'
|
||||
APP_VAR = APP_MAINVAR + '\\var'
|
||||
else:
|
||||
APP_MAINVAR = SPLUNK_HOME + '/var/log/metricator'
|
||||
APP_VAR = APP_MAINVAR + '/var'
|
||||
|
||||
|
||||
if not os.path.exists(APP_MAINVAR):
    msg = 'The main var directory ' + APP_MAINVAR + ' has not been found, there is no need to run now.'
    print(msg)
    sys.exit(0)
|
||||
|
||||
# Repositories definition
|
||||
if is_windows:
|
||||
CSV_DIR = APP_VAR + '\\' + csv_repository
|
||||
CONFIG_DIR = APP_VAR + '\\' + config_repository
|
||||
else:
|
||||
CSV_DIR = APP_VAR + '/' + csv_repository
|
||||
CONFIG_DIR = APP_VAR + '/' + config_repository
|
||||
|
||||
# List of directories to be processed
|
||||
WORKING_DIR = {CSV_DIR, CONFIG_DIR}
|
||||
|
||||
# Starting time of process
|
||||
start_time = time.time()
|
||||
|
||||
####################################################################
|
||||
# Main Program
|
||||
####################################################################
|
||||
|
||||
# Default value for CSV retention
|
||||
if maxseconds_csv is None:
|
||||
maxseconds_csv = 86400
|
||||
|
||||
# Show current time
|
||||
msg = now + " Starting nmon cleaning"
|
||||
print (msg)
|
||||
|
||||
# Display some basic information about us
|
||||
msg = "Splunk Root Directory ($SPLUNK_HOME): " + str(SPLUNK_HOME) + " nmon_cleaner version: " + str(version) \
|
||||
+ " Python version: " + str(python_version)
|
||||
print (msg)
|
||||
|
||||
# Proceed to CSV cleaning
|
||||
if cleancsv:
|
||||
|
||||
for DIR in WORKING_DIR:
|
||||
|
||||
if os.path.exists(DIR):
|
||||
# cd to directory
|
||||
os.chdir(DIR)
|
||||
|
||||
# Verify we have data to manage
|
||||
counter = len(glob.glob1(DIR, "*.csv"))
|
||||
|
||||
# print (counter)
|
||||
|
||||
if counter == 0:
|
||||
msg = str(DIR) + ', no action required.'
|
||||
print (msg)
|
||||
|
||||
else:
|
||||
|
||||
# counter of files with retention expired
|
||||
counter_expired = 0
|
||||
|
||||
curtime = time.time()
|
||||
limit = maxseconds_csv
|
||||
|
||||
for xfile in glob.glob('*.csv'):
|
||||
|
||||
filemtime = os.path.getmtime(xfile)
|
||||
|
||||
if curtime - filemtime > limit:
|
||||
|
||||
counter_expired += 1
|
||||
|
||||
size_mb = os.path.getsize(xfile)/1000.0/1000.0
|
||||
size_mb = format(size_mb, '.2f')
|
||||
|
||||
mtime = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(filemtime)) # Human readable datetime
|
||||
|
||||
msg = 'Max set retention of ' + str(maxseconds_csv) + ' seconds expired for file: ' +\
|
||||
xfile + ' size(MB): '\
|
||||
+ str(size_mb) + ' mtime: ' + str(mtime)
|
||||
print (msg)
|
||||
|
||||
os.remove(xfile) # Permanently remove the file!
|
||||
|
||||
if counter_expired != 0:
|
||||
msg = str(counter_expired) + ' files were permanently removed due to retention expired' \
|
||||
' for directory ' + DIR
|
||||
else:
|
||||
msg = str(DIR) + ', no action required.'
|
||||
print (msg)
|
||||
|
||||
###################
|
||||
# End
|
||||
###################
|
||||
|
||||
# Time required to process
|
||||
end_time = time.time()
|
||||
result = "Elapsed time was: %g seconds" % (end_time - start_time)
|
||||
print (result)
|
||||
|
||||
# exit
|
||||
sys.exit(0)
|
||||
@ -0,0 +1,368 @@
|
||||
#!/bin/sh
|
||||
|
||||
# set -x
|
||||
|
||||
# Program name: metricator_cleaner.sh
|
||||
# Purpose - Front-end script for metricator_cleaner.py and metricator_cleaner.pl, launches the Python or Perl script depending on interpreter availability
|
||||
# See metricator_cleaner.py | metricator_cleaner.pl
|
||||
# Author - Guilhem Marchand
|
||||
|
||||
# Version 2.0.1
|
||||
|
||||
# For AIX / Linux / Solaris
|
||||
|
||||
#################################################
|
||||
## Your Customizations Go Here ##
|
||||
#################################################
|
||||
|
||||
# format date output to strftime dd/mm/YYYY HH:MM:SS
|
||||
log_date () {
|
||||
date "+%d-%m-%Y %H:%M:%S"
|
||||
}
|
||||
|
||||
# hostname
|
||||
HOST=`hostname`
|
||||
|
||||
# Which type of OS are we running
|
||||
UNAME=`uname`
|
||||
|
||||
if [ -z "${SPLUNK_HOME}" ]; then
|
||||
echo "`log_date`, ${HOST} ERROR, SPLUNK_HOME variable is not defined"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# APP path discovery
|
||||
if [ -d "$SPLUNK_HOME/etc/apps/TA-metricator-for-nmon" ]; then
|
||||
APP=$SPLUNK_HOME/etc/apps/TA-metricator-for-nmon
|
||||
|
||||
elif [ -d "$SPLUNK_HOME/etc/slave-apps/TA-metricator-for-nmon" ];then
|
||||
APP=$SPLUNK_HOME/etc/slave-apps/TA-metricator-for-nmon
|
||||
|
||||
else
|
||||
echo "`log_date`, ${HOST} ERROR, the APP directory could not be defined, is the TA-metricator-for-nmon installed ?"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# source default nmon.conf
|
||||
if [ -f $APP/default/nmon.conf ]; then
|
||||
# During initial deployment, the nmon.conf needs to be managed properly by the metricator_consumer.sh
|
||||
# wait for this to be done
|
||||
grep '\[nmon\]' $APP/default/nmon.conf >/dev/null
|
||||
if [ $? -eq 0 ]; then
|
||||
echo "`log_date`, ${HOST} INFO, initial deployment condition detected, safe exiting."
|
||||
exit 0
|
||||
else
|
||||
. $APP/default/nmon.conf
|
||||
fi
|
||||
fi
|
||||
|
||||
# source local nmon.conf, if any
|
||||
|
||||
# Search for a local nmon.conf file located in $SPLUNK_HOME/etc/apps/TA-metricator-for-nmon/local
|
||||
if [ -f $APP/local/nmon.conf ]; then
|
||||
. $APP/local/nmon.conf
|
||||
fi
|
||||
|
||||
# On a per server basis, you can also set in /etc/nmon.conf
|
||||
if [ -f /etc/nmon.conf ]; then
|
||||
. /etc/nmon.conf
|
||||
fi
|
||||
|
||||
# Manage FQDN option
|
||||
echo $nmonparser_options | grep '\-\-use_fqdn' >/dev/null
|
||||
if [ $? -eq 0 ]; then
|
||||
# Only relevant for Linux OS
|
||||
case $UNAME in
|
||||
Linux)
|
||||
HOST=`hostname -f` ;;
|
||||
AIX)
|
||||
HOST=`hostname` ;;
|
||||
SunOS)
|
||||
HOST=`hostname` ;;
|
||||
esac
|
||||
else
|
||||
HOST=`hostname`
|
||||
fi
|
||||
|
||||
# Manage host override option based on Splunk hostname defined
|
||||
case $override_sys_hostname in
|
||||
"1")
|
||||
# Retrieve the Splunk host value
|
||||
HOST=`cat $SPLUNK_HOME/etc/system/local/inputs.conf | grep '^host =' | awk -F\= '{print $2}' | sed 's/ //g'`
|
||||
;;
|
||||
esac
|
||||
|
||||
#
|
||||
# Interpreter choice
|
||||
#
|
||||
|
||||
PYTHON=0
|
||||
PYTHON2=0
|
||||
PYTHON3=0
|
||||
PERL=0
|
||||
# Set the default interpreter
|
||||
INTERPRETER="python"
|
||||
|
||||
# Get the version for both worlds
|
||||
PYTHON2=`which python 2>&1`
|
||||
PYTHON3=`which python3 2>&1`
|
||||
PERL=`which perl 2>&1`
|
||||
|
||||
# Handle Python
|
||||
PYTHON_available="false"
|
||||
case $PYTHON3 in
|
||||
*python*)
|
||||
PYTHON_available="true"
|
||||
INTERPRETER="python3" ;;
|
||||
*)
|
||||
case $PYTHON2 in
|
||||
*python*)
|
||||
PYTHON_available="true"
|
||||
INTERPRETER="python" ;;
|
||||
esac
|
||||
;;
|
||||
esac
|
||||
|
||||
# Handle Perl
|
||||
case $PERL in
|
||||
*perl*)
|
||||
PERL_available="true"
|
||||
;;
|
||||
*)
|
||||
PERL_available="false"
|
||||
;;
|
||||
esac
|
||||
|
||||
case `uname` in
|
||||
|
||||
# AIX priority is Perl
|
||||
"AIX")
|
||||
case $PERL_available in
|
||||
"true")
|
||||
INTERPRETER="perl" ;;
|
||||
"false")
|
||||
INTERPRETER="$INTERPRETER" ;;
|
||||
esac
|
||||
;;
|
||||
|
||||
# Other OS, priority is Python
|
||||
*)
|
||||
case $PYTHON_available in
|
||||
"true")
|
||||
INTERPRETER="$INTERPRETER" ;;
|
||||
"false")
|
||||
INTERPRETER="perl" ;;
|
||||
esac
|
||||
;;
|
||||
esac
|
||||
|
||||
# POSIX process run time in seconds (for Solaris only)
|
||||
P_RUNTIME () {
|
||||
t=`LC_ALL=POSIX ps -o etime= -p $1 | awk '{print $1}'`
|
||||
d=0 h=0
|
||||
case $t in *-*) d=$((0 + ${t%%-*})); t=${t#*-};; esac
|
||||
case $t in *:*:*) h=$((0 + ${t%%:*})); t=${t#*:};; esac
|
||||
s=$((10#$d*86400 + 10#$h*3600 + 10#${t%%:*}*60 + 10#${t#*:}))
|
||||
echo $s
|
||||
}
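# Usage: P_RUNTIME <pid>
# Echoes the elapsed run time of <pid> in seconds, converting the
# [[dd-]hh:]mm:ss output of "ps -o etime=" into a plain number of seconds.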
|
||||
|
||||
####################################################################
|
||||
############# Main Program ############
|
||||
####################################################################
|
||||
|
||||
# Store arguments sent to script
|
||||
userargs=$@
|
||||
|
||||
###### Maintenance tasks ######
|
||||
|
||||
#
|
||||
# Maintenance task1
|
||||
#
|
||||
|
||||
# Maintenance task 1: verify if we have nmon processes running over the allowed period
|
||||
# This issue seems to happen sometimes, especially on AIX servers
|
||||
|
||||
# If an nmon process has not been terminated after its grace period, the process will be killed
|
||||
|
||||
# get the allowed runtime in seconds for an nmon process according to the configuration
|
||||
# and add a 10 minute grace period
|
||||
|
||||
case `uname` in
|
||||
|
||||
"AIX"|"Linux"|"SunOS")
|
||||
|
||||
echo "`log_date`, ${HOST} INFO, starting maintenance task 1: verify nmon processes running over expected time period"
|
||||
|
||||
endtime=0
|
||||
|
||||
case ${mode_fifo} in
|
||||
"1")
|
||||
endtime=`expr ${fifo_interval} \* ${fifo_snapshot}` ;;
|
||||
*)
|
||||
endtime=`expr ${interval} \* ${snapshot}` ;;
|
||||
esac
|
||||
|
||||
endtime=`expr ${endtime} + 600`
|
||||
|
||||
# get the list of running processes
|
||||
case $UNAME in
|
||||
"AIX"|"Linux")
|
||||
oldPidList=`ps -eo user,pid,command,etime,args | grep "nmon" | grep "splunk" | grep "var/log/metricator" | grep -v metricator_reader | grep -v grep | awk '{ print $2 }'`
|
||||
ps -eo user,pid,command,etime,args | grep "nmon" | grep "splunk" | grep "var/log/metricator" | grep -v metricator_reader | grep -v grep >/dev/null ;;
|
||||
"SunOS")
|
||||
oldPidList=`ps auxwww | grep "sadc" | grep "splunk" | grep "var/log/metricator" | grep -v metricator_reader | grep -v grep | awk '{ print $2 }'`
|
||||
ps auxwww | grep "sadc" | grep "splunk" | grep "var/log/metricator" | grep -v metricator_reader | grep -v grep >/dev/null ;;
|
||||
esac
|
||||
|
||||
if [ $? -eq 0 ]; then
|
||||
|
||||
for pid in $oldPidList; do
|
||||
|
||||
pid_runtime=0
|
||||
        # only act if the process is still running
|
||||
if [ -d /proc/${pid} ]; then
|
||||
# get the process runtime in seconds
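        # "ps -o etime=" prints [[dd-]hh:]mm:ss; the tr/awk pipeline below converts that
        # elapsed time into a plain number of seconds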
|
||||
|
||||
case $UNAME in
|
||||
"AIX"|"Linux")
|
||||
pid_runtime=`ps -p ${pid} -oetime= | tr '-' ':' | awk -F: '{ total=0; m=1; } { for (i=0; i < NF; i++) {total += $(NF-i)*m; m *= i >= 2 ? 24 : 60 }} {print total}'`
|
||||
;;
|
||||
"SunOS")
|
||||
pid_runtime=`P_RUNTIME ${pid}`
|
||||
;;
|
||||
esac
|
||||
|
||||
# additional protection
|
||||
case ${pid_runtime} in
|
||||
"")
|
||||
;;
|
||||
*)
|
||||
if [ ${pid_runtime} -gt ${endtime} ]; then
|
||||
echo "`log_date`, ${HOST} WARN, old nmon process found due to: `ps auxwww | grep $pid | grep -v grep` killing (SIGTERM) process $pid"
|
||||
kill $pid
|
||||
|
||||
# Allow some time for the process to end
|
||||
sleep 5
|
||||
|
||||
# re-check the status
|
||||
ps -p ${pid} -oetime= >/dev/null
|
||||
|
||||
if [ $? -eq 0 ]; then
|
||||
echo "`log_date`, ${HOST} WARN, old nmon process found due to: `ps auxwww | grep $pid | grep -v grep` failed to stop, killing (-9) process $pid"
|
||||
kill -9 $pid
|
||||
fi
|
||||
|
||||
fi
|
||||
;;
|
||||
esac
|
||||
fi
|
||||
|
||||
done
|
||||
|
||||
fi
|
||||
|
||||
#
|
||||
# Maintenance task2
|
||||
# set -x
|
||||
# - manage any fifo reader orphan processes (no associated nmon process)
|
||||
# - manage any duplicated fifo reader processes (abnormal situation)
|
||||
|
||||
echo "`log_date`, ${HOST} INFO, starting maintenance task 2: verify orphan or duplicated fifo_reader processes"
|
||||
|
||||
for instance in fifo1 fifo2; do
|
||||
|
||||
# Initiate
|
||||
oldPidNb=0
|
||||
|
||||
case $INTERPRETER in
|
||||
"perl")
|
||||
readerNbProc=2 ;;
|
||||
"python"|"python3")
|
||||
readerNbProc=3 ;;
|
||||
esac
|
||||
|
||||
# get the list of running processes
|
||||
ps auxwww | grep "nmon" | grep "splunk" | grep metricator_reader | grep ${instance} >/dev/null
|
||||
|
||||
if [ $? -eq 0 ]; then
|
||||
|
||||
oldPidList=`ps auxwwww | grep "nmon" | grep "splunk" | grep metricator_reader | grep ${instance} | grep -v grep | awk '{ print $2 }'`
|
||||
oldPidNb=`ps auxwww | grep "nmon" | grep "splunk" | grep metricator_reader | grep ${instance} | grep -v grep | wc -l | awk '{print $1}'`
|
||||
|
||||
# search for associated nmon process
|
||||
case $UNAME in
|
||||
"AIX"|"Linux")
|
||||
ps auxwww | grep "nmon" | grep "splunk" | grep "var/log/metricator" | grep -v metricator_reader | grep ${instance} >/dev/null
|
||||
;;
|
||||
"SunOS")
|
||||
ps auxwww | grep "sadc" | grep "splunk" | grep "var/log/metricator" | grep -v metricator_reader | grep ${instance} >/dev/null
|
||||
;;
|
||||
esac
|
||||
|
||||
if [ $? -ne 0 ] && [ $oldPidNb -eq $readerNbProc ]; then
|
||||
|
||||
# no process found, kill the reader processes
|
||||
for pid in $oldPidList; do
|
||||
echo "`log_date`, ${HOST} WARN, orphan reader process found (no associated nmon process) due to: `ps auxwww | grep $pid | grep -v grep` killing (SIGTERM) process $pid"
|
||||
kill $pid
|
||||
|
||||
# Allow some time for the process to end
|
||||
sleep 5
|
||||
|
||||
# re-check the status
|
||||
ps -p ${pid} -oetime= >/dev/null
|
||||
|
||||
if [ $? -eq 0 ]; then
|
||||
echo "`log_date`, ${HOST} WARN, orphan reader process (no associated nmon process) due to: `ps auxwww | grep $pid | grep -v grep` failed to stop, killing (-9) process $pid"
|
||||
kill -9 $pid
|
||||
fi
|
||||
done
|
||||
|
||||
    # If nmon is running but the number of reader processes is higher than expected (shell parent plus Python/Perl child processes), something went wrong
|
||||
elif [ $oldPidNb -gt $readerNbProc ]; then
|
||||
|
||||
echo "`log_date`, ${HOST} WARN, multiple reader for the same fifo were detected, this is an abnormal situation and reader will be killed."
|
||||
|
||||
# no process found, kill the reader processes
|
||||
for pid in $oldPidList; do
|
||||
echo "`log_date`, ${HOST} WARN, duplicated reader process found due to: `ps auxwww | grep $pid | grep -v grep` killing (SIGTERM) process $pid"
|
||||
kill $pid
|
||||
|
||||
# Allow some time for the process to end
|
||||
sleep 5
|
||||
|
||||
# re-check the status
|
||||
ps -p ${pid} -oetime= >/dev/null
|
||||
|
||||
if [ $? -eq 0 ]; then
|
||||
echo "`log_date`, ${HOST} WARN, duplicated reader process found due to: `ps auxwww | grep $pid | grep -v grep` failed to stop, killing (-9) process $pid"
|
||||
kill -9 $pid
|
||||
fi
|
||||
done
|
||||
|
||||
fi
|
||||
|
||||
fi
|
||||
|
||||
done
|
||||
|
||||
;;
|
||||
|
||||
# End of per OS case
|
||||
esac
|
||||
|
||||
###### End maintenance tasks ######
|
||||
|
||||
###### Start cleaner ######
|
||||
|
||||
case ${INTERPRETER} in
|
||||
|
||||
"python"|"python3")
|
||||
$INTERPRETER $APP/bin/metricator_cleaner.py ${userargs} ;;
|
||||
|
||||
"perl")
|
||||
$APP/bin/metricator_cleaner.pl ${userargs} ;;
|
||||
|
||||
esac
|
||||
|
||||
exit 0
|
||||
@ -0,0 +1,367 @@
|
||||
#!/bin/sh
|
||||
|
||||
# set -x
|
||||
|
||||
# Program name: metricator_consumer.sh
|
||||
# Purpose - consume data produced by the fifo readers
|
||||
# Author - Guilhem Marchand
|
||||
|
||||
# Version 2.0.0
|
||||
|
||||
# For AIX / Linux / Solaris
|
||||
|
||||
#################################################
|
||||
## Your Customizations Go Here ##
|
||||
#################################################
|
||||
|
||||
# hostname
|
||||
HOST=`hostname`
|
||||
|
||||
# Which type of OS are we running
|
||||
UNAME=`uname`
|
||||
|
||||
# format date output to strftime dd/mm/YYYY HH:MM:SS
|
||||
log_date () {
|
||||
date "+%d-%m-%Y %H:%M:%S"
|
||||
}
|
||||
|
||||
if [ -z "${SPLUNK_HOME}" ]; then
|
||||
echo "`log_date`, ${HOST} ERROR, SPLUNK_HOME variable is not defined"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# check and wait to acquire mutex
|
||||
mutex="$SPLUNK_HOME/var/log/metricator/mutex"
|
||||
|
||||
remove_mutex () {
|
||||
rm -f $mutex
|
||||
}
|
||||
|
||||
# Allow up to 10 seconds to acquire the mutex, then break
|
||||
count=0
|
||||
while [ -f $mutex ]; do
|
||||
sleep 2
|
||||
count=`expr $count + 1`
|
||||
if [ $count -gt 5 ]; then
|
||||
break
|
||||
fi
|
||||
done
|
||||
|
||||
# acquire mutex
|
||||
if [ -d $SPLUNK_HOME/var/log/metricator ]; then
|
||||
touch $mutex
|
||||
fi
|
||||
|
||||
# tmp dir and file
|
||||
temp_dir="${SPLUNK_HOME}/var/log/metricator/tmp/"
|
||||
|
||||
if [ ! -d ${temp_dir} ]; then
|
||||
mkdir -p ${temp_dir}
|
||||
fi
|
||||
|
||||
temp_file="${temp_dir}/metricator_consumer.sh.$$"
|
||||
|
||||
# Splunk Home variable: This should be automatically defined when this script is launched by Splunk
|
||||
# If you intend to run this script out of Splunk, please set your custom value here
|
||||
SPL_HOME=${SPLUNK_HOME}
|
||||
|
||||
# Check SPL_HOME variable is defined, this should be the case when launched by Splunk scheduler
|
||||
if [ -z "${SPL_HOME}" ]; then
|
||||
echo "`log_date`, ${HOST} ERROR, SPL_HOME (SPLUNK_HOME) variable is not defined"
|
||||
remove_mutex
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# APP path discovery
|
||||
if [ -d "$SPLUNK_HOME/etc/apps/TA-metricator-for-nmon" ]; then
|
||||
APP=$SPLUNK_HOME/etc/apps/TA-metricator-for-nmon
|
||||
|
||||
elif [ -d "$SPLUNK_HOME/etc/slave-apps/TA-metricator-for-nmon" ];then
|
||||
APP=$SPLUNK_HOME/etc/slave-apps/TA-metricator-for-nmon
|
||||
|
||||
else
|
||||
echo "`log_date`, ${HOST} ERROR, the APP directory could not be defined, is the TA-metricator-for-nmon installed ?"
|
||||
remove_mutex
|
||||
exit 1
|
||||
fi
|
||||
|
||||
#
|
||||
# Interpreter choice
|
||||
#
|
||||
|
||||
PYTHON=0
|
||||
PYTHON2=0
|
||||
PYTHON3=0
|
||||
PERL=0
|
||||
# Set the default interpreter
|
||||
INTERPRETER="python"
|
||||
|
||||
# Get the version for both worlds
|
||||
PYTHON2=`which python 2>&1`
|
||||
PYTHON3=`which python3 2>&1`
|
||||
PERL=`which perl 2>&1`
|
||||
|
||||
# Handle Python
|
||||
PYTHON_available="false"
|
||||
case $PYTHON3 in
|
||||
*python*)
|
||||
PYTHON_available="true"
|
||||
INTERPRETER="python3" ;;
|
||||
*)
|
||||
case $PYTHON2 in
|
||||
*python*)
|
||||
PYTHON_available="true"
|
||||
INTERPRETER="python" ;;
|
||||
esac
|
||||
;;
|
||||
esac
|
||||
|
||||
# Handle Perl
|
||||
case $PERL in
|
||||
*perl*)
|
||||
PERL_available="true"
|
||||
;;
|
||||
*)
|
||||
PERL_available="false"
|
||||
;;
|
||||
esac
|
||||
|
||||
case `uname` in
|
||||
|
||||
# AIX priority is Perl
|
||||
"AIX")
|
||||
case $PERL_available in
|
||||
"true")
|
||||
INTERPRETER="perl" ;;
|
||||
"false")
|
||||
INTERPRETER="$INTERPRETER" ;;
|
||||
esac
|
||||
;;
|
||||
|
||||
# Other OS, priority is Python
|
||||
*)
|
||||
case $PYTHON_available in
|
||||
"true")
|
||||
INTERPRETER="$INTERPRETER" ;;
|
||||
"false")
|
||||
INTERPRETER="perl" ;;
|
||||
esac
|
||||
;;
|
||||
esac
|
||||
|
||||
# default values relevant for our context
|
||||
nmonparser_options="--mode fifo"
|
||||
|
||||
# source default nmon.conf
|
||||
if [ -f $APP/default/nmon.conf ]; then
|
||||
# During initial deployment, the nmon.conf needs to be managed properly by the metricator_consumer.sh
|
||||
# wait for this to be done
|
||||
grep '\[nmon\]' $APP/default/nmon.conf >/dev/null
|
||||
if [ $? -eq 0 ]; then
|
||||
echo "`log_date`, ${HOST} INFO, initial deployment condition detected, safe exiting."
|
||||
exit 0
|
||||
else
|
||||
. $APP/default/nmon.conf
|
||||
fi
|
||||
fi
|
||||
|
||||
# source local nmon.conf, if any
|
||||
|
||||
# Search for a local nmon.conf file located in $SPLUNK_HOME/etc/apps/TA-metricator-for-nmon/local
|
||||
if [ -f $APP/local/nmon.conf ]; then
|
||||
. $APP/local/nmon.conf
|
||||
fi
|
||||
|
||||
# On a per server basis, you can also set in /etc/nmon.conf
|
||||
if [ -f /etc/nmon.conf ]; then
|
||||
. /etc/nmon.conf
|
||||
fi
|
||||
|
||||
# Manage FQDN option
|
||||
echo $nmonparser_options | grep '\-\-use_fqdn' >/dev/null
|
||||
if [ $? -eq 0 ]; then
|
||||
# Only relevant for Linux OS
|
||||
case $UNAME in
|
||||
Linux)
|
||||
HOST=`hostname -f` ;;
|
||||
AIX)
|
||||
HOST=`hostname` ;;
|
||||
SunOS)
|
||||
HOST=`hostname` ;;
|
||||
esac
|
||||
else
|
||||
HOST=`hostname`
|
||||
fi
|
||||
|
||||
# Manage host override option based on Splunk hostname defined
|
||||
case $override_sys_hostname in
|
||||
"1")
|
||||
# Retrieve the Splunk host value
|
||||
HOST=`cat $SPLUNK_HOME/etc/system/local/inputs.conf | grep '^host =' | awk -F\= '{print $2}' | sed 's/ //g'`
|
||||
;;
|
||||
esac
|
||||
|
||||
############################################
|
||||
# functions
|
||||
############################################
|
||||
|
||||
# consume function
|
||||
consume_data () {
|
||||
|
||||
# fifo name (valid choices are: fifo1 | fifo2)
|
||||
FIFO=$1
|
||||
|
||||
# consume fifo
|
||||
|
||||
# realtime
|
||||
nmon_config=$SPLUNK_HOME/var/log/metricator/var/nmon_repository/$FIFO/nmon_config.dat
|
||||
nmon_header=$SPLUNK_HOME/var/log/metricator/var/nmon_repository/$FIFO/nmon_header.dat
|
||||
nmon_timestamp=$SPLUNK_HOME/var/log/metricator/var/nmon_repository/$FIFO/nmon_timestamp.dat
|
||||
nmon_data=$SPLUNK_HOME/var/log/metricator/var/nmon_repository/$FIFO/nmon_data.dat
|
||||
nmon_data_tmp=$SPLUNK_HOME/var/log/metricator/var/nmon_repository/$FIFO/nmon_data_tmp.dat
|
||||
nmon_external=$SPLUNK_HOME/var/log/metricator/var/nmon_repository/$FIFO/nmon_external.dat
|
||||
nmon_external_header=$SPLUNK_HOME/var/log/metricator/var/nmon_repository/$FIFO/nmon_external_header.dat
|
||||
|
||||
|
||||
# rotated
|
||||
nmon_config_rotated=$SPLUNK_HOME/var/log/metricator/var/nmon_repository/$FIFO/nmon_config.dat.rotated
|
||||
nmon_header_rotated=$SPLUNK_HOME/var/log/metricator/var/nmon_repository/$FIFO/nmon_header.dat.rotated
|
||||
nmon_timestamp_rotated=$SPLUNK_HOME/var/log/metricator/var/nmon_repository/$FIFO/nmon_timestamp.dat.rotated
|
||||
nmon_data_rotated=$SPLUNK_HOME/var/log/metricator/var/nmon_repository/$FIFO/nmon_data.dat.rotated
|
||||
nmon_external_rotated=$SPLUNK_HOME/var/log/metricator/var/nmon_repository/$FIFO/nmon_external.dat.rotated
|
||||
nmon_external_header_rotated=$SPLUNK_HOME/var/log/metricator/var/nmon_repository/$FIFO/nmon_external_header.dat.rotated
|
||||
|
||||
# manage rotated data if existing, prevent any data loss
|
||||
|
||||
# all files must exist to be processed
|
||||
if [ -s $nmon_config_rotated ] && [ -s $nmon_header_rotated ] && [ -s $nmon_data_rotated ]; then
|
||||
|
||||
# Manage headers
|
||||
unset nmon_header_files
|
||||
if [ -f $nmon_external_header_rotated ]; then
|
||||
nmon_header_files="$nmon_header_rotated $nmon_external_header_rotated"
|
||||
else
|
||||
nmon_header_files="$nmon_header_rotated"
|
||||
fi
|
||||
|
||||
# Ensure the first line of nmon_data starts with the relevant timestamp; if not, add it
|
||||
head -1 $nmon_data_rotated | grep 'ZZZZ,T' >/dev/null
|
||||
if [ $? -ne 0 ]; then
|
||||
# check timestamp dat exists before processing
|
||||
# there is no else possible; if the timestamp data file does not exist, there is nothing we can do
|
||||
# and the parser will raise an error
|
||||
if [ -f $nmon_timestamp_rotated ]; then
|
||||
tail -1 $nmon_timestamp_rotated >$temp_file
|
||||
cat $nmon_config_rotated $nmon_header_files $temp_file $nmon_data_rotated $nmon_external_rotated | $SPLUNK_HOME/bin/splunk cmd $APP/bin/nmonparser.sh $nmonparser_options
|
||||
fi
|
||||
else
|
||||
cat $nmon_config_rotated $nmon_header_files $nmon_data_rotated $nmon_external_rotated | $SPLUNK_HOME/bin/splunk cmd $APP/bin/nmonparser.sh $nmonparser_options
|
||||
fi
|
||||
|
||||
# remove rotated
|
||||
rm -f $SPLUNK_HOME/var/log/metricator/var/nmon_repository/$FIFO/*.dat.rotated
|
||||
|
||||
# header var
|
||||
unset nmon_header_files
|
||||
|
||||
fi
|
||||
|
||||
# Manage realtime files
|
||||
|
||||
# all files must exist to be processed
|
||||
if [ -s $nmon_config ] && [ -s $nmon_header ] && [ -s $nmon_data ]; then
|
||||
|
||||
# get data mtime
|
||||
case $INTERPRETER in
|
||||
"perl")
|
||||
perl -e "\$mtime=(stat(\"$nmon_data\"))[9]; \$cur_time=time(); print \$cur_time - \$mtime;" >$temp_file
|
||||
nmon_data_mtime=`cat $temp_file`
|
||||
;;
|
||||
"python"|"python3")
|
||||
$INTERPRETER -c "import os; import time; now = time.strftime(\"%s\"); print(int(int(now)-(os.path.getmtime('$nmon_data'))))" >$temp_file
|
||||
nmon_data_mtime=`cat $temp_file`
|
||||
;;
|
||||
|
||||
esac
|
||||
|
||||
# the data file must not have been modified for at least 5 seconds before we consume it
|
||||
|
||||
while [ $nmon_data_mtime -lt 5 ];
|
||||
do
|
||||
|
||||
sleep 1
|
||||
|
||||
# get data mtime
|
||||
case $INTERPRETER in
|
||||
"perl")
|
||||
perl -e "\$mtime=(stat(\"$nmon_data\"))[9]; \$cur_time=time(); print \$cur_time - \$mtime;" >$temp_file
|
||||
nmon_data_mtime=`cat $temp_file`
|
||||
;;
|
||||
"python"|"python3")
|
||||
$INTERPRETER -c "import os; import time; now = time.strftime(\"%s\"); print(int(int(now)-(os.path.getmtime('$nmon_data'))))" >$temp_file
|
||||
nmon_data_mtime=`cat $temp_file`
|
||||
;;
|
||||
esac
|
||||
|
||||
|
||||
done
|
||||
|
||||
# copy content
|
||||
cat $nmon_data > $nmon_data_tmp
|
||||
|
||||
# nmon external data
|
||||
if [ -f $nmon_external ]; then
|
||||
cat $nmon_external >> $nmon_data_tmp
|
||||
fi
|
||||
|
||||
# empty the nmon_data file & external
|
||||
> $nmon_data
|
||||
> $nmon_external
|
||||
|
||||
# Manage headers
|
||||
unset nmon_header_files
|
||||
if [ -f $nmon_external_header ]; then
|
||||
nmon_header_files="$nmon_header $nmon_external_header"
|
||||
else
|
||||
nmon_header_files="$nmon_header"
|
||||
fi
|
||||
|
||||
# Ensure the first line of nmon_data starts with the relevant timestamp; if not, add it
|
||||
head -1 $nmon_data_tmp | grep 'ZZZZ,T' >/dev/null
|
||||
if [ $? -ne 0 ]; then
|
||||
tail -1 $nmon_timestamp >$temp_file
|
||||
cat $nmon_config $nmon_header_files $temp_file $nmon_data_tmp | $SPLUNK_HOME/bin/splunk cmd $APP/bin/nmonparser.sh $nmonparser_options
|
||||
else
|
||||
cat $nmon_config $nmon_header_files $nmon_data_tmp | $SPLUNK_HOME/bin/splunk cmd $APP/bin/nmonparser.sh $nmonparser_options
|
||||
fi
|
||||
|
||||
# remove the copy
|
||||
rm -f $nmon_data_tmp
|
||||
|
||||
# header var
|
||||
unset nmon_header_files
|
||||
|
||||
fi
|
||||
|
||||
}
|
||||
|
||||
####################################################################
|
||||
############# Main Program ############
|
||||
####################################################################
|
||||
|
||||
# consume fifo1
|
||||
consume_data fifo1
|
||||
|
||||
# allow 1 sec idle
|
||||
sleep 1
|
||||
|
||||
# consume fifo2
|
||||
consume_data fifo2
|
||||
|
||||
# remove the temp file
|
||||
if [ -f $temp_file ]; then
|
||||
rm -f $temp_file
|
||||
fi
|
||||
|
||||
remove_mutex
|
||||
exit 0
|
||||
@ -0,0 +1,248 @@
|
||||
#!/usr/bin/perl
|
||||
|
||||
# Program name: metricator_reader.pl
|
||||
# Compatibility: Perl x
|
||||
# Purpose - read nmon data from fifo file
|
||||
# Author - Guilhem Marchand
|
||||
|
||||
my $version = "2.0.0";
|
||||
|
||||
use Getopt::Long;
|
||||
use File::stat;
|
||||
use File::Copy;
|
||||
use POSIX 'strftime';
|
||||
|
||||
#################################################
|
||||
## Arguments Parser
|
||||
#################################################
|
||||
|
||||
# Default values
|
||||
my $APP = "";
|
||||
my $fifo_name = "";
|
||||
my $VERSION = "";
|
||||
my $help = "";
|
||||
|
||||
my $result = GetOptions(
|
||||
"fifo=s" => \$fifo_name, # string
|
||||
"version" => \$VERSION, # flag
|
||||
"help" => \$help # flag
|
||||
);
|
||||
|
||||
# Show version
|
||||
if ($VERSION) {
|
||||
print("metricator_reader.pl version $version \n");
|
||||
|
||||
exit 0;
|
||||
}
|
||||
|
||||
# Show help
|
||||
if ($help) {
|
||||
|
||||
print( "
|
||||
|
||||
Help for metricator_reader.pl:
|
||||
|
||||
The script should be run in the background to continuously read nmon data from fifo files.
|
||||
|
||||
Available options are:
|
||||
|
||||
--fifo <name of fifo> :Name of the pre-configured fifo file
|
||||
--version :Show current program version \n
|
||||
"
|
||||
);
|
||||
|
||||
exit 0;
|
||||
}
|
||||
|
||||
# Local time
|
||||
my $time = strftime "%d-%m-%Y %H:%M:%S", localtime;
|
||||
|
||||
# Default Environment Variable SPLUNK_HOME, this shall be automatically defined as the script is expected to be launched by Splunk
|
||||
my $SPLUNK_HOME = $ENV{SPLUNK_HOME};
|
||||
|
||||
# Verify SPLUNK_HOME definition
|
||||
if ( not $SPLUNK_HOME ) {
|
||||
print(
|
||||
"\n$time ERROR: The environment variable SPLUNK_HOME could not be verified, if you want to run this script manually you need to export it before processing \n"
|
||||
);
|
||||
die;
|
||||
}
|
||||
|
||||
# Discover TA-metricator-for-nmon path
|
||||
if ( length($APP) == 0 ) {
|
||||
|
||||
if ( -d "$SPLUNK_HOME/etc/apps/TA-metricator-for-nmon" ) {
|
||||
$APP = "$SPLUNK_HOME/etc/apps/TA-metricator-for-nmon";
|
||||
}
|
||||
elsif ( -d "$SPLUNK_HOME/etc/slave-apps/TA-metricator-for-nmon" ) {
|
||||
$APP = "$SPLUNK_HOME/etc/slave-apps/TA-metricator-for-nmon";
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
else {
|
||||
|
||||
if ( !-d "$APP" ) {
|
||||
print(
|
||||
"\n$time ERROR: The Application root directory could be verified using your custom setting: $APP \n"
|
||||
);
|
||||
die;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
# Verify existence of APP
|
||||
if ( !-d "$APP" ) {
|
||||
print(
|
||||
"\n$time ERROR: The Application root directory could not be found, is the TA-metricator-for-nmon installed ?\n"
|
||||
);
|
||||
die;
|
||||
}
|
||||
|
||||
# var directories
|
||||
my $APP_MAINVAR = "$SPLUNK_HOME/var/log/metricator";
|
||||
my $APP_VAR = "$APP_MAINVAR/var";
|
||||
|
||||
if ( !-d "$APP_MAINVAR" ) {
|
||||
print(
|
||||
"\n$time INFO: main var directory not found ($APP_MAINVAR), no need to run.\n"
|
||||
);
|
||||
exit 0;
|
||||
}
|
||||
|
||||
# check fifo_name
|
||||
if ( not "$fifo_name" ) {
|
||||
print("\n$time ERROR: the --fifo_name <name of fifo> is mandatory\n");
|
||||
die;
|
||||
}
|
||||
|
||||
# define the full path to the fifo file
|
||||
my $fifo_path = "$APP_VAR/nmon_repository/$fifo_name/nmon.fifo";
|
||||
|
||||
# At startup, rotate any existing non empty .dat file if nmon_data.dat is not empty
|
||||
|
||||
# define the various files to be written
|
||||
|
||||
# realtime files
|
||||
my $nmon_config_dat = "$APP_VAR/nmon_repository/$fifo_name/nmon_config.dat";
|
||||
my $nmon_header_dat = "$APP_VAR/nmon_repository/$fifo_name/nmon_header.dat";
|
||||
my $nmon_data_dat = "$APP_VAR/nmon_repository/$fifo_name/nmon_data.dat";
|
||||
my $nmon_external_dat = "$APP_VAR/nmon_repository/$fifo_name/nmon_external.dat";
|
||||
my $nmon_external_header_dat =
|
||||
"$APP_VAR/nmon_repository/$fifo_name/nmon_external_header.dat";
|
||||
my $nmon_timestamp_dat =
|
||||
"$APP_VAR/nmon_repository/$fifo_name/nmon_timestamp.dat";
|
||||
my $nmon_error_dat = "$APP_VAR/nmon_repository/$fifo_name/nmon_error.dat";
|
||||
|
||||
my @nmon_dat = (
|
||||
"$nmon_config_dat", "$nmon_header_dat",
|
||||
"$nmon_data_dat", "$nmon_timestamp_dat",
|
||||
"$nmon_external_dat", "$nmon_external_header_dat",
|
||||
"$nmon_error_dat"
|
||||
);
|
||||
|
||||
my $file;
|
||||
my $rotated_file;
|
||||
|
||||
# Remove any existing rotated file
|
||||
foreach $file (@nmon_dat) {
|
||||
$rotated_file = "$file.rotated";
|
||||
if ( -e $rotated_file ) {
|
||||
unlink $rotated_file;
|
||||
}
|
||||
}
|
||||
|
||||
# Manage existing files and do the rotation if required
|
||||
if ( !-z $nmon_data_dat ) {
|
||||
foreach $file (@nmon_dat) {
|
||||
$rotated_file = "$file.rotated";
|
||||
move( $file, $rotated_file );
|
||||
}
|
||||
}
|
||||
else {
|
||||
foreach $file (@nmon_dat) {
|
||||
if ( -e $file ) {
|
||||
unlink $file;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
####################################################################
|
||||
############# Main Program
|
||||
####################################################################
|
||||
|
||||
if ( !-p $fifo_path ) {
|
||||
print(
|
||||
"\n$time INFO: The fifo file $fifo_path does not exist yet, we are not ready to start.\n"
|
||||
);
|
||||
exit 0;
|
||||
|
||||
}
|
||||
else {
|
||||
|
||||
my $fifoh;
|
||||
|
||||
    # Open the named pipe "a la shell" to ensure that we will quit when the nmon process has ended as well
|
||||
open( $fifoh, "$APP/bin/metricator_reader.sh $fifo_path|" );
|
||||
|
||||
while (<$fifoh>) {
|
||||
chomp($_);
|
||||
|
||||
my $nmon_config_match = '^[AAA|BBB].+';
|
||||
my $nmon_header_match =
|
||||
'^(?!AAA|BBB|TOP)[a-zA-Z0-9\-\_]*,(?!T\d{3,})[^,]*,(?!T\d{3,})[^,]*.*';
|
||||
my $nmon_header_TOP_match = '^TOP,(?!\d*,)';
|
||||
my $nmon_timestamp_match = '^ZZZZ,T\d*';
|
||||
my $nmon_error_match = '^ERROR,T\d*';
|
||||
|
||||
if ( $_ =~ /$nmon_config_match/ ) {
|
||||
open( my $fh, '>>', $nmon_config_dat )
|
||||
or die "Could not open file '$nmon_config_dat' $!";
|
||||
print $fh "$_\n";
|
||||
close $fh;
|
||||
}
|
||||
|
||||
elsif ( $_ =~ /$nmon_header_match/ ) {
|
||||
open( my $fh, '>>', $nmon_header_dat )
|
||||
or die "Could not open file '$nmon_header_dat' $!";
|
||||
print $fh "$_\n";
|
||||
close $fh;
|
||||
}
|
||||
|
||||
elsif ( $_ =~ /$nmon_header_TOP_match/ ) {
|
||||
open( my $fh, '>>', $nmon_header_dat )
|
||||
or die "Could not open file '$nmon_header_dat' $!";
|
||||
print $fh "$_\n";
|
||||
close $fh;
|
||||
}
|
||||
|
||||
elsif ( $_ =~ /$nmon_error_match/ ) {
|
||||
open( my $fh, '>>', $nmon_error_dat )
|
||||
or die "Could not open file '$nmon_error_dat' $!";
|
||||
print $fh "$_\n";
|
||||
close $fh;
|
||||
}
|
||||
|
||||
elsif ( $_ =~ /$nmon_timestamp_match/ ) {
|
||||
open( my $fh, '>>', $nmon_timestamp_dat )
|
||||
or die "Could not open file '$nmon_timestamp_dat' $!";
|
||||
print $fh "$_\n";
|
||||
close $fh;
|
||||
open( my $fh, '>>', $nmon_data_dat )
|
||||
or die "Could not open file '$nmon_data_dat' $!";
|
||||
print $fh "$_\n";
|
||||
close $fh;
|
||||
}
|
||||
|
||||
else {
|
||||
open( my $fh, '>>', $nmon_data_dat )
|
||||
or die "Could not open file '$nmon_data_dat' $!";
|
||||
print $fh "$_\n";
|
||||
close $fh;
|
||||
}
|
||||
|
||||
}
|
||||
close $fifoh;
|
||||
exit(0);
|
||||
|
||||
}
|
||||
@ -0,0 +1,180 @@
|
||||
# Program name: metricator_reader.py
|
||||
# Compatibility: Python 3.x
|
||||
# Purpose - read nmon data from fifo file
|
||||
# Author - Guilhem Marchand
|
||||
|
||||
import os
|
||||
import sys
|
||||
import optparse
|
||||
import logging
|
||||
import re
|
||||
import subprocess
|
||||
|
||||
# script version
|
||||
version = '3.0.0'
|
||||
|
||||
#################################################
|
||||
# Variables
|
||||
#################################################
|
||||
|
||||
# Set logging format
|
||||
logging.root
|
||||
logging.root.setLevel(logging.DEBUG)
|
||||
formatter = logging.Formatter('%(levelname)s %(message)s')
|
||||
handler = logging.StreamHandler()
|
||||
handler.setFormatter(formatter)
|
||||
logging.root.addHandler(handler)
|
||||
|
||||
# Verify SPLUNK_HOME environment variable is available, the script is expected to be launched by Splunk
|
||||
# which will set this.
|
||||
# for debugging or manual run, please set this variable manually
|
||||
try:
|
||||
os.environ["SPLUNK_HOME"]
|
||||
except KeyError:
|
||||
logging.error(
|
||||
'The environment variable SPLUNK_HOME could not be verified, if you want to run this script manually you need'
|
||||
' to export it before processing')
|
||||
sys.exit(1)
|
||||
|
||||
# SPLUNK_HOME environment variable
|
||||
SPLUNK_HOME = os.environ['SPLUNK_HOME']
|
||||
|
||||
# APP_VAR directory
|
||||
APP_VAR = SPLUNK_HOME + '/var/log/metricator/var'
|
||||
if not os.path.exists(APP_VAR):
|
||||
logging.info(
|
||||
'The application var directory does not exist yet, we are not ready to start')
|
||||
sys.exit(0)
|
||||
|
||||
# APP Directories for TA-metricator-for-nmon
|
||||
TA_NMON_APP = SPLUNK_HOME + '/etc/apps/TA-metricator-for-nmon'
|
||||
TA_NMON_APP_CLUSTERED = SPLUNK_HOME + '/etc/slave-apps/TA-metricator-for-nmon'
|
||||
|
||||
# Empty APP
|
||||
APP = ''
|
||||
|
||||
# Verify APP exist
|
||||
if os.path.exists(TA_NMON_APP):
|
||||
APP = TA_NMON_APP
|
||||
elif os.path.exists(TA_NMON_APP_CLUSTERED):
|
||||
APP = TA_NMON_APP_CLUSTERED
|
||||
else:
|
||||
    msg = 'The Application root directory could not be found, is the TA-metricator-for-nmon installed? We tried: ' + \
|
||||
str(TA_NMON_APP) + ' ' + str(TA_NMON_APP_CLUSTERED)
|
||||
logging.error(msg)
|
||||
sys.exit(1)
|
||||
|
||||
# metricator_reader.sh
|
||||
fifo_reader = APP + "/bin/metricator_reader.sh"
|
||||
|
||||
#################################################
|
||||
# Arguments
|
||||
#################################################
|
||||
|
||||
parser = optparse.OptionParser(usage='usage: %prog [options]', version='%prog '+version)
|
||||
|
||||
parser.add_option('-F', '--fifo', action='store', type='string', dest='fifo_name',
|
||||
help='set the fifo file to be read')
|
||||
parser.add_option('--dumpargs', action='store_true', dest='dumpargs',
|
||||
help='only dump the passed arguments and exit (for debugging purposes only)')
|
||||
|
||||
(options, args) = parser.parse_args()
|
||||
|
||||
if options.dumpargs:
|
||||
print("options: ", options)
|
||||
print("args: ", args)
|
||||
sys.exit(0)
|
||||
|
||||
if not options.fifo_name:
|
||||
logging.error(
|
||||
'The fifo file option has not been set (-F fifo_name or --fifo fifo_name)')
|
||||
sys.exit(1)
|
||||
else:
|
||||
fifo_name = options.fifo_name
|
||||
|
||||
# define the full path to the fifo file
|
||||
fifo_path = APP_VAR + '/nmon_repository/' + fifo_name + '/nmon.fifo'
|
||||
|
||||
# At startup, rotate any existing non empty .dat file if nmon_data.dat is not empty
|
||||
|
||||
# define the various files to be written
|
||||
|
||||
# realtime files
|
||||
nmon_config_dat = APP_VAR + '/nmon_repository/' + fifo_name + '/nmon_config.dat'
|
||||
nmon_header_dat = APP_VAR + '/nmon_repository/' + fifo_name + '/nmon_header.dat'
|
||||
nmon_data_dat = APP_VAR + '/nmon_repository/' + fifo_name + '/nmon_data.dat'
|
||||
nmon_timestamp_dat = APP_VAR + '/nmon_repository/' + fifo_name + '/nmon_timestamp.dat'
|
||||
nmon_external_dat = APP_VAR + '/nmon_repository/' + fifo_name + '/nmon_external.dat'
|
||||
nmon_external_header_dat = APP_VAR + '/nmon_repository/' + fifo_name + '/nmon_external_header.dat'
|
||||
nmon_error_dat = APP_VAR + '/nmon_repository/' + fifo_name + '/nmon_error.dat'
|
||||
nmon_dat = {nmon_config_dat, nmon_header_dat, nmon_timestamp_dat, nmon_data_dat, nmon_external_dat,
|
||||
nmon_external_header_dat, nmon_error_dat}
|
||||
|
||||
# Manage existing files and do the rotation if required
|
||||
if os.path.exists(nmon_data_dat) and os.path.getsize(nmon_data_dat) > 0:
|
||||
for file in nmon_dat:
|
||||
rotated_file = str(file) + ".rotated"
|
||||
if os.path.isfile(rotated_file):
|
||||
os.remove(rotated_file)
|
||||
if os.path.isfile(file):
|
||||
os.rename(file, rotated_file)
|
||||
|
||||
elif os.path.exists(nmon_data_dat):
|
||||
for file in nmon_dat:
|
||||
if os.path.isfile(file):
|
||||
os.remove(file)
|
||||
|
||||
####################################################################
|
||||
# Main Program
|
||||
####################################################################
|
||||
|
||||
# Verify the fifo file exists, and start processing
|
||||
if not os.path.exists(fifo_path):
|
||||
logging.info(
|
||||
'The fifo file ' + fifo_path + ' does not exist yet, we are not ready to start')
|
||||
sys.exit(0)
|
||||
else:
|
||||
# we use the metricator_reader.sh to read the fifo file, benchmarks have shown more stability than
|
||||
# opening the fifo file in pure Python
|
||||
cmd = fifo_reader + " " + fifo_path
|
||||
p = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, universal_newlines=True)
|
||||
|
||||
while 1:
|
||||
line = str(p.stdout.readline())
|
||||
if line == '' and p.poll() != None:
|
||||
break
|
||||
|
||||
# Manage nmon config
|
||||
nmon_config_match = re.match(r'^[AAA|BBB].+', str(line))
|
||||
nmon_header_match = re.match(r'^(?!AAA|BBB|TOP)[a-zA-Z0-9\-\_]*,(?!T\d{3,})[^,]*,(?!T\d{3,})[^,]*.*', str(line))
|
||||
nmon_header_TOP_match = re.match(r'^TOP,(?!\d*,)', str(line))
|
||||
nmon_timestamp_match = re.match(r'^ZZZZ,T\d*', str(line))
|
||||
nmon_error_match = re.match(r'^ERROR,T\d*', str(line))
|
||||
|
||||
if nmon_config_match:
|
||||
with open(nmon_config_dat, "a") as nmon_config:
|
||||
print(str(line))
|
||||
nmon_config.write(line)
|
||||
|
||||
elif nmon_header_match:
|
||||
with open(nmon_header_dat, "a") as nmon_header:
|
||||
nmon_header.write(line)
|
||||
|
||||
elif nmon_header_TOP_match:
|
||||
with open(nmon_header_dat, "a") as nmon_header:
|
||||
nmon_header.write(line)
|
||||
|
||||
elif nmon_error_match:
|
||||
with open(nmon_error_dat, "a") as nmon_error:
|
||||
nmon_error.write(line)
|
||||
|
||||
# timestamp management: write the nmon timestamp in nmon_data and as well nmon_timestamp for later use
|
||||
elif nmon_timestamp_match:
|
||||
with open(nmon_timestamp_dat, "a") as nmon_timestamp:
|
||||
nmon_timestamp.write(line)
|
||||
with open(nmon_data_dat, "a") as nmon_data:
|
||||
nmon_data.write(line)
|
||||
|
||||
else:
|
||||
with open(nmon_data_dat, "a") as nmon_data:
|
||||
nmon_data.write(line)
|
||||
@ -0,0 +1,30 @@
|
||||
#!/bin/sh
|
||||
|
||||
# set -x
|
||||
|
||||
# Program name: metricator_reader.sh
|
||||
# Compatibility: Shell
|
||||
# Purpose - read nmon data from fifo file and output to stdout
|
||||
# Author - Guilhem Marchand
|
||||
|
||||
# Version 2.0.0
|
||||
|
||||
# For AIX / Linux / Solaris
|
||||
|
||||
#################################################
|
||||
## Your Customizations Go Here ##
|
||||
#################################################
|
||||
|
||||
# fifo to be read (valid choices are: fifo1 | fifo2)
|
||||
FIFO=$1
|
||||
|
||||
####################################################################
|
||||
############# Main Program ############
|
||||
####################################################################
|
||||
|
||||
while IFS= read -r line
|
||||
do
|
||||
echo "$line"
|
||||
done <$FIFO
|
||||
|
||||
exit 0
|
||||
@ -0,0 +1,58 @@
|
||||
#!/bin/sh
|
||||
|
||||
# Program name: nmon_external_snap.sh
|
||||
# Purpose - Add external command results to extend nmon data
|
||||
# Author - Guilhem Marchand
|
||||
# Disclaimer: this is provided "as is".
|
||||
# Date - March 2017
|
||||
# Guilhem Marchand 2017/03/18, initial version
|
||||
|
||||
# 2017/04/29, Guilhem Marchand: - AIX compatibility issues, detach the commands in background
|
||||
# 2017/06/04, Guilhem Marchand: - Manage nmon external data in a dedicated file
|
||||
# 2018/01/09, Guilhem Marchand: - exclude dockers virtual fs, improve exclusion of false fs
|
||||
|
||||
# Version 1.0.3
|
||||
|
||||
# For AIX / Linux / Solaris
|
||||
|
||||
# for more information, see:
|
||||
# https://www.ibm.com/developerworks/community/blogs/aixpert/entry/nmon_and_External_Data_Collectors?lang=en
|
||||
|
||||
# This script will output the values for our custom external monitors
|
||||
# The first field defines the name of the monitor (type field in the application)
|
||||
# This monitor name must then be added to your local/nmonparser_config.json file
|
||||
|
||||
# 2 sections are available for nmon external monitor management:
|
||||
# - nmon_external: manage any number of fields without transposition
|
||||
# - nmon_external_transposed: manage any number of fields with a notion of device / value
|
||||
|
||||
# note: the NMON_FIFO_PATH is a pattern that will be replaced by the metricator_helper.sh script in a copy of this script
|
||||
# that lives for the lifetime of the nmon process that was started
|
||||
|
||||
# CAUTION: ensure your custom command does not output any comma within the field name and value
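# As an illustration only, a hypothetical additional monitor could be appended in the same way; the
# monitor name (here "USERCOUNT", an assumed example) would also need to be added to the "nmon_external"
# list in your local/nmonparser_config.json:
# echo "USERCOUNT,$1,`who | wc -l`" >>NMON_FIFO_PATH/nmon_external.dat &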
|
||||
|
||||
# Number of running processes
|
||||
echo "PROCCOUNT,$1,`ps -ef | wc -l`" >>NMON_FIFO_PATH/nmon_external.dat &
|
||||
|
||||
# Uptime information (uptime command output)
|
||||
echo "UPTIME,$1,\"`uptime | sed 's/^\s//g' | sed 's/,/;/g'`\"" >>NMON_FIFO_PATH/nmon_external.dat &
|
||||
|
||||
# df table information
|
||||
DF_TABLE=`df -k -P | sed '1d' | egrep -v '\/proc$|/dev$|\/run$|^tmpfs.*\/dev.*$|^tmpfs.*\/run.*$|^tmpfs.*\/sys.*$|^tmpfs.*\/var.*$|^none.*\/run|^none.*\/sys.*|\/var\/lib\/docker\/aufs\/mnt\/.*|\/var\/lib\/docker\/containers\/.*' | awk '{print $6}'`
|
||||
for fs in $DF_TABLE; do
|
||||
echo "DF_STORAGE,$1,`df -k -P $fs | sed '1d' | sed 's/%//g' | sed 's/,/;/g' | awk '{print $1 "," $2 "," $3 "," $4 "," $5 "," $6}'`" >>NMON_FIFO_PATH/nmon_external.dat
|
||||
done
|
||||
|
||||
# DF_INODES, for AIX and Linux
|
||||
case `uname` in
|
||||
"AIX")
|
||||
for fs in $DF_TABLE; do
|
||||
echo "DF_INODES,$1,`df -i $fs | sed '1d' | sed 's/%//g' | sed 's/,/;/g' | awk '{print $1 "," $5 "," $6 "," $7}'`" >>NMON_FIFO_PATH/nmon_external.dat
|
||||
done
|
||||
;;
|
||||
"Linux")
|
||||
for fs in $DF_TABLE; do
|
||||
echo "DF_INODES,$1,`df -i -P $fs | sed '1d' | sed 's/%//g' | sed 's/,/;/g' | awk '{print $1 "," $2 "," $3 "," $4 "," $5 "," $6}'`" >>NMON_FIFO_PATH/nmon_external.dat
|
||||
done
|
||||
;;
|
||||
esac
|
||||
@ -0,0 +1,52 @@
|
||||
#!/bin/sh
|
||||
|
||||
# Program name: nmon_external_start.sh
|
||||
# Purpose - Add external command results to extend nmon data (header definition)
|
||||
# Author - Guilhem Marchand
|
||||
# Disclaimer: this is provided "as is".
|
||||
# Date - March 2017
|
||||
# Guilhem Marchand 2017/03/18, initial version
|
||||
# Guilhem Marchand 2017/03/29, protect against unexpected failure when getting the NMON_EXTERNAL_DIR value
|
||||
# Guilhem Marchand 2017/06/09, use dedicated files for external header and data
|
||||
# Guilhem Marchand 2017/08/17, Adding DF table
|
||||
|
||||
# Version 1.0.3
|
||||
|
||||
# For AIX / Linux / Solaris
|
||||
|
||||
# for more information, see:
|
||||
# https://www.ibm.com/developerworks/community/blogs/aixpert/entry/nmon_and_External_Data_Collectors?lang=en
|
||||
|
||||
# This script will define the headers for our custom external monitors
|
||||
# The first field defines the name of the monitor (type field in the application)
|
||||
# This monitor name must then be added to your local/nmonparser_config.json file
|
||||
|
||||
# 2 sections are available for nmon external monitor management:
|
||||
# - nmon_external: manage any number of fields without transposition
|
||||
# - nmon_external_transposed: manage any number of fields with a notion of device / value
|
||||
|
||||
# note: the NMON_FIFO_PATH is a pattern that will be replaced by the metricator_helper.sh script in a copy of this script
|
||||
# that lives for the lifetime of the nmon process that was started
|
||||
|
||||
# Empty the header file if existing
|
||||
if [ -f NMON_FIFO_PATH/nmon_external_header.dat ]; then
|
||||
>NMON_FIFO_PATH/nmon_external_header.dat
|
||||
fi
|
||||
|
||||
# CAUTION: ensure your custom command does not output any comma within the field name and value
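# As an illustration only, the matching header declaration for a hypothetical custom "USERCOUNT" monitor
# (see the example in nmon_external_snap.sh) would look like this:
# echo "USERCOUNT,Logged in users,nb_logged_in_users" >>NMON_FIFO_PATH/nmon_external_header.dat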
|
||||
|
||||
# number of running processes
|
||||
echo "PROCCOUNT,Process Count,nb_running_processes" >>NMON_FIFO_PATH/nmon_external_header.dat
|
||||
|
||||
# uptime information
|
||||
echo "UPTIME,Server Uptime and load,uptime_stdout" >>NMON_FIFO_PATH/nmon_external_header.dat
|
||||
|
||||
# DF table (file systems usage)
|
||||
echo "DF_STORAGE,File system disk space usage,filesystem,blocks,Used,Available,Use_pct,mount" >>NMON_FIFO_PATH/nmon_external_header.dat
|
||||
# inodes utilization table is system specific and available for AIX and Linux
|
||||
case `uname` in
|
||||
"AIX")
|
||||
echo "DF_INODES,File system inodes usage,filesystem,IUsed,IUse_pct,mount" >>NMON_FIFO_PATH/nmon_external_header.dat ;;
|
||||
"Linux")
|
||||
echo "DF_INODES,File system inodes usage,filesystem,Inodes,IUsed,IFree,IUse_pct,mount" >>NMON_FIFO_PATH/nmon_external_header.dat ;;
|
||||
esac
|
||||
@ -0,0 +1,197 @@
|
||||
#!/bin/sh
|
||||
|
||||
# set -x
|
||||
|
||||
# Program name: nmonparser.sh
|
||||
# Purpose - Frontal script to nmonparser, will launch Python or Perl script depending on interpreter availability
|
||||
# See nmonparser | nmonparser.pl
|
||||
# Author - Guilhem Marchand
|
||||
|
||||
# Version 2.0.1
|
||||
|
||||
# For AIX / Linux / Solaris
|
||||
|
||||
#################################################
|
||||
## Your Customizations Go Here ##
|
||||
#################################################
|
||||
|
||||
# format date output to strftime dd/mm/YYYY HH:MM:SS
|
||||
log_date () {
|
||||
date "+%d-%m-%Y %H:%M:%S"
|
||||
}
|
||||
|
||||
# Set host
|
||||
HOST=`hostname`
|
||||
|
||||
if [ -z "${SPLUNK_HOME}" ]; then
|
||||
echo "`log_date`, ERROR, SPLUNK_HOME variable is not defined"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Set tmp directory
|
||||
APP_VAR=${SPLUNK_HOME}/var/log/metricator
|
||||
|
||||
# Verify it exists
|
||||
if [ ! -d ${APP_VAR} ]; then
|
||||
mkdir -p ${APP_VAR}
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# silently remove tmp file (testing existence before rm seems to cause trouble on some old OS)
|
||||
rm -f ${APP_VAR}/nmonparser.temp.*
|
||||
|
||||
# Set nmon_temp
|
||||
nmon_temp=${APP_VAR}/nmonparser.temp.$$
|
||||
|
||||
# APP path discovery
|
||||
if [ -d "$SPLUNK_HOME/etc/apps/TA-metricator-for-nmon" ]; then
|
||||
APP=$SPLUNK_HOME/etc/apps/TA-metricator-for-nmon
|
||||
|
||||
elif [ -d "$SPLUNK_HOME/etc/slave-apps/TA-metricator-for-nmon" ];then
|
||||
APP=$SPLUNK_HOME/etc/slave-apps/TA-metricator-for-nmon
|
||||
|
||||
else
|
||||
echo "`log_date`, ${HOST} ERROR, the APP directory could not be defined, is the TA-metricator-for-nmon installed ?"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# source default nmon.conf
|
||||
if [ -f $APP/default/nmon.conf ]; then
|
||||
# During initial deployment, the nmon.conf needs to be managed properly by the metricator_consumer.sh
|
||||
# wait for this to be done
|
||||
grep '\[nmon\]' $APP/default/nmon.conf >/dev/null
|
||||
if [ $? -eq 0 ]; then
|
||||
echo "`log_date`, ${HOST} INFO, initial deployment condition detected, safe exiting."
|
||||
exit 0
|
||||
else
|
||||
. $APP/default/nmon.conf
|
||||
fi
|
||||
fi
|
||||
|
||||
# source local nmon.conf, if any
|
||||
|
||||
# Search for a local nmon.conf file located in $SPLUNK_HOME/etc/apps/TA-metricator-for-nmon/local
|
||||
if [ -f $APP/local/nmon.conf ]; then
|
||||
. $APP/local/nmon.conf
|
||||
fi
|
||||
|
||||
# On a per server basis, you can also set in /etc/nmon.conf
|
||||
if [ -f /etc/nmon.conf ]; then
|
||||
. /etc/nmon.conf
|
||||
fi
|
||||
|
||||
# Manage FQDN option
|
||||
echo $nmonparser_options | grep '\-\-use_fqdn' >/dev/null
|
||||
if [ $? -eq 0 ]; then
|
||||
# Only relevant for Linux OS
|
||||
case $UNAME in
|
||||
Linux)
|
||||
HOST=`hostname -f` ;;
|
||||
AIX)
|
||||
HOST=`hostname` ;;
|
||||
SunOS)
|
||||
HOST=`hostname` ;;
|
||||
esac
|
||||
else
|
||||
HOST=`hostname`
|
||||
fi
|
||||
|
||||
# Manage host override option based on Splunk hostname defined
|
||||
case $override_sys_hostname in
|
||||
"1")
|
||||
# Retrieve the Splunk host value
|
||||
HOST=`cat $SPLUNK_HOME/etc/system/local/inputs.conf | grep '^host =' | awk -F\= '{print $2}' | sed 's/ //g'`
|
||||
;;
|
||||
esac
|
||||
|
||||
#
|
||||
# Interpreter choice
|
||||
#
|
||||
|
||||
PYTHON=0
|
||||
PYTHON2=0
|
||||
PYTHON3=0
|
||||
PERL=0
|
||||
# Set the default interpreter
|
||||
INTERPRETER="python"
|
||||
|
||||
# Get the version for both worlds
|
||||
PYTHON2=`which python 2>&1`
|
||||
PYTHON3=`which python3 2>&1`
|
||||
PERL=`which perl 2>&1`
|
||||
|
||||
# Handle Python
|
||||
PYTHON_available="false"
|
||||
case $PYTHON3 in
|
||||
*python*)
|
||||
PYTHON_available="true"
|
||||
INTERPRETER="python3" ;;
|
||||
*)
|
||||
case $PYTHON2 in
|
||||
*python*)
|
||||
PYTHON_available="true"
|
||||
INTERPRETER="python" ;;
|
||||
esac
|
||||
;;
|
||||
esac
|
||||
|
||||
# Handle Perl
|
||||
case $PERL in
|
||||
*perl*)
|
||||
PERL_available="true"
|
||||
;;
|
||||
*)
|
||||
PERL_available="false"
|
||||
;;
|
||||
esac
|
||||
|
||||
case `uname` in
|
||||
|
||||
# AIX priority is Perl
|
||||
"AIX")
|
||||
case $PERL_available in
|
||||
"true")
|
||||
INTERPRETER="perl" ;;
|
||||
"false")
|
||||
INTERPRETER="$INTERPRETER" ;;
|
||||
esac
|
||||
;;
|
||||
|
||||
# Other OS, priority is Python
|
||||
*)
|
||||
case $PYTHON_available in
|
||||
"true")
|
||||
INTERPRETER="$INTERPRETER" ;;
|
||||
"false")
|
||||
INTERPRETER="perl" ;;
|
||||
esac
|
||||
;;
|
||||
esac
|
||||
|
||||
####################################################################
|
||||
############# Main Program ############
|
||||
####################################################################
|
||||
|
||||
# Store arguments sent to script
|
||||
userargs=$@
|
||||
|
||||
# Store stdin
|
||||
while read line ; do
|
||||
echo "$line" >> ${nmon_temp}
|
||||
done
|
||||
|
||||
# Start the parser
|
||||
case ${INTERPRETER} in
|
||||
|
||||
"python"|"python3")
|
||||
cat ${nmon_temp} | ${SPLUNK_HOME}/bin/splunk cmd $INTERPRETER ${APP}/bin/nmonparser.py ${userargs} ;;
|
||||
|
||||
"perl")
|
||||
cat ${nmon_temp} | ${SPLUNK_HOME}/bin/splunk cmd ${APP}/bin/nmonparser.pl ${userargs} ;;
|
||||
|
||||
esac
|
||||
|
||||
# Remove temp
|
||||
rm -f ${nmon_temp}
|
||||
|
||||
exit 0
|
||||
@ -0,0 +1,140 @@
|
||||
# pre-actions scripts
|
||||
|
||||
The frameID definition is an enrichment mechanism used within the application to associate a given host with a given frame identifier.
|
||||
By default, the mapping is operated against the value of "serialnum" which is defined at the raw level by nmon binaries.
|
||||
|
||||
- On AIX systems, the serialnum value is equal to the serial number of the frame hosting the partition
|
||||
- On Linux and Solaris systems, the serialnum is equal to the value of the hostname
|
||||
|
||||
A pre-action script can be designed to define a serial number according to your needs.
|
||||
|
||||
In "nmon.conf", the following settings are designed to manage the serial number:
|
||||
|
||||
- override_sys_serialnum="1": will activate the serial number override
|
||||
- override_sys_serialnum_value="<value for serial number>": defines its value
|
||||
|
||||
Add any shell script in this directory to get a pre-action to be executed automatically by the metricator_helper.sh script at startup time.
|
||||
You can use this simple feature to perform a pre-action each time the metricator_helper.sh script is executed.
|
||||
|
||||
stdout will be indexed in "sourcetype=nmon_collect".
|
||||
stderr will be indexed in splunkd logs.
|
||||
|
||||
Any script you add in this directory will be upgrade resilient and will not be lost or modified when you upgrade the TA.
|
||||
|
||||
pre-action script executions will be visible in sourcetype=nmon_collect:
|
||||
|
||||
message = <date>, ${HOST} INFO, executing pre-action script: <name of pre-action script>
|
||||
|
||||
Requirements:
|
||||
|
||||
- script names can be whatever you want
|
||||
- must have ".sh" extension
|
||||
- must have execution permission for the Unix user that owns the processes
|
||||
|
||||
## use case examples:
|
||||
|
||||
### 1. run a local command on servers to define the serial number value
|
||||
|
||||
----------------------------
|
||||
#!/bin/sh
|
||||
|
||||
serialnumber=`<replace with the command that retrieves the value to be used as the serialnumber>`
|
||||
|
||||
# nmon_conf="/etc/nmon.conf" to write in /etc/nmon.conf (requires processes to run under root)
|
||||
# nmon_conf="$SPLUNK_HOME/etc/apps/TA-metricator-for-nmon/local/nmon.conf" to write in app name space
|
||||
nmon_conf="$SPLUNK_HOME/etc/apps/TA-metricator-for-nmon/local/nmon.conf"
|
||||
|
||||
# if nmon.conf could not be found, create, activate serial number override and fill its value
|
||||
|
||||
if [ ! -f $nmon_conf ]; then
|
||||
echo "# nmon.conf" >> $nmon_conf
|
||||
echo "override_sys_serialnum=\"1\"" >> $nmon_conf
|
||||
echo "override_sys_serialnum_value=\"$serialnumber\"" >> $nmon_conf
|
||||
|
||||
else # verify the option activation, verify the serial number value
|
||||
|
||||
egrep "^override_sys_serialnum=" $nmon_conf >/dev/null
|
||||
if [ $? -ne 0 ]; then
|
||||
echo "override_sys_serialnum=\"1\"" >> $nmon_conf
|
||||
fi
|
||||
|
||||
# verify serial number value
|
||||
egrep "^override_sys_serialnum_value=" $nmon_conf >/dev/null
|
||||
|
||||
if [ $? -eq 0 ]; then # if option is set, check value
|
||||
egrep "^override_sys_serialnum_value=\"$serialnumber\"" $nmon_conf >/dev/null
|
||||
if [ $? -ne 0 ]; then # if mismatch, replace value
|
||||
cat $nmon_conf | grep -v "override_sys_serialnum_value" > ${nmon_conf}.new
|
||||
echo "override_sys_serialnum_value=\"$serialnumber\"" >> ${nmon_conf}.new
|
||||
mv ${nmon_conf}.new $nmon_conf
|
||||
fi
|
||||
else # option is not set, simply add it
|
||||
echo "override_sys_serialnum_value=\"$serialnumber\"" >> $nmon_conf
|
||||
fi
|
||||
|
||||
fi
|
||||
|
||||
exit 0
|
||||
----------------------------
|
||||
|
||||
### 2. Using server naming conventions and domain names to set up a frameID by extracting the region name
|
||||
|
||||
----------------------------
|
||||
#!/bin/sh
|
||||
|
||||
# naming convention: server001.mycompany.co.uk | retrieve the region as the 4th segment of the FQDN
|
||||
HOST=`hostname`
|
||||
case `uname` in
|
||||
Linux)
|
||||
FQDN=`hostname -f` ;;
|
||||
*)
|
||||
FQDN=$HOST ;;
|
||||
esac
|
||||
|
||||
REGION=`echo $FQDN | awk -F\. '{print $4}'`
|
||||
|
||||
# revert to host value in case of failure
|
||||
# otherwise, set it to: datacenter-<region>
|
||||
case $REGION in
|
||||
"")
|
||||
serialnumber="$HOST" ;;
|
||||
*)
|
||||
serialnumber="datacenter-$REGION" ;;
|
||||
esac
|
||||
|
||||
# nmon_conf="/etc/nmon.conf" to write in /etc/nmon.conf (requires processes to run under root)
|
||||
# nmon_conf="$SPLUNK_HOME/etc/apps/TA-metricator-for-nmon/local/nmon.conf" to write in app name space
|
||||
nmon_conf="$SPLUNK_HOME/etc/apps/TA-metricator-for-nmon/local/nmon.conf"
|
||||
|
||||
# if nmon.conf could not be found, create, activate serial number override and fill its value
|
||||
|
||||
if [ ! -f $nmon_conf ]; then
|
||||
echo "# nmon.conf" >> $nmon_conf
|
||||
echo "override_sys_serialnum=\"1\"" >> $nmon_conf
|
||||
echo "override_sys_serialnum_value=\"$serialnumber\"" >> $nmon_conf
|
||||
|
||||
else # verify the option activation
|
||||
|
||||
egrep "^override_sys_serialnum=" $nmon_conf >/dev/null
|
||||
if [ $? -ne 0 ]; then
|
||||
echo "override_sys_serialnum=\"1\"" >> $nmon_conf
|
||||
fi
|
||||
|
||||
# verify serial number value
|
||||
egrep "^override_sys_serialnum_value=" $nmon_conf >/dev/null
|
||||
|
||||
if [ $? -eq 0 ]; then # if option is set, check value
|
||||
egrep "^override_sys_serialnum_value=\"$serialnumber\"" $nmon_conf >/dev/null
|
||||
if [ $? -ne 0 ]; then # if mismatch, replace value
|
||||
cat $nmon_conf | grep -v "override_sys_serialnum_value" > ${nmon_conf}.new
|
||||
echo "override_sys_serialnum_value=\"$serialnumber\"" >> ${nmon_conf}.new
|
||||
mv ${nmon_conf}.new $nmon_conf
|
||||
fi
|
||||
else # option is not set, simply add it
|
||||
echo "override_sys_serialnum_value=\"$serialnumber\"" >> $nmon_conf
|
||||
fi
|
||||
|
||||
fi
|
||||
|
||||
exit 0
|
||||
----------------------------
|
||||
@ -0,0 +1,3 @@
|
||||
# sarmon_bin_* directories contain compiled sarmon binaries for x86 and sparc processors
|
||||
|
||||
The source code of sarmon is available at the sarmon web site: http://www.geckotechnology.com/sarmon
|
||||
@ -0,0 +1,19 @@
|
||||
#
|
||||
# Splunk app configuration file
|
||||
#
|
||||
|
||||
[install]
|
||||
is_configured = 0
|
||||
|
||||
[package]
|
||||
id = TA-metricator-for-nmon
|
||||
check_for_updates = true
|
||||
|
||||
[ui]
|
||||
is_visible = 0
|
||||
label = TA-metricator-for-nmon
|
||||
|
||||
[launcher]
|
||||
author = Guilhem Marchand for Octamis
|
||||
description = Nmon performance and configuration data technical addon for the metric store
|
||||
version = 1.1.1
|
||||
@ -0,0 +1,87 @@
|
||||
# inputs.conf
|
||||
|
||||
######
|
||||
# main
|
||||
######
|
||||
|
||||
# The metricator_helper.sh performs nmon binary starting if required
|
||||
# by default, the script is executed every 60 seconds and will not perform any action if
|
||||
# the nmon binary is started
|
||||
|
||||
[script://./bin/metricator_helper.sh]
|
||||
disabled = false
|
||||
index = os-unix-nmon-internal
|
||||
interval = 60
|
||||
source = nmon_collect
|
||||
sourcetype = nmon_collect
|
||||
|
||||
# The metricator_consumer.sh script consumes nmon data produced in fifo mode
|
||||
|
||||
[script://./bin/metricator_consumer.sh]
|
||||
disabled = false
|
||||
index = os-unix-nmon-internal
|
||||
interval = 60
|
||||
source = fifo_consumer
|
||||
sourcetype = nmon_processing
|
||||
|
||||
# Example of configuration for cold data processing
|
||||
|
||||
# [monitor://$SPLUNK_HOME/var/log/metricator/var/nmon_repository/*.nmon]
|
||||
# disabled = false
|
||||
# followTail = 0
|
||||
# index = os-unix-nmon-internal
|
||||
# sourcetype = nmon_processing
|
||||
# crcSalt = <SOURCE>
|
||||
|
||||
##############
|
||||
# nmon metrics
|
||||
##############
|
||||
|
||||
# nmon metrics stored as native metrics
|
||||
|
||||
[batch://$SPLUNK_HOME/var/log/metricator/var/csv_repository/*metrics.csv]
|
||||
disabled = false
|
||||
move_policy = sinkhole
|
||||
index = os-unix-nmon-metrics
|
||||
sourcetype = nmon_metrics_csv
|
||||
# source override: to prevent Metadata from having millions of entries, the source is overridden by default
|
||||
# You can disable this for troubleshooting purposes if required
|
||||
source = nmon_perfdata_metrics
|
||||
|
||||
# nmon data stored as regular events
|
||||
|
||||
[batch://$SPLUNK_HOME/var/log/metricator/var/csv_repository/*events.csv]
|
||||
disabled = false
|
||||
move_policy = sinkhole
|
||||
index = os-unix-nmon-events
|
||||
sourcetype = nmon_data
|
||||
# source override: to prevent Metadata from having millions of entries, the source is overridden by default
|
||||
# You can disable this for troubleshooting purposes if required
|
||||
source = nmon_perfdata_events
|
||||
|
||||
#############
|
||||
# nmon config
|
||||
#############
|
||||
|
||||
# Files are monitored using regular file monitor to avoid risk of data duplication
|
||||
# Cleaning will be achieved by nmon_cleaner jobs
|
||||
[monitor://$SPLUNK_HOME/var/log/metricator/var/config_repository/*events.csv]
|
||||
disabled = false
|
||||
index = os-unix-nmon-config
|
||||
sourcetype = nmon_config
|
||||
# source override: to prevent Metadata from having millions of entries, the source is overridden by default
|
||||
# You can disable this for troubleshooting purposes if required
|
||||
source = configdata
|
||||
|
||||
##############
|
||||
# nmon cleaner
|
||||
##############
|
||||
|
||||
# The metricator_cleaner.sh script performs cleaning tasks internal to the TA, scheduled once a day
|
||||
|
||||
[script://./bin/metricator_cleaner.sh --cleancsv]
|
||||
disabled = false
|
||||
index = os-unix-nmon-internal
|
||||
interval = 14400
|
||||
source = nmon_cleaner
|
||||
sourcetype = nmon_clean
|
||||
@ -0,0 +1,212 @@
|
||||
# nmon.conf
|
||||
|
||||
|
||||
|
||||
################################
|
||||
# Nmon processes related options
|
||||
################################
|
||||
|
||||
#
|
||||
# These options will be used to manage Nmon processes starting options
|
||||
#
|
||||
|
||||
# This configuration file will set the interval and snapshot values when starting up the nmon binary
|
||||
# It is being sourced by the metricator_helper.sh script during script startup
|
||||
|
||||
# *** BE UPGRADE RESILIENT: *** Copy this file to your local/nmon.conf to prevent future upgrades from overwriting your settings
|
||||
# *** DON'T MODIFY THIS FILE ***
|
||||
|
||||
# *** FILE ENCODING: UTF-8 ! ***
|
||||
# When creating a local/nmon.conf, pay attention to file encoding, especially when working under Windows.
|
||||
# The file must be UTF-8 encoded or you may run into trouble.
|
||||
|
||||
### NMON COLLECT OPTIONS ###
|
||||
|
||||
# The metricator_helper.sh input script is set by default to run every 60 seconds
|
||||
# If Nmon is not running, the script will start Nmon using the configuration above
|
||||
|
||||
###
|
||||
### FIFO options:
|
||||
###
|
||||
|
||||
# FIFO files (named pipes) are now used to minimize the CPU footprint of the technical add-ons
|
||||
# As such, it is no longer required to use short Nmon run cycles to reduce the CPU usage
|
||||
|
||||
# You may still want to manage the volume of data generated by adjusting the interval and snapshot values
|
||||
# as a best practice recommendation, the time to live of nmon processes writing to FIFO should be 24 hours
|
||||
|
||||
# value for interval: time in seconds between 2 performance measures
|
||||
fifo_interval="60"
|
||||
|
||||
# value for snapshot: number of measures to perform
|
||||
fifo_snapshot="1440"
|
||||
|
||||
### VARIOUS COMMON OPTIONS ###
|
||||
|
||||
# Time in seconds of margin before running a new iteration of Nmon process to prevent data gaps between 2 iterations of Nmon
|
||||
# the metricator_helper.sh script will spawn a new Nmon process when the age in seconds of the current process exceeds the computed endtime below
|
||||
|
||||
# The endtime is evaluated the following way:
|
||||
# endtime=$(( ${interval} * ${snapshot} - ${endtime_margin} ))
|
||||
|
||||
# When the age of the current Nmon process gets higher than this endtime, a new Nmon process will be spawned
|
||||
# default value to 240 seconds which will start a new process 4 minutes before the current process ends
|
||||
|
||||
# Setting this value to "0" will totally disable this feature
|
||||
|
||||
endtime_margin="240"
|
||||
|
||||
### NFS OPTIONS ###
|
||||
|
||||
# Change to "1" to activate NFS V2 / V3 (option -N) for AIX hosts
|
||||
AIX_NFS23="0"
|
||||
|
||||
# Change to "1" to activate NFS V4 (option -NN) for AIX hosts
|
||||
AIX_NFS4="0"
|
||||
|
||||
# Change to "1" to activate NFS V2 / V3 / V4 (option -N) for Linux hosts
|
||||
# Note: Some versions of Nmon introduced a bug that makes Nmon core dump when activating NFS; ensure your version is not outdated
|
||||
Linux_NFS="0"
|
||||
|
||||
### LINUX OPTIONS ###
|
||||
|
||||
# Change the priority applied while looking at nmon binary
|
||||
# by default, the metricator_helper.sh script will use any nmon binary found in PATH
|
||||
# Set to "1" to give the priority to embedded nmon binaries
|
||||
# Note: Since release 1.6.07, priority is given by default to embedded binaries
|
||||
Linux_embedded_nmon_priority="1"
|
||||
|
||||
# Change the limit for processes and disks capture of nmon for Linux
|
||||
# In default configuration, nmon will capture most of the process table by capturing main consuming processes
|
||||
# This function uses a percentage limit of CPU time, with a default limit of 0.01
|
||||
# Changing this value can influence the volume of data to be generated, and the associated CPU overhead for that data to be parsed
|
||||
|
||||
# Possible values are:
|
||||
# Linux_unlimited_capture="0" --> Default nmon behavior, capture main processes (no -I option)
|
||||
# Linux_unlimited_capture="-1" --> Set the capture mode to unlimited (-I -1)
|
||||
# Linux_unlimited_capture="x.xx" --> Set the percentage limit to a custom value, ex: "0.01" will set "-I 0.01"
|
||||
Linux_unlimited_capture="0"
|
||||
|
||||
# Set the maximum number of devices collected by Nmon, default is set to 1500 devices
|
||||
# This option will be ignored if you set the Linux_unlimited_capture option above.
|
||||
# Increase this value if you have systems with more devices
|
||||
# Up to 3000 devices will be taken in charge by the Application (hard limit in nmonparser)
|
||||
Linux_devices="1500"
|
||||
|
||||
# Enable disks extended statistics (DG*)
|
||||
# Default is true, which activates and generates DG statistics
|
||||
Linux_disk_dg_enable="1"
|
||||
|
||||
# Name of the User Defined Disk Groups file, "auto" generates this for you
|
||||
Linux_disk_dg_group="auto"
|
||||
|
||||
### SOLARIS OPTIONS ###
|
||||
|
||||
# Change to "1" to activate VxVM volumes IO statistics
|
||||
Solaris_VxVM="0"
|
||||
|
||||
# UARG collection (new in Version 1.11), change to "0" to deactivate, "1" to activate (default is activated)
|
||||
Solaris_UARG="1"
|
||||
|
||||
### AIX COMMON OPTIONS ###
|
||||
|
||||
# CAUTION: Since release 1.3.0, we use fifo files, which requires the option "-yoverwrite=1"
|
||||
|
||||
# Change this line if you add or remove common options for AIX, do not change NFS options here (see NFS options)
|
||||
# the -p option is mandatory as it is used at launch time to save instance pid
|
||||
AIX_options="-T -A -d -K -L -M -P -^ -p -yoverwrite=1"
|
||||
|
||||
# enable this line if you want to get only active disks
|
||||
# AIX_options=""-T -A -d -K -L -M -P -^ -p -k `lspv|grep active|awk '{print $1","}'|tr -d '\040\011\012\015'` -yoverwrite=1"
|
||||
|
||||
#############################
|
||||
# Application related options
|
||||
#############################
|
||||
|
||||
#
|
||||
# These options are not directly related to nmon processes but to general features of the technical add-on
|
||||
#
|
||||
|
||||
######################
|
||||
# hostname definition:
|
||||
######################
|
||||
|
||||
# This option can be used to force the technical add-on to use the Splunk configured value of the server hostname
|
||||
# If for some reason, you need to use the Splunk host value instead of the system real hostname value, set this value to "1"
|
||||
|
||||
# We will search for the value of host=<value> in $SPLUNK_HOME/etc/system/local/inputs.conf
|
||||
# If no value can be found, or if the file does not exist, we will fallback to the normal behavior
|
||||
|
||||
# Default is to use the system hostname
|
||||
|
||||
# FQDN management in nmonparser: The --use_fqdn option is not compatible with the host name override; if the override_sys_hostname
|
||||
# option is activated, the --use_fqdn argument will have no effect
|
||||
|
||||
override_sys_hostname="0"
|
||||
|
||||
#####################
|
||||
# frameID definition:
|
||||
#####################
|
||||
|
||||
# The frameID definition is an enrichment mechanism used within the application to associate a given host with a given frame identifier
|
||||
# By default, the mapping is operated against the value of "serialnum" which is defined at the raw level by nmon binaries
|
||||
|
||||
# On AIX systems, the serialnum value is equal to the serial number of the frame hosting the partition
|
||||
# On Linux and Solaris systems, the serialnum is equal to the value of the hostname
|
||||
|
||||
# Using this option allows you to override the serialnum value by a static value defined in the nmon.conf configuration file
|
||||
# nmon.conf precedence allows defining the serialnum value on a per-deployment basis (local/nmon.conf) or on a per-server basis (/etc/nmon.conf)
|
||||
|
||||
# default is:
|
||||
# override_sys_serialnum="0"
|
||||
# which lets nmon set the serialnum value
|
||||
|
||||
# Set this value to:
|
||||
# override_sys_serialnum="1"
|
||||
# to activate the serialnum override based on the value defined in:
|
||||
|
||||
# override_sys_serialnum_value="<sting>"
|
||||
# Acceptable values for <string> are letters (lower and upper case), numbers and "-" / "_"
|
||||
|
||||
override_sys_serialnum="0"
|
||||
override_sys_serialnum_value="none"
|
||||
|
||||
########################
|
||||
# nmon external metrics:
|
||||
########################
|
||||
|
||||
# nmon external generation management
|
||||
|
||||
# This option manages the activation or deactivation of nmon external data generation at the lowest level, before it reaches the parsers
|
||||
# default is activated (value=1), set to "0" to deactivate
|
||||
|
||||
nmon_external_generation="1"
|
||||
|
||||
###############
|
||||
# fifo options:
|
||||
###############
|
||||
|
||||
# Fifo options
|
||||
|
||||
# The realtime mode which corresponds to the old mechanism is now deprecated
|
||||
# fifo mode is mandatory
|
||||
|
||||
# Default is "1" which means write to fifo
|
||||
|
||||
mode_fifo="1"
|
||||
|
||||
#######################
|
||||
# nmon parsers options:
|
||||
#######################
|
||||
|
||||
# consult the documentation to get the full list of available options
|
||||
|
||||
# --mode fifo|colddata --> explicitly manage data in fifo/colddata
|
||||
# --use_fqdn --> use the host fully qualified domain name (default)
|
||||
# --silent --> minimize the processing output to save data volume (deactivated by default)
|
||||
# --show_zero_values --> allows generating metrics with 0 values (by default, any metric with a zero value is removed before ingestion)
|
||||
|
||||
# In fifo mode, options are sent by the metricator_consumer.sh
|
||||
# In file mode, options are sent by Splunk via the nmon_processing stanza in props.conf
|
||||
|
||||
nmonparser_options="--mode fifo --use_fqdn --silent"
|
||||
@ -0,0 +1,32 @@
|
||||
{
|
||||
"static_section":["CPUnn", "CPU_ALL", "FILE", "MEM", "PAGE", "MEMNEW", "MEMUSE", "PROC", "VM", "NFSSVRV2", "NFSSVRV3", "NFSSVRV4", "NFSCLIV2", "NFSCLIV3", "NFSCLIV4"],
|
||||
|
||||
"Solaris_static_section":["PROCSOL"],
|
||||
|
||||
"LPAR_static_section":["LPAR", "POOLS"],
|
||||
|
||||
"top_section":["TOP"],
|
||||
|
||||
"uarg_section":["UARG"],
|
||||
|
||||
"dynamic_section1":["DISKBUSY", "DISKBSIZE", "DISKREAD", "DISKWRITE", "DISKXFER", "DISKREADSERV", "DISKWRITESERV"],
|
||||
|
||||
"dynamic_section2":["IOADAPT", "NETERROR", "NET", "NETPACKET", "JFSFILE", "JFSINODE", "FCREAD", "FCWRITE", "FCXFERIN", "FCXFEROUT"],
|
||||
|
||||
"disk_extended_section":["DGBUSY", "DGREAD", "DGWRITE", "DGSIZE", "DGXFER", "DGREADS", "DGREADMERGE", "DGREADSERV", "DGWRITES", "DGWRITEMERGE", "DGWRITESERV", "DGINFLIGHT", "DGIOTIME", "DGBACKLOG"],
|
||||
|
||||
"solaris_WLM":["WLMPROJECTCPU", "WLMZONECPU", "WLMTASKCPU", "WLMUSERCPU", "WLMPROJECTMEM", "WLMZONEMEM", "WLMTASKMEM", "WLMUSERMEM"],
|
||||
|
||||
"solaris_VxVM":["VxVMREAD", "VxVMWRITE", "VxVMXFER", "VxVMBSIZE", "VxVMBUSY", "VxVMSVCTM", "VxVMWAITTM"],
|
||||
|
||||
"solaris_dynamic_various":["DISKSVCTM", "DISKWAITTM"],
|
||||
|
||||
"AIX_dynamic_various":["SEA", "SEAPACKET", "SEACHPHY"],
|
||||
|
||||
"AIX_WLM":["WLMCPU", "WLMMEM", "WLMBIO"],
|
||||
|
||||
"nmon_external":["UPTIME", "PROCCOUNT", "DF_STORAGE", "DF_INODES"],
|
||||
|
||||
"nmon_external_transposed":[""]
|
||||
|
||||
}
|
||||
@ -0,0 +1,171 @@
|
||||
# props.conf
|
||||
|
||||
###############################
|
||||
# nmon metrics for metric store
|
||||
###############################
|
||||
|
||||
# Introduced with Splunk 7, metrics are now natively supported
|
||||
# Nmon uses its own copy of the default metrics_csv sourcetype
|
||||
|
||||
[nmon_metrics_csv]
|
||||
SHOULD_LINEMERGE = False
|
||||
pulldown_type = true
|
||||
INDEXED_EXTRACTIONS = csv
|
||||
ADD_EXTRA_TIME_FIELDS = False
|
||||
KV_MODE = none
|
||||
TIMESTAMP_FIELDS = metric_timestamp
|
||||
TIME_FORMAT = %s.%Q
|
||||
category = Metrics
|
||||
description = Comma-separated value format for metrics. Nmon implementation.
|
||||
|
||||
# Overwriting the default host field based on event data for the nmon_metrics_csv sourcetype (useful when managing Nmon central shares)
|
||||
TRANSFORMS-hostfield=nmon_metrics_csv_hostoverride
|
||||
|
||||
# Metrics can be sent by http using the Splunk Http Event Collector (HEC)
|
||||
[nmon_metrics_http]
|
||||
TIME_PREFIX = metric_timestamp=\"(\d+)\"
|
||||
TIME_FORMAT = %s
|
||||
TRANSFORMS-nmon_metrics_http = nmon_metrics_http_host, nmon_metrics_http_metric_name, nmon_metrics_http_metric_value, nmon_metrics_http_dims, nmon_metrics_http_OStype, nmon_metrics_http_serialnum
|
||||
NO_BINARY_CHECK = true
|
||||
SHOULD_LINEMERGE = false
|
||||
pulldown_type = 1
|
||||
category = Metrics
|
||||
|
||||
########################
|
||||
# nmon metrics as events
|
||||
########################
|
||||
|
||||
# This sourcetype stanza will be used to index nmon csv converted data
|
||||
# Every generated csv file will contain a CSV header used by Splunk to identify fields
|
||||
|
||||
[nmon_data]
|
||||
FIELD_DELIMITER=,
|
||||
FIELD_QUOTE="
|
||||
HEADER_FIELD_LINE_NUMBER=1
|
||||
|
||||
# your settings
|
||||
INDEXED_EXTRACTIONS=csv
|
||||
NO_BINARY_CHECK=1
|
||||
SHOULD_LINEMERGE=false
|
||||
TIMESTAMP_FIELDS=ZZZZ
|
||||
TIME_FORMAT=%d-%m-%Y %H:%M:%S
|
||||
|
||||
# set by detected source type
|
||||
KV_MODE=none
|
||||
pulldown_type=true
|
||||
|
||||
# Leaving PUNCT enabled can impact indexing performance, and uses space
|
||||
# For structured data, it has poor interest and shall be deactivated
|
||||
ANNOTATE_PUNCT=false
|
||||
|
||||
# Overwriting the default host field based on event data for the nmon_data sourcetype (useful when managing Nmon central shares)
|
||||
TRANSFORMS-hostfield=nmon_data_hostoverride
|
||||
|
||||
# nmon_data sent over http using the Splunk Http Event Collector (HEC)
|
||||
# This sourcetype will be automatically renamed to nmon_data
|
||||
|
||||
[nmon_data_http]
|
||||
SHOULD_LINEMERGE=false
|
||||
NO_BINARY_CHECK=true
|
||||
CHARSET=UTF-8
|
||||
TIME_FORMAT=%s
|
||||
TIME_PREFIX=timestamp="
|
||||
MAX_TIMESTAMP_LOOKAHEAD=26
|
||||
KV_MODE=auto
|
||||
|
||||
# Apply indexing time parsing configuration
|
||||
TRANSFORMS-nmon_data_http = nmon_data_http_host, nmon_data_http_OStype, nmon_data_http_type, nmon_data_http_sourcetype
|
||||
|
||||
# For search time extractions, activate kvmode to auto for that source
|
||||
[source::nmon_data:http]
|
||||
KV_MODE=auto
|
||||
|
||||
########################
|
||||
# nmon processing events
|
||||
########################
|
||||
|
||||
[nmon_processing]
|
||||
SHOULD_LINEMERGE=false
|
||||
NO_BINARY_CHECK=true
|
||||
CHARSET=UTF-8
|
||||
TIME_PREFIX=^
|
||||
TIME_FORMAT=%d-%m-%Y %H:%M:%S
|
||||
MAX_TIMESTAMP_LOOKAHEAD=19
|
||||
LINE_BREAKER=([\n\r]+)\d{2}-\d{2}-\d{4}\s\d{2}:\d{2}:\d{2}
|
||||
TRUNCATE=999999
|
||||
|
||||
# Deactivate KV
|
||||
KV_MODE=none
|
||||
|
||||
####################
|
||||
# nmon config events
|
||||
####################
|
||||
|
||||
[nmon_config]
|
||||
SHOULD_LINEMERGE=false
|
||||
NO_BINARY_CHECK=true
|
||||
CHARSET=UTF-8
|
||||
TIME_PREFIX=^CONFIG,
|
||||
TIME_FORMAT=%d-%b-%Y:%H:%M.%S
|
||||
LINE_BREAKER=([\r\n]+)CONFIG,\d{2}-\w{3}-\d{4}:\d{2}:\d{2}\.\d{2},
|
||||
TRUNCATE=0
|
||||
MAX_EVENTS=100000
|
||||
MAX_TIMESTAMP_LOOKAHEAD=30
|
||||
|
||||
# Deactivate KV
|
||||
KV_MODE = none
|
||||
|
||||
# Overwriting the default host field based on event data for the nmon_config sourcetype (useful when managing Nmon central shares)
|
||||
TRANSFORMS-hostfield=nmon_config_hostoverride
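# For illustration, based on the settings above an nmon_config event is expected to begin with a header
# such as "CONFIG,05-JUN-2017:09:55.26," (illustrative timestamp), followed by the configuration payload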
|
||||
|
||||
# nmon_config sent over http
|
||||
[nmon_config:http]
|
||||
SHOULD_LINEMERGE=false
|
||||
NO_BINARY_CHECK=true
|
||||
CHARSET=UTF-8
|
||||
LINE_BREAKER=([\r\n]+)timestamp=\"
|
||||
MAX_EVENTS=100000
|
||||
TIME_FORMAT=%s
|
||||
TIME_PREFIX=timestamp="
|
||||
TRUNCATE=0
|
||||
|
||||
# Rewrite the host and sourcetype metadata to manage search time extraction
|
||||
TRANSFORMS-nmon_config_http = nmon_config_http_rewrite_host, nmon_config_http_rewrite_sourcetype
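# Illustrative only: a hypothetical raw payload for nmon_config sent over HEC, consistent
# with the LINE_BREAKER and TIME_PREFIX above and with the rewrite transforms (the host="..."
# field becomes the host metadata and the sourcetype is rewritten to nmon_config):
#
#   timestamp="1514761200" host="server01" ...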
|
||||
|
||||
# For search heads
|
||||
[source::nmon_config:http]
|
||||
KV_MODE=none
|
||||
|
||||
#####################
|
||||
# nmon collect events
|
||||
#####################
|
||||
|
||||
[nmon_collect]
|
||||
SHOULD_LINEMERGE=false
|
||||
NO_BINARY_CHECK=true
|
||||
CHARSET=UTF-8
|
||||
TIME_PREFIX=^
|
||||
TIME_FORMAT=%d-%m-%Y %H:%M:%S
|
||||
MAX_TIMESTAMP_LOOKAHEAD=19
|
||||
LINE_BREAKER=([\n\r]+)\d{2}-\d{2}-\d{4}\s\d{2}:\d{2}:\d{2}
|
||||
TRUNCATE=999999
|
||||
|
||||
# Deactivate KV
|
||||
KV_MODE = none
|
||||
|
||||
###################
|
||||
# nmon clean events
|
||||
###################
|
||||
|
||||
[nmon_clean]
|
||||
SHOULD_LINEMERGE=false
|
||||
NO_BINARY_CHECK=true
|
||||
CHARSET=UTF-8
|
||||
TIME_PREFIX=^
|
||||
TIME_FORMAT=%d-%m-%Y %H:%M:%S
|
||||
MAX_TIMESTAMP_LOOKAHEAD=19
|
||||
LINE_BREAKER=([\n\r]+)\d{2}-\d{2}-\d{4}\s\d{2}:\d{2}:\d{2}
|
||||
TRUNCATE=999999
|
||||
|
||||
# Deactivate KV
|
||||
KV_MODE = none
|
||||
@ -0,0 +1,113 @@
|
||||
# transforms.conf
|
||||
|
||||
##############
|
||||
# nmon metrics
|
||||
##############
|
||||
|
||||
# host Meta overridden with 5th column
|
||||
[nmon_metrics_csv_hostoverride]
|
||||
DEST_KEY = MetaData:Host
|
||||
REGEX = ^\d*,\"{0,1}[^\"\,]*\"{0,1},\"{0,1}[^\"\,]*\"{0,1}[^\"\,]*\"{0,1},\"{0,1}[^\"\,]*\"{0,1},\"{0,1}([^\"\,]*)\"{0,1}
|
||||
FORMAT = host::$1
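# Illustrative only - with a purely hypothetical csv line such as:
#   1514761200,"cpu_all","user","Linux","server01",...
# the regex above captures the fifth comma-separated value and sets host=server01.
# The actual column meanings depend on the generated metrics csv files.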
|
||||
|
||||
# Metrics sent over http - host
|
||||
[nmon_metrics_http_host]
|
||||
DEST_KEY = MetaData:Host
|
||||
REGEX = hostname=\"([^\"]*)\"
|
||||
FORMAT = host::$1
|
||||
|
||||
# Metrics sent over http - metric_name
|
||||
[nmon_metrics_http_metric_name]
|
||||
REGEX = metric_name=\"([^\"]*)\"
|
||||
FORMAT = metric_name::$1
|
||||
WRITE_META = true
|
||||
|
||||
[nmon_metrics_http_metric_value]
|
||||
REGEX = _value=\"([\d|\.]*)\"
|
||||
FORMAT = _value::$1
|
||||
WRITE_META = true
|
||||
|
||||
# Metrics sent over http - dimensions
|
||||
[nmon_metrics_http_dims]
|
||||
REGEX = (dimension\_\w*)=\"([^\"]*)\"
|
||||
FORMAT = $1::$2
|
||||
WRITE_META = true
|
||||
|
||||
# Metrics sent over http - OStype
|
||||
[nmon_metrics_http_OStype]
|
||||
REGEX = OStype=\"([^\"]*)\"
|
||||
FORMAT = OStype::$1
|
||||
WRITE_META = true
|
||||
|
||||
# Metrics sent over http - serialnum
|
||||
[nmon_metrics_http_serialnum]
|
||||
REGEX = serialnum=\"([^\"]*)\"
|
||||
FORMAT = serialnum::$1
|
||||
WRITE_META = true
|
||||
|
||||
###########
|
||||
# nmon data
|
||||
###########
|
||||
|
||||
# Host override based on event data from the nmon_data sourcetype
|
||||
|
||||
[nmon_data_hostoverride]
|
||||
DEST_KEY = MetaData:Host
|
||||
REGEX = ^\"{0,1}[a-zA-Z0-9\_]+\"{0,1},\"{0,1}[a-zA-Z0-9\-\_\.]+\"{0,1},\"{0,1}([a-zA-Z0-9\-\_\.]+)\"{0,1},.+
|
||||
FORMAT = host::$1
|
||||
|
||||
# nmon data as events sent over http - host indexed field
|
||||
[nmon_data_http_host]
|
||||
DEST_KEY = MetaData:Host
|
||||
REGEX = hostname=\"([^\"]*)\"
|
||||
FORMAT = host::$1
|
||||
|
||||
# nmon data as events sent over http - OStype indexed field
|
||||
[nmon_data_http_OStype]
|
||||
REGEX = \sOStype=\"([^\"]*)\"
|
||||
WRITE_META = true
|
||||
FORMAT = OStype::$1
|
||||
DEFAULT_VALUE = NULL
|
||||
|
||||
# nmon data as events sent over http - type indexed field
|
||||
[nmon_data_http_type]
|
||||
REGEX = \stype=\"([^\"]*)\"
|
||||
WRITE_META = true
|
||||
FORMAT = type::$1
|
||||
DEFAULT_VALUE = NULL
|
||||
|
||||
# nmon data as events sent over http - rewrite sourcetype
|
||||
[nmon_data_http_sourcetype]
|
||||
DEST_KEY = MetaData:Sourcetype
|
||||
REGEX = .*
|
||||
FORMAT = sourcetype::nmon_data
|
||||
|
||||
#############
|
||||
# nmon config
|
||||
#############
|
||||
|
||||
# Host override based on event data from the nmon_config sourcetype
|
||||
|
||||
[nmon_config_hostoverride]
|
||||
DEST_KEY = MetaData:Host
|
||||
REGEX = CONFIG\,[a-zA-Z0-9\-\:\.]+\,([a-zA-Z0-9\-\_\.]+)\,[a-zA-Z0-9\-\_\.]+
|
||||
FORMAT = host::$1
|
||||
|
||||
# nmon_config sent over http
|
||||
|
||||
[nmon_config_http_rewrite_host]
|
||||
DEST_KEY = MetaData:Host
|
||||
REGEX = host=\"{0,}([a-zA-Z0-9\-\_\.]+)\"{0,}
|
||||
FORMAT = host::$1
|
||||
|
||||
# nmon_config source
|
||||
[nmon_config_http_rewrite_source]
|
||||
DEST_KEY = MetaData:Source
|
||||
REGEX = .*
|
||||
FORMAT = source::configdata:http
|
||||
|
||||
# nmon_config sourcetype
|
||||
[nmon_config_http_rewrite_sourcetype]
|
||||
DEST_KEY = MetaData:Sourcetype
|
||||
REGEX = .*
|
||||
FORMAT = sourcetype::nmon_config
|
||||
@ -0,0 +1,7 @@
|
||||
|
||||
# Application-level permissions
|
||||
|
||||
[]
|
||||
owner = admin
|
||||
access = read : [ * ], write : [ admin ]
|
||||
export = system
|
||||
@ -0,0 +1,208 @@
|
||||
{
|
||||
"version": "1.0",
|
||||
"date": "2022-11-14T21:09:51.967642588Z",
|
||||
"hashAlgorithm": "SHA-256",
|
||||
"app": {
|
||||
"id": 3948,
|
||||
"version": "1.1.1",
|
||||
"files": [
|
||||
{
|
||||
"path": "README.md",
|
||||
"hash": "8127050db09b70b701b8c6f7a2cd1e457753226b9d6338b69605260e6d8c7dae"
|
||||
},
|
||||
{
|
||||
"path": "license.txt",
|
||||
"hash": "7ddba183d8c539be99f03fe99499ddbc863cc688164bf3b5ea59d0d918a9e653"
|
||||
},
|
||||
{
|
||||
"path": "metadata/default.meta",
|
||||
"hash": "6b6c91fc18940aeb1580da6c06f92810beef8af632f73714070bae9e4a777af2"
|
||||
},
|
||||
{
|
||||
"path": "static/appLogo.png",
|
||||
"hash": "0736204483f4205c90d49c1f212e70d4e15c4d79aee19d43a0ab8247455118d1"
|
||||
},
|
||||
{
|
||||
"path": "static/appIcon.png",
|
||||
"hash": "e0611349e349b6cee55d123f85ed286a4ac2c0f1bbdbbedcbf230207bc2404ee"
|
||||
},
|
||||
{
|
||||
"path": "static/appIcon_2x.png",
|
||||
"hash": "5434fede7130f1bacc4d8e3ec48f2b3bd67367f55658b6d07d5b232b8f60f522"
|
||||
},
|
||||
{
|
||||
"path": "static/appIconAlt.png",
|
||||
"hash": "e0611349e349b6cee55d123f85ed286a4ac2c0f1bbdbbedcbf230207bc2404ee"
|
||||
},
|
||||
{
|
||||
"path": "static/appLogo_2x.png",
|
||||
"hash": "845f9bcdcd947b60e7c6d110f03debad96fee327b13a1bda2457788e069c350e"
|
||||
},
|
||||
{
|
||||
"path": "static/appIconAlt_2x.png",
|
||||
"hash": "5434fede7130f1bacc4d8e3ec48f2b3bd67367f55658b6d07d5b232b8f60f522"
|
||||
},
|
||||
{
|
||||
"path": "README/nmon.conf.spec",
|
||||
"hash": "b3dd40c58f5eece7d673d49e8093a5052584875dcf4363b9b70fd29db04bfe55"
|
||||
},
|
||||
{
|
||||
"path": "default/app.conf",
|
||||
"hash": "10094384fcb9ba79cf3f461e730b9898cd1a3b9b6bcdefbf4d552cca8f8d488f"
|
||||
},
|
||||
{
|
||||
"path": "default/nmon.conf",
|
||||
"hash": "9c85c002d60d6c063c8ffe3c97fa12d181b778cdc959ba6330676df0ac84044f"
|
||||
},
|
||||
{
|
||||
"path": "default/nmonparser_config.json",
|
||||
"hash": "9db81c4534b90ef9f4dd67dfe0417c7c9bd669d95703d0dd91e6f27b4ac27c6c"
|
||||
},
|
||||
{
|
||||
"path": "default/props.conf",
|
||||
"hash": "865bce58bea6b7ae664d6b1f4d95457dfb41ab34e4588af9efa2625ac36325eb"
|
||||
},
|
||||
{
|
||||
"path": "default/inputs.conf",
|
||||
"hash": "e8a2e6fcd7133d4806bbf9e37cc42eedaf9bbfe98117af0bd37eed56a9b08f19"
|
||||
},
|
||||
{
|
||||
"path": "default/transforms.conf",
|
||||
"hash": "c6c949b16af49f8720bd4d548019299a35575b1c967d5a853b6549f97aa0ff05"
|
||||
},
|
||||
{
|
||||
"path": "bin/sarmon_binaries_README",
|
||||
"hash": "ab79e66800d2291dd43dec243fb6df8abd0975dbc2183477656eb61ee95fe456"
|
||||
},
|
||||
{
|
||||
"path": "bin/sarmon_bin_i386.tgz",
|
||||
"hash": "d184fa41438ac42a974373abb4607b926a02ea0b9f64d6e7a15f13c0aad76062"
|
||||
},
|
||||
{
|
||||
"path": "bin/nmonparser.pl",
|
||||
"hash": "822880efea3b7171cea007435c84ac0a3edebba94306105808f5fa03c2e9d64e"
|
||||
},
|
||||
{
|
||||
"path": "bin/nmonparser.py",
|
||||
"hash": "df655ed77c023e8ea83f6a093ca2e10add7811219136b4649870339e1e289f13"
|
||||
},
|
||||
{
|
||||
"path": "bin/metricator_consumer.sh",
|
||||
"hash": "3799a163dadcbdc6fd39d0cbae977a83d6288fba0352305f83563e4920fc14eb"
|
||||
},
|
||||
{
|
||||
"path": "bin/metricator_reader.sh",
|
||||
"hash": "0aaefacadc71ebc964f9de2ea11195d8552f24f2b578249f7337d6a1e8003bc0"
|
||||
},
|
||||
{
|
||||
"path": "bin/metricator_cleaner.sh",
|
||||
"hash": "22ec467d62728d666d2a307a06fd7b5e84e082a15e649bf4c2556c0010c9f0f6"
|
||||
},
|
||||
{
|
||||
"path": "bin/metricator_helper.sh",
|
||||
"hash": "d2ac9ac67715f51bb00791365fdbf384923303d097a82276dfe58b1c1be58015"
|
||||
},
|
||||
{
|
||||
"path": "bin/linux.tgz",
|
||||
"hash": "376bc4ad7d197dca898ef06d03df631f5e8e6fe365e398bc2905d2146af6cc01"
|
||||
},
|
||||
{
|
||||
"path": "bin/README",
|
||||
"hash": "597cdad620bec4e52e0e8adc3cad99de9b3ce45da0dd18e4159e1009c976e957"
|
||||
},
|
||||
{
|
||||
"path": "bin/sarmon_bin_sparc.tgz",
|
||||
"hash": "73a2b9555ffdd62666063bbfa33520de4ccd68d4b5c63f08a365be5a5f2ea76f"
|
||||
},
|
||||
{
|
||||
"path": "bin/nmonparser.sh",
|
||||
"hash": "7d556425685551724fc98b28beff59ea0b390dd67d11a4c9f0088185e58b4790"
|
||||
},
|
||||
{
|
||||
"path": "bin/create_agent.py",
|
||||
"hash": "f194d6c56577e6ce3dfcfc3b9aa521cb932cf797311b864c4f324a70063dec40"
|
||||
},
|
||||
{
|
||||
"path": "bin/metricator_cleaner.py",
|
||||
"hash": "da4464b29e9eef55cbbcb7ea966dd160653f387bbe234ee91096112acd1ead10"
|
||||
},
|
||||
{
|
||||
"path": "bin/metricator_reader.py",
|
||||
"hash": "a58f160fe8eb8b3ad06d13c8e54846f5d1304c6728f598b1182ddac9f74a5d58"
|
||||
},
|
||||
{
|
||||
"path": "bin/metricator_reader.pl",
|
||||
"hash": "f17c4473722edfc492f99110f5ff41df4628c7371ed9ec9a9ea15ca3a59fb1bb"
|
||||
},
|
||||
{
|
||||
"path": "bin/linux_binaries_README",
|
||||
"hash": "1e5003fdf5a0a4b10dbdc5db1e9bc1960520e7ecd804a20f75ee6b299c043007"
|
||||
},
|
||||
{
|
||||
"path": "bin/metricator_cleaner.pl",
|
||||
"hash": "8218c16414e7f0e94feef0491627076738806db68c41141d65e2061623716108"
|
||||
},
|
||||
{
|
||||
"path": "bin/lib/aix/README",
|
||||
"hash": "5931eb43b5a106c340925b610ab2a9357d752440b0c9cb32ec3041c64a92ec3e"
|
||||
},
|
||||
{
|
||||
"path": "bin/lib/aix/Text/CSV_PP.pm",
|
||||
"hash": "05de57abb2780a6ae9788af4a56f351928d3f070d29e16ee3d0d1c8c20167317"
|
||||
},
|
||||
{
|
||||
"path": "bin/lib/aix/Text/Diff.pm",
|
||||
"hash": "a3c8d575cfcf07cd50848883103ba4606653818c76d19e2d68ff0e0734966fd8"
|
||||
},
|
||||
{
|
||||
"path": "bin/lib/aix/Text/CSV.pm",
|
||||
"hash": "4c5e21fa0718c2be432fa53f2c8440e725bfc94f0649539749559807eb133420"
|
||||
},
|
||||
{
|
||||
"path": "bin/lib/aix/Text/Diff/Config.pm",
|
||||
"hash": "09dac9cd5f3903c1224d8e901a73dca610e79e1243d246fdbca2775658476b63"
|
||||
},
|
||||
{
|
||||
"path": "bin/lib/aix/Text/Diff/Table.pm",
|
||||
"hash": "c1629847301d6cfaf766a99a64423736684d615f3c9b9cdccddf4988dee9816e"
|
||||
},
|
||||
{
|
||||
"path": "bin/nmon_external_cmd/nmon_external_snap.sh",
|
||||
"hash": "794971722e75aa462dbe47fa182617c81a197da1ed6fa89398c2d2ffacf8f7cb"
|
||||
},
|
||||
{
|
||||
"path": "bin/nmon_external_cmd/nmon_external_start.sh",
|
||||
"hash": "a396573e35d651c48e3227556e0b2f45096f2d5cc7784d96cf48f02aab3abe50"
|
||||
},
|
||||
{
|
||||
"path": "bin/pre_action_scripts/README",
|
||||
"hash": "04edfcdb1d92f92b6c2250da754f2a3b151ec151bd77e87db315aa3c7200c638"
|
||||
}
|
||||
]
|
||||
},
|
||||
"products": [
|
||||
{
|
||||
"platform": "splunk",
|
||||
"product": "enterprise",
|
||||
"versions": [
|
||||
"7.2",
|
||||
"7.3",
|
||||
"8.0",
|
||||
"8.1",
|
||||
"8.2",
|
||||
"9.0"
|
||||
],
|
||||
"architectures": [
|
||||
"x86_64"
|
||||
],
|
||||
"operatingSystems": [
|
||||
"windows",
|
||||
"linux",
|
||||
"macos",
|
||||
"freebsd",
|
||||
"solaris",
|
||||
"aix"
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
|
After Width: | Height: | Size: 3.5 KiB |
|
After Width: | Height: | Size: 3.5 KiB |
|
After Width: | Height: | Size: 4.3 KiB |
|
After Width: | Height: | Size: 4.3 KiB |
|
After Width: | Height: | Size: 1.3 KiB |
|
After Width: | Height: | Size: 2.6 KiB |
@ -0,0 +1,7 @@
|
||||
# Metricator for Nmon
|
||||
|
||||
Copyright 2017-2018 Octamis limited - Copyright 2017-2018 Guilhem Marchand
|
||||
|
||||
All rights reserved.
|
||||
|
||||
https://www.octamis.com/services/metricator
|
||||
@ -0,0 +1,2 @@
|
||||
[logging]
|
||||
loglevel =
|
||||
@ -0,0 +1,2 @@
|
||||
2.0.0
|
||||
2.0.0
|
||||
@ -0,0 +1,54 @@
|
||||
{
|
||||
"dependencies": null,
|
||||
"incompatibleApps": null,
|
||||
"info": {
|
||||
"author": [
|
||||
{
|
||||
"company": "Octamis",
|
||||
"email": "support@octamis.com",
|
||||
"name": "Octamis"
|
||||
}
|
||||
],
|
||||
"classification": {
|
||||
"categories": [
|
||||
"Unix",
|
||||
"Linux",
|
||||
"Performance",
|
||||
"Monitoring",
|
||||
"Capacity planning",
|
||||
"System administration",
|
||||
"Benchmarking"
|
||||
],
|
||||
"developmentStatus": "Production/Stable",
|
||||
"intendedAudience": "IT Professionals"
|
||||
},
|
||||
"commonInformationModels": null,
|
||||
"description": "Metricator for Nmon provides rich and efficient monitoring and capacity planning for Linux, IBM AIX and Oracle Solaris",
|
||||
"id": {
|
||||
"group": null,
|
||||
"name": "metricator-for-nmon",
|
||||
"version": "2.0.0"
|
||||
},
|
||||
"license": {
|
||||
"name": "Octamis",
|
||||
"text": "./license.txt",
|
||||
"uri": ""
|
||||
},
|
||||
"privacyPolicy": {
|
||||
"name": null,
|
||||
"text": null,
|
||||
"uri": null
|
||||
},
|
||||
"releaseDate": "6 September 2021",
|
||||
"releaseNotes": {
|
||||
"name": "version 2.0.0, please consult online documentation",
|
||||
"text": "./README.md",
|
||||
"uri": ""
|
||||
},
|
||||
"title": "Metricator for Nmon"
|
||||
},
|
||||
"inputGroups": null,
|
||||
"platformRequirements": null,
|
||||
"schemaVersion": "1.0.0",
|
||||
"tasks": null
|
||||
}
|
||||
@ -0,0 +1,20 @@
|
||||
.html h1 {
|
||||
color: royalblue;
|
||||
}
|
||||
|
||||
.html h2 {
|
||||
color: #29547f;
|
||||
}
|
||||
|
||||
/* margin left required for Firefox */
|
||||
|
||||
.list li {
|
||||
/*margin-left: 4px; */
|
||||
margin-left: 10px;
|
||||
padding-left: 10px;
|
||||
}
|
||||
|
||||
.list {
|
||||
display: inline-block;
|
||||
margin-right: 50px;
|
||||
}
|
||||
@ -0,0 +1,374 @@
|
||||
/* Logo subtitle */
|
||||
|
||||
.logosubtitle h2 {
|
||||
font-size: 16px;
|
||||
margin: 10px 0;
|
||||
font-weight: normal;
|
||||
color: darkslategrey;
|
||||
text-align: center;
|
||||
}
|
||||
|
||||
/* links color */
|
||||
a {
|
||||
color: #adbacd;
|
||||
}
|
||||
|
||||
.mainbutton_container {
|
||||
text-align: center;
|
||||
margin-top: 25px;
|
||||
margin-bottom: 50px;
|
||||
}
|
||||
|
||||
.mainbutton_container_nomargin {
|
||||
text-align: center;
|
||||
}
|
||||
|
||||
.mainbutton {
|
||||
display: inline-block;
|
||||
margin-left: 30px;
|
||||
margin-right: 30px;
|
||||
}
|
||||
|
||||
.mainbutton_highmargin {
|
||||
display: inline-block;
|
||||
margin-left: 90px;
|
||||
margin-right: 90px;
|
||||
}
|
||||
|
||||
/* main category icons and titles */
|
||||
|
||||
.imgheader {
|
||||
margin-top: 10px;
|
||||
margin-bottom: 40px;
|
||||
}
|
||||
|
||||
.imgheader img {
|
||||
float: left;
|
||||
width: 48px;
|
||||
height: 48px;
|
||||
}
|
||||
|
||||
.imgheader h1 {
|
||||
color: lightslategrey;
|
||||
text-align: left;
|
||||
position: relative;
|
||||
top: 9px;
|
||||
left: 10px;
|
||||
}
|
||||
|
||||
.imgheader h2 {
|
||||
position: relative;
|
||||
top: 18px;
|
||||
left: 10px;
|
||||
}
|
||||
|
||||
.imgminiheader img {
|
||||
position: relative;
|
||||
top: -4px;
|
||||
}
|
||||
|
||||
/* Hide the view title as an App Home Page */
|
||||
/* Now disabled
|
||||
|
||||
.dashboard-header h2 {
|
||||
visibility: hidden;
|
||||
}
|
||||
|
||||
*/
|
||||
|
||||
/* margin left required for Firefox */
|
||||
|
||||
.ui_list li {
|
||||
margin-left: 3px;
|
||||
}
|
||||
|
||||
.custom h1 {
|
||||
color: lightslategrey;
|
||||
text-align: center;
|
||||
}
|
||||
|
||||
.customleft h1 {
|
||||
color: lightslategrey;
|
||||
text-align: left;
|
||||
}
|
||||
|
||||
.customright h1 {
|
||||
color: lightslategrey;
|
||||
text-align: right;
|
||||
}
|
||||
|
||||
.custom img {
|
||||
text-align: center;
|
||||
}
|
||||
|
||||
/* Home Button */
|
||||
|
||||
.round-button {
|
||||
width: 3%;
|
||||
height: 0;
|
||||
padding-bottom: 3%;
|
||||
border-radius: 50%;
|
||||
border: 2px solid #f5f5f5;
|
||||
overflow: hidden;
|
||||
background: #464646;
|
||||
box-shadow: 0 0 3px gray;
|
||||
}
|
||||
.round-button:hover {
|
||||
background: #262626;
|
||||
}
|
||||
.round-button img {
|
||||
display: block;
|
||||
width: 76%;
|
||||
padding: 12%;
|
||||
height: auto;
|
||||
}
|
||||
|
||||
.cat_title {
|
||||
color: #5379af;
|
||||
margin-bottom: 10px;
|
||||
}
|
||||
|
||||
.bootstrap_title {
|
||||
color: #5379af;
|
||||
margin-bottom: 5px;
|
||||
margin-top: 5px;
|
||||
}
|
||||
|
||||
span:hover {
|
||||
position: relative;
|
||||
font-weight: bolder;
|
||||
}
|
||||
|
||||
/* Home page button links, inspired from http://www.w3schools.com */
|
||||
|
||||
a.tryitbtn,
|
||||
a.tryitbtn:link,
|
||||
a.tryitbtn:visited,
|
||||
a.showbtn,
|
||||
a.showbtn:link,
|
||||
a.showbtn:visited {
|
||||
display: inline-block;
|
||||
color: #469496;
|
||||
background-color: #1f1f1f;
|
||||
font-weight: bold;
|
||||
font-size: 12px;
|
||||
text-align: center;
|
||||
padding-left: 10px;
|
||||
padding-right: 10px;
|
||||
padding-top: 3px;
|
||||
padding-bottom: 4px;
|
||||
text-decoration: none;
|
||||
margin-left: 0;
|
||||
/* margin-left: 5px; */
|
||||
margin-right: 10px;
|
||||
margin-top: 0px;
|
||||
margin-bottom: 5px;
|
||||
border: 1px solid #aaaaaa;
|
||||
border: 1px solid #469496;
|
||||
border-radius: 5px;
|
||||
white-space: nowrap;
|
||||
min-width: 60px;
|
||||
}
|
||||
|
||||
a.tryitbtn:hover,
|
||||
a.tryitbtn:active,
|
||||
a.tryitbtn:hover,
|
||||
a.tryitbtn:active {
|
||||
background-color: #469496;
|
||||
color: #1f1f1f;
|
||||
}
|
||||
|
||||
a.tryitbtnxl,
|
||||
a.tryitbtnxl:link,
|
||||
a.tryitbtnxl:visited,
|
||||
a.showbtnxl,
|
||||
a.showbtnxl:link,
|
||||
a.showbtnxl:visited {
|
||||
display: inline-block;
|
||||
color: #469496;
|
||||
background-color: #1f1f1f;
|
||||
font-weight: bold;
|
||||
font-size: 12px;
|
||||
text-align: center;
|
||||
padding-left: 10px;
|
||||
padding-right: 10px;
|
||||
padding-top: 3px;
|
||||
padding-bottom: 4px;
|
||||
text-decoration: none;
|
||||
margin-left: 0;
|
||||
/* margin-left: 5px; */
|
||||
margin-right: 10px;
|
||||
margin-top: 5px;
|
||||
margin-bottom: 5px;
|
||||
border: 1px solid #aaaaaa;
|
||||
border: 1px solid #469496;
|
||||
border-radius: 5px;
|
||||
white-space: nowrap;
|
||||
min-width: 150px;
|
||||
}
|
||||
|
||||
a.tryitbtnxl:hover,
|
||||
a.tryitbtnxl:active,
|
||||
a.tryitbtnxl:hover,
|
||||
a.tryitbtnxl:active {
|
||||
background-color: #469496;
|
||||
color: #1f1f1f;
|
||||
}
|
||||
|
||||
a.tryitbtnxxl,
|
||||
a.tryitbtnxxl:link,
|
||||
a.tryitbtnxxl:visited,
|
||||
a.showbtnxxl,
|
||||
a.showbtnxxl:link,
|
||||
a.showbtnxxl:visited {
|
||||
display: inline-block;
|
||||
color: #469496;
|
||||
background-color: #1f1f1f;
|
||||
font-weight: bold;
|
||||
font-size: 12px;
|
||||
text-align: center;
|
||||
padding-left: 10px;
|
||||
padding-right: 10px;
|
||||
padding-top: 3px;
|
||||
padding-bottom: 4px;
|
||||
text-decoration: none;
|
||||
margin-left: 0;
|
||||
/* margin-left: 5px; */
|
||||
margin-right: 10px;
|
||||
margin-top: 10px;
|
||||
margin-bottom: 10px;
|
||||
border: 1px solid #aaaaaa;
|
||||
border: 1px solid #469496;
|
||||
border-radius: 5px;
|
||||
white-space: nowrap;
|
||||
min-width: 220px;
|
||||
}
|
||||
|
||||
a.tryitbtnxxl:hover,
|
||||
a.tryitbtnxxl:active,
|
||||
a.tryitbtnxxl:hover,
|
||||
a.tryitbtnxxl:active {
|
||||
background-color: #469496;
|
||||
color: #1f1f1f;
|
||||
}
|
||||
|
||||
a.tryitbtn-alt,
|
||||
a.tryitbtn-alt:link,
|
||||
a.tryitbtn-alt:visited,
|
||||
a.tryitbtn-alt,
|
||||
a.tryitbtn-alt:link,
|
||||
a.tryitbtn-alt:visited {
|
||||
display: inline-block;
|
||||
color: #c171b1;
|
||||
background-color: #1f1f1f;
|
||||
font-weight: bold;
|
||||
font-size: 12px;
|
||||
text-align: center;
|
||||
padding-left: 10px;
|
||||
padding-right: 10px;
|
||||
padding-top: 3px;
|
||||
padding-bottom: 4px;
|
||||
text-decoration: none;
|
||||
margin-left: 0;
|
||||
/* margin-left: 5px; */
|
||||
margin-right: 10px;
|
||||
margin-top: 0px;
|
||||
margin-bottom: 5px;
|
||||
border: 1px solid #aaaaaa;
|
||||
border: 1px solid #c171b1;
|
||||
border-radius: 5px;
|
||||
white-space: nowrap;
|
||||
min-width: 60px;
|
||||
}
|
||||
|
||||
a.tryitbtn-alt:hover,
|
||||
a.tryitbtn-alt:active,
|
||||
a.tryitbtn-alt:hover,
|
||||
a.tryitbtn-alt:active {
|
||||
background-color: #c171b1;
|
||||
color: #1f1f1f;
|
||||
}
|
||||
|
||||
a.rt {
|
||||
margin-left: 10px;
|
||||
}
|
||||
|
||||
a.rt {
|
||||
-webkit-transition: all 1s ease; /* Safari and Chrome */
|
||||
-moz-transition: all 1s ease; /* Firefox */
|
||||
-ms-transition: all 1s ease; /* IE 9 */
|
||||
-o-transition: all 1s ease; /* Opera */
|
||||
transition: all 1s ease;
|
||||
}
|
||||
|
||||
a.rt:hover img {
|
||||
-webkit-transform: scale(1.1); /* Safari and Chrome */
|
||||
-moz-transform: scale(1.1); /* Firefox */
|
||||
-ms-transform: scale(1.1); /* IE 9 */
|
||||
-o-transform: scale(1.1); /* Opera */
|
||||
transform: scale(1.1);
|
||||
}
|
||||
|
||||
/* Fix single label trouble with Splunk 6.3.0 */
|
||||
|
||||
.before-label {
|
||||
font-size: medium !important;
|
||||
}
|
||||
|
||||
.after-label {
|
||||
font-size: medium !important;
|
||||
}
|
||||
|
||||
.single-result-unit {
|
||||
font-size: medium !important;
|
||||
}
|
||||
|
||||
/* Prevents modal window from generating troubles within Splunk interfaces */
|
||||
.modal {
|
||||
display: none;
|
||||
}
|
||||
/* Custom Modals widths */
|
||||
|
||||
.modal[class^="custom-modal"] {
|
||||
left: 50%;
|
||||
border: 1px solid green;
|
||||
}
|
||||
|
||||
.custom-modal-30 {
|
||||
width: 30%;
|
||||
margin-left: -15%;
|
||||
}
|
||||
|
||||
.custom-modal-50 {
|
||||
width: 50%;
|
||||
margin-left: -25%;
|
||||
}
|
||||
|
||||
.custom-modal-60 {
|
||||
width: 60%;
|
||||
margin-left: -30%;
|
||||
}
|
||||
|
||||
.custom-modal-70 {
|
||||
width: 70%;
|
||||
margin-left: -35%;
|
||||
}
|
||||
|
||||
.custom-modal-80 {
|
||||
width: 80%;
|
||||
margin-left: -40%;
|
||||
}
|
||||
|
||||
.custom-modal-96 {
|
||||
width: 96%;
|
||||
margin-left: -48%;
|
||||
}
|
||||
|
||||
/* Panels title bar customization */
|
||||
.dashboard-row .dashboard-panel h2.panel-title {
|
||||
/* text-align: center; */
|
||||
font-size: 20px;
|
||||
font-weight: 500;
|
||||
background-color: #444444;
|
||||
padding: 5px 5px 5px 5px;
|
||||
}
|
||||
@ -0,0 +1,46 @@
|
||||
(function() {
|
||||
require([
|
||||
"underscore",
|
||||
"jquery",
|
||||
"splunkjs/mvc",
|
||||
"appUtils",
|
||||
"splunkjs/ready!",
|
||||
"splunkjs/mvc/simplexml/ready!",
|
||||
], function(_, $, mvc, appUtils) {
|
||||
|
||||
/////////////////////////////////////////
|
||||
/// Start Main Code Here
|
||||
/////////////////////////////////////////
|
||||
|
||||
var ref = appUtils.getTokenModels();
|
||||
var defaultTokenModel = ref[0];
|
||||
var submittedTokenModel = ref[1];
|
||||
|
||||
appUtils.checkEmptyTokenFocus("host1", appUtils.getToken("host1"));
|
||||
appUtils.checkEmptyTokenFocus("host2", appUtils.getToken("host2"));
|
||||
appUtils.checkEmptyTokenFocus("metric_name", appUtils.getToken("metric_name"));
|
||||
|
||||
defaultTokenModel.on("change:host1", function(model, value, options) {
|
||||
appUtils.checkEmptyTokenFocus("host1", value);
|
||||
if (typeof value !== 'undefined' && value.toString().trim() === "") {
|
||||
appUtils.setToken("form.host1", undefined, true);
|
||||
}
|
||||
});
|
||||
|
||||
defaultTokenModel.on("change:host2", function(model, value, options) {
|
||||
appUtils.checkEmptyTokenFocus("host2", value);
|
||||
if (typeof value !== 'undefined' && value.toString().trim() === "") {
|
||||
appUtils.setToken("form.host2", undefined, true);
|
||||
}
|
||||
});
|
||||
|
||||
defaultTokenModel.on("change:metric_name", function(model, value, options) {
|
||||
appUtils.checkEmptyTokenFocus("metric_name", value);
|
||||
if (typeof value !== 'undefined' && value.toString().trim() === "") {
|
||||
appUtils.setToken("form.metric_name", undefined, true);
|
||||
}
|
||||
});
|
||||
|
||||
appUtils.submitTokens();
|
||||
});
|
||||
}).call(this);
|
||||
@ -0,0 +1,38 @@
|
||||
(function() {
|
||||
require([
|
||||
"underscore",
|
||||
"jquery",
|
||||
"splunkjs/mvc",
|
||||
"appUtils",
|
||||
"splunkjs/ready!",
|
||||
"splunkjs/mvc/simplexml/ready!",
|
||||
], function(_, $, mvc, appUtils) {
|
||||
|
||||
/////////////////////////////////////////
|
||||
/// Start Main Code Here
|
||||
/////////////////////////////////////////
|
||||
|
||||
var ref = appUtils.getTokenModels();
|
||||
var defaultTokenModel = ref[0];
|
||||
var submittedTokenModel = ref[1];
|
||||
|
||||
appUtils.checkEmptyTokenFocus("metric_name", appUtils.getToken("metric_name"));
|
||||
appUtils.checkEmptyTokenFocus("host", appUtils.getToken("host"));
|
||||
|
||||
defaultTokenModel.on("change:metric_name", function(model, value, options) {
|
||||
appUtils.checkEmptyTokenFocus("metric_name", value);
|
||||
if (typeof value !== 'undefined' && value.toString().trim() === "") {
|
||||
appUtils.setToken("form.metric_name", undefined, true);
|
||||
}
|
||||
});
|
||||
|
||||
defaultTokenModel.on("change:host", function(model, value, options) {
|
||||
appUtils.checkEmptyTokenFocus("host", value);
|
||||
if (typeof value !== 'undefined' && value.toString().trim() === "") {
|
||||
appUtils.setToken("form.host", undefined, true);
|
||||
}
|
||||
});
|
||||
|
||||
appUtils.submitTokens();
|
||||
});
|
||||
}).call(this);
|
||||
|
After Width: | Height: | Size: 5.8 KiB |
@ -0,0 +1,74 @@
|
||||
require(['splunkjs/mvc/simplexml/ready!'], function(){
|
||||
require(['splunkjs/ready!', 'splunkjs/mvc'], function(mvc){
|
||||
|
||||
/*
|
||||
--------------------------------------------------------------
|
||||
Multi depends buttons - Written by François Toulouse, thanks !
|
||||
--------------------------------------------------------------
|
||||
|
||||
Usage: add an HTML Bootstrap button
|
||||
|
||||
<button class="btn" data-token-name="foo" data-token-value="1">Activate foo token</button>
|
||||
<button class="btn" data-token-name="bar" data-token-value="1">Activate bar token</button>
|
||||
|
||||
|
||||
*/
|
||||
|
||||
var defaultTokenModel = mvc.Components.getInstance('default', {create: true});
|
||||
var submittedTokenModel = mvc.Components.getInstance('submitted', {create: true});
|
||||
|
||||
function setToken(name, value) {
|
||||
defaultTokenModel.set(name, value);
|
||||
submittedTokenModel.set(name, value);
|
||||
}
|
||||
|
||||
function getToken(name) {
|
||||
var ret = null;
|
||||
|
||||
if(defaultTokenModel.get(name) != undefined){
|
||||
ret = defaultTokenModel.get(name);
|
||||
}
|
||||
else if(submittedTokenModel.get(name) != undefined){
|
||||
ret = submittedTokenModel.get(name);
|
||||
}
|
||||
|
||||
return ret;
|
||||
}
|
||||
|
||||
function unsetToken(name) {
|
||||
defaultTokenModel.unset(name);
|
||||
submittedTokenModel.unset(name);
|
||||
}
|
||||
|
||||
// For each button with the class "custom-sub-nav"
|
||||
$('.custom-sub-nav').each(function(){
|
||||
var $btn_group = $(this);
|
||||
|
||||
/* for each button in this nav:
|
||||
- Clicking the button: creates the token "data-token-name" with the attribute value "data-token-value"
|
||||
- If the button has already been clicked and the user clicks it again: removes the token "data-token-name"
|
||||
*/
|
||||
$btn_group.find('button').on('click', function(){
|
||||
var $btn = $(this);
|
||||
var btn_current_label = $btn.html();
|
||||
var btn_alt_label = $btn.attr('data-alt-label');
|
||||
var tk_name = $btn.attr('data-token-name');
|
||||
var tk_value = $btn.attr('data-token-value');
|
||||
|
||||
if( getToken(tk_name) == null){
|
||||
setToken(tk_name, tk_value);
|
||||
$btn.addClass('active');
|
||||
}
|
||||
else{
|
||||
unsetToken(tk_name);
|
||||
$btn.removeClass('active');
|
||||
}
|
||||
|
||||
// Manage button label
|
||||
$btn.html(btn_alt_label);
|
||||
$btn.attr('data-alt-label', btn_current_label);
|
||||
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
@ -0,0 +1,42 @@
|
||||
|
||||
/* blue color for current rendering */
|
||||
|
||||
td.data-bar-cell {
|
||||
padding: 4px 8px;
|
||||
}
|
||||
|
||||
td.data-bar-cell .data-bar-wrapper .data-bar {
|
||||
height: 16px;
|
||||
min-width: 1px;
|
||||
background-color: #5479AF;
|
||||
font-weight: bold;
|
||||
}
|
||||
|
||||
.data-bar-over { color: #FFFFFF; }
|
||||
.data-bar-under { color: #000000; }
|
||||
|
||||
.data-bar-wrapper {
|
||||
border-style: solid;
|
||||
border-width: 1px;
|
||||
}
|
||||
|
||||
/* red color for alerting */
|
||||
|
||||
td.red-data-bar-cell {
|
||||
padding: 4px 8px;
|
||||
}
|
||||
|
||||
td.red-data-bar-cell .red-data-bar-wrapper .red-data-bar {
|
||||
height: 16px;
|
||||
min-width: 1px;
|
||||
background-color: #CD5C5C;
|
||||
font-weight: bold;
|
||||
}
|
||||
|
||||
.red-data-bar-over { color: #FFFFFF; }
|
||||
.red-data-bar-under { color: #000000; }
|
||||
|
||||
.red-data-bar-wrapper {
|
||||
border-style: solid;
|
||||
border-width: 1px;
|
||||
}
|
||||
@ -0,0 +1,53 @@
|
||||
require([
|
||||
'jquery',
|
||||
'underscore',
|
||||
'splunkjs/mvc',
|
||||
'views/shared/results_table/renderers/BaseCellRenderer',
|
||||
'splunkjs/mvc/simplexml/ready!'
|
||||
], function($, _, mvc, BaseCellRenderer) {
|
||||
|
||||
// blue rendering for current
|
||||
|
||||
var DataBarCellRenderer = BaseCellRenderer.extend({
|
||||
canRender: function(cell) {
|
||||
return (cell.field === 'current_used_percent');
|
||||
},
|
||||
render: function($td, cell) {
|
||||
var pColor="data-bar-under"
|
||||
if(cell.value > 15){ pColor="data-bar-over" }
|
||||
$td.addClass('data-bar-cell').html(_.template('<div class="data-bar-wrapper"><div class="data-bar <%- pColor %>" style="width:<%- percent %>%"> <%- ppp %>%</div></div>', {
|
||||
percent: Math.min(Math.max(parseFloat(cell.value), 0), 100),
|
||||
ppp: parseFloat(cell.value).toFixed(2),
|
||||
pColor: pColor
|
||||
}));
|
||||
}
|
||||
});
|
||||
|
||||
mvc.Components.get('element_table_show_lookup_inventory').getVisualization(function(tableView) {
|
||||
tableView.table.addCellRenderer(new DataBarCellRenderer());
|
||||
tableView.table.render();
|
||||
});
|
||||
|
||||
// red rendering for alerting
|
||||
|
||||
var DataBarCellRenderer2 = BaseCellRenderer.extend({
|
||||
canRender: function(cell) {
|
||||
return (cell.field === 'max_used_percent_alert');
|
||||
},
|
||||
render: function($td, cell) {
|
||||
var pColor="red-data-bar-under"
|
||||
if(cell.value > 15){ pColor="red-data-bar-over" }
|
||||
$td.addClass('red-data-bar-cell').html(_.template('<div class="red-data-bar-wrapper"><div class="red-data-bar <%- pColor %>" style="width:<%- percent %>%"> <%- ppp %>%</div></div>', {
|
||||
percent: Math.min(Math.max(parseFloat(cell.value), 0), 100),
|
||||
ppp: parseFloat(cell.value).toFixed(2),
|
||||
pColor: pColor
|
||||
}));
|
||||
}
|
||||
});
|
||||
|
||||
mvc.Components.get('element_table_show_lookup_content').getVisualization(function(tableView) {
|
||||
tableView.table.addCellRenderer(new DataBarCellRenderer2());
|
||||
tableView.table.render();
|
||||
});
|
||||
|
||||
});
|
||||
@ -0,0 +1,21 @@
|
||||
|
||||
/* red color for alerting */
|
||||
|
||||
td.red-data-bar-cell {
|
||||
padding: 4px 8px;
|
||||
}
|
||||
|
||||
td.red-data-bar-cell .red-data-bar-wrapper .red-data-bar {
|
||||
height: 16px;
|
||||
min-width: 1px;
|
||||
background-color: #CD5C5C;
|
||||
font-weight: bold;
|
||||
}
|
||||
|
||||
.red-data-bar-over { color: #FFFFFF; }
|
||||
.red-data-bar-under { color: #000000; }
|
||||
|
||||
.red-data-bar-wrapper {
|
||||
border-style: solid;
|
||||
border-width: 1px;
|
||||
}
|
||||
@ -0,0 +1,71 @@
|
||||
require([
|
||||
'jquery',
|
||||
'underscore',
|
||||
'splunkjs/mvc',
|
||||
'views/shared/results_table/renderers/BaseCellRenderer',
|
||||
'splunkjs/mvc/simplexml/ready!'
|
||||
], function($, _, mvc, BaseCellRenderer) {
|
||||
|
||||
// red rendering for alerting
|
||||
|
||||
var DataBarCellRenderer1 = BaseCellRenderer.extend({
|
||||
canRender: function(cell) {
|
||||
return (cell.field === 'max_cpu_percent');
|
||||
},
|
||||
render: function($td, cell) {
|
||||
var pColor="red-data-bar-under"
|
||||
if(cell.value > 15){ pColor="red-data-bar-over" }
|
||||
$td.addClass('red-data-bar-cell').html(_.template('<div class="red-data-bar-wrapper"><div class="red-data-bar <%- pColor %>" style="width:<%- percent %>%"> <%- ppp %>%</div></div>', {
|
||||
percent: Math.min(Math.max(parseFloat(cell.value), 0), 100),
|
||||
ppp: parseFloat(cell.value).toFixed(2),
|
||||
pColor: pColor
|
||||
}));
|
||||
}
|
||||
});
|
||||
|
||||
mvc.Components.get('element_table_show_lookup_content').getVisualization(function(tableView) {
|
||||
tableView.table.addCellRenderer(new DataBarCellRenderer1());
|
||||
tableView.table.render();
|
||||
});
|
||||
|
||||
var DataBarCellRenderer2 = BaseCellRenderer.extend({
|
||||
canRender: function(cell) {
|
||||
return (cell.field === 'max_phy_percent');
|
||||
},
|
||||
render: function($td, cell) {
|
||||
var pColor="red-data-bar-under"
|
||||
if(cell.value > 15){ pColor="red-data-bar-over" }
|
||||
$td.addClass('red-data-bar-cell').html(_.template('<div class="red-data-bar-wrapper"><div class="red-data-bar <%- pColor %>" style="width:<%- percent %>%"> <%- ppp %>%</div></div>', {
|
||||
percent: Math.min(Math.max(parseFloat(cell.value), 0), 100),
|
||||
ppp: parseFloat(cell.value).toFixed(2),
|
||||
pColor: pColor
|
||||
}));
|
||||
}
|
||||
});
|
||||
|
||||
mvc.Components.get('element_table_show_lookup_content').getVisualization(function(tableView) {
|
||||
tableView.table.addCellRenderer(new DataBarCellRenderer2());
|
||||
tableView.table.render();
|
||||
});
|
||||
|
||||
var DataBarCellRenderer3 = BaseCellRenderer.extend({
|
||||
canRender: function(cell) {
|
||||
return (cell.field === 'max_vir_percent');
|
||||
},
|
||||
render: function($td, cell) {
|
||||
var pColor="red-data-bar-under"
|
||||
if(cell.value > 15){ pColor="red-data-bar-over" }
|
||||
$td.addClass('red-data-bar-cell').html(_.template('<div class="red-data-bar-wrapper"><div class="red-data-bar <%- pColor %>" style="width:<%- percent %>%"> <%- ppp %>%</div></div>', {
|
||||
percent: Math.min(Math.max(parseFloat(cell.value), 0), 100),
|
||||
ppp: parseFloat(cell.value).toFixed(2),
|
||||
pColor: pColor
|
||||
}));
|
||||
}
|
||||
});
|
||||
|
||||
mvc.Components.get('element_table_show_lookup_content').getVisualization(function(tableView) {
|
||||
tableView.table.addCellRenderer(new DataBarCellRenderer3());
|
||||
tableView.table.render();
|
||||
});
|
||||
|
||||
});
|
||||
@ -0,0 +1,31 @@
|
||||
require([
|
||||
'jquery',
|
||||
'underscore',
|
||||
'splunkjs/mvc',
|
||||
'views/shared/results_table/renderers/BaseCellRenderer',
|
||||
'splunkjs/mvc/simplexml/ready!'
|
||||
], function($, _, mvc, BaseCellRenderer) {
|
||||
|
||||
// red rendering for alerting
|
||||
|
||||
var DataBarCellRenderer1 = BaseCellRenderer.extend({
|
||||
canRender: function(cell) {
|
||||
return (cell.field === 'max_fs_percent');
|
||||
},
|
||||
render: function($td, cell) {
|
||||
var pColor="red-data-bar-under"
|
||||
if(cell.value > 15){ pColor="red-data-bar-over" }
|
||||
$td.addClass('red-data-bar-cell').html(_.template('<div class="red-data-bar-wrapper"><div class="red-data-bar <%- pColor %>" style="width:<%- percent %>%"> <%- ppp %>%</div></div>', {
|
||||
percent: Math.min(Math.max(parseFloat(cell.value), 0), 100),
|
||||
ppp: parseFloat(cell.value).toFixed(2),
|
||||
pColor: pColor
|
||||
}));
|
||||
}
|
||||
});
|
||||
|
||||
mvc.Components.get('element_table_show_lookup_content').getVisualization(function(tableView) {
|
||||
tableView.table.addCellRenderer(new DataBarCellRenderer1());
|
||||
tableView.table.render();
|
||||
});
|
||||
|
||||
});
|
||||
@ -0,0 +1,12 @@
|
||||
require.config({
|
||||
paths: {
|
||||
"app": "../app"
|
||||
}
|
||||
});
|
||||
|
||||
require(['splunkjs/mvc/simplexml/ready!'], function(){
|
||||
require(['splunkjs/ready!'], function(){
|
||||
// The splunkjs/ready loader script will automatically instantiate all elements
|
||||
// declared in the dashboard's HTML.
|
||||
});
|
||||
});
|
||||
@ -0,0 +1,10 @@
|
||||
{
|
||||
"name": "bubblechart",
|
||||
"version": "1.0.0",
|
||||
"main": "bubblechart.js",
|
||||
"ignore": [],
|
||||
"dependencies": {
|
||||
"d3": "3.3.x"
|
||||
},
|
||||
"devDependencies": {}
|
||||
}
|
||||
@ -0,0 +1,29 @@
|
||||
.splunk-toolkit-bubble-chart {
|
||||
font-family: arial;
|
||||
position: relative;
|
||||
height: 100%;
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
.splunk-toolkit-bubble-chart svg {
|
||||
display: block;
|
||||
margin: 0px auto;
|
||||
}
|
||||
.splunk-toolkit-bubble-chart g {
|
||||
display: block;
|
||||
margin: 0px auto;
|
||||
}
|
||||
|
||||
.bubble-chart-tooltip {
|
||||
position: absolute;
|
||||
background-color: #424242;
|
||||
border-radius: 3px 3px 3px 3px;
|
||||
padding: 7px;
|
||||
font-size: 1.0em;
|
||||
color: white;
|
||||
opacity:0;
|
||||
}
|
||||
|
||||
.node:hover{
|
||||
opacity: .7;
|
||||
}
|
||||
@ -0,0 +1,236 @@
|
||||
// Bubble Chart
|
||||
// this displays information as different 'bubbles', each unique value represented by
|
||||
// the size of the bubble.
|
||||
// supports drilldown clicks
|
||||
|
||||
// available settings:
|
||||
// - nameField: the field to use as the label on each bubble
|
||||
// - valueField: the field to use as the value of each bubble (also dictates size)
|
||||
// - categoryField: the field to use for grouping similar data (usually the same field as nameField)
|
||||
|
||||
// ---expected data format---
|
||||
// a splunk search like this: source=foo | stats count by artist_name, track_name
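// A minimal usage sketch (the component id, search manager name, module path and container
// element below are hypothetical - adapt them to your app and dashboard):
//
//   require(["jquery", "splunkjs/mvc", "app/<your_app>/components/bubblechart/bubblechart"],
//       function($, mvc, BubbleChart) {
//           new BubbleChart({
//               id: "example-bubbles",
//               managerid: "search1",
//               nameField: "track_name",
//               valueField: "count",
//               categoryField: "artist_name",
//               el: $("#example-bubbles")
//           }).render();
//       });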
|
||||
|
||||
define(function(require, exports, module) {
|
||||
|
||||
var _ = require('underscore');
|
||||
var d3 = require("../d3/d3");
|
||||
var SimpleSplunkView = require("splunkjs/mvc/simplesplunkview");
|
||||
|
||||
require("css!./bubblechart.css");
|
||||
|
||||
var BubbleChart = SimpleSplunkView.extend({
|
||||
|
||||
className: "splunk-toolkit-bubble-chart",
|
||||
|
||||
options: {
|
||||
managerid: null,
|
||||
data: "preview",
|
||||
nameField: null,
|
||||
valueField: 'count',
|
||||
categoryField: null
|
||||
},
|
||||
|
||||
output_mode: "json",
|
||||
|
||||
initialize: function() {
|
||||
_.extend(this.options, {
|
||||
formatName: _.identity,
|
||||
formatTitle: function(d) {
|
||||
return (d.source.name + ' -> ' + d.target.name +
|
||||
': ' + d.value);
|
||||
}
|
||||
});
|
||||
SimpleSplunkView.prototype.initialize.apply(this, arguments);
|
||||
|
||||
this.settings.enablePush("value");
|
||||
|
||||
// in the case that any options are changed, it will dynamically update
|
||||
// without having to refresh. copy the following line for whichever field
|
||||
// you'd like dynamic updating on
|
||||
this.settings.on("change:valueField", this.render, this);
|
||||
this.settings.on("change:nameField", this.render, this);
|
||||
this.settings.on("change:categoryField", this.render, this);
|
||||
|
||||
// Set up resize callback. The first argument is a this
|
||||
// pointer which gets passed into the callback event
|
||||
$(window).resize(this, _.debounce(this._handleResize, 20));
|
||||
},
|
||||
|
||||
_handleResize: function(e){
|
||||
|
||||
// e.data is the this pointer passed to the callback.
|
||||
// here it refers to this object and we call render()
|
||||
e.data.render();
|
||||
},
|
||||
|
||||
createView: function() {
|
||||
|
||||
// Here we set up the initial view layout
|
||||
var margin = {top: 0, right: 0, bottom: 0, left: 0};
|
||||
var availableWidth = parseInt(this.settings.get("width") || this.$el.width());
|
||||
var availableHeight = parseInt(this.settings.get("height") || this.$el.height());
|
||||
|
||||
this.$el.html("");
|
||||
|
||||
var svg = d3.select(this.el)
|
||||
.append("svg")
|
||||
.attr("width", availableWidth)
|
||||
.attr("height", availableHeight)
|
||||
.attr("pointer-events", "all");
|
||||
|
||||
var tooltip = d3.select(this.el).append("div")
|
||||
.attr("class", "bubble-chart-tooltip");
|
||||
|
||||
// The returned object gets passed to updateView as viz
|
||||
return { container: this.$el, svg: svg, margin: margin, tooltip: tooltip};
|
||||
},
|
||||
|
||||
// making the data look how we want it to for updateView to do its job
|
||||
formatData: function(data) {
|
||||
// getting settings
|
||||
var nameField = this.settings.get('nameField');
|
||||
var valueField = this.settings.get('valueField');
|
||||
var categoryField = this.settings.get('categoryField');
|
||||
var collection = data;
|
||||
var bubblechart = { 'name': nameField+"s", 'children': [ ] }; // how we want it to look
|
||||
|
||||
// making the children formatted array
|
||||
for (var i=0; i < collection.length; i++) {
|
||||
var Idx = -1;
|
||||
$.each(bubblechart.children, function(idx, el) {
|
||||
if (el.name == collection[i][categoryField]) {
|
||||
Idx = idx;
|
||||
}
|
||||
});
|
||||
if (Idx == -1) {
|
||||
bubblechart.children.push({ 'name': collection[i][categoryField], children: [ ] });
|
||||
Idx = bubblechart.children.length - 1;
|
||||
}
|
||||
|
||||
bubblechart.children[Idx].children.push({ 'name': collection[i][nameField], 'size': collection[i][valueField] || 1 });
|
||||
}
|
||||
return bubblechart; // this is passed into updateView as 'data'
|
||||
},
|
||||
|
||||
updateView: function(viz, data) {
|
||||
var that = this;
|
||||
|
||||
// Clear svg
|
||||
var svg = $(viz.svg[0]);
|
||||
svg.empty();
|
||||
|
||||
var tooltip = viz.tooltip;
|
||||
|
||||
// Add the graph group as a child of the main svg
|
||||
var graph = viz.svg
|
||||
.append("g")
|
||||
.attr("class", "bubble")
|
||||
.attr("transform", "translate(" + viz.margin.left + "," + viz.margin.top + ")");
|
||||
|
||||
// Set format and color
|
||||
var format = d3.format(",d");
|
||||
var color = d3.scale.category20c();
|
||||
|
||||
// We have two phases in layout. We tell the
|
||||
// d3 layout how much room it has, then set
|
||||
// the sizes of its containers to match
|
||||
// the size it returns.
|
||||
var containerHeight = this.$el.height();
|
||||
var containerWidth = this.$el.width();
|
||||
var diameter = Math.min(containerWidth, containerHeight);
|
||||
|
||||
// Tell the layout to layout
|
||||
var bubble = d3.layout.pack()
|
||||
.sort(null)
|
||||
.size([diameter, diameter])
|
||||
.padding(1.5);
|
||||
|
||||
// Set containers' sizes to match actual layout
|
||||
var width = bubble.size()[0];
|
||||
var height = bubble.size()[1];
|
||||
graph.attr("width", width)
|
||||
.attr("height", height);
|
||||
svg.height(height);
|
||||
svg.width(width);
|
||||
|
||||
var node = graph.selectAll(".node")
|
||||
.data(bubble.nodes(classes(data))
|
||||
.filter(function(d) { return !d.children; }))
|
||||
.enter().append("g")
|
||||
.attr("class", "node")
|
||||
.attr("transform", function(d) { return "translate(" + d.x + "," + d.y + ")"; });
|
||||
|
||||
// NOTE: this is taken out because we have a custom tooltip.
|
||||
// It may need to be put back for accessibility
|
||||
// node.append("title")
|
||||
// .text(function(d) { return d.className + ": " + format(d.value); });
|
||||
|
||||
node.append("circle")
|
||||
.attr("r", function(d) { return d.r; })
|
||||
.style("fill", function(d) { return color(d.packageName); });
|
||||
|
||||
node.append("text")
|
||||
.attr("dy", ".3em")
|
||||
.style("text-anchor", "middle")
|
||||
// ensure the text is truncated if the bubble is tiny
|
||||
.text(function(d) { return (d.className + " " + format(d.value)).substring(0, d.r / 3); });
|
||||
|
||||
// Re-flatten the child array
|
||||
function classes(data) {
|
||||
var classes = [];
|
||||
function recurse(name, node) {
|
||||
if (node.children)
|
||||
node.children.forEach(function(child) {
|
||||
recurse(node.name, child);
|
||||
});
|
||||
else
|
||||
classes.push({packageName: name || "", className: node.name || "", value: node.size});
|
||||
}
|
||||
|
||||
recurse(null, data);
|
||||
return {children: classes};
|
||||
}
|
||||
|
||||
// Tooltips
|
||||
function doMouseEnter(d){
|
||||
var text;
|
||||
if(d.className === undefined || d.className === ""){
|
||||
text = "Event: " + d.value;
|
||||
} else {
|
||||
text = d.className+": " + d.value;
|
||||
}
|
||||
tooltip
|
||||
.text(text)
|
||||
.style("opacity", function(){
|
||||
if(d.value !== undefined) { return 1; }
|
||||
return 0;
|
||||
})
|
||||
.style("left", (d3.mouse(that.el)[0]) + "px")
|
||||
.style("top", (d3.mouse(that.el)[1]) + "px");
|
||||
}
|
||||
|
||||
// More tooltips
|
||||
function doMouseOut(d){
|
||||
tooltip.style("opacity", 1e-6);
|
||||
}
|
||||
|
||||
node.on("mouseover", doMouseEnter);
|
||||
node.on("mouseout", doMouseOut);
|
||||
|
||||
// Drilldown clickings. edit this in order to change the search token that
|
||||
// is set to 'value' (a token in bubbles django), this will change the drilldown
|
||||
// search.
|
||||
node.on('click', function(e) {
|
||||
var clickEvent = {
|
||||
name: e.className,
|
||||
category: e.packageName,
|
||||
value: e.value
|
||||
};
|
||||
that.settings.set("value", e.className);
|
||||
that.trigger("click", clickEvent);
|
||||
});
|
||||
}
|
||||
});
|
||||
return BubbleChart;
|
||||
});
|
||||
@ -0,0 +1,10 @@
|
||||
{
|
||||
"name": "calendarheatmap",
|
||||
"version": "1.0.0",
|
||||
"main": "calendarheatmap.js",
|
||||
"ignore": [],
|
||||
"dependencies": {
|
||||
"d3": "3.3.x"
|
||||
},
|
||||
"devDependencies": {}
|
||||
}
|
||||
@ -0,0 +1,136 @@
|
||||
.splunk-toolkit-cal-heatmap {
|
||||
margin-left: 20px;
|
||||
margin-right: 20px;
|
||||
margin-top: 20px;
|
||||
}
|
||||
|
||||
.splunk-toolkit-cal-heatmap .heatmap-container {
|
||||
overflow: hidden;
|
||||
}
|
||||
|
||||
.splunk-toolkit-cal-heatmap .heatmap-container hr {
|
||||
margin-top: 0px;
|
||||
}
|
||||
|
||||
.splunk-toolkit-cal-heatmap .heatmap-series-title {
|
||||
margin-bottom: 5px;
|
||||
display: inline-block;
|
||||
}
|
||||
|
||||
.splunk-toolkit-cal-heatmap .heatmap-buttons {
|
||||
margin-bottom: 5px;
|
||||
float: right;
|
||||
display: inline;
|
||||
}
|
||||
|
||||
|
||||
/* Cal-HeatMap CSS */
|
||||
|
||||
|
||||
.splunk-toolkit-cal-heatmap .heatmap-container .graph
|
||||
{
|
||||
clear: both;
|
||||
display: block;
|
||||
font-family: "Lucida Grande", Lucida, Verdana, sans-serif;
|
||||
overflow: hidden;
|
||||
}
|
||||
|
||||
.splunk-toolkit-cal-heatmap .heatmap-container .graph-label
|
||||
{
|
||||
fill: #999;
|
||||
font-size: 10px
|
||||
}
|
||||
|
||||
.splunk-toolkit-cal-heatmap .heatmap-container .graph, .splunk-toolkit-cal-heatmap .heatmap-container .graph-legend rect {
|
||||
shape-rendering: crispedges
|
||||
}
|
||||
|
||||
.splunk-toolkit-cal-heatmap .heatmap-container .graph-rect
|
||||
{
|
||||
fill: #ededed;
|
||||
}
|
||||
|
||||
.splunk-toolkit-cal-heatmap .heatmap-container .graph rect:hover
|
||||
{
|
||||
stroke: #000;
|
||||
stroke-width: 1px
|
||||
}
|
||||
|
||||
.splunk-toolkit-cal-heatmap .heatmap-container .subdomain-text {
|
||||
font-size: 8px;
|
||||
fill: #999;
|
||||
pointer-events: none;
|
||||
}
|
||||
|
||||
.splunk-toolkit-cal-heatmap .heatmap-container .hover_cursor:hover {
|
||||
cursor: pointer;
|
||||
}
|
||||
|
||||
.splunk-toolkit-cal-heatmap .heatmap-container .qi {
|
||||
background-color: #999;
|
||||
fill: #999;
|
||||
}
|
||||
|
||||
.splunk-toolkit-cal-heatmap .heatmap-container .q0
|
||||
{
|
||||
background-color: #fff;
|
||||
fill: #fff;
|
||||
stroke: #ededed
|
||||
}
|
||||
|
||||
.splunk-toolkit-cal-heatmap .heatmap-container .q1
|
||||
{
|
||||
background-color: #89dae2 !important;
|
||||
fill: #89dae2 !important;
|
||||
}
|
||||
|
||||
.splunk-toolkit-cal-heatmap .heatmap-container .q2
|
||||
{
|
||||
background-color: #9ccedb !important;
|
||||
fill: #699cc0 !important;
|
||||
}
|
||||
|
||||
.splunk-toolkit-cal-heatmap .heatmap-container .q3
|
||||
{
|
||||
background-color: #6bb5cf !important;
|
||||
fill: #45669d !important;
|
||||
}
|
||||
|
||||
.splunk-toolkit-cal-heatmap .heatmap-container .q4
|
||||
{
|
||||
background-color: #396379 !important;
|
||||
fill: #396379 !important;
|
||||
}
|
||||
|
||||
.splunk-toolkit-cal-heatmap .heatmap-container .q5
|
||||
{
|
||||
background-color: #273b64 !important;
|
||||
fill: #273b64 !important;
|
||||
}
|
||||
|
||||
.splunk-toolkit-cal-heatmap .heatmap-container rect.highlight
|
||||
{
|
||||
stroke:#444;
|
||||
stroke-width:1;
|
||||
}
|
||||
|
||||
.splunk-toolkit-cal-heatmap .heatmap-container text.highlight
|
||||
{
|
||||
fill: #444;
|
||||
}
|
||||
|
||||
.splunk-toolkit-cal-heatmap .heatmap-container rect.now
|
||||
{
|
||||
stroke: white !important;
|
||||
}
|
||||
|
||||
.splunk-toolkit-cal-heatmap .heatmap-container text.now
|
||||
{
|
||||
fill: white !important;
|
||||
font-weight: 800;
|
||||
}
|
||||
|
||||
.splunk-toolkit-cal-heatmap .heatmap-container .domain-background {
|
||||
fill: none;
|
||||
shape-rendering: crispedges;
|
||||
}
|
||||
@ -0,0 +1,263 @@
|
||||
|
||||
|
||||
// calheat!
|
||||
// shows a cool looking heatmap based on different time signatures
|
||||
// requires a timechart search. it dynamically guesses how to set up the
|
||||
// way to show the time, but you can define any settings you want in the html
|
||||
// docs: http://kamisama.github.io/cal-heatmap
|
||||
|
||||
// ---settings---
|
||||
|
||||
// domain: (hour, day, week, month, year)
|
||||
// subDomain: (min, x_min, hour, x_hour, day, x_day, week, x_week, month, x_month)
|
||||
// -- x_ variants are used to rotate the reading order to left to right, then top to bottom.
|
||||
// start: set to 'current' for current time or 'earliest' for your earliest data point
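// A minimal usage sketch (ids, module path and search name are hypothetical - adapt them to
// your app and dashboard; the search is expected to be a timechart, as noted above):
//
//   require(["jquery", "splunkjs/mvc", "app/<your_app>/components/calendarheatmap/calendarheatmap"],
//       function($, mvc, CalendarHeatMap) {
//           new CalendarHeatMap({
//               id: "example-calheat",
//               managerid: "search1",
//               domain: "day",
//               subDomain: "hour",
//               el: $("#example-calheat")
//           }).render();
//       });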
|
||||
|
||||
// TODO:
|
||||
// add a setting for each option at http://kamisama.github.io/cal-heatmap/#options
|
||||
// rather than using the JS method in the HTML like i'm doing now.
|
||||
|
||||
|
||||
|
||||
// the data is expected in this format after formatData (epoch time: event count):
|
||||
// {
|
||||
// "timestamps":[
|
||||
// {
|
||||
// "1378225500":"8",
|
||||
// "1378225560":"8",
|
||||
// "1378225620":"8",
|
||||
// },
|
||||
// {
|
||||
// "1378230300":"4",
|
||||
// "1378230360":"4",
|
||||
// "1378230660":"2"
|
||||
// },
|
||||
// {
|
||||
// "1378225500":"7",
|
||||
// "1378225560":"7",
|
||||
// },
|
||||
// {
|
||||
// "1378225500":"6",
|
||||
// "1378225560":"6",
|
||||
// "1378225620":"7",
|
||||
// },
|
||||
// {
|
||||
// "1378225500":"41",
|
||||
// "1378225560":"41",
|
||||
// },
|
||||
// {
|
||||
// "1378225500":"22",
|
||||
// "1378225560":"22",
|
||||
// }
|
||||
// ],
|
||||
|
||||
// -- we add this part onto the actual data --
|
||||
|
||||
// "start":"2013-09-03T16:25:00.000Z",
|
||||
// "domain":"hour",
|
||||
// "subDomain":"min"
|
||||
// }
|
||||
|
||||
define(function(require, exports, module) {
|
||||
|
||||
var _ = require('underscore');
|
||||
var SimpleSplunkView = require("splunkjs/mvc/simplesplunkview");
|
||||
var d3 = require("../d3/d3");
|
||||
var CalHeatMap = require("./contrib/cal-heatmap");
|
||||
|
||||
require("css!./calendarheatmap.css");
|
||||
|
||||
var CalendarHeatMap = SimpleSplunkView.extend({
|
||||
moduleId: module.id,
|
||||
|
||||
className: "splunk-toolkit-cal-heatmap",
|
||||
|
||||
heatmapOptionNames: [
|
||||
'cellRadius', 'domainMargin', 'maxDate', 'dataType',
|
||||
'considerMissingDataAsZero', 'verticalOrientation',
|
||||
'domainDynamicDimension', 'label', 'legendCellSize',
|
||||
'legendCellPadding', 'legendMargin', 'legendVerticalPosition',
|
||||
'legendHorizontalPosition', 'domainLabelFormat',
|
||||
'subDomainDateFormat', 'subDomainTextFormat', 'nextSelector',
|
||||
'previousSelector', 'itemNamespace', 'onMaxDomainReached',
|
||||
'onMinDomainReached', 'width', 'height'],
|
||||
|
||||
options: {
|
||||
managerid: "search1", // your MANAGER ID
|
||||
data: "preview", // Results type
|
||||
domain: 'hour', // the largest unit it will differentiate by in squares
|
||||
subDomain: 'min', // the smaller unit the calheat goes off of
|
||||
uID: null,
|
||||
range: 4
|
||||
},
|
||||
|
||||
validDomains: {
|
||||
'min': ['hour'],
|
||||
'hour': ['day', 'week'],
|
||||
'day': ['week', 'month', 'year'],
|
||||
'week': ['month', 'year'],
|
||||
'month': ['year']
|
||||
},
|
||||
|
||||
output_mode: "json_rows",
|
||||
|
||||
initialize: function() {
|
||||
var that = this;
|
||||
SimpleSplunkView.prototype.initialize.apply(this, arguments);
|
||||
this.settings.enablePush("value");
|
||||
// whenever domain or subDomain are changed, we will re-render.
|
||||
this.settings.on("change:domain", this.onDomainChange, this);
|
||||
this.settings.on("change:subDomain", this.onDomainChange, this);
|
||||
this.settings.on("change", this._onSettingsChange, this);
|
||||
var uniqueID=Math.floor(Math.random()*1000001);
|
||||
this.settings.set("uID", uniqueID);
|
||||
},
|
||||
|
||||
onDomainChange: function() {
|
||||
var dom = this.settings.get('domain');
|
||||
var sd = this.settings.get('subDomain');
|
||||
|
||||
// Knock off the "x_" prefix because it doesn't matter here
|
||||
var sdShort = sd.replace("x_", "");
|
||||
|
||||
// If the current domain is valid for this subdomain
|
||||
if (_.contains(this.validDomains[sdShort], dom)){
|
||||
this.render();
|
||||
}
|
||||
else{
|
||||
console.log(sd + " is an invalid subDomain for " + dom);
|
||||
}
|
||||
},
|
||||
|
||||
_onSettingsChange: function(changed) {
|
||||
// Route heatmap visualization changes to the renderer
|
||||
if ((_.intersection(_.keys(changed.changed), this.heatmapOptionNames)).length > 0) {
|
||||
this.render();
|
||||
}
|
||||
},
|
||||
|
||||
createView: function() {
|
||||
return true;
|
||||
},
|
||||
|
||||
// making the data look how we want it to for updateView to do its job
|
||||
// in this case, it looks like this:
|
||||
// {timestamp1: count, timestamp2: count, ... }
|
||||
formatData: function(data) {
|
||||
var rawFields = this.resultsModel.data().fields;
|
||||
var domain = this.settings.get('domain');
|
||||
var subDomain = this.settings.get('subDomain');
|
||||
|
||||
var filteredFields = _.filter(rawFields, function(d){ return d[0] !== "_"; });
|
||||
var objects = _.map(data, function(row) {
|
||||
return _.object(rawFields, row);
|
||||
});
|
||||
|
||||
var series = [];
|
||||
for(var i = 0; i < filteredFields.length; i++) {
|
||||
series.push({ name: filteredFields[i], timestamps: {}, min: Number.POSITIVE_INFINITY, max: Number.NEGATIVE_INFINITY });
|
||||
}
|
||||
|
||||
_.each(objects, function(object) {
|
||||
// Get the timestamp for this object
|
||||
var time = new Date(object['_time']);
|
||||
var timeValue = time.valueOf() / 1000;
|
||||
|
||||
// For each actual value, store it in the timestamp object
|
||||
_.each(filteredFields, function(field, i) {
|
||||
var value = object[field];
|
||||
series[i].timestamps[timeValue] = parseInt(value, 10) || 0;
|
||||
series[i].min = Math.min(series[i].min, value);
|
||||
series[i].max = Math.max(series[i].max, value);
|
||||
});
|
||||
});
|
||||
|
||||
_.each(series, function(serie) {
|
||||
|
||||
});
|
||||
|
||||
return {
|
||||
series: series,
|
||||
domain: domain,
|
||||
subDomain: subDomain,
|
||||
start: new Date(objects[0]['_time']),
|
||||
min: new Date(objects[0]['_time']),
|
||||
max: new Date(objects[objects.length - 1]['_time'])
|
||||
};
|
||||
},
|
||||
|
||||
updateView: function(viz, data) {
|
||||
var that = this;
|
||||
// Options that can be set externally after instantiation
|
||||
// that affect the display. Ensure that any "empty" values
|
||||
// are set to null (use default). Some controls hand back
|
||||
// empty strings, which result in nothing being shown.
|
||||
// Not what is wanted.
|
||||
|
||||
var vizOptions = _.chain(this.settings.toJSON())
|
||||
.pairs()
|
||||
.filter(function(kv) { return _.contains(that.heatmapOptionNames, kv[0]); })
|
||||
.filter(function(kv) { return ! (_.isNull(kv[1]) || _.isUndefined(kv[1])) || (kv[1] !== ""); })
|
||||
.object()
|
||||
.value();
|
||||
|
||||
this.$el.html('');
|
||||
_.each(data.series, function(series, idx) {
|
||||
var scale = d3.scale.quantile()
|
||||
.domain([series.min, series.max])
|
||||
.range([0,1,2,3,4]);
|
||||
var legend = _.map(scale.quantiles(), function(x) { return Math.round(x); });
|
||||
|
||||
var title = series.name;
|
||||
|
||||
var $el = $("<div class='heatmap-container'/>").appendTo(that.el);
|
||||
var $title = $("<h4 class='heatmap-series-title'>Heatmap for: " + series.name + "</h4>").appendTo($el);
|
||||
var $buttons = $("<div class='heatmap-buttons'/>").appendTo($el);
|
||||
var $prev = $("<a class='heatmap-prev btn-pill icon-triangle-left'></a>").appendTo($buttons);
|
||||
var $next = $("<a class='heatmap-next btn-pill icon-triangle-right'></a>").appendTo($buttons);
|
||||
var options = _.extend({
|
||||
itemSelector: $el[0],
|
||||
previousSelector: $prev[0],
|
||||
nextSelector: $next[0],
|
||||
data: series.timestamps,
|
||||
domain: data.domain,
|
||||
subDomain: data.subDomain,
|
||||
start: data.start,
|
||||
range: 4,
|
||||
cellSize: 20,
|
||||
cellPadding: 3,
|
||||
domainGutter: 10,
|
||||
highlight: ['now', new Date()],
|
||||
legend: legend,
|
||||
legendMargin: [0, 0, 20, 0],
|
||||
legendCellSize: 14,
|
||||
minDate: data.min,
|
||||
maxDate: data.max,
|
||||
onMinDomainReached: function(hit) {
|
||||
$prev.attr("disabled", hit ? "disabled" : false);
|
||||
},
|
||||
onMaxDomainReached: function(hit) {
|
||||
$next.attr("disabled", hit ? "disabled" : false);
|
||||
},
|
||||
onClick: function(date, value, title) {
|
||||
that.trigger('click', {
|
||||
date: date,
|
||||
value: value,
|
||||
series: series.name
|
||||
});
|
||||
that.settings.set('value', date.valueOf());
|
||||
}
|
||||
}, vizOptions);
|
||||
|
||||
var cal = new CalHeatMap();
|
||||
cal.init(options); // create the calendar using either default or user-defined options
|
||||
|
||||
if (idx < data.series.length - 1) {
|
||||
$("<hr/>").appendTo($el);
|
||||
}
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
return CalendarHeatMap;
|
||||
});
|
||||
@ -0,0 +1,22 @@
|
||||
Copyright (c) 2012 Tyler Kellen, contributors
|
||||
|
||||
Permission is hereby granted, free of charge, to any person
|
||||
obtaining a copy of this software and associated documentation
|
||||
files (the "Software"), to deal in the Software without
|
||||
restriction, including without limitation the rights to use,
|
||||
copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the
|
||||
Software is furnished to do so, subject to the following
|
||||
conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be
|
||||
included in all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
||||
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
|
||||
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
||||
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
|
||||
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
|
||||
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
||||
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
|
||||
OTHER DEALINGS IN THE SOFTWARE.
|
||||
@ -0,0 +1,110 @@
|
||||
/* Cal-HeatMap CSS */
|
||||
|
||||
|
||||
.graph
|
||||
{
|
||||
clear: both;
|
||||
display: block;
|
||||
font-family: "Lucida Grande", Lucida, Verdana, sans-serif;
|
||||
overflow: hidden;
|
||||
}
|
||||
|
||||
.graph-label
|
||||
{
|
||||
fill: #999;
|
||||
font-size: 10px
|
||||
}
|
||||
|
||||
.graph, .graph-legend rect {
|
||||
shape-rendering: crispedges
|
||||
}
|
||||
|
||||
.graph-rect
|
||||
{
|
||||
fill: #ededed;
|
||||
}
|
||||
|
||||
.graph rect:hover
|
||||
{
|
||||
stroke: #000;
|
||||
stroke-width: 1px
|
||||
}
|
||||
|
||||
.subdomain-text {
|
||||
font-size: 8px;
|
||||
fill: #999;
|
||||
pointer-events: none;
|
||||
}
|
||||
|
||||
.hover_cursor:hover {
|
||||
cursor: pointer;
|
||||
}
|
||||
|
||||
.qi {
|
||||
background-color: #999;
|
||||
fill: #999;
|
||||
}
|
||||
|
||||
.q0
|
||||
{
|
||||
background-color: #fff;
|
||||
fill: #fff;
|
||||
stroke: #ededed
|
||||
}
|
||||
|
||||
.q1
|
||||
{
|
||||
background-color: #dae289;
|
||||
fill: #dae289
|
||||
}
|
||||
|
||||
.q2
|
||||
{
|
||||
background-color: #cedb9c;
|
||||
fill: #9cc069
|
||||
}
|
||||
|
||||
.q3
|
||||
{
|
||||
background-color: #b5cf6b;
|
||||
fill: #669d45
|
||||
}
|
||||
|
||||
.q4
|
||||
{
|
||||
background-color: #637939;
|
||||
fill: #637939
|
||||
}
|
||||
|
||||
.q5
|
||||
{
|
||||
background-color: #3b6427;
|
||||
fill: #3b6427
|
||||
}
|
||||
|
||||
rect.highlight
|
||||
{
|
||||
stroke:#444;
|
||||
stroke-width:1;
|
||||
}
|
||||
|
||||
text.highlight
|
||||
{
|
||||
fill: #444;
|
||||
}
|
||||
|
||||
rect.now
|
||||
{
|
||||
stroke: red;
|
||||
}
|
||||
|
||||
text.now
|
||||
{
|
||||
fill: red;
|
||||
font-weight: 800;
|
||||
}
|
||||
|
||||
.domain-background {
|
||||
fill: none;
|
||||
shape-rendering: crispedges;
|
||||
}
|
||||
@ -0,0 +1,26 @@
|
||||
Copyright (c) 2013, Michael Bostock
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are met:
|
||||
|
||||
* Redistributions of source code must retain the above copyright notice, this
|
||||
list of conditions and the following disclaimer.
|
||||
|
||||
* Redistributions in binary form must reproduce the above copyright notice,
|
||||
this list of conditions and the following disclaimer in the documentation
|
||||
and/or other materials provided with the distribution.
|
||||
|
||||
* The name Michael Bostock may not be used to endorse or promote products
|
||||
derived from this software without specific prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
||||
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
DISCLAIMED. IN NO EVENT SHALL MICHAEL BOSTOCK BE LIABLE FOR ANY DIRECT,
|
||||
INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
|
||||
BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
|
||||
OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
|
||||
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
|
||||
EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
@ -0,0 +1,8 @@
|
||||
{
|
||||
"name": "d3",
|
||||
"version": "3.3.5",
|
||||
"main": "d3.js",
|
||||
"ignore": [],
|
||||
"dependencies": {},
|
||||
"devDependencies": {}
|
||||
}
|
||||
@ -0,0 +1,13 @@
|
||||
.node circle {
|
||||
stroke-width: 1.5px;
|
||||
}
|
||||
|
||||
.node text {
|
||||
font-size: 12px;
|
||||
}
|
||||
|
||||
path.link {
|
||||
fill: none;
|
||||
stroke: #ccc;
|
||||
stroke-width: 1.5px;
|
||||
}
|
||||
@ -0,0 +1,361 @@
|
||||
// Cluster Dendrogram D3.js code taken and modified from http://bl.ocks.org/mbostock/4063570 by Mike Bostock
|
||||
|
||||
define(function (require, exports, module) {
|
||||
var d3 = require("../d3/d3.layout");
|
||||
var SimpleSplunkView = require("splunkjs/mvc/simplesplunkview");
|
||||
var _ = require("underscore");
|
||||
require("css!./dendrogram.css");
|
||||
|
||||
var Dendrogram = SimpleSplunkView.extend({
|
||||
className: "splunk-toolkit-dendrogram",
|
||||
options: {
|
||||
managerid: null,
|
||||
data: "preview",
|
||||
root_label: "root_label not set",
|
||||
height: "auto",
|
||||
node_outline_color: "#509DDD",
|
||||
node_close_color: "#e7969c",
|
||||
node_open_color: "#ffffff",
|
||||
label_size_color: "#509DDD",
|
||||
label_count_color: "#1f77b4",
|
||||
has_size: true,
|
||||
initial_open_level: 1,
|
||||
margin_left: 100,
|
||||
margin_right: 400,
|
||||
},
|
||||
output_mode: "json_rows",
|
||||
initialize: function () {
|
||||
_(this.options).extend({
|
||||
height_px: 500,
|
||||
width_px: 2000,
|
||||
});
|
||||
|
||||
SimpleSplunkView.prototype.initialize.apply(this, arguments);
|
||||
|
||||
this.settings.on("change:order", this.render, this);
|
||||
|
||||
$(window).resize(this, _.debounce(this._handleResize, 20));
|
||||
},
|
||||
_handleResize: function (e) {
|
||||
// e.data is the this pointer passed to the callback.
|
||||
// here it refers to this object and we call render()
|
||||
e.data.render();
|
||||
},
|
||||
createView: function () {
|
||||
return true;
|
||||
},
|
||||
// Making the data look how we want it to for updateView to do its job
|
||||
formatData: function (data) {
|
||||
var height = this.settings.get("height");
|
||||
var height_px = this.settings.get("height_px");
|
||||
var width = this.settings.get("width");
|
||||
var width_px = this.settings.get("width_px");
|
||||
var root_label = this.settings.get("root_label");
|
||||
var has_size = this.settings.get("has_size");
|
||||
|
||||
this.settings.set(
|
||||
"height_px",
|
||||
height === "auto" ? Math.max(data.length * 30, height_px) : height
|
||||
);
|
||||
|
||||
data = _(data).map(function (row) {
|
||||
return _(row).map(function (item, i) {
|
||||
// Convert the string value to number
|
||||
return has_size && i + 1 === row.length ? parseFloat(item) : item;
|
||||
});
|
||||
});
|
||||
|
||||
var get_sum = function (list) {
|
||||
return _(list)
|
||||
.pluck(list[0].length - 1)
|
||||
.reduce(function (memo, num) {
|
||||
return memo + num;
|
||||
}, 0);
|
||||
};
|
||||
|
||||
var nest = function (list) {
|
||||
var groups = _(list).groupBy(0);
|
||||
|
||||
return _(groups).map(function (value, key) {
|
||||
var children = _(value)
|
||||
.chain()
|
||||
.map(function (v) {
|
||||
return _(v).rest();
|
||||
})
|
||||
.compact()
|
||||
.value();
|
||||
|
||||
if (has_size) {
|
||||
var sum = get_sum(children);
|
||||
var count = children.length;
|
||||
|
||||
return children.length == 1 && children[0].length === 1
|
||||
? { name: key, size: children[0][0] }
|
||||
: { name: key, sum: sum, count: count, children: nest(children) };
|
||||
} else {
|
||||
return children.length == 1 && children[0].length === 0
|
||||
? { name: key }
|
||||
: { name: key, children: nest(children) };
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
var formatted_data = {
|
||||
name: root_label,
|
||||
children: nest(data),
|
||||
};
|
||||
|
||||
if (has_size) {
|
||||
_(formatted_data).extend({
|
||||
sum: get_sum(data),
|
||||
count: data.length,
|
||||
});
|
||||
}
|
||||
|
||||
return formatted_data;
|
||||
},
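// A small worked example (hypothetical search and values), assuming has_size
// is true and the rows come from something like "... | stats count by host process":
//
//   rows:    [["hostA", "proc1", 3], ["hostA", "proc2", 5], ["hostB", "proc1", 2]]
//   result:  { name: <root_label>, sum: 10, count: 3, children: [
//              { name: "hostA", sum: 8, count: 2, children: [
//                { name: "proc1", size: 3 }, { name: "proc2", size: 5 } ] },
//              { name: "hostB", sum: 2, count: 1, children: [
//                { name: "proc1", size: 2 } ] } ] }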
|
||||
updateView: function (viz, data) {
|
||||
this.$el.html("");
|
||||
|
||||
//this.$el.append('<button id="open_all">Open all</button>');
|
||||
//this.$el.append('<button id="close_all">Close all</button>');
|
||||
|
||||
//$("#open_all").on("click", function() {
|
||||
// $("g.node_close").click();
|
||||
//});
|
||||
|
||||
//$("#close_all").on("click", function() {
|
||||
// $("g.node_open").click();
|
||||
//});
|
||||
|
||||
var has_size = this.settings.get("has_size");
|
||||
|
||||
var node_outline_color = this.settings.get("node_outline_color");
|
||||
var node_close_color = this.settings.get("node_close_color");
|
||||
var node_open_color = this.settings.get("node_open_color");
|
||||
var label_size_color = this.settings.get("label_size_color");
|
||||
var label_count_color = this.settings.get("label_count_color");
|
||||
|
||||
var width = this.settings.get("width_px");
|
||||
var height = this.settings.get("height_px");
|
||||
|
||||
var m = [
|
||||
20,
|
||||
this.settings.get("margin_right"),
|
||||
20,
|
||||
this.settings.get("margin_left"),
|
||||
],
|
||||
w = width - m[1] - m[3],
|
||||
h = height - m[0] - m[2],
|
||||
i = 0;
|
||||
|
||||
var tree = d3.layout.tree().size([h, w]);
|
||||
|
||||
var diagonal = d3.svg.diagonal().projection(function (d) {
|
||||
return [d.y, d.x];
|
||||
});
|
||||
|
||||
var vis = d3
|
||||
.select(this.el)
|
||||
.append("svg:svg")
|
||||
.attr("width", w + m[1] + m[3])
|
||||
.attr("height", h + m[0] + m[2])
|
||||
.append("svg:g")
|
||||
.attr("transform", "translate(" + m[3] + "," + m[0] + ")");
|
||||
|
||||
data.x0 = h / 2;
|
||||
data.y0 = 0;
|
||||
|
||||
function toggle_children(tree, level) {
|
||||
if (tree.children) {
|
||||
_(tree.children).each(function (child) {
|
||||
toggle_children(child, level + 1);
|
||||
});
|
||||
|
||||
if (level >= initial_open_level) {
|
||||
toggle(tree);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
var initial_open_level = this.settings.get("initial_open_level");
|
||||
|
||||
if (initial_open_level >= 0) {
|
||||
toggle_children(data, 0);
|
||||
}
|
||||
|
||||
var duration = 0;
|
||||
update(data);
|
||||
duration = d3.event && d3.event.altKey ? 5000 : 500;
|
||||
|
||||
function update(source) {
|
||||
// Compute the new tree layout.
|
||||
var nodes = tree.nodes(data).reverse();
|
||||
|
||||
// Normalize for fixed-depth.
|
||||
nodes.forEach(function (d) {
|
||||
d.y = d.depth * 180;
|
||||
});
|
||||
|
||||
// Update the nodes…
|
||||
var node = vis.selectAll("g.node").data(nodes, function (d) {
|
||||
return d.id || (d.id = ++i);
|
||||
});
|
||||
|
||||
// Enter any new nodes at the parent's previous position.
|
||||
var nodeEnter = node
|
||||
.enter()
|
||||
.append("svg:g")
|
||||
//.attr("class", "node")
|
||||
.attr("class", function (d) {
|
||||
return d._children ? "node node_close" : "node node_open";
|
||||
})
|
||||
.attr("transform", function (d) {
|
||||
return "translate(" + source.y0 + "," + source.x0 + ")";
|
||||
})
|
||||
.on("click", function (d) {
|
||||
toggle(d);
|
||||
update(d);
|
||||
});
|
||||
|
||||
nodeEnter
|
||||
.append("svg:circle")
|
||||
.attr("r", 1e-6)
|
||||
.style("fill", function (d) {
|
||||
return d._children ? node_close_color : node_open_color;
|
||||
})
|
||||
.style("cursor", function (d) {
|
||||
return d.children || d._children ? "pointer" : "default";
|
||||
})
|
||||
.style("stroke", node_outline_color);
|
||||
|
||||
nodeEnter
|
||||
.append("svg:text")
|
||||
.attr("x", function (d) {
|
||||
return d.children || d._children ? -10 : 10;
|
||||
})
|
||||
.attr("dy", ".35em")
|
||||
.attr("text-anchor", function (d) {
|
||||
return d.children || d._children ? "end" : "start";
|
||||
})
|
||||
.style("cursor", function (d) {
|
||||
return d.children || d._children ? "pointer" : "default";
|
||||
})
|
||||
.style("fill-opacity", 1e-6)
|
||||
.html(function (d) {
|
||||
if (has_size) {
|
||||
var sum = Number(d.sum).toLocaleString("en");
|
||||
var size = Number(d.size).toLocaleString("en");
|
||||
|
||||
var long_label =
|
||||
d.name +
|
||||
' - <tspan fill="' +
|
||||
label_size_color +
|
||||
'">' +
|
||||
sum +
|
||||
'</tspan> - <tspan fill="' +
|
||||
label_count_color +
|
||||
'">' +
|
||||
d.count +
|
||||
"<tspan>";
|
||||
var short_label =
|
||||
d.name +
|
||||
' - <tspan fill="' +
|
||||
label_size_color +
|
||||
'">' +
|
||||
size +
|
||||
"<tspan>";
|
||||
|
||||
return d.children || d._children ? long_label : short_label;
|
||||
} else {
|
||||
return d.name;
|
||||
}
|
||||
});
|
||||
|
||||
// Transition nodes to their new position.
|
||||
var nodeUpdate = node
|
||||
.transition()
|
||||
.duration(duration)
|
||||
.attr("transform", function (d) {
|
||||
return "translate(" + d.y + "," + d.x + ")";
|
||||
});
|
||||
|
||||
nodeUpdate
|
||||
.select("circle")
|
||||
.attr("r", 4.5)
|
||||
.style("fill", function (d) {
|
||||
return d._children ? node_close_color : node_open_color;
|
||||
});
|
||||
|
||||
nodeUpdate.select("text").style("fill-opacity", 1);
|
||||
nodeUpdate.select("text").style("fill", "white");
|
||||
|
||||
// Transition exiting nodes to the parent's new position.
|
||||
var nodeExit = node
|
||||
.exit()
|
||||
.transition()
|
||||
.duration(duration)
|
||||
.attr("transform", function (d) {
|
||||
return "translate(" + source.y + "," + source.x + ")";
|
||||
})
|
||||
.remove();
|
||||
|
||||
nodeExit.select("circle").attr("r", 1e-6);
|
||||
|
||||
nodeExit.select("text").style("fill-opacity", 1e-6);
|
||||
|
||||
// Update the links…
|
||||
var link = vis
|
||||
.selectAll("path.link")
|
||||
.data(tree.links(nodes), function (d) {
|
||||
return d.target.id;
|
||||
});
|
||||
|
||||
// Enter any new links at the parent's previous position.
|
||||
link
|
||||
.enter()
|
||||
.insert("svg:path", "g")
|
||||
.attr("class", "link")
|
||||
.attr("d", function (d) {
|
||||
var o = { x: source.x0, y: source.y0 };
|
||||
return diagonal({ source: o, target: o });
|
||||
})
|
||||
.transition()
|
||||
.duration(duration)
|
||||
.attr("d", diagonal);
|
||||
|
||||
// Transition links to their new position.
|
||||
link.transition().duration(duration).attr("d", diagonal);
|
||||
|
||||
// Transition exiting nodes to the parent's new position.
|
||||
link
|
||||
.exit()
|
||||
.transition()
|
||||
.duration(duration)
|
||||
.attr("d", function (d) {
|
||||
var o = { x: source.x, y: source.y };
|
||||
return diagonal({ source: o, target: o });
|
||||
})
|
||||
.remove();
|
||||
|
||||
// Stash the old positions for transition.
|
||||
nodes.forEach(function (d) {
|
||||
d.x0 = d.x;
|
||||
d.y0 = d.y;
|
||||
});
|
||||
}
|
||||
|
||||
// Toggle children.
|
||||
function toggle(d) {
|
||||
if (d.children) {
|
||||
d._children = d.children;
|
||||
d.children = null;
|
||||
} else {
|
||||
d.children = d._children;
|
||||
d._children = null;
|
||||
}
|
||||
}
|
||||
},
|
||||
});
|
||||
return Dendrogram;
|
||||
});
|
||||
@ -0,0 +1,10 @@
|
||||
{
|
||||
"name": "forcedirected",
|
||||
"version": "1.0.0",
|
||||
"main": "forcedirected.js",
|
||||
"ignore": [],
|
||||
"dependencies": {
|
||||
"d3": "3.3.x"
|
||||
},
|
||||
"devDependencies": {}
|
||||
}
|
||||
@ -0,0 +1,86 @@
|
||||
.splunk-toolkit-force-directed {
|
||||
overflow: hidden;
|
||||
font-family: arial;
|
||||
}
|
||||
|
||||
.splunk-toolkit-force-directed circle.node {
|
||||
stroke: #fff;
|
||||
stroke-width: 1.5px;
|
||||
}
|
||||
|
||||
.splunk-toolkit-force-directed .link, .splunk-toolkit-force-directed #arrowEnd {
|
||||
stroke: #999;
|
||||
stroke-opacity: .6;
|
||||
fill: none;
|
||||
}
|
||||
.splunk-toolkit-force-directed #arrowEnd {
|
||||
fill: #999;
|
||||
}
|
||||
|
||||
.splunk-toolkit-force-directed circle.node {
|
||||
stroke: #fff;
|
||||
stroke-width: 1.5px;
|
||||
}
|
||||
|
||||
.splunk-toolkit-force-directed circle.nodeHighlight,
|
||||
.splunk-toolkit-force-directed circle.highlight {
|
||||
stroke-width: 2px;
|
||||
stroke: #E89595;
|
||||
}
|
||||
|
||||
.linkHighlight {
|
||||
stroke: red !important;
|
||||
}
|
||||
|
||||
.splunk-toolkit-force-directed circle.nodeHighlight.highlight {
|
||||
stroke-width: 3px;
|
||||
}
|
||||
|
||||
|
||||
.splunk-toolkit-force-directed line.link {
|
||||
stroke: #999;
|
||||
stroke-opacity: .6;
|
||||
}
|
||||
|
||||
.splunk-toolkit-force-directed #chart {
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
}
|
||||
|
||||
.splunk-toolkit-force-directed #tooltipContainer {
|
||||
border: 1px solid hsl(0, 0%, 80%);
|
||||
position: absolute;
|
||||
min-width: 200px;
|
||||
min-height: 50px;
|
||||
border-radius:3px;
|
||||
z-index:100;
|
||||
background: #3A3A3A;
|
||||
padding: 10px;
|
||||
color: white;
|
||||
top:50px;
|
||||
}
|
||||
|
||||
.splunk-toolkit-force-directed .group-swatch {
|
||||
width:20px;
|
||||
height:20px;
|
||||
float:left;
|
||||
margin:2px;
|
||||
margin-right: 10px
|
||||
}
|
||||
|
||||
.splunk-toolkit-force-directed .group-name {
|
||||
padding-top: 5px;
|
||||
}
|
||||
|
||||
.splunk-toolkit-force-directed .tooltipLabel {
|
||||
font-weight:bold;
|
||||
padding-right:5px;
|
||||
}
|
||||
|
||||
.splunk-toolkit-force-directed .tooltipRow {
|
||||
margin-bottom:10px;
|
||||
}
|
||||
|
||||
.splunk-toolkit-force-directed .panCursor {
|
||||
cursor: move;
|
||||
}
|
||||
@ -0,0 +1,546 @@
|
||||
// Force Directed Graphs!
|
||||
// these require an input of (at least) 3 fields in the format
|
||||
// 'stats count by field1 field2 field3'
|
||||
|
||||
// ---- settings ----
|
||||
// height, width
|
||||
// panAndZoom: the ability to zoom (true, false)
|
||||
// directional: true, false
|
||||
// valueField: what field to count by
|
||||
// charges, gravity: change the look of the graph, play around with these!
|
||||
// linkDistance: the distance between each node
|
||||
|
||||
// ---- expected data format ----
|
||||
// a splunk search like this: source=*somedata* | stats count by artist_name track_name device
|
||||
// each group is an artist/song pairing
|
||||
// {
|
||||
// "nodes":[
|
||||
// {
|
||||
// "source":"Bruno Mars",
|
||||
// "group":0
|
||||
// },
|
||||
// {
|
||||
// "source":"It Will Rain",
|
||||
// "group":0
|
||||
// },
|
||||
// {
|
||||
// "source":"Cobra Starship",
|
||||
// "group":1
|
||||
// },
|
||||
// {
|
||||
// "source":"You Make Me Feel",
|
||||
// "group":1
|
||||
// },
|
||||
// {
|
||||
// "source":"Gym Class Heroes",
|
||||
// "group":2
|
||||
// },
|
||||
// {
|
||||
// "source":"Stereo Hearts",
|
||||
// "group":2
|
||||
// },
|
||||
// ],
|
||||
// "links":[
|
||||
// {
|
||||
// "source":0,
|
||||
// "target":1,
|
||||
// "value":null
|
||||
// },
|
||||
// {
|
||||
// "source":2,
|
||||
// "target":3,
|
||||
// "value":null
|
||||
// },
|
||||
// {
|
||||
// "source":4,
|
||||
// "target":5,
|
||||
// "value":null
|
||||
// },
|
||||
// ],
|
||||
|
||||
// - we add this part -
|
||||
|
||||
// "groupNames":{
|
||||
// "iphone":49,
|
||||
// "android":53,
|
||||
// "blackberry":48,
|
||||
// "ipad":52,
|
||||
// "ipod":50
|
||||
// },
|
||||
// "groupLookup":[
|
||||
// "iphone",
|
||||
// "android",
|
||||
// "blackberry",
|
||||
// "ipad",
|
||||
// "ipod"
|
||||
// ]
|
||||
// }
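// A minimal usage sketch (the ids and container below are hypothetical, not
// defined in this file): since the view extends SimpleSplunkView, it is
// typically required from a dashboard's JS and bound to an existing search
// manager, for example:
//
//   var graph = new ForceDirected({
//       id: "forcegraph",               // hypothetical component id
//       managerid: "search1",           // hypothetical SearchManager id
//       valueField: "count",
//       directional: true,
//       el: $("#forcegraph_container")  // hypothetical container element
//   }).render();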
|
||||
|
||||
define(function(require, exports, module) {
|
||||
|
||||
var _ = require('underscore');
|
||||
var d3 = require("../d3/d3");
|
||||
var SimpleSplunkView = require("splunkjs/mvc/simplesplunkview");
|
||||
|
||||
var ForceDirected = SimpleSplunkView.extend({
|
||||
moduleId: module.id,
|
||||
|
||||
className: "splunk-toolkit-force-directed",
|
||||
|
||||
options: {
|
||||
managerid: null,
|
||||
data: 'preview',
|
||||
panAndZoom: true,
|
||||
directional: true,
|
||||
valueField: 'count',
|
||||
charges: -500,
|
||||
gravity: 0.2,
|
||||
linkDistance: 15,
|
||||
swoop: false,
|
||||
isStatic: true
|
||||
},
|
||||
|
||||
output_mode: "json_rows",
|
||||
|
||||
initialize: function() {
|
||||
SimpleSplunkView.prototype.initialize.apply(this, arguments);
|
||||
|
||||
// in the case that any options are changed, it will dynamically update
|
||||
// without having to refresh.
|
||||
this.settings.on("change:charges", this.render, this);
|
||||
this.settings.on("change:gravity", this.render, this);
|
||||
this.settings.on("change:linkDistance", this.render, this);
|
||||
this.settings.on("change:directional", this.render, this);
|
||||
this.settings.on("change:panAndZoom", this.render, this);
|
||||
this.settings.on("change:swoop", this.render, this);
|
||||
this.settings.on("change:isStatic", this.render, this);
|
||||
},
|
||||
|
||||
createView: function() {
|
||||
var margin = {top: 10, right: 10, bottom: 10, left: 10};
|
||||
var availableWidth = parseInt(this.settings.get("width") || this.$el.width(), 10);
|
||||
var availableHeight = parseInt(this.settings.get("height") || this.$el.height(), 10);
|
||||
|
||||
this.$el.html("");
|
||||
|
||||
var svg = d3.select(this.el)
|
||||
.append("svg")
|
||||
.attr("width", availableWidth)
|
||||
.attr("height", availableHeight)
|
||||
.attr("pointer-events", "all");
|
||||
|
||||
return { container: this.$el, svg: svg, margin: margin };
|
||||
},
|
||||
|
||||
// making the data look how we want it to for updateView to do its job
|
||||
formatData: function(data) {
|
||||
var nodes = {};
|
||||
var links = [];
|
||||
data.forEach(function(link) {
|
||||
var sourceName = link[0];
|
||||
var targetName = link[1];
|
||||
var groupName = link[2];
|
||||
var newLink = {};
|
||||
newLink.source = nodes[sourceName] ||
|
||||
(nodes[sourceName] = {name: sourceName, group: groupName, value: 0});
|
||||
newLink.target = nodes[targetName] ||
|
||||
(nodes[targetName] = {name: targetName, group: groupName, value: 0});
|
||||
newLink.value = +link[3];
|
||||
newLink.source.value += newLink.value;
|
||||
newLink.target.value += newLink.value;
|
||||
links.push(newLink);
|
||||
});
|
||||
|
||||
return {nodes: d3.values(nodes), links: links};
|
||||
},
|
||||
|
||||
updateView: function(viz, data) {
|
||||
var that = this;
|
||||
var containerHeight = this.$el.height();
|
||||
var containerWidth = this.$el.width();
|
||||
|
||||
// Clear svg
|
||||
var svg = $(viz.svg[0]);
|
||||
svg.empty();
|
||||
svg.height(containerHeight);
|
||||
svg.width(containerWidth);
|
||||
|
||||
// Add the graph group as a child of the main svg
|
||||
var graphWidth = containerWidth - viz.margin.left - viz.margin.right;
|
||||
var graphHeight = containerHeight - viz.margin.top - viz.margin.bottom;
|
||||
var graph = viz.svg
|
||||
.append("g")
|
||||
.attr("width", graphWidth)
|
||||
.attr("height", graphHeight)
|
||||
.attr("transform", "translate(" + viz.margin.left + "," + viz.margin.top + ")");
|
||||
|
||||
// Get settings
|
||||
this.charge = this.settings.get('charges');
|
||||
this.gravity = this.settings.get('gravity');
|
||||
this.linkDistance = this.settings.get('linkDistance');
|
||||
this.zoomable = this.settings.get('panAndZoom');
|
||||
this.swoop = this.settings.get('swoop');
|
||||
this.isStatic = this.settings.get('isStatic');
|
||||
this.isDirectional = this.settings.get('directional');
|
||||
this.zoomFactor = 0.5;
|
||||
|
||||
this.groupNameLookup = data.groupLookup;
|
||||
|
||||
// Set up graph
|
||||
var r = 6;
|
||||
var height = graphHeight;
|
||||
var width = graphWidth;
|
||||
var force = d3.layout.force()
|
||||
.gravity(this.gravity)
|
||||
.charge(this.charge)
|
||||
.linkDistance(this.linkDistance)
|
||||
.size([width, height]);
|
||||
|
||||
this.color = d3.scale.category20();
|
||||
|
||||
this.tooltips = new Tooltips(graph);
|
||||
|
||||
if (this.zoomable) {
|
||||
initPanZoom.call(this, viz.svg);
|
||||
}
|
||||
|
||||
graph.style("opacity", 1e-6)
|
||||
.transition()
|
||||
.duration(1000)
|
||||
.style("opacity", 1);
|
||||
|
||||
graph.append("svg:defs").selectAll("marker")
|
||||
.data(["arrowEnd"])
|
||||
.enter().append("svg:marker")
|
||||
.attr("id", String)
|
||||
.attr("viewBox", "0 -5 10 10")
|
||||
.attr("refX", 0)
|
||||
.attr("refY", 0)
|
||||
.attr("markerWidth", 6)
|
||||
.attr("markerHeight", 6)
|
||||
.attr("markerUnits", "userSpaceOnUse")
|
||||
.attr("orient", "auto")
|
||||
.append("svg:path")
|
||||
.attr("d", "M0,-5L10,0L0,5");
|
||||
|
||||
var link = graph.selectAll("line.link")
|
||||
.data(data.links)
|
||||
.enter().append('path')
|
||||
.attr("class", "link")
|
||||
.attr("marker-end", function(d) {
|
||||
if (that.isDirectional) {
|
||||
return "url(#" + "arrowEnd" + ")";
|
||||
}
|
||||
})
|
||||
.style("stroke-width", function(d) {
|
||||
var num = Math.max(Math.round(Math.log(d.value)), 1);
|
||||
return _.isNaN(num) ? 1 : num;
|
||||
});
|
||||
|
||||
link
|
||||
.on('click', function(d) {
|
||||
that.trigger('click:link', {
|
||||
source: d.source.name,
|
||||
sourceGroup: d.source.group,
|
||||
target: d.target.name,
|
||||
targetGroup: d.target.group,
|
||||
value: d.value
|
||||
});
|
||||
})
|
||||
.on('mouseover', function(d) {
|
||||
d3.select(this).classed('linkHighlight', true);
|
||||
openLinkTooltip(d, this);
|
||||
})
|
||||
.on('mouseout', function(d) {
|
||||
d3.select(this).classed('linkHighlight', false);
|
||||
that.tooltips.close(this);
|
||||
});
|
||||
|
||||
var node = graph.selectAll("circle.node")
|
||||
.data(data.nodes)
|
||||
.enter().append("svg:circle")
|
||||
.attr("class", "node")
|
||||
.attr("r", r - 1)
|
||||
.style("fill", function(d) {
|
||||
return that.color(d.group);
|
||||
})
|
||||
.call(force.drag);
|
||||
|
||||
node.append("title")
|
||||
.text(function(d) { return d.name; });
|
||||
|
||||
node
|
||||
.on('click', function(d) {
|
||||
that.trigger('click:node', {
|
||||
name: d.name,
|
||||
group: d.group,
|
||||
value: d.value
|
||||
});
|
||||
})
|
||||
.on('mouseover', function(d) {
|
||||
d3.select(this).classed('nodeHighlight', true);
|
||||
openNodeTooltip(d, this);
|
||||
})
|
||||
.on('mouseout', function(d) {
|
||||
d3.select(this).classed('nodeHighlight', false);
|
||||
that.tooltips.close(this);
|
||||
});
|
||||
|
||||
force.nodes(data.nodes)
|
||||
.links(data.links)
|
||||
.on("tick", function() {
|
||||
link.attr("d", function(d) {
|
||||
var diffX = d.target.x - d.source.x;
|
||||
var diffY = d.target.y - d.source.y;
|
||||
|
||||
// Length of path from center of source node to center of target node
|
||||
var pathLength = Math.sqrt((diffX * diffX) + (diffY * diffY));
|
||||
|
||||
// x and y distances from center to outside edge of target node
|
||||
var offsetX = (diffX * (r * 2)) / pathLength;
|
||||
var offsetY = (diffY * (r * 2)) / pathLength;
|
||||
|
||||
if (!that.swoop) {
|
||||
pathLength = 0;
|
||||
}
|
||||
|
||||
return "M" + d.source.x + "," + d.source.y + "A" + pathLength + "," + pathLength + " 0 0,1 " + (d.target.x - offsetX) + "," + (d.target.y - offsetY);
|
||||
});
|
||||
|
||||
node.attr("cx", function(d) {
|
||||
d.x = Math.max(r, Math.min(width - r, d.x));
|
||||
return d.x;
|
||||
})
|
||||
.attr("cy", function(d) {
|
||||
d.y = Math.max(r, Math.min(height - r, d.y));
|
||||
return d.y;
|
||||
});
|
||||
|
||||
}).start();
|
||||
|
||||
if (this.isStatic) {
|
||||
forwardAlpha(force, 0.005, 1000);
|
||||
}
|
||||
|
||||
function forwardAlpha(layout, alpha, max) {
|
||||
alpha = alpha || 0;
|
||||
max = max || 1000;
|
||||
var i = 0;
|
||||
while (layout.alpha() > alpha && i++ < max) {
|
||||
layout.tick();
|
||||
}
|
||||
}
|
||||
|
||||
// draggin'
|
||||
function initPanZoom(svg) {
|
||||
var that = this;
|
||||
svg.on('mousedown.drag', function() {
|
||||
if (that.zoomable) {
|
||||
svg.classed('panCursor', true);
|
||||
}
|
||||
// console.log('drag start');
|
||||
});
|
||||
|
||||
svg.on('mouseup.drag', function() {
|
||||
svg.classed('panCursor', false);
|
||||
// console.log('drag stop');
|
||||
});
|
||||
|
||||
svg.call(d3.behavior.zoom().on("zoom", function() {
|
||||
panZoom();
|
||||
}));
|
||||
}
|
||||
|
||||
// zoomin'
|
||||
function panZoom() {
|
||||
graph.attr("transform",
|
||||
"translate(" + d3.event.translate + ")"
|
||||
+ " scale(" + d3.event.scale + ")");
|
||||
}
|
||||
|
||||
function openNodeTooltip(d, node) {
|
||||
var groupName;
|
||||
|
||||
if (that.groupNameLookup !== undefined) {
|
||||
groupName = that.groupNameLookup[d.group];
|
||||
} else {
|
||||
groupName = d.group;
|
||||
}
|
||||
|
||||
that.tooltips.open('nodes', {
|
||||
slots: {
|
||||
val: d.name,
|
||||
group: groupName
|
||||
},
|
||||
swatch: that.color(d.group)
|
||||
}, node);
|
||||
}
|
||||
|
||||
function openLinkTooltip(d, node) {
|
||||
that.tooltips.open('links', {
|
||||
slots: {
|
||||
source: d.source.name,
|
||||
target: d.target.name
|
||||
}
|
||||
}, node);
|
||||
}
|
||||
|
||||
//TODO: This doesn't seem to be used in this file
|
||||
function getSafeVal(getobj, name) {
|
||||
var retVal;
|
||||
if (getobj.hasOwnProperty(name) && getobj[name] !== null) {
|
||||
retVal = getobj[name];
|
||||
} else {
|
||||
retVal = name;
|
||||
}
|
||||
return retVal;
|
||||
}
|
||||
|
||||
function highlightNodes(val) {
|
||||
var self = this, groupName;
|
||||
if (val !== ' ' && val !== '') {
|
||||
graph.selectAll('circle')
|
||||
.filter(function(d, i) {
|
||||
groupName = self.groupNameLookup[d.group];
|
||||
if (d.source.indexOf(val) >= 0 || groupName.indexOf(val) >= 0) {
|
||||
d3.select(this).classed('highlight', true);
|
||||
} else {
|
||||
d3.select(this).classed('highlight', false);
|
||||
}
|
||||
});
|
||||
} else {
|
||||
graph.selectAll('circle').classed('highlight', false);
|
||||
}
|
||||
}
|
||||
|
||||
/////////////////////// formerly known as tooltips.js /////////////////////////////
|
||||
|
||||
function Tooltips(svg) {
|
||||
var tooltipTimer = null,
|
||||
tooltipOpenCoords = {},
|
||||
tooltipIsOpen = false,
|
||||
tooltipContents,
|
||||
$tooltipContainer,
|
||||
isReady = false,
|
||||
layouts;
|
||||
|
||||
setup(svg, viz.container);
|
||||
|
||||
function setup(svg, $container) {
|
||||
var self = this,
|
||||
data = [0],
|
||||
$nodeVal, $nodeGroup, $nodeContainer,
|
||||
$linkSource, $linkTarget, $linkContainer;
|
||||
|
||||
$tooltipContainer = $("<div id='tooltipContainer'></div>");
|
||||
|
||||
$nodeContainer = $("<div class='nodeContainer'></div>");
|
||||
$nodeVal = $("<div class='node-value tooltipRow'><span class='tooltipLabel'>Value: </span><span class='field1-val'></span></div>");
|
||||
$nodeGroup = $("<div class='node-group tooltipRow'></div><div class='group-swatch'></div><div class='group-name'><span class='tooltipLabel'>Group: </span><span class='group-val'></span></div>");
|
||||
$nodeContainer.append($nodeVal);
|
||||
$nodeContainer.append($nodeGroup);
|
||||
$tooltipContainer.append($nodeContainer);
|
||||
|
||||
$linkContainer = $("<div class='linkContainer'></div>");
|
||||
$linkSource = $("<div class='source tooltipRow'><span class='tooltipLabel'>Source: </span><span class='source-val'></span></div>");
|
||||
$linkTarget = $("<div class='target tooltipRow'><span class='tooltipLabel'>Target: </span><span class='target-val'></span></div>");
|
||||
$linkContainer.append($linkSource);
|
||||
$linkContainer.append($linkTarget);
|
||||
$tooltipContainer.append($linkContainer);
|
||||
|
||||
$tooltipContainer.find('.group-swatch').hide();
|
||||
|
||||
$container.prepend($tooltipContainer);
|
||||
$tooltipContainer.hide();
|
||||
|
||||
layouts = {
|
||||
'nodes': {
|
||||
"container": $nodeContainer,
|
||||
"slots": {
|
||||
"val": $nodeVal.find('.field1-val'),
|
||||
"group": $nodeGroup.find('.group-val')
|
||||
},
|
||||
"swatch": $nodeContainer.find('.group-swatch')
|
||||
},
|
||||
'links': {
|
||||
"container": $linkContainer,
|
||||
"slots": {
|
||||
"source": $linkSource.find('.source-val'),
|
||||
"target": $linkTarget.find('.target-val')
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
isReady = true;
|
||||
}
|
||||
|
||||
function clearTooltips() {
|
||||
if (isReady) {
|
||||
$.each(layouts, function(k, layout) {
|
||||
$.each(layout.slots, function(k, v) {
|
||||
// this isn't really necessary because it's either hidden or shown with newly-replaced content
|
||||
v.empty();
|
||||
});
|
||||
layout.container.hide();
|
||||
if (layout.swatch !== undefined) {
|
||||
layout.swatch.hide();
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
this.close = function(node) {
|
||||
// return false;
|
||||
var self = this,
|
||||
dx, dy;
|
||||
|
||||
var mouseCoords = d3.mouse(node);
|
||||
|
||||
if (tooltipTimer !== null) {
|
||||
window.clearTimeout(tooltipTimer);
|
||||
}
|
||||
|
||||
dx = Math.abs(tooltipOpenCoords.x - mouseCoords[0]);
|
||||
dy = Math.abs(tooltipOpenCoords.y - mouseCoords[1]);
|
||||
|
||||
/*
|
||||
only close the tooltip when the user has moved a certain distance away
|
||||
this helps when an element is very small and the user might have
|
||||
difficulty keeping their mouse directly over it
|
||||
*/
|
||||
if (dy > 10 || dx > 10) {
|
||||
tooltipIsOpen = false;
|
||||
tooltipTimer = window.setTimeout(function() {
|
||||
$tooltipContainer.fadeOut(400);
|
||||
}, 500);
|
||||
}
|
||||
};
|
||||
|
||||
this.open = function(layout, data, node) {
|
||||
var mouseCoords = d3.mouse(node);
|
||||
tooltipIsOpen = true;
|
||||
tooltipOpenCoords = {
|
||||
x: mouseCoords[0] + 6 * 2,
|
||||
y: mouseCoords[1] + 6 * 3
|
||||
};
|
||||
|
||||
clearTooltips();
|
||||
$.each(data.slots, function(k, v) {
|
||||
layouts[layout]['slots'][k].append(v);
|
||||
});
|
||||
layouts[layout]['container'].show();
|
||||
if (layouts[layout]['swatch'] !== undefined) {
|
||||
layouts[layout]['swatch'].show().css('background-color', data.swatch);
|
||||
}
|
||||
|
||||
$tooltipContainer
|
||||
.css("left", tooltipOpenCoords.x)
|
||||
.css("top", tooltipOpenCoords.y);
|
||||
$tooltipContainer.fadeIn(400);
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
});
|
||||
return ForceDirected;
|
||||
});
|
||||
@ -0,0 +1,53 @@
|
||||
// This code has been imported from the following nice work: https://splunkbase.splunk.com/app/3171/
|
||||
|
||||
// Author: Ryan Thibodeaux
|
||||
//
|
||||
//
|
||||
// Options module for passing
|
||||
// parameters between modules/functions.
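//
// A minimal sketch (assumed, not copied from this app's dashboards) of how
// values reach module.config() below, using RequireJS module configuration:
//
//   require.config({
//       config: {
//           "appOptions": {
//               options: { appName: "my_app", pageName: "my_view", pageStartTime: 0 }
//           }
//       }
//   });
//
//   // elsewhere: appOptions.getOptionValue("appName") then returns "my_app"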
|
||||
|
||||
(function() {
|
||||
define([
|
||||
"module",
|
||||
], function(module) {
|
||||
|
||||
var appOptions;
|
||||
var options = {};
|
||||
var config = module.config();
|
||||
|
||||
if (typeof config !== 'undefined' && config !== null) {
|
||||
options = config.options;
|
||||
}
|
||||
|
||||
return appOptions = (function() {
|
||||
function appOptions() {} // empty constructor
|
||||
|
||||
// check if appOptions contains parameter 'name'
|
||||
appOptions.hasOption = function(name) {
|
||||
if (typeof options === 'undefined' ||
|
||||
typeof name === 'undefined' ||
|
||||
options.hasOwnProperty(name) !== true) {
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
};
|
||||
|
||||
// return value stored in parameter 'name'
|
||||
appOptions.getOptionValue = function(name) {
|
||||
return (appOptions.hasOption(name) ? options[name] : undefined);
|
||||
};
|
||||
|
||||
// set parameter 'name' to value
|
||||
appOptions.setOptionValue = function(name, value) {
|
||||
if (!appOptions.hasOption(name)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
options[name] = value;
|
||||
return true;
|
||||
};
|
||||
|
||||
return appOptions;
|
||||
})();
|
||||
});
|
||||
}).call(this);
|
||||
@ -0,0 +1,740 @@
|
||||
// This code has been imported from the following nice work: https://splunkbase.splunk.com/app/3171/
|
||||
|
||||
// Author: Ryan Thibodeaux
|
||||
//
|
||||
//
|
||||
// Utility functions used throughout
|
||||
// other modules and functions in the app.
|
||||
|
||||
|
||||
(function() {
|
||||
define([
|
||||
"jquery",
|
||||
"underscore",
|
||||
"appOptions",
|
||||
"splunkjs/mvc",
|
||||
"splunkjs/mvc/simplexml/ready!",
|
||||
], function($, _, appOptions, mvc) {
|
||||
|
||||
var appUtils;
|
||||
var footerRemovalTimerOn = 0;
|
||||
|
||||
var submittedTokenModel = mvc.Components.get('submitted');
|
||||
var defaultTokenModel = mvc.Components.get('default');
|
||||
|
||||
if (typeof mvc === 'undefined' || !submittedTokenModel || !defaultTokenModel) {
|
||||
var str = "Failed to load Splunk components. " +
|
||||
"This is probably a symptom of a bigger problem.";
|
||||
alert(str);
|
||||
console.error(str);
|
||||
}
|
||||
|
||||
return appUtils = (function() {
|
||||
|
||||
function appUtils() {} // empty constructor
|
||||
|
||||
|
||||
// Initializes app tokens and set footer removal timer
|
||||
appUtils.initiliazeApp = function(submit) {
|
||||
|
||||
// make sure appName and pageName are set, and set
|
||||
// the 'my_app' and 'my_view' tokens accordingly
|
||||
var myApp = appOptions.getOptionValue('appName');
|
||||
var myView = appOptions.getOptionValue('pageName');
|
||||
if (typeof myApp === 'undefined' || myApp.toString().trim().length < 1 ||
|
||||
typeof myView === 'undefined' || myView.toString().trim().length < 1) {
|
||||
|
||||
var comps = (location.pathname.split('?')[0]).split('/');
|
||||
var idx = comps.indexOf('app');
|
||||
myApp = comps[idx + 1];
|
||||
myView = comps[idx + 2];
|
||||
|
||||
appOptions.setOptionValue('appName', myApp);
|
||||
appOptions.setOptionValue('pageName', myView);
|
||||
}
|
||||
|
||||
appUtils.setToken('my_app', myApp);
|
||||
appUtils.setToken('my_view', myView);
|
||||
|
||||
if (!!submit) { //!!undefined is false
|
||||
appUtils.submitTokens();
|
||||
}
|
||||
|
||||
// make sure pageStartTime is set
|
||||
var startTime = appOptions.getOptionValue('pageStartTime');
|
||||
if (typeof startTime === 'undefined' || startTime <= 0) {
|
||||
startTime = new Date().valueOf();
|
||||
appOptions.setOptionValue('pageStartTime', startTime);
|
||||
}
|
||||
|
||||
// set timer to remove footer instead of relying on
|
||||
// page 'load' event (that was unreliable)
|
||||
appUtils.setFooterEditTimer(200);
|
||||
};
|
||||
|
||||
|
||||
// return token model objects
|
||||
appUtils.getTokenModels = function() {
|
||||
return ([defaultTokenModel, submittedTokenModel]);
|
||||
}
|
||||
|
||||
|
||||
// set generic wildcard tooltip on passed element name(s) where
|
||||
// inputs can be an array or a comma-delimited list
|
||||
appUtils.setWildCardTooltip = function(inputElements) {
|
||||
|
||||
if (typeof inputElements === 'undefined' || inputElements.length < 1) {
|
||||
return;
|
||||
}
|
||||
|
||||
var newArray = inputElements;
|
||||
|
||||
// test if not an Array - turn it into one if it is not
|
||||
if (Object.prototype.toString.call(inputElements) !== '[object Array]') {
|
||||
newArray = inputElements.replace(/^,+|,+$/gm, '').split(",");
|
||||
}
|
||||
|
||||
var len = newArray.length;
|
||||
|
||||
for (var i = 0; i < len; i++) {
|
||||
appUtils.setTooltip(newArray[i], 'Use \"*\" as a wildcard');
|
||||
}
|
||||
};
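// e.g. (hypothetical input ids): appUtils.setWildCardTooltip("input_host,input_user")
// or appUtils.setWildCardTooltip(["input_host", "input_user"]) both tag the two
// inputs with the 'Use "*" as a wildcard' tooltip.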
|
||||
|
||||
|
||||
// set the 'tip' string as the tooltip for element 'name'
|
||||
appUtils.setTooltip = function(name, tip) {
|
||||
|
||||
if (typeof name === 'undefined' || name.length < 1 || typeof tip === 'undefined') {
|
||||
return;
|
||||
}
|
||||
|
||||
var eleID = (name[0] === '#' ? name : '#' + name);
|
||||
var e = $(eleID);
|
||||
|
||||
// element exists, so add tooltips
|
||||
if (e.length) {
|
||||
|
||||
// add tooltips to text inputs
|
||||
var textChild = e.children('div.splunk-textinput');
|
||||
if (textChild.length) {
|
||||
textChild.attr('title', tip);
|
||||
var textChildInputs = textChild.find('input');
|
||||
if (textChildInputs.length) {
|
||||
textChildInputs.attr('placeholder', tip);
|
||||
}
|
||||
}
|
||||
|
||||
// add tooltips to multiselect and dropdown inputs
|
||||
var msChild = e.children('div.splunk-choice-input');
|
||||
if (msChild.length) {
|
||||
msChild.attr('title', tip);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
// remove links from Splunk footer
|
||||
appUtils.hideFooterLinks = function() {
|
||||
|
||||
footerRemovalTimerOn = 0;
|
||||
|
||||
var footer = $('#footer');
|
||||
|
||||
if (footer.length > 0) {
|
||||
var links = footer.find('a');
|
||||
if (links.length > 0) {
|
||||
links.hide();
|
||||
|
||||
// hide the "Hide Filters" link on top of the
|
||||
// dashboard if it is present
|
||||
var hideLink = $('.hide-global-filters');
|
||||
if (hideLink.length > 0) {
|
||||
hideLink.hide();
|
||||
}
|
||||
|
||||
} else {
|
||||
appUtils.setFooterEditTimer();
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
// setup timer to remove links from Splunk footer, where
|
||||
// the footer is checked 'delayMS' milliseconds from now
|
||||
appUtils.setFooterEditTimer = function(delayMS) {
|
||||
|
||||
// set default delay for when to check for footer
|
||||
// if delayMS was not set
|
||||
if (!(Math.floor(delayMS) > 1)) {
delayMS = 1000;
}
|
||||
|
||||
// don't allow setting this timer after page has been loaded
|
||||
// for more than 60 seconds
|
||||
if (appUtils.getPageLoadedSecs() > 60) {
|
||||
return;
|
||||
}
|
||||
|
||||
// only allow assessing the footer if the footer is on
|
||||
// the dashboard, i.e., hideFooter is not true
|
||||
if ($('#footer').length > 0) {
|
||||
if (!footerRemovalTimerOn) {
|
||||
setTimeout(appUtils.hideFooterLinks, delayMS);
|
||||
footerRemovalTimerOn = 1;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
// number of seconds the page has been loaded
|
||||
appUtils.getPageLoadedSecs = function() {
|
||||
return ((new Date().valueOf() - appOptions.getOptionValue('pageStartTime')) / 1000);
|
||||
};
|
||||
|
||||
|
||||
// Sets token 'name' to 'value' in submittedTokenModel and
|
||||
// defaultTokenModel unless excludeDefault is set to true
|
||||
appUtils.setSubmittedToken = function(name, value, excludeDefault) {
|
||||
if (typeof name === 'undefined' || !submittedTokenModel) {
|
||||
return;
|
||||
}
|
||||
if (!excludeDefault) {
|
||||
appUtils.setDefaulToken(name, value);
|
||||
}
|
||||
submittedTokenModel.set(name, value);
|
||||
};
|
||||
|
||||
// Sets token 'name' to 'value' in defaultTokenModel.
|
||||
appUtils.setDefaulToken = function(name, value) {
|
||||
if (typeof name === 'undefined' || !defaultTokenModel) {
|
||||
return;
|
||||
}
|
||||
defaultTokenModel.set(name, value);
|
||||
};
|
||||
|
||||
// Sets token 'name' to 'value' in defaultTokenModel and
|
||||
// submit all tokens if set
|
||||
appUtils.setToken = function(name, value, submit) {
|
||||
appUtils.setDefaulToken(name, value);
|
||||
|
||||
if (!!submit) {
|
||||
appUtils.submitTokens();
|
||||
}
|
||||
};
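// e.g. (hypothetical token name and value): appUtils.setToken("sel_host", "server01", true)
// sets the default token and, because submit is truthy, immediately copies all
// default tokens into the submitted model via submitTokens() below.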
|
||||
|
||||
// Returns value of token 'name' in defaultTokenModel
|
||||
appUtils.getDefaultToken = function(name) {
|
||||
if (typeof name === 'undefined' || !defaultTokenModel) {
|
||||
return undefined;
|
||||
}
|
||||
return defaultTokenModel.get(name);
|
||||
};
|
||||
|
||||
// Returns value of token 'name' in submittedTokenModel
|
||||
appUtils.getSubmittedToken = function(name) {
|
||||
if (typeof name === 'undefined' || !submittedTokenModel) {
|
||||
return undefined;
|
||||
}
|
||||
return submittedTokenModel.get(name);
|
||||
};
|
||||
|
||||
// Returns value of token 'name' in token model 'model'
|
||||
appUtils.getToken = function(name, model) {
|
||||
var tokens = (typeof model === 'undefined') ? defaultTokenModel : model;
|
||||
|
||||
if (typeof name === 'undefined' || !tokens) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
return tokens.get(name);
|
||||
};
|
||||
|
||||
|
||||
// Copy defaultTokenModel values into submittedTokenModels
|
||||
appUtils.submitTokens = function() {
|
||||
if (submittedTokenModel && defaultTokenModel) {
|
||||
submittedTokenModel.set(defaultTokenModel.toJSON());
|
||||
}
|
||||
};
|
||||
|
||||
// Return whether 'newval' matches 'checkval'
appUtils.checkTokenValue = function(checkval, newval) {
return (newval === checkval);
|
||||
};
|
||||
|
||||
|
||||
// Jumps to div element eleID
|
||||
appUtils.scrollIntoView = function(eleID, setting) {
|
||||
var e = document.getElementById(eleID);
|
||||
if (!!e && e.scrollIntoView) {
|
||||
e.scrollIntoView(setting);
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
// return whether a token value is empty, where "lax" means that only an
// undefined value (not an empty string) counts as empty
|
||||
appUtils.checkEmptyValue = function(value, lax) {
|
||||
if (!!lax) {
|
||||
return (typeof value === 'undefined')
|
||||
} else {
|
||||
return (typeof value === 'undefined' || value.length < 1)
|
||||
}
|
||||
};
|
||||
|
||||
// Toggles visibility of HTML elements of a dashboard
|
||||
appUtils.hideHtmlElement = function(eleID, hide) {
|
||||
if (appUtils.checkEmptyValue(eleID)) {
|
||||
return;
|
||||
}
|
||||
|
||||
var e = (eleID[0] === '#' ? eleID : '#' + eleID);
|
||||
if ($(e).length) {
|
||||
if (!!hide) {
|
||||
$(e).hide();
|
||||
} else {
|
||||
$(e).show();
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
// loop through input elements and evaluate each one for focus
|
||||
appUtils.checkEmptyTokenFocusForDashboard = function(inputs) {
|
||||
if (typeof inputs === 'undefined') {
|
||||
return;
|
||||
}
|
||||
|
||||
var len = inputs.length;
|
||||
var currentValue = undefined;
|
||||
|
||||
for (var i = 0; i < len; i++) {
|
||||
currentValue = (defaultTokenModel.attributes.hasOwnProperty('form.' + inputs[i]) ? appUtils.getToken('form.' + inputs[i]) : appUtils.getToken(inputs[i]));
|
||||
appUtils.checkEmptyTokenFocus(inputs[i], currentValue);
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
// set border style based on state of 'value'
|
||||
appUtils.checkEmptyTokenFocus = function(name, value) {
|
||||
var id = (name[0] === '#' ? name : '#' + name);
|
||||
var p = $(id);
|
||||
if (p.length) {
|
||||
if (typeof value === 'undefined' || value.length < 1) {
|
||||
appUtils.setInputFocus(p);
|
||||
return true;
|
||||
} else {
|
||||
appUtils.clearInputFocus(p);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
};
|
||||
|
||||
// set the focus effects on the passed element
|
||||
appUtils.setInputFocus = function(el) {
|
||||
if (el.hasClass('input-text') || el.hasClass('splunk-textinput')) {
|
||||
el.find('input[type="text"]').css("border-color", "red").css("box-shadow", "0px 1px 1px rgba(0, 0, 0, 0.075) inset, 0px 0px 8px rgba(222, 79, 79, 0.6)");
|
||||
} else if (el.hasClass('input-dropdown')) {
|
||||
el.find('.select2-choice').css("border-color", "red").css("box-shadow", "0px 1px 1px rgba(0, 0, 0, 0.075) inset, 0px 0px 8px rgba(222, 79, 79, 0.6)");
|
||||
} else if (el.hasClass('input-multiselect')) {
|
||||
el.find('.select2-choices').css("border-color", "red").css("box-shadow", "0px 1px 1px rgba(0, 0, 0, 0.075) inset, 0px 0px 8px rgba(222, 79, 79, 0.6)");
|
||||
} else {
|
||||
el.css("border-style", "double");
|
||||
}
|
||||
};
|
||||
|
||||
// clear the focus effects on the passed element
|
||||
appUtils.clearInputFocus = function(el) {
|
||||
if (el.hasClass('input-text') || el.hasClass('splunk-textinput')) {
|
||||
el.find('input[type="text"]').css("border-color", "").css("box-shadow", "");
|
||||
} else if (el.hasClass('input-dropdown')) {
|
||||
el.find('.select2-choice').css("border-color", "").css("box-shadow", "");
|
||||
} else if (el.hasClass('input-multiselect')) {
|
||||
el.find('.select2-choices').css("border-color", "").css("box-shadow", "");
|
||||
} else {
|
||||
el.css("border-style", "none");
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
// add event listener of type to object using the assigned callback
|
||||
appUtils.addEvent = function(object, type, callback) {
|
||||
if (typeof object === 'undefined') {
|
||||
return;
|
||||
}
|
||||
if (object.addEventListener) {
|
||||
object.addEventListener(type, callback, false);
|
||||
} else if (object.attachEvent) {
|
||||
object.attachEvent("on" + type, callback);
|
||||
} else {
|
||||
object["on" + type] = callback;
|
||||
}
|
||||
};
|
||||
|
||||
// redirects to a new page in the current app where urlSegment
|
||||
// starts with the new page to go to and newTab indicates if
|
||||
// we want to open a new tab or not
|
||||
appUtils.drilldownRedirect = function(urlSegment, newTab) {
|
||||
|
||||
if (typeof urlSegment === 'undefined' || urlSegment.toString().trim().length < 1) {
|
||||
return;
|
||||
}
|
||||
|
||||
// make sure the new segment starts with a '/'
|
||||
var segment = urlSegment.toString().trim();
|
||||
segment = (segment[0] === '/' ? segment : '/' + segment);
|
||||
|
||||
// take the current URL and strip it down
// to what comes before the current page, without the trailing '/'
|
||||
var uri = window.location.toString();
|
||||
var currentPage = appUtils.getToken('my_view');
|
||||
var path = uri.substr(0, uri.indexOf(currentPage)).replace(/\/+$/i, '');
|
||||
|
||||
// go to new URL
|
||||
if (!!newTab) {
|
||||
window.open(path + segment, "_blank");
|
||||
} else {
|
||||
window.location = path + segment;
|
||||
}
|
||||
};
|
||||
|
||||
// generate html button in parent element
|
||||
// id: id and name to use on the html button
|
||||
// label: label/span to apply to the button
|
||||
// parent: id of parent html element in which to place the button
|
||||
// append?: should append or prepend in parent list of children
|
||||
// submit?: should it be a submit button type or not
|
||||
// vertical?: is the button used in a vertical list of items
|
||||
appUtils.generateButton = function(id, label, parent, append, submit, vertical) {
|
||||
|
||||
var btn = document.createElement('button');
|
||||
var span;
|
||||
|
||||
// apply id field
|
||||
if (typeof id !== 'undefined' && id.length > 0) {
|
||||
btn.id = id;
|
||||
btn.name = id;
|
||||
}
|
||||
|
||||
// apply label
|
||||
if (typeof label !== 'undefined' && label.length > 0) {
|
||||
span = document.createElement('span');
|
||||
span.innerHTML = label;
|
||||
btn.appendChild(span);
|
||||
}
|
||||
|
||||
// assign styling and insert if parent is set
|
||||
if (typeof parent !== 'undefined' && parent.length > 0) {
|
||||
var parentID = (parent[0] === '#' ? parent : '#' + parent);
|
||||
var p = $(parentID);
|
||||
|
||||
if (p.length) {
|
||||
|
||||
// set the button in its place within the parent fieldset
|
||||
var t = p.find('.fieldset');
|
||||
if (t.length) {
|
||||
t = $(t[0]);
|
||||
if (!!append) {
|
||||
t.append(btn);
|
||||
} else {
|
||||
t.prepend(btn);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// set button type classes and CSS
|
||||
if (!!submit) {
|
||||
btn.className = 'btn btn-primary';
|
||||
} else {
|
||||
btn.className = 'btn-info btn-app-info';
|
||||
}
|
||||
|
||||
// set button CSS based on it being in a
|
||||
// vertical stack of items or not
|
||||
if (!!vertical) {
|
||||
btn.style.verticalAlign = 'middle';
|
||||
btn.style.margin = "5px 10px 5px 0px";
|
||||
} else {
|
||||
btn.style.verticalAlign = 'top';
|
||||
btn.style.marginTop = "21px";
|
||||
btn.style.marginRight = " 10px";
|
||||
}
|
||||
|
||||
return $(btn);
|
||||
};
|
||||
|
||||
// strip value of dangerous characters in Splunk and trim the result
|
||||
appUtils.cleanTxtString = function(value) {
|
||||
if (typeof value === 'undefined' || value.length < 1) {
|
||||
return "";
|
||||
}
|
||||
return value.replace(/%|\||\=|\[|\]|\(|\)/g, "").trim();
|
||||
};
|
||||
|
||||
// clean raw input text elements
|
||||
// value: current value to clean
|
||||
// defaultVal: default value to return if cleaned version is empty/undefined
|
||||
// post: function to use to clean passed value
|
||||
appUtils.cleanTextInputElement = function(value, defaultVal, post) {
|
||||
|
||||
var cleanedValue = defaultVal;
|
||||
|
||||
if (typeof value !== 'undefined') {
|
||||
|
||||
if (!!post) {
|
||||
cleanedValue = post(value.toString());
|
||||
} else {
|
||||
cleanedValue = appUtils.cleanTxtString(value.toString());
|
||||
}
|
||||
|
||||
if (cleanedValue.length < 1) {
|
||||
cleanedValue = defaultVal;
|
||||
}
|
||||
}
|
||||
|
||||
return cleanedValue;
|
||||
};
|
||||
|
||||
|
||||
// Forces the strict ordering of the values in the token of a checkbox
|
||||
// group identified by the argument "name". The ordering is determined
|
||||
// by the order of the individual checkboxes in the group. The current
|
||||
// values of the token is passed via the "value" argument.
|
||||
appUtils.enforceCheckboxOrdering = function(name, value) {
|
||||
var cb = mvc.Components.getInstance(name);
|
||||
if (typeof cb !== 'undefined') {
|
||||
var preferred_values_order = [];
|
||||
var new_field_list = [];
|
||||
var matched = [];
|
||||
var choices = cb.options.choices;
|
||||
|
||||
// set list of preferred order based on value ordering in checkbox
|
||||
for (var i = 0; i < choices.length; i++) {
|
||||
preferred_values_order.push(choices[i]['value']);
|
||||
}
|
||||
|
||||
// values that match entries in preferred_values_order
|
||||
matched = value.filter(function(x) { return preferred_values_order.indexOf(x) >= 0 });
|
||||
|
||||
// loop through preferred_values_order and add them if they are present in matched
|
||||
for (var j = 0; j < preferred_values_order.length; j++) {
|
||||
if (matched.indexOf(preferred_values_order[j]) >= 0) {
|
||||
new_field_list.push(preferred_values_order[j]);
|
||||
}
|
||||
}
|
||||
|
||||
appUtils.setToken("form." + name, new_field_list);
|
||||
}
|
||||
};
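// A worked example (hypothetical checkbox values): if the checkbox group was
// declared with choices ["cpu", "mem", "disk"] and the user ticked them in the
// order ["disk", "cpu"], this resets "form.<name>" to ["cpu", "disk"] so the
// token always follows the declared choice order.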
|
||||
|
||||
|
||||
// Setup the modal search tool button and event listeners
|
||||
// for the passed instance in "modalObject"
|
||||
appUtils.setupModalSearchTool = function(modalObject) {
|
||||
|
||||
if (typeof modalObject !== 'undefined') {
|
||||
|
||||
// Create a button on the top fieldset that will open the modal window
|
||||
var modalButton = appUtils.generateButton('btn_modal_open', 'Open Search Tool');
|
||||
modalButton.click(function() {
|
||||
appUtils.setToken('dd_modal_search_time.earliest', appUtils.getToken('earliest'));
|
||||
appUtils.setToken('dd_modal_search_time.latest', appUtils.getToken('latest'));
|
||||
modalObject.show();
|
||||
});
|
||||
|
||||
// add modal button to end of top fieldset after the last input / submit button
|
||||
var topFieldset = $('.dashboard-body').find('.fieldset').first();
|
||||
if (topFieldset.length > 0) {
|
||||
var topFieldsetChildren = topFieldset.children();
|
||||
if (topFieldsetChildren.length > 0) {
|
||||
var i = topFieldsetChildren.length - 1;
|
||||
for(i; i >= 0 ; i--) {
|
||||
var lastChild = $(topFieldsetChildren[i]);
|
||||
if (!lastChild.hasClass('form-submit') && !lastChild.hasClass('input')) {
|
||||
// continue, go to next previous child
|
||||
} else {
|
||||
lastChild.after(modalButton);
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (i < 0) {
|
||||
topFieldset.append(modalButton);
|
||||
}
|
||||
} else {
|
||||
topFieldset.append(modalButton);
|
||||
}
|
||||
}
|
||||
|
||||
defaultTokenModel.on("change:dd_modal_search_value", function(model, value, options) {
|
||||
appUtils.checkEmptyTokenFocus("dd_modal_search_value", value);
|
||||
});
|
||||
|
||||
submittedTokenModel.on("change:dd_modal_search_value", function(model, value, options) {
|
||||
appUtils.setToken("dd_modal_search_value_internal", appUtils.parseModalSearchTerm("dd_modal_search_value", value), true);
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
// Parse and clean the search input string from modal
|
||||
// window search tool.
|
||||
// Returns the cleaned value.
|
||||
// name: name of the token used for the search input text box
|
||||
// value: value obtained from the text box
|
||||
appUtils.parseModalSearchTerm = function(name, value) {
|
||||
|
||||
if (typeof value === 'undefined') {
|
||||
return undefined;
|
||||
} else if (value.toString().trim() === "") {
|
||||
appUtils.setToken(name, undefined, true);
|
||||
return undefined;
|
||||
}
|
||||
|
||||
var valueCleaned = value.toString().replace(/\'|\"|\||%|\[|\]|\(|\)|\=/g, '');
|
||||
|
||||
if (valueCleaned !== value) {
|
||||
alert('Search string contained disallowed characters (\'\"%|[]()=). They have been stripped in the applied search value.');
|
||||
}
|
||||
|
||||
value = valueCleaned.trim();
|
||||
|
||||
if (value === "") {
|
||||
alert("Applied search string is empty. Please enter a valid search string.");
|
||||
appUtils.setToken(name, undefined, true);
|
||||
return undefined;
|
||||
}
|
||||
|
||||
return value;
|
||||
};
|
||||
|
||||
|
||||
// Parse and clean the selected host value. A valid input sets
// the internal host token used by the dashboard searches.
// Returns a boolean value indicating whether the token has changed.
|
||||
// name: name of the token used for the input text box
|
||||
// value: value obtained from the text box
|
||||
appUtils.parseDashboardHostTokens = function(name, value) {
|
||||
|
||||
var newHostValue = undefined;
|
||||
var currentHostValue = appUtils.getSubmittedToken('dd_target_host_internal');
|
||||
var submit = false;
|
||||
|
||||
if (typeof value !== 'undefined') {
|
||||
|
||||
// trim whitespace
|
||||
var cleanedValue = value.trim();
|
||||
|
||||
// reset initial value if nothing is left after cleaning,
|
||||
// but don't call submit
|
||||
if (cleanedValue.length < 1) {
|
||||
appUtils.setToken(name, undefined);
|
||||
} else {
|
||||
|
||||
// alert the user if the host value contains disallowed characters
|
||||
if (typeof cleanedValue !== 'undefined') {
|
||||
var cleanedUser = cleanedValue.replace(/\*|\'|\"|\||%|\[|\]|\(|\)|\=/g, '');
|
||||
if (cleanedUser !== cleanedValue) {
|
||||
alert("The passed Host value contained disallowed characters (*\'\"%|[]()=). They have been stripped from the applied search.");
|
||||
}
|
||||
newHostValue = cleanedUser.trim();
|
||||
}
|
||||
|
||||
// set user value to undefined if an empty string
|
||||
if (typeof newHostValue === 'undefined' || newHostValue.length < 1) {
|
||||
alert("The passed Host value is incomplete. Use the Search Tool to choose a Host.");
|
||||
newHostValue = undefined;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// set host token value if different than the current value
|
||||
if (newHostValue !== currentHostValue) {
|
||||
appUtils.setToken('dd_target_host_internal', newHostValue);
|
||||
submit = true;
|
||||
}
|
||||
|
||||
return submit;
|
||||
};
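// Illustrative usage (sketch, assuming a text input bound to the token
// "dd_target_host"): parse on change and only submit when the internal
// token actually changed, e.g.
//   defaultTokenModel.on("change:dd_target_host", function(model, value) {
//       if (appUtils.parseDashboardHostTokens("dd_target_host", value)) {
//           // submit the default token model here
//       }
//   });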
|
||||
|
||||
// Parse and clean the selected user. A valid input value
// will be split into separate domain and user tokens.
|
||||
// Returns boolean value indicating if the tokens have changed.
|
||||
// name: name of the token used for the input text box
|
||||
// value: value obtained from the text box
|
||||
appUtils.parseDashboardUserTokens = function(name, value) {
|
||||
|
||||
var newUserValue = undefined;
|
||||
var currentUserValue = appUtils.getSubmittedToken('dd_target_user_internal');
|
||||
var newDomainValue = undefined;
|
||||
var currentDomainValue = appUtils.getSubmittedToken('dd_target_domain_internal');
|
||||
var submit = false;
|
||||
|
||||
if (typeof value !== 'undefined') {
|
||||
|
||||
// trim whitespace
|
||||
var cleanedValue = value.trim();
|
||||
|
||||
// reset initial value if nothing is left after cleaning,
|
||||
// but don't call submit
|
||||
if (cleanedValue.length < 1) {
|
||||
appUtils.setToken(name, undefined);
|
||||
} else {
|
||||
|
||||
// inspect the data for validity
|
||||
var regex = /([^\x5c]*)(?:\x5c+)?([^\x5c]*)?/g;
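// the expected format is DOMAIN\user (\x5c is a backslash): the first capture
// group holds the domain portion and the second the user name, e.g.
// "CORP\jdoe" yields dom = "CORP" and user = "jdoe"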
|
||||
var match = regex.exec(cleanedValue);
|
||||
var dom = match.length > 1 ? match[1] : undefined;
|
||||
var user = match.length > 2 ? match[2] : undefined;
|
||||
|
||||
// alert the user if there are disallowed characters
|
||||
if (typeof user !== 'undefined') {
|
||||
var cleanedUser = user.replace(/\*|\'|\"|\||%|\[|\]|\(|\)|\=/g, '');
|
||||
if (cleanedUser !== user) {
|
||||
alert("The passed User value contained disallowed characters (*\'\"%|[]()=). They have been stripped from the applied search.");
|
||||
}
|
||||
newUserValue = cleanedUser.trim();
|
||||
}
|
||||
|
||||
// alert the user if there are disallowed characters
|
||||
if (typeof dom !== 'undefined') {
|
||||
var cleanedDomain = dom.replace(/\*|\'|\"|\||%|\[|\]|\(|\)|\=/g, '');
|
||||
if (cleanedDomain !== dom) {
|
||||
alert("The passed Domain value contained disallowed characters (*\'\"%|[]()=). They have been stripped from the applied search.");
|
||||
}
|
||||
newDomainValue = cleanedDomain.trim();
|
||||
}
|
||||
|
||||
// set user value to undefined if an empty string
|
||||
if (typeof newUserValue === 'undefined' || newUserValue.length < 1) {
|
||||
alert("The passed User value is incomplete. Use the Search Tool to choose a User.");
|
||||
newUserValue = undefined;
|
||||
}
|
||||
|
||||
// set domain value to undefined if an empty string
|
||||
if (typeof newDomainValue === 'undefined' || newDomainValue.length < 1) {
|
||||
alert("The passed Domain value is incomplete. Use the Search Tool to choose a Domain.");
|
||||
newDomainValue = undefined;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// set user token value if different than the current value
|
||||
if (newUserValue !== currentUserValue) {
|
||||
appUtils.setToken('dd_target_user_internal', newUserValue);
|
||||
submit = true;
|
||||
}
|
||||
|
||||
// set domain token value if different than the current value
|
||||
if (newDomainValue !== currentDomainValue) {
|
||||
appUtils.setToken('dd_target_domain_internal', newDomainValue);
|
||||
submit = true;
|
||||
}
|
||||
return submit;
|
||||
};
|
||||
|
||||
return appUtils;
|
||||
})();
|
||||
});
|
||||
}).call(this);
|
||||
@ -0,0 +1,29 @@
|
||||
BSD 3-Clause License
|
||||
|
||||
Copyright (c) 2017, Ryan Thibodeaux
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are met:
|
||||
|
||||
* Redistributions of source code must retain the above copyright notice, this
|
||||
list of conditions and the following disclaimer.
|
||||
|
||||
* Redistributions in binary form must reproduce the above copyright notice,
|
||||
this list of conditions and the following disclaimer in the documentation
|
||||
and/or other materials provided with the distribution.
|
||||
|
||||
* Neither the name of the copyright holder nor the names of its
|
||||
contributors may be used to endorse or promote products derived from
|
||||
this software without specific prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
||||
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
|
||||
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
|
||||
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
|
||||
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
|
||||
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
|
||||
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
@ -0,0 +1,101 @@
|
||||
/*
|
||||
* Copyright (c) 2017, Ryan Thibodeaux. All Rights Reserved
|
||||
* see included LICENSE file (BSD 3-clause)
|
||||
*/
|
||||
|
||||
/* top-level entity */
|
||||
.modal-text-msg {
|
||||
position: fixed;
|
||||
top: 40%;
|
||||
left: 50%;
|
||||
width: 450px;
|
||||
margin-left: -225px;
|
||||
z-index: 200000;
|
||||
}
|
||||
|
||||
.modal-text-msg-unactivated {
|
||||
display: none !important;
|
||||
}
|
||||
.modal-text-msg-activated {}
|
||||
|
||||
.modal-text-msg > .modal-body {
|
||||
padding: 15px 20px 10px 20px;
|
||||
max-height: 800px;
|
||||
}
|
||||
|
||||
/* background */
|
||||
.modal-text-msg-backdrop {
|
||||
opacity: 0.5;
|
||||
background-color: black;
|
||||
z-index: 100000;
|
||||
cursor: pointer;
|
||||
}
|
||||
|
||||
.modal-text-msg-backdrop-clear {
|
||||
opacity: 0.0 !important;
|
||||
}
|
||||
|
||||
/* header items */
|
||||
.modal-text-msg > .modal-header {
|
||||
padding: 7px 0px 7px 20px
|
||||
}
|
||||
|
||||
.modal-text-msg > .modal-header {
|
||||
background: #90b5ea;
|
||||
-webkit-border-top-left-radius: 5px;
|
||||
-moz-border-radius-topleft: 5px;
|
||||
border-top-left-radius: 5px;
|
||||
-webkit-border-top-right-radius: 5px;
|
||||
-moz-border-radius-topright: 5px;
|
||||
border-top-right-radius: 5px;
|
||||
border-radius: 5px 5px 0 0;
|
||||
}
|
||||
.modal-text-msg > .info {
|
||||
background: #90b5ea;
|
||||
}
|
||||
.modal-text-msg > .debug {
|
||||
background: #f5f5f5;
|
||||
}
|
||||
.modal-text-msg > .warn {
|
||||
background: #dd9754;
|
||||
}
|
||||
.modal-text-msg > .error {
|
||||
background: indianred;
|
||||
}
|
||||
.modal-text-msg > .modal-header > .modal-title {
|
||||
line-height: 22px;
|
||||
overflow-wrap: break-word;
|
||||
padding-right: 30px;
|
||||
font-size: 20px;
|
||||
font-weight: bold;
|
||||
margin: 0;
|
||||
}
|
||||
|
||||
/* Text content elements */
|
||||
.modal-text-msg p {
|
||||
font-size:14px;
|
||||
line-height: 20px;
|
||||
}
|
||||
|
||||
/* footer items */
|
||||
.modal-text-msg > .modal-footer {
|
||||
padding: 5px 10px 5px 10px;
|
||||
background-color: #f5f5f5;
|
||||
border-radius: 0 0 5px 5px;
|
||||
-webkit-border-bottom-left-radius: 5px;
|
||||
-moz-border-radius-bottomleft: 5px;
|
||||
border-bottom-left-radius: 5px;
|
||||
-webkit-border-bottom-right-radius: 5px;
|
||||
-moz-border-radius-bottomright: 5px;
|
||||
border-bottom-right-radius: 5px;
|
||||
}
|
||||
|
||||
.modal-text-msg-close:before {
|
||||
color: #000000;
|
||||
font-size: 18px;
|
||||
font-weight: bold;
|
||||
}
|
||||
|
||||
.modal-text-msg-close:hover:before {
|
||||
color: #000000;
|
||||
}
|
||||
@ -0,0 +1,157 @@
|
||||
/**
|
||||
* @fileoverview Class definition for Modal Text Message
|
||||
* @author Ryan Thibodeaux
|
||||
* @version 1.0.1
|
||||
*/
|
||||
|
||||
/*
|
||||
* Copyright (c) 2017, Ryan Thibodeaux. All Rights Reserved
|
||||
* see included LICENSE file (BSD 3-clause)
|
||||
*/
|
||||
|
||||
define(function(require, exports, module) {
|
||||
|
||||
"use strict";
|
||||
|
||||
var _ = require('underscore');
|
||||
var $ = require('jquery');
|
||||
var Backbone = require('backbone');
|
||||
|
||||
require("css!/static/app/metricator-for-nmon/components/modaltextmsg/modaltextmsg.css");
|
||||
|
||||
// escapes any HTML passed into string
|
||||
function escapeHTML(str) {
|
||||
var div = document.createElement('div');
|
||||
div.appendChild(document.createTextNode(str));
|
||||
return div.innerHTML;
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new ModalTextMsg object.
|
||||
* @class
|
||||
* @classdesc Modal Text Message class that displays messages as a modal box.
|
||||
* @param {Object} options
|
||||
* @param {String} options.type Type of text message to show
|
||||
* @param {String} options.title Title for the text message
|
||||
* @param {String} options.message Message content string
|
||||
* @param {String} options.id HTML ID to use
|
||||
*/
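/* Illustrative usage (sketch):
 *   var msg = new ModalTextMsg({
 *       type: "warn",
 *       title: "Heads up",
 *       message: "First line<br/>Second line"
 *   });
 *   msg.show();
 */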
|
||||
var ModalTextMsg = Backbone.View.extend({
|
||||
|
||||
className: 'ModalTextMsg',
|
||||
content: undefined,
|
||||
|
||||
defaults: {
|
||||
title: "", // title to show on top of the modal window
|
||||
type: "info", // the type of modal message [info, debug, warn, error]
|
||||
message: "", // the message to display in the modal window
|
||||
id: "ModalTextMsgID", // the html ID to use for the modal text message
|
||||
},
|
||||
|
||||
// initialize ModalTextMsg object
|
||||
initialize: function(options) {
|
||||
this.options = options;
|
||||
this.options.title = (typeof this.options.title === 'undefined' ? this.defaults.title : escapeHTML(this.options.title).trim());
|
||||
this.options.type = (typeof this.options.type === 'undefined' ? this.defaults.type : escapeHTML(this.options.type).trim().toLowerCase());
|
||||
this.options.message = (typeof this.options.message === 'undefined' ? this.defaults.message : escapeHTML(this.options.message).trim());
|
||||
this.options.id = (typeof this.options.id === 'undefined' ? this.defaults.id : escapeHTML(this.options.id).trim());
|
||||
this.template = _.template(this.template);
|
||||
|
||||
// enforce the type to be of a specific value
|
||||
switch(this.options.type) {
|
||||
case "error":
|
||||
this.options.title = (this.options.title === "" ? "Error" : this.options.title);
|
||||
break;
|
||||
case "warn":
|
||||
this.options.title = (this.options.title === "" ? "Warning" : this.options.title);
|
||||
break;
|
||||
case "debug":
|
||||
this.options.title = (this.options.title === "" ? "Debug" : this.options.title);
|
||||
break;
|
||||
case "info":
|
||||
default:
|
||||
this.options.type = "info";
|
||||
this.options.title = (this.options.title === "" ? "Info" : this.options.title);
|
||||
break;
|
||||
}
|
||||
|
||||
// setup content div by breaking up message into
|
||||
// paragraphs for every <br/> tag found
|
||||
var c = document.createElement('div');
|
||||
c.id = "modal-text-msg-content";
|
||||
var msgParts = this.options.message.split(/<br\/>|&lt;br\/&gt;/); // the message is HTML-escaped above, so match the escaped tag as well
|
||||
msgParts.forEach( function(str) {
|
||||
var para = document.createElement("p");
|
||||
var t = document.createTextNode(str.trim());
|
||||
para.appendChild(t);
|
||||
c.appendChild(para);
|
||||
});
|
||||
this.content = c;
|
||||
|
||||
// render the content and add it to the HTML body but don't show it
|
||||
(this.render()).$el.addClass('modal-text-msg-unactivated');
|
||||
$(document.body).append(this.el);
|
||||
},
|
||||
|
||||
// click listeners
|
||||
events: {
|
||||
'click .modal-text-msg-close' : 'close',
|
||||
'click .modal-text-msg-backdrop' : 'close'
|
||||
},
|
||||
|
||||
// render the content based on the template
|
||||
render: function() {
|
||||
this.$el.html(this.template({
|
||||
id : this.options.id,
|
||||
title : this.options.title,
|
||||
type : this.options.type
|
||||
}));
|
||||
this.$el.find(".modal-body").append(this.content);
|
||||
return this;
|
||||
},
|
||||
|
||||
// show the modal text message window
|
||||
show: function() {
|
||||
if (this.$el.hasClass('modal-text-msg-unactivated')) {
|
||||
this.$el.removeClass('modal-text-msg-unactivated').addClass('modal-text-msg-activated');
|
||||
}
|
||||
this.updateVisibility();
|
||||
return this;
|
||||
},
|
||||
|
||||
// close the modal window and destroy the content
|
||||
close: function() {
|
||||
this.unbind();
|
||||
this.remove();
|
||||
this.updateVisibility();
|
||||
return this;
|
||||
},
|
||||
|
||||
// update visibility of modal windows that
|
||||
// have been activated
|
||||
updateVisibility: function() {
|
||||
// make the last window visible and all others invisible
|
||||
var modals = $(".modal-text-msg-activated");
|
||||
if (modals.length > 0) {
|
||||
modals.each(function(i,m) {
|
||||
if (i == modals.length - 1) {
|
||||
$(m).show();
|
||||
} else {
|
||||
$(m).hide();
|
||||
}
|
||||
});
|
||||
}
|
||||
},
|
||||
|
||||
// html template
|
||||
template: '<div id="<%- id %>" class="modal modal-text-msg" role="dialog">' +
|
||||
'<div class="modal-header <%- type %>">' +
|
||||
'<div class="modal-title"><%- title %></div>' +
|
||||
'</div>' +
|
||||
'<div class="modal-body"></div>' +
|
||||
'<div class="modal-footer"><button class="close modal-text-msg-close"/></div>' +
|
||||
'</div>' +
|
||||
'<div class="modal-backdrop modal-text-msg-backdrop"></div>'
|
||||
});
|
||||
return ModalTextMsg;
|
||||
});
|
||||
@ -0,0 +1,138 @@
|
||||
/**
|
||||
* @fileoverview Controlling logic for the Modal Text Message feature
|
||||
* @author Ryan Thibodeaux
|
||||
* @version 1.0.1
|
||||
*/
|
||||
|
||||
/*
|
||||
* Copyright (c) 2017, Ryan Thibodeaux. All Rights Reserved
|
||||
* see included LICENSE file (BSD 3-clause)
|
||||
*/
|
||||
|
||||
(function() {
|
||||
require([
|
||||
"jquery",
|
||||
"ModalTextMsg",
|
||||
"splunkjs/mvc",
|
||||
"splunkjs/ready!",
|
||||
"splunkjs/mvc/simplexml/ready!"
|
||||
], function($, ModalTextMsg, mvc) {
|
||||
|
||||
"use strict";
|
||||
|
||||
// get token models and setup modifier functions
|
||||
var defaultTokenModel = mvc.Components.get('default');
|
||||
var submittedTokenModel = mvc.Components.get('submitted');
|
||||
var urlTokenModel = mvc.Components.get('url');
|
||||
|
||||
function setToken(name, value, submit) {
|
||||
defaultTokenModel.set(name, value);
|
||||
if (!!submit) {
|
||||
submitTokens();
|
||||
}
|
||||
}
|
||||
function submitTokens() {
|
||||
if (submittedTokenModel && defaultTokenModel) {
|
||||
submittedTokenModel.set(defaultTokenModel.toJSON());
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// parse passed modal text message title
|
||||
function parseMessageTitle(token, title) {
|
||||
var value = title;
|
||||
if (typeof value !== 'undefined') {
|
||||
if (Object.prototype.toString.call(title) === '[object Array]') {
|
||||
value = title[0];
|
||||
}
|
||||
value = value.trim();
|
||||
if (value.length > 0) {
|
||||
next_title = value;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// parse passed modal text message content and trigger the display
|
||||
function parseMessageToken(token, msg) {
|
||||
var value = msg;
|
||||
if (typeof value !== 'undefined') {
|
||||
if (Object.prototype.toString.call(msg) === '[object Array]') {
|
||||
value = msg[0];
|
||||
}
|
||||
value = value.trim();
|
||||
if (value.length > 0) {
|
||||
next_msg = value;
|
||||
next_msg_type = token.split("_").pop();
|
||||
triggerMsgDisplay();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// show the modal text message
|
||||
function triggerMsgDisplay() {
|
||||
var k = new ModalTextMsg({
|
||||
title : next_title,
|
||||
type : next_msg_type,
|
||||
message : next_msg
|
||||
});
|
||||
k.show();
|
||||
|
||||
next_title = undefined;
|
||||
next_msg = undefined;
|
||||
next_msg_type = undefined;
|
||||
}
|
||||
|
||||
/////////////////////////////////////////
|
||||
/// Start Main Code Here
|
||||
/////////////////////////////////////////
|
||||
|
||||
// array of message tokens in increasing order of priority
|
||||
const MESSAGE_TOKENS = ["modal_msg_title", "modal_msg_debug", "modal_msg_info", "modal_msg_warn", "modal_msg_error"];
|
||||
const MESSAGE_URL_TOKENS = ["modal_msg_url_title", "modal_msg_url_debug", "modal_msg_url_info", "modal_msg_url_warn", "modal_msg_url_error"];
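// Illustrative trigger (sketch): a SimpleXML drilldown or search event handler
// can set one of the submitted tokens above to raise a dialog, e.g.
//   <set token="modal_msg_error">The lookup generation failed</set>
// The token is cleared again once the message has been displayed.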
|
||||
var next_title = undefined;
|
||||
var next_msg = undefined;
|
||||
var next_msg_type = undefined;
|
||||
|
||||
// parse and display messages passed via URL tokens
|
||||
var urlTokensSet = urlTokenModel.keys();
|
||||
for (var i = 0; i < MESSAGE_URL_TOKENS.length; i++) {
|
||||
if (urlTokensSet.indexOf(MESSAGE_URL_TOKENS[i]) >= 0) {
|
||||
if (MESSAGE_URL_TOKENS[i] === "modal_msg_url_title") {
|
||||
parseMessageTitle(MESSAGE_URL_TOKENS[i], urlTokenModel.get(MESSAGE_URL_TOKENS[i]));
|
||||
} else {
|
||||
parseMessageToken(MESSAGE_URL_TOKENS[i], urlTokenModel.get(MESSAGE_URL_TOKENS[i]));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// listen for changes to the message type tokens
|
||||
MESSAGE_TOKENS.forEach(function(str) {
|
||||
submittedTokenModel.on("change:" + str, function(model, value, options) {
|
||||
if (str === "modal_msg_title") {
|
||||
parseMessageTitle(str, value);
|
||||
} else {
|
||||
parseMessageToken(str, value);
|
||||
}
|
||||
if (typeof value !== 'undefined') {
|
||||
setToken(str, undefined, true);
|
||||
}
|
||||
});
|
||||
|
||||
// check whether the token was already set at page load; rapid token
// changes may not be caught by the listener above because it is not
// registered early enough on Splunk 6.5+
|
||||
var currentValue = submittedTokenModel.get(str);
|
||||
var urlValue = urlTokenModel.get(str);
|
||||
if (typeof currentValue !== "undefined" && currentValue.length > 0) {
|
||||
if (typeof urlValue === "undefined" || urlValue.length < 1) {
|
||||
setToken(str, currentValue + " ", true);
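// the trailing space above forces a change event on the token model,
// so the listener registered earlier fires and displays the message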
|
||||
}
|
||||
}
|
||||
});
|
||||
},function(err) {
|
||||
// error callback
|
||||
// the error has a list of modules that failed
|
||||
var failedId = err.requireModules && err.requireModules[0];
|
||||
console.error("Error when loading dependencies in Modal Text Message wrapper: ", err);
|
||||
});
|
||||
}).call(this);
|
||||
@ -0,0 +1,10 @@
|
||||
{
|
||||
"name": "parallelcoords",
|
||||
"version": "1.0.0",
|
||||
"main": "parallelcoords.js",
|
||||
"ignore": [],
|
||||
"dependencies": {
|
||||
"d3": "3.3.x"
|
||||
},
|
||||
"devDependencies": {}
|
||||
}
|
||||
@ -0,0 +1,26 @@
|
||||
Copyright (c) 2012, Kai Chang
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are met:
|
||||
|
||||
* Redistributions of source code must retain the above copyright notice, this
|
||||
list of conditions and the following disclaimer.
|
||||
|
||||
* Redistributions in binary form must reproduce the above copyright notice,
|
||||
this list of conditions and the following disclaimer in the documentation
|
||||
and/or other materials provided with the distribution.
|
||||
|
||||
* The name Kai Chang may not be used to endorse or promote products
|
||||
derived from this software without specific prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
||||
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
DISCLAIMED. IN NO EVENT SHALL MICHAEL BOSTOCK BE LIABLE FOR ANY DIRECT,
|
||||
INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
|
||||
BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
|
||||
OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
|
||||
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
|
||||
EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
@ -0,0 +1,34 @@
|
||||
.parcoords > svg, .parcoords > canvas {
|
||||
font: 14px sans-serif;
|
||||
position: absolute;
|
||||
}
|
||||
.parcoords > canvas {
|
||||
pointer-events: none;
|
||||
}
|
||||
.parcoords rect.background {
|
||||
fill: transparent;
|
||||
}
|
||||
.parcoords rect.background:hover {
|
||||
fill: rgba(120,120,120,0.2);
|
||||
}
|
||||
.parcoords .resize rect {
|
||||
fill: rgba(0,0,0,0.1);
|
||||
}
|
||||
.parcoords rect.extent {
|
||||
fill: rgba(255,255,255,0.25);
|
||||
stroke: rgba(0,0,0,0.6);
|
||||
}
|
||||
.parcoords .axis line, .parcoords .axis path {
|
||||
fill: none;
|
||||
stroke: #222;
|
||||
shape-rendering: crispEdges;
|
||||
}
|
||||
.parcoords canvas {
|
||||
opacity: 1;
|
||||
-moz-transition: opacity 0.3s;
|
||||
-webkit-transition: opacity 0.3s;
|
||||
-o-transition: opacity 0.3s;
|
||||
}
|
||||
.parcoords canvas.faded {
|
||||
opacity: 0.25;
|
||||
}
|
||||
@ -0,0 +1,598 @@
|
||||
define(function(require, exports, module) {
|
||||
|
||||
var d3 = require("../../d3/d3");
|
||||
require("css!./d3-parcoords.css");
|
||||
|
||||
/// BEGIN LIBRARY CODE
|
||||
//
|
||||
d3.parcoords = function(config) {
|
||||
var __ = {
|
||||
data: [],
|
||||
dimensions: [],
|
||||
dimensionTitles: {},
|
||||
types: {},
|
||||
brushed: false,
|
||||
mode: "default",
|
||||
rate: 20,
|
||||
width: 600,
|
||||
height: 300,
|
||||
margin: { top: 24, right: 0, bottom: 12, left: 0 },
|
||||
color: "#069",
|
||||
composite: "source-over",
|
||||
alpha: 0.7
|
||||
};
|
||||
|
||||
extend(__, config);
|
||||
var pc = function(selection) {
|
||||
selection = pc.selection = d3.select(selection);
|
||||
|
||||
__.width = selection[0][0].clientWidth;
|
||||
__.height = selection[0][0].clientHeight;
|
||||
|
||||
// canvas data layers
|
||||
["shadows", "marks", "foreground", "highlight"].forEach(function(layer) {
|
||||
canvas[layer] = selection
|
||||
.append("canvas")
|
||||
.attr("class", layer)[0][0];
|
||||
ctx[layer] = canvas[layer].getContext("2d");
|
||||
});
|
||||
|
||||
// svg tick and brush layers
|
||||
pc.svg = selection
|
||||
.append("svg")
|
||||
.attr("width", __.width)
|
||||
.attr("height", __.height)
|
||||
.append("svg:g")
|
||||
.attr("transform", "translate(" + __.margin.left + "," + __.margin.top + ")");
|
||||
|
||||
return pc;
|
||||
};
|
||||
var events = d3.dispatch.apply(this,["render", "resize", "highlight", "brush"].concat(d3.keys(__))),
|
||||
w = function() { return __.width - __.margin.right - __.margin.left; },
|
||||
h = function() { return __.height - __.margin.top - __.margin.bottom },
|
||||
flags = {
|
||||
brushable: false,
|
||||
reorderable: false,
|
||||
axes: false,
|
||||
interactive: false,
|
||||
shadows: false,
|
||||
debug: false
|
||||
},
|
||||
xscale = d3.scale.ordinal(),
|
||||
yscale = {},
|
||||
dragging = {},
|
||||
line = d3.svg.line(),
|
||||
axis = d3.svg.axis().orient("left").ticks(5),
|
||||
g, // groups for axes, brushes
|
||||
ctx = {},
|
||||
canvas = {};
|
||||
|
||||
// side effects for setters
|
||||
var side_effects = d3.dispatch.apply(this,d3.keys(__))
|
||||
.on("composite", function(d) { ctx.foreground.globalCompositeOperation = d.value; })
|
||||
.on("alpha", function(d) { ctx.foreground.globalAlpha = d.value; })
|
||||
.on("width", function(d) { pc.resize(); })
|
||||
.on("height", function(d) { pc.resize(); })
|
||||
.on("margin", function(d) { pc.resize(); })
|
||||
.on("rate", function(d) { rqueue.rate(d.value); })
|
||||
.on("data", function(d) {
|
||||
if (flags.shadows) paths(__.data, ctx.shadows);
|
||||
})
|
||||
.on("dimensions", function(d) {
|
||||
xscale.domain(__.dimensions);
|
||||
if (flags.interactive) pc.render().updateAxes();
|
||||
});
|
||||
|
||||
// expose the state of the chart
|
||||
pc.state = __;
|
||||
pc.flags = flags;
|
||||
|
||||
// create getter/setters
|
||||
getset(pc, __, events);
|
||||
|
||||
// expose events
|
||||
d3.rebind(pc, events, "on");
|
||||
|
||||
// tick formatting
|
||||
d3.rebind(pc, axis, "ticks", "orient", "tickValues", "tickSubdivide", "tickSize", "tickPadding", "tickFormat");
|
||||
|
||||
// getter/setter with event firing
|
||||
function getset(obj,state,events) {
|
||||
d3.keys(state).forEach(function(key) {
|
||||
obj[key] = function(x) {
|
||||
if (!arguments.length) return state[key];
|
||||
var old = state[key];
|
||||
state[key] = x;
|
||||
side_effects[key].call(pc,{"value": x, "previous": old});
|
||||
events[key].call(pc,{"value": x, "previous": old});
|
||||
return obj;
|
||||
};
|
||||
});
|
||||
};
|
||||
|
||||
function extend(target, source) {
|
||||
for (var key in source) {
|
||||
target[key] = source[key];
|
||||
}
|
||||
return target;
|
||||
};
|
||||
pc.autoscale = function() {
|
||||
// yscale
|
||||
var defaultScales = {
|
||||
"date": function(k) {
|
||||
return d3.time.scale()
|
||||
.domain(d3.extent(__.data, function(d) {
|
||||
return d[k] ? d[k].getTime() : null;
|
||||
}))
|
||||
.range([h()+1, 1])
|
||||
},
|
||||
"number": function(k) {
|
||||
return d3.scale.linear()
|
||||
.domain(d3.extent(__.data, function(d) { return +d[k]; }))
|
||||
.range([h()+1, 1])
|
||||
},
|
||||
"string": function(k) {
|
||||
return d3.scale.ordinal()
|
||||
.domain(__.data.map(function(p) { return p[k]; }))
|
||||
.rangePoints([h()+1, 1])
|
||||
}
|
||||
};
|
||||
|
||||
__.dimensions.forEach(function(k) {
|
||||
yscale[k] = defaultScales[__.types[k]](k);
|
||||
});
|
||||
|
||||
// hack to remove ordinal dimensions with many values
|
||||
pc.dimensions(pc.dimensions().filter(function(p,i) {
|
||||
var uniques = yscale[p].domain().length;
|
||||
if (__.types[p] == "string" && (uniques > 60 || uniques < 2)) {
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}));
|
||||
|
||||
// xscale
|
||||
xscale.rangePoints([0, w()], 1);
|
||||
|
||||
// canvas sizes
|
||||
pc.selection.selectAll("canvas")
|
||||
.style("margin-top", __.margin.top + "px")
|
||||
.style("margin-left", __.margin.left + "px")
|
||||
.attr("width", w()+2)
|
||||
.attr("height", h()+2)
|
||||
|
||||
// default styles, needs to be set when canvas width changes
|
||||
ctx.foreground.strokeStyle = __.color;
|
||||
ctx.foreground.lineWidth = 1.4;
|
||||
ctx.foreground.globalCompositeOperation = __.composite;
|
||||
ctx.foreground.globalAlpha = __.alpha;
|
||||
ctx.highlight.lineWidth = 3;
|
||||
ctx.shadows.strokeStyle = "#dadada";
|
||||
|
||||
return this;
|
||||
};
|
||||
pc.detectDimensions = function() {
|
||||
pc.types(pc.detectDimensionTypes(__.data));
|
||||
pc.dimensions(d3.keys(pc.types()));
|
||||
return this;
|
||||
};
|
||||
|
||||
// a better "typeof" from this post: http://stackoverflow.com/questions/7390426/better-way-to-get-type-of-a-javascript-variable
|
||||
pc.toType = function(v) {
|
||||
return ({}).toString.call(v).match(/\s([a-zA-Z]+)/)[1].toLowerCase()
|
||||
};
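// e.g. pc.toType([]) returns "array", pc.toType(null) returns "null"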
|
||||
|
||||
// try to coerce to number before returning type
|
||||
pc.toTypeCoerceNumbers = function(v) {
|
||||
if ((parseFloat(v) == v) && (v != null)) return "number";
|
||||
return pc.toType(v);
|
||||
};
|
||||
|
||||
// attempt to determine types of each dimension based on first row of data
|
||||
pc.detectDimensionTypes = function(data) {
|
||||
var types = {}
|
||||
d3.keys(data[0])
|
||||
.forEach(function(col) {
|
||||
types[col] = pc.toTypeCoerceNumbers(data[0][col]);
|
||||
});
|
||||
return types;
|
||||
};
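// e.g. pc.detectDimensionTypes([{host: "server1", cpu: "42"}])
// returns { host: "string", cpu: "number" }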
|
||||
pc.render = function() {
|
||||
// try to autodetect dimensions and create scales
|
||||
if (!__.dimensions.length) pc.detectDimensions();
|
||||
if (!(__.dimensions[0] in yscale)) pc.autoscale();
|
||||
|
||||
pc.render[__.mode]();
|
||||
|
||||
events.render.call(this);
|
||||
return this;
|
||||
};
|
||||
|
||||
pc.render.default = function() {
|
||||
pc.clear('foreground');
|
||||
if (__.brushed) {
|
||||
__.brushed.forEach(path_foreground);
|
||||
} else {
|
||||
__.data.forEach(path_foreground);
|
||||
}
|
||||
};
|
||||
|
||||
var rqueue = d3.renderQueue(path_foreground)
|
||||
.rate(50)
|
||||
.clear(function() { pc.clear('foreground'); });
|
||||
|
||||
pc.render.queue = function() {
|
||||
if (__.brushed) {
|
||||
rqueue(__.brushed);
|
||||
} else {
|
||||
rqueue(__.data);
|
||||
}
|
||||
};
|
||||
pc.shadows = function() {
|
||||
flags.shadows = true;
|
||||
if (__.data.length > 0) paths(__.data, ctx.shadows);
|
||||
return this;
|
||||
};
|
||||
|
||||
// draw little dots on the axis line where data intersects
|
||||
pc.axisDots = function() {
|
||||
var ctx = pc.ctx.marks;
|
||||
ctx.globalAlpha = d3.min([1/Math.pow(__.data.length, 1/2), 1]);
|
||||
__.data.forEach(function(d) {
|
||||
__.dimensions.map(function(p,i) {
|
||||
ctx.fillRect(position(p)-0.75,yscale[p](d[p])-0.75,1.5,1.5);
|
||||
});
|
||||
});
|
||||
return this;
|
||||
};
|
||||
|
||||
// draw single polyline
|
||||
function color_path(d, ctx) {
|
||||
ctx.strokeStyle = d3.functor(__.color)(d);
|
||||
ctx.beginPath();
|
||||
__.dimensions.map(function(p,i) {
|
||||
if (i == 0) {
|
||||
ctx.moveTo(position(p),yscale[p](d[p]));
|
||||
} else {
|
||||
ctx.lineTo(position(p),yscale[p](d[p]));
|
||||
}
|
||||
});
|
||||
ctx.stroke();
|
||||
};
|
||||
|
||||
// draw many polylines of the same color
|
||||
function paths(data, ctx) {
|
||||
ctx.clearRect(-1,-1,w()+2,h()+2);
|
||||
ctx.beginPath();
|
||||
data.forEach(function(d) {
|
||||
__.dimensions.map(function(p,i) {
|
||||
if (i == 0) {
|
||||
ctx.moveTo(position(p),yscale[p](d[p]));
|
||||
} else {
|
||||
ctx.lineTo(position(p),yscale[p](d[p]));
|
||||
}
|
||||
});
|
||||
});
|
||||
ctx.stroke();
|
||||
};
|
||||
|
||||
function path_foreground(d) {
|
||||
return color_path(d, ctx.foreground);
|
||||
};
|
||||
|
||||
function path_highlight(d) {
|
||||
return color_path(d, ctx.highlight);
|
||||
};
|
||||
pc.clear = function(layer) {
|
||||
ctx[layer].clearRect(0,0,w()+2,h()+2);
|
||||
return this;
|
||||
};
|
||||
pc.createAxes = function() {
|
||||
if (g) pc.removeAxes();
|
||||
|
||||
// Add a group element for each dimension.
|
||||
g = pc.svg.selectAll(".dimension")
|
||||
.data(__.dimensions, function(d) { return d; })
|
||||
.enter().append("svg:g")
|
||||
.attr("class", "dimension")
|
||||
.attr("transform", function(d) { return "translate(" + xscale(d) + ")"; })
|
||||
|
||||
// Add an axis and title.
|
||||
g.append("svg:g")
|
||||
.attr("class", "axis")
|
||||
.attr("transform", "translate(0,0)")
|
||||
.each(function(d) { d3.select(this).call(axis.scale(yscale[d])); })
|
||||
.append("svg:text")
|
||||
.attr({
|
||||
"text-anchor": "middle",
|
||||
"y": 0,
|
||||
"transform": "translate(0,-12)",
|
||||
"x": 0,
|
||||
"class": "label"
|
||||
})
|
||||
.text(function(d) {
|
||||
return d in __.dimensionTitles ? __.dimensionTitles[d] : d; // dimension display names
|
||||
})
|
||||
|
||||
flags.axes = true;
|
||||
return this;
|
||||
};
|
||||
|
||||
pc.removeAxes = function() {
|
||||
g.remove();
|
||||
return this;
|
||||
};
|
||||
|
||||
pc.updateAxes = function() {
|
||||
var g_data = pc.svg.selectAll(".dimension")
|
||||
.data(__.dimensions, function(d) { return d; })
|
||||
|
||||
g_data.enter().append("svg:g")
|
||||
.attr("class", "dimension")
|
||||
.attr("transform", function(p) { return "translate(" + position(p) + ")"; })
|
||||
.style("opacity", 0)
|
||||
.append("svg:g")
|
||||
.attr("class", "axis")
|
||||
.attr("transform", "translate(0,0)")
|
||||
.each(function(d) { d3.select(this).call(axis.scale(yscale[d])); })
|
||||
.append("svg:text")
|
||||
.attr({
|
||||
"text-anchor": "middle",
|
||||
"y": 0,
|
||||
"transform": "translate(0,-12)",
|
||||
"x": 0,
|
||||
"class": "label"
|
||||
})
|
||||
.text(String);
|
||||
|
||||
g_data.exit().remove();
|
||||
|
||||
g = pc.svg.selectAll(".dimension");
|
||||
|
||||
g.transition().duration(1100)
|
||||
.attr("transform", function(p) { return "translate(" + position(p) + ")"; })
|
||||
.style("opacity", 1)
|
||||
if (flags.shadows) paths(__.data, ctx.shadows);
|
||||
return this;
|
||||
};
|
||||
|
||||
pc.brushable = function() {
|
||||
if (!g) pc.createAxes();
|
||||
|
||||
// Add and store a brush for each axis.
|
||||
g.append("svg:g")
|
||||
.attr("class", "brush")
|
||||
.each(function(d) {
|
||||
d3.select(this).call(
|
||||
yscale[d].brush = d3.svg.brush()
|
||||
.y(yscale[d])
|
||||
.on("brushstart", function() {
|
||||
d3.event.sourceEvent.stopPropagation();
|
||||
})
|
||||
.on("brush", pc.brush)
|
||||
);
|
||||
})
|
||||
.selectAll("rect")
|
||||
.style("visibility", null)
|
||||
.attr("x", -15)
|
||||
.attr("width", 30)
|
||||
flags.brushable = true;
|
||||
return this;
|
||||
};
|
||||
|
||||
// Jason Davies, http://bl.ocks.org/1341281
|
||||
pc.reorderable = function() {
|
||||
if (!g) pc.createAxes();
|
||||
|
||||
g.style("cursor", "move")
|
||||
.call(d3.behavior.drag()
|
||||
.on("dragstart", function(d) {
|
||||
dragging[d] = this.__origin__ = xscale(d);
|
||||
})
|
||||
.on("drag", function(d) {
|
||||
dragging[d] = Math.min(w(), Math.max(0, this.__origin__ += d3.event.dx));
|
||||
__.dimensions.sort(function(a, b) { return position(a) - position(b); });
|
||||
xscale.domain(__.dimensions);
|
||||
pc.render();
|
||||
g.attr("transform", function(d) { return "translate(" + position(d) + ")"; })
|
||||
})
|
||||
.on("dragend", function(d) {
|
||||
delete this.__origin__;
|
||||
delete dragging[d];
|
||||
d3.select(this).transition().attr("transform", "translate(" + xscale(d) + ")");
|
||||
pc.render();
|
||||
}));
|
||||
flags.reorderable = true;
|
||||
return this;
|
||||
};
|
||||
|
||||
// pairs of adjacent dimensions
|
||||
pc.adjacent_pairs = function(arr) {
|
||||
var ret = [];
|
||||
for (var i = 0; i < arr.length-1; i++) {
|
||||
ret.push([arr[i],arr[i+1]]);
|
||||
};
|
||||
return ret;
|
||||
};
|
||||
pc.interactive = function() {
|
||||
flags.interactive = true;
|
||||
return this;
|
||||
};
|
||||
|
||||
// Get data within brushes
|
||||
pc.brush = function() {
|
||||
__.brushed = selected();
|
||||
events.brush.call(pc,__.brushed);
|
||||
pc.render();
|
||||
};
|
||||
|
||||
// expose a few objects
|
||||
pc.xscale = xscale;
|
||||
pc.yscale = yscale;
|
||||
pc.ctx = ctx;
|
||||
pc.canvas = canvas;
|
||||
pc.g = function() { return g; };
|
||||
|
||||
pc.brushReset = function(dimension) {
|
||||
if (g) {
|
||||
g.selectAll('.brush')
|
||||
.each(function(d) {
|
||||
d3.select(this).call(
|
||||
yscale[d].brush.clear()
|
||||
);
|
||||
})
|
||||
pc.brush();
|
||||
}
|
||||
return this;
|
||||
};
|
||||
|
||||
// rescale for height, width and margins
|
||||
// TODO currently assumes chart is brushable, and destroys old brushes
|
||||
pc.resize = function() {
|
||||
// selection size
|
||||
pc.selection.select("svg")
|
||||
.attr("width", __.width)
|
||||
.attr("height", __.height)
|
||||
pc.svg.attr("transform", "translate(" + __.margin.left + "," + __.margin.top + ")");
|
||||
|
||||
// scales
|
||||
pc.autoscale();
|
||||
|
||||
// axes, destroys old brushes. the current brush state should pass through in the future
|
||||
if (g) pc.createAxes().brushable();
|
||||
|
||||
events.resize.call(this, {width: __.width, height: __.height, margin: __.margin});
|
||||
return this;
|
||||
};
|
||||
|
||||
// highlight an array of data
|
||||
pc.highlight = function(data) {
|
||||
pc.clear("highlight");
|
||||
d3.select(canvas.foreground).classed("faded", true);
|
||||
data.forEach(path_highlight);
|
||||
events.highlight.call(this,data);
|
||||
return this;
|
||||
};
|
||||
|
||||
// clear highlighting
|
||||
pc.unhighlight = function(data) {
|
||||
pc.clear("highlight");
|
||||
d3.select(canvas.foreground).classed("faded", false);
|
||||
return this;
|
||||
};
|
||||
|
||||
// calculate 2d intersection of line a->b with line c->d
|
||||
// points are objects with x and y properties
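// e.g. pc.intersection({x:0,y:0}, {x:2,y:2}, {x:0,y:2}, {x:2,y:0}) returns {x:1, y:1}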
|
||||
pc.intersection = function(a, b, c, d) {
|
||||
return {
|
||||
x: ((a.x * b.y - a.y * b.x) * (c.x - d.x) - (a.x - b.x) * (c.x * d.y - c.y * d.x)) / ((a.x - b.x) * (c.y - d.y) - (a.y - b.y) * (c.x - d.x)),
|
||||
y: ((a.x * b.y - a.y * b.x) * (c.y - d.y) - (a.y - b.y) * (c.x * d.y - c.y * d.x)) / ((a.x - b.x) * (c.y - d.y) - (a.y - b.y) * (c.x - d.x))
|
||||
};
|
||||
};
|
||||
|
||||
function is_brushed(p) {
|
||||
return !yscale[p].brush.empty();
|
||||
};
|
||||
|
||||
// data within extents
|
||||
function selected() {
|
||||
var actives = __.dimensions.filter(is_brushed),
|
||||
extents = actives.map(function(p) { return yscale[p].brush.extent(); });
|
||||
|
||||
// test if within range
|
||||
var within = {
|
||||
"date": function(d,p,dimension) {
|
||||
return extents[dimension][0] <= d[p] && d[p] <= extents[dimension][1]
|
||||
},
|
||||
"number": function(d,p,dimension) {
|
||||
return extents[dimension][0] <= d[p] && d[p] <= extents[dimension][1]
|
||||
},
|
||||
"string": function(d,p,dimension) {
|
||||
return extents[dimension][0] <= yscale[p](d[p]) && yscale[p](d[p]) <= extents[dimension][1]
|
||||
}
|
||||
};
|
||||
|
||||
return __.data
|
||||
.filter(function(d) {
|
||||
return actives.every(function(p, dimension) {
|
||||
return within[__.types[p]](d,p,dimension);
|
||||
});
|
||||
});
|
||||
};
|
||||
|
||||
function position(d) {
|
||||
var v = dragging[d];
|
||||
return v == null ? xscale(d) : v;
|
||||
}
|
||||
pc.toString = function() { return "Parallel Coordinates: " + __.dimensions.length + " dimensions (" + d3.keys(__.data[0]).length + " total) , " + __.data.length + " rows"; };
|
||||
|
||||
pc.version = "0.2.2";
|
||||
|
||||
return pc;
|
||||
};
|
||||
|
||||
d3.renderQueue = (function(func) {
|
||||
var _queue = [], // data to be rendered
|
||||
_rate = 10, // number of calls per frame
|
||||
_clear = function() {}, // clearing function
|
||||
_i = 0; // current iteration
|
||||
|
||||
var rq = function(data) {
|
||||
if (data) rq.data(data);
|
||||
rq.invalidate();
|
||||
_clear();
|
||||
rq.render();
|
||||
};
|
||||
|
||||
rq.render = function() {
|
||||
_i = 0;
|
||||
var valid = true;
|
||||
rq.invalidate = function() { valid = false; };
|
||||
|
||||
function doFrame() {
|
||||
if (!valid) return true;
|
||||
if (_i > _queue.length) return true;
|
||||
var chunk = _queue.slice(_i,_i+_rate);
|
||||
_i += _rate;
|
||||
chunk.map(func);
|
||||
}
|
||||
|
||||
d3.timer(doFrame);
|
||||
};
|
||||
|
||||
rq.data = function(data) {
|
||||
rq.invalidate();
|
||||
_queue = data.slice(0);
|
||||
return rq;
|
||||
};
|
||||
|
||||
rq.rate = function(value) {
|
||||
if (!arguments.length) return _rate;
|
||||
_rate = value;
|
||||
return rq;
|
||||
};
|
||||
|
||||
rq.remaining = function() {
|
||||
return _queue.length - _i;
|
||||
};
|
||||
|
||||
// clear the canvas
|
||||
rq.clear = function(func) {
|
||||
if (!arguments.length) {
|
||||
_clear();
|
||||
return rq;
|
||||
}
|
||||
_clear = func;
|
||||
return rq;
|
||||
};
|
||||
|
||||
rq.invalidate = function() {};
|
||||
|
||||
return rq;
|
||||
});
|
||||
|
||||
/// END LIBRARY CODE
|
||||
|
||||
return d3.parcoords;
|
||||
|
||||
});
|
||||
@ -0,0 +1,117 @@
|
||||
// parallel coords!
|
||||
// a visualisation technique for multidimensional categorical data
|
||||
// you can drag the vertical axis for each section to filter things (try it out for yourself)
|
||||
|
||||
// --- settings ---
|
||||
// none for the time being.
|
||||
// TODO: add settings to choose which data goes where
|
||||
|
||||
// --- expected data format ---
|
||||
// a splunk search like this: index=_internal sourcetype=splunkd_access | table method status
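// --- example usage (sketch) ---
// the module path below is an assumption; adjust it to where this component
// lives inside your app:
//   var ParCoords = require("app/metricator-for-nmon/components/parallelcoords/parallelcoords");
//   var pc = new ParCoords({
//       id: "myParcoords",
//       managerid: "mySearchManager",   // id of an existing search manager
//       el: $("#myParcoordsDiv")
//   }).render();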
|
||||
|
||||
define(function(require, exports, module) {
|
||||
|
||||
var _ = require('underscore');
|
||||
var d3 = require("../d3/d3");
|
||||
var parcoords = require("./contrib/d3-parcoords");
|
||||
var SimpleSplunkView = require("splunkjs/mvc/simplesplunkview");
|
||||
|
||||
var ParCoords = SimpleSplunkView.extend({
|
||||
|
||||
className: "splunk-toolkit-parcoords",
|
||||
|
||||
options: {
|
||||
managerid: null, // your MANAGER ID
|
||||
data: "preview", // Results type
|
||||
},
|
||||
|
||||
output_mode: "json_rows",
|
||||
|
||||
initialize: function() {
|
||||
SimpleSplunkView.prototype.initialize.apply(this, arguments);
|
||||
|
||||
this.settings.enablePush("value");
|
||||
|
||||
// Set up the resize callback. The first argument is this view
// instance; jQuery passes it to the handler as event.data
|
||||
$(window).resize(this, _.debounce(this._handleResize, 20));
|
||||
},
|
||||
|
||||
_handleResize: function(e){
|
||||
|
||||
// e.data is the this pointer passed to the callback.
|
||||
// here it refers to this object and we call render()
|
||||
e.data.render();
|
||||
},
|
||||
|
||||
createView: function() {
|
||||
this.$el.html(''); // clear any prior content from the view (e.g. 'waiting for data...')
|
||||
return true;
|
||||
},
|
||||
|
||||
// making the data look how we want it to for updateView to do its job
|
||||
formatData: function(data) {
|
||||
|
||||
// Decide what fields we want
|
||||
// TODO: this should be configurable
|
||||
var fields = _.filter(this.resultsModel.data().fields, function(d){return d[0] !== "_" });
|
||||
var objects = _.map(data, function(row) {
|
||||
var obj = {};
|
||||
_.each(fields, function(field, idx) {
|
||||
if (row[idx] !== null) {
|
||||
obj[field] = row[idx];
|
||||
}
|
||||
else {
|
||||
obj[field] = "";
|
||||
}
|
||||
});
|
||||
|
||||
return obj;
|
||||
});
|
||||
|
||||
data = {
|
||||
'results': objects,
|
||||
'fields': fields
|
||||
}
|
||||
|
||||
return data;
|
||||
},
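// Illustrative shape of the formatted data for a search ending in
// "| table method status":
//   { fields: ["method", "status"],
//     results: [ {method: "GET", status: "200"}, ... ] }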
|
||||
|
||||
updateView: function(viz, data) {
|
||||
var that = this;
|
||||
var availableHeight = parseInt(this.settings.get("height") || this.$el.height());
|
||||
|
||||
this.$el.html('');
|
||||
var fields = data.fields;
|
||||
viz = $("<div id='"+this.id+"_parallelcoords' class='parcoords'>").appendTo(this.el)
|
||||
.css("height", availableHeight)
|
||||
var colorgen = d3.scale.category20();
|
||||
var colors = {};
|
||||
_(data.results).chain()
|
||||
.pluck(fields[0])
|
||||
.uniq()
|
||||
.each(function(d,i) {
|
||||
colors[d] = colorgen(i);
|
||||
});
|
||||
|
||||
var color = function(d) {return colors[d[fields[0]]]; };
|
||||
|
||||
var pc_progressive = d3.parcoords()('#' + this.id + '_parallelcoords')
|
||||
.data(data.results)
|
||||
.color(color)
|
||||
.alpha(0.4)
|
||||
.margin({ top: 24, left: 150, bottom: 12, right: 0 })
|
||||
.mode("queue")
|
||||
.render()
|
||||
.brushable() // enable brushing
|
||||
.interactive() // interactive mode: re-render and update axes when dimensions change
|
||||
.on("brush", function(selected) {
|
||||
that.trigger("select", {selected: selected});
|
||||
});
|
||||
|
||||
pc_progressive.svg.selectAll("text")
|
||||
.style("font", "10px sans-serif");
|
||||
}
|
||||
});
|
||||
return ParCoords;
|
||||
});
|
||||