diff --git a/.gitignore b/.gitignore index 2346d353a..05505fe74 100644 --- a/.gitignore +++ b/.gitignore @@ -15,17 +15,12 @@ proxy.log MANIFEST coverage_report test.log -examples/*/local -examples/**/local.meta -examples/**/*.log tests/searchcommands_data/log/ tests/searchcommands_data/output/ -examples/searchcommands_app/searchcommand_app.log Test Results*.html tests/searchcommands/data/app/app.log splunk_sdk.egg-info/ dist/ -examples/searchcommands_app/package/lib/splunklib tests/searchcommands/apps/app_with_logging_configuration/*.log *.observed venv/ diff --git a/README.md b/README.md index 77dedf876..5fde93107 100644 --- a/README.md +++ b/README.md @@ -5,7 +5,7 @@ #### Version 1.6.19 -The Splunk Enterprise Software Development Kit (SDK) for Python contains library code and examples designed to enable developers to build applications using the Splunk platform. +The Splunk Enterprise Software Development Kit (SDK) for Python contains library code designed to enable developers to build applications using the Splunk platform. The Splunk platform is a search engine and analytic environment that uses a distributed map-reduce architecture to efficiently index, search, and process large time-varying data sets. @@ -18,7 +18,7 @@ The Splunk developer platform enables developers to take advantage of the same t ## Get started with the Splunk Enterprise SDK for Python -The Splunk Enterprise SDK for Python contains library code and examples that show how to programmatically interact with the Splunk platform for a variety of scenarios including searching, saved searches, data inputs, and many more, along with building complete applications. +The Splunk Enterprise SDK for Python contains library code; its examples, located in the [splunk-app-examples](https://github.com/splunk/splunk-app-examples) repository, show how to programmatically interact with the Splunk platform for a variety of scenarios, including searching, saved searches, and data inputs, as well as how to build complete applications. ### Requirements @@ -39,7 +39,7 @@ Here's what you need to get going with the Splunk Enterprise SDK for Python. ### Install the SDK -Use the following commands to install the Splunk Enterprise SDK for Python libraries. However, it's not necessary to install the libraries to run the examples and unit tests from the SDK. +Use the following commands to install the Splunk Enterprise SDK for Python libraries. However, it's not necessary to install the libraries to run the unit tests from the SDK. Use `pip`: @@ -68,8 +68,6 @@ To run the examples and unit tests, you must put the root of the SDK on your PYT export PYTHONPATH=~/splunk-sdk-python -The SDK command-line examples require a common set of arguments that specify the host, port, and login credentials for Splunk Enterprise. For a full list of command-line arguments, include `--help` as an argument to any of the examples. - ### Following are the different ways to connect to Splunk Enterprise #### Using username/password ```python @@ -115,29 +113,9 @@ here is an example of .env file: # Session key for authentication #sessionKey= -#### Run the examples - -Examples are located in the **/splunk-sdk-python/examples** directory. To run the examples at the command line, use the Python interpreter and include any arguments that are required by the example.
In the commands below, replace "examplename" with the name of the specific example in the directory that you want to run: - -Using username and Password - - python examplename.py --username="admin" --password="changeme" - -Using Bearer token - - python examplename.py --bearerToken= - -Using Session key - - python examplename.py --sessionKey="" +#### SDK examples -If you saved your login credentials in the **.env** file, you can omit those arguments: - - python examplename.py - -To get help for an example, use the `--help` argument with an example: - - python examplename.py --help +Examples for the Splunk Enterprise SDK for Python are located in the [splunk-app-examples](https://github.com/splunk/splunk-app-examples) repository. For details, see [Examples using the Splunk Enterprise SDK for Python](https://dev.splunk.com/enterprise/docs/devtools/python/sdk-python/examplespython) on the Splunk Developer Portal. #### Run the unit tests @@ -162,10 +140,9 @@ The test suite uses Python's standard library, the built-in `unittest` library, | Directory | Description | |:--------- |:---------------------------------------------------------- | |/docs | Source for Sphinx-based docs and build | -|/examples | Examples demonstrating various SDK features | |/splunklib | Source for the Splunk library modules | |/tests | Source for unit tests | -|/utils | Source for utilities shared by the examples and unit tests | +|/utils | Source for utilities shared by the unit tests | ### Customization * When working with custom search commands such as Custom Streaming Commands or Custom Generating Commands, we may need to add new fields to the records based on certain conditions. @@ -216,7 +193,7 @@ class GeneratorTest(GeneratingCommand): ### Access metadata of modular inputs app * In the stream_events() method, we can access modular input app metadata from the InputDefinition object -* See [GitHub Commit](https://github.com/splunk/splunk-sdk-python/blob/develop/examples/github_commits/bin/github_commits.py) Modular input App example for reference. +* See the [github_commits](https://github.com/splunk/splunk-app-examples/blob/master/modularinputs/python/github_commits/bin/github_commits.py) modular input example for reference. ```python def stream_events(self, inputs, ew): # other code @@ -262,7 +239,7 @@ To learn about our branching model, see [Branching Model](https://github.com/spl | [REST API Reference Manual](https://docs.splunk.com/Documentation/Splunk/latest/RESTREF/RESTprolog) | Splunk REST API reference documentation | | [Splunk>Docs](https://docs.splunk.com/Documentation) | General documentation for the Splunk platform | | [GitHub Wiki](https://github.com/splunk/splunk-sdk-python/wiki/) | Documentation for this SDK's repository on GitHub | - +| [Splunk Enterprise SDK for Python Examples](https://github.com/splunk/splunk-app-examples) | Examples for this SDK | ## Community diff --git a/docker-compose.yml b/docker-compose.yml index 84c427072..0527a30bd 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -9,16 +9,6 @@ services: - SPLUNK_HEC_TOKEN=11111111-1111-1111-1111-1111111111113 - SPLUNK_PASSWORD=changed!
- SPLUNK_APPS_URL=https://github.com/splunk/sdk-app-collection/releases/download/v1.1.0/sdkappcollection.tgz - volumes: - - ./examples/github_forks:/opt/splunk/etc/apps/github_forks - - ./splunklib:/opt/splunk/etc/apps/github_forks/lib/splunklib - - ./examples/random_numbers:/opt/splunk/etc/apps/random_numbers - - ./splunklib:/opt/splunk/etc/apps/random_numbers/lib/splunklib - - ./examples/github_commits:/opt/splunk/etc/apps/github_commits - - ./splunklib:/opt/splunk/etc/apps/github_commits/lib/splunklib - - ./examples/searchcommands_app/package:/opt/splunk/etc/apps/searchcommands_app - - ./splunklib:/opt/splunk/etc/apps/searchcommands_app/lib/splunklib - - ./examples/twitted/twitted:/opt/splunk/etc/apps/twitted ports: - 8000:8000 - 8088:8088 diff --git a/examples/abc/README.md b/examples/abc/README.md deleted file mode 100644 index d824e816e..000000000 --- a/examples/abc/README.md +++ /dev/null @@ -1,24 +0,0 @@ -# The ABCs of Calling the Splunk REST API - -This example shows three different approaches to making calls against the -Splunk REST API. - -The examples all happen to retrieve a list of installed apps from a given -Splunk instance, but they could apply as easily to any other area of the REST -API. - -* **a.py** uses Python's standard httplib module to make calls against the - Splunk REST API. This example does not use any SDK libraries to access - Splunk. - -* **b.py** users the SDK's lower level binding module to access the REST API. - The binding module handles authentication details (and some additional book- - keeping details not demonstrated by this sample) and the result is a much - simplified interaction with Splunk, but its still very much a 'wire' level - coding experience. - -* **c.py** uses the SDK client module, which abstracts away most most of the - wire level details of invoking the REST API, but that still presents a - stateless interface to Splunk the attempts to faithfully represent the - semantics of the underlying REST API. - diff --git a/examples/abc/a.py b/examples/abc/a.py deleted file mode 100755 index 8e378539b..000000000 --- a/examples/abc/a.py +++ /dev/null @@ -1,70 +0,0 @@ -# Copyright 2011-2015 Splunk, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"): you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -"""Retrieves a list of installed apps from Splunk by making REST API calls - using Python's httplib module.""" - -from __future__ import absolute_import -from __future__ import print_function -import splunklib.six.moves.http_client -import urllib -from xml.etree import ElementTree - -HOST = "localhost" -PORT = 8089 -USERNAME = "admin" -PASSWORD = "changeme" - -# Present credentials to Splunk and retrieve the session key -connection = six.moves.http_client.HTTPSConnection(HOST, PORT) -body = urllib.urlencode({'username': USERNAME, 'password': PASSWORD}) -headers = { - 'Content-Type': "application/x-www-form-urlencoded", - 'Content-Length': str(len(body)), - 'Host': HOST, - 'User-Agent': "a.py/1.0", - 'Accept': "*/*" -} -try: - connection.request("POST", "/services/auth/login", body, headers) - response = connection.getresponse() -finally: - connection.close() -if response.status != 200: - raise Exception("%d (%s)" % (response.status, response.reason)) -body = response.read() -sessionKey = ElementTree.XML(body).findtext("./sessionKey") - -# Now make the request to Splunk for list of installed apps -connection = six.moves.http_client.HTTPSConnection(HOST, PORT) -headers = { - 'Content-Length': "0", - 'Host': HOST, - 'User-Agent': "a.py/1.0", - 'Accept': "*/*", - 'Authorization': "Splunk %s" % sessionKey, -} -try: - connection.request("GET", "/services/apps/local", "", headers) - response = connection.getresponse() -finally: - connection.close() -if response.status != 200: - raise Exception("%d (%s)" % (response.status, response.reason)) - -body = response.read() -data = ElementTree.XML(body) -apps = data.findall("{http://www.w3.org/2005/Atom}entry/{http://www.w3.org/2005/Atom}title") -for app in apps: - print(app.text) diff --git a/examples/abc/b.py b/examples/abc/b.py deleted file mode 100755 index 2367b68bc..000000000 --- a/examples/abc/b.py +++ /dev/null @@ -1,46 +0,0 @@ -# Copyright 2011-2015 Splunk, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"): you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Retrieves a list of installed apps from Splunk using the binding module.""" - -from __future__ import absolute_import -from __future__ import print_function -import sys, os -sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..", "..")) - -from xml.etree import ElementTree - -import splunklib.binding as binding - -HOST = "localhost" -PORT = 8089 -USERNAME = "admin" -PASSWORD = "changeme" - -context = binding.connect( - host=HOST, - port=PORT, - username=USERNAME, - password=PASSWORD) - -response = context.get('apps/local') -if response.status != 200: - raise Exception("%d (%s)" % (response.status, response.reason)) - -body = response.body.read() -data = ElementTree.XML(body) -apps = data.findall("{http://www.w3.org/2005/Atom}entry/{http://www.w3.org/2005/Atom}title") -for app in apps: - print(app.text) - diff --git a/examples/abc/c.py b/examples/abc/c.py deleted file mode 100755 index 9ba23ca72..000000000 --- a/examples/abc/c.py +++ /dev/null @@ -1,35 +0,0 @@ -# Copyright 2011-2015 Splunk, Inc. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"): you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Retrieves a list of installed apps from Splunk using the client module.""" -from __future__ import absolute_import -from __future__ import print_function -import sys, os -sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..", "..")) - -import splunklib.client as client - -HOST = "localhost" -PORT = 8089 -USERNAME = "admin" -PASSWORD = "changeme" - -service = client.connect( - host=HOST, - port=PORT, - username=USERNAME, - password=PASSWORD) - -for app in service.apps: - print(app.name) diff --git a/examples/analytics/README.md b/examples/analytics/README.md deleted file mode 100644 index d99f6bf14..000000000 --- a/examples/analytics/README.md +++ /dev/null @@ -1,153 +0,0 @@ -# Analytics Example - -The Analytics example is meant as a sample implementation of a -"mini-Google Analytics" or "mini-Mixpanel" style web service. - -At its core, it allows for logging of arbitrary events together with arbitrary -`key=value` properties for each event. You don't need to define a schema -up front and some events can have more properties than others (even within -the same kind of event). - -This type of service is especially suited to Splunk, given the temporal nature -of the data, together with the lack of schema and no need to update past events. - -## Architecture - -The main component of the Analytics example are two pieces of reusable code -meant to manage input and output of data into Splunk. - -### AnalyticsTracker - -The `input.py` file defines the "input" side of the Analytics service. If you -wanted to log some analytics data in your app, you would have the `AnalyticsTracker` -class defined in this file in order to do so. - -The `AnalyticsTracker` class encapsulates all the information required to log -events to Splunk. This includes the "application" name (think of it as a sort -of namespace, if you wanted to log multiple apps' worth of events into the -same Splunk instance) and Splunk connection parameters. It also takes -an optional "index" parameter, but that's there mostly for testing purposes. - -So, for example, you could write an `AnalyticsTracker` like this: - -```python -from analytics.input import AnalyticsTracker - -splunk_opts = ... -tracker = AnalyticsTracker("myapp", splunk_opts) -``` - -Once you have an instance of the `AnalyticsTracker`, you can use it to track -your events. For example, if you wanted to log an event regarding a user -logging in, and you wanted to add the name of the user and also his user -agent, you could do something like this: - -```python -userid = ... -username = ... -useragent = ... -tracker.track("login", distinct_id = user_id, "username"=username, "useragent"=useragent) -``` - -The first parameter is the name of the event you want to log. The `distinct_id` -parameter specifies a "unique ID". You can use the unique ID to group events, -for example if you only wanted to count unique logins by user_id. The rest of -the parameters are arbitrary `key=value` pairs that you can also extract. 
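Note that the `tracker.track(...)` snippet in the removed README above is not valid Python: a string literal such as `"username"` cannot appear as a keyword-argument name, and the call uses `user_id` where the variable `userid` was defined. A minimal corrected sketch, with hypothetical values, assuming `track()` accepts arbitrary keyword properties as the surrounding text describes:

```python
# Corrected version of the removed README's example. The values are
# hypothetical, and track() is assumed to accept arbitrary keyword
# properties, per the README's own description.
user_id = "u-12345"        # unique ID used to group events
username = "alice"
useragent = "Mozilla/5.0"

tracker.track("login", distinct_id=user_id,
              username=username, useragent=useragent)
```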
- -Internally, when you ask the `AnalyticsTracker` to log an event, it will construct -a textual representation of that event. It will also make sure to encode all the -content to fit properly in Splunk. For example, for the above event, it -would look something like this: - -``` -2011-08-08T11:45:17.735045 application="myapp" event="login" distinct_id="..." analytics_prop__username="..." analytics_prop__useragent="..." -``` - -The reason that we use the `analytics_prop__` prefix is to make sure there is -no ambiguity between known fields such as `application` and `event` and user -supplied `key=value=` properties. - -### AnalyticsRetriever - -Similarly to `AnalyticsTracker`, the `output.py` file defines the "output" side -of the Analytics service. If you want to extract the events you logged in using -`AnalyticsTracker`, you'd use the `AnalyticsRetriever` class. - -Creating an `AnalyticsRetriever` instance is identical to the `AnalyticsTracker`: - -```python -from analytics.output import AnalyticsRetriever - -splunk_opts = ... -retriever = AnalyticsRetriever("myapp", splunk_opts) -``` - -Once you have an instance of the `AnalyticsRetriever`, you can use its variety -of methods in order to query information about events. - -Executing each of the methods will execute a Splunk search, retrieve the -results, and transform them into a well-defined Python dictionary format. - -#### Examples - -Listing all applications: - -```python -print retriever.applications() -``` - -Listing all the types of events in the system: - -```python -print retriever.events() -``` - -Listing all the union of all the properties used for a particular event: - -```python -event_name = "login" -print retriever.properties(event_name) -``` - -Getting all the values of a given property for some event: - -```python -event_name = "login" -prop_name = "useragent" -print retriever.property_values(event_name, prop_name)) -``` - -Getting a "graph" of event information over time for all events of a -specific application (this uses the default TimeRange.MONTH): - -```python -print retriever.events_over_time() -``` - -Getting a graph of event information over time for a specific event: - -```python -print retriever.events_over_time(event_name="login") -``` - -### server.py - -The `server.py` file provides a sample "web app" built on top of the -Analytics service. It lists applications, and for each application -you can see a graph of events over time, properties, etc. - -We make use of the excellent open source -[flot](http://code.google.com/p/flot/) graphing library to render -our Javascript graphs. We also use the [`bottle.py`](http://bottlepy.org) -micro-web framework. - -## Running the Sample - -In order to run the sample, you can simply execute: - - ./server.py - -And navigate to http://localhost:8080/applications. I suggest you input some -events in beforehand, though `server.py` logs some events itself -as you navigate the site (it's meta analytics!). - diff --git a/examples/analytics/__init__.py b/examples/analytics/__init__.py deleted file mode 100644 index f0d6e7ecc..000000000 --- a/examples/analytics/__init__.py +++ /dev/null @@ -1,19 +0,0 @@ -#!/usr/bin/env python -# -# Copyright 2011-2015 Splunk, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"): you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from __future__ import absolute_import -from . import input -from . import output diff --git a/examples/analytics/bottle.py b/examples/analytics/bottle.py deleted file mode 100755 index 76ae393a9..000000000 --- a/examples/analytics/bottle.py +++ /dev/null @@ -1,2531 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Bottle is a fast and simple micro-framework for small web applications. It -offers request dispatching (Routes) with url parameter support, templates, -a built-in HTTP Server and adapters for many third party WSGI/HTTP-server and -template engines - all in a single file and with no dependencies other than the -Python Standard Library. - -Homepage and documentation: http://bottlepy.org/ - -Copyright (c) 2011, Marcel Hellkamp. -License: MIT (see LICENSE.txt for details) -""" - -from __future__ import with_statement - -from __future__ import absolute_import -from __future__ import print_function -from splunklib import six -from six.moves import map -from six.moves import zip -__author__ = 'Marcel Hellkamp' -__version__ = '0.9.6' -__license__ = 'MIT' - -import base64 -import cgi -import email.utils -import functools -import hmac -import splunklib.six.moves.http_client -import imp -import itertools -import mimetypes -import os -import re -import subprocess -import sys -import tempfile -import splunklib.six.moves._thread -import threading -import time -import warnings - -from six.moves.http_cookies import SimpleCookie -from tempfile import TemporaryFile -from traceback import format_exc -from urllib import urlencode, quote as urlquote, unquote as urlunquote -from urlparse import urljoin, SplitResult as UrlSplitResult - -try: from collections import MutableMapping as DictMixin -except ImportError: # pragma: no cover - from UserDict import DictMixin - -try: from urlparse import parse_qs -except ImportError: # pragma: no cover - from cgi import parse_qs - -try: import splunklib.six.moves.cPickle as pickle -except ImportError: # pragma: no cover - import pickle - -try: from json import dumps as json_dumps -except ImportError: # pragma: no cover - try: from simplejson import dumps as json_dumps - except ImportError: # pragma: no cover - try: from django.utils.simplejson import dumps as json_dumps - except ImportError: # pragma: no cover - json_dumps = None - -NCTextIOWrapper = None -if sys.version_info >= (3,0,0): # pragma: no cover - # See Request.POST - from io import BytesIO - def touni(x, enc='utf8', err='strict'): - """ Convert anything to unicode """ - return str(x, enc, err) if isinstance(x, bytes) else str(x) - if sys.version_info < (3,2,0): - from io import TextIOWrapper - class NCTextIOWrapper(TextIOWrapper): - ''' Garbage collecting an io.TextIOWrapper(buffer) instance closes - the wrapped buffer. This subclass keeps it open. 
''' - def close(self): pass -else: - from StringIO import StringIO as BytesIO - bytes = str - def touni(x, enc='utf8', err='strict'): - """ Convert anything to unicode """ - return x if isinstance(x, six.text_type) else six.text_type(str(x), enc, err) - -def tob(data, enc='utf8'): - """ Convert anything to bytes """ - return data.encode(enc) if isinstance(data, six.text_type) else bytes(data) - -# Convert strings and unicode to native strings -if sys.version_info >= (3,0,0): - tonat = touni -else: - tonat = tob -tonat.__doc__ = """ Convert anything to native strings """ - - -# Backward compatibility -def depr(message, critical=False): - if critical: raise DeprecationWarning(message) - warnings.warn(message, DeprecationWarning, stacklevel=3) - - -# Small helpers -def makelist(data): - if isinstance(data, (tuple, list, set, dict)): return list(data) - elif data: return [data] - else: return [] - - -class DictProperty(object): - ''' Property that maps to a key in a local dict-like attribute. ''' - def __init__(self, attr, key=None, read_only=False): - self.attr, self.key, self.read_only = attr, key, read_only - - def __call__(self, func): - functools.update_wrapper(self, func, updated=[]) - self.getter, self.key = func, self.key or func.__name__ - return self - - def __get__(self, obj, cls): - if obj is None: return self - key, storage = self.key, getattr(obj, self.attr) - if key not in storage: storage[key] = self.getter(obj) - return storage[key] - - def __set__(self, obj, value): - if self.read_only: raise AttributeError("Read-Only property.") - getattr(obj, self.attr)[self.key] = value - - def __delete__(self, obj): - if self.read_only: raise AttributeError("Read-Only property.") - del getattr(obj, self.attr)[self.key] - -def cached_property(func): - ''' A property that, if accessed, replaces itself with the computed - value. Subsequent accesses won't call the getter again. ''' - return DictProperty('__dict__')(func) - -class lazy_attribute(object): # Does not need configuration -> lower-case name - ''' A property that caches itself to the class object. ''' - def __init__(self, func): - functools.update_wrapper(self, func, updated=[]) - self.getter = func - - def __get__(self, obj, cls): - value = self.getter(cls) - setattr(cls, self.__name__, value) - return value - - - - - - -############################################################################### -# Exceptions and Events ######################################################## -############################################################################### - - -class BottleException(Exception): - """ A base class for exceptions used by bottle. 
""" - pass - - -class HTTPResponse(BottleException): - """ Used to break execution and immediately finish the response """ - def __init__(self, output='', status=200, header=None): - super(BottleException, self).__init__("HTTP Response %d" % status) - self.status = int(status) - self.output = output - self.headers = HeaderDict(header) if header else None - - def apply(self, response): - if self.headers: - for key, value in self.headers.iterallitems(): - response.headers[key] = value - response.status = self.status - - -class HTTPError(HTTPResponse): - """ Used to generate an error page """ - def __init__(self, code=500, output='Unknown Error', exception=None, - traceback=None, header=None): - super(HTTPError, self).__init__(output, code, header) - self.exception = exception - self.traceback = traceback - - def __repr__(self): - return template(ERROR_PAGE_TEMPLATE, e=self) - - - - - - -############################################################################### -# Routing ###################################################################### -############################################################################### - - -class RouteError(BottleException): - """ This is a base class for all routing related exceptions """ - - -class RouteReset(BottleException): - """ If raised by a plugin or request handler, the route is reset and all - plugins are re-applied. """ - - -class RouteSyntaxError(RouteError): - """ The route parser found something not supported by this router """ - - -class RouteBuildError(RouteError): - """ The route could not been built """ - - -class Router(object): - ''' A Router is an ordered collection of route->target pairs. It is used to - efficiently match WSGI requests against a number of routes and return - the first target that satisfies the request. The target may be anything, - usually a string, ID or callable object. A route consists of a path-rule - and a HTTP method. - - The path-rule is either a static path (e.g. `/contact`) or a dynamic - path that contains wildcards (e.g. `/wiki/:page`). By default, wildcards - consume characters up to the next slash (`/`). To change that, you may - add a regular expression pattern (e.g. `/wiki/:page#[a-z]+#`). - - For performance reasons, static routes (rules without wildcards) are - checked first. Dynamic routes are searched in order. Try to avoid - ambiguous or overlapping rules. - - The HTTP method string matches only on equality, with two exceptions: - * ´GET´ routes also match ´HEAD´ requests if there is no appropriate - ´HEAD´ route installed. - * ´ANY´ routes do match if there is no other suitable route installed. - - An optional ``name`` parameter is used by :meth:`build` to identify - routes. - ''' - - default = '[^/]+' - - @lazy_attribute - def syntax(cls): - return re.compile(r'(?(rule, build_info) mapping - self.static = {} # Cache for static routes: {path: {method: target}} - self.dynamic = [] # Cache for dynamic routes. See _compile() - - def add(self, rule, method, target, name=None, static=False): - ''' Add a new route or replace the target for an existing route. ''' - if static: - depr("Use a backslash to escape ':' in routes.") # 0.9 - rule = rule.replace(':','\\:') - - if rule in self.routes: - self.routes[rule][method.upper()] = target - else: - self.routes[rule] = {method.upper(): target} - self.rules.append(rule) - if self.static or self.dynamic: # Clear precompiler cache. 
- self.static, self.dynamic = {}, {} - if name: - self.named[name] = (rule, None) - - def build(self, _name, *anon, **args): - ''' Return a string that matches a named route. Use keyword arguments - to fill out named wildcards. Remaining arguments are appended as a - query string. Raises RouteBuildError or KeyError.''' - if _name not in self.named: - raise RouteBuildError("No route with that name.", _name) - rule, pairs = self.named[_name] - if not pairs: - token = self.syntax.split(rule) - parts = [p.replace('\\:',':') for p in token[::3]] - names = token[1::3] - if len(parts) > len(names): names.append(None) - pairs = list(zip(parts, names)) - self.named[_name] = (rule, pairs) - try: - anon = list(anon) - url = [s if k is None - else s+str(args.pop(k)) if k else s+str(anon.pop()) - for s, k in pairs] - except IndexError: - msg = "Not enough arguments to fill out anonymous wildcards." - raise RouteBuildError(msg) - except KeyError as e: - raise RouteBuildError(*e.args) - - if args: url += ['?', urlencode(args)] - return ''.join(url) - - def match(self, environ): - ''' Return a (target, url_agrs) tuple or raise HTTPError(404/405). ''' - targets, urlargs = self._match_path(environ) - if not targets: - raise HTTPError(404, "Not found: " + repr(environ['PATH_INFO'])) - method = environ['REQUEST_METHOD'].upper() - if method in targets: - return targets[method], urlargs - if method == 'HEAD' and 'GET' in targets: - return targets['GET'], urlargs - if 'ANY' in targets: - return targets['ANY'], urlargs - allowed = [verb for verb in targets if verb != 'ANY'] - if 'GET' in allowed and 'HEAD' not in allowed: - allowed.append('HEAD') - raise HTTPError(405, "Method not allowed.", - header=[('Allow',",".join(allowed))]) - - def _match_path(self, environ): - ''' Optimized PATH_INFO matcher. ''' - path = environ['PATH_INFO'] or '/' - # Assume we are in a warm state. Search compiled rules first. - match = self.static.get(path) - if match: return match, {} - for combined, rules in self.dynamic: - match = combined.match(path) - if not match: continue - gpat, match = rules[match.lastindex - 1] - return match, gpat.match(path).groupdict() if gpat else {} - # Lazy-check if we are really in a warm state. If yes, stop here. - if self.static or self.dynamic or not self.routes: return None, {} - # Cold state: We have not compiled any rules yet. Do so and try again. - if not environ.get('wsgi.run_once'): - self._compile() - return self._match_path(environ) - # For run_once (CGI) environments, don't compile. Just check one by one. - epath = path.replace(':','\\:') # Turn path into its own static rule. - match = self.routes.get(epath) # This returns static rule only. - if match: return match, {} - for rule in self.rules: - #: Skip static routes to reduce re.compile() calls. - if rule.count(':') < rule.count('\\:'): continue - match = self._compile_pattern(rule).match(path) - if match: return self.routes[rule], match.groupdict() - return None, {} - - def _compile(self): - ''' Prepare static and dynamic search structures. 
''' - self.static = {} - self.dynamic = [] - def fpat_sub(m): - return m.group(0) if len(m.group(1)) % 2 else m.group(1) + '(?:' - for rule in self.rules: - target = self.routes[rule] - if not self.syntax.search(rule): - self.static[rule.replace('\\:',':')] = target - continue - gpat = self._compile_pattern(rule) - fpat = re.sub(r'(\\*)(\(\?P<[^>]*>|\((?!\?))', fpat_sub, gpat.pattern) - gpat = gpat if gpat.groupindex else None - try: - combined = '%s|(%s)' % (self.dynamic[-1][0].pattern, fpat) - self.dynamic[-1] = (re.compile(combined), self.dynamic[-1][1]) - self.dynamic[-1][1].append((gpat, target)) - except (AssertionError, IndexError) as e: # AssertionError: Too many groups - self.dynamic.append((re.compile('(^%s$)'%fpat), - [(gpat, target)])) - except re.error as e: - raise RouteSyntaxError("Could not add Route: %s (%s)" % (rule, e)) - - def _compile_pattern(self, rule): - ''' Return a regular expression with named groups for each wildcard. ''' - out = '' - for i, part in enumerate(self.syntax.split(rule)): - if i%3 == 0: out += re.escape(part.replace('\\:',':')) - elif i%3 == 1: out += '(?P<%s>' % part if part else '(?:' - else: out += '%s)' % (part or '[^/]+') - return re.compile('^%s$'%out) - - - - - - -############################################################################### -# Application Object ########################################################### -############################################################################### - - -class Bottle(object): - """ WSGI application """ - - def __init__(self, catchall=True, autojson=True, config=None): - """ Create a new bottle instance. - You usually don't do that. Use `bottle.app.push()` instead. - """ - self.routes = [] # List of installed routes including metadata. - self.router = Router() # Maps requests to self.route indices. - self.ccache = {} # Cache for callbacks with plugins applied. - - self.plugins = [] # List of installed plugins. - - self.mounts = {} - self.error_handler = {} - #: If true, most exceptions are caught and returned as :exc:`HTTPError` - self.catchall = catchall - self.config = config or {} - self.serve = True - # Default plugins - self.hooks = self.install(HooksPlugin()) - self.typefilter = self.install(TypeFilterPlugin()) - if autojson: - self.install(JSONPlugin()) - self.install(TemplatePlugin()) - - def optimize(self, *a, **ka): - depr("Bottle.optimize() is obsolete.") - - def mount(self, app, prefix, **options): - ''' Mount an application to a specific URL prefix. The prefix is added - to SCIPT_PATH and removed from PATH_INFO before the sub-application - is called. - - :param app: an instance of :class:`Bottle`. - :param prefix: path prefix used as a mount-point. - - All other parameters are passed to the underlying :meth:`route` call. - ''' - if not isinstance(app, Bottle): - raise TypeError('Only Bottle instances are supported for now.') - prefix = '/'.join([_f for _f in prefix.split('/') if _f]) - if not prefix: - raise TypeError('Empty prefix. 
Perhaps you want a merge()?') - for other in self.mounts: - if other.startswith(prefix): - raise TypeError('Conflict with existing mount: %s' % other) - path_depth = prefix.count('/') + 1 - options.setdefault('method', 'ANY') - options.setdefault('skip', True) - self.mounts[prefix] = app - @self.route('/%s/:#.*#' % prefix, **options) - def mountpoint(): - request.path_shift(path_depth) - return app._handle(request.environ) - - def add_filter(self, ftype, func): - depr("Filters are deprecated and can be replaced with plugins.") #0.9 - self.typefilter.add(ftype, func) - - def install(self, plugin): - ''' Add a plugin to the list of plugins and prepare it for beeing - applied to all routes of this application. A plugin may be a simple - decorator or an object that implements the :class:`Plugin` API. - ''' - if hasattr(plugin, 'setup'): plugin.setup(self) - if not callable(plugin) and not hasattr(plugin, 'apply'): - raise TypeError("Plugins must be callable or implement .apply()") - self.plugins.append(plugin) - self.reset() - return plugin - - def uninstall(self, plugin): - ''' Uninstall plugins. Pass an instance to remove a specific plugin. - Pass a type object to remove all plugins that match that type. - Subclasses are not removed. Pass a string to remove all plugins with - a matching ``name`` attribute. Pass ``True`` to remove all plugins. - The list of affected plugins is returned. ''' - removed, remove = [], plugin - for i, plugin in list(enumerate(self.plugins))[::-1]: - if remove is True or remove is plugin or remove is type(plugin) \ - or getattr(plugin, 'name', True) == remove: - removed.append(plugin) - del self.plugins[i] - if hasattr(plugin, 'close'): plugin.close() - if removed: self.reset() - return removed - - def reset(self, id=None): - ''' Reset all routes (force plugins to be re-applied) and clear all - caches. If an ID is given, only that specific route is affected. ''' - if id is None: self.ccache.clear() - else: self.ccache.pop(id, None) - if DEBUG: - for route in self.routes: - if route['id'] not in self.ccache: - self.ccache[route['id']] = self._build_callback(route) - - def close(self): - ''' Close the application and all installed plugins. ''' - for plugin in self.plugins: - if hasattr(plugin, 'close'): plugin.close() - self.stopped = True - - def match(self, environ): - """ (deprecated) Search for a matching route and return a - (callback, urlargs) tuple. - The first element is the associated route callback with plugins - applied. The second value is a dictionary with parameters extracted - from the URL. The :class:`Router` raises :exc:`HTTPError` (404/405) - on a non-match.""" - depr("This method will change semantics in 0.10.") - return self._match(environ) - - def _match(self, environ): - handle, args = self.router.match(environ) - environ['route.handle'] = handle # TODO move to router? - environ['route.url_args'] = args - try: - return self.ccache[handle], args - except KeyError: - config = self.routes[handle] - callback = self.ccache[handle] = self._build_callback(config) - return callback, args - - def _build_callback(self, config): - ''' Apply plugins to a route and return a new callable. 
''' - wrapped = config['callback'] - plugins = self.plugins + config['apply'] - skip = config['skip'] - try: - for plugin in reversed(plugins): - if True in skip: break - if plugin in skip or type(plugin) in skip: continue - if getattr(plugin, 'name', True) in skip: continue - if hasattr(plugin, 'apply'): - wrapped = plugin.apply(wrapped, config) - else: - wrapped = plugin(wrapped) - if not wrapped: break - functools.update_wrapper(wrapped, config['callback']) - return wrapped - except RouteReset: # A plugin may have changed the config dict inplace. - return self._build_callback(config) # Apply all plugins again. - - def get_url(self, routename, **kargs): - """ Return a string that matches a named route """ - scriptname = request.environ.get('SCRIPT_NAME', '').strip('/') + '/' - location = self.router.build(routename, **kargs).lstrip('/') - return urljoin(urljoin('/', scriptname), location) - - def route(self, path=None, method='GET', callback=None, name=None, - apply=None, skip=None, **config): - """ A decorator to bind a function to a request URL. Example:: - - @app.route('/hello/:name') - def hello(name): - return 'Hello %s' % name - - The ``:name`` part is a wildcard. See :class:`Router` for syntax - details. - - :param path: Request path or a list of paths to listen to. If no - path is specified, it is automatically generated from the - signature of the function. - :param method: HTTP method (`GET`, `POST`, `PUT`, ...) or a list of - methods to listen to. (default: `GET`) - :param callback: An optional shortcut to avoid the decorator - syntax. ``route(..., callback=func)`` equals ``route(...)(func)`` - :param name: The name for this route. (default: None) - :param apply: A decorator or plugin or a list of plugins. These are - applied to the route callback in addition to installed plugins. - :param skip: A list of plugins, plugin classes or names. Matching - plugins are not installed to this route. ``True`` skips all. - - Any additional keyword arguments are stored as route-specific - configuration and passed to plugins (see :meth:`Plugin.apply`). - """ - if callable(path): path, callback = None, path - - plugins = makelist(apply) - skiplist = makelist(skip) - if 'decorate' in config: - depr("The 'decorate' parameter was renamed to 'apply'") # 0.9 - plugins += makelist(config.pop('decorate')) - if config.pop('no_hooks', False): - depr("The no_hooks parameter is no longer used. Add 'hooks' to the"\ - " list of skipped plugins instead.") # 0.9 - skiplist.append('hooks') - static = config.get('static', False) # depr 0.9 - - def decorator(callback): - for rule in makelist(path) or yieldroutes(callback): - for verb in makelist(method): - verb = verb.upper() - cfg = dict(rule=rule, method=verb, callback=callback, - name=name, app=self, config=config, - apply=plugins, skip=skiplist) - self.routes.append(cfg) - cfg['id'] = self.routes.index(cfg) - self.router.add(rule, verb, cfg['id'], name=name, static=static) - if DEBUG: self.ccache[cfg['id']] = self._build_callback(cfg) - return callback - - return decorator(callback) if callback else decorator - - def get(self, path=None, method='GET', **options): - """ Equals :meth:`route`. """ - return self.route(path, method, **options) - - def post(self, path=None, method='POST', **options): - """ Equals :meth:`route` with a ``POST`` method parameter. """ - return self.route(path, method, **options) - - def put(self, path=None, method='PUT', **options): - """ Equals :meth:`route` with a ``PUT`` method parameter. 
""" - return self.route(path, method, **options) - - def delete(self, path=None, method='DELETE', **options): - """ Equals :meth:`route` with a ``DELETE`` method parameter. """ - return self.route(path, method, **options) - - def error(self, code=500): - """ Decorator: Register an output handler for a HTTP error code""" - def wrapper(handler): - self.error_handler[int(code)] = handler - return handler - return wrapper - - def hook(self, name): - """ Return a decorator that attaches a callback to a hook. """ - def wrapper(func): - self.hooks.add(name, func) - return func - return wrapper - - def add_hook(self, name, func): - depr("Call Bottle.hooks.add() instead.") #0.9 - self.hooks.add(name, func) - - def remove_hook(self, name, func): - depr("Call Bottle.hooks.remove() instead.") #0.9 - self.hooks.remove(name, func) - - def handle(self, path, method='GET'): - """ (deprecated) Execute the first matching route callback and return - the result. :exc:`HTTPResponse` exceptions are caught and returned. - If :attr:`Bottle.catchall` is true, other exceptions are caught as - well and returned as :exc:`HTTPError` instances (500). - """ - depr("This method will change semantics in 0.10. Try to avoid it.") - if isinstance(path, dict): - return self._handle(path) - return self._handle({'PATH_INFO': path, 'REQUEST_METHOD': method.upper()}) - - def _handle(self, environ): - if not self.serve: - depr("Bottle.serve will be removed in 0.10.") - return HTTPError(503, "Server stopped") - try: - callback, args = self._match(environ) - return callback(**args) - except HTTPResponse as r: - return r - except RouteReset: # Route reset requested by the callback or a plugin. - del self.ccache[handle] - return self._handle(environ) # Try again. - except (KeyboardInterrupt, SystemExit, MemoryError): - raise - except Exception as e: - if not self.catchall: raise - return HTTPError(500, "Internal Server Error", e, format_exc(10)) - - def _cast(self, out, request, response, peek=None): - """ Try to convert the parameter into something WSGI compatible and set - correct HTTP headers when possible. - Support: False, str, unicode, dict, HTTPResponse, HTTPError, file-like, - iterable of strings and iterable of unicodes - """ - - # Empty output is done here - if not out: - response.headers['Content-Length'] = 0 - return [] - # Join lists of byte or unicode strings. Mixed lists are NOT supported - if isinstance(out, (tuple, list))\ - and isinstance(out[0], (bytes, six.text_type)): - out = out[0][0:0].join(out) # b'abc'[0:0] -> b'' - # Encode unicode strings - if isinstance(out, six.text_type): - out = out.encode(response.charset) - # Byte Strings are just returned - if isinstance(out, bytes): - response.headers['Content-Length'] = str(len(out)) - return [out] - # HTTPError or HTTPException (recursive, because they may wrap anything) - if isinstance(out, HTTPError): - out.apply(response) - out = self.error_handler.get(out.status, repr)(out) - if isinstance(out, HTTPResponse): - depr('Error handlers must not return :exc:`HTTPResponse`.') #0.9 - return self._cast(out, request, response) - if isinstance(out, HTTPResponse): - out.apply(response) - return self._cast(out.output, request, response) - - # File-like objects. - if hasattr(out, 'read'): - if 'wsgi.file_wrapper' in request.environ: - return request.environ['wsgi.file_wrapper'](out) - elif hasattr(out, 'close') or not hasattr(out, '__iter__'): - return WSGIFileWrapper(out) - - # Handle Iterables. We peek into them to detect their inner type. 
- try: - out = iter(out) - first = next(out) - while not first: - first = next(out) - except StopIteration: - return self._cast('', request, response) - except HTTPResponse as e: - first = e - except Exception as e: - first = HTTPError(500, 'Unhandled exception', e, format_exc(10)) - if isinstance(e, (KeyboardInterrupt, SystemExit, MemoryError))\ - or not self.catchall: - raise - # These are the inner types allowed in iterator or generator objects. - if isinstance(first, HTTPResponse): - return self._cast(first, request, response) - if isinstance(first, bytes): - return itertools.chain([first], out) - if isinstance(first, six.text_type): - return itertools.imap(lambda x: x.encode(response.charset), - itertools.chain([first], out)) - return self._cast(HTTPError(500, 'Unsupported response type: %s'\ - % type(first)), request, response) - - def wsgi(self, environ, start_response): - """ The bottle WSGI-interface. """ - try: - environ['bottle.app'] = self - request.bind(environ) - response.bind() - out = self._handle(environ) - out = self._cast(out, request, response) - # rfc2616 section 4.3 - if response.status in (100, 101, 204, 304) or request.method == 'HEAD': - if hasattr(out, 'close'): out.close() - out = [] - status = '%d %s' % (response.status, HTTP_CODES[response.status]) - start_response(status, response.headerlist) - return out - except (KeyboardInterrupt, SystemExit, MemoryError): - raise - except Exception as e: - if not self.catchall: raise - err = '
<h1>Critical error while processing request: %s</h1>' \ - % environ.get('PATH_INFO', '/') - if DEBUG: - err += '<h2>Error:</h2>\n<pre>\n%s\n</pre>\n' % repr(e) - err += '<h2>Traceback:</h2>\n<pre>\n%s\n</pre>
\n' % format_exc(10) - environ['wsgi.errors'].write(err) #TODO: wsgi.error should not get html - start_response('500 INTERNAL SERVER ERROR', [('Content-Type', 'text/html')]) - return [tob(err)] - - def __call__(self, environ, start_response): - return self.wsgi(environ, start_response) - - - - - - -############################################################################### -# HTTP and WSGI Tools ########################################################## -############################################################################### - - -class Request(threading.local, DictMixin): - """ Represents a single HTTP request using thread-local attributes. - The Request object wraps a WSGI environment and can be used as such. - """ - def __init__(self, environ=None): - """ Create a new Request instance. - - You usually don't do this but use the global `bottle.request` - instance instead. - """ - self.bind(environ or {},) - - def bind(self, environ): - """ Bind a new WSGI environment. - - This is done automatically for the global `bottle.request` - instance on every request. - """ - self.environ = environ - # These attributes are used anyway, so it is ok to compute them here - self.path = '/' + environ.get('PATH_INFO', '/').lstrip('/') - self.method = environ.get('REQUEST_METHOD', 'GET').upper() - - @property - def _environ(self): - depr("Request._environ renamed to Request.environ") - return self.environ - - def copy(self): - ''' Returns a copy of self ''' - return Request(self.environ.copy()) - - def path_shift(self, shift=1): - ''' Shift path fragments from PATH_INFO to SCRIPT_NAME and vice versa. - - :param shift: The number of path fragments to shift. May be negative - to change the shift direction. (default: 1) - ''' - script_name = self.environ.get('SCRIPT_NAME','/') - self['SCRIPT_NAME'], self.path = path_shift(script_name, self.path, shift) - self['PATH_INFO'] = self.path - - def __getitem__(self, key): return self.environ[key] - def __delitem__(self, key): self[key] = ""; del(self.environ[key]) - def __iter__(self): return iter(self.environ) - def __len__(self): return len(self.environ) - def keys(self): return list(self.environ.keys()) - def __setitem__(self, key, value): - """ Shortcut for Request.environ.__setitem__ """ - self.environ[key] = value - todelete = [] - if key in ('PATH_INFO','REQUEST_METHOD'): - self.bind(self.environ) - elif key == 'wsgi.input': todelete = ('body','forms','files','params') - elif key == 'QUERY_STRING': todelete = ('get','params') - elif key.startswith('HTTP_'): todelete = ('headers', 'cookies') - for key in todelete: - if 'bottle.' + key in self.environ: - del self.environ['bottle.' + key] - - @DictProperty('environ', 'bottle.urlparts', read_only=True) - def urlparts(self): - ''' Return a :class:`urlparse.SplitResult` tuple that can be used - to reconstruct the full URL as requested by the client. - The tuple contains: (scheme, host, path, query_string, fragment). - The fragment is always empty because it is not visible to the server. - ''' - env = self.environ - http = env.get('wsgi.url_scheme', 'http') - host = env.get('HTTP_X_FORWARDED_HOST') or env.get('HTTP_HOST') - if not host: - # HTTP 1.1 requires a Host-header. This is for HTTP/1.0 clients. 
- host = env.get('SERVER_NAME', '127.0.0.1') - port = env.get('SERVER_PORT') - if port and port != ('80' if http == 'http' else '443'): - host += ':' + port - spath = self.environ.get('SCRIPT_NAME','').rstrip('/') + '/' - rpath = self.path.lstrip('/') - path = urlquote(urljoin(spath, rpath)) - return UrlSplitResult(http, host, path, env.get('QUERY_STRING'), '') - - @property - def url(self): - """ Full URL as requested by the client. """ - return self.urlparts.geturl() - - @property - def fullpath(self): - """ Request path including SCRIPT_NAME (if present). """ - return urlunquote(self.urlparts[2]) - - @property - def query_string(self): - """ The part of the URL following the '?'. """ - return self.environ.get('QUERY_STRING', '') - - @property - def content_length(self): - """ Content-Length header as an integer, -1 if not specified """ - return int(self.environ.get('CONTENT_LENGTH', '') or -1) - - @property - def header(self): - depr("The Request.header property was renamed to Request.headers") - return self.headers - - @DictProperty('environ', 'bottle.headers', read_only=True) - def headers(self): - ''' Request HTTP Headers stored in a :class:`HeaderDict`. ''' - return WSGIHeaderDict(self.environ) - - @DictProperty('environ', 'bottle.get', read_only=True) - def GET(self): - """ The QUERY_STRING parsed into an instance of :class:`MultiDict`. """ - data = parse_qs(self.query_string, keep_blank_values=True) - get = self.environ['bottle.get'] = MultiDict() - for key, values in six.iteritems(data): - for value in values: - get[key] = value - return get - - @DictProperty('environ', 'bottle.post', read_only=True) - def POST(self): - """ The combined values from :attr:`forms` and :attr:`files`. Values are - either strings (form values) or instances of - :class:`cgi.FieldStorage` (file uploads). - """ - post = MultiDict() - safe_env = {'QUERY_STRING':''} # Build a safe environment for cgi - for key in ('REQUEST_METHOD', 'CONTENT_TYPE', 'CONTENT_LENGTH'): - if key in self.environ: safe_env[key] = self.environ[key] - if NCTextIOWrapper: - fb = NCTextIOWrapper(self.body, encoding='ISO-8859-1', newline='\n') - else: - fb = self.body - data = cgi.FieldStorage(fp=fb, environ=safe_env, keep_blank_values=True) - for item in data.list or []: - post[item.name] = item if item.filename else item.value - return post - - @DictProperty('environ', 'bottle.forms', read_only=True) - def forms(self): - """ POST form values parsed into an instance of :class:`MultiDict`. - - This property contains form values parsed from an `url-encoded` - or `multipart/form-data` encoded POST request bidy. The values are - native strings. - """ - forms = MultiDict() - for name, item in self.POST.iterallitems(): - if not hasattr(item, 'filename'): - forms[name] = item - return forms - - @DictProperty('environ', 'bottle.files', read_only=True) - def files(self): - """ File uploads parsed into an instance of :class:`MultiDict`. - - This property contains file uploads parsed from an - `multipart/form-data` encoded POST request body. The values are - instances of :class:`cgi.FieldStorage`. - """ - files = MultiDict() - for name, item in self.POST.iterallitems(): - if hasattr(item, 'filename'): - files[name] = item - return files - - @DictProperty('environ', 'bottle.params', read_only=True) - def params(self): - """ A combined :class:`MultiDict` with values from :attr:`forms` and - :attr:`GET`. File-uploads are not included. 
""" - params = MultiDict(self.GET) - for key, value in self.forms.iterallitems(): - params[key] = value - return params - - @DictProperty('environ', 'bottle.body', read_only=True) - def _body(self): - """ The HTTP request body as a seekable file-like object. - - This property returns a copy of the `wsgi.input` stream and should - be used instead of `environ['wsgi.input']`. - """ - maxread = max(0, self.content_length) - stream = self.environ['wsgi.input'] - body = BytesIO() if maxread < MEMFILE_MAX else TemporaryFile(mode='w+b') - while maxread > 0: - part = stream.read(min(maxread, MEMFILE_MAX)) - if not part: break - body.write(part) - maxread -= len(part) - self.environ['wsgi.input'] = body - body.seek(0) - return body - - @property - def body(self): - self._body.seek(0) - return self._body - - @property - def auth(self): #TODO: Tests and docs. Add support for digest. namedtuple? - """ HTTP authorization data as a (user, passwd) tuple. (experimental) - - This implementation currently only supports basic auth and returns - None on errors. - """ - return parse_auth(self.headers.get('Authorization','')) - - @DictProperty('environ', 'bottle.cookies', read_only=True) - def COOKIES(self): - """ Cookies parsed into a dictionary. Signed cookies are NOT decoded - automatically. See :meth:`get_cookie` for details. - """ - raw_dict = SimpleCookie(self.headers.get('Cookie','')) - cookies = {} - for cookie in six.itervalues(raw_dict): - cookies[cookie.key] = cookie.value - return cookies - - def get_cookie(self, key, secret=None): - """ Return the content of a cookie. To read a `Signed Cookies`, use the - same `secret` as used to create the cookie (see - :meth:`Response.set_cookie`). If anything goes wrong, None is - returned. - """ - value = self.COOKIES.get(key) - if secret and value: - dec = cookie_decode(value, secret) # (key, value) tuple or None - return dec[1] if dec and dec[0] == key else None - return value or None - - @property - def is_ajax(self): - ''' True if the request was generated using XMLHttpRequest ''' - #TODO: write tests - return self.headers.get('X-Requested-With') == 'XMLHttpRequest' - - -class Response(threading.local): - """ Represents a single HTTP response using thread-local attributes. - """ - - def __init__(self): - self.bind() - - def bind(self): - """ Resets the Response object to its factory defaults. """ - self._COOKIES = None - self.status = 200 - self.headers = HeaderDict() - self.content_type = 'text/html; charset=UTF-8' - - @property - def header(self): - depr("Response.header renamed to Response.headers") - return self.headers - - def copy(self): - ''' Returns a copy of self. ''' - copy = Response() - copy.status = self.status - copy.headers = self.headers.copy() - copy.content_type = self.content_type - return copy - - def wsgiheader(self): - ''' Returns a wsgi conform list of header/value pairs. ''' - for c in self.COOKIES.values(): - if c.OutputString() not in self.headers.getall('Set-Cookie'): - self.headers.append('Set-Cookie', c.OutputString()) - # rfc2616 section 10.2.3, 10.3.5 - if self.status in (204, 304) and 'content-type' in self.headers: - del self.headers['content-type'] - if self.status == 304: - for h in ('allow', 'content-encoding', 'content-language', - 'content-length', 'content-md5', 'content-range', - 'content-type', 'last-modified'): # + c-location, expires? 
- if h in self.headers: - del self.headers[h] - return list(self.headers.iterallitems()) - headerlist = property(wsgiheader) - - @property - def charset(self): - """ Return the charset specified in the content-type header. - - This defaults to `UTF-8`. - """ - if 'charset=' in self.content_type: - return self.content_type.split('charset=')[-1].split(';')[0].strip() - return 'UTF-8' - - @property - def COOKIES(self): - """ A dict-like SimpleCookie instance. Use :meth:`set_cookie` instead. """ - if not self._COOKIES: - self._COOKIES = SimpleCookie() - return self._COOKIES - - def set_cookie(self, key, value, secret=None, **kargs): - ''' Add a cookie or overwrite an old one. If the `secret` parameter is - set, create a `Signed Cookie` (described below). - - :param key: the name of the cookie. - :param value: the value of the cookie. - :param secret: required for signed cookies. (default: None) - :param max_age: maximum age in seconds. (default: None) - :param expires: a datetime object or UNIX timestamp. (default: None) - :param domain: the domain that is allowed to read the cookie. - (default: current domain) - :param path: limits the cookie to a given path (default: /) - - If neither `expires` nor `max_age` are set (default), the cookie - lasts only as long as the browser is not closed. - - Signed cookies may store any pickle-able object and are - cryptographically signed to prevent manipulation. Keep in mind that - cookies are limited to 4kb in most browsers. - - Warning: Signed cookies are not encrypted (the client can still see - the content) and not copy-protected (the client can restore an old - cookie). The main intention is to make pickling and unpickling - save, not to store secret information at client side. - ''' - if secret: - value = touni(cookie_encode((key, value), secret)) - elif not isinstance(value, six.string_types): - raise TypeError('Secret missing for non-string Cookie.') - - self.COOKIES[key] = value - for k, v in six.iteritems(kargs): - self.COOKIES[key][k.replace('_', '-')] = v - - def delete_cookie(self, key, **kwargs): - ''' Delete a cookie. Be sure to use the same `domain` and `path` - parameters as used to create the cookie. ''' - kwargs['max_age'] = -1 - kwargs['expires'] = 0 - self.set_cookie(key, '', **kwargs) - - def get_content_type(self): - """ Current 'Content-Type' header. 
""" - return self.headers['Content-Type'] - - def set_content_type(self, value): - self.headers['Content-Type'] = value - - content_type = property(get_content_type, set_content_type, None, - get_content_type.__doc__) - - - - - - -############################################################################### -# Plugins ###################################################################### -############################################################################### - - - -class JSONPlugin(object): - name = 'json' - - def __init__(self, json_dumps=json_dumps): - self.json_dumps = json_dumps - - def apply(self, callback, context): - dumps = self.json_dumps - if not dumps: return callback - def wrapper(*a, **ka): - rv = callback(*a, **ka) - if isinstance(rv, dict): - response.content_type = 'application/json' - return dumps(rv) - return rv - return wrapper - - - -class HooksPlugin(object): - name = 'hooks' - - def __init__(self): - self.hooks = {'before_request': [], 'after_request': []} - self.app = None - - def _empty(self): - return not (self.hooks['before_request'] or self.hooks['after_request']) - - def setup(self, app): - self.app = app - - def add(self, name, func): - ''' Attach a callback to a hook. ''' - if name not in self.hooks: - raise ValueError("Unknown hook name %s" % name) - was_empty = self._empty() - self.hooks[name].append(func) - if self.app and was_empty and not self._empty(): self.app.reset() - - def remove(self, name, func): - ''' Remove a callback from a hook. ''' - if name not in self.hooks: - raise ValueError("Unknown hook name %s" % name) - was_empty = self._empty() - self.hooks[name].remove(func) - if self.app and not was_empty and self._empty(): self.app.reset() - - def apply(self, callback, context): - if self._empty(): return callback - before_request = self.hooks['before_request'] - after_request = self.hooks['after_request'] - def wrapper(*a, **ka): - for hook in before_request: hook() - rv = callback(*a, **ka) - for hook in after_request[::-1]: hook() - return rv - return wrapper - - - -class TypeFilterPlugin(object): - def __init__(self): - self.filter = [] - self.app = None - - def setup(self, app): - self.app = app - - def add(self, ftype, func): - if not isinstance(ftype, type): - raise TypeError("Expected type object, got %s" % type(ftype)) - self.filter = [(t, f) for (t, f) in self.filter if t != ftype] - self.filter.append((ftype, func)) - if len(self.filter) == 1 and self.app: self.app.reset() - - def apply(self, callback, context): - filter = self.filter - if not filter: return callback - def wrapper(*a, **ka): - rv = callback(*a, **ka) - for testtype, filterfunc in filter: - if isinstance(rv, testtype): - rv = filterfunc(rv) - return rv - return wrapper - - -class TemplatePlugin(object): - ''' This plugin applies the :func:`view` decorator to all routes with a - `template` config parameter. If the parameter is a tuple, the second - element must be a dict with additional options (e.g. `template_engine`) - or default variables for the template. 
''' - name = 'template' - - def apply(self, callback, context): - conf = context['config'].get('template') - if isinstance(conf, (tuple, list)) and len(conf) == 2: - return view(conf[0], **conf[1])(callback) - elif isinstance(conf, str) and 'template_opts' in context['config']: - depr('The `template_opts` parameter is deprecated.') #0.9 - return view(conf, **context['config']['template_opts'])(callback) - elif isinstance(conf, str): - return view(conf)(callback) - else: - return callback - - -#: Not a plugin, but part of the plugin API. TODO: Find a better place. -class _ImportRedirect(object): - def __init__(self, name, impmask): - ''' Create a virtual package that redirects imports (see PEP 302). ''' - self.name = name - self.impmask = impmask - self.module = sys.modules.setdefault(name, imp.new_module(name)) - self.module.__dict__.update({'__file__': __file__, '__path__': [], - '__all__': [], '__loader__': self}) - sys.meta_path.append(self) - - def find_module(self, fullname, path=None): - if '.' not in fullname: return - packname, modname = fullname.rsplit('.', 1) - if packname != self.name: return - return self - - def load_module(self, fullname): - if fullname in sys.modules: return sys.modules[fullname] - packname, modname = fullname.rsplit('.', 1) - realname = self.impmask % modname - __import__(realname) - module = sys.modules[fullname] = sys.modules[realname] - setattr(self.module, modname, module) - module.__loader__ = self - return module - - - - - - -############################################################################### -# Common Utilities ############################################################# -############################################################################### - - -class MultiDict(DictMixin): - """ A dict that remembers old values for each key """ - # collections.MutableMapping would be better for Python >= 2.6 - def __init__(self, *a, **k): - self.dict = dict() - for k, v in six.iteritems(dict(*a, **k)): - self[k] = v - - def __len__(self): return len(self.dict) - def __iter__(self): return iter(self.dict) - def __contains__(self, key): return key in self.dict - def __delitem__(self, key): del self.dict[key] - def keys(self): return list(self.dict.keys()) - def __getitem__(self, key): return self.get(key, KeyError, -1) - def __setitem__(self, key, value): self.append(key, value) - - def append(self, key, value): self.dict.setdefault(key, []).append(value) - def replace(self, key, value): self.dict[key] = [value] - def getall(self, key): return self.dict.get(key) or [] - - def get(self, key, default=None, index=-1): - if key not in self.dict and default != KeyError: - return [default][index] - return self.dict[key][index] - - def iterallitems(self): - for key, values in six.iteritems(self.dict): - for value in values: - yield key, value - - -class HeaderDict(MultiDict): - """ Same as :class:`MultiDict`, but title()s the keys and overwrites. 
""" - def __contains__(self, key): - return MultiDict.__contains__(self, self.httpkey(key)) - def __getitem__(self, key): - return MultiDict.__getitem__(self, self.httpkey(key)) - def __delitem__(self, key): - return MultiDict.__delitem__(self, self.httpkey(key)) - def __setitem__(self, key, value): self.replace(key, value) - def get(self, key, default=None, index=-1): - return MultiDict.get(self, self.httpkey(key), default, index) - def append(self, key, value): - return MultiDict.append(self, self.httpkey(key), str(value)) - def replace(self, key, value): - return MultiDict.replace(self, self.httpkey(key), str(value)) - def getall(self, key): return MultiDict.getall(self, self.httpkey(key)) - def httpkey(self, key): return str(key).replace('_','-').title() - - -class WSGIHeaderDict(DictMixin): - ''' This dict-like class wraps a WSGI environ dict and provides convenient - access to HTTP_* fields. Keys and values are native strings - (2.x bytes or 3.x unicode) and keys are case-insensitive. If the WSGI - environment contains non-native string values, these are de- or encoded - using a lossless 'latin1' character set. - - The API will remain stable even on changes to the relevant PEPs. - Currently PEP 333, 444 and 3333 are supported. (PEP 444 is the only one - that uses non-native strings.) - ''' - #: List of keys that do not have a 'HTTP_' prefix. - cgikeys = ('CONTENT_TYPE', 'CONTENT_LENGTH') - - def __init__(self, environ): - self.environ = environ - - def _ekey(self, key): - ''' Translate header field name to CGI/WSGI environ key. ''' - key = key.replace('-','_').upper() - if key in self.cgikeys: - return key - return 'HTTP_' + key - - def raw(self, key, default=None): - ''' Return the header value as is (may be bytes or unicode). ''' - return self.environ.get(self._ekey(key), default) - - def __getitem__(self, key): - return tonat(self.environ[self._ekey(key)], 'latin1') - - def __setitem__(self, key, value): - raise TypeError("%s is read-only." % self.__class__) - - def __delitem__(self, key): - raise TypeError("%s is read-only." % self.__class__) - - def __iter__(self): - for key in self.environ: - if key[:5] == 'HTTP_': - yield key[5:].replace('_', '-').title() - elif key in self.cgikeys: - yield key.replace('_', '-').title() - - def keys(self): return list(self) - def __len__(self): return len(list(self)) - def __contains__(self, key): return self._ekey(key) in self.environ - - -class AppStack(list): - """ A stack-like list. Calling it returns the head of the stack. """ - - def __call__(self): - """ Return the current default application. 
""" - return self[-1] - - def push(self, value=None): - """ Add a new :class:`Bottle` instance to the stack """ - if not isinstance(value, Bottle): - value = Bottle() - self.append(value) - return value - - -class WSGIFileWrapper(object): - - def __init__(self, fp, buffer_size=1024*64): - self.fp, self.buffer_size = fp, buffer_size - for attr in ('fileno', 'close', 'read', 'readlines'): - if hasattr(fp, attr): setattr(self, attr, getattr(fp, attr)) - - def __iter__(self): - read, buff = self.fp.read, self.buffer_size - while True: - part = read(buff) - if not part: break - yield part - - - - - - -############################################################################### -# Application Helper ########################################################### -############################################################################### - - -def dict2json(d): - depr('JSONPlugin is the preferred way to return JSON.') #0.9 - response.content_type = 'application/json' - return json_dumps(d) - - -def abort(code=500, text='Unknown Error: Application stopped.'): - """ Aborts execution and causes a HTTP error. """ - raise HTTPError(code, text) - - -def redirect(url, code=303): - """ Aborts execution and causes a 303 redirect. """ - location = urljoin(request.url, url) - raise HTTPResponse("", status=code, header=dict(Location=location)) - - -def send_file(*a, **k): #BC 0.6.4 - """ Raises the output of static_file(). (deprecated) """ - depr("Use 'raise static_file()' instead of 'send_file()'.") - raise static_file(*a, **k) - - -def static_file(filename, root, mimetype='auto', guessmime=True, download=False): - """ Open a file in a safe way and return :exc:`HTTPResponse` with status - code 200, 305, 401 or 404. Set Content-Type, Content-Encoding, - Content-Length and Last-Modified header. Obey If-Modified-Since header - and HEAD requests. 
- """ - root = os.path.abspath(root) + os.sep - filename = os.path.abspath(os.path.join(root, filename.strip('/\\'))) - header = dict() - - if not filename.startswith(root): - return HTTPError(403, "Access denied.") - if not os.path.exists(filename) or not os.path.isfile(filename): - return HTTPError(404, "File does not exist.") - if not os.access(filename, os.R_OK): - return HTTPError(403, "You do not have permission to access this file.") - - if not guessmime: #0.9 - if mimetype == 'auto': mimetype = 'text/plain' - depr("To disable mime-type guessing, specify a type explicitly.") - if mimetype == 'auto': - mimetype, encoding = mimetypes.guess_type(filename) - if mimetype: header['Content-Type'] = mimetype - if encoding: header['Content-Encoding'] = encoding - elif mimetype: - header['Content-Type'] = mimetype - - if download: - download = os.path.basename(filename if download == True else download) - header['Content-Disposition'] = 'attachment; filename="%s"' % download - - stats = os.stat(filename) - header['Content-Length'] = stats.st_size - lm = time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(stats.st_mtime)) - header['Last-Modified'] = lm - - ims = request.environ.get('HTTP_IF_MODIFIED_SINCE') - if ims: - ims = parse_date(ims.split(";")[0].strip()) - if ims is not None and ims >= int(stats.st_mtime): - header['Date'] = time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime()) - return HTTPResponse(status=304, header=header) - - body = '' if request.method == 'HEAD' else open(filename, 'rb') - return HTTPResponse(body, header=header) - - - - - - -############################################################################### -# HTTP Utilities and MISC (TODO) ############################################### -############################################################################### - - -def debug(mode=True): - """ Change the debug level. - There is only one debug level supported at the moment.""" - global DEBUG - DEBUG = bool(mode) - - -def parse_date(ims): - """ Parse rfc1123, rfc850 and asctime timestamps and return UTC epoch. """ - try: - ts = email.utils.parsedate_tz(ims) - return time.mktime(ts[:8] + (0,)) - (ts[9] or 0) - time.timezone - except (TypeError, ValueError, IndexError, OverflowError): - return None - - -def parse_auth(header): - """ Parse rfc2617 HTTP authentication header string (basic) and return (user,pass) tuple or None""" - try: - method, data = header.split(None, 1) - if method.lower() == 'basic': - name, pwd = base64.b64decode(data).split(':', 1) - return name, pwd - except (KeyError, ValueError, TypeError): - return None - - -def _lscmp(a, b): - ''' Compares two strings in a cryptographically save way: - Runtime is not affected by length of common prefix. ''' - return not sum(0 if x==y else 1 for x, y in zip(a, b)) and len(a) == len(b) - - -def cookie_encode(data, key): - ''' Encode and sign a pickle-able object. Return a (byte) string ''' - msg = base64.b64encode(pickle.dumps(data, -1)) - sig = base64.b64encode(hmac.new(key, msg).digest()) - return tob('!') + sig + tob('?') + msg - - -def cookie_decode(data, key): - ''' Verify and decode an encoded string. 
Return an object or None.''' - data = tob(data) - if cookie_is_encoded(data): - sig, msg = data.split(tob('?'), 1) - if _lscmp(sig[1:], base64.b64encode(hmac.new(key, msg).digest())): - return pickle.loads(base64.b64decode(msg)) - return None - - -def cookie_is_encoded(data): - ''' Return True if the argument looks like a encoded cookie.''' - return bool(data.startswith(tob('!')) and tob('?') in data) - - -def yieldroutes(func): - """ Return a generator for routes that match the signature (name, args) - of the func parameter. This may yield more than one route if the function - takes optional keyword arguments. The output is best described by example:: - - a() -> '/a' - b(x, y) -> '/b/:x/:y' - c(x, y=5) -> '/c/:x' and '/c/:x/:y' - d(x=5, y=6) -> '/d' and '/d/:x' and '/d/:x/:y' - """ - import inspect # Expensive module. Only import if necessary. - path = '/' + func.__name__.replace('__','/').lstrip('/') - spec = inspect.getargspec(func) - argc = len(spec[0]) - len(spec[3] or []) - path += ('/:%s' * argc) % tuple(spec[0][:argc]) - yield path - for arg in spec[0][argc:]: - path += '/:%s' % arg - yield path - - -def path_shift(script_name, path_info, shift=1): - ''' Shift path fragments from PATH_INFO to SCRIPT_NAME and vice versa. - - :return: The modified paths. - :param script_name: The SCRIPT_NAME path. - :param script_name: The PATH_INFO path. - :param shift: The number of path fragments to shift. May be negative to - change the shift direction. (default: 1) - ''' - if shift == 0: return script_name, path_info - pathlist = path_info.strip('/').split('/') - scriptlist = script_name.strip('/').split('/') - if pathlist and pathlist[0] == '': pathlist = [] - if scriptlist and scriptlist[0] == '': scriptlist = [] - if shift > 0 and shift <= len(pathlist): - moved = pathlist[:shift] - scriptlist = scriptlist + moved - pathlist = pathlist[shift:] - elif shift < 0 and shift >= -len(scriptlist): - moved = scriptlist[shift:] - pathlist = moved + pathlist - scriptlist = scriptlist[:shift] - else: - empty = 'SCRIPT_NAME' if shift < 0 else 'PATH_INFO' - raise AssertionError("Cannot shift. Nothing left from %s" % empty) - new_script_name = '/' + '/'.join(scriptlist) - new_path_info = '/' + '/'.join(pathlist) - if path_info.endswith('/') and pathlist: new_path_info += '/' - return new_script_name, new_path_info - - - -# Decorators -#TODO: Replace default_app() with app() - -def validate(**vkargs): - """ - Validates and manipulates keyword arguments by user defined callables. - Handles ValueError and missing arguments by raising HTTPError(403). - """ - def decorator(func): - def wrapper(**kargs): - for key, value in six.iteritems(vkargs): - if key not in kargs: - abort(403, 'Missing parameter: %s' % key) - try: - kargs[key] = value(kargs[key]) - except ValueError: - abort(403, 'Wrong parameter format for: %s' % key) - return func(**kargs) - return wrapper - return decorator - - -def auth_basic(check, realm="private", text="Access denied"): - ''' Callback decorator to require HTTP auth (basic). - TODO: Add route(check_auth=...) parameter. ''' - def decorator(func): - def wrapper(*a, **ka): - user, password = request.auth or (None, None) - if user is None or not check(user, password): - response.headers['WWW-Authenticate'] = 'Basic realm="%s"' % realm - return HTTPError(401, text) - return func(*a, **ka) - return wrapper - return decorator - - -def make_default_app_wrapper(name): - ''' Return a callable that relays calls to the current default app. 
''' - @functools.wraps(getattr(Bottle, name)) - def wrapper(*a, **ka): - return getattr(app(), name)(*a, **ka) - return wrapper - - -for name in '''route get post put delete error mount - hook install uninstall'''.split(): - globals()[name] = make_default_app_wrapper(name) -url = make_default_app_wrapper('get_url') -del name - - -def default(): - depr("The default() decorator is deprecated. Use @error(404) instead.") - return error(404) - - - - - - -############################################################################### -# Server Adapter ############################################################### -############################################################################### - - -class ServerAdapter(object): - quiet = False - def __init__(self, host='127.0.0.1', port=8080, **config): - self.options = config - self.host = host - self.port = int(port) - - def run(self, handler): # pragma: no cover - pass - - def __repr__(self): - args = ', '.join(['%s=%s'%(k,repr(v)) for k, v in self.options.items()]) - return "%s(%s)" % (self.__class__.__name__, args) - - -class CGIServer(ServerAdapter): - quiet = True - def run(self, handler): # pragma: no cover - from wsgiref.handlers import CGIHandler - def fixed_environ(environ, start_response): - environ.setdefault('PATH_INFO', '') - return handler(environ, start_response) - CGIHandler().run(fixed_environ) - - -class FlupFCGIServer(ServerAdapter): - def run(self, handler): # pragma: no cover - import flup.server.fcgi - kwargs = {'bindAddress':(self.host, self.port)} - kwargs.update(self.options) # allow to override bindAddress and others - flup.server.fcgi.WSGIServer(handler, **kwargs).run() - - -class WSGIRefServer(ServerAdapter): - def run(self, handler): # pragma: no cover - from wsgiref.simple_server import make_server, WSGIRequestHandler - if self.quiet: - class QuietHandler(WSGIRequestHandler): - def log_request(*args, **kw): pass - self.options['handler_class'] = QuietHandler - srv = make_server(self.host, self.port, handler, **self.options) - srv.serve_forever() - - -class CherryPyServer(ServerAdapter): - def run(self, handler): # pragma: no cover - from cherrypy import wsgiserver - server = wsgiserver.CherryPyWSGIServer((self.host, self.port), handler) - try: - server.start() - finally: - server.stop() - -class PasteServer(ServerAdapter): - def run(self, handler): # pragma: no cover - from paste import httpserver - if not self.quiet: - from paste.translogger import TransLogger - handler = TransLogger(handler) - httpserver.serve(handler, host=self.host, port=str(self.port), - **self.options) - -class MeinheldServer(ServerAdapter): - def run(self, handler): - from meinheld import server - server.listen((self.host, self.port)) - server.run(handler) - - -class FapwsServer(ServerAdapter): - """ Extremely fast webserver using libev. See http://www.fapws.org/ """ - def run(self, handler): # pragma: no cover - import fapws._evwsgi as evwsgi - from fapws import base, config - port = self.port - if float(config.SERVER_IDENT[-2:]) > 0.4: - # fapws3 silently changed its API in 0.5 - port = str(port) - evwsgi.start(self.host, port) - # fapws3 never releases the GIL. Complain upstream. I tried. No luck. 
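Every adapter in the block of `ServerAdapter` subclasses being removed here follows the same small contract: the constructor stores `host`, `port`, and any extra options, and `run(handler)` blocks while serving the WSGI `handler`. A minimal sketch of that contract, assuming the vendored `bottle` module deleted by this hunk is importable under that name (the class and route names below are illustrative):

```python
# Sketch only: assumes the vendored bottle module from this hunk is
# importable as `bottle`; WSGIRefEcho and /ping are illustrative names.
import bottle

class WSGIRefEcho(bottle.ServerAdapter):
    def run(self, handler):
        # `handler` is the WSGI application; run() blocks while serving it.
        from wsgiref.simple_server import make_server
        make_server(self.host, self.port, handler).serve_forever()

app = bottle.Bottle()

@app.route('/ping')
def ping():
    return 'pong'

# run() resolves string names via server_names, or instantiates a
# ServerAdapter subclass with the given host/port, as here.
bottle.run(app, server=WSGIRefEcho, host='127.0.0.1', port=8080)
```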
- if 'BOTTLE_CHILD' in os.environ and not self.quiet: - print("WARNING: Auto-reloading does not work with Fapws3.") - print(" (Fapws3 breaks python thread support)") - evwsgi.set_base_module(base) - def app(environ, start_response): - environ['wsgi.multiprocess'] = False - return handler(environ, start_response) - evwsgi.wsgi_cb(('', app)) - evwsgi.run() - - -class TornadoServer(ServerAdapter): - """ The super hyped asynchronous server by facebook. Untested. """ - def run(self, handler): # pragma: no cover - import tornado.wsgi - import tornado.httpserver - import tornado.ioloop - container = tornado.wsgi.WSGIContainer(handler) - server = tornado.httpserver.HTTPServer(container) - server.listen(port=self.port) - tornado.ioloop.IOLoop.instance().start() - - -class AppEngineServer(ServerAdapter): - """ Adapter for Google App Engine. """ - quiet = True - def run(self, handler): - from google.appengine.ext.webapp import util - # A main() function in the handler script enables 'App Caching'. - # Lets makes sure it is there. This _really_ improves performance. - module = sys.modules.get('__main__') - if module and not hasattr(module, 'main'): - module.main = lambda: util.run_wsgi_app(handler) - util.run_wsgi_app(handler) - - -class TwistedServer(ServerAdapter): - """ Untested. """ - def run(self, handler): - from twisted.web import server, wsgi - from twisted.python.threadpool import ThreadPool - from twisted.internet import reactor - thread_pool = ThreadPool() - thread_pool.start() - reactor.addSystemEventTrigger('after', 'shutdown', thread_pool.stop) - factory = server.Site(wsgi.WSGIResource(reactor, thread_pool, handler)) - reactor.listenTCP(self.port, factory, interface=self.host) - reactor.run() - - -class DieselServer(ServerAdapter): - """ Untested. """ - def run(self, handler): - from diesel.protocols.wsgi import WSGIApplication - app = WSGIApplication(handler, port=self.port) - app.run() - - -class GeventServer(ServerAdapter): - """ Untested. Options: - - * `monkey` (default: True) fixes the stdlib to use greenthreads. - * `fast` (default: False) uses libevent's http server, but has some - issues: No streaming, no pipelining, no SSL. - """ - def run(self, handler): - from gevent import wsgi as wsgi_fast, pywsgi as wsgi, monkey - if self.options.get('monkey', True): - monkey.patch_all() - if self.options.get('fast', False): - wsgi = wsgi_fast - wsgi.WSGIServer((self.host, self.port), handler).serve_forever() - - -class GunicornServer(ServerAdapter): - """ Untested. """ - def run(self, handler): - from gunicorn.arbiter import Arbiter - from gunicorn.config import Config - handler.cfg = Config({'bind': "%s:%d" % (self.host, self.port), 'workers': 4}) - arbiter = Arbiter(handler) - arbiter.run() - - -class EventletServer(ServerAdapter): - """ Untested """ - def run(self, handler): - from eventlet import wsgi, listen - wsgi.server(listen((self.host, self.port)), handler) - - -class RocketServer(ServerAdapter): - """ Untested. As requested in issue 63 - https://github.com/defnull/bottle/issues/#issue/63 """ - def run(self, handler): - from rocket import Rocket - server = Rocket((self.host, self.port), 'wsgi', { 'wsgi_app' : handler }) - server.start() - - -class BjoernServer(ServerAdapter): - """ Screamingly fast server written in C: https://github.com/jonashaag/bjoern """ - def run(self, handler): - from bjoern import run - run(handler, self.host, self.port) - - -class AutoServer(ServerAdapter): - """ Untested. 
""" - adapters = [PasteServer, CherryPyServer, TwistedServer, WSGIRefServer] - def run(self, handler): - for sa in self.adapters: - try: - return sa(self.host, self.port, **self.options).run(handler) - except ImportError: - pass - - -server_names = { - 'cgi': CGIServer, - 'flup': FlupFCGIServer, - 'wsgiref': WSGIRefServer, - 'cherrypy': CherryPyServer, - 'paste': PasteServer, - 'fapws3': FapwsServer, - 'tornado': TornadoServer, - 'gae': AppEngineServer, - 'twisted': TwistedServer, - 'diesel': DieselServer, - 'meinheld': MeinheldServer, - 'gunicorn': GunicornServer, - 'eventlet': EventletServer, - 'gevent': GeventServer, - 'rocket': RocketServer, - 'bjoern' : BjoernServer, - 'auto': AutoServer, -} - - - - - - -############################################################################### -# Application Control ########################################################## -############################################################################### - - -def _load(target, **vars): - """ Fetch something from a module. The exact behaviour depends on the the - target string: - - If the target is a valid python import path (e.g. `package.module`), - the rightmost part is returned as a module object. - If the target contains a colon (e.g. `package.module:var`) the module - variable specified after the colon is returned. - If the part after the colon contains any non-alphanumeric characters - (e.g. `package.module:func(var)`) the result of the expression - is returned. The expression has access to keyword arguments supplied - to this function. - - Example:: - >>> _load('bottle') - - >>> _load('bottle:Bottle') - - >>> _load('bottle:cookie_encode(v, secret)', v='foo', secret='bar') - '!F+hN4dQxaDJ4QxxaZ+Z3jw==?gAJVA2Zvb3EBLg==' - - """ - module, target = target.split(":", 1) if ':' in target else (target, None) - if module not in sys.modules: - __import__(module) - if not target: - return sys.modules[module] - if target.isalnum(): - return getattr(sys.modules[module], target) - package_name = module.split('.')[0] - vars[package_name] = sys.modules[package_name] - return eval('%s.%s' % (module, target), vars) - - -def load_app(target): - """ Load a bottle application based on a target string and return the - application object. - - If the target is an import path (e.g. package.module), the application - stack is used to isolate the routes defined in that module. - If the target contains a colon (e.g. package.module:myapp) the - module variable specified after the colon is returned instead. - """ - tmp = app.push() # Create a new "default application" - rv = _load(target) # Import the target module - app.remove(tmp) # Remove the temporary added default application - return rv if isinstance(rv, Bottle) else tmp - - -def run(app=None, server='wsgiref', host='127.0.0.1', port=8080, - interval=1, reloader=False, quiet=False, **kargs): - """ Start a server instance. This method blocks until the server terminates. - - :param app: WSGI application or target string supported by - :func:`load_app`. (default: :func:`default_app`) - :param server: Server adapter to use. See :data:`server_names` keys - for valid names or pass a :class:`ServerAdapter` subclass. - (default: `wsgiref`) - :param host: Server address to bind to. Pass ``0.0.0.0`` to listens on - all interfaces including the external one. (default: 127.0.0.1) - :param port: Server port to bind to. Values below 1024 require root - privileges. (default: 8080) - :param reloader: Start auto-reloading server? 
(default: False) - :param interval: Auto-reloader interval in seconds (default: 1) - :param quiet: Suppress output to stdout and stderr? (default: False) - :param options: Options passed to the server adapter. - """ - app = app or default_app() - if isinstance(app, six.string_types): - app = load_app(app) - if isinstance(server, six.string_types): - server = server_names.get(server) - if isinstance(server, type): - server = server(host=host, port=port, **kargs) - if not isinstance(server, ServerAdapter): - raise RuntimeError("Server must be a subclass of ServerAdapter") - server.quiet = server.quiet or quiet - if not server.quiet and not os.environ.get('BOTTLE_CHILD'): - print("Bottle server starting up (using %s)..." % repr(server)) - print("Listening on http://%s:%d/" % (server.host, server.port)) - print("Use Ctrl-C to quit.") - print() - try: - if reloader: - interval = min(interval, 1) - if os.environ.get('BOTTLE_CHILD'): - _reloader_child(server, app, interval) - else: - _reloader_observer(server, app, interval) - else: - server.run(app) - except KeyboardInterrupt: - pass - if not server.quiet and not os.environ.get('BOTTLE_CHILD'): - print("Shutting down...") - - -class FileCheckerThread(threading.Thread): - ''' Thread that periodically checks for changed module files. ''' - - def __init__(self, lockfile, interval): - threading.Thread.__init__(self) - self.lockfile, self.interval = lockfile, interval - #1: lockfile to old; 2: lockfile missing - #3: module file changed; 5: external exit - self.status = 0 - - def run(self): - exists = os.path.exists - mtime = lambda path: os.stat(path).st_mtime - files = dict() - for module in sys.modules.values(): - path = getattr(module, '__file__', '') - if path[-4:] in ('.pyo', '.pyc'): path = path[:-1] - if path and exists(path): files[path] = mtime(path) - while not self.status: - for path, lmtime in six.iteritems(files): - if not exists(path) or mtime(path) > lmtime: - self.status = 3 - if not exists(self.lockfile): - self.status = 2 - elif mtime(self.lockfile) < time.time() - self.interval - 5: - self.status = 1 - if not self.status: - time.sleep(self.interval) - if self.status != 5: - six.moves._thread.interrupt_main() - - -def _reloader_child(server, app, interval): - ''' Start the server and check for modified files in a background thread. - As soon as an update is detected, KeyboardInterrupt is thrown in - the main thread to exit the server loop. The process exists with status - code 3 to request a reload by the observer process. If the lockfile - is not modified in 2*interval second or missing, we assume that the - observer process died and exit with status code 1 or 2. - ''' - lockfile = os.environ.get('BOTTLE_LOCKFILE') - bgcheck = FileCheckerThread(lockfile, interval) - try: - bgcheck.start() - server.run(app) - except KeyboardInterrupt: - pass - bgcheck.status, status = 5, bgcheck.status - bgcheck.join() # bgcheck.status == 5 --> silent exit - if status: sys.exit(status) - - -def _reloader_observer(server, app, interval): - ''' Start a child process with identical commandline arguments and restart - it as long as it exists with status code 3. Also create a lockfile and - touch it (update mtime) every interval seconds. - ''' - fd, lockfile = tempfile.mkstemp(prefix='bottle-reloader.', suffix='.lock') - os.close(fd) # We only need this file to exist. 
We never write to it - try: - while os.path.exists(lockfile): - args = [sys.executable] + sys.argv - environ = os.environ.copy() - environ['BOTTLE_CHILD'] = 'true' - environ['BOTTLE_LOCKFILE'] = lockfile - p = subprocess.Popen(args, env=environ) - while p.poll() is None: # Busy wait... - os.utime(lockfile, None) # I am alive! - time.sleep(interval) - if p.poll() != 3: - if os.path.exists(lockfile): os.unlink(lockfile) - sys.exit(p.poll()) - elif not server.quiet: - print("Reloading server...") - except KeyboardInterrupt: - pass - if os.path.exists(lockfile): os.unlink(lockfile) - - - - - - -############################################################################### -# Template Adapters ############################################################ -############################################################################### - - -class TemplateError(HTTPError): - def __init__(self, message): - HTTPError.__init__(self, 500, message) - - -class BaseTemplate(object): - """ Base class and minimal API for template adapters """ - extentions = ['tpl','html','thtml','stpl'] - settings = {} #used in prepare() - defaults = {} #used in render() - - def __init__(self, source=None, name=None, lookup=[], encoding='utf8', **settings): - """ Create a new template. - If the source parameter (str or buffer) is missing, the name argument - is used to guess a template filename. Subclasses can assume that - self.source and/or self.filename are set. Both are strings. - The lookup, encoding and settings parameters are stored as instance - variables. - The lookup parameter stores a list containing directory paths. - The encoding parameter should be used to decode byte strings or files. - The settings parameter contains a dict for engine-specific settings. - """ - self.name = name - self.source = source.read() if hasattr(source, 'read') else source - self.filename = source.filename if hasattr(source, 'filename') else None - self.lookup = list(map(os.path.abspath, lookup)) - self.encoding = encoding - self.settings = self.settings.copy() # Copy from class variable - self.settings.update(settings) # Apply - if not self.source and self.name: - self.filename = self.search(self.name, self.lookup) - if not self.filename: - raise TemplateError('Template %s not found.' % repr(name)) - if not self.source and not self.filename: - raise TemplateError('No template specified.') - self.prepare(**self.settings) - - @classmethod - def search(cls, name, lookup=[]): - """ Search name in all directories specified in lookup. - First without, then with common extensions. Return first hit. """ - if os.path.isfile(name): return name - for spath in lookup: - fname = os.path.join(spath, name) - if os.path.isfile(fname): - return fname - for ext in cls.extentions: - if os.path.isfile('%s.%s' % (fname, ext)): - return '%s.%s' % (fname, ext) - - @classmethod - def global_config(cls, key, *args): - ''' This reads or sets the global settings stored in class.settings. ''' - if args: - cls.settings[key] = args[0] - else: - return cls.settings[key] - - def prepare(self, **options): - """ Run preparations (parsing, caching, ...). - It should be possible to call this again to refresh a template or to - update settings. - """ - raise NotImplementedError - - def render(self, *args, **kwargs): - """ Render the template with the specified local variables and return - a single byte or unicode string. If it is a byte string, the encoding - must match self.encoding. This method must be thread-safe! 
- Local variables may be provided in dictionaries (*args) - or directly, as keywords (**kwargs). - """ - raise NotImplementedError - - -class MakoTemplate(BaseTemplate): - def prepare(self, **options): - from mako.template import Template - from mako.lookup import TemplateLookup - options.update({'input_encoding':self.encoding}) - options.setdefault('format_exceptions', bool(DEBUG)) - lookup = TemplateLookup(directories=self.lookup, **options) - if self.source: - self.tpl = Template(self.source, lookup=lookup, **options) - else: - self.tpl = Template(uri=self.name, filename=self.filename, lookup=lookup, **options) - - def render(self, *args, **kwargs): - for dictarg in args: kwargs.update(dictarg) - _defaults = self.defaults.copy() - _defaults.update(kwargs) - return self.tpl.render(**_defaults) - - -class CheetahTemplate(BaseTemplate): - def prepare(self, **options): - from Cheetah.Template import Template - self.context = threading.local() - self.context.vars = {} - options['searchList'] = [self.context.vars] - if self.source: - self.tpl = Template(source=self.source, **options) - else: - self.tpl = Template(file=self.filename, **options) - - def render(self, *args, **kwargs): - for dictarg in args: kwargs.update(dictarg) - self.context.vars.update(self.defaults) - self.context.vars.update(kwargs) - out = str(self.tpl) - self.context.vars.clear() - return [out] - - -class Jinja2Template(BaseTemplate): - def prepare(self, filters=None, tests=None, **kwargs): - from jinja2 import Environment, FunctionLoader - if 'prefix' in kwargs: # TODO: to be removed after a while - raise RuntimeError('The keyword argument `prefix` has been removed. ' - 'Use the full jinja2 environment name line_statement_prefix instead.') - self.env = Environment(loader=FunctionLoader(self.loader), **kwargs) - if filters: self.env.filters.update(filters) - if tests: self.env.tests.update(tests) - if self.source: - self.tpl = self.env.from_string(self.source) - else: - self.tpl = self.env.get_template(self.filename) - - def render(self, *args, **kwargs): - for dictarg in args: kwargs.update(dictarg) - _defaults = self.defaults.copy() - _defaults.update(kwargs) - return self.tpl.render(**_defaults).encode("utf-8") - - def loader(self, name): - fname = self.search(name, self.lookup) - if fname: - with open(fname, "rb") as f: - return f.read().decode(self.encoding) - - -class SimpleTALTemplate(BaseTemplate): - ''' Untested! ''' - def prepare(self, **options): - from simpletal import simpleTAL - # TODO: add option to load METAL files during render - if self.source: - self.tpl = simpleTAL.compileHTMLTemplate(self.source) - else: - with open(self.filename, 'rb') as fp: - self.tpl = simpleTAL.compileHTMLTemplate(tonat(fp.read())) - - def render(self, *args, **kwargs): - from simpletal import simpleTALES - from StringIO import StringIO - for dictarg in args: kwargs.update(dictarg) - # TODO: maybe reuse a context instead of always creating one - context = simpleTALES.Context() - for k,v in self.defaults.items(): - context.addGlobal(k, v) - for k,v in kwargs.items(): - context.addGlobal(k, v) - output = StringIO() - self.tpl.expand(context, output) - return output.getvalue() - - -class SimpleTemplate(BaseTemplate): - blocks = ('if','elif','else','try','except','finally','for','while','with','def','class') - dedent_blocks = ('elif', 'else', 'except', 'finally') - - @lazy_attribute - def re_pytokens(cls): - ''' This matches comments and all kinds of quoted strings but does - NOT match comments (#...) within quoted strings. 
(trust me) ''' - return re.compile(r''' - (''(?!')|""(?!")|'{6}|"{6} # Empty strings (all 4 types) - |'(?:[^\\']|\\.)+?' # Single quotes (') - |"(?:[^\\"]|\\.)+?" # Double quotes (") - |'{3}(?:[^\\]|\\.|\n)+?'{3} # Triple-quoted strings (') - |"{3}(?:[^\\]|\\.|\n)+?"{3} # Triple-quoted strings (") - |\#.* # Comments - )''', re.VERBOSE) - - def prepare(self, escape_func=cgi.escape, noescape=False): - self.cache = {} - enc = self.encoding - self._str = lambda x: touni(x, enc) - self._escape = lambda x: escape_func(touni(x, enc)) - if noescape: - self._str, self._escape = self._escape, self._str - - @classmethod - def split_comment(cls, code): - """ Removes comments (#...) from python code. """ - if '#' not in code: return code - #: Remove comments only (leave quoted strings as they are) - subf = lambda m: '' if m.group(0)[0]=='#' else m.group(0) - return re.sub(cls.re_pytokens, subf, code) - - @cached_property - def co(self): - return compile(self.code, self.filename or '', 'exec') - - @cached_property - def code(self): - stack = [] # Current Code indentation - lineno = 0 # Current line of code - ptrbuffer = [] # Buffer for printable strings and token tuple instances - codebuffer = [] # Buffer for generated python code - multiline = dedent = oneline = False - template = self.source if self.source else open(self.filename).read() - - def yield_tokens(line): - for i, part in enumerate(re.split(r'\{\{(.*?)\}\}', line)): - if i % 2: - if part.startswith('!'): yield 'RAW', part[1:] - else: yield 'CMD', part - else: yield 'TXT', part - - def flush(): # Flush the ptrbuffer - if not ptrbuffer: return - cline = '' - for line in ptrbuffer: - for token, value in line: - if token == 'TXT': cline += repr(value) - elif token == 'RAW': cline += '_str(%s)' % value - elif token == 'CMD': cline += '_escape(%s)' % value - cline += ', ' - cline = cline[:-2] + '\\\n' - cline = cline[:-2] - if cline[:-1].endswith('\\\\\\\\\\n'): - cline = cline[:-7] + cline[-1] # 'nobr\\\\\n' --> 'nobr' - cline = '_printlist([' + cline + '])' - del ptrbuffer[:] # Do this before calling code() again - code(cline) - - def code(stmt): - for line in stmt.splitlines(): - codebuffer.append(' ' * len(stack) + line.strip()) - - for line in template.splitlines(True): - lineno += 1 - line = line if isinstance(line, six.text_type)\ - else six.text_type(line, encoding=self.encoding) - if lineno <= 2: - m = re.search(r"%.*coding[:=]\s*([-\w\.]+)", line) - if m: self.encoding = m.group(1) - if m: line = line.replace('coding','coding (removed)') - if line.strip()[:2].count('%') == 1: - line = line.split('%',1)[1].lstrip() # Full line following the % - cline = self.split_comment(line).strip() - cmd = re.split(r'[^a-zA-Z0-9_]', cline)[0] - flush() ##encodig (TODO: why?) 
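The compiler in `SimpleTemplate.code` above turns `{{expr}}` tokens into `_escape()` calls (and `{{!expr}}` into raw `_str()` calls), while lines whose first non-blank character is a single `%` are compiled as Python, with blocks closed by `%end`. A minimal sketch of the syntax it accepts, again assuming the deleted module is importable as `bottle`:

```python
# Sketch only: exercises the stpl syntax compiled by SimpleTemplate.code;
# assumes the vendored bottle module from this hunk is importable.
from bottle import SimpleTemplate

source = """\
%if items:
<ul>
%for item in items:
  <li>{{item}}</li>
  <li>{{!item}}</li>
%end
</ul>
%end
"""

tpl = SimpleTemplate(source)
# {{item}} is HTML-escaped (a<b becomes a&lt;b); {{!item}} is emitted raw.
print(tpl.render(items=['a<b', 'c']))
```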
- if cmd in self.blocks or multiline: - cmd = multiline or cmd - dedent = cmd in self.dedent_blocks # "else:" - if dedent and not oneline and not multiline: - cmd = stack.pop() - code(line) - oneline = not cline.endswith(':') # "if 1: pass" - multiline = cmd if cline.endswith('\\') else False - if not oneline and not multiline: - stack.append(cmd) - elif cmd == 'end' and stack: - code('#end(%s) %s' % (stack.pop(), line.strip()[3:])) - elif cmd == 'include': - p = cline.split(None, 2)[1:] - if len(p) == 2: - code("_=_include(%s, _stdout, %s)" % (repr(p[0]), p[1])) - elif p: - code("_=_include(%s, _stdout)" % repr(p[0])) - else: # Empty %include -> reverse of %rebase - code("_printlist(_base)") - elif cmd == 'rebase': - p = cline.split(None, 2)[1:] - if len(p) == 2: - code("globals()['_rebase']=(%s, dict(%s))" % (repr(p[0]), p[1])) - elif p: - code("globals()['_rebase']=(%s, {})" % repr(p[0])) - else: - code(line) - else: # Line starting with text (not '%') or '%%' (escaped) - if line.strip().startswith('%%'): - line = line.replace('%%', '%', 1) - ptrbuffer.append(yield_tokens(line)) - flush() - return '\n'.join(codebuffer) + '\n' - - def subtemplate(self, _name, _stdout, *args, **kwargs): - for dictarg in args: kwargs.update(dictarg) - if _name not in self.cache: - self.cache[_name] = self.__class__(name=_name, lookup=self.lookup) - return self.cache[_name].execute(_stdout, kwargs) - - def execute(self, _stdout, *args, **kwargs): - for dictarg in args: kwargs.update(dictarg) - env = self.defaults.copy() - env.update({'_stdout': _stdout, '_printlist': _stdout.extend, - '_include': self.subtemplate, '_str': self._str, - '_escape': self._escape}) - env.update(kwargs) - eval(self.co, env) - if '_rebase' in env: - subtpl, rargs = env['_rebase'] - subtpl = self.__class__(name=subtpl, lookup=self.lookup) - rargs['_base'] = _stdout[:] #copy stdout - del _stdout[:] # clear stdout - return subtpl.execute(_stdout, rargs) - return env - - def render(self, *args, **kwargs): - """ Render the template using keyword arguments as local variables. """ - for dictarg in args: kwargs.update(dictarg) - stdout = [] - self.execute(stdout, kwargs) - return ''.join(stdout) - - -def template(*args, **kwargs): - ''' - Get a rendered template as a string iterator. - You can use a name, a filename or a template string as first parameter. - Template rendering arguments can be passed as dictionaries - or directly (as keyword arguments). 
- ''' - tpl = args[0] if args else None - template_adapter = kwargs.pop('template_adapter', SimpleTemplate) - if tpl not in TEMPLATES or DEBUG: - settings = kwargs.pop('template_settings', {}) - lookup = kwargs.pop('template_lookup', TEMPLATE_PATH) - if isinstance(tpl, template_adapter): - TEMPLATES[tpl] = tpl - if settings: TEMPLATES[tpl].prepare(**settings) - elif "\n" in tpl or "{" in tpl or "%" in tpl or '$' in tpl: - TEMPLATES[tpl] = template_adapter(source=tpl, lookup=lookup, **settings) - else: - TEMPLATES[tpl] = template_adapter(name=tpl, lookup=lookup, **settings) - if not TEMPLATES[tpl]: - abort(500, 'Template (%s) not found' % tpl) - for dictarg in args[1:]: kwargs.update(dictarg) - return TEMPLATES[tpl].render(kwargs) - -mako_template = functools.partial(template, template_adapter=MakoTemplate) -cheetah_template = functools.partial(template, template_adapter=CheetahTemplate) -jinja2_template = functools.partial(template, template_adapter=Jinja2Template) -simpletal_template = functools.partial(template, template_adapter=SimpleTALTemplate) - - -def view(tpl_name, **defaults): - ''' Decorator: renders a template for a handler. - The handler can control its behavior like that: - - - return a dict of template vars to fill out the template - - return something other than a dict and the view decorator will not - process the template, but return the handler result as is. - This includes returning a HTTPResponse(dict) to get, - for instance, JSON with autojson or other castfilters. - ''' - def decorator(func): - @functools.wraps(func) - def wrapper(*args, **kwargs): - result = func(*args, **kwargs) - if isinstance(result, (dict, DictMixin)): - tplvars = defaults.copy() - tplvars.update(result) - return template(tpl_name, **tplvars) - return result - return wrapper - return decorator - -mako_view = functools.partial(view, template_adapter=MakoTemplate) -cheetah_view = functools.partial(view, template_adapter=CheetahTemplate) -jinja2_view = functools.partial(view, template_adapter=Jinja2Template) -simpletal_view = functools.partial(view, template_adapter=SimpleTALTemplate) - - - - - - -############################################################################### -# Constants and Globals ######################################################## -############################################################################### - - -TEMPLATE_PATH = ['./', './views/'] -TEMPLATES = {} -DEBUG = False -MEMFILE_MAX = 1024*100 - -#: A dict to map HTTP status codes (e.g. 404) to phrases (e.g. 'Not Found') -HTTP_CODES = six.moves.http_client.responses -HTTP_CODES[418] = "I'm a teapot" # RFC 2324 - -#: The default template used for error pages. Override with @error() -ERROR_PAGE_TEMPLATE = """ -%try: - %from bottle import DEBUG, HTTP_CODES, request, touni - %status_name = HTTP_CODES.get(e.status, 'Unknown').title() - - - - Error {{e.status}}: {{status_name}} - - - -

-            <h1>Error {{e.status}}: {{status_name}}</h1>
-            <p>Sorry, the requested URL <tt>{{repr(request.url)}}</tt> caused an error:</p>
-            <pre>{{e.output}}</pre>
-            %if DEBUG and e.exception:
-              <h2>Exception:</h2>
-              <pre>{{repr(e.exception)}}</pre>
-            %end
-            %if DEBUG and e.traceback:
-              <h2>Traceback:</h2>
-              <pre>{{e.traceback}}</pre>
- %end - - -%except ImportError: - ImportError: Could not generate the error page. Please add bottle to sys.path -%end -""" - -#: A thread-save instance of :class:`Request` representing the `current` request. -request = Request() - -#: A thread-save instance of :class:`Response` used to build the HTTP response. -response = Response() - -#: A thread-save namepsace. Not used by Bottle. -local = threading.local() - -# Initialize app stack (create first empty Bottle app) -# BC: 0.6.4 and needed for run() -app = default_app = AppStack() -app.push() - -#: A virtual package that redirects import statements. -#: Example: ``import bottle.ext.sqlite`` actually imports `bottle_sqlite`. -ext = _ImportRedirect(__name__+'.ext', 'bottle_%s').module diff --git a/examples/analytics/css/analytics.css b/examples/analytics/css/analytics.css deleted file mode 100644 index b5b11d114..000000000 --- a/examples/analytics/css/analytics.css +++ /dev/null @@ -1,279 +0,0 @@ - -body { - width: 90%; - margin: 0px auto; -} -.event-table { - width: 100%; - margin-top: 30px; - margin-bottom: 30px; - display: table; - cellspacing: 0; - border-collapse: collapse; - border: 1px solid gainsboro; -} -.table-head { - background-color: transparent; - display: table-row; - border: 0; - margin: 0; - padding: 0; -} -.table-head-cell { - padding: 10px 15px; - color: white; - text-shadow: 0px 1px 1px #555; - font-weight: bold; - border-width: 0px 0px; - border-color: #1E304D; - border-style: solid; - text-align: right; - background: -webkit-gradient(linear, left top, left bottom, from(#5C9CCC), to(#0B61A4)); - background: -moz-linear-gradient(top, #5C9CCC, #0B61A4); - filter: progid:DXImageTransform.Microsoft.gradient(startColorstr="#5C9CCC", endColorstr="#0B61A4"); -} -.event-name-cell { - padding: 5px 15px; - cursor: pointer; - border-bottom: 1px solid gainsboro; - border-right: 1px solid gainsboro; - background: -webkit-gradient(linear, left top, left bottom, from(#DADADA), to(#DFDFDF)); - background: -moz-linear-gradient(top, #DADADA, #DFDFDF); - filter: progid:DXImageTransform.Microsoft.gradient(startColorstr="#DADADA", endColorstr="#DFDFDF"); - font-family: 'lucida grande', arial, tahoma, verdana, sans-serif; - font-size: 12px; - font-style: normal; - font-variant: normal; - font-weight: normal; -} -.event-table-cell { - padding: 5px 15px; - text-align: right; - background-color: white; - border-bottom: 1px solid gainsboro; - border-right: 1px solid gainsboro; -} -.graph { - margin-top: 30px; - margin-right: 10px; -} -.left { - float: left; -} -.right { - float: right; -} -.center { - text-align: center; -} -.clear { - clear: both; -} -.uppercase { - text-transform:uppercase; -} -a, a:visited { - text-decoration: none; - outline: 0; -} -a:hover { - text-decoration: underline; -} -.event-name-cell a { - color: #416590; -} -.graph { - width: 95%; - height: 400px; - margin: 0px auto; - margin-top: 10px; -} - -#clearSelection { - position: absolute; - top: 25px; - right: 5px; - margin-right: 40px; - z-index: 1000; -} - -#graph-and-legend { - position: relative; - border: 1px solid #565656; - margin-top: 10px; -} - -#legend { - width: 90%; - margin: 0px auto; - margin-top: 10px; - margin-bottom: 10px; - border: 1px solid black; - -moz-border-radius: 5px; - -webkit-border-radius: 5px; - -khtml-border-radius: 5px; - border-radius: 5px; -} - -.legend-text { - text-overflow: ellipsis; - overflow: hidden !important; - white-space: nowrap !important; - width: 100%; - margin-left: 2px; - margin-top: 5px; - font-size: 12px; - display: block; 
-} - -#legend .ui-button { - width: 23%; - margin: 5px 5px 5px 5px !important; - border: 0px; - height: 30px; -} - -#legend .ui-state-default { - background: white !important; - color: #DADADA !important; -} -#legend .ui-state-active { - background: white !important; - color: black !important; -} - -#legend label.ui-widget[aria-pressed=false] .legend-color { - background-color: #DADADA !important; -} - -.legend-color { - display: block; - width: 100%; - height: 5px; -} - -#tooltip { - position: absolute; - display: none; - border: 1px solid #fdd; - padding: 2px; - background-color: #fee; - opacity: 0.8; -} - -#tooltip #tooltip-label { - text-overflow: ellipsis !important; - overflow: hidden !important; - white-space: nowrap !important; - max-width: 150px !important; - float: left; -} - -.gray-gradient-box { - border: 1px solid #CFCFCF; - background: #F2F2F2; - filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='white',endColorstr='#E4E4E4'); - background: -webkit-gradient(linear,left top,left bottom,from(white),to(#E4E4E4)); - background: -moz-linear-gradient(top,white,#E4E4E4); -} -.big-title { - color: #4E74A1; - font-size: 16pt; - font-weight: bold; - line-height: 18px; - margin: 5px; -} -.mini-title { - color: #4E74A1; - font-size: 14pt; - margin-left: 20px; -} - -div.mini-title sup { - font-size: 8pt; -} -.arrows { - font-size: 8pt; -} - -#properties-accordion { - margin-top: 30px; -} - -.hidden { - display: none; -} - -.visible { - display: block; -} -.clear-link { - display: inline; -} - - -#header { - position: relative; - min-height: 40px; -} -#time-range-div { - position: absolute; - top: 0px; - right: 0px; - height: 100%; -} - -#time-range-div .ui-selectmenu { - height: 100%; - border: 0px; -} - -#tooltip-time { - font-size: 10pt; - margin-top: 2px; - color: #777; -} - -.application-info { - margin-bottom: 6px; - background: #aaa; - border: 1px solid #DBDBDB; - font-weight: bold; - height: 44px; - line-height: 44px; - padding-left: 20px; - - background: -webkit-gradient(linear,left top,left bottom,from(white),to(#EEE)); - background: -moz-linear-gradient(top,white,#EEE); - filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='white',endColorstr='#EEE'); - -} -.application-name { - padding-left: 10px; -} -.application-event-count { - font-size: 14px; - padding-right: 10px; -} -.application-info a { - color: #416590; -} -#title { - margin-bottom: 6px; - background: #aaa; - border: 1px solid #DBDBDB; - font-weight: bold; - height: 44px; - line-height: 44px; - padding-left: 20px; - - background: -webkit-gradient(linear,left top,left bottom,from(white),to(#EEE)); - background: -moz-linear-gradient(top,white,#EEE); - filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='white',endColorstr='#EEE'); -} -#title-text { - font-size: 20px; - color: black; - padding-left: 10px; - text-align: center; -} \ No newline at end of file diff --git a/examples/analytics/css/jquery.ui.selectmenu.css b/examples/analytics/css/jquery.ui.selectmenu.css deleted file mode 100755 index 7f1b42fec..000000000 --- a/examples/analytics/css/jquery.ui.selectmenu.css +++ /dev/null @@ -1,30 +0,0 @@ -/* Selectmenu -----------------------------------*/ -.ui-selectmenu { display: block; display: inline-block; position: relative; height: 2.2em; vertical-align: middle; text-decoration: none; overflow: hidden; zoom: 1; } -.ui-selectmenu-icon { position:absolute; right:6px; margin-top:-8px; top: 50%; } -.ui-selectmenu-menu { padding:0; margin:0; list-style:none; position:absolute; top: 
0; display: none; overflow: auto; z-index: 1005;} /* z-index: 1005 to make selectmenu work with dialog */ -.ui-selectmenu-open { display: block; } -.ui-selectmenu-menu-popup { margin-top: -1px; } -.ui-selectmenu-menu-dropdown { } -.ui-selectmenu-menu li { padding:0; margin:0; display: block; border-top: 1px dotted transparent; border-bottom: 1px dotted transparent; border-right-width: 0 !important; border-left-width: 0 !important; font-weight: normal !important; } -.ui-selectmenu-menu li a,.ui-selectmenu-status { line-height: 1.4em; display: block; padding: .405em 1em; outline:none; text-decoration:none; } -.ui-selectmenu-menu li.ui-state-disabled a, .ui-state-disabled { cursor: default; } -.ui-selectmenu-menu li.ui-selectmenu-hasIcon a, -.ui-selectmenu-hasIcon .ui-selectmenu-status { padding-left: 20px; position: relative; margin-left: 5px; } -.ui-selectmenu-menu li .ui-icon, .ui-selectmenu-status .ui-icon { position: absolute; top: 1em; margin-top: -8px; left: 0; } -.ui-selectmenu-status { line-height: 1.4em; } -.ui-selectmenu-open li.ui-selectmenu-item-focus a { } -.ui-selectmenu-open li.ui-selectmenu-item-selected { } -.ui-selectmenu-menu li span,.ui-selectmenu-status span { display:block; margin-bottom: .2em; } -.ui-selectmenu-menu li .ui-selectmenu-item-header { font-weight: bold; } -.ui-selectmenu-menu li .ui-selectmenu-item-content { } -.ui-selectmenu-menu li .ui-selectmenu-item-footer { opacity: .8; } -/* for optgroups */ -.ui-selectmenu-menu .ui-selectmenu-group { font-size: 1em; } -.ui-selectmenu-menu .ui-selectmenu-group .ui-selectmenu-group-label { line-height: 1.4em; display:block; padding: .6em .5em 0; font-weight: bold; } -.ui-selectmenu-menu .ui-selectmenu-group ul { margin: 0; padding: 0; } -/* IE6 workaround (dotted transparent borders) */ -* html .ui-selectmenu-menu li { border-color: pink; filter:chroma(color=pink); width:100%; } -* html .ui-selectmenu-menu li a { position: relative } -/* IE7 workaround (opacity disabled) */ -*+html .ui-state-disabled, *+html .ui-state-disabled a { color: silver; } \ No newline at end of file diff --git a/examples/analytics/css/showLoading.css b/examples/analytics/css/showLoading.css deleted file mode 100644 index b3bf1da4d..000000000 --- a/examples/analytics/css/showLoading.css +++ /dev/null @@ -1,13 +0,0 @@ -.loading-indicator { - height: 80px; - width: 80px; - background: url( '/static/images/loading.gif' ); - background-repeat: no-repeat; - background-position: center center; -} - -.loading-indicator-overlay { - background-color: #FFFFFF; - opacity: 0.6; - filter: alpha(opacity = 60); -} \ No newline at end of file diff --git a/examples/analytics/images/loading.gif b/examples/analytics/images/loading.gif deleted file mode 100644 index c69e93723..000000000 Binary files a/examples/analytics/images/loading.gif and /dev/null differ diff --git a/examples/analytics/input.py b/examples/analytics/input.py deleted file mode 100755 index 1bbd1db98..000000000 --- a/examples/analytics/input.py +++ /dev/null @@ -1,111 +0,0 @@ -#!/usr/bin/env python -# -# Copyright 2011-2015 Splunk, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"): you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the -# License for the specific language governing permissions and limitations -# under the License. - -from __future__ import absolute_import -import sys, os -from splunklib import six -sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..", "..")) -from datetime import datetime -import splunklib.client as client - -try: - import utils -except ImportError: - raise Exception("Add the SDK repository to your PYTHONPATH to run the examples " - "(e.g., export PYTHONPATH=~/splunk-sdk-python).") - -__all__ = [ - "AnalyticsTracker", -] - -ANALYTICS_INDEX_NAME = "sample_analytics" -ANALYTICS_SOURCETYPE = "sample_analytics" -APPLICATION_KEY = "application" -EVENT_KEY = "event" -DISTINCT_KEY = "distinct_id" -EVENT_TERMINATOR = "\\r\\n-----end-event-----\\r\\n" -PROPERTY_PREFIX = "analytics_prop__" - -class AnalyticsTracker: - def __init__(self, application_name, splunk_info, index = ANALYTICS_INDEX_NAME): - self.application_name = application_name - self.splunk = client.connect(**splunk_info) - self.index = index - - if not self.index in self.splunk.indexes: - self.splunk.indexes.create(self.index) - assert(self.index in self.splunk.indexes) - - if ANALYTICS_SOURCETYPE not in self.splunk.confs['props']: - self.splunk.confs["props"].create(ANALYTICS_SOURCETYPE) - stanza = self.splunk.confs["props"][ANALYTICS_SOURCETYPE] - stanza.submit({ - "LINE_BREAKER": "(%s)" % EVENT_TERMINATOR, - "CHARSET": "UTF-8", - "SHOULD_LINEMERGE": "false" - }) - assert(ANALYTICS_SOURCETYPE in self.splunk.confs['props']) - - @staticmethod - def encode(props): - encoded = " " - for k,v in six.iteritems(props): - # We disallow dictionaries - it doesn't quite make sense. - assert(not isinstance(v, dict)) - - # We do not allow lists - assert(not isinstance(v, list)) - - # This is a hack to escape quotes - if isinstance(v, str): - v = v.replace('"', "'") - - encoded += ('%s%s="%s" ' % (PROPERTY_PREFIX, k, v)) - - return encoded - - def track(self, event_name, time = None, distinct_id = None, **props): - if time is None: - time = datetime.now().isoformat() - - event = '%s %s="%s" %s="%s" ' % ( - time, - APPLICATION_KEY, self.application_name, - EVENT_KEY, event_name) - - assert(not APPLICATION_KEY in list(props.keys())) - assert(not EVENT_KEY in list(props.keys())) - - if distinct_id is not None: - event += ('%s="%s" ' % (DISTINCT_KEY, distinct_id)) - assert(not DISTINCT_KEY in list(props.keys())) - - event += AnalyticsTracker.encode(props) - - self.splunk.indexes[self.index].submit(event, sourcetype=ANALYTICS_SOURCETYPE) - -def main(): - usage = "" - - argv = sys.argv[1:] - - splunk_opts = utils.parse(argv, {}, ".env", usage=usage) - tracker = AnalyticsTracker("cli_app", splunk_opts.kwargs) - - #tracker.track("test_event", "abc123", foo="bar", bar="foo") - -if __name__ == "__main__": - main() diff --git a/examples/analytics/js/date.format.js b/examples/analytics/js/date.format.js deleted file mode 100644 index 25daaa564..000000000 --- a/examples/analytics/js/date.format.js +++ /dev/null @@ -1,125 +0,0 @@ -/* - * Date Format 1.2.3 - * (c) 2007-2009 Steven Levithan - * MIT license - * - * Includes enhancements by Scott Trenda - * and Kris Kowal - * - * Accepts a date, a mask, or a date and a mask. - * Returns a formatted version of the given date. - * The date defaults to the current date/time. - * The mask defaults to dateFormat.masks.default. 
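 * Illustrative calls (editor's sketch, not part of the original file; sample
 * timestamp and mask choices are assumptions, results follow the code below):
 *   dateFormat(new Date(2012, 0, 31, 14, 5, 7), "yyyy-mm-dd")  -> "2012-01-31"
 *   dateFormat(new Date(2012, 0, 31, 14, 5, 7), "isoTime")     -> "14:05:07" (named mask "HH:MM:ss")
 *   dateFormat("longDate")                                     -> mask-only call, formats the current date/time
 *   new Date().format("UTC:yyyy-mm-dd'T'HH:MM:ss'Z'")          -> via the Date.prototype helper at the end of this file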
- */ - -var dateFormat = function () { - var token = /d{1,4}|m{1,4}|yy(?:yy)?|([HhMsTt])\1?|[LloSZ]|"[^"]*"|'[^']*'/g, - timezone = /\b(?:[PMCEA][SDP]T|(?:Pacific|Mountain|Central|Eastern|Atlantic) (?:Standard|Daylight|Prevailing) Time|(?:GMT|UTC)(?:[-+]\d{4})?)\b/g, - timezoneClip = /[^-+\dA-Z]/g, - pad = function (val, len) { - val = String(val); - len = len || 2; - while (val.length < len) val = "0" + val; - return val; - }; - - // Regexes and supporting functions are cached through closure - return function (date, mask, utc) { - var dF = dateFormat; - - // You can't provide utc if you skip other args (use the "UTC:" mask prefix) - if (arguments.length == 1 && Object.prototype.toString.call(date) == "[object String]" && !/\d/.test(date)) { - mask = date; - date = undefined; - } - - // Passing date through Date applies Date.parse, if necessary - date = date ? new Date(date) : new Date; - if (isNaN(date)) throw SyntaxError("invalid date"); - - mask = String(dF.masks[mask] || mask || dF.masks["default"]); - - // Allow setting the utc argument via the mask - if (mask.slice(0, 4) == "UTC:") { - mask = mask.slice(4); - utc = true; - } - - var _ = utc ? "getUTC" : "get", - d = date[_ + "Date"](), - D = date[_ + "Day"](), - m = date[_ + "Month"](), - y = date[_ + "FullYear"](), - H = date[_ + "Hours"](), - M = date[_ + "Minutes"](), - s = date[_ + "Seconds"](), - L = date[_ + "Milliseconds"](), - o = utc ? 0 : date.getTimezoneOffset(), - flags = { - d: d, - dd: pad(d), - ddd: dF.i18n.dayNames[D], - dddd: dF.i18n.dayNames[D + 7], - m: m + 1, - mm: pad(m + 1), - mmm: dF.i18n.monthNames[m], - mmmm: dF.i18n.monthNames[m + 12], - yy: String(y).slice(2), - yyyy: y, - h: H % 12 || 12, - hh: pad(H % 12 || 12), - H: H, - HH: pad(H), - M: M, - MM: pad(M), - s: s, - ss: pad(s), - l: pad(L, 3), - L: pad(L > 99 ? Math.round(L / 10) : L), - t: H < 12 ? "a" : "p", - tt: H < 12 ? "am" : "pm", - T: H < 12 ? "A" : "P", - TT: H < 12 ? "AM" : "PM", - Z: utc ? "UTC" : (String(date).match(timezone) || [""]).pop().replace(timezoneClip, ""), - o: (o > 0 ? "-" : "+") + pad(Math.floor(Math.abs(o) / 60) * 100 + Math.abs(o) % 60, 4), - S: ["th", "st", "nd", "rd"][d % 10 > 3 ? 0 : (d % 100 - d % 10 != 10) * d % 10] - }; - - return mask.replace(token, function ($0) { - return $0 in flags ? flags[$0] : $0.slice(1, $0.length - 1); - }); - }; -}(); - -// Some common format strings -dateFormat.masks = { - "default": "ddd mmm dd yyyy HH:MM:ss", - shortDate: "m/d/yy", - mediumDate: "mmm d, yyyy", - longDate: "mmmm d, yyyy", - fullDate: "dddd, mmmm d, yyyy", - shortTime: "h:MM TT", - mediumTime: "h:MM:ss TT", - longTime: "h:MM:ss TT Z", - isoDate: "yyyy-mm-dd", - isoTime: "HH:MM:ss", - isoDateTime: "yyyy-mm-dd'T'HH:MM:ss", - isoUtcDateTime: "UTC:yyyy-mm-dd'T'HH:MM:ss'Z'" -}; - -// Internationalization strings -dateFormat.i18n = { - dayNames: [ - "Sun", "Mon", "Tue", "Wed", "Thu", "Fri", "Sat", - "Sunday", "Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday" - ], - monthNames: [ - "Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec", - "January", "February", "March", "April", "May", "June", "July", "August", "September", "October", "November", "December" - ] -}; - -// For convenience... 
-Date.prototype.format = function (mask, utc) { - return dateFormat(this, mask, utc); -}; diff --git a/examples/analytics/js/jquery.flot.js b/examples/analytics/js/jquery.flot.js deleted file mode 100644 index 67b3c017b..000000000 --- a/examples/analytics/js/jquery.flot.js +++ /dev/null @@ -1,2599 +0,0 @@ -/*! Javascript plotting library for jQuery, v. 0.7. - * - * Released under the MIT license by IOLA, December 2007. - * - */ - -// first an inline dependency, jquery.colorhelpers.js, we inline it here -// for convenience - -/* Plugin for jQuery for working with colors. - * - * Version 1.1. - * - * Inspiration from jQuery color animation plugin by John Resig. - * - * Released under the MIT license by Ole Laursen, October 2009. - * - * Examples: - * - * $.color.parse("#fff").scale('rgb', 0.25).add('a', -0.5).toString() - * var c = $.color.extract($("#mydiv"), 'background-color'); - * console.log(c.r, c.g, c.b, c.a); - * $.color.make(100, 50, 25, 0.4).toString() // returns "rgba(100,50,25,0.4)" - * - * Note that .scale() and .add() return the same modified object - * instead of making a new one. - * - * V. 1.1: Fix error handling so e.g. parsing an empty string does - * produce a color rather than just crashing. - */ -(function(B){B.color={};B.color.make=function(F,E,C,D){var G={};G.r=F||0;G.g=E||0;G.b=C||0;G.a=D!=null?D:1;G.add=function(J,I){for(var H=0;H<J.length;++H){G[J.charAt(H)]+=I}return G.normalize()};G.scale=function(J,I){for(var H=0;H<J.length;++H){G[J.charAt(H)]*=I}return G.normalize()};G.toString=function(){if(G.a>=1){return"rgb("+[G.r,G.g,G.b].join(",")+")"}else{return"rgba("+[G.r,G.g,G.b,G.a].join(",")+")"}};G.normalize=function(){function H(J,K,I){return K<J?J:(K>I?I:K)}G.r=H(0,parseInt(G.r),255);G.g=H(0,parseInt(G.g),255);G.b=H(0,parseInt(G.b),255);G.a=H(0,G.a,1);return G};G.clone=function(){return B.color.make(G.r,G.b,G.g,G.a)};return G.normalize()};B.color.extract=function(D,C){var E;do{E=D.css(C).toLowerCase();if(E!=""&&E!="transparent"){break}D=D.parent()}while(!B.nodeName(D.get(0),"body"));if(E=="rgba(0, 0, 0, 0)"){E="transparent"}return B.color.parse(E)};B.color.parse=function(F){var E,C=B.color.make;if(E=/rgb\(\s*([0-9]{1,3})\s*,\s*([0-9]{1,3})\s*,\s*([0-9]{1,3})\s*\)/.exec(F)){return C(parseInt(E[1],10),parseInt(E[2],10),parseInt(E[3],10))}if(E=/rgba\(\s*([0-9]{1,3})\s*,\s*([0-9]{1,3})\s*,\s*([0-9]{1,3})\s*,\s*([0-9]+(?:\.[0-9]+)?)\s*\)/.exec(F)){return C(parseInt(E[1],10),parseInt(E[2],10),parseInt(E[3],10),parseFloat(E[4]))}if(E=/rgb\(\s*([0-9]+(?:\.[0-9]+)?)\%\s*,\s*([0-9]+(?:\.[0-9]+)?)\%\s*,\s*([0-9]+(?:\.[0-9]+)?)\%\s*\)/.exec(F)){return C(parseFloat(E[1])*2.55,parseFloat(E[2])*2.55,parseFloat(E[3])*2.55)}if(E=/rgba\(\s*([0-9]+(?:\.[0-9]+)?)\%\s*,\s*([0-9]+(?:\.[0-9]+)?)\%\s*,\s*([0-9]+(?:\.[0-9]+)?)\%\s*,\s*([0-9]+(?:\.[0-9]+)?)\s*\)/.exec(F)){return C(parseFloat(E[1])*2.55,parseFloat(E[2])*2.55,parseFloat(E[3])*2.55,parseFloat(E[4]))}if(E=/#([a-fA-F0-9]{2})([a-fA-F0-9]{2})([a-fA-F0-9]{2})/.exec(F)){return C(parseInt(E[1],16),parseInt(E[2],16),parseInt(E[3],16))}if(E=/#([a-fA-F0-9])([a-fA-F0-9])([a-fA-F0-9])/.exec(F)){return C(parseInt(E[1]+E[1],16),parseInt(E[2]+E[2],16),parseInt(E[3]+E[3],16))}var D=B.trim(F).toLowerCase();if(D=="transparent"){return C(255,255,255,0)}else{E=A[D]||[0,0,0];return C(E[0],E[1],E[2])}};var 
A={aqua:[0,255,255],azure:[240,255,255],beige:[245,245,220],black:[0,0,0],blue:[0,0,255],brown:[165,42,42],cyan:[0,255,255],darkblue:[0,0,139],darkcyan:[0,139,139],darkgrey:[169,169,169],darkgreen:[0,100,0],darkkhaki:[189,183,107],darkmagenta:[139,0,139],darkolivegreen:[85,107,47],darkorange:[255,140,0],darkorchid:[153,50,204],darkred:[139,0,0],darksalmon:[233,150,122],darkviolet:[148,0,211],fuchsia:[255,0,255],gold:[255,215,0],green:[0,128,0],indigo:[75,0,130],khaki:[240,230,140],lightblue:[173,216,230],lightcyan:[224,255,255],lightgreen:[144,238,144],lightgrey:[211,211,211],lightpink:[255,182,193],lightyellow:[255,255,224],lime:[0,255,0],magenta:[255,0,255],maroon:[128,0,0],navy:[0,0,128],olive:[128,128,0],orange:[255,165,0],pink:[255,192,203],purple:[128,0,128],violet:[128,0,128],red:[255,0,0],silver:[192,192,192],white:[255,255,255],yellow:[255,255,0]}})(jQuery); - -// the actual Flot code -(function($) { - function Plot(placeholder, data_, options_, plugins) { - // data is on the form: - // [ series1, series2 ... ] - // where series is either just the data as [ [x1, y1], [x2, y2], ... ] - // or { data: [ [x1, y1], [x2, y2], ... ], label: "some label", ... } - - var series = [], - options = { - // the color theme used for graphs - colors: ["#edc240", "#afd8f8", "#cb4b4b", "#4da74d", "#9440ed"], - legend: { - show: true, - noColumns: 1, // number of colums in legend table - labelFormatter: null, // fn: string -> string - labelBoxBorderColor: "#ccc", // border color for the little label boxes - container: null, // container (as jQuery object) to put legend in, null means default on top of graph - position: "ne", // position of default legend container within plot - margin: 5, // distance from grid edge to default legend container within plot - backgroundColor: null, // null means auto-detect - backgroundOpacity: 0.85 // set to 0 to avoid background - }, - xaxis: { - show: null, // null = auto-detect, true = always, false = never - position: "bottom", // or "top" - mode: null, // null or "time" - color: null, // base color, labels, ticks - tickColor: null, // possibly different color of ticks, e.g. "rgba(0,0,0,0.15)" - transform: null, // null or f: number -> number to transform axis - inverseTransform: null, // if transform is set, this should be the inverse function - min: null, // min. value to show, null means set automatically - max: null, // max. value to show, null means set automatically - autoscaleMargin: null, // margin in % to add if auto-setting min/max - ticks: null, // either [1, 3] or [[1, "a"], 3] or (fn: axis info -> ticks) or app. number of ticks for auto-ticks - tickFormatter: null, // fn: number -> string - labelWidth: null, // size of tick labels in pixels - labelHeight: null, - reserveSpace: null, // whether to reserve space even if axis isn't shown - tickLength: null, // size in pixels of ticks, or "full" for whole line - alignTicksWithAxis: null, // axis number or null for no sync - - // mode specific options - tickDecimals: null, // no. 
of decimals, null means auto - tickSize: null, // number or [number, "unit"] - minTickSize: null, // number or [number, "unit"] - monthNames: null, // list of names of months - timeformat: null, // format string to use - twelveHourClock: false // 12 or 24 time in time mode - }, - yaxis: { - autoscaleMargin: 0.02, - position: "left" // or "right" - }, - xaxes: [], - yaxes: [], - series: { - points: { - show: false, - radius: 3, - lineWidth: 2, // in pixels - fill: true, - fillColor: "#ffffff", - symbol: "circle" // or callback - }, - lines: { - // we don't put in show: false so we can see - // whether lines were actively disabled - lineWidth: 2, // in pixels - fill: false, - fillColor: null, - steps: false - }, - bars: { - show: false, - lineWidth: 2, // in pixels - barWidth: 1, // in units of the x axis - fill: true, - fillColor: null, - align: "left", // or "center" - horizontal: false - }, - shadowSize: 3 - }, - grid: { - show: true, - aboveData: false, - color: "#545454", // primary color used for outline and labels - backgroundColor: null, // null for transparent, else color - borderColor: null, // set if different from the grid color - tickColor: null, // color for the ticks, e.g. "rgba(0,0,0,0.15)" - labelMargin: 5, // in pixels - axisMargin: 8, // in pixels - borderWidth: 2, // in pixels - minBorderMargin: null, // in pixels, null means taken from points radius - markings: null, // array of ranges or fn: axes -> array of ranges - markingsColor: "#f4f4f4", - markingsLineWidth: 2, - // interactive stuff - clickable: false, - hoverable: false, - autoHighlight: true, // highlight in case mouse is near - mouseActiveRadius: 10 // how far the mouse can be away to activate an item - }, - hooks: {} - }, - canvas = null, // the canvas for the plot itself - overlay = null, // canvas for interactive stuff on top of plot - eventHolder = null, // jQuery object that events should be bound to - ctx = null, octx = null, - xaxes = [], yaxes = [], - plotOffset = { left: 0, right: 0, top: 0, bottom: 0}, - canvasWidth = 0, canvasHeight = 0, - plotWidth = 0, plotHeight = 0, - hooks = { - processOptions: [], - processRawData: [], - processDatapoints: [], - drawSeries: [], - draw: [], - bindEvents: [], - drawOverlay: [], - shutdown: [] - }, - plot = this; - - // public functions - plot.setData = setData; - plot.setupGrid = setupGrid; - plot.draw = draw; - plot.getPlaceholder = function() { return placeholder; }; - plot.getCanvas = function() { return canvas; }; - plot.getPlotOffset = function() { return plotOffset; }; - plot.width = function () { return plotWidth; }; - plot.height = function () { return plotHeight; }; - plot.offset = function () { - var o = eventHolder.offset(); - o.left += plotOffset.left; - o.top += plotOffset.top; - return o; - }; - plot.getData = function () { return series; }; - plot.getAxes = function () { - var res = {}, i; - $.each(xaxes.concat(yaxes), function (_, axis) { - if (axis) - res[axis.direction + (axis.n != 1 ? 
axis.n : "") + "axis"] = axis; - }); - return res; - }; - plot.getXAxes = function () { return xaxes; }; - plot.getYAxes = function () { return yaxes; }; - plot.c2p = canvasToAxisCoords; - plot.p2c = axisToCanvasCoords; - plot.getOptions = function () { return options; }; - plot.highlight = highlight; - plot.unhighlight = unhighlight; - plot.triggerRedrawOverlay = triggerRedrawOverlay; - plot.pointOffset = function(point) { - return { - left: parseInt(xaxes[axisNumber(point, "x") - 1].p2c(+point.x) + plotOffset.left), - top: parseInt(yaxes[axisNumber(point, "y") - 1].p2c(+point.y) + plotOffset.top) - }; - }; - plot.shutdown = shutdown; - plot.resize = function () { - getCanvasDimensions(); - resizeCanvas(canvas); - resizeCanvas(overlay); - }; - - // public attributes - plot.hooks = hooks; - - // initialize - initPlugins(plot); - parseOptions(options_); - setupCanvases(); - setData(data_); - setupGrid(); - draw(); - bindEvents(); - - - function executeHooks(hook, args) { - args = [plot].concat(args); - for (var i = 0; i < hook.length; ++i) - hook[i].apply(this, args); - } - - function initPlugins() { - for (var i = 0; i < plugins.length; ++i) { - var p = plugins[i]; - p.init(plot); - if (p.options) - $.extend(true, options, p.options); - } - } - - function parseOptions(opts) { - var i; - - $.extend(true, options, opts); - - if (options.xaxis.color == null) - options.xaxis.color = options.grid.color; - if (options.yaxis.color == null) - options.yaxis.color = options.grid.color; - - if (options.xaxis.tickColor == null) // backwards-compatibility - options.xaxis.tickColor = options.grid.tickColor; - if (options.yaxis.tickColor == null) // backwards-compatibility - options.yaxis.tickColor = options.grid.tickColor; - - if (options.grid.borderColor == null) - options.grid.borderColor = options.grid.color; - if (options.grid.tickColor == null) - options.grid.tickColor = $.color.parse(options.grid.color).scale('a', 0.22).toString(); - - // fill in defaults in axes, copy at least always the - // first as the rest of the code assumes it'll be there - for (i = 0; i < Math.max(1, options.xaxes.length); ++i) - options.xaxes[i] = $.extend(true, {}, options.xaxis, options.xaxes[i]); - for (i = 0; i < Math.max(1, options.yaxes.length); ++i) - options.yaxes[i] = $.extend(true, {}, options.yaxis, options.yaxes[i]); - - // backwards compatibility, to be removed in future - if (options.xaxis.noTicks && options.xaxis.ticks == null) - options.xaxis.ticks = options.xaxis.noTicks; - if (options.yaxis.noTicks && options.yaxis.ticks == null) - options.yaxis.ticks = options.yaxis.noTicks; - if (options.x2axis) { - options.xaxes[1] = $.extend(true, {}, options.xaxis, options.x2axis); - options.xaxes[1].position = "top"; - } - if (options.y2axis) { - options.yaxes[1] = $.extend(true, {}, options.yaxis, options.y2axis); - options.yaxes[1].position = "right"; - } - if (options.grid.coloredAreas) - options.grid.markings = options.grid.coloredAreas; - if (options.grid.coloredAreasColor) - options.grid.markingsColor = options.grid.coloredAreasColor; - if (options.lines) - $.extend(true, options.series.lines, options.lines); - if (options.points) - $.extend(true, options.series.points, options.points); - if (options.bars) - $.extend(true, options.series.bars, options.bars); - if (options.shadowSize != null) - options.series.shadowSize = options.shadowSize; - - // save options on axes for future reference - for (i = 0; i < options.xaxes.length; ++i) - getOrCreateAxis(xaxes, i + 1).options = options.xaxes[i]; - for (i = 0; i < 
options.yaxes.length; ++i) - getOrCreateAxis(yaxes, i + 1).options = options.yaxes[i]; - - // add hooks from options - for (var n in hooks) - if (options.hooks[n] && options.hooks[n].length) - hooks[n] = hooks[n].concat(options.hooks[n]); - - executeHooks(hooks.processOptions, [options]); - } - - function setData(d) { - series = parseData(d); - fillInSeriesOptions(); - processData(); - } - - function parseData(d) { - var res = []; - for (var i = 0; i < d.length; ++i) { - var s = $.extend(true, {}, options.series); - - if (d[i].data != null) { - s.data = d[i].data; // move the data instead of deep-copy - delete d[i].data; - - $.extend(true, s, d[i]); - - d[i].data = s.data; - } - else - s.data = d[i]; - res.push(s); - } - - return res; - } - - function axisNumber(obj, coord) { - var a = obj[coord + "axis"]; - if (typeof a == "object") // if we got a real axis, extract number - a = a.n; - if (typeof a != "number") - a = 1; // default to first axis - return a; - } - - function allAxes() { - // return flat array without annoying null entries - return $.grep(xaxes.concat(yaxes), function (a) { return a; }); - } - - function canvasToAxisCoords(pos) { - // return an object with x/y corresponding to all used axes - var res = {}, i, axis; - for (i = 0; i < xaxes.length; ++i) { - axis = xaxes[i]; - if (axis && axis.used) - res["x" + axis.n] = axis.c2p(pos.left); - } - - for (i = 0; i < yaxes.length; ++i) { - axis = yaxes[i]; - if (axis && axis.used) - res["y" + axis.n] = axis.c2p(pos.top); - } - - if (res.x1 !== undefined) - res.x = res.x1; - if (res.y1 !== undefined) - res.y = res.y1; - - return res; - } - - function axisToCanvasCoords(pos) { - // get canvas coords from the first pair of x/y found in pos - var res = {}, i, axis, key; - - for (i = 0; i < xaxes.length; ++i) { - axis = xaxes[i]; - if (axis && axis.used) { - key = "x" + axis.n; - if (pos[key] == null && axis.n == 1) - key = "x"; - - if (pos[key] != null) { - res.left = axis.p2c(pos[key]); - break; - } - } - } - - for (i = 0; i < yaxes.length; ++i) { - axis = yaxes[i]; - if (axis && axis.used) { - key = "y" + axis.n; - if (pos[key] == null && axis.n == 1) - key = "y"; - - if (pos[key] != null) { - res.top = axis.p2c(pos[key]); - break; - } - } - } - - return res; - } - - function getOrCreateAxis(axes, number) { - if (!axes[number - 1]) - axes[number - 1] = { - n: number, // save the number for future reference - direction: axes == xaxes ? "x" : "y", - options: $.extend(true, {}, axes == xaxes ? options.xaxis : options.yaxis) - }; - - return axes[number - 1]; - } - - function fillInSeriesOptions() { - var i; - - // collect what we already got of colors - var neededColors = series.length, - usedColors = [], - assignedColors = []; - for (i = 0; i < series.length; ++i) { - var sc = series[i].color; - if (sc != null) { - --neededColors; - if (typeof sc == "number") - assignedColors.push(sc); - else - usedColors.push($.color.parse(series[i].color)); - } - } - - // we might need to generate more colors if higher indices - // are assigned - for (i = 0; i < assignedColors.length; ++i) { - neededColors = Math.max(neededColors, assignedColors[i] + 1); - } - - // produce colors as needed - var colors = [], variation = 0; - i = 0; - while (colors.length < neededColors) { - var c; - if (options.colors.length == i) // check degenerate case - c = $.color.make(100, 100, 100); - else - c = $.color.parse(options.colors[i]); - - // vary color if needed - var sign = variation % 2 == 1 ? 
-1 : 1; - c.scale('rgb', 1 + sign * Math.ceil(variation / 2) * 0.2) - - // FIXME: if we're getting to close to something else, - // we should probably skip this one - colors.push(c); - - ++i; - if (i >= options.colors.length) { - i = 0; - ++variation; - } - } - - // fill in the options - var colori = 0, s; - for (i = 0; i < series.length; ++i) { - s = series[i]; - - // assign colors - if (s.color == null) { - s.color = colors[colori].toString(); - ++colori; - } - else if (typeof s.color == "number") - s.color = colors[s.color].toString(); - - // turn on lines automatically in case nothing is set - if (s.lines.show == null) { - var v, show = true; - for (v in s) - if (s[v] && s[v].show) { - show = false; - break; - } - if (show) - s.lines.show = true; - } - - // setup axes - s.xaxis = getOrCreateAxis(xaxes, axisNumber(s, "x")); - s.yaxis = getOrCreateAxis(yaxes, axisNumber(s, "y")); - } - } - - function processData() { - var topSentry = Number.POSITIVE_INFINITY, - bottomSentry = Number.NEGATIVE_INFINITY, - fakeInfinity = Number.MAX_VALUE, - i, j, k, m, length, - s, points, ps, x, y, axis, val, f, p; - - function updateAxis(axis, min, max) { - if (min < axis.datamin && min != -fakeInfinity) - axis.datamin = min; - if (max > axis.datamax && max != fakeInfinity) - axis.datamax = max; - } - - $.each(allAxes(), function (_, axis) { - // init axis - axis.datamin = topSentry; - axis.datamax = bottomSentry; - axis.used = false; - }); - - for (i = 0; i < series.length; ++i) { - s = series[i]; - s.datapoints = { points: [] }; - - executeHooks(hooks.processRawData, [ s, s.data, s.datapoints ]); - } - - // first pass: clean and copy data - for (i = 0; i < series.length; ++i) { - s = series[i]; - - var data = s.data, format = s.datapoints.format; - - if (!format) { - format = []; - // find out how to copy - format.push({ x: true, number: true, required: true }); - format.push({ y: true, number: true, required: true }); - - if (s.bars.show || (s.lines.show && s.lines.fill)) { - format.push({ y: true, number: true, required: false, defaultValue: 0 }); - if (s.bars.horizontal) { - delete format[format.length - 1].y; - format[format.length - 1].x = true; - } - } - - s.datapoints.format = format; - } - - if (s.datapoints.pointsize != null) - continue; // already filled in - - s.datapoints.pointsize = format.length; - - ps = s.datapoints.pointsize; - points = s.datapoints.points; - - insertSteps = s.lines.show && s.lines.steps; - s.xaxis.used = s.yaxis.used = true; - - for (j = k = 0; j < data.length; ++j, k += ps) { - p = data[j]; - - var nullify = p == null; - if (!nullify) { - for (m = 0; m < ps; ++m) { - val = p[m]; - f = format[m]; - - if (f) { - if (f.number && val != null) { - val = +val; // convert to number - if (isNaN(val)) - val = null; - else if (val == Infinity) - val = fakeInfinity; - else if (val == -Infinity) - val = -fakeInfinity; - } - - if (val == null) { - if (f.required) - nullify = true; - - if (f.defaultValue != null) - val = f.defaultValue; - } - } - - points[k + m] = val; - } - } - - if (nullify) { - for (m = 0; m < ps; ++m) { - val = points[k + m]; - if (val != null) { - f = format[m]; - // extract min/max info - if (f.x) - updateAxis(s.xaxis, val, val); - if (f.y) - updateAxis(s.yaxis, val, val); - } - points[k + m] = null; - } - } - else { - // a little bit of line specific stuff that - // perhaps shouldn't be here, but lacking - // better means... 
- if (insertSteps && k > 0 - && points[k - ps] != null - && points[k - ps] != points[k] - && points[k - ps + 1] != points[k + 1]) { - // copy the point to make room for a middle point - for (m = 0; m < ps; ++m) - points[k + ps + m] = points[k + m]; - - // middle point has same y - points[k + 1] = points[k - ps + 1]; - - // we've added a point, better reflect that - k += ps; - } - } - } - } - - // give the hooks a chance to run - for (i = 0; i < series.length; ++i) { - s = series[i]; - - executeHooks(hooks.processDatapoints, [ s, s.datapoints]); - } - - // second pass: find datamax/datamin for auto-scaling - for (i = 0; i < series.length; ++i) { - s = series[i]; - points = s.datapoints.points, - ps = s.datapoints.pointsize; - - var xmin = topSentry, ymin = topSentry, - xmax = bottomSentry, ymax = bottomSentry; - - for (j = 0; j < points.length; j += ps) { - if (points[j] == null) - continue; - - for (m = 0; m < ps; ++m) { - val = points[j + m]; - f = format[m]; - if (!f || val == fakeInfinity || val == -fakeInfinity) - continue; - - if (f.x) { - if (val < xmin) - xmin = val; - if (val > xmax) - xmax = val; - } - if (f.y) { - if (val < ymin) - ymin = val; - if (val > ymax) - ymax = val; - } - } - } - - if (s.bars.show) { - // make sure we got room for the bar on the dancing floor - var delta = s.bars.align == "left" ? 0 : -s.bars.barWidth/2; - if (s.bars.horizontal) { - ymin += delta; - ymax += delta + s.bars.barWidth; - } - else { - xmin += delta; - xmax += delta + s.bars.barWidth; - } - } - - updateAxis(s.xaxis, xmin, xmax); - updateAxis(s.yaxis, ymin, ymax); - } - - $.each(allAxes(), function (_, axis) { - if (axis.datamin == topSentry) - axis.datamin = null; - if (axis.datamax == bottomSentry) - axis.datamax = null; - }); - } - - function makeCanvas(skipPositioning, cls) { - var c = document.createElement('canvas'); - c.className = cls; - c.width = canvasWidth; - c.height = canvasHeight; - - if (!skipPositioning) - $(c).css({ position: 'absolute', left: 0, top: 0 }); - - $(c).appendTo(placeholder); - - if (!c.getContext) // excanvas hack - c = window.G_vmlCanvasManager.initElement(c); - - // used for resetting in case we get replotted - c.getContext("2d").save(); - - return c; - } - - function getCanvasDimensions() { - canvasWidth = placeholder.width(); - canvasHeight = placeholder.height(); - - if (canvasWidth <= 0 || canvasHeight <= 0) - throw "Invalid dimensions for plot, width = " + canvasWidth + ", height = " + canvasHeight; - } - - function resizeCanvas(c) { - // resizing should reset the state (excanvas seems to be - // buggy though) - if (c.width != canvasWidth) - c.width = canvasWidth; - - if (c.height != canvasHeight) - c.height = canvasHeight; - - // so try to get back to the initial state (even if it's - // gone now, this should be safe according to the spec) - var cctx = c.getContext("2d"); - cctx.restore(); - - // and save again - cctx.save(); - } - - function setupCanvases() { - var reused, - existingCanvas = placeholder.children("canvas.base"), - existingOverlay = placeholder.children("canvas.overlay"); - - if (existingCanvas.length == 0 || existingOverlay == 0) { - // init everything - - placeholder.html(""); // make sure placeholder is clear - - placeholder.css({ padding: 0 }); // padding messes up the positioning - - if (placeholder.css("position") == 'static') - placeholder.css("position", "relative"); // for positioning labels and overlay - - getCanvasDimensions(); - - canvas = makeCanvas(true, "base"); - overlay = makeCanvas(false, "overlay"); // overlay canvas for 
interactive features - - reused = false; - } - else { - // reuse existing elements - - canvas = existingCanvas.get(0); - overlay = existingOverlay.get(0); - - reused = true; - } - - ctx = canvas.getContext("2d"); - octx = overlay.getContext("2d"); - - // we include the canvas in the event holder too, because IE 7 - // sometimes has trouble with the stacking order - eventHolder = $([overlay, canvas]); - - if (reused) { - // run shutdown in the old plot object - placeholder.data("plot").shutdown(); - - // reset reused canvases - plot.resize(); - - // make sure overlay pixels are cleared (canvas is cleared when we redraw) - octx.clearRect(0, 0, canvasWidth, canvasHeight); - - // then whack any remaining obvious garbage left - eventHolder.unbind(); - placeholder.children().not([canvas, overlay]).remove(); - } - - // save in case we get replotted - placeholder.data("plot", plot); - } - - function bindEvents() { - // bind events - if (options.grid.hoverable) { - eventHolder.mousemove(onMouseMove); - eventHolder.mouseleave(onMouseLeave); - } - - if (options.grid.clickable) - eventHolder.click(onClick); - - executeHooks(hooks.bindEvents, [eventHolder]); - } - - function shutdown() { - if (redrawTimeout) - clearTimeout(redrawTimeout); - - eventHolder.unbind("mousemove", onMouseMove); - eventHolder.unbind("mouseleave", onMouseLeave); - eventHolder.unbind("click", onClick); - - executeHooks(hooks.shutdown, [eventHolder]); - } - - function setTransformationHelpers(axis) { - // set helper functions on the axis, assumes plot area - // has been computed already - - function identity(x) { return x; } - - var s, m, t = axis.options.transform || identity, - it = axis.options.inverseTransform; - - // precompute how much the axis is scaling a point - // in canvas space - if (axis.direction == "x") { - s = axis.scale = plotWidth / Math.abs(t(axis.max) - t(axis.min)); - m = Math.min(t(axis.max), t(axis.min)); - } - else { - s = axis.scale = plotHeight / Math.abs(t(axis.max) - t(axis.min)); - s = -s; - m = Math.max(t(axis.max), t(axis.min)); - } - - // data point to canvas coordinate - if (t == identity) // slight optimization - axis.p2c = function (p) { return (p - m) * s; }; - else - axis.p2c = function (p) { return (t(p) - m) * s; }; - // canvas coordinate to data point - if (!it) - axis.c2p = function (c) { return m + c / s; }; - else - axis.c2p = function (c) { return it(m + c / s); }; - } - - function measureTickLabels(axis) { - var opts = axis.options, i, ticks = axis.ticks || [], labels = [], - l, w = opts.labelWidth, h = opts.labelHeight, dummyDiv; - - function makeDummyDiv(labels, width) { - return $('
<div style="position:absolute;top:-10000px;' + width + 'font-size:smaller">' + - '<div>' - + labels.join("") + '</div></div>
') - .appendTo(placeholder); - } - - if (axis.direction == "x") { - // to avoid measuring the widths of the labels (it's slow), we - // construct fixed-size boxes and put the labels inside - // them, we don't need the exact figures and the - // fixed-size box content is easy to center - if (w == null) - w = Math.floor(canvasWidth / (ticks.length > 0 ? ticks.length : 1)); - - // measure x label heights - if (h == null) { - labels = []; - for (i = 0; i < ticks.length; ++i) { - l = ticks[i].label; - if (l) - labels.push('
<div class="tickLabel" style="float:left;width:' + w + 'px">' + l + '</div>
'); - } - - if (labels.length > 0) { - // stick them all in the same div and measure - // collective height - labels.push('
<div style="clear:left"></div>'); - dummyDiv = makeDummyDiv(labels, "width:10000px;"); - h = dummyDiv.height(); - dummyDiv.remove(); - } - } - } - else if (w == null || h == null) { - // calculate y label dimensions - for (i = 0; i < ticks.length; ++i) { - l = ticks[i].label; - if (l) - labels.push('<div class="tickLabel">' + l + '</div>
'); - } - - if (labels.length > 0) { - dummyDiv = makeDummyDiv(labels, ""); - if (w == null) - w = dummyDiv.children().width(); - if (h == null) - h = dummyDiv.find("div.tickLabel").height(); - dummyDiv.remove(); - } - } - - if (w == null) - w = 0; - if (h == null) - h = 0; - - axis.labelWidth = w; - axis.labelHeight = h; - } - - function allocateAxisBoxFirstPhase(axis) { - // find the bounding box of the axis by looking at label - // widths/heights and ticks, make room by diminishing the - // plotOffset - - var lw = axis.labelWidth, - lh = axis.labelHeight, - pos = axis.options.position, - tickLength = axis.options.tickLength, - axismargin = options.grid.axisMargin, - padding = options.grid.labelMargin, - all = axis.direction == "x" ? xaxes : yaxes, - index; - - // determine axis margin - var samePosition = $.grep(all, function (a) { - return a && a.options.position == pos && a.reserveSpace; - }); - if ($.inArray(axis, samePosition) == samePosition.length - 1) - axismargin = 0; // outermost - - // determine tick length - if we're innermost, we can use "full" - if (tickLength == null) - tickLength = "full"; - - var sameDirection = $.grep(all, function (a) { - return a && a.reserveSpace; - }); - - var innermost = $.inArray(axis, sameDirection) == 0; - if (!innermost && tickLength == "full") - tickLength = 5; - - if (!isNaN(+tickLength)) - padding += +tickLength; - - // compute box - if (axis.direction == "x") { - lh += padding; - - if (pos == "bottom") { - plotOffset.bottom += lh + axismargin; - axis.box = { top: canvasHeight - plotOffset.bottom, height: lh }; - } - else { - axis.box = { top: plotOffset.top + axismargin, height: lh }; - plotOffset.top += lh + axismargin; - } - } - else { - lw += padding; - - if (pos == "left") { - axis.box = { left: plotOffset.left + axismargin, width: lw }; - plotOffset.left += lw + axismargin; - } - else { - plotOffset.right += lw + axismargin; - axis.box = { left: canvasWidth - plotOffset.right, width: lw }; - } - } - - // save for future reference - axis.position = pos; - axis.tickLength = tickLength; - axis.box.padding = padding; - axis.innermost = innermost; - } - - function allocateAxisBoxSecondPhase(axis) { - // set remaining bounding box coordinates - if (axis.direction == "x") { - axis.box.left = plotOffset.left; - axis.box.width = plotWidth; - } - else { - axis.box.top = plotOffset.top; - axis.box.height = plotHeight; - } - } - - function setupGrid() { - var i, axes = allAxes(); - - // first calculate the plot and axis box dimensions - - $.each(axes, function (_, axis) { - axis.show = axis.options.show; - if (axis.show == null) - axis.show = axis.used; // by default an axis is visible if it's got data - - axis.reserveSpace = axis.show || axis.options.reserveSpace; - - setRange(axis); - }); - - allocatedAxes = $.grep(axes, function (axis) { return axis.reserveSpace; }); - - plotOffset.left = plotOffset.right = plotOffset.top = plotOffset.bottom = 0; - if (options.grid.show) { - $.each(allocatedAxes, function (_, axis) { - // make the ticks - setupTickGeneration(axis); - setTicks(axis); - snapRangeToTicks(axis, axis.ticks); - - // find labelWidth/Height for axis - measureTickLabels(axis); - }); - - // with all dimensions in house, we can compute the - // axis boxes, start from the outside (reverse order) - for (i = allocatedAxes.length - 1; i >= 0; --i) - allocateAxisBoxFirstPhase(allocatedAxes[i]); - - // make sure we've got enough space for things that - // might stick out - var minMargin = options.grid.minBorderMargin; - if (minMargin == null) { 
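// (editor's note: when grid.minBorderMargin is unset, the fallback below uses the largest point radius plus half its line width across all series, so point symbols drawn right at the plot edge are not clipped)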
- minMargin = 0; - for (i = 0; i < series.length; ++i) - minMargin = Math.max(minMargin, series[i].points.radius + series[i].points.lineWidth/2); - } - - for (var a in plotOffset) { - plotOffset[a] += options.grid.borderWidth; - plotOffset[a] = Math.max(minMargin, plotOffset[a]); - } - } - - plotWidth = canvasWidth - plotOffset.left - plotOffset.right; - plotHeight = canvasHeight - plotOffset.bottom - plotOffset.top; - - // now we got the proper plotWidth/Height, we can compute the scaling - $.each(axes, function (_, axis) { - setTransformationHelpers(axis); - }); - - if (options.grid.show) { - $.each(allocatedAxes, function (_, axis) { - allocateAxisBoxSecondPhase(axis); - }); - - insertAxisLabels(); - } - - insertLegend(); - } - - function setRange(axis) { - var opts = axis.options, - min = +(opts.min != null ? opts.min : axis.datamin), - max = +(opts.max != null ? opts.max : axis.datamax), - delta = max - min; - - if (delta == 0.0) { - // degenerate case - var widen = max == 0 ? 1 : 0.01; - - if (opts.min == null) - min -= widen; - // always widen max if we couldn't widen min to ensure we - // don't fall into min == max which doesn't work - if (opts.max == null || opts.min != null) - max += widen; - } - else { - // consider autoscaling - var margin = opts.autoscaleMargin; - if (margin != null) { - if (opts.min == null) { - min -= delta * margin; - // make sure we don't go below zero if all values - // are positive - if (min < 0 && axis.datamin != null && axis.datamin >= 0) - min = 0; - } - if (opts.max == null) { - max += delta * margin; - if (max > 0 && axis.datamax != null && axis.datamax <= 0) - max = 0; - } - } - } - axis.min = min; - axis.max = max; - } - - function setupTickGeneration(axis) { - var opts = axis.options; - - // estimate number of ticks - var noTicks; - if (typeof opts.ticks == "number" && opts.ticks > 0) - noTicks = opts.ticks; - else - // heuristic based on the model a*sqrt(x) fitted to - // some data points that seemed reasonable - noTicks = 0.3 * Math.sqrt(axis.direction == "x" ? canvasWidth : canvasHeight); - - var delta = (axis.max - axis.min) / noTicks, - size, generator, unit, formatter, i, magn, norm; - - if (opts.mode == "time") { - // pretty handling of time - - // map of app. 
size of time units in milliseconds - var timeUnitSize = { - "second": 1000, - "minute": 60 * 1000, - "hour": 60 * 60 * 1000, - "day": 24 * 60 * 60 * 1000, - "month": 30 * 24 * 60 * 60 * 1000, - "year": 365.2425 * 24 * 60 * 60 * 1000 - }; - - - // the allowed tick sizes, after 1 year we use - // an integer algorithm - var spec = [ - [1, "second"], [2, "second"], [5, "second"], [10, "second"], - [30, "second"], - [1, "minute"], [2, "minute"], [5, "minute"], [10, "minute"], - [30, "minute"], - [1, "hour"], [2, "hour"], [4, "hour"], - [8, "hour"], [12, "hour"], - [1, "day"], [2, "day"], [3, "day"], - [0.25, "month"], [0.5, "month"], [1, "month"], - [2, "month"], [3, "month"], [6, "month"], - [1, "year"] - ]; - - var minSize = 0; - if (opts.minTickSize != null) { - if (typeof opts.tickSize == "number") - minSize = opts.tickSize; - else - minSize = opts.minTickSize[0] * timeUnitSize[opts.minTickSize[1]]; - } - - for (var i = 0; i < spec.length - 1; ++i) - if (delta < (spec[i][0] * timeUnitSize[spec[i][1]] - + spec[i + 1][0] * timeUnitSize[spec[i + 1][1]]) / 2 - && spec[i][0] * timeUnitSize[spec[i][1]] >= minSize) - break; - size = spec[i][0]; - unit = spec[i][1]; - - // special-case the possibility of several years - if (unit == "year") { - magn = Math.pow(10, Math.floor(Math.log(delta / timeUnitSize.year) / Math.LN10)); - norm = (delta / timeUnitSize.year) / magn; - if (norm < 1.5) - size = 1; - else if (norm < 3) - size = 2; - else if (norm < 7.5) - size = 5; - else - size = 10; - - size *= magn; - } - - axis.tickSize = opts.tickSize || [size, unit]; - - generator = function(axis) { - var ticks = [], - tickSize = axis.tickSize[0], unit = axis.tickSize[1], - d = new Date(axis.min); - - var step = tickSize * timeUnitSize[unit]; - - if (unit == "second") - d.setUTCSeconds(floorInBase(d.getUTCSeconds(), tickSize)); - if (unit == "minute") - d.setUTCMinutes(floorInBase(d.getUTCMinutes(), tickSize)); - if (unit == "hour") - d.setUTCHours(floorInBase(d.getUTCHours(), tickSize)); - if (unit == "month") - d.setUTCMonth(floorInBase(d.getUTCMonth(), tickSize)); - if (unit == "year") - d.setUTCFullYear(floorInBase(d.getUTCFullYear(), tickSize)); - - // reset smaller components - d.setUTCMilliseconds(0); - if (step >= timeUnitSize.minute) - d.setUTCSeconds(0); - if (step >= timeUnitSize.hour) - d.setUTCMinutes(0); - if (step >= timeUnitSize.day) - d.setUTCHours(0); - if (step >= timeUnitSize.day * 4) - d.setUTCDate(1); - if (step >= timeUnitSize.year) - d.setUTCMonth(0); - - - var carry = 0, v = Number.NaN, prev; - do { - prev = v; - v = d.getTime(); - ticks.push(v); - if (unit == "month") { - if (tickSize < 1) { - // a bit complicated - we'll divide the month - // up but we need to take care of fractions - // so we don't end up in the middle of a day - d.setUTCDate(1); - var start = d.getTime(); - d.setUTCMonth(d.getUTCMonth() + 1); - var end = d.getTime(); - d.setTime(v + carry * timeUnitSize.hour + (end - start) * tickSize); - carry = d.getUTCHours(); - d.setUTCHours(0); - } - else - d.setUTCMonth(d.getUTCMonth() + tickSize); - } - else if (unit == "year") { - d.setUTCFullYear(d.getUTCFullYear() + tickSize); - } - else - d.setTime(v + step); - } while (v < axis.max && v != prev); - - return ticks; - }; - - formatter = function (v, axis) { - var d = new Date(v); - - // first check global format - if (opts.timeformat != null) - return $.plot.formatDate(d, opts.timeformat, opts.monthNames); - - var t = axis.tickSize[0] * timeUnitSize[axis.tickSize[1]]; - var span = axis.max - axis.min; - var suffix = 
(opts.twelveHourClock) ? " %p" : ""; - - if (t < timeUnitSize.minute) - fmt = "%h:%M:%S" + suffix; - else if (t < timeUnitSize.day) { - if (span < 2 * timeUnitSize.day) - fmt = "%h:%M" + suffix; - else - fmt = "%b %d %h:%M" + suffix; - } - else if (t < timeUnitSize.month) - fmt = "%b %d"; - else if (t < timeUnitSize.year) { - if (span < timeUnitSize.year) - fmt = "%b"; - else - fmt = "%b %y"; - } - else - fmt = "%y"; - - return $.plot.formatDate(d, fmt, opts.monthNames); - }; - } - else { - // pretty rounding of base-10 numbers - var maxDec = opts.tickDecimals; - var dec = -Math.floor(Math.log(delta) / Math.LN10); - if (maxDec != null && dec > maxDec) - dec = maxDec; - - magn = Math.pow(10, -dec); - norm = delta / magn; // norm is between 1.0 and 10.0 - - if (norm < 1.5) - size = 1; - else if (norm < 3) { - size = 2; - // special case for 2.5, requires an extra decimal - if (norm > 2.25 && (maxDec == null || dec + 1 <= maxDec)) { - size = 2.5; - ++dec; - } - } - else if (norm < 7.5) - size = 5; - else - size = 10; - - size *= magn; - - if (opts.minTickSize != null && size < opts.minTickSize) - size = opts.minTickSize; - - axis.tickDecimals = Math.max(0, maxDec != null ? maxDec : dec); - axis.tickSize = opts.tickSize || size; - - generator = function (axis) { - var ticks = []; - - // spew out all possible ticks - var start = floorInBase(axis.min, axis.tickSize), - i = 0, v = Number.NaN, prev; - do { - prev = v; - v = start + i * axis.tickSize; - ticks.push(v); - ++i; - } while (v < axis.max && v != prev); - return ticks; - }; - - formatter = function (v, axis) { - return v.toFixed(axis.tickDecimals); - }; - } - - if (opts.alignTicksWithAxis != null) { - var otherAxis = (axis.direction == "x" ? xaxes : yaxes)[opts.alignTicksWithAxis - 1]; - if (otherAxis && otherAxis.used && otherAxis != axis) { - // consider snapping min/max to outermost nice ticks - var niceTicks = generator(axis); - if (niceTicks.length > 0) { - if (opts.min == null) - axis.min = Math.min(axis.min, niceTicks[0]); - if (opts.max == null && niceTicks.length > 1) - axis.max = Math.max(axis.max, niceTicks[niceTicks.length - 1]); - } - - generator = function (axis) { - // copy ticks, scaled to this axis - var ticks = [], v, i; - for (i = 0; i < otherAxis.ticks.length; ++i) { - v = (otherAxis.ticks[i].v - otherAxis.min) / (otherAxis.max - otherAxis.min); - v = axis.min + v * (axis.max - axis.min); - ticks.push(v); - } - return ticks; - }; - - // we might need an extra decimal since forced - // ticks don't necessarily fit naturally - if (axis.mode != "time" && opts.tickDecimals == null) { - var extraDec = Math.max(0, -Math.floor(Math.log(delta) / Math.LN10) + 1), - ts = generator(axis); - - // only proceed if the tick interval rounded - // with an extra decimal doesn't give us a - // zero at end - if (!(ts.length > 1 && /\..*0$/.test((ts[1] - ts[0]).toFixed(extraDec)))) - axis.tickDecimals = extraDec; - } - } - } - - axis.tickGenerator = generator; - if ($.isFunction(opts.tickFormatter)) - axis.tickFormatter = function (v, axis) { return "" + opts.tickFormatter(v, axis); }; - else - axis.tickFormatter = formatter; - } - - function setTicks(axis) { - var oticks = axis.options.ticks, ticks = []; - if (oticks == null || (typeof oticks == "number" && oticks > 0)) - ticks = axis.tickGenerator(axis); - else if (oticks) { - if ($.isFunction(oticks)) - // generate the ticks - ticks = oticks({ min: axis.min, max: axis.max }); - else - ticks = oticks; - } - - // clean up/labelify the supplied ticks, copy them over - var i, v; - axis.ticks 
= []; - for (i = 0; i < ticks.length; ++i) { - var label = null; - var t = ticks[i]; - if (typeof t == "object") { - v = +t[0]; - if (t.length > 1) - label = t[1]; - } - else - v = +t; - if (label == null) - label = axis.tickFormatter(v, axis); - if (!isNaN(v)) - axis.ticks.push({ v: v, label: label }); - } - } - - function snapRangeToTicks(axis, ticks) { - if (axis.options.autoscaleMargin && ticks.length > 0) { - // snap to ticks - if (axis.options.min == null) - axis.min = Math.min(axis.min, ticks[0].v); - if (axis.options.max == null && ticks.length > 1) - axis.max = Math.max(axis.max, ticks[ticks.length - 1].v); - } - } - - function draw() { - ctx.clearRect(0, 0, canvasWidth, canvasHeight); - - var grid = options.grid; - - // draw background, if any - if (grid.show && grid.backgroundColor) - drawBackground(); - - if (grid.show && !grid.aboveData) - drawGrid(); - - for (var i = 0; i < series.length; ++i) { - executeHooks(hooks.drawSeries, [ctx, series[i]]); - drawSeries(series[i]); - } - - executeHooks(hooks.draw, [ctx]); - - if (grid.show && grid.aboveData) - drawGrid(); - } - - function extractRange(ranges, coord) { - var axis, from, to, key, axes = allAxes(); - - for (i = 0; i < axes.length; ++i) { - axis = axes[i]; - if (axis.direction == coord) { - key = coord + axis.n + "axis"; - if (!ranges[key] && axis.n == 1) - key = coord + "axis"; // support x1axis as xaxis - if (ranges[key]) { - from = ranges[key].from; - to = ranges[key].to; - break; - } - } - } - - // backwards-compat stuff - to be removed in future - if (!ranges[key]) { - axis = coord == "x" ? xaxes[0] : yaxes[0]; - from = ranges[coord + "1"]; - to = ranges[coord + "2"]; - } - - // auto-reverse as an added bonus - if (from != null && to != null && from > to) { - var tmp = from; - from = to; - to = tmp; - } - - return { from: from, to: to, axis: axis }; - } - - function drawBackground() { - ctx.save(); - ctx.translate(plotOffset.left, plotOffset.top); - - ctx.fillStyle = getColorOrGradient(options.grid.backgroundColor, plotHeight, 0, "rgba(255, 255, 255, 0)"); - ctx.fillRect(0, 0, plotWidth, plotHeight); - ctx.restore(); - } - - function drawGrid() { - var i; - - ctx.save(); - ctx.translate(plotOffset.left, plotOffset.top); - - // draw markings - var markings = options.grid.markings; - if (markings) { - if ($.isFunction(markings)) { - var axes = plot.getAxes(); - // xmin etc. 
is backwards compatibility, to be - // removed in the future - axes.xmin = axes.xaxis.min; - axes.xmax = axes.xaxis.max; - axes.ymin = axes.yaxis.min; - axes.ymax = axes.yaxis.max; - - markings = markings(axes); - } - - for (i = 0; i < markings.length; ++i) { - var m = markings[i], - xrange = extractRange(m, "x"), - yrange = extractRange(m, "y"); - - // fill in missing - if (xrange.from == null) - xrange.from = xrange.axis.min; - if (xrange.to == null) - xrange.to = xrange.axis.max; - if (yrange.from == null) - yrange.from = yrange.axis.min; - if (yrange.to == null) - yrange.to = yrange.axis.max; - - // clip - if (xrange.to < xrange.axis.min || xrange.from > xrange.axis.max || - yrange.to < yrange.axis.min || yrange.from > yrange.axis.max) - continue; - - xrange.from = Math.max(xrange.from, xrange.axis.min); - xrange.to = Math.min(xrange.to, xrange.axis.max); - yrange.from = Math.max(yrange.from, yrange.axis.min); - yrange.to = Math.min(yrange.to, yrange.axis.max); - - if (xrange.from == xrange.to && yrange.from == yrange.to) - continue; - - // then draw - xrange.from = xrange.axis.p2c(xrange.from); - xrange.to = xrange.axis.p2c(xrange.to); - yrange.from = yrange.axis.p2c(yrange.from); - yrange.to = yrange.axis.p2c(yrange.to); - - if (xrange.from == xrange.to || yrange.from == yrange.to) { - // draw line - ctx.beginPath(); - ctx.strokeStyle = m.color || options.grid.markingsColor; - ctx.lineWidth = m.lineWidth || options.grid.markingsLineWidth; - ctx.moveTo(xrange.from, yrange.from); - ctx.lineTo(xrange.to, yrange.to); - ctx.stroke(); - } - else { - // fill area - ctx.fillStyle = m.color || options.grid.markingsColor; - ctx.fillRect(xrange.from, yrange.to, - xrange.to - xrange.from, - yrange.from - yrange.to); - } - } - } - - // draw the ticks - var axes = allAxes(), bw = options.grid.borderWidth; - - for (var j = 0; j < axes.length; ++j) { - var axis = axes[j], box = axis.box, - t = axis.tickLength, x, y, xoff, yoff; - if (!axis.show || axis.ticks.length == 0) - continue - - ctx.strokeStyle = axis.options.tickColor || $.color.parse(axis.options.color).scale('a', 0.22).toString(); - ctx.lineWidth = 1; - - // find the edges - if (axis.direction == "x") { - x = 0; - if (t == "full") - y = (axis.position == "top" ? 0 : plotHeight); - else - y = box.top - plotOffset.top + (axis.position == "top" ? box.height : 0); - } - else { - y = 0; - if (t == "full") - x = (axis.position == "left" ? 0 : plotWidth); - else - x = box.left - plotOffset.left + (axis.position == "left" ? box.width : 0); - } - - // draw tick bar - if (!axis.innermost) { - ctx.beginPath(); - xoff = yoff = 0; - if (axis.direction == "x") - xoff = plotWidth; - else - yoff = plotHeight; - - if (ctx.lineWidth == 1) { - x = Math.floor(x) + 0.5; - y = Math.floor(y) + 0.5; - } - - ctx.moveTo(x, y); - ctx.lineTo(x + xoff, y + yoff); - ctx.stroke(); - } - - // draw ticks - ctx.beginPath(); - for (i = 0; i < axis.ticks.length; ++i) { - var v = axis.ticks[i].v; - - xoff = yoff = 0; - - if (v < axis.min || v > axis.max - // skip those lying on the axes if we got a border - || (t == "full" && bw > 0 - && (v == axis.min || v == axis.max))) - continue; - - if (axis.direction == "x") { - x = axis.p2c(v); - yoff = t == "full" ? -plotHeight : t; - - if (axis.position == "top") - yoff = -yoff; - } - else { - y = axis.p2c(v); - xoff = t == "full" ? 
-plotWidth : t; - - if (axis.position == "left") - xoff = -xoff; - } - - if (ctx.lineWidth == 1) { - if (axis.direction == "x") - x = Math.floor(x) + 0.5; - else - y = Math.floor(y) + 0.5; - } - - ctx.moveTo(x, y); - ctx.lineTo(x + xoff, y + yoff); - } - - ctx.stroke(); - } - - - // draw border - if (bw) { - ctx.lineWidth = bw; - ctx.strokeStyle = options.grid.borderColor; - ctx.strokeRect(-bw/2, -bw/2, plotWidth + bw, plotHeight + bw); - } - - ctx.restore(); - } - - function insertAxisLabels() { - placeholder.find(".tickLabels").remove(); - - var html = ['
<div class="tickLabels" style="font-size:smaller">']; - - var axes = allAxes(); - for (var j = 0; j < axes.length; ++j) { - var axis = axes[j], box = axis.box; - if (!axis.show) - continue; - //debug: html.push('<div style="position:absolute;left:' + box.left + 'px;top:' + box.top + 'px;width:' + box.width + 'px;height:' + box.height + 'px;background-color:#ccc"></div>') - html.push('<div class="' + axis.direction + 'Axis ' + axis.direction + axis.n + 'Axis" style="color:' + axis.options.color + '">
'); - for (var i = 0; i < axis.ticks.length; ++i) { - var tick = axis.ticks[i]; - if (!tick.label || tick.v < axis.min || tick.v > axis.max) - continue; - - var pos = {}, align; - - if (axis.direction == "x") { - align = "center"; - pos.left = Math.round(plotOffset.left + axis.p2c(tick.v) - axis.labelWidth/2); - if (axis.position == "bottom") - pos.top = box.top + box.padding; - else - pos.bottom = canvasHeight - (box.top + box.height - box.padding); - } - else { - pos.top = Math.round(plotOffset.top + axis.p2c(tick.v) - axis.labelHeight/2); - if (axis.position == "left") { - pos.right = canvasWidth - (box.left + box.width - box.padding) - align = "right"; - } - else { - pos.left = box.left + box.padding; - align = "left"; - } - } - - pos.width = axis.labelWidth; - - var style = ["position:absolute", "text-align:" + align ]; - for (var a in pos) - style.push(a + ":" + pos[a] + "px") - - html.push('
<div class="tickLabel" style="' + style.join(';') + '">' + tick.label + '</div>
'); - } - html.push('
</div>'); - } - - html.push('</div>
'); - - placeholder.append(html.join("")); - } - - function drawSeries(series) { - if (series.lines.show) - drawSeriesLines(series); - if (series.bars.show) - drawSeriesBars(series); - if (series.points.show) - drawSeriesPoints(series); - } - - function drawSeriesLines(series) { - function plotLine(datapoints, xoffset, yoffset, axisx, axisy) { - var points = datapoints.points, - ps = datapoints.pointsize, - prevx = null, prevy = null; - - ctx.beginPath(); - for (var i = ps; i < points.length; i += ps) { - var x1 = points[i - ps], y1 = points[i - ps + 1], - x2 = points[i], y2 = points[i + 1]; - - if (x1 == null || x2 == null) - continue; - - // clip with ymin - if (y1 <= y2 && y1 < axisy.min) { - if (y2 < axisy.min) - continue; // line segment is outside - // compute new intersection point - x1 = (axisy.min - y1) / (y2 - y1) * (x2 - x1) + x1; - y1 = axisy.min; - } - else if (y2 <= y1 && y2 < axisy.min) { - if (y1 < axisy.min) - continue; - x2 = (axisy.min - y1) / (y2 - y1) * (x2 - x1) + x1; - y2 = axisy.min; - } - - // clip with ymax - if (y1 >= y2 && y1 > axisy.max) { - if (y2 > axisy.max) - continue; - x1 = (axisy.max - y1) / (y2 - y1) * (x2 - x1) + x1; - y1 = axisy.max; - } - else if (y2 >= y1 && y2 > axisy.max) { - if (y1 > axisy.max) - continue; - x2 = (axisy.max - y1) / (y2 - y1) * (x2 - x1) + x1; - y2 = axisy.max; - } - - // clip with xmin - if (x1 <= x2 && x1 < axisx.min) { - if (x2 < axisx.min) - continue; - y1 = (axisx.min - x1) / (x2 - x1) * (y2 - y1) + y1; - x1 = axisx.min; - } - else if (x2 <= x1 && x2 < axisx.min) { - if (x1 < axisx.min) - continue; - y2 = (axisx.min - x1) / (x2 - x1) * (y2 - y1) + y1; - x2 = axisx.min; - } - - // clip with xmax - if (x1 >= x2 && x1 > axisx.max) { - if (x2 > axisx.max) - continue; - y1 = (axisx.max - x1) / (x2 - x1) * (y2 - y1) + y1; - x1 = axisx.max; - } - else if (x2 >= x1 && x2 > axisx.max) { - if (x1 > axisx.max) - continue; - y2 = (axisx.max - x1) / (x2 - x1) * (y2 - y1) + y1; - x2 = axisx.max; - } - - if (x1 != prevx || y1 != prevy) - ctx.moveTo(axisx.p2c(x1) + xoffset, axisy.p2c(y1) + yoffset); - - prevx = x2; - prevy = y2; - ctx.lineTo(axisx.p2c(x2) + xoffset, axisy.p2c(y2) + yoffset); - } - ctx.stroke(); - } - - function plotLineArea(datapoints, axisx, axisy) { - var points = datapoints.points, - ps = datapoints.pointsize, - bottom = Math.min(Math.max(0, axisy.min), axisy.max), - i = 0, top, areaOpen = false, - ypos = 1, segmentStart = 0, segmentEnd = 0; - - // we process each segment in two turns, first forward - // direction to sketch out top, then once we hit the - // end we go backwards to sketch the bottom - while (true) { - if (ps > 0 && i > points.length + ps) - break; - - i += ps; // ps is negative if going backwards - - var x1 = points[i - ps], - y1 = points[i - ps + ypos], - x2 = points[i], y2 = points[i + ypos]; - - if (areaOpen) { - if (ps > 0 && x1 != null && x2 == null) { - // at turning point - segmentEnd = i; - ps = -ps; - ypos = 2; - continue; - } - - if (ps < 0 && i == segmentStart + ps) { - // done with the reverse sweep - ctx.fill(); - areaOpen = false; - ps = -ps; - ypos = 1; - i = segmentStart = segmentEnd + ps; - continue; - } - } - - if (x1 == null || x2 == null) - continue; - - // clip x values - - // clip with xmin - if (x1 <= x2 && x1 < axisx.min) { - if (x2 < axisx.min) - continue; - y1 = (axisx.min - x1) / (x2 - x1) * (y2 - y1) + y1; - x1 = axisx.min; - } - else if (x2 <= x1 && x2 < axisx.min) { - if (x1 < axisx.min) - continue; - y2 = (axisx.min - x1) / (x2 - x1) * (y2 - y1) + y1; - x2 = axisx.min; - } - 
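// (editor's note: the clipping above is the same parametric scheme as in plotLine -- an endpoint outside the x-range is pulled back onto the boundary and its y value recomputed by linear interpolation along the segment, y = (xBound - x1) / (x2 - x1) * (y2 - y1) + y1)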
- // clip with xmax - if (x1 >= x2 && x1 > axisx.max) { - if (x2 > axisx.max) - continue; - y1 = (axisx.max - x1) / (x2 - x1) * (y2 - y1) + y1; - x1 = axisx.max; - } - else if (x2 >= x1 && x2 > axisx.max) { - if (x1 > axisx.max) - continue; - y2 = (axisx.max - x1) / (x2 - x1) * (y2 - y1) + y1; - x2 = axisx.max; - } - - if (!areaOpen) { - // open area - ctx.beginPath(); - ctx.moveTo(axisx.p2c(x1), axisy.p2c(bottom)); - areaOpen = true; - } - - // now first check the case where both is outside - if (y1 >= axisy.max && y2 >= axisy.max) { - ctx.lineTo(axisx.p2c(x1), axisy.p2c(axisy.max)); - ctx.lineTo(axisx.p2c(x2), axisy.p2c(axisy.max)); - continue; - } - else if (y1 <= axisy.min && y2 <= axisy.min) { - ctx.lineTo(axisx.p2c(x1), axisy.p2c(axisy.min)); - ctx.lineTo(axisx.p2c(x2), axisy.p2c(axisy.min)); - continue; - } - - // else it's a bit more complicated, there might - // be a flat maxed out rectangle first, then a - // triangular cutout or reverse; to find these - // keep track of the current x values - var x1old = x1, x2old = x2; - - // clip the y values, without shortcutting, we - // go through all cases in turn - - // clip with ymin - if (y1 <= y2 && y1 < axisy.min && y2 >= axisy.min) { - x1 = (axisy.min - y1) / (y2 - y1) * (x2 - x1) + x1; - y1 = axisy.min; - } - else if (y2 <= y1 && y2 < axisy.min && y1 >= axisy.min) { - x2 = (axisy.min - y1) / (y2 - y1) * (x2 - x1) + x1; - y2 = axisy.min; - } - - // clip with ymax - if (y1 >= y2 && y1 > axisy.max && y2 <= axisy.max) { - x1 = (axisy.max - y1) / (y2 - y1) * (x2 - x1) + x1; - y1 = axisy.max; - } - else if (y2 >= y1 && y2 > axisy.max && y1 <= axisy.max) { - x2 = (axisy.max - y1) / (y2 - y1) * (x2 - x1) + x1; - y2 = axisy.max; - } - - // if the x value was changed we got a rectangle - // to fill - if (x1 != x1old) { - ctx.lineTo(axisx.p2c(x1old), axisy.p2c(y1)); - // it goes to (x1, y1), but we fill that below - } - - // fill triangular section, this sometimes result - // in redundant points if (x1, y1) hasn't changed - // from previous line to, but we just ignore that - ctx.lineTo(axisx.p2c(x1), axisy.p2c(y1)); - ctx.lineTo(axisx.p2c(x2), axisy.p2c(y2)); - - // fill the other rectangle if it's there - if (x2 != x2old) { - ctx.lineTo(axisx.p2c(x2), axisy.p2c(y2)); - ctx.lineTo(axisx.p2c(x2old), axisy.p2c(y2)); - } - } - } - - ctx.save(); - ctx.translate(plotOffset.left, plotOffset.top); - ctx.lineJoin = "round"; - - var lw = series.lines.lineWidth, - sw = series.shadowSize; - // FIXME: consider another form of shadow when filling is turned on - if (lw > 0 && sw > 0) { - // draw shadow as a thick and thin line with transparency - ctx.lineWidth = sw; - ctx.strokeStyle = "rgba(0,0,0,0.1)"; - // position shadow at angle from the mid of line - var angle = Math.PI/18; - plotLine(series.datapoints, Math.sin(angle) * (lw/2 + sw/2), Math.cos(angle) * (lw/2 + sw/2), series.xaxis, series.yaxis); - ctx.lineWidth = sw/2; - plotLine(series.datapoints, Math.sin(angle) * (lw/2 + sw/4), Math.cos(angle) * (lw/2 + sw/4), series.xaxis, series.yaxis); - } - - ctx.lineWidth = lw; - ctx.strokeStyle = series.color; - var fillStyle = getFillStyle(series.lines, series.color, 0, plotHeight); - if (fillStyle) { - ctx.fillStyle = fillStyle; - plotLineArea(series.datapoints, series.xaxis, series.yaxis); - } - - if (lw > 0) - plotLine(series.datapoints, 0, 0, series.xaxis, series.yaxis); - ctx.restore(); - } - - function drawSeriesPoints(series) { - function plotPoints(datapoints, radius, fillStyle, offset, shadow, axisx, axisy, symbol) { - var points = 
datapoints.points, ps = datapoints.pointsize; - - for (var i = 0; i < points.length; i += ps) { - var x = points[i], y = points[i + 1]; - if (x == null || x < axisx.min || x > axisx.max || y < axisy.min || y > axisy.max) - continue; - - ctx.beginPath(); - x = axisx.p2c(x); - y = axisy.p2c(y) + offset; - if (symbol == "circle") - ctx.arc(x, y, radius, 0, shadow ? Math.PI : Math.PI * 2, false); - else - symbol(ctx, x, y, radius, shadow); - ctx.closePath(); - - if (fillStyle) { - ctx.fillStyle = fillStyle; - ctx.fill(); - } - ctx.stroke(); - } - } - - ctx.save(); - ctx.translate(plotOffset.left, plotOffset.top); - - var lw = series.points.lineWidth, - sw = series.shadowSize, - radius = series.points.radius, - symbol = series.points.symbol; - if (lw > 0 && sw > 0) { - // draw shadow in two steps - var w = sw / 2; - ctx.lineWidth = w; - ctx.strokeStyle = "rgba(0,0,0,0.1)"; - plotPoints(series.datapoints, radius, null, w + w/2, true, - series.xaxis, series.yaxis, symbol); - - ctx.strokeStyle = "rgba(0,0,0,0.2)"; - plotPoints(series.datapoints, radius, null, w/2, true, - series.xaxis, series.yaxis, symbol); - } - - ctx.lineWidth = lw; - ctx.strokeStyle = series.color; - plotPoints(series.datapoints, radius, - getFillStyle(series.points, series.color), 0, false, - series.xaxis, series.yaxis, symbol); - ctx.restore(); - } - - function drawBar(x, y, b, barLeft, barRight, offset, fillStyleCallback, axisx, axisy, c, horizontal, lineWidth) { - var left, right, bottom, top, - drawLeft, drawRight, drawTop, drawBottom, - tmp; - - // in horizontal mode, we start the bar from the left - // instead of from the bottom so it appears to be - // horizontal rather than vertical - if (horizontal) { - drawBottom = drawRight = drawTop = true; - drawLeft = false; - left = b; - right = x; - top = y + barLeft; - bottom = y + barRight; - - // account for negative bars - if (right < left) { - tmp = right; - right = left; - left = tmp; - drawLeft = true; - drawRight = false; - } - } - else { - drawLeft = drawRight = drawTop = true; - drawBottom = false; - left = x + barLeft; - right = x + barRight; - bottom = b; - top = y; - - // account for negative bars - if (top < bottom) { - tmp = top; - top = bottom; - bottom = tmp; - drawBottom = true; - drawTop = false; - } - } - - // clip - if (right < axisx.min || left > axisx.max || - top < axisy.min || bottom > axisy.max) - return; - - if (left < axisx.min) { - left = axisx.min; - drawLeft = false; - } - - if (right > axisx.max) { - right = axisx.max; - drawRight = false; - } - - if (bottom < axisy.min) { - bottom = axisy.min; - drawBottom = false; - } - - if (top > axisy.max) { - top = axisy.max; - drawTop = false; - } - - left = axisx.p2c(left); - bottom = axisy.p2c(bottom); - right = axisx.p2c(right); - top = axisy.p2c(top); - - // fill the bar - if (fillStyleCallback) { - c.beginPath(); - c.moveTo(left, bottom); - c.lineTo(left, top); - c.lineTo(right, top); - c.lineTo(right, bottom); - c.fillStyle = fillStyleCallback(bottom, top); - c.fill(); - } - - // draw outline - if (lineWidth > 0 && (drawLeft || drawRight || drawTop || drawBottom)) { - c.beginPath(); - - // FIXME: inline moveTo is buggy with excanvas - c.moveTo(left, bottom + offset); - if (drawLeft) - c.lineTo(left, top + offset); - else - c.moveTo(left, top + offset); - if (drawTop) - c.lineTo(right, top + offset); - else - c.moveTo(right, top + offset); - if (drawRight) - c.lineTo(right, bottom + offset); - else - c.moveTo(right, bottom + offset); - if (drawBottom) - c.lineTo(left, bottom + offset); - else - 
c.moveTo(left, bottom + offset); - c.stroke(); - } - } - - function drawSeriesBars(series) { - function plotBars(datapoints, barLeft, barRight, offset, fillStyleCallback, axisx, axisy) { - var points = datapoints.points, ps = datapoints.pointsize; - - for (var i = 0; i < points.length; i += ps) { - if (points[i] == null) - continue; - drawBar(points[i], points[i + 1], points[i + 2], barLeft, barRight, offset, fillStyleCallback, axisx, axisy, ctx, series.bars.horizontal, series.bars.lineWidth); - } - } - - ctx.save(); - ctx.translate(plotOffset.left, plotOffset.top); - - // FIXME: figure out a way to add shadows (for instance along the right edge) - ctx.lineWidth = series.bars.lineWidth; - ctx.strokeStyle = series.color; - var barLeft = series.bars.align == "left" ? 0 : -series.bars.barWidth/2; - var fillStyleCallback = series.bars.fill ? function (bottom, top) { return getFillStyle(series.bars, series.color, bottom, top); } : null; - plotBars(series.datapoints, barLeft, barLeft + series.bars.barWidth, 0, fillStyleCallback, series.xaxis, series.yaxis); - ctx.restore(); - } - - function getFillStyle(filloptions, seriesColor, bottom, top) { - var fill = filloptions.fill; - if (!fill) - return null; - - if (filloptions.fillColor) - return getColorOrGradient(filloptions.fillColor, bottom, top, seriesColor); - - var c = $.color.parse(seriesColor); - c.a = typeof fill == "number" ? fill : 0.4; - c.normalize(); - return c.toString(); - } - - function insertLegend() { - placeholder.find(".legend").remove(); - - if (!options.legend.show) - return; - - var fragments = [], rowStarted = false, - lf = options.legend.labelFormatter, s, label; - for (var i = 0; i < series.length; ++i) { - s = series[i]; - label = s.label; - if (!label) - continue; - - if (i % options.legend.noColumns == 0) { - if (rowStarted) - fragments.push('</tr>'); - fragments.push('<tr>'); - rowStarted = true; - } - - if (lf) - label = lf(label, s); - - fragments.push( - '
<td class="legendColorBox"><div style="border:1px solid ' + options.legend.labelBoxBorderColor + ';padding:1px"><div style="width:4px;height:0;border:5px solid ' + s.color + ';overflow:hidden"></div></div></td>' + - '<td class="legendLabel">' + label + '</td>'); - } - if (rowStarted) - fragments.push('</tr>'); - - if (fragments.length == 0) - return; - - var table = '<table style="font-size:smaller;color:' + options.grid.color + '">' + fragments.join("") + '</table>
'; - if (options.legend.container != null) - $(options.legend.container).html(table); - else { - var pos = "", - p = options.legend.position, - m = options.legend.margin; - if (m[0] == null) - m = [m, m]; - if (p.charAt(0) == "n") - pos += 'top:' + (m[1] + plotOffset.top) + 'px;'; - else if (p.charAt(0) == "s") - pos += 'bottom:' + (m[1] + plotOffset.bottom) + 'px;'; - if (p.charAt(1) == "e") - pos += 'right:' + (m[0] + plotOffset.right) + 'px;'; - else if (p.charAt(1) == "w") - pos += 'left:' + (m[0] + plotOffset.left) + 'px;'; - var legend = $('<div class="legend">
' + table.replace('style="', 'style="position:absolute;' + pos +';') + '</div>
').appendTo(placeholder); - if (options.legend.backgroundOpacity != 0.0) { - // put in the transparent background - // separately to avoid blended labels and - // label boxes - var c = options.legend.backgroundColor; - if (c == null) { - c = options.grid.backgroundColor; - if (c && typeof c == "string") - c = $.color.parse(c); - else - c = $.color.extract(legend, 'background-color'); - c.a = 1; - c = c.toString(); - } - var div = legend.children(); - $('<div style="position:absolute;width:' + div.width() + 'px;height:' + div.height() + 'px;' + pos +'background-color:' + c + ';"> </div>
').prependTo(legend).css('opacity', options.legend.backgroundOpacity); - } - } - } - - - // interactive features - - var highlights = [], - redrawTimeout = null; - - // returns the data item the mouse is over, or null if none is found - function findNearbyItem(mouseX, mouseY, seriesFilter) { - var maxDistance = options.grid.mouseActiveRadius, - smallestDistance = maxDistance * maxDistance + 1, - item = null, foundPoint = false, i, j; - - for (i = series.length - 1; i >= 0; --i) { - if (!seriesFilter(series[i])) - continue; - - var s = series[i], - axisx = s.xaxis, - axisy = s.yaxis, - points = s.datapoints.points, - ps = s.datapoints.pointsize, - mx = axisx.c2p(mouseX), // precompute some stuff to make the loop faster - my = axisy.c2p(mouseY), - maxx = maxDistance / axisx.scale, - maxy = maxDistance / axisy.scale; - - // with inverse transforms, we can't use the maxx/maxy - // optimization, sadly - if (axisx.options.inverseTransform) - maxx = Number.MAX_VALUE; - if (axisy.options.inverseTransform) - maxy = Number.MAX_VALUE; - - if (s.lines.show || s.points.show) { - for (j = 0; j < points.length; j += ps) { - var x = points[j], y = points[j + 1]; - if (x == null) - continue; - - // For points and lines, the cursor must be within a - // certain distance to the data point - if (x - mx > maxx || x - mx < -maxx || - y - my > maxy || y - my < -maxy) - continue; - - // We have to calculate distances in pixels, not in - // data units, because the scales of the axes may be different - var dx = Math.abs(axisx.p2c(x) - mouseX), - dy = Math.abs(axisy.p2c(y) - mouseY), - dist = dx * dx + dy * dy; // we save the sqrt - - // use <= to ensure last point takes precedence - // (last generally means on top of) - if (dist < smallestDistance) { - smallestDistance = dist; - item = [i, j / ps]; - } - } - } - - if (s.bars.show && !item) { // no other point can be nearby - var barLeft = s.bars.align == "left" ? 0 : -s.bars.barWidth/2, - barRight = barLeft + s.bars.barWidth; - - for (j = 0; j < points.length; j += ps) { - var x = points[j], y = points[j + 1], b = points[j + 2]; - if (x == null) - continue; - - // for a bar graph, the cursor must be inside the bar - if (series[i].bars.horizontal ? 
- (mx <= Math.max(b, x) && mx >= Math.min(b, x) && - my >= y + barLeft && my <= y + barRight) : - (mx >= x + barLeft && mx <= x + barRight && - my >= Math.min(b, y) && my <= Math.max(b, y))) - item = [i, j / ps]; - } - } - } - - if (item) { - i = item[0]; - j = item[1]; - ps = series[i].datapoints.pointsize; - - return { datapoint: series[i].datapoints.points.slice(j * ps, (j + 1) * ps), - dataIndex: j, - series: series[i], - seriesIndex: i }; - } - - return null; - } - - function onMouseMove(e) { - if (options.grid.hoverable) - triggerClickHoverEvent("plothover", e, - function (s) { return s["hoverable"] != false; }); - } - - function onMouseLeave(e) { - if (options.grid.hoverable) - triggerClickHoverEvent("plothover", e, - function (s) { return false; }); - } - - function onClick(e) { - triggerClickHoverEvent("plotclick", e, - function (s) { return s["clickable"] != false; }); - } - - // trigger click or hover event (they send the same parameters - // so we share their code) - function triggerClickHoverEvent(eventname, event, seriesFilter) { - var offset = eventHolder.offset(), - canvasX = event.pageX - offset.left - plotOffset.left, - canvasY = event.pageY - offset.top - plotOffset.top, - pos = canvasToAxisCoords({ left: canvasX, top: canvasY }); - - pos.pageX = event.pageX; - pos.pageY = event.pageY; - - var item = findNearbyItem(canvasX, canvasY, seriesFilter); - - if (item) { - // fill in mouse pos for any listeners out there - item.pageX = parseInt(item.series.xaxis.p2c(item.datapoint[0]) + offset.left + plotOffset.left); - item.pageY = parseInt(item.series.yaxis.p2c(item.datapoint[1]) + offset.top + plotOffset.top); - } - - if (options.grid.autoHighlight) { - // clear auto-highlights - for (var i = 0; i < highlights.length; ++i) { - var h = highlights[i]; - if (h.auto == eventname && - !(item && h.series == item.series && - h.point[0] == item.datapoint[0] && - h.point[1] == item.datapoint[1])) - unhighlight(h.series, h.point); - } - - if (item) - highlight(item.series, item.datapoint, eventname); - } - - placeholder.trigger(eventname, [ pos, item ]); - } - - function triggerRedrawOverlay() { - if (!redrawTimeout) - redrawTimeout = setTimeout(drawOverlay, 30); - } - - function drawOverlay() { - redrawTimeout = null; - - // draw highlights - octx.save(); - octx.clearRect(0, 0, canvasWidth, canvasHeight); - octx.translate(plotOffset.left, plotOffset.top); - - var i, hi; - for (i = 0; i < highlights.length; ++i) { - hi = highlights[i]; - - if (hi.series.bars.show) - drawBarHighlight(hi.series, hi.point); - else - drawPointHighlight(hi.series, hi.point); - } - octx.restore(); - - executeHooks(hooks.drawOverlay, [octx]); - } - - function highlight(s, point, auto) { - if (typeof s == "number") - s = series[s]; - - if (typeof point == "number") { - var ps = s.datapoints.pointsize; - point = s.datapoints.points.slice(ps * point, ps * (point + 1)); - } - - var i = indexOfHighlight(s, point); - if (i == -1) { - highlights.push({ series: s, point: point, auto: auto }); - - triggerRedrawOverlay(); - } - else if (!auto) - highlights[i].auto = false; - } - - function unhighlight(s, point) { - if (s == null && point == null) { - highlights = []; - triggerRedrawOverlay(); - } - - if (typeof s == "number") - s = series[s]; - - if (typeof point == "number") - point = s.data[point]; - - var i = indexOfHighlight(s, point); - if (i != -1) { - highlights.splice(i, 1); - - triggerRedrawOverlay(); - } - } - - function indexOfHighlight(s, p) { - for (var i = 0; i < highlights.length; ++i) { - var h = 
highlights[i]; - if (h.series == s && h.point[0] == p[0] - && h.point[1] == p[1]) - return i; - } - return -1; - } - - function drawPointHighlight(series, point) { - var x = point[0], y = point[1], - axisx = series.xaxis, axisy = series.yaxis; - - if (x < axisx.min || x > axisx.max || y < axisy.min || y > axisy.max) - return; - - var pointRadius = series.points.radius + series.points.lineWidth / 2; - octx.lineWidth = pointRadius; - octx.strokeStyle = $.color.parse(series.color).scale('a', 0.5).toString(); - var radius = 1.5 * pointRadius, - x = axisx.p2c(x), - y = axisy.p2c(y); - - octx.beginPath(); - if (series.points.symbol == "circle") - octx.arc(x, y, radius, 0, 2 * Math.PI, false); - else - series.points.symbol(octx, x, y, radius, false); - octx.closePath(); - octx.stroke(); - } - - function drawBarHighlight(series, point) { - octx.lineWidth = series.bars.lineWidth; - octx.strokeStyle = $.color.parse(series.color).scale('a', 0.5).toString(); - var fillStyle = $.color.parse(series.color).scale('a', 0.5).toString(); - var barLeft = series.bars.align == "left" ? 0 : -series.bars.barWidth/2; - drawBar(point[0], point[1], point[2] || 0, barLeft, barLeft + series.bars.barWidth, - 0, function () { return fillStyle; }, series.xaxis, series.yaxis, octx, series.bars.horizontal, series.bars.lineWidth); - } - - function getColorOrGradient(spec, bottom, top, defaultColor) { - if (typeof spec == "string") - return spec; - else { - // assume this is a gradient spec; IE currently only - // supports a simple vertical gradient properly, so that's - // what we support too - var gradient = ctx.createLinearGradient(0, top, 0, bottom); - - for (var i = 0, l = spec.colors.length; i < l; ++i) { - var c = spec.colors[i]; - if (typeof c != "string") { - var co = $.color.parse(defaultColor); - if (c.brightness != null) - co = co.scale('rgb', c.brightness) - if (c.opacity != null) - co.a *= c.opacity; - c = co.toString(); - } - gradient.addColorStop(i / (l - 1), c); - } - - return gradient; - } - } - } - - $.plot = function(placeholder, data, options) { - //var t0 = new Date(); - var plot = new Plot($(placeholder), data, options, $.plot.plugins); - //(window.console ? console.log : alert)("time used (msecs): " + ((new Date()).getTime() - t0.getTime())); - return plot; - }; - - $.plot.version = "0.7"; - - $.plot.plugins = []; - - // returns a string with the date d formatted according to fmt - $.plot.formatDate = function(d, fmt, monthNames) { - var leftPad = function(n) { - n = "" + n; - return n.length == 1 ? "0" + n : n; - }; - - var r = []; - var escape = false, padNext = false; - var hours = d.getUTCHours(); - var isAM = hours < 12; - if (monthNames == null) - monthNames = ["Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec"]; - - if (fmt.search(/%p|%P/) != -1) { - if (hours > 12) { - hours = hours - 12; - } else if (hours == 0) { - hours = 12; - } - } - for (var i = 0; i < fmt.length; ++i) { - var c = fmt.charAt(i); - - if (escape) { - switch (c) { - case 'h': c = "" + hours; break; - case 'H': c = leftPad(hours); break; - case 'M': c = leftPad(d.getUTCMinutes()); break; - case 'S': c = leftPad(d.getUTCSeconds()); break; - case 'd': c = "" + d.getUTCDate(); break; - case 'm': c = "" + (d.getUTCMonth() + 1); break; - case 'y': c = "" + d.getUTCFullYear(); break; - case 'b': c = "" + monthNames[d.getUTCMonth()]; break; - case 'p': c = (isAM) ? ("" + "am") : ("" + "pm"); break; - case 'P': c = (isAM) ? 
("" + "AM") : ("" + "PM"); break; - case '0': c = ""; padNext = true; break; - } - if (c && padNext) { - c = leftPad(c); - padNext = false; - } - r.push(c); - if (!padNext) - escape = false; - } - else { - if (c == "%") - escape = true; - else - r.push(c); - } - } - return r.join(""); - }; - - // round to nearby lower multiple of base - function floorInBase(n, base) { - return base * Math.floor(n / base); - } - -})(jQuery); \ No newline at end of file diff --git a/examples/analytics/js/jquery.flot.selection.js b/examples/analytics/js/jquery.flot.selection.js deleted file mode 100644 index ca3cf7cfd..000000000 --- a/examples/analytics/js/jquery.flot.selection.js +++ /dev/null @@ -1,344 +0,0 @@ -/* -Flot plugin for selecting regions. - -The plugin defines the following options: - - selection: { - mode: null or "x" or "y" or "xy", - color: color - } - -Selection support is enabled by setting the mode to one of "x", "y" or -"xy". In "x" mode, the user will only be able to specify the x range, -similarly for "y" mode. For "xy", the selection becomes a rectangle -where both ranges can be specified. "color" is color of the selection -(if you need to change the color later on, you can get to it with -plot.getOptions().selection.color). - -When selection support is enabled, a "plotselected" event will be -emitted on the DOM element you passed into the plot function. The -event handler gets a parameter with the ranges selected on the axes, -like this: - - placeholder.bind("plotselected", function(event, ranges) { - alert("You selected " + ranges.xaxis.from + " to " + ranges.xaxis.to) - // similar for yaxis - with multiple axes, the extra ones are in - // x2axis, x3axis, ... - }); - -The "plotselected" event is only fired when the user has finished -making the selection. A "plotselecting" event is fired during the -process with the same parameters as the "plotselected" event, in case -you want to know what's happening while it's happening, - -A "plotunselected" event with no arguments is emitted when the user -clicks the mouse to remove the selection. - -The plugin also adds the following methods to the plot object: - -- setSelection(ranges, preventEvent) - - Set the selection rectangle. The passed in ranges is on the same - form as returned in the "plotselected" event. If the selection mode - is "x", you should put in either an xaxis range, if the mode is "y" - you need to put in an yaxis range and both xaxis and yaxis if the - selection mode is "xy", like this: - - setSelection({ xaxis: { from: 0, to: 10 }, yaxis: { from: 40, to: 60 } }); - - setSelection will trigger the "plotselected" event when called. If - you don't want that to happen, e.g. if you're inside a - "plotselected" handler, pass true as the second parameter. If you - are using multiple axes, you can specify the ranges on any of those, - e.g. as x2axis/x3axis/... instead of xaxis, the plugin picks the - first one it sees. - -- clearSelection(preventEvent) - - Clear the selection rectangle. Pass in true to avoid getting a - "plotunselected" event. - -- getSelection() - - Returns the current selection in the same format as the - "plotselected" event. If there's currently no selection, the - function returns null. 
- -*/ - -(function ($) { - function init(plot) { - var selection = { - first: { x: -1, y: -1}, second: { x: -1, y: -1}, - show: false, - active: false - }; - - // FIXME: The drag handling implemented here should be - // abstracted out, there's some similar code from a library in - // the navigation plugin, this should be massaged a bit to fit - // the Flot cases here better and reused. Doing this would - // make this plugin much slimmer. - var savedhandlers = {}; - - var mouseUpHandler = null; - - function onMouseMove(e) { - if (selection.active) { - updateSelection(e); - - plot.getPlaceholder().trigger("plotselecting", [ getSelection() ]); - } - } - - function onMouseDown(e) { - if (e.which != 1) // only accept left-click - return; - - // cancel out any text selections - document.body.focus(); - - // prevent text selection and drag in old-school browsers - if (document.onselectstart !== undefined && savedhandlers.onselectstart == null) { - savedhandlers.onselectstart = document.onselectstart; - document.onselectstart = function () { return false; }; - } - if (document.ondrag !== undefined && savedhandlers.ondrag == null) { - savedhandlers.ondrag = document.ondrag; - document.ondrag = function () { return false; }; - } - - setSelectionPos(selection.first, e); - - selection.active = true; - - // this is a bit silly, but we have to use a closure to be - // able to whack the same handler again - mouseUpHandler = function (e) { onMouseUp(e); }; - - $(document).one("mouseup", mouseUpHandler); - } - - function onMouseUp(e) { - mouseUpHandler = null; - - // revert drag stuff for old-school browsers - if (document.onselectstart !== undefined) - document.onselectstart = savedhandlers.onselectstart; - if (document.ondrag !== undefined) - document.ondrag = savedhandlers.ondrag; - - // no more dragging - selection.active = false; - updateSelection(e); - - if (selectionIsSane()) - triggerSelectedEvent(); - else { - // this counts as a clear - plot.getPlaceholder().trigger("plotunselected", [ ]); - plot.getPlaceholder().trigger("plotselecting", [ null ]); - } - - return false; - } - - function getSelection() { - if (!selectionIsSane()) - return null; - - var r = {}, c1 = selection.first, c2 = selection.second; - $.each(plot.getAxes(), function (name, axis) { - if (axis.used) { - var p1 = axis.c2p(c1[axis.direction]), p2 = axis.c2p(c2[axis.direction]); - r[name] = { from: Math.min(p1, p2), to: Math.max(p1, p2) }; - } - }); - return r; - } - - function triggerSelectedEvent() { - var r = getSelection(); - - plot.getPlaceholder().trigger("plotselected", [ r ]); - - // backwards-compat stuff, to be removed in future - if (r.xaxis && r.yaxis) - plot.getPlaceholder().trigger("selected", [ { x1: r.xaxis.from, y1: r.yaxis.from, x2: r.xaxis.to, y2: r.yaxis.to } ]); - } - - function clamp(min, value, max) { - return value < min ? min: (value > max ? max: value); - } - - function setSelectionPos(pos, e) { - var o = plot.getOptions(); - var offset = plot.getPlaceholder().offset(); - var plotOffset = plot.getPlotOffset(); - pos.x = clamp(0, e.pageX - offset.left - plotOffset.left, plot.width()); - pos.y = clamp(0, e.pageY - offset.top - plotOffset.top, plot.height()); - - if (o.selection.mode == "y") - pos.x = pos == selection.first ? 0 : plot.width(); - - if (o.selection.mode == "x") - pos.y = pos == selection.first ? 
0 : plot.height(); - } - - function updateSelection(pos) { - if (pos.pageX == null) - return; - - setSelectionPos(selection.second, pos); - if (selectionIsSane()) { - selection.show = true; - plot.triggerRedrawOverlay(); - } - else - clearSelection(true); - } - - function clearSelection(preventEvent) { - if (selection.show) { - selection.show = false; - plot.triggerRedrawOverlay(); - if (!preventEvent) - plot.getPlaceholder().trigger("plotunselected", [ ]); - } - } - - // function taken from markings support in Flot - function extractRange(ranges, coord) { - var axis, from, to, key, axes = plot.getAxes(); - - for (var k in axes) { - axis = axes[k]; - if (axis.direction == coord) { - key = coord + axis.n + "axis"; - if (!ranges[key] && axis.n == 1) - key = coord + "axis"; // support x1axis as xaxis - if (ranges[key]) { - from = ranges[key].from; - to = ranges[key].to; - break; - } - } - } - - // backwards-compat stuff - to be removed in future - if (!ranges[key]) { - axis = coord == "x" ? plot.getXAxes()[0] : plot.getYAxes()[0]; - from = ranges[coord + "1"]; - to = ranges[coord + "2"]; - } - - // auto-reverse as an added bonus - if (from != null && to != null && from > to) { - var tmp = from; - from = to; - to = tmp; - } - - return { from: from, to: to, axis: axis }; - } - - function setSelection(ranges, preventEvent) { - var axis, range, o = plot.getOptions(); - - if (o.selection.mode == "y") { - selection.first.x = 0; - selection.second.x = plot.width(); - } - else { - range = extractRange(ranges, "x"); - - selection.first.x = range.axis.p2c(range.from); - selection.second.x = range.axis.p2c(range.to); - } - - if (o.selection.mode == "x") { - selection.first.y = 0; - selection.second.y = plot.height(); - } - else { - range = extractRange(ranges, "y"); - - selection.first.y = range.axis.p2c(range.from); - selection.second.y = range.axis.p2c(range.to); - } - - selection.show = true; - plot.triggerRedrawOverlay(); - if (!preventEvent && selectionIsSane()) - triggerSelectedEvent(); - } - - function selectionIsSane() { - var minSize = 5; - return Math.abs(selection.second.x - selection.first.x) >= minSize && - Math.abs(selection.second.y - selection.first.y) >= minSize; - } - - plot.clearSelection = clearSelection; - plot.setSelection = setSelection; - plot.getSelection = getSelection; - - plot.hooks.bindEvents.push(function(plot, eventHolder) { - var o = plot.getOptions(); - if (o.selection.mode != null) { - eventHolder.mousemove(onMouseMove); - eventHolder.mousedown(onMouseDown); - } - }); - - - plot.hooks.drawOverlay.push(function (plot, ctx) { - // draw selection - if (selection.show && selectionIsSane()) { - var plotOffset = plot.getPlotOffset(); - var o = plot.getOptions(); - - ctx.save(); - ctx.translate(plotOffset.left, plotOffset.top); - - var c = $.color.parse(o.selection.color); - - ctx.strokeStyle = c.scale('a', 0.8).toString(); - ctx.lineWidth = 1; - ctx.lineJoin = "round"; - ctx.fillStyle = c.scale('a', 0.4).toString(); - - var x = Math.min(selection.first.x, selection.second.x), - y = Math.min(selection.first.y, selection.second.y), - w = Math.abs(selection.second.x - selection.first.x), - h = Math.abs(selection.second.y - selection.first.y); - - ctx.fillRect(x, y, w, h); - ctx.strokeRect(x, y, w, h); - - ctx.restore(); - } - }); - - plot.hooks.shutdown.push(function (plot, eventHolder) { - eventHolder.unbind("mousemove", onMouseMove); - eventHolder.unbind("mousedown", onMouseDown); - - if (mouseUpHandler) - $(document).unbind("mouseup", mouseUpHandler); - }); - - } - - 
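// The three methods attached above (clearSelection, setSelection,
// getSelection) can also be driven programmatically. A minimal sketch,
// assuming a page that loads jquery.flot.js plus this plugin and has a
// placeholder element (the "#placeholder" id is illustrative):
//
//     var plot = $.plot($("#placeholder"),
//                       [ [[0, 0], [10, 10]] ],
//                       { selection: { mode: "x" } });
//
//     // preset a selection; true suppresses the "plotselected" event
//     plot.setSelection({ xaxis: { from: 2, to: 6 } }, true);
//
//     // read it back: { xaxis: { from: 2, to: 6 }, ... }, or null
//     var ranges = plot.getSelection();
//
//     // remove it; true suppresses the "plotunselected" event
//     plot.clearSelection(true);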
$.plot.plugins.push({ - init: init, - options: { - selection: { - mode: null, // one of null, "x", "y" or "xy" - color: "#e8cfac" - } - }, - name: 'selection', - version: '1.1' - }); -})(jQuery); \ No newline at end of file diff --git a/examples/analytics/js/jquery.showLoading.js b/examples/analytics/js/jquery.showLoading.js deleted file mode 100644 index 5afd24296..000000000 --- a/examples/analytics/js/jquery.showLoading.js +++ /dev/null @@ -1,250 +0,0 @@ -/* - * jQuery showLoading plugin v1.0 - * - * Copyright (c) 2009 Jim Keller - * Context - http://www.contextllc.com - * - * Dual licensed under the MIT and GPL licenses. - * - */ - - jQuery.fn.showLoading = function(options) { - - var indicatorID; - var settings = { - 'addClass': '', - 'beforeShow': '', - 'afterShow': '', - 'hPos': 'center', - 'vPos': 'center', - 'indicatorZIndex' : 5001, - 'overlayZIndex': 5000, - 'parent': '', - 'marginTop': 0, - 'marginLeft': 0, - 'overlayWidth': null, - 'overlayHeight': null - }; - - jQuery.extend(settings, options); - - var loadingDiv = jQuery('
<div></div>'); - var overlayDiv = jQuery('<div></div>
'); - - // - // Set up ID and classes - // - if ( settings.indicatorID ) { - indicatorID = settings.indicatorID; - } - else { - indicatorID = jQuery(this).attr('id'); - } - - jQuery(loadingDiv).attr('id', 'loading-indicator-' + indicatorID ); - jQuery(loadingDiv).addClass('loading-indicator'); - - if ( settings.addClass ){ - jQuery(loadingDiv).addClass(settings.addClass); - } - - - - // - // Create the overlay - // - jQuery(overlayDiv).css('display', 'none'); - - // Append to body, otherwise position() doesn't work on Webkit-based browsers - jQuery(document.body).append(overlayDiv); - - // - // Set overlay classes - // - jQuery(overlayDiv).attr('id', 'loading-indicator-' + indicatorID + '-overlay'); - - jQuery(overlayDiv).addClass('loading-indicator-overlay'); - - if ( settings.addClass ){ - jQuery(overlayDiv).addClass(settings.addClass + '-overlay'); - } - - // - // Set overlay position - // - - var overlay_width; - var overlay_height; - - var border_top_width = jQuery(this).css('border-top-width'); - var border_left_width = jQuery(this).css('border-left-width'); - - // - // IE will return values like 'medium' as the default border, - // but we need a number - // - border_top_width = isNaN(parseInt(border_top_width)) ? 0 : border_top_width; - border_left_width = isNaN(parseInt(border_left_width)) ? 0 : border_left_width; - - var overlay_left_pos = jQuery(this).offset().left + parseInt(border_left_width); - var overlay_top_pos = jQuery(this).offset().top + parseInt(border_top_width); - - if ( settings.overlayWidth !== null ) { - overlay_width = settings.overlayWidth; - } - else { - overlay_width = parseInt(jQuery(this).width()) + parseInt(jQuery(this).css('padding-right')) + parseInt(jQuery(this).css('padding-left')); - } - - if ( settings.overlayHeight !== null ) { - overlay_height = settings.overlayWidth; - } - else { - overlay_height = parseInt(jQuery(this).height()) + parseInt(jQuery(this).css('padding-top')) + parseInt(jQuery(this).css('padding-bottom')); - } - - - jQuery(overlayDiv).css('width', overlay_width.toString() + 'px'); - jQuery(overlayDiv).css('height', overlay_height.toString() + 'px'); - - jQuery(overlayDiv).css('left', overlay_left_pos.toString() + 'px'); - jQuery(overlayDiv).css('position', 'absolute'); - - jQuery(overlayDiv).css('top', overlay_top_pos.toString() + 'px' ); - jQuery(overlayDiv).css('z-index', settings.overlayZIndex); - - // - // Set any custom overlay CSS - // - if ( settings.overlayCSS ) { - jQuery(overlayDiv).css ( settings.overlayCSS ); - } - - - // - // We have to append the element to the body first - // or .width() won't work in Webkit-based browsers (e.g. 
Chrome, Safari) - // - jQuery(loadingDiv).css('display', 'none'); - jQuery(document.body).append(loadingDiv); - - jQuery(loadingDiv).css('position', 'absolute'); - jQuery(loadingDiv).css('z-index', settings.indicatorZIndex); - - // - // Set top margin - // - - var indicatorTop = overlay_top_pos; - - if ( settings.marginTop ) { - indicatorTop += parseInt(settings.marginTop); - } - - var indicatorLeft = overlay_left_pos; - - if ( settings.marginLeft ) { - indicatorLeft += parseInt(settings.marginTop); - } - - - // - // set horizontal position - // - if ( settings.hPos.toString().toLowerCase() == 'center' ) { - jQuery(loadingDiv).css('left', (indicatorLeft + ((jQuery(overlayDiv).width() - parseInt(jQuery(loadingDiv).width())) / 2)).toString() + 'px'); - } - else if ( settings.hPos.toString().toLowerCase() == 'left' ) { - jQuery(loadingDiv).css('left', (indicatorLeft + parseInt(jQuery(overlayDiv).css('margin-left'))).toString() + 'px'); - } - else if ( settings.hPos.toString().toLowerCase() == 'right' ) { - jQuery(loadingDiv).css('left', (indicatorLeft + (jQuery(overlayDiv).width() - parseInt(jQuery(loadingDiv).width()))).toString() + 'px'); - } - else { - jQuery(loadingDiv).css('left', (indicatorLeft + parseInt(settings.hPos)).toString() + 'px'); - } - - // - // set vertical position - // - if ( settings.vPos.toString().toLowerCase() == 'center' ) { - jQuery(loadingDiv).css('top', (indicatorTop + ((jQuery(overlayDiv).height() - parseInt(jQuery(loadingDiv).height())) / 2)).toString() + 'px'); - } - else if ( settings.vPos.toString().toLowerCase() == 'top' ) { - jQuery(loadingDiv).css('top', indicatorTop.toString() + 'px'); - } - else if ( settings.vPos.toString().toLowerCase() == 'bottom' ) { - jQuery(loadingDiv).css('top', (indicatorTop + (jQuery(overlayDiv).height() - parseInt(jQuery(loadingDiv).height()))).toString() + 'px'); - } - else { - jQuery(loadingDiv).css('top', (indicatorTop + parseInt(settings.vPos)).toString() + 'px' ); - } - - - - - // - // Set any custom css for loading indicator - // - if ( settings.css ) { - jQuery(loadingDiv).css ( settings.css ); - } - - - // - // Set up callback options - // - var callback_options = - { - 'overlay': overlayDiv, - 'indicator': loadingDiv, - 'element': this - }; - - // - // beforeShow callback - // - if ( typeof(settings.beforeShow) == 'function' ) { - settings.beforeShow( callback_options ); - } - - // - // Show the overlay - // - jQuery(overlayDiv).show(); - - // - // Show the loading indicator - // - jQuery(loadingDiv).show(); - - // - // afterShow callback - // - if ( typeof(settings.afterShow) == 'function' ) { - settings.afterShow( callback_options ); - } - - return this; - }; - - - jQuery.fn.hideLoading = function(options) { - - - var settings = {}; - - jQuery.extend(settings, options); - - if ( settings.indicatorID ) { - indicatorID = settings.indicatorID; - } - else { - indicatorID = jQuery(this).attr('id'); - } - - jQuery(document.body).find('#loading-indicator-' + indicatorID ).remove(); - jQuery(document.body).find('#loading-indicator-' + indicatorID + '-overlay' ).remove(); - - return this; - }; diff --git a/examples/analytics/js/jquery.ui.selectmenu.js b/examples/analytics/js/jquery.ui.selectmenu.js deleted file mode 100755 index 073f8de92..000000000 --- a/examples/analytics/js/jquery.ui.selectmenu.js +++ /dev/null @@ -1,802 +0,0 @@ - /* - * jQuery UI selectmenu version 1.1.0 - * - * Copyright (c) 2009 AUTHORS.txt (http://jqueryui.com/about) - * Dual licensed under the MIT (MIT-LICENSE.txt) - * and GPL (GPL-LICENSE.txt) 
licenses. - * - * http://docs.jquery.com/UI - * https://github.com/fnagel/jquery-ui/wiki/Selectmenu - */ - -(function($) { - -$.widget("ui.selectmenu", { - getter: "value", - version: "1.8", - eventPrefix: "selectmenu", - options: { - transferClasses: true, - typeAhead: "sequential", - style: 'dropdown', - positionOptions: { - my: "left top", - at: "left bottom", - offset: null - }, - width: null, - menuWidth: null, - handleWidth: 26, - maxHeight: null, - icons: null, - format: null, - bgImage: function() {}, - wrapperElement: "" - }, - - _create: function() { - var self = this, o = this.options; - - // set a default id value, generate a new random one if not set by developer - var selectmenuId = this.element.attr('id') || 'ui-selectmenu-' + Math.random().toString(16).slice(2, 10); - - // quick array of button and menu id's - this.ids = [ selectmenuId + '-button', selectmenuId + '-menu' ]; - - // define safe mouseup for future toggling - this._safemouseup = true; - - // create menu button wrapper - this.newelement = $('') - .insertAfter(this.element); - this.newelement.wrap(o.wrapperElement); - - // transfer tabindex - var tabindex = this.element.attr('tabindex'); - if (tabindex) { - this.newelement.attr('tabindex', tabindex); - } - - // save reference to select in data for ease in calling methods - this.newelement.data('selectelement', this.element); - - // menu icon - this.selectmenuIcon = $('') - .prependTo(this.newelement); - - // append status span to button - this.newelement.prepend(''); - - // make associated form label trigger focus - $('label[for="' + this.element.attr('id') + '"]') - .attr('for', this.ids[0]) - .bind('click.selectmenu', function() { - self.newelement[0].focus(); - return false; - }); - - // click toggle for menu visibility - this.newelement - .bind('mousedown.selectmenu', function(event) { - self._toggle(event, true); - // make sure a click won't open/close instantly - if (o.style == "popup") { - self._safemouseup = false; - setTimeout(function() { self._safemouseup = true; }, 300); - } - return false; - }) - .bind('click.selectmenu', function() { - return false; - }) - .bind("keydown.selectmenu", function(event) { - var ret = false; - switch (event.keyCode) { - case $.ui.keyCode.ENTER: - ret = true; - break; - case $.ui.keyCode.SPACE: - self._toggle(event); - break; - case $.ui.keyCode.UP: - if (event.altKey) { - self.open(event); - } else { - self._moveSelection(-1); - } - break; - case $.ui.keyCode.DOWN: - if (event.altKey) { - self.open(event); - } else { - self._moveSelection(1); - } - break; - case $.ui.keyCode.LEFT: - self._moveSelection(-1); - break; - case $.ui.keyCode.RIGHT: - self._moveSelection(1); - break; - case $.ui.keyCode.TAB: - ret = true; - break; - default: - ret = true; - } - return ret; - }) - .bind('keypress.selectmenu', function(event) { - self._typeAhead(event.which, 'mouseup'); - return true; - }) - .bind('mouseover.selectmenu focus.selectmenu', function() { - if (!o.disabled) { - $(this).addClass(self.widgetBaseClass + '-focus ui-state-hover'); - } - }) - .bind('mouseout.selectmenu blur.selectmenu', function() { - if (!o.disabled) { - $(this).removeClass(self.widgetBaseClass + '-focus ui-state-hover'); - } - }); - - // document click closes menu - $(document).bind("mousedown.selectmenu", function(event) { - self.close(event); - }); - - // change event on original selectmenu - this.element - .bind("click.selectmenu", function() { - self._refreshValue(); - }) - // FIXME: newelement can be null under unclear circumstances in IE8 - // TODO 
not sure if this is still a problem (fnagel 20.03.11) - .bind("focus.selectmenu", function() { - if (self.newelement) { - self.newelement[0].focus(); - } - }); - - // set width when not set via options - if (!o.width) { - o.width = this.element.outerWidth(); - } - // set menu button width - this.newelement.width(o.width); - - // hide original selectmenu element - this.element.hide(); - - // create menu portion, append to body - this.list = $('').appendTo('body'); - this.list.wrap(o.wrapperElement); - - // transfer menu click to menu button - this.list - .bind("keydown.selectmenu", function(event) { - var ret = false; - switch (event.keyCode) { - case $.ui.keyCode.UP: - if (event.altKey) { - self.close(event, true); - } else { - self._moveFocus(-1); - } - break; - case $.ui.keyCode.DOWN: - if (event.altKey) { - self.close(event, true); - } else { - self._moveFocus(1); - } - break; - case $.ui.keyCode.LEFT: - self._moveFocus(-1); - break; - case $.ui.keyCode.RIGHT: - self._moveFocus(1); - break; - case $.ui.keyCode.HOME: - self._moveFocus(':first'); - break; - case $.ui.keyCode.PAGE_UP: - self._scrollPage('up'); - break; - case $.ui.keyCode.PAGE_DOWN: - self._scrollPage('down'); - break; - case $.ui.keyCode.END: - self._moveFocus(':last'); - break; - case $.ui.keyCode.ENTER: - case $.ui.keyCode.SPACE: - self.close(event, true); - $(event.target).parents('li:eq(0)').trigger('mouseup'); - break; - case $.ui.keyCode.TAB: - ret = true; - self.close(event, true); - $(event.target).parents('li:eq(0)').trigger('mouseup'); - break; - case $.ui.keyCode.ESCAPE: - self.close(event, true); - break; - default: - ret = true; - } - return ret; - }) - .bind('keypress.selectmenu', function(event) { - self._typeAhead(event.which, 'focus'); - return true; - }) - // this allows for using the scrollbar in an overflowed list - .bind( 'mousedown.selectmenu mouseup.selectmenu', function() { return false; }); - - - // needed when window is resized - $(window).bind( "resize.selectmenu", $.proxy( self._refreshPosition, this ) ); - }, - - _init: function() { - var self = this, o = this.options; - - // serialize selectmenu element options - var selectOptionData = []; - this.element - .find('option') - .each(function() { - selectOptionData.push({ - value: $(this).attr('value'), - text: self._formatText($(this).text()), - selected: $(this).attr('selected'), - disabled: $(this).attr('disabled'), - classes: $(this).attr('class'), - typeahead: $(this).attr('typeahead'), - parentOptGroup: $(this).parent('optgroup'), - bgImage: o.bgImage.call($(this)) - }); - }); - - // active state class is only used in popup style - var activeClass = (self.options.style == "popup") ? " ui-state-active" : ""; - - // empty list so we can refresh the selectmenu via selectmenu() - this.list.html(""); - - // write li's - for (var i = 0; i < selectOptionData.length; i++) { - var thisLi = $('') - .data('index', i) - .addClass(selectOptionData[i].classes) - .data('optionClasses', selectOptionData[i].classes || '') - .bind("mouseup.selectmenu", function(event) { - if (self._safemouseup && !self._disabled(event.currentTarget) && !self._disabled($( event.currentTarget ).parents( "ul>li." 
+ self.widgetBaseClass + "-group " )) ) { - var changed = $(this).data('index') != self._selectedIndex(); - self.index($(this).data('index')); - self.select(event); - if (changed) { - self.change(event); - } - self.close(event, true); - } - return false; - }) - .bind("click.selectmenu", function() { - return false; - }) - .bind('mouseover.selectmenu focus.selectmenu', function(e) { - // no hover if diabled - if (!$(e.currentTarget).hasClass(self.namespace + '-state-disabled')) { - self._selectedOptionLi().addClass(activeClass); - self._focusedOptionLi().removeClass(self.widgetBaseClass + '-item-focus ui-state-hover'); - $(this).removeClass('ui-state-active').addClass(self.widgetBaseClass + '-item-focus ui-state-hover'); - } - }) - .bind('mouseout.selectmenu blur.selectmenu', function() { - if ($(this).is(self._selectedOptionLi().selector)) { - $(this).addClass(activeClass); - } - $(this).removeClass(self.widgetBaseClass + '-item-focus ui-state-hover'); - }); - - // optgroup or not... - if ( selectOptionData[i].parentOptGroup.length ) { - var optGroupName = self.widgetBaseClass + '-group-' + this.element.find( 'optgroup' ).index( selectOptionData[i].parentOptGroup ); - if (this.list.find( 'li.' + optGroupName ).length ) { - this.list.find( 'li.' + optGroupName + ':last ul' ).append( thisLi ); - } else { - $(' ') - .appendTo( this.list ) - .find( 'ul' ) - .append( thisLi ); - } - } else { - thisLi.appendTo(this.list); - } - - // append icon if option is specified - if (o.icons) { - for (var j in o.icons) { - if (thisLi.is(o.icons[j].find)) { - thisLi - .data('optionClasses', selectOptionData[i].classes + ' ' + self.widgetBaseClass + '-hasIcon') - .addClass(self.widgetBaseClass + '-hasIcon'); - var iconClass = o.icons[j].icon || ""; - thisLi - .find('a:eq(0)') - .prepend(''); - if (selectOptionData[i].bgImage) { - thisLi.find('span').css('background-image', selectOptionData[i].bgImage); - } - } - } - } - } - - // we need to set and unset the CSS classes for dropdown and popup style - var isDropDown = (o.style == 'dropdown'); - this.newelement - .toggleClass(self.widgetBaseClass + "-dropdown", isDropDown) - .toggleClass(self.widgetBaseClass + "-popup", !isDropDown); - this.list - .toggleClass(self.widgetBaseClass + "-menu-dropdown ui-corner-bottom", isDropDown) - .toggleClass(self.widgetBaseClass + "-menu-popup ui-corner-all", !isDropDown) - // add corners to top and bottom menu items - .find('li:first') - .toggleClass("ui-corner-top", !isDropDown) - .end().find('li:last') - .addClass("ui-corner-bottom"); - this.selectmenuIcon - .toggleClass('ui-icon-triangle-1-s', isDropDown) - .toggleClass('ui-icon-triangle-2-n-s', !isDropDown); - - // transfer classes to selectmenu and list - if (o.transferClasses) { - var transferClasses = this.element.attr('class') || ''; - this.newelement.add(this.list).addClass(transferClasses); - } - - // set menu width to either menuWidth option value, width option value, or select width - if (o.style == 'dropdown') { - this.list.width(o.menuWidth ? o.menuWidth : o.width); - } else { - this.list.width(o.menuWidth ? 
o.menuWidth : o.width - o.handleWidth); - } - - // calculate default max height - if (o.maxHeight) { - // set max height from option - if (o.maxHeight < this.list.height()) { - this.list.height(o.maxHeight); - } - } else { - if (!o.format && ($(window).height() / 3) < this.list.height()) { - o.maxHeight = $(window).height() / 3; - this.list.height(o.maxHeight); - } - } - - // save reference to actionable li's (not group label li's) - this._optionLis = this.list.find('li:not(.' + self.widgetBaseClass + '-group)'); - - // transfer disabled state - if ( this.element.attr( 'disabled' ) === true ) { - this.disable(); - } else { - this.enable() - } - - // update value - this.index(this._selectedIndex()); - - // needed when selectmenu is placed at the very bottom / top of the page - window.setTimeout(function() { - self._refreshPosition(); - }, 200); - }, - - destroy: function() { - this.element.removeData( this.widgetName ) - .removeClass( this.widgetBaseClass + '-disabled' + ' ' + this.namespace + '-state-disabled' ) - .removeAttr( 'aria-disabled' ) - .unbind( ".selectmenu" ); - - $( window ).unbind( ".selectmenu" ); - $( document ).unbind( ".selectmenu" ); - - // unbind click on label, reset its for attr - $( 'label[for=' + this.newelement.attr('id') + ']' ) - .attr( 'for', this.element.attr( 'id' ) ) - .unbind( '.selectmenu' ); - - if ( this.options.wrapperElement ) { - this.newelement.find( this.options.wrapperElement ).remove(); - this.list.find( this.options.wrapperElement ).remove(); - } else { - this.newelement.remove(); - this.list.remove(); - } - this.element.show(); - - // call widget destroy function - $.Widget.prototype.destroy.apply(this, arguments); - }, - - _typeAhead: function(code, eventType){ - var self = this, focusFound = false, C = String.fromCharCode(code).toUpperCase(); - c = C.toLowerCase(); - - if (self.options.typeAhead == 'sequential') { - // clear the timeout so we can use _prevChar - window.clearTimeout('ui.selectmenu-' + self.selectmenuId); - - // define our find var - var find = typeof(self._prevChar) == 'undefined' ? '' : self._prevChar.join(''); - - function focusOptSeq(elem, ind, c){ - focusFound = true; - $(elem).trigger(eventType); - typeof(self._prevChar) == 'undefined' ? 
self._prevChar = [c] : self._prevChar[self._prevChar.length] = c; - } - this.list.find('li a').each(function(i) { - if (!focusFound) { - // allow the typeahead attribute on the option tag for a more specific lookup - var thisText = $(this).attr('typeahead') || $(this).text(); - if (thisText.indexOf(find+C) == 0) { - focusOptSeq(this,i,C) - } else if (thisText.indexOf(find+c) == 0) { - focusOptSeq(this,i,c) - } - } - }); - // set a 1 second timeout for sequenctial typeahead - // keep this set even if we have no matches so it doesnt typeahead somewhere else - window.setTimeout(function(el) { - self._prevChar = undefined; - }, 1000, self); - - } else { - //define self._prevChar if needed - if (!self._prevChar){ self._prevChar = ['',0]; } - - var focusFound = false; - function focusOpt(elem, ind){ - focusFound = true; - $(elem).trigger(eventType); - self._prevChar[1] = ind; - } - this.list.find('li a').each(function(i){ - if(!focusFound){ - var thisText = $(this).text(); - if( thisText.indexOf(C) == 0 || thisText.indexOf(c) == 0){ - if(self._prevChar[0] == C){ - if(self._prevChar[1] < i){ focusOpt(this,i); } - } - else{ focusOpt(this,i); } - } - } - }); - this._prevChar[0] = C; - } - }, - - // returns some usefull information, called by callbacks only - _uiHash: function() { - var index = this.index(); - return { - index: index, - option: $("option", this.element).get(index), - value: this.element[0].value - }; - }, - - open: function(event) { - var self = this; - if ( this.newelement.attr("aria-disabled") != 'true' ) { - this._closeOthers(event); - this.newelement - .addClass('ui-state-active'); - if (self.options.wrapperElement) { - this.list.parent().appendTo('body'); - } else { - this.list.appendTo('body'); - } - - this.list.addClass(self.widgetBaseClass + '-open') - .attr('aria-hidden', false) - .find('li:not(.' + self.widgetBaseClass + '-group):eq(' + this._selectedIndex() + ') a')[0].focus(); - if ( this.options.style == "dropdown" ) { - this.newelement.removeClass('ui-corner-all').addClass('ui-corner-top'); - } - this._refreshPosition(); - this._trigger("open", event, this._uiHash()); - } - }, - - close: function(event, retainFocus) { - if ( this.newelement.is('.ui-state-active') ) { - this.newelement - .removeClass('ui-state-active'); - this.list - .attr('aria-hidden', true) - .removeClass(this.widgetBaseClass + '-open'); - if ( this.options.style == "dropdown" ) { - this.newelement.removeClass('ui-corner-top').addClass('ui-corner-all'); - } - if ( retainFocus ) { - this.newelement.focus(); - } - this._trigger("close", event, this._uiHash()); - } - }, - - change: function(event) { - this.element.trigger("change"); - this._trigger("change", event, this._uiHash()); - }, - - select: function(event) { - if (this._disabled(event.currentTarget)) { return false; } - this._trigger("select", event, this._uiHash()); - }, - - _closeOthers: function(event) { - $('.' + this.widgetBaseClass + '.ui-state-active').not(this.newelement).each(function() { - $(this).data('selectelement').selectmenu('close', event); - }); - $('.' + this.widgetBaseClass + '.ui-state-hover').trigger('mouseout'); - }, - - _toggle: function(event, retainFocus) { - if ( this.list.is('.' + this.widgetBaseClass + '-open') ) { - this.close(event, retainFocus); - } else { - this.open(event); - } - }, - - _formatText: function(text) { - return (this.options.format ? 
this.options.format(text) : text); - }, - - _selectedIndex: function() { - return this.element[0].selectedIndex; - }, - - _selectedOptionLi: function() { - return this._optionLis.eq(this._selectedIndex()); - }, - - _focusedOptionLi: function() { - return this.list.find('.' + this.widgetBaseClass + '-item-focus'); - }, - - _moveSelection: function(amt, recIndex) { - var currIndex = parseInt(this._selectedOptionLi().data('index') || 0, 10); - var newIndex = currIndex + amt; - // do not loop when using up key - - if (newIndex < 0) { - newIndex = 0; - } - if (newIndex > this._optionLis.size() - 1) { - newIndex = this._optionLis.size() - 1; - } - //Occurs when a full loop has been made - if (newIndex === recIndex) { return false; } - - if (this._optionLis.eq(newIndex).hasClass( this.namespace + '-state-disabled' )) { - // if option at newIndex is disabled, call _moveFocus, incrementing amt by one - (amt > 0) ? ++amt : --amt; - this._moveSelection(amt, newIndex); - } else { - return this._optionLis.eq(newIndex).trigger('mouseup'); - } - }, - - _moveFocus: function(amt, recIndex) { - if (!isNaN(amt)) { - var currIndex = parseInt(this._focusedOptionLi().data('index') || 0, 10); - var newIndex = currIndex + amt; - } - else { - var newIndex = parseInt(this._optionLis.filter(amt).data('index'), 10); - } - - if (newIndex < 0) { - newIndex = 0; - } - if (newIndex > this._optionLis.size() - 1) { - newIndex = this._optionLis.size() - 1; - } - - //Occurs when a full loop has been made - if (newIndex === recIndex) { return false; } - - var activeID = this.widgetBaseClass + '-item-' + Math.round(Math.random() * 1000); - - this._focusedOptionLi().find('a:eq(0)').attr('id', ''); - - if (this._optionLis.eq(newIndex).hasClass( this.namespace + '-state-disabled' )) { - // if option at newIndex is disabled, call _moveFocus, incrementing amt by one - (amt > 0) ? ++amt : --amt; - this._moveFocus(amt, newIndex); - } else { - this._optionLis.eq(newIndex).find('a:eq(0)').attr('id',activeID).focus(); - } - - this.list.attr('aria-activedescendant', activeID); - }, - - _scrollPage: function(direction) { - var numPerPage = Math.floor(this.list.outerHeight() / this.list.find('li:first').outerHeight()); - numPerPage = (direction == 'up' ? -numPerPage : numPerPage); - this._moveFocus(numPerPage); - }, - - _setOption: function(key, value) { - this.options[key] = value; - // set - if (key == 'disabled') { - this.close(); - this.element - .add(this.newelement) - .add(this.list)[value ? 
'addClass' : 'removeClass']( - this.widgetBaseClass + '-disabled' + ' ' + - this.namespace + '-state-disabled') - .attr("aria-disabled", value); - } - }, - - disable: function(index, type){ - // if options is not provided, call the parents disable function - if ( typeof( index ) == 'undefined' ) { - this._setOption( 'disabled', true ); - } else { - if ( type == "optgroup" ) { - this._disableOptgroup(index); - } else { - this._disableOption(index); - } - } - }, - - enable: function(index, type) { - // if options is not provided, call the parents enable function - if ( typeof( index ) == 'undefined' ) { - this._setOption('disabled', false); - } else { - if ( type == "optgroup" ) { - this._enableOptgroup(index); - } else { - this._enableOption(index); - } - } - }, - - _disabled: function(elem) { - return $(elem).hasClass( this.namespace + '-state-disabled' ); - }, - - - _disableOption: function(index) { - var optionElem = this._optionLis.eq(index); - if (optionElem) { - optionElem.addClass(this.namespace + '-state-disabled') - .find("a").attr("aria-disabled", true); - this.element.find("option").eq(index).attr("disabled", "disabled"); - } - }, - - _enableOption: function(index) { - var optionElem = this._optionLis.eq(index); - if (optionElem) { - optionElem.removeClass( this.namespace + '-state-disabled' ) - .find("a").attr("aria-disabled", false); - this.element.find("option").eq(index).removeAttr("disabled"); - } - }, - - _disableOptgroup: function(index) { - var optGroupElem = this.list.find( 'li.' + this.widgetBaseClass + '-group-' + index ); - if (optGroupElem) { - optGroupElem.addClass(this.namespace + '-state-disabled') - .attr("aria-disabled", true); - this.element.find("optgroup").eq(index).attr("disabled", "disabled"); - } - }, - - _enableOptgroup: function(index) { - var optGroupElem = this.list.find( 'li.' + this.widgetBaseClass + '-group-' + index ); - if (optGroupElem) { - optGroupElem.removeClass(this.namespace + '-state-disabled') - .attr("aria-disabled", false); - this.element.find("optgroup").eq(index).removeAttr("disabled"); - } - }, - - index: function(newValue) { - if (arguments.length) { - if (!this._disabled($(this._optionLis[newValue]))) { - this.element[0].selectedIndex = newValue; - this._refreshValue(); - } else { - return false; - } - } else { - return this._selectedIndex(); - } - }, - - value: function(newValue) { - if (arguments.length) { - this.element[0].value = newValue; - this._refreshValue(); - } else { - return this.element[0].value; - } - }, - - _refreshValue: function() { - var activeClass = (this.options.style == "popup") ? " ui-state-active" : ""; - var activeID = this.widgetBaseClass + '-item-' + Math.round(Math.random() * 1000); - // deselect previous - this.list - .find('.' + this.widgetBaseClass + '-item-selected') - .removeClass(this.widgetBaseClass + "-item-selected" + activeClass) - .find('a') - .attr('aria-selected', 'false') - .attr('id', ''); - // select new - this._selectedOptionLi() - .addClass(this.widgetBaseClass + "-item-selected" + activeClass) - .find('a') - .attr('aria-selected', 'true') - .attr('id', activeID); - - // toggle any class brought in from option - var currentOptionClasses = (this.newelement.data('optionClasses') ? this.newelement.data('optionClasses') : ""); - var newOptionClasses = (this._selectedOptionLi().data('optionClasses') ? 
this._selectedOptionLi().data('optionClasses') : ""); - this.newelement - .removeClass(currentOptionClasses) - .data('optionClasses', newOptionClasses) - .addClass( newOptionClasses ) - .find('.' + this.widgetBaseClass + '-status') - .html( - this._selectedOptionLi() - .find('a:eq(0)') - .html() - ); - - this.list.attr('aria-activedescendant', activeID); - }, - - _refreshPosition: function() { - var o = this.options; - // if its a native pop-up we need to calculate the position of the selected li - if (o.style == "popup" && !o.positionOptions.offset) { - var selected = this._selectedOptionLi(); - var _offset = "0 -" + (selected.outerHeight() + selected.offset().top - this.list.offset().top); - } - // update zIndex if jQuery UI is able to process - var zIndexElement = this.element.zIndex(); - if (zIndexElement) { - this.list.css({ - zIndex: zIndexElement - }); - } - this.list.position({ - // set options for position plugin - of: o.positionOptions.of || this.newelement, - my: o.positionOptions.my, - at: o.positionOptions.at, - offset: o.positionOptions.offset || _offset, - collision: o.positionOptions.collision || 'flip' - }); - } -}); - -})(jQuery); diff --git a/examples/analytics/output.py b/examples/analytics/output.py deleted file mode 100755 index cbbb697f5..000000000 --- a/examples/analytics/output.py +++ /dev/null @@ -1,169 +0,0 @@ -#!/usr/bin/env python -# -# Copyright 2011-2015 Splunk, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"): you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -from __future__ import absolute_import -import sys, os -from splunklib import six -sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..", "..")) -import splunklib.client as client -import splunklib.results as results -try: - import utils -except ImportError: - raise Exception("Add the SDK repository to your PYTHONPATH to run the examples " - "(e.g., export PYTHONPATH=~/splunk-sdk-python.") - -__all__ = [ - "TimeRange", - "AnalyticsRetriever" -] - -ANALYTICS_INDEX_NAME = "sample_analytics" -ANALYTICS_SOURCETYPE = "sample_analytics" -APPLICATION_KEY = "application" -EVENT_KEY = "event" -DISTINCT_KEY = "distinct_id" -EVENT_TERMINATOR = "\\r\\n-----end-event-----\\r\\n" -PROPERTY_PREFIX = "analytics_prop__" - -class TimeRange: - DAY="1d" - WEEK="1w" - MONTH="1mon" - -def counts(job, result_key): - applications = [] - reader = results.ResultsReader(job.results()) - for result in reader: - if isinstance(result, dict): - applications.append({ - "name": result[result_key], - "count": int(result["count"] or 0) - }) - return applications - - -class AnalyticsRetriever: - def __init__(self, application_name, splunk_info, index = ANALYTICS_INDEX_NAME): - self.application_name = application_name - self.splunk = client.connect(**splunk_info) - self.index = index - - def applications(self): - query = "search index=%s | stats count by application" % (self.index) - job = self.splunk.jobs.create(query, exec_mode="blocking") - return counts(job, "application") - - def events(self): - query = "search index=%s application=%s | stats count by event" % (self.index, self.application_name) - job = self.splunk.jobs.create(query, exec_mode="blocking") - return counts(job, "event") - - def properties(self, event_name): - query = 'search index=%s application=%s event="%s" | stats dc(%s*) as *' % ( - self.index, self.application_name, event_name, PROPERTY_PREFIX - ) - job = self.splunk.jobs.create(query, exec_mode="blocking") - - properties = [] - reader = results.ResultsReader(job.results()) - for result in reader: - if not isinstance(result, dict): - continue - for field, count in six.iteritems(result): - # Ignore internal ResultsReader properties - if field.startswith("$"): - continue - - properties.append({ - "name": field, - "count": int(count or 0) - }) - - return properties - - def property_values(self, event_name, property): - query = 'search index=%s application=%s event="%s" | stats count by %s | rename %s as %s' % ( - self.index, self.application_name, event_name, - PROPERTY_PREFIX + property, - PROPERTY_PREFIX + property, property - ) - job = self.splunk.jobs.create(query, exec_mode="blocking") - - values = [] - reader = results.ResultsReader(job.results()) - for result in reader: - if isinstance(result, dict): - if result[property]: - values.append({ - "name": result[property], - "count": int(result["count"] or 0) - }) - - return values - - def events_over_time(self, event_name = "", time_range = TimeRange.MONTH, property = ""): - query = 'search index=%s application=%s event="%s" | timechart span=%s count by %s | fields - _span*' % ( - self.index, self.application_name, (event_name or "*"), - time_range, - (PROPERTY_PREFIX + property) if property else "event", - ) - job = self.splunk.jobs.create(query, exec_mode="blocking") - - over_time = {} - reader = results.ResultsReader(job.results()) - for result in reader: - if isinstance(result, dict): - # Get the time for this entry - time = result["_time"] - del result["_time"] - - # The rest is in the form of [event/property]:count - # pairs, so we 
decode those - for key,count in six.iteritems(result): - # Ignore internal ResultsReader properties - if key.startswith("$"): - continue - - entry = over_time.get(key, []) - entry.append({ - "count": int(count or 0), - "time": time, - }) - over_time[key] = entry - - return over_time - -def main(): - usage = "" - - argv = sys.argv[1:] - - opts = utils.parse(argv, {}, ".env", usage=usage) - retriever = AnalyticsRetriever(opts.args[0], opts.kwargs) - - #events = retriever.events() - #print events - #for event in events: - # print retriever.properties(event["name"]) - - #print retriever.property_values("critical", "version") - #print retriever.events_over_time(time_range = TimeRange.MONTH) - #print retriever.applications() - #print retriever.events_over_time() - -if __name__ == "__main__": - main() diff --git a/examples/analytics/server.py b/examples/analytics/server.py deleted file mode 100755 index a1235e52e..000000000 --- a/examples/analytics/server.py +++ /dev/null @@ -1,160 +0,0 @@ -#!/usr/bin/env python -# -# Copyright 2011-2015 Splunk, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"): you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from __future__ import absolute_import -import sys, os -from splunklib import six -sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..", "..")) - -from .bottle import route, run, debug, template, static_file, request - -from time import strptime, mktime - -from .input import AnalyticsTracker -from .output import AnalyticsRetriever, TimeRange -try: - import utils -except ImportError: - raise Exception("Add the SDK repository to your PYTHONPATH to run the examples " - "(e.g., export PYTHONPATH=~/splunk-sdk-python.") - -splunk_opts = None -retrievers = {} - -def get_retriever(name): - global retrievers - retriever = None - if name in retrievers: - retriever = retrievers[name] - else: - retriever = AnalyticsRetriever(name, splunk_opts) - retrievers[name] = retriever - - return retriever - -@route('/static/:file#.+#') -def help(file): - raise static_file(file, root='.') - -@route('/applications') -def applications(): - tracker.track("list_applications") - - retriever = get_retriever("") - applications = retriever.applications() - - output = template('templates/applications', applications=applications) - return output - -def track_app_detail(event, event_name, prop_name, time_range = None): - properties = {} - if event_name is not None and not event_name == "": - properties["ev_name"] = event_name - if prop_name is not None and not prop_name == "": - properties["prop_name"] = prop_name - if time_range is not None and not time_range == "": - properties["time_range"] = time_range - - tracker.track(event, **properties) - -@route('/api/application/:name') -def application(name): - retriever = get_retriever(name) - event_name = request.GET.get("event_name", "") - property_name = request.GET.get("property", "") - time_range = request.GET.get("time_range", TimeRange.MONTH) - - # Track the event - track_app_detail("api_app_details", event_name, property_name, time_range = time_range) - - events = 
retriever.events() - - events_over_time = retriever.events_over_time(event_name=event_name, property=property_name, time_range=time_range) - properties = [] - if event_name: - properties = retriever.properties(event_name) - - # We need to format the events to something the graphing library can handle - data = [] - for name, ticks in six.iteritems(events_over_time): - # We ignore the cases - if name == "VALUE" or name == "NULL": - continue - - event_ticks = [] - for tick in ticks: - time = strptime(tick["time"][:-6] ,'%Y-%m-%dT%H:%M:%S.%f') - count = tick["count"] - event_ticks.append([int(mktime(time)*1000),count]) - - data.append({ - "label": name, - "data": event_ticks, - }) - - result = { - "events": events, - "event_name": event_name, - "application_name": retriever.application_name, - "properties": properties, - "data": data, - "property_name": property_name, - } - - return result - -@route('/application/:name') -def application(name): - retriever = get_retriever(name) - event_name = request.GET.get("event_name", "") - property_name = request.GET.get("property", "") - - # Track the event - track_app_detail("app_details", event_name, property_name) - - events = retriever.events() - - events_over_time = retriever.events_over_time(event_name=event_name, property=property_name) - properties = [] - if event_name: - properties = retriever.properties(event_name) - - output = template('templates/application', - events=events, - event_name=event_name, - application_name=retriever.application_name, - properties=properties, - property_name=property_name, - open_tag="{{", - close_tag="}}") - - return output - -def main(): - argv = sys.argv[1:] - - opts = utils.parse(argv, {}, ".env") - global splunk_opts - splunk_opts = opts.kwargs - - global tracker - tracker = AnalyticsTracker("analytics", splunk_opts) - - debug(True) - run(reloader=True) - -if __name__ == "__main__": - main() diff --git a/examples/analytics/templates/application.tpl b/examples/analytics/templates/application.tpl deleted file mode 100644 index 8d9dc9005..000000000 --- a/examples/analytics/templates/application.tpl +++ /dev/null @@ -1,396 +0,0 @@ -%#template to generate a HTML table from a list of tuples (or list of lists, or tuple of tuples or ...) - - -{{application_name}}{{" -- " if event_name else ""}}{{event_name or ""}} - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
[application.tpl markup lost in extraction. Recoverable structure: an HTML table with the headers "Event Name" and "Event Count", filled by "%for event in events:" with %name = event["name"] and %count = event["count"], emitting one {{name}} / {{count}} row per event.]
- - - \ No newline at end of file diff --git a/examples/analytics/templates/applications.tpl b/examples/analytics/templates/applications.tpl deleted file mode 100644 index 0b439b1ed..000000000 --- a/examples/analytics/templates/applications.tpl +++ /dev/null @@ -1,52 +0,0 @@ -%#template to generate a HTML table from a list of tuples (or list of lists, or tuple of tuples or ...) - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -Splunk Analytics Sample - - -
[applications.tpl markup lost in extraction. Recoverable structure: a page headed "Splunk Analytics Sample" with a "%for application in applications:" loop that renders each application alongside "{{application["count"]}} events".]
- - \ No newline at end of file diff --git a/examples/analytics/templates/make_table.tpl b/examples/analytics/templates/make_table.tpl deleted file mode 100644 index 87811a264..000000000 --- a/examples/analytics/templates/make_table.tpl +++ /dev/null @@ -1,11 +0,0 @@ -%#template to generate a HTML table from a list of tuples (or list of lists, or tuple of tuples or ...) -

[make_table.tpl markup lost in extraction. Recoverable structure: the caption "The open items are as follows:" followed by a table built from nested "%for row in rows:" / "%for col in row:" loops, emitting one {{col}} cell per column.]
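Since the markup in these three Bottle templates did not survive extraction, here is a minimal sketch of the idea behind `make_table.tpl`, assuming the standalone `bottle` package and plain `<p>`/`<table>` markup; only the caption, the nested `%for` loops, and the `{{col}}` placeholder are recovered from the original.

```python
# A sketch of how server.py-style code could render make_table.tpl with
# Bottle's SimpleTemplate. The <p>/<table> tags are assumptions; only the
# caption, the nested %for loops, and {{col}} survive from the original.
from bottle import SimpleTemplate

MAKE_TABLE = SimpleTemplate("""\
<p>The open items are as follows:</p>
<table>
%for row in rows:
  <tr>
  %for col in row:
    <td>{{col}}</td>
  %end
  </tr>
%end
</table>
""")

# Hypothetical rows, just to show the render call.
print(MAKE_TABLE.render(rows=[("search", 42), ("index", 7)]))
```

Bottle treats any line whose first non-whitespace character is `%` as Python code and substitutes `{{...}}` expressions into the output, which is all three of these templates rely on.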
\ No newline at end of file diff --git a/examples/async/README.md deleted file mode 100644 index b142c8176..000000000 --- a/examples/async/README.md +++ /dev/null @@ -1,50 +0,0 @@ -# 'Async' use of the Python SDK - -This example is meant to serve two purposes. The first is an example of how -to use the pluggable HTTP capabilities of the SDK binding layer, and the -other is how one could use a coroutine-based library to achieve high -concurrency with the SDK. - -## Pluggable HTTP - -The example provides an implementation of the Splunk HTTP class using -`urllib2` rather than the usual `httplib`. The reason is that most -coroutine-based concurrency libraries tend to provide a modified version -of `urllib2`. The implementation here is simplified: it does not handle -proxies, certificates and other advanced features. Instead, it just shows -how one could write a custom HTTP handling class for their usage of the SDK. - -## Concurrency - -You can run the example in two modes: synchronous and asynchronous. - -### Synchronous Mode - -To run the example in synchronous mode, use the following command: - - python async.py sync - -This will execute the same search multiple times, and due to the -synchronous nature of the builtin Python implementation of `urllib2`, -we will wait until each search is finished before moving on to the next -one. - -### Asynchronous Mode - -To run the example in asynchronous mode, use the following command: - - python async.py async - -This will do the same thing as the synchronous version, except it will -use the [`eventlet`](http://eventlet.net/) library to do so. `eventlet` -provides its own version of the `urllib2` library, which makes full use -of its coroutine nature. This means that when we execute an HTTP request -(for example, `service.jobs.create(query, exec_mode="blocking")`), instead -of blocking the entire program until it returns, we will "switch" out of the -current context and into a new one. In the new context, we can issue another -HTTP request, which will in turn block, and we move to another context, and so -on. This allows us to have many requests "in-flight", and thus not block the -execution of other requests. - -In async mode, we finish the example in about a third of the time (relative to -synchronous mode). \ No newline at end of file diff --git a/examples/async/async.py deleted file mode 100755 index 097e50b3c..000000000 --- a/examples/async/async.py +++ /dev/null @@ -1,207 +0,0 @@ -#!/usr/bin/env python -# -# Copyright 2011-2015 Splunk, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"): you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -# A sample that demonstrates a custom HTTP handler for the Splunk service, -# as well as showing how you could use the Splunk SDK for Python with coroutine -# based systems like Eventlet.
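The handler contract this sample builds on is small: `client.Service(handler=...)` accepts any callable of the form `handler(url, message, **kwargs)` that returns a dict with `status`, `reason`, `headers`, and `body` keys, which is exactly the shape the `request` function later in this file returns. The sketch below is a hedged Python 3 translation of that contract using `urllib.request` (the sample itself targets Python 2's `urllib2`); the host and credentials in the usage comment are placeholders.

```python
# Minimal sketch of splunklib's pluggable-handler contract, assuming Python 3
# and urllib.request (the sample itself targets Python 2's urllib2).
import ssl
import urllib.error
import urllib.request

import splunklib.binding as binding
import splunklib.client as client


def simple_handler(url, message, **kwargs):
    """Handle a single HTTP request on behalf of splunklib."""
    body = message.get("body") or None  # an empty body is sent as None
    if isinstance(body, str):
        body = body.encode("utf-8")
    request = urllib.request.Request(
        url,
        data=body,
        headers=dict(message.get("headers", [])),
        method=message.get("method", "GET"))
    # Skip certificate validation, mirroring the sample's behavior.
    context = ssl._create_unverified_context()
    try:
        response = urllib.request.urlopen(request, context=context)
    except urllib.error.HTTPError as error:
        response = error  # an HTTPError doubles as a response object
    # Normalize the response into the shape splunklib expects.
    return {
        "status": response.getcode(),
        "reason": response.msg,
        "headers": list(response.headers.items()),
        "body": binding.ResponseReader(response),
    }


# Usage (host and credentials are placeholders):
# service = client.Service(handler=simple_handler, host="localhost",
#                          port=8089, username="admin", password="changeme")
# service.login()
```

Returning `binding.ResponseReader(response)` keeps the body streamable, so large search results are read incrementally rather than buffered in memory.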
- -#### Main Code -from __future__ import absolute_import -from __future__ import print_function -import sys, os, datetime -import urllib -import ssl -sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..", "..")) - -import splunklib.binding as binding -import splunklib.client as client -try: - from utils import parse, error -except ImportError: - raise Exception("Add the SDK repository to your PYTHONPATH to run the examples " - "(e.g., export PYTHONPATH=~/splunk-sdk-python.") - - -# Placeholder for a specific implementation of `urllib2`, -# to be defined depending on whether or not we are running -# this sample in async or sync mode. -urllib2 = None - -def _spliturl(url): - scheme, part = url.split(':', 1) - host, path = urllib.splithost(part) - host, port = urllib.splitnport(host, 80) - return scheme, host, port, path - -def main(argv): - global urllib2 - usage = "async.py " - - # Parse the command line args. - opts = parse(argv, {}, ".env") - - # We have to see if we got either the "sync" or - # "async" command line arguments. - allowed_args = ["sync", "async"] - if len(opts.args) == 0 or opts.args[0] not in allowed_args: - error("Must supply either of: %s" % allowed_args, 2) - - # Note whether or not we are async. - is_async = opts.args[0] == "async" - - # If we're async, we'' import `eventlet` and `eventlet`'s version - # of `urllib2`. Otherwise, import the stdlib version of `urllib2`. - # - # The reason for the funky import syntax is that Python imports - # are scoped to functions, and we need to make it global. - # In a real application, you would only import one of these. - if is_async: - urllib2 = __import__('eventlet.green', globals(), locals(), - ['urllib2'], -1).urllib2 - else: - urllib2 = __import__("urllib2", globals(), locals(), [], -1) - - - # Create the service instance using our custom HTTP request handler. - service = client.Service(handler=request, **opts.kwargs) - service.login() - - # Record the current time at the start of the - # "benchmark". - oldtime = datetime.datetime.now() - - def do_search(query): - # Create a search job for the query. - - # In the async case, eventlet will "relinquish" the coroutine - # worker, and let others go through. In the sync case, we will - # block the entire thread waiting for the request to complete. - job = service.jobs.create(query, exec_mode="blocking") - - # We fetch the results, and cancel the job - results = job.results() - job.cancel() - - return results - - # We specify many queries to get show the advantages - # of parallelism. - queries = [ - 'search * | head 100', - 'search * | head 100', - 'search * | head 100', - 'search * | head 100', - 'search * | head 100', - 'search * | head 100', - 'search * | head 100', - 'search * | head 100', - 'search * | head 100', - 'search * | head 100', - 'search * | head 100', - 'search * | head 100', - 'search * | head 100', - 'search * | head 100', - 'search * | head 100', - 'search * | head 100', - 'search * | head 100', - 'search * | head 100', - 'search * | head 100', - 'search * | head 100', - 'search * | head 100', - 'search * | head 100', - ] - - # Check if we are async or not, and execute all the - # specified queries. - if is_async: - import eventlet - - # Create an `eventlet` pool of workers. - pool = eventlet.GreenPool(16) - - # If we are async, we use our worker pool to farm - # out all the queries. We just pass, as we don't - # actually care about the result. 
- for results in pool.imap(do_search, queries): - pass - else: - # If we are sync, then we just execute the queries one by one, - # and we can also ignore the result. - for query in queries: - do_search(query) - - # Record the current time at the end of the benchmark, - # and print the delta elapsed time. - newtime = datetime.datetime.now() - print("Elapsed Time: %s" % (newtime - oldtime)) - - -##### Custom `urllib2`-based HTTP handler - -def request(url, message, **kwargs): - # Split the URL into constituent components. - scheme, host, port, path = _spliturl(url) - body = message.get("body", "") - - # Setup the default headers. - head = { - "Content-Length": str(len(body)), - "Host": host, - "User-Agent": "http.py/1.0", - "Accept": "*/*", - } - - # Add in the passed in headers. - for key, value in message["headers"]: - head[key] = value - - # Note the HTTP method we're using, defaulting - # to `GET`. - method = message.get("method", "GET") - - # Note that we do not support proxies in this example - # If running Python 2.7.9+, disable SSL certificate validation - if sys.version_info >= (2, 7, 9): - unverified_ssl_handler = urllib2.HTTPSHandler(context=ssl._create_unverified_context()) - opener = urllib2.build_opener(unverified_ssl_handler) - else: - opener = urllib2.build_opener() - - # Unfortunately, we need to use the hack of - # "overriding" `request.get_method` to specify - # a method other than `GET` or `POST`. - request = urllib2.Request(url, body, head) - request.get_method = lambda: method - - # Make the request and get the response - response = None - try: - response = opener.open(request) - except Exception as e: - response = e - - # Normalize the response to something the SDK expects, and - # return it. - return { - 'status': response.code, - 'reason': response.msg, - 'headers': response.info().dict, - 'body': binding.ResponseReader(response) - } - -if __name__ == "__main__": - main(sys.argv[1:]) - diff --git a/examples/binding1.py b/examples/binding1.py deleted file mode 100755 index 19c850879..000000000 --- a/examples/binding1.py +++ /dev/null @@ -1,65 +0,0 @@ -#!/usr/bin/env python -# -# Copyright 2011-2015 Splunk, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"): you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""An example that shows how to use the Splunk binding module to create a - convenient 'wrapper' interface around the Splunk REST APIs. 
The example - binds to a sampling of endpoints showing how to access collections, - entities and 'method-like' endpoints.""" - -from __future__ import absolute_import -import sys, os -sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..")) - -from splunklib.binding import connect - -try: - from utils import parse -except ImportError: - raise Exception("Add the SDK repository to your PYTHONPATH to run the examples " - "(e.g., export PYTHONPATH=~/splunk-sdk-python.") - - -class Service: - def __init__(self, context): - self.context = context - - def apps(self): - return self.context.get("apps/local") - - def indexes(self): - return self.context.get("data/indexes") - - def info(self): - return self.context.get("/services/server/info") - - def settings(self): - return self.context.get("/services/server/settings") - - def search(self, query, **kwargs): - return self.context.post("search/jobs/export", search=query, **kwargs) - -def main(argv): - opts = parse(argv, {}, ".env") - context = connect(**opts.kwargs) - service = Service(context) - assert service.apps().status == 200 - assert service.indexes().status == 200 - assert service.info().status == 200 - assert service.settings().status == 200 - assert service.search("search 404").status == 200 - -if __name__ == "__main__": - main(sys.argv[1:]) diff --git a/examples/conf.py b/examples/conf.py deleted file mode 100755 index f4163be80..000000000 --- a/examples/conf.py +++ /dev/null @@ -1,174 +0,0 @@ -#!/usr/bin/env python -# -# Copyright 2011-2015 Splunk, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"): you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Create, delete or list stanza information from/to Splunk confs.""" - -from __future__ import absolute_import -from __future__ import print_function -import sys, os - -sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..")) - -from splunklib import six -from splunklib.client import connect - -try: - from utils import error, parse -except ImportError: - raise Exception("Add the SDK repository to your PYTHONPATH to run the examples " - "(e.g., export PYTHONPATH=~/splunk-sdk-python.") - -class Program: - """Break up operations into specific methods.""" - def __init__(self, service): - self.service = service - - def create(self, opts): - """Create a conf stanza.""" - - argv = opts.args - count = len(argv) - - # unflagged arguments are conf, stanza, key. In this order - # however, we must have a conf and stanza. 
- cpres = True if count > 0 else False - spres = True if count > 1 else False - kpres = True if count > 2 else False - - if kpres: - kvpair = argv[2].split("=") - if len(kvpair) != 2: - error("Creating a k/v pair requires key and value", 2) - else: - key, value = kvpair - - if not cpres and not spres: - error("Conf name and stanza name is required for create", 2) - - name = argv[0] - stan = argv[1] - conf = self.service.confs[name] - - if not kpres: - # create stanza - conf.create(stan) - return - - # create key/value pair under existing stanza - stanza = conf[stan] - stanza.submit({key: value}) - - - def delete(self, opts): - """Delete a conf stanza.""" - - argv = opts.args - count = len(argv) - - # unflagged arguments are conf, stanza, key. In this order - # however, we must have a conf and stanza. - cpres = True if count > 0 else False - spres = True if count > 1 else False - kpres = True if count > 2 else False - - if not cpres: - error("Conf name is required for delete", 2) - - if not cpres and not spres: - error("Conf name and stanza name is required for delete", 2) - - if kpres: - error("Cannot delete individual keys from a stanza", 2) - - name = argv[0] - stan = argv[1] - conf = self.service.confs[name] - conf.delete(stan) - - def list(self, opts): - """List all confs or if a conf is given, all the stanzas in it.""" - - argv = opts.args - count = len(argv) - - # unflagged arguments are conf, stanza, key. In this order - # but all are optional - cpres = True if count > 0 else False - spres = True if count > 1 else False - kpres = True if count > 2 else False - - if not cpres: - # List out the available confs - for conf in self.service.confs: - print(conf.name) - else: - # Print out detail on the requested conf - # check for optional stanza, or key requested (or all) - name = argv[0] - conf = self.service.confs[name] - - for stanza in conf: - if (spres and argv[1] == stanza.name) or not spres: - print("[%s]" % stanza.name) - for key, value in six.iteritems(stanza.content): - if (kpres and argv[2] == key) or not kpres: - print("%s = %s" % (key, value)) - print() - - def run(self, command, opts): - """Dispatch the given command & args.""" - handlers = { - 'create': self.create, - 'delete': self.delete, - 'list': self.list - } - handler = handlers.get(command, None) - if handler is None: - error("Unrecognized command: %s" % command, 2) - handler(opts) - -def main(): - """Main program.""" - - usage = "usage: %prog [options] []" - - argv = sys.argv[1:] - - command = None - commands = ['create', 'delete', 'list'] - - # parse args, connect and setup - opts = parse(argv, {}, ".env", usage=usage) - service = connect(**opts.kwargs) - program = Program(service) - - if len(opts.args) == 0: - # no args means list - command = "list" - elif opts.args[0] in commands: - # args and the first in our list of commands, extract - # command and remove from regular args - command = opts.args[0] - opts.args.remove(command) - else: - # first one not in our list, default to list - command = "list" - - program.run(command, opts) - -if __name__ == "__main__": - main() - diff --git a/examples/dashboard/README.md b/examples/dashboard/README.md deleted file mode 100644 index 5f45688a6..000000000 --- a/examples/dashboard/README.md +++ /dev/null @@ -1,28 +0,0 @@ -# Leftronic Dashboard Integration Sample - -This sample shows how to use the Python SDK and Splunk to integrate with -a third party tool (or service). 
In this specific case, we use a -Leftronic Dashboard to show real-time Twitter data that we are indexing -using the `twitted` example in the SDK. - -## How It Works - -There are two logical components to the sample: getting data from Splunk and -pushing data to Leftronic. - -In order to get data from Splunk, we start a variety of real time searches. -For example, we have searches to get the current top hashtags (in a 5 minute -sliding window), where users are tweeting from, etc. - -We then start a loop which will ask each search job for new results, and we -then put the results in a form that Leftronic can understand. Once the results -are formed, we send them over to Leftronic using their API. - -## How To Run It - -You need to change the code file to include your Leftronic access key. Once you -do, you can simply run it by executing: - - ./feed.py - -You will also need to run the `twitted` sample at the same time. \ No newline at end of file diff --git a/examples/dashboard/feed.py b/examples/dashboard/feed.py deleted file mode 100755 index e61f1ba72..000000000 --- a/examples/dashboard/feed.py +++ /dev/null @@ -1,216 +0,0 @@ -#!/usr/bin/env python -# -# Copyright 2011-2015 Splunk, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"): you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -# -# This example shows how to integrate Splunk with 3rd party services using -# the Python SDK. In this case, we use Twitter data and Leftronic -# (http://www.leftronic.com) dashboards. You can find more information -# in the README. - - -from __future__ import absolute_import -from __future__ import print_function -import sys, os, urllib2, json -from six.moves import zip -sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..", "..")) -from xml.etree import ElementTree - -import splunklib.client as client -import splunklib.results as results -try: - from utils import parse -except ImportError: - raise Exception("Add the SDK repository to your PYTHONPATH to run the examples " - "(e.g., export PYTHONPATH=~/splunk-sdk-python.") - - -leftronic_access_key = "" - -def send_data(access_key, stream_name, point = None, command = None): - data = { - "accessKey": access_key, - "streamName": stream_name - } - - if not point is None: - data["point"] = point - if not command is None: - data["command"] = command - - request = urllib2.Request("https://www.leftronic.com/customSend/", - data = json.dumps(data) - ) - response = urllib2.urlopen(request) - - -def top_sources(service): - query = "search index=twitter status_source=* | stats count(status_source) as count by status_source | sort -count | head 5" - created_job = service.jobs.create(query, search_mode="realtime", earliest_time="rt-5m", latest_time="rt") - - def iterate(job): - reader = results.ResultsReader(job.preview()) - data = [] - - for result in reader: - if isinstance(result, dict): - status_source_xml = result["status_source"].strip() - source = status_source_xml - if status_source_xml.startswith("- for {name} to return all fired alerts. For example:\n\n
\ncurl -k -u admin:pass https://localhost:8089/servicesNS/admin/search/alerts/fired_alerts/-\n
\n", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - } - } - }, - "apps/appinstall": { - "methods": { - "POST": { - "config": "", - "params": { - "name": { - "datatype": "String", - "default": "", - "required": "true", - "summary": "Specifies the app to install. Can be either a path to the app on a local disk or a URL to an app, such as the apps available from Splunkbase.", - "validation": "" - }, - "update": { - "datatype": "Boolean", - "default": "", - "required": "false", - "summary": "If true, installs an update to an app, overwriting the existing app folder.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "201": { - "summary": "Created successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "402": { - "summary": "The Splunk license in use has disabled this feature." - }, - "403": { - "summary": "Insufficient permissions to install app." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - }, - "503": { - "summary": "This feature has been disabled in Splunk configuration files." - } - }, - "summary": "Installs a Splunk app from a local file or from a URL.", - "urlParams": {} - } - }, - "summary": "Provides for installation of apps from a URL or local file." - }, - "apps/apptemplates": { - "methods": { - "GET": { - "config": "", - "params": { - "count": { - "datatype": "Number", - "default": "30", - "required": "false", - "summary": "Indicates the maximum number of entries to return. To return all entries, specify 0.", - "validation": "" - }, - "offset": { - "datatype": "Number", - "default": "0", - "required": "false", - "summary": "Index for first item to return.", - "validation": "" - }, - "search": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Search expression to filter the response. The response matches field values against the search expression. For example:\n\nsearch=foo matches any object that has \"foo\" as a substring in a field.\nsearch=field_name%3Dfield_value restricts the match to a single field. URI-encoding is required in this example.", - "validation": "" - }, - "sort_dir": { - "datatype": "Enum", - "default": "asc", - "required": "false", - "summary": "Valid values: (asc | desc)\n\nIndicates whether to sort returned entries in ascending or descending order.", - "validation": "" - }, - "sort_key": { - "datatype": "String", - "default": "name", - "required": "false", - "summary": "Field to use for sorting.", - "validation": "" - }, - "sort_mode": { - "datatype": "Enum", - "default": "auto", - "required": "false", - "summary": "Valid values: (auto | alpha | alpha_case | num)\n\nIndicates the collating sequence for sorting the returned entries.\nauto: If all values of the field are numbers, collate numerically. Otherwise, collate alphabetically.\nalpha: Collate alphabetically.\nalpha_case: Collate alphabetically, case-sensitive.\nnum: Collate numerically.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." 
- }, - "403": { - "summary": "Insufficient permissions to view app templates." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Lists app templates that are used to create apps from the Mangager interface in Splunk Web.\n\nAn app template is valid as the \"template\" argument to POST to /services/apps/local. The app templates can be found by enumerating $SPLUNK_HOME/share/splunk/app_templates. Adding a new template takes effect without restarting splunkd or SplunkWeb.", - "urlParams": {} - } - }, - "summary": "Provides access to app templates that can be used to create new Splunk apps." - }, - "apps/apptemplates/{name}": { - "methods": { - "GET": { - "config": "", - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view app template." - }, - "404": { - "summary": "app template does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Retrieves information about a specific app template.\n\nThis call is rarely used, as all the information is provided by the apps/templates endpoint, which does not require an explicit name.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - } - } - }, - "apps/local": { - "methods": { - "GET": { - "config": "", - "params": { - "count": { - "datatype": "Number", - "default": "30", - "required": "false", - "summary": "Indicates the maximum number of entries to return. To return all entries, specify 0.", - "validation": "" - }, - "offset": { - "datatype": "Number", - "default": "0", - "required": "false", - "summary": "Index for first item to return.", - "validation": "" - }, - "refresh": { - "datatype": "Boolean", - "default": "", - "required": "false", - "summary": "Scan for new apps and reload any objects those new apps contain.", - "validation": "" - }, - "search": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Search expression to filter the response. The response matches field values against the search expression. For example:\n\nsearch=foo matches any object that has \"foo\" as a substring in a field.\nsearch=field_name%3Dfield_value restricts the match to a single field. URI-encoding is required in this example.", - "validation": "" - }, - "sort_dir": { - "datatype": "Enum", - "default": "asc", - "required": "false", - "summary": "Valid values: (asc | desc)\n\nIndicates whether to sort returned entries in ascending or descending order.", - "validation": "" - }, - "sort_key": { - "datatype": "String", - "default": "name", - "required": "false", - "summary": "Field to use for sorting.", - "validation": "" - }, - "sort_mode": { - "datatype": "Enum", - "default": "auto", - "required": "false", - "summary": "Valid values: (auto | alpha | alpha_case | num)\n\nIndicates the collating sequence for sorting the returned entries.\nauto: If all values of the field are numbers, collate numerically. 
Otherwise, collate alphabetically.\nalpha: Collate alphabetically.\nalpha_case: Collate alphabetically, case-sensitive.\nnum: Collate numerically.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view local apps." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Returns information on all locally-installed apps.\n\nSplunkbase can correlate locally-installed apps with the same app on Splunkbase to notify users about app updates.", - "urlParams": {} - }, - "POST": { - "config": "", - "params": { - "author": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "For apps you intend to post to Splunkbase, enter the username of your splunk.com account.\n\nFor internal-use-only apps, include your full name and/or contact info (for example, email).", - "validation": "" - }, - "configured": { - "datatype": "Boolean", - "default": "", - "required": "false", - "summary": "Indicates if the application's custom setup has been performed.\n'''Note''': This parameter is new with Splunk 4.2.4.", - "validation": "" - }, - "description": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Short explanatory string displayed underneath the app's title in Launcher.\n\nTypically, short descriptions of 200 characters are more effective.", - "validation": "" - }, - "label": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Defines the name of the app shown in the Splunk GUI and Launcher.\n\n* Must be between 5 and 80 characters.\n* Must not include \"Splunk For\" prefix.\n\nExamples of good labels:\n* IMAP\n* SQL Server Integration Services\n* FISMA Compliance", - "validation": "" - }, - "manageable": { - "datatype": "Boolean", - "default": "", - "required": "false", - "summary": " Indicates that the Splunk Manager can manage the app.", - "validation": "" - }, - "name": { - "datatype": "String", - "default": "", - "required": "true", - "summary": "Name of the application to create. The name you select becomes the name of the folder on disk that contains the app.", - "validation": "" - }, - "template": { - "datatype": "Enum", - "default": "", - "required": "false", - "summary": "Valid values: (barebones | sample_app)\n\nIndicates the app template to use when creating the app.\n\nSpecify either of the following:\n\n* barebones - contains basic framework for an app\n* sample_app - contains example views and searches\n\nYou can also specify any valid app template you may have previously added.", - "validation": "" - }, - "visible": { - "datatype": "Boolean", - "default": "", - "required": "false", - "summary": " Indicates if the app is visible and navigable from the UI.\n\nVisible apps require at least 1 view that is available from the UI", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "201": { - "summary": "Created successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." 
- }, - "402": { - "summary": "The Splunk license in use has disabled this feature." - }, - "403": { - "summary": "Insufficient permissions to create local app." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - }, - "503": { - "summary": "This feature has been disabled in Splunk configuration files." - } - }, - "summary": "Creates a new application.", - "urlParams": {} - } - }, - "summary": "Endpoint for creating new Splunk apps, and subsequently accessing, updating, and deleting local apps." - }, - "apps/local/{name}": { - "methods": { - "DELETE": { - "config": "", - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Deleted successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to delete local app." - }, - "404": { - "summary": "Local app does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Removes the locally installed app with the name specified by {name}.\n\nAfter deleting an app, there might also be some manual cleanup. See \"Uninstall an app\" in the \"Meet Splunk Web and Splunk apps\" section of the Splunk Admin manual.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - }, - "GET": { - "config": "", - "params": { - "refresh": { - "datatype": "Boolean", - "default": "", - "required": "false", - "summary": "Reloads the objects contained in the locally installed app with the name specified by {name}.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view local app." - }, - "404": { - "summary": "Local app does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." 
- } - }, - "summary": "Returns information about the locally installed app with the name specified by {name}.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - }, - "POST": { - "config": "", - "params": { - "author": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "check_for_updates": { - "datatype": "Boolean", - "default": "", - "required": "false", - "summary": "If set to true, Splunk checks Splunkbase for updates to this app.", - "validation": "validate(is_bool($check_for_updates$), \"Value of argument 'check_for_updates' must be a boolean\")" - }, - "configured": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "validate(is_bool($configured$), \"Value of argument 'configured' must be a boolean\")" - }, - "description": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "label": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "manageable": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "version": { - "datatype": "version string", - "default": "", - "required": "false", - "summary": "Specifies the version for the app. Each release of an app must change the version number.\n\nVersion numbers are a number followed by a sequence of numbers or dots. Pre-release versions can append a space and a single-word suffix like \"beta2\". Examples:\n\n* 1.2\n* 11.0.34\n* 2.0 beta\n* 1.3 beta2\n* 1.0 b2\n* 12.4 alpha\n* 11.0.34.234.254", - "validation": "" - }, - "visible": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Updated successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "402": { - "summary": "The Splunk license in use has disabled this feature." - }, - "403": { - "summary": "Insufficient permissions to edit local app." - }, - "404": { - "summary": "Local app does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - }, - "503": { - "summary": "This feature has been disabled in Splunk configuration files." - } - }, - "summary": "Updates the app specified by {name}.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - } - } - }, - "apps/local/{name}/package": { - "methods": { - "GET": { - "config": "", - "params": { - "<arbitrary_key>": { - "datatype": "UNDONE", - "default": "", - "required": "false", - "summary": "UNDONE", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Package file for the app created successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to create package for the app." - }, - "404": { - "summary": "App specified by {name} does not exist." 
- }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Archives the app specified by {name}, placing the archive in the following directory on your Splunk installation:\n\n:$SPLUNK_HOME/etc/system/static/app-packages/{name}.spl\n\nThe archive can then be downloaded from the management port of your Splunk installation:\n\n:https://[Splunk Host]:[Management Port]/static/app-packages/{name}.spl", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - } - } - }, - "apps/local/{name}/setup": { - "methods": { - "GET": { - "config": "", - "params": { - "<arbitrary_key>": { - "datatype": "UNDONE", - "default": "", - "required": "false", - "summary": "UNDONE", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Set up information returned successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to setup app." - }, - "404": { - "summary": "App does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Returns set up information for the app specified by {name}. In the response to this operation, the actual setup script is listed under the key value, \"eai:setup.\" \n\nSome apps contain setup scripts that must be run before the app is enabled. For example, the [http://splunk-base.splunk.com/apps/22314/splunk-for-unix-and-linux Splunk for Unix and Linux app], available from [http://splunk-base.splunk.com/ Splunkbase], contains a setup script. \n\nFor more information on setup scripts, see [[Documentation:Splunk:Developer:SetupApp|Configure a setup screen]] in the [[Documentation:Splunk:Developer:Whatsinthismanual|Splunk Developer manual]].", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - } - } - }, - "apps/local/{name}/update": { - "methods": { - "GET": { - "config": "", - "params": { - "<arbitrary_key>": { - "datatype": "UNDONE", - "default": "", - "required": "false", - "summary": "UNDONE", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Update information for the app was returned successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to update app." - }, - "404": { - "summary": "App does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." 
- } - }, - "summary": "Returns any update information available for the app specified by {name}.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - } - } - }, - "auth/login": { - "methods": { - "POST": { - "params": { - "password": { - "datatype": "String", - "default": "", - "required": "True", - "summary": "The password for the user specified with username.", - "validation": "" - }, - "username": { - "datatype": "String", - "default": "", - "required": "True", - "summary": "The Splunk account username.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Authenticated successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - } - }, - "summary": "Returns a session key to be used when making REST calls to splunkd.", - "urlParams": {} - } - }, - "summary": "Provides user authentication. \n\nNote: This endpoint is under 'auth' and not 'authentication' for backwards compatibility." - }, - "authentication/auth-tokens": { - "methods": { - "GET": { - "config": "", - "params": { - "count": { - "datatype": "Number", - "default": "30", - "required": "false", - "summary": "Indicates the maximum number of entries to return. To return all entries, specify 0.", - "validation": "" - }, - "offset": { - "datatype": "Number", - "default": "0", - "required": "false", - "summary": "Index for first item to return.", - "validation": "" - }, - "search": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Search expression to filter the response. The response matches field values against the search expression. For example:\n\nsearch=foo matches any object that has \"foo\" as a substring in a field.\nsearch=field_name%3Dfield_value restricts the match to a single field. URI-encoding is required in this example.", - "validation": "" - }, - "sort_dir": { - "datatype": "Enum", - "default": "asc", - "required": "false", - "summary": "Valid values: (asc | desc)\n\nIndicates whether to sort returned entries in ascending or descending order.", - "validation": "" - }, - "sort_key": { - "datatype": "String", - "default": "name", - "required": "false", - "summary": "Field to use for sorting.", - "validation": "" - }, - "sort_mode": { - "datatype": "Enum", - "default": "auto", - "required": "false", - "summary": "Valid values: (auto | alpha | alpha_case | num)\n\nIndicates the collating sequence for sorting the returned entries.\nauto: If all values of the field are numbers, collate numerically. Otherwise, collate alphabetically.\nalpha: Collate alphabetically.\nalpha_case: Collate alphabetically, case-sensitive.\nnum: Collate numerically.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view auth-tokens." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Does nothing. 
Is a placeholder for potential future information.", - "urlParams": {} - }, - "POST": { - "config": "", - "params": { - "name": { - "datatype": "String", - "default": "", - "required": "true", - "summary": "This is a special key, always being \"_create\"", - "validation": "" - }, - "nonce": { - "datatype": "String", - "default": "", - "required": "true", - "summary": "An alphanumeric string representing a unique identifier for this request", - "validation": "" - }, - "peername": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "The name of the splunk server requesting this token", - "validation": "" - }, - "sig": { - "datatype": "String", - "default": "", - "required": "true", - "summary": "A cryptographic signature of the \"userid\", \"username\", \"nonce\", and \"ts\" arguments", - "validation": "" - }, - "ts": { - "datatype": "Number", - "default": "", - "required": "true", - "summary": "The unix time at which the signature was created", - "validation": "" - }, - "userid": { - "datatype": "String", - "default": "", - "required": "true", - "summary": "The name of the user requesting this token", - "validation": "" - }, - "username": { - "datatype": "String", - "default": "", - "required": "true", - "summary": "The name of the user requesting this token", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "201": { - "summary": "Created successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "402": { - "summary": "The Splunk license in use has disabled this feature." - }, - "403": { - "summary": "Insufficient permissions to create auth-tokens." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - }, - "503": { - "summary": "This feature has been disabled in Splunk configuration files." - } - }, - "summary": "Creates an authentication token", - "urlParams": {} - } - }, - "summary": "Allows for creation of authentication tokens" - }, - "authentication/current-context": { - "methods": { - "GET": { - "config": "", - "params": { - "count": { - "datatype": "Number", - "default": "30", - "required": "false", - "summary": "Indicates the maximum number of entries to return. To return all entries, specify 0.", - "validation": "" - }, - "offset": { - "datatype": "Number", - "default": "0", - "required": "false", - "summary": "Index for first item to return.", - "validation": "" - }, - "search": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Search expression to filter the response. The response matches field values against the search expression. For example:\n\nsearch=foo matches any object that has \"foo\" as a substring in a field.\nsearch=field_name%3Dfield_value restricts the match to a single field. 
URI-encoding is required in this example.", - "validation": "" - }, - "sort_dir": { - "datatype": "Enum", - "default": "asc", - "required": "false", - "summary": "Valid values: (asc | desc)\n\nIndicates whether to sort returned entries in ascending or descending order.", - "validation": "" - }, - "sort_key": { - "datatype": "String", - "default": "name", - "required": "false", - "summary": "Field to use for sorting.", - "validation": "" - }, - "sort_mode": { - "datatype": "Enum", - "default": "auto", - "required": "false", - "summary": "Valid values: (auto | alpha | alpha_case | num)\n\nIndicates the collating sequence for sorting the returned entries.\nauto: If all values of the field are numbers, collate numerically. Otherwise, collate alphabetically.\nalpha: Collate alphabetically.\nalpha_case: Collate alphabetically, case-sensitive.\nnum: Collate numerically.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view current-context." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Lists one item named \"context\" which contains the name of the current user", - "urlParams": {} - } - }, - "summary": "Allows for displaying the current user context" - }, - "authentication/current-context/{name}": { - "methods": { - "GET": { - "config": "", - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view current-context." - }, - "404": { - "summary": "current-context does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Displays an item (always named \"context\") that contains the name of the current user.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - } - } - }, - "authentication/httpauth-tokens": { - "methods": { - "GET": { - "config": "", - "params": { - "count": { - "datatype": "Number", - "default": "30", - "required": "false", - "summary": "Indicates the maximum number of entries to return. To return all entries, specify 0.", - "validation": "" - }, - "offset": { - "datatype": "Number", - "default": "0", - "required": "false", - "summary": "Index for first item to return.", - "validation": "" - }, - "search": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Search expression to filter the response. The response matches field values against the search expression. For example:\n\nsearch=foo matches any object that has \"foo\" as a substring in a field.\nsearch=field_name%3Dfield_value restricts the match to a single field. 
URI-encoding is required in this example.", - "validation": "" - }, - "sort_dir": { - "datatype": "Enum", - "default": "asc", - "required": "false", - "summary": "Valid values: (asc | desc)\n\nIndicates whether to sort returned entries in ascending or descending order.", - "validation": "" - }, - "sort_key": { - "datatype": "String", - "default": "name", - "required": "false", - "summary": "Field to use for sorting.", - "validation": "" - }, - "sort_mode": { - "datatype": "Enum", - "default": "auto", - "required": "false", - "summary": "Valid values: (auto | alpha | alpha_case | num)\n\nIndicates the collating sequence for sorting the returned entries.\nauto: If all values of the field are numbers, collate numerically. Otherwise, collate alphabetically.\nalpha: Collate alphabetically.\nalpha_case: Collate alphabetically, case-sensitive.\nnum: Collate numerically.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view httpauth-tokens." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "List all currently active session tokens", - "urlParams": {} - } - }, - "summary": "Allows for management of session tokens" - }, - "authentication/httpauth-tokens/{name}": { - "methods": { - "DELETE": { - "config": "", - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Deleted successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to delete httpauth-token." - }, - "404": { - "summary": "httpauth-token does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "End the session associated with this token", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - }, - "GET": { - "config": "", - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view httpauth-tokens." - }, - "404": { - "summary": "httpauth-token does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Get information about a specific session token", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - } - } - }, - "authentication/users": { - "methods": { - "GET": { - "config": "", - "params": { - "count": { - "datatype": "Number", - "default": "30", - "required": "false", - "summary": "Indicates the maximum number of entries to return. 
To return all entries, specify 0.", - "validation": "" - }, - "offset": { - "datatype": "Number", - "default": "0", - "required": "false", - "summary": "Index for first item to return.", - "validation": "" - }, - "search": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Search expression to filter the response. The response matches field values against the search expression. For example:\n\nsearch=foo matches any object that has \"foo\" as a substring in a field.\nsearch=field_name%3Dfield_value restricts the match to a single field. URI-encoding is required in this example.", - "validation": "" - }, - "sort_dir": { - "datatype": "Enum", - "default": "asc", - "required": "false", - "summary": "Valid values: (asc | desc)\n\nIndicates whether to sort returned entries in ascending or descending order.", - "validation": "" - }, - "sort_key": { - "datatype": "String", - "default": "name", - "required": "false", - "summary": "Field to use for sorting.", - "validation": "" - }, - "sort_mode": { - "datatype": "Enum", - "default": "auto", - "required": "false", - "summary": "Valid values: (auto | alpha | alpha_case | num)\n\nIndicates the collating sequence for sorting the returned entries.\nauto: If all values of the field are numbers, collate numerically. Otherwise, collate alphabetically.\nalpha: Collate alphabetically.\nalpha_case: Collate alphabetically, case-sensitive.\nnum: Collate numerically.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view users." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Returns a list of all the users registered on the server.", - "urlParams": {} - }, - "POST": { - "config": "", - "params": { - "createrole": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "The name of a role to create for the user. 
After creating the role, you can later edit that role to specify what access that user has to Splunk.", - "validation": "" - }, - "defaultApp": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Specify a default app for the user.\n\nThe default app specified here overrides the default app inherited from the user's roles.", - "validation": "" - }, - "email": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Specify an email address for the user.", - "validation": "" - }, - "name": { - "datatype": "String", - "default": "", - "required": "true", - "summary": "The Splunk username for the user to login to splunk.\n\nusernames must be unique on the system.", - "validation": "" - }, - "password": { - "datatype": "String", - "default": "", - "required": "true", - "summary": "The user's password.", - "validation": "" - }, - "realname": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "A full name to associate with the user.", - "validation": "" - }, - "restart_background_jobs": { - "datatype": "Boolean", - "default": "", - "required": "false", - "summary": "Indicates whether to restart background search jobs when Splunk restarts.\n\nIf true, a background search job for this user that has not completed is restarted when Splunk restarts.", - "validation": "" - }, - "roles": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "A role to assign to this user. To assign multiple roles, send them in separate roles parameters.\n\nWhen creating a user, at least one role is required. Either specify one or more roles with this parameter or create a role using the createrole parameter.", - "validation": "" - }, - "tz": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Timezone to use when displaying dates for this user.\n'''Note''': This parameter is new with Splunk 4.3.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "201": { - "summary": "Created successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "402": { - "summary": "The Splunk license in use has disabled this feature." - }, - "403": { - "summary": "Insufficient permissions to create user." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - }, - "503": { - "summary": "This feature has been disabled in Splunk configuration files." - } - }, - "summary": "Creates a new user.\n\nWhen creating a user you must specify at least one role. You can specify one or more roles with the roles parameter, or you can use the createrole parameter to create a role for the user.\n\nRefer to [[Documentation:Splunk:Admin:Aboutusersandroles|About users and roles]] in the [[Documentation:Splunk:Admin:Whatsinthismanual|Splunk Admin manual]] for details about Splunk users, roles, and capabilities. ", - "urlParams": {} - } - }, - "summary": "Provides access to Splunk users.\n\nRefer to [[Documentation:Splunk:Admin:Aboutusersandroles|About users and roles]] in the [[Documentation:Splunk:Admin:Whatsinthismanual|Splunk Admin manual]] for details about Splunk users, roles, and capabilities. 
" - }, - "authentication/users/{name}": { - "methods": { - "DELETE": { - "config": "", - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Deleted successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to delete user." - }, - "404": { - "summary": "User does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Removes the user from the system.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - }, - "GET": { - "config": "", - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view user." - }, - "404": { - "summary": "User does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Returns information about the user.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - }, - "POST": { - "config": "", - "params": { - "defaultApp": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "email": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "password": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "realname": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "restart_background_jobs": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "roles": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "tz": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Updated successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "402": { - "summary": "The Splunk license in use has disabled this feature." - }, - "403": { - "summary": "Insufficient permissions to edit user." - }, - "404": { - "summary": "User does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - }, - "503": { - "summary": "This feature has been disabled in Splunk configuration files." 
- } - }, - "summary": "Update information about the user specified by {name}.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - } - } - }, - "authorization/capabilities": { - "methods": { - "GET": { - "config": "", - "params": { - "count": { - "datatype": "Number", - "default": "30", - "required": "false", - "summary": "Indicates the maximum number of entries to return. To return all entries, specify 0.", - "validation": "" - }, - "offset": { - "datatype": "Number", - "default": "0", - "required": "false", - "summary": "Index for first item to return.", - "validation": "" - }, - "search": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Search expression to filter the response. The response matches field values against the search expression. For example:\n\nsearch=foo matches any object that has \"foo\" as a substring in a field.\nsearch=field_name%3Dfield_value restricts the match to a single field. URI-encoding is required in this example.", - "validation": "" - }, - "sort_dir": { - "datatype": "Enum", - "default": "asc", - "required": "false", - "summary": "Valid values: (asc | desc)\n\nIndicates whether to sort returned entries in ascending or descending order.", - "validation": "" - }, - "sort_key": { - "datatype": "String", - "default": "name", - "required": "false", - "summary": "Field to use for sorting.", - "validation": "" - }, - "sort_mode": { - "datatype": "Enum", - "default": "auto", - "required": "false", - "summary": "Valid values: (auto | alpha | alpha_case | num)\n\nIndicates the collating sequence for sorting the returned entries.\nauto: If all values of the field are numbers, collate numerically. Otherwise, collate alphabetically.\nalpha: Collate alphabetically.\nalpha_case: Collate alphabetically, case-sensitive.\nnum: Collate numerically.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view capabilities." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "List all system capabiilities.\n\nRefer to the [[Documentation:Splunk:Admin:Addandeditroles#List_of_available_capabilities|List of available capabilities]] in the [[Documentation:Splunk:Admin:Whatsinthismanual|Splunk Admin manual]] for details.", - "urlParams": {} - } - }, - "summary": "Provides access to Splunk's capability authorization system.\n\nRefer to [[Documentation:Splunk:Admin:Aboutusersandroles|About users and roles]] in the [[Documentation:Splunk:Admin:Whatsinthismanual|Splunk Admin manual]] for details about Splunk users, roles, and capabilities." - }, - "authorization/capabilities/{name}": { - "methods": { - "GET": { - "config": "", - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view capabilities." - }, - "404": { - "summary": "Capability does not exist." 
- }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "List a particular system capability name. This does not list any further information besides the name.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - } - } - }, - "authorization/roles": { - "methods": { - "GET": { - "config": "", - "params": { - "count": { - "datatype": "Number", - "default": "30", - "required": "false", - "summary": "Indicates the maximum number of entries to return. To return all entries, specify 0.", - "validation": "" - }, - "offset": { - "datatype": "Number", - "default": "0", - "required": "false", - "summary": "Index for first item to return.", - "validation": "" - }, - "search": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Search expression to filter the response. The response matches field values against the search expression. For example:\n\nsearch=foo matches any object that has \"foo\" as a substring in a field.\nsearch=field_name%3Dfield_value restricts the match to a single field. URI-encoding is required in this example.", - "validation": "" - }, - "sort_dir": { - "datatype": "Enum", - "default": "asc", - "required": "false", - "summary": "Valid values: (asc | desc)\n\nIndicates whether to sort returned entries in ascending or descending order.", - "validation": "" - }, - "sort_key": { - "datatype": "String", - "default": "name", - "required": "false", - "summary": "Field to use for sorting.", - "validation": "" - }, - "sort_mode": { - "datatype": "Enum", - "default": "auto", - "required": "false", - "summary": "Valid values: (auto | alpha | alpha_case | num)\n\nIndicates the collating sequence for sorting the returned entries.\nauto: If all values of the field are numbers, collate numerically. Otherwise, collate alphabetically.\nalpha: Collate alphabetically.\nalpha_case: Collate alphabetically, case-sensitive.\nnum: Collate numerically.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view roles." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Lists all roles and the permissions for each role. Refer to [[Documentation:Splunk:Admin:Aboutusersandroles|About users and roles]] in the [[Documentation:Splunk:Admin:Whatsinthismanual|Splunk Admin manual]] for details about Splunk users, roles, and capabilities. ", - "urlParams": {} - }, - "POST": { - "config": "", - "params": { - "capabilities": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "A capability to assign to this role. 
To send multiple capabilities, send this argument multiple times.\n\nRoles inherit all capabilities from imported roles.\n\nCapabilities available are:\n\n* admin_all_objects\n* change_authentication\n* change_own_password\n* delete_by_keyword\n* edit_deployment_client\n* edit_deployment_server\n* edit_dist_peer\n* edit_forwarders\n* edit_httpauths\n* edit_input_defaults\n* edit_monitor\n* edit_scripted\n* edit_search_server\n* edit_splunktcp\n* edit_splunktcp_ssl\n* edit_tcp\n* edit_udp\n* edit_web_settings\n* get_metadata\n* get_typeahead\n* indexes_edit\n* license_edit\n* license_tab\n* list_deployment_client\n* list_forwarders\n* list_httpauths\n* list_inputs\n* request_remote_tok\n* rest_apps_management\n* rest_apps_view\n* rest_properties_get\n* rest_properties_set\n* restart_splunkd\n* rtsearch\n* schedule_search\n* search\n* use_file_operator", - "validation": "" - }, - "defaultApp": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Specify the name of the app to use as the default app for the role. A user-specific default app will override this.\n\nThe name you specify is the name of the folder containing the app.", - "validation": "" - }, - "imported_roles": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Specify a role to import attributes from. Specify many of these separately to import multiple roles. By default a role imports no other roles.\n\nImporting other roles imports all aspects of that role, such as capabilities and allowed indexes to search. In combining multiple roles, the effective value for each attribute is the value with the broadest permissions.\n\nDefault Splunk roles are:\n\n* admin\n* can_delete\n* power\n* user\n\nYou can specify additional roles that have been created.", - "validation": "" - }, - "name": { - "datatype": "String", - "default": "", - "required": "true", - "summary": "The name of the user role to create.", - "validation": "" - }, - "rtSrchJobsQuota": { - "datatype": "Number", - "default": "", - "required": "false", - "summary": "Specify the maximum number of concurrent real time search jobs for this role.\n\nThis count is independent from the normal search jobs limit.", - "validation": "" - }, - "srchDiskQuota": { - "datatype": "Number", - "default": "", - "required": "false", - "summary": "Specifies the maximum disk space in MB that can be used by a user's search jobs. For example, 100 limits this role to 100 MB total.", - "validation": "" - }, - "srchFilter": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Specify a search string that restricts the scope of searches run by this role. Search results for this role only show events that also match the search string you specify. In the case that a user has multiple roles with different search filters, they are combined with an OR.\n\nThe search string can include source, host, index, eventtype, sourcetype, search fields, *, OR, and AND. \n\nExample: \"host=web* OR source=/var/log/*\"\n\nNote: You can also use the srchIndexesAllowed and srchIndexesDefault parameters to limit the search on indexes.", - "validation": "" - }, - "srchIndexesAllowed": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "An index this role has permissions to search. To set several of these, pass this argument several times. 
These may be wildcarded, but the index name must begin with an underscore to match internal indexes.\n\nSearch indexes available by default from Splunk include:\n\n* All internal indexes\n* All non-internal indexes\n* _audit\n* _blocksignature\n* _internal\n* _thefishbucket\n* history\n* main\n\nYou can also specify other search indexes that have been added to the server.", - "validation": "" - }, - "srchIndexesDefault": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "A search index that searches for this role default to when no index is specified. To set several of these, pass this argument multiple times. These may be wildcarded, but the index name must begin with an underscore to match internal indexes.\n\nA user with this role can search other indexes using \"index= \" \n\nFor example, \"index=special_index\".\n\nSearch indexes available by default from Splunk include:\n\n* All internal indexes\n* All non-internal indexes\n* _audit\n* _blocksignature\n* _internal\n* _thefishbucket\n* history\n* main\n* other search indexes that have been added to the server\n\nThese indexes can be wildcarded, with the exception that '*' does not match internal indexes. To match internal indexes, start with '_'. All internal indexes are represented by '_*'.", - "validation": "" - }, - "srchJobsQuota": { - "datatype": "Number", - "default": "", - "required": "false", - "summary": "The maximum number of concurrent searches a user with this role is allowed to run. In the event of many roles per user, the maximum of these quotas is applied.", - "validation": "" - }, - "srchTimeWin": { - "datatype": "Number", - "default": "", - "required": "false", - "summary": "Maximum time span of a search, in seconds.\n \nBy default, searches are not limited to any specific time window. To override any search time windows from imported roles, set srchTimeWin to '0', as the 'admin' role does.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "201": { - "summary": "Created successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "402": { - "summary": "The Splunk license in use has disabled this feature." - }, - "403": { - "summary": "Insufficient permissions to create role." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - }, - "503": { - "summary": "This feature has been disabled in Splunk configuration files." - } - }, - "summary": "Create a user role. Refer to [[Documentation:Splunk:Admin:Aboutusersandroles|About users and roles]] in the [[Documentation:Splunk:Admin:Whatsinthismanual|Splunk Admin manual]] for details about Splunk users, roles, and capabilities.", - "urlParams": {} - } - }, - "summary": "Provides access to Splunk user roles.\n\nRefer to [[Documentation:Splunk:Admin:Aboutusersandroles|About users and roles]] in the [[Documentation:Splunk:Admin:Whatsinthismanual|Splunk Admin manual]] for details about Splunk users, roles, and capabilities. " - }, - "authorization/roles/{name}": { - "methods": { - "DELETE": { - "config": "", - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Deleted successfully." - }, - "400": { - "summary": "Request error. See response body for details." 
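
Creating a role with the parameters documented above maps to `service.roles.create` in the SDK; a sketch in which the `log_reader` role and its settings are made-up values. List-valued keyword arguments are sent as repeated REST parameters, matching the "pass this argument several times" convention:

```python
import splunklib.client as client

service = client.connect(host="localhost", port=8089,
                         username="admin", password="changeme")

# POST authorization/roles -- hypothetical role restricted to two indexes.
role = service.roles.create(
    "log_reader",
    imported_roles="user",
    srchIndexesAllowed=["main", "_internal"],  # sent as repeated parameters
    srchIndexesDefault="main",
    srchJobsQuota=2,
)
print(role.name, role["srchIndexesAllowed"])
```
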
- }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to delete role." - }, - "404": { - "summary": "Role does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Deletes the role specified by {name}.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - }, - "GET": { - "config": "", - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view role." - }, - "404": { - "summary": "Role does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Lists the permissions for the role specified by {name}.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - }, - "POST": { - "config": "", - "params": { - "capabilities": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "defaultApp": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "imported_roles": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "rtSrchJobsQuota": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "srchDiskQuota": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "srchFilter": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "srchIndexesAllowed": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "srchIndexesDefault": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "srchJobsQuota": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "srchTimeWin": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Updated successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "402": { - "summary": "The Splunk license in use has disabled this feature." - }, - "403": { - "summary": "Insufficient permissions to edit role." - }, - "404": { - "summary": "Role does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." 
- }, - "503": { - "summary": "This feature has been disabled in Splunk configuration files." - } - }, - "summary": "Updates the role specified by {name}.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - } - } - }, - "configs/conf-{file}": { - "methods": { - "GET": { - "params": { - "count": { - "datatype": "Number", - "default": "30", - "required": "false", - "summary": "Maximum number of items to return.", - "validation": "" - }, - "offset": { - "datatype": "Number", - "default": "0", - "required": "false", - "summary": "Index for first item to return.", - "validation": "" - }, - "search": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Boolean predicate to filter results.", - "validation": "" - }, - "sort_dir": { - "datatype": "String", - "default": "name", - "required": "false", - "summary": "Direction to sort by (asc/desc).", - "validation": "" - }, - "sort_key": { - "datatype": "String", - "default": "name", - "required": "false", - "summary": "Field to sort by.", - "validation": "" - }, - "sort_mode": { - "datatype": "String", - "default": "name", - "required": "false", - "summary": "Collating sequence for the sort (auto, alpha, alpha_case, num).", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view configuration file." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Lists all stanzas contained in the named configuration file.", - "urlParams": { - "file": { - "required": "true", - "summary": "file" - } - } - }, - "POST": { - "params": { - "<key>": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "This operation accepts an arbitrary set of key/value pairs to populate in the created stanza. (There is no actual parameter named \"key\".)", - "validation": "" - }, - "name": { - "datatype": "String", - "default": "", - "required": "true", - "summary": "The name of the stanza to create.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "201": { - "summary": "Created successfully." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to create configuration stanza." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Allows for creating the stanza specified by \"name\" in the configuration file specified by {file}.", - "urlParams": { - "file": { - "required": "true", - "summary": "file" - } - } - } - }, - "summary": "Provides raw access to Splunk's \".conf\" configuration files.\n\nRefer to [[Documentation:Splunk:RESTAPI:RESTconfigurations|Accessing and updating Splunk configurations]] for a comparison of these endpoints with the properties/ endpoints." - }, - "configs/conf-{file}/{name}": { - "methods": { - "DELETE": { - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Deleted successfully." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to delete configuration stanza." - }, - "500": { - "summary": "Internal server error. See response body for details." 
- } - }, - "summary": "Deletes the named stanza in the named configuration file.", - "urlParams": { - "file": { - "required": "true", - "summary": "file" - }, - "name": { - "required": "true", - "summary": "name" - } - } - }, - "GET": { - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view configuration stanza." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Display only the named stanza from the named configuration file.", - "urlParams": { - "file": { - "required": "true", - "summary": "file" - }, - "name": { - "required": "true", - "summary": "name" - } - } - }, - "POST": { - "params": { - "<key>": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "This operation accepts an arbitrary set of key/value pairs to populate in the created stanza. (There is no actual parameter named \"key\".)", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Updated successfully." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to edit configuration stanza." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Allows for editing the named stanza from the named configuration file.", - "urlParams": { - "file": { - "required": "true", - "summary": "file" - }, - "name": { - "required": "true", - "summary": "name" - } - } - } - } - }, - "data/commands": { - "methods": { - "GET": { - "config": "commands", - "params": { - "count": { - "datatype": "Number", - "default": "30", - "required": "false", - "summary": "Indicates the maximum number of entries to return. To return all entries, specify 0.", - "validation": "" - }, - "offset": { - "datatype": "Number", - "default": "0", - "required": "false", - "summary": "Index for first item to return.", - "validation": "" - }, - "search": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Search expression to filter the response. The response matches field values against the search expression. For example:\n\nsearch=foo matches any object that has \"foo\" as a substring in a field.\nsearch=field_name%3Dfield_value restricts the match to a single field. URI-encoding is required in this example.", - "validation": "" - }, - "sort_dir": { - "datatype": "Enum", - "default": "asc", - "required": "false", - "summary": "Valid values: (asc | desc)\n\nIndicates whether to sort returned entries in ascending or descending order.", - "validation": "" - }, - "sort_key": { - "datatype": "String", - "default": "name", - "required": "false", - "summary": "Field to use for sorting.", - "validation": "" - }, - "sort_mode": { - "datatype": "Enum", - "default": "auto", - "required": "false", - "summary": "Valid values: (auto | alpha | alpha_case | num)\n\nIndicates the collating sequence for sorting the returned entries.\nauto: If all values of the field are numbers, collate numerically. Otherwise, collate alphabetically.\nalpha: Collate alphabetically.\nalpha_case: Collate alphabetically, case-sensitive.\nnum: Collate numerically.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." 
- }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view commands." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "List all python search commands.", - "urlParams": {} - } - }, - "summary": "Provides access to Python search commands used in Splunk." - }, - "data/commands/{name}": { - "methods": { - "GET": { - "config": "commands", - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view command." - }, - "404": { - "summary": "Command does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Provide information about a specific python search command.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - } - } - }, - "data/indexes": { - "methods": { - "GET": { - "config": "indexes", - "params": { - "count": { - "datatype": "Number", - "default": "30", - "required": "false", - "summary": "Indicates the maximum number of entries to return. To return all entries, specify 0.", - "validation": "" - }, - "offset": { - "datatype": "Number", - "default": "0", - "required": "false", - "summary": "Index for first item to return.", - "validation": "" - }, - "search": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Search expression to filter the response. The response matches field values against the search expression. For example:\n\nsearch=foo matches any object that has \"foo\" as a substring in a field.\nsearch=field_name%3Dfield_value restricts the match to a single field. URI-encoding is required in this example.", - "validation": "" - }, - "sort_dir": { - "datatype": "Enum", - "default": "asc", - "required": "false", - "summary": "Valid values: (asc | desc)\n\nIndicates whether to sort returned entries in ascending or descending order.", - "validation": "" - }, - "sort_key": { - "datatype": "String", - "default": "name", - "required": "false", - "summary": "Field to use for sorting.", - "validation": "" - }, - "sort_mode": { - "datatype": "Enum", - "default": "auto", - "required": "false", - "summary": "Valid values: (auto | alpha | alpha_case | num)\n\nIndicates the collating sequence for sorting the returned entries.\nauto: If all values of the field are numbers, collate numerically. 
Otherwise, collate alphabetically.\nalpha: Collate alphabetically.\nalpha_case: Collate alphabetically, case-sensitive.\nnum: Collate numerically.", - "validation": "" - }, - "summarize": { - "datatype": "Bool", - "default": "", - "required": "false", - "summary": "If true, leaves out certain index details in order to provide a faster response.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "OK" - }, - "400": { - "summary": "TO DO: provide the rest of the status codes" - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view indexes." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Lists the recognized indexes on the server.", - "urlParams": {} - }, - "POST": { - "config": "indexes", - "params": { - "assureUTF8": { - "datatype": "Boolean", - "default": "false", - "required": "false", - "summary": "Verifies that all data retrieved from the index is proper UTF8.\n\nWill degrade indexing performance when enabled (set to true).\n\nCan only be set globally", - "validation": "" - }, - "blockSignSize": { - "datatype": "Number", - "default": "0", - "required": "false", - "summary": "Controls how many events make up a block for block signatures.\n\nIf this is set to 0, block signing is disabled for this index.\n\nA recommended value is 100.", - "validation": "validate(isint(blockSignSize) AND blockSignSize >= 0,\"blockSignSize must be a non-negative integer\")" - }, - "coldPath": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "An absolute path that contains the colddbs for the index. The path must be readable and writable. Cold databases are opened as needed when searching. May be defined in terms of a volume definition (see volume section below).\n\nRequired. Splunk will not start if an index lacks a valid coldPath.", - "validation": "" - }, - "coldToFrozenDir": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Destination path for the frozen archive. Use as an alternative to a coldToFrozenScript. Splunk automatically puts frozen buckets in this directory.\n\nBucket freezing policy is as follows:\n* New style buckets (4.2 and on): removes all files but the rawdata\n:To thaw, run splunk rebuild on the bucket, then move to the thawed directory\n* Old style buckets (Pre-4.2): gzip all the .data and .tsidx files\n:To thaw, gunzip the zipped files and move the bucket into the thawed directory\n\nIf both coldToFrozenDir and coldToFrozenScript are specified, coldToFrozenDir takes precedence", - "validation": "" - }, - "coldToFrozenScript": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Path to the archiving script.\n\nIf your script requires a program to run it (for example, python), specify the program followed by the path. The script must be in $SPLUNK_HOME/bin or one of its subdirectories.\n\nSplunk ships with an example archiving script in $SPLUNK_HOME/bin called coldToFrozenExample.py. Splunk DOES NOT recommend using this example script directly. It uses a default path, and if modified in place any changes will be overwritten on upgrade.\n\nSplunk recommends copying the example script to a new file in bin and modifying it for your system. 
Most importantly, change the default archive path to an existing directory that fits your needs.\n\nIf your new script in bin/ is named myColdToFrozen.py, set this key to the following:\n\ncoldToFrozenScript = \"$SPLUNK_HOME/bin/python\" \"$SPLUNK_HOME/bin/myColdToFrozen.py\"\n\nBy default, the example script has two possible behaviors when archiving:\n* For buckets created from version 4.2 and on, it removes all files except for rawdata. To thaw: cd to the frozen bucket and type splunk rebuild ., then copy the bucket to thawed for that index. We recommend using the coldToFrozenDir parameter unless you need to perform a more advanced operation upon freezing buckets.\n* For older-style buckets, we simply gzip all the .tsidx files. To thaw: cd to the frozen bucket and unzip the tsidx files, then copy the bucket to thawed for that index", - "validation": "" - }, - "compressRawdata": { - "datatype": "Boolean", - "default": "true", - "required": "false", - "summary": "This parameter is ignored. The splunkd process always compresses raw data.", - "validation": "" - }, - "enableOnlineBucketRepair": { - "datatype": "Boolean", - "default": "true", - "required": "false", - "summary": "Enables asynchronous \"online fsck\" bucket repair, which runs concurrently with Splunk.\n\nWhen enabled, you do not have to wait until buckets are repaired to start Splunk. However, you might observe a slight performance degradation.\n\n'''Note:''' This endpoint is new in Splunk 4.3.", - "validation": "" - }, - "frozenTimePeriodInSecs": { - "datatype": "Number", - "default": "188697600", - "required": "false", - "summary": "Number of seconds after which indexed data rolls to frozen. Defaults to 188697600 (6 years).\n\nFreezing data means it is removed from the index. If you need to archive your data, refer to coldToFrozenDir and coldToFrozenScript parameter documentation.", - "validation": "validate(isint(frozenTimePeriodInSecs) AND frozenTimePeriodInSecs >= 0,\"frozenTimePeriodInSecs must be a non-negative integer\")" - }, - "homePath": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "An absolute path that contains the hot and warm buckets for the index.\n\nRequired. Splunk will not start if an index lacks a valid homePath.\n\nCAUTION: Path MUST be readable and writable.", - "validation": "" - }, - "maxBloomBackfillBucketAge": { - "datatype": "Number", - "default": "30d", - "required": "false", - "summary": "Valid values are: Integer[m|s|h|d]\n\nIf a warm or cold bucket is older than the specified age, do not create or rebuild its bloomfilter. Specify 0 to never rebuild bloomfilters.\n\nFor example, if a bucket is older than specified with maxBloomBackfillBucketAge, and the rebuilding of its bloomfilter started but did not finish, do not rebuild it.", - "validation": "" - }, - "maxConcurrentOptimizes": { - "datatype": "Number", - "default": "3", - "required": "false", - "summary": "The number of concurrent optimize processes that can run against a hot bucket.\n\nThis number should be increased if instructed by Splunk Support. Typically the default value should suffice.\n", - "validation": "validate(isint(maxConcurrentOptimizes) AND maxConcurrentOptimizes >= 0,\"maxConcurrentOptimizes must be a non-negative integer\")" - }, - "maxDataSize": { - "datatype": "Number", - "default": "auto", - "required": "false", - "summary": "The maximum size in MB for a hot DB to reach before a roll to warm is triggered. 
Specifying \"auto\" or \"auto_high_volume\" causes Splunk to autotune this parameter (recommended).Use \"auto_high_volume\" for high volume indexes (such as the main index); otherwise, use \"auto\". A \"high volume index\" would typically be considered one that gets over 10GB of data per day.\n* \"auto\" sets the size to 750MB.\n* \"auto_high_volume\" sets the size to 10GB on 64-bit, and 1GB on 32-bit systems.\n\nAlthough the maximum value you can set this is 1048576 MB, which corresponds to 1 TB, a reasonable number ranges anywhere from 100 - 50000. Any number outside this range should be approved by Splunk Support before proceeding.\n\nIf you specify an invalid number or string, maxDataSize will be auto tuned.\n\nNOTE: The precise size of your warm buckets may vary from maxDataSize, due to post-processing and timing issues with the rolling policy.", - "validation": "validate(maxDataSize == \"auto\" OR maxDataSize == \"auto_high_volume\" OR isint(maxDataSize) AND maxDataSize >= 0,\"maxDataSize must be one of auto, auto_high_volume or non-negative integer\")" - }, - "maxHotBuckets": { - "datatype": "Number", - "default": "3", - "required": "false", - "summary": "Maximum hot buckets that can exist per index. Defaults to 3.\n\nWhen maxHotBuckets is exceeded, Splunk rolls the least recently used (LRU) hot bucket to warm. Both normal hot buckets and quarantined hot buckets count towards this total. This setting operates independently of maxHotIdleSecs, which can also cause hot buckets to roll.", - "validation": "validate(isint(maxHotBuckets) AND maxHotBuckets >= 0,\"maxHotBuckets must be a non-negative integer\")" - }, - "maxHotIdleSecs": { - "datatype": "Number", - "default": "0", - "required": "false", - "summary": "\"Maximum life, in seconds, of a hot bucket. Defaults to 0.\n\nIf a hot bucket exceeds maxHotIdleSecs, Splunk rolls it to warm. This setting operates independently of maxHotBuckets, which can also cause hot buckets to roll. A value of 0 turns off the idle check (equivalent to INFINITE idle time).", - "validation": "validate(isint(maxHotIdleSecs) AND maxHotIdleSecs >= 0,\"maxHotIdleSecs must be a non-negative integer\")" - }, - "maxHotSpanSecs": { - "datatype": "Number", - "default": "7776000", - "required": "false", - "summary": "Upper bound of target maximum timespan of hot/warm buckets in seconds. Defaults to 7776000 seconds (90 days).\n\nNOTE: f you set this too small, you can get an explosion of hot/warm buckets in the filesystem. The system sets a lower bound implicitly for this parameter at 3600, but this is an advanced parameter that should be set with care and understanding of the characteristics of your data.", - "validation": "validate(isint(maxHotSpanSecs) AND maxHotSpanSecs >= 0,\"maxHotSpanSecs must be a non-negative integer\")" - }, - "maxMemMB": { - "datatype": "Number", - "default": "5", - "required": "false", - "summary": "The amount of memory, expressed in MB, to allocate for buffering a single tsidx file into memory before flushing to disk. Defaults to 5. The default is recommended for all environments.\n\nIMPORTANT: Calculate this number carefully. 
Setting this number incorrectly may have adverse effects on your systems memory and/or splunkd stability/performance.", - "validation": "validate(isint(maxMemMB) AND maxMemMB >= 0,\"maxMemMB must be a non-negative integer\")" - }, - "maxMetaEntries": { - "datatype": "Number", - "default": "1000000", - "required": "false", - "summary": "Sets the maximum number of unique lines in .data files in a bucket, which may help to reduce memory consumption. If set to 0, this setting is ignored (it is treated as infinite).\n\nIf exceeded, a hot bucket is rolled to prevent further increase. If your buckets are rolling due to Strings.data hitting this limit, the culprit may be the punct field in your data. If you don't use punct, it may be best to simply disable this (see props.conf.spec in $SPLUNK_HOME/etc/system/README).\n\nThere is a small time delta between when maximum is exceeded and bucket is rolled. This means a bucket may end up with epsilon more lines than specified, but this is not a major concern unless excess is significant.", - "validation": "" - }, - "maxTotalDataSizeMB": { - "datatype": "Number", - "default": "500000", - "required": "false", - "summary": "The maximum size of an index (in MB). If an index grows larger than the maximum size, the oldest data is frozen.", - "validation": "validate(isint(maxTotalDataSizeMB) AND maxTotalDataSizeMB >= 0,\"maxTotalDataSizeMB must be a non-negative integer\")" - }, - "maxWarmDBCount": { - "datatype": "Number", - "default": "300", - "required": "false", - "summary": "The maximum number of warm buckets. If this number is exceeded, the warm bucket/s with the lowest value for their latest times will be moved to cold.", - "validation": "validate(isint(maxWarmDBCount) AND maxWarmDBCount >= 0,\"maxWarmDBCount must be a non-negative integer\")" - }, - "minRawFileSyncSecs": { - "datatype": "Number", - "default": "disable", - "required": "false", - "summary": "Specify an integer (or \"disable\") for this parameter.\n\nThis parameter sets how frequently splunkd forces a filesystem sync while compressing journal slices.\n\nDuring this interval, uncompressed slices are left on disk even after they are compressed. Then splunkd forces a filesystem sync of the compressed journal and removes the accumulated uncompressed files.\n\nIf 0 is specified, splunkd forces a filesystem sync after every slice completes compressing. Specifying \"disable\" disables syncing entirely: uncompressed slices are removed as soon as compression is complete.\n\nNOTE: Some filesystems are very inefficient at performing sync operations, so only enable this if you are sure it is needed", - "validation": "" - }, - "name": { - "datatype": "String", - "default": "", - "required": "true", - "summary": "The name of the index to create.", - "validation": "" - }, - "partialServiceMetaPeriod": { - "datatype": "Number", - "default": "0", - "required": "false", - "summary": "Related to serviceMetaPeriod. If set, it enables metadata sync every seconds, but only for records where the sync can be done efficiently in-place, without requiring a full re-write of the metadata file. Records that require full re-write are be sync'ed at serviceMetaPeriod.\n\npartialServiceMetaPeriod specifies, in seconds, how frequently it should sync. 
Zero means that this feature is turned off and serviceMetaPeriod is the only time when metadata sync happens.\n\nIf the value of partialServiceMetaPeriod is greater than serviceMetaPeriod, this setting has no effect.\n\nBy default it is turned off (zero).", - "validation": "" - }, - "quarantineFutureSecs": { - "datatype": "Number", - "default": "2592000", - "required": "false", - "summary": "Events with timestamp of quarantineFutureSecs newer than \"now\" are dropped into quarantine bucket. Defaults to 2592000 (30 days).\n\nThis is a mechanism to prevent main hot buckets from being polluted with fringe events.", - "validation": "validate(isint(quarantineFutureSecs) AND quarantineFutureSecs >= 0,\"quarantineFutureSecs must be a non-negative integer\")" - }, - "quarantinePastSecs": { - "datatype": "Number", - "default": "77760000", - "required": "false", - "summary": "Events with timestamp of quarantinePastSecs older than \"now\" are dropped into quarantine bucket. Defaults to 77760000 (900 days).\n\nThis is a mechanism to prevent the main hot buckets from being polluted with fringe events.", - "validation": "validate(isint(quarantinePastSecs) AND quarantinePastSecs >= 0,\"quarantinePastSecs must be a non-negative integer\")" - }, - "rawChunkSizeBytes": { - "datatype": "Number", - "default": "131072", - "required": "false", - "summary": "Target uncompressed size in bytes for individual raw slice in the rawdata journal of the index. Defaults to 131072 (128KB). 0 is not a valid value. If 0 is specified, rawChunkSizeBytes is set to the default value.\n\nNOTE: rawChunkSizeBytes only specifies a target chunk size. The actual chunk size may be slightly larger by an amount proportional to an individual event size.\n\nWARNING: This is an advanced parameter. Only change it if you are instructed to do so by Splunk Support.", - "validation": "validate(isint(rawChunkSizeBytes) AND rawChunkSizeBytes >= 0,\"rawChunkSizeBytes must be a non-negative integer\")" - }, - "rotatePeriodInSecs": { - "datatype": "Number", - "default": "60", - "required": "false", - "summary": "How frequently (in seconds) to check if a new hot bucket needs to be created. Also, how frequently to check if there are any warm/cold buckets that should be rolled/frozen.", - "validation": "validate(isint(rotatePeriodInSecs) AND rotatePeriodInSecs >= 0,\"rotatePeriodInSecs must be a non-negative integer\")" - }, - "serviceMetaPeriod": { - "datatype": "Number", - "default": "25", - "required": "false", - "summary": "Defines how frequently metadata is synced to disk, in seconds. Defaults to 25 (seconds).\n\nYou may want to set this to a higher value if the sum of your metadata file sizes is larger than many tens of megabytes, to avoid the hit on I/O in the indexing fast path.", - "validation": "" - }, - "syncMeta": { - "datatype": "Boolean", - "default": "true", - "required": "false", - "summary": "When true, a sync operation is called before file descriptor is closed on metadata file updates. This functionality improves integrity of metadata files, especially in regards to operating system crashes/machine failures.\n\nNote: Do not change this parameter without the input of a Splunk Support.", - "validation": "" - }, - "thawedPath": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "An absolute path that contains the thawed (resurrected) databases for the index.\n\nCannot be defined in terms of a volume definition.\n\nRequired. 
Splunk will not start if an index lacks a valid thawedPath.\n\n", - "validation": "" - }, - "throttleCheckPeriod": { - "datatype": "Number", - "default": "15", - "required": "false", - "summary": "Defines how frequently Splunk checks for the index throttling condition, in seconds. Defaults to 15 (seconds).\n\nNote: Do not change this parameter without the input of Splunk Support.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "201": { - "summary": "Index created successfully; followed by header:\n\nLocation: /services/data/indexes/{name}" - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "402": { - "summary": "The Splunk license in use has disabled this feature." - }, - "403": { - "summary": "Insufficient permissions to create index." - }, - "409": { - "summary": "The index name already exists." - }, - "500": { - "summary": "Internal server error. See response body for details." - }, - "503": { - "summary": "This feature has been disabled in Splunk configuration files." - } - }, - "summary": "Creates a new index with the given name.", - "urlParams": {} - } - }, - "summary": "Provides services to create and manage data indexes." - }, - "data/indexes/{name}": { - "methods": { - "GET": { - "config": "indexes", - "params": { - "summarize": { - "datatype": "Bool", - "default": "", - "required": "false", - "summary": "If true, leaves out certain index details in order to provide a faster response.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view index." - }, - "404": { - "summary": "Index does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details."
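
As a usage sketch alongside the create-index endpoint documented above: with the SDK's `splunklib.client` module, the parameters listed here map directly onto keyword arguments. The connection details and the `sdk_test` index name below are placeholder assumptions, not values from this reference.

```python
import splunklib.client as client

# Placeholder connection details; adjust for your deployment.
service = client.connect(host="localhost", port=8089,
                         username="admin", password="changeme")

# POST data/indexes: "name" is the first argument; the optional
# endpoint parameters documented above become keyword arguments.
index = service.indexes.create("sdk_test", maxTotalDataSizeMB="500000")
print(index.name, index["maxTotalDataSizeMB"])
```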
- } - }, - "summary": "Retrieves information about the named index.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - }, - "POST": { - "config": "indexes", - "params": { - "assureUTF8": { - "datatype": "INHERITED", - "default": "false", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "blockSignSize": { - "datatype": "INHERITED", - "default": "0", - "required": "false", - "summary": "INHERITED", - "validation": "validate(isint(blockSignSize) AND blockSignSize >= 0,\"blockSignSize must be a non-negative integer\")" - }, - "coldToFrozenDir": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "coldToFrozenScript": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "compressRawdata": { - "datatype": "INHERITED", - "default": "true", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "enableOnlineBucketRepair": { - "datatype": "INHERITED", - "default": "true", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "frozenTimePeriodInSecs": { - "datatype": "INHERITED", - "default": "188697600", - "required": "false", - "summary": "INHERITED", - "validation": "validate(isint(frozenTimePeriodInSecs) AND frozenTimePeriodInSecs >= 0,\"frozenTimePeriodInSecs must be a non-negative integer\")" - }, - "maxBloomBackfillBucketAge": { - "datatype": "INHERITED", - "default": "30d", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "maxConcurrentOptimizes": { - "datatype": "INHERITED", - "default": "3", - "required": "false", - "summary": "INHERITED", - "validation": "validate(isint(maxConcurrentOptimizes) AND maxConcurrentOptimizes >= 0,\"maxConcurrentOptimizes must be a non-negative integer\")" - }, - "maxDataSize": { - "datatype": "INHERITED", - "default": "auto", - "required": "false", - "summary": "INHERITED", - "validation": "validate(maxDataSize == \"auto\" OR maxDataSize == \"auto_high_volume\" OR isint(maxDataSize) AND maxDataSize >= 0,\"maxDataSize must be one of auto, auto_high_volume or non-negative integer\")" - }, - "maxHotBuckets": { - "datatype": "INHERITED", - "default": "3", - "required": "false", - "summary": "INHERITED", - "validation": "validate(isint(maxHotBuckets) AND maxHotBuckets >= 0,\"maxHotBuckets must be a non-negative integer\")" - }, - "maxHotIdleSecs": { - "datatype": "INHERITED", - "default": "0", - "required": "false", - "summary": "INHERITED", - "validation": "validate(isint(maxHotIdleSecs) AND maxHotIdleSecs >= 0,\"maxHotIdleSecs must be a non-negative integer\")" - }, - "maxHotSpanSecs": { - "datatype": "INHERITED", - "default": "7776000", - "required": "false", - "summary": "INHERITED", - "validation": "validate(isint(maxHotSpanSecs) AND maxHotSpanSecs >= 0,\"maxHotSpanSecs must be a non-negative integer\")" - }, - "maxMemMB": { - "datatype": "INHERITED", - "default": "5", - "required": "false", - "summary": "INHERITED", - "validation": "validate(isint(maxMemMB) AND maxMemMB >= 0,\"maxMemMB must be a non-negative integer\")" - }, - "maxMetaEntries": { - "datatype": "INHERITED", - "default": "1000000", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "maxTotalDataSizeMB": { - "datatype": "INHERITED", - "default": "500000", - "required": "false", - "summary": "INHERITED", - "validation": "validate(isint(maxTotalDataSizeMB) AND maxTotalDataSizeMB >= 0,\"maxTotalDataSizeMB must be a 
non-negative integer\")" - }, - "maxWarmDBCount": { - "datatype": "INHERITED", - "default": "300", - "required": "false", - "summary": "INHERITED", - "validation": "validate(isint(maxWarmDBCount) AND maxWarmDBCount >= 0,\"maxWarmDBCount must be a non-negative integer\")" - }, - "minRawFileSyncSecs": { - "datatype": "INHERITED", - "default": "disable", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "partialServiceMetaPeriod": { - "datatype": "INHERITED", - "default": "0", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "quarantineFutureSecs": { - "datatype": "INHERITED", - "default": "2592000", - "required": "false", - "summary": "INHERITED", - "validation": "validate(isint(quarantineFutureSecs) AND quarantineFutureSecs >= 0,\"quarantineFutureSecs must be a non-negative integer\")" - }, - "quarantinePastSecs": { - "datatype": "INHERITED", - "default": "77760000", - "required": "false", - "summary": "INHERITED", - "validation": "validate(isint(quarantinePastSecs) AND quarantinePastSecs >= 0,\"quarantinePastSecs must be a non-negative integer\")" - }, - "rawChunkSizeBytes": { - "datatype": "INHERITED", - "default": "131072", - "required": "false", - "summary": "INHERITED", - "validation": "validate(isint(rawChunkSizeBytes) AND rawChunkSizeBytes >= 0,\"rawChunkSizeBytes must be a non-negative integer\")" - }, - "rotatePeriodInSecs": { - "datatype": "INHERITED", - "default": "60", - "required": "false", - "summary": "INHERITED", - "validation": "validate(isint(rotatePeriodInSecs) AND rotatePeriodInSecs >= 0,\"rotatePeriodInSecs must be a non-negative integer\")" - }, - "serviceMetaPeriod": { - "datatype": "INHERITED", - "default": "25", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "syncMeta": { - "datatype": "INHERITED", - "default": "true", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "throttleCheckPeriod": { - "datatype": "INHERITED", - "default": "15", - "required": "false", - "summary": "INHERITED", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Properties for the index were updated successfully." - }, - "400": { - "summary": "Some arguments were invalid." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "402": { - "summary": "The Splunk license in use has disabled this feature." - }, - "403": { - "summary": "Insufficient permissions to edit index." - }, - "404": { - "summary": "The specified index was not found." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Unspecified error." - }, - "503": { - "summary": "This feature has been disabled in Splunk configuration files." - } - }, - "summary": "Updates the data index specified by {name} with the index attributes provided.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - } - } - }, - "data/inputs/ad": { - "methods": { - "GET": { - "config": "admon", - "params": { - "count": { - "datatype": "Number", - "default": "30", - "required": "false", - "summary": "Indicates the maximum number of entries to return.
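
A companion sketch for the update endpoint above, again with placeholder connection details, and assuming the `sdk_test` index already exists:

```python
import splunklib.client as client

service = client.connect(host="localhost", port=8089,
                         username="admin", password="changeme")  # placeholders

# POST data/indexes/{name}: writable settings from the parameter
# list above can be passed as keyword arguments to Entity.update().
index = service.indexes["sdk_test"]
index.update(frozenTimePeriodInSecs="188697600")
index.refresh()  # re-read the entity so local state reflects the change
print(index["frozenTimePeriodInSecs"])
```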
To return all entries, specify 0.", - "validation": "" - }, - "offset": { - "datatype": "Number", - "default": "0", - "required": "false", - "summary": "Index for first item to return.", - "validation": "" - }, - "search": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Boolean predicate to filter results.", - "validation": "" - }, - "sort_dir": { - "datatype": "Enum", - "default": "asc", - "required": "false", - "summary": "Valid values: (asc | desc)\n\nIndicates whether to sort the entries returned in ascending or descending order.", - "validation": "" - }, - "sort_key": { - "datatype": "String", - "default": "name", - "required": "false", - "summary": "Field to sort by.", - "validation": "" - }, - "sort_mode": { - "datatype": "Enum", - "default": "auto", - "required": "false", - "summary": "Valid values: (auto | alpha | alpha_case | num)\n\nIndicates the collating sequence for sorting the returned entries.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view AD monitoring configuration." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Gets current AD monitoring configuration.", - "urlParams": {} - }, - "POST": { - "config": "admon", - "params": { - "disabled": { - "datatype": "Boolean", - "default": "1", - "required": "false", - "summary": "Indicates whether the monitoring is disabled.", - "validation": "" - }, - "index": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "The index in which to store the gathered data.", - "validation": "" - }, - "monitorSubtree": { - "datatype": "Number", - "default": "1", - "required": "true", - "summary": "Whether or not to monitor the subtree(s) of a given directory tree path. 1 means yes, 0 means no.", - "validation": "" - }, - "name": { - "datatype": "String", - "default": "", - "required": "true", - "summary": "A unique name that represents a configuration or set of configurations for a specific domain controller (DC).", - "validation": "" - }, - "startingNode": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Where in the Active Directory directory tree to start monitoring. If not specified, will attempt to start at the root of the directory tree.", - "validation": "" - }, - "targetDc": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Specifies a fully qualified domain name of a valid, network-accessible DC. If not specified, Splunk will obtain the local computer's DC.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "201": { - "summary": "Created successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "402": { - "summary": "The Splunk license in use has disabled this feature." - }, - "403": { - "summary": "Insufficient permissions to create monitoring stanza." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." 
- }, - "500": { - "summary": "Internal server error. See response body for details." - }, - "503": { - "summary": "This feature has been disabled in Splunk configuration files." - } - }, - "summary": "Creates new or modifies existing performance monitoring settings.", - "urlParams": {} - } - }, - "summary": "Provides access to Active Directory monitoring input." - }, - "data/inputs/ad/{name}": { - "methods": { - "DELETE": { - "config": "admon", - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Deleted successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to delete AD monitoring stanza." - }, - "404": { - "summary": "AD monitoring stanza does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Deletes a given AD monitoring stanza.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - }, - "GET": { - "config": "admon", - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view AD monitoring configuration." - }, - "404": { - "summary": "AD monitoring stanza does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Gets the current configuration for a given AD monitoring stanza.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - }, - "POST": { - "config": "admon", - "params": { - "disabled": { - "datatype": "INHERITED", - "default": "1", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "index": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "monitorSubtree": { - "datatype": "INHERITED", - "default": "1", - "required": "true", - "summary": "INHERITED", - "validation": "" - }, - "startingNode": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "targetDc": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Updated successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "402": { - "summary": "The Splunk license in use has disabled this feature." - }, - "403": { - "summary": "Insufficient permissions to edit AD monitoring stanza." - }, - "404": { - "summary": "AD monitoring stanza does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." 
- }, - "503": { - "summary": "This feature has been disabled in Splunk configuration files." - } - }, - "summary": "Modifies a given AD monitoring stanza.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - } - } - }, - "data/inputs/monitor": { - "methods": { - "GET": { - "config": "inputs", - "params": { - "count": { - "datatype": "Number", - "default": "30", - "required": "false", - "summary": "Indicates the maximum number of entries to return. To return all entries, specify 0.", - "validation": "" - }, - "offset": { - "datatype": "Number", - "default": "0", - "required": "false", - "summary": "Index for first item to return.", - "validation": "" - }, - "search": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Search expression to filter the response. The response matches field values against the search expression. For example:\n\nsearch=foo matches any object that has \"foo\" as a substring in a field.\nsearch=field_name%3Dfield_value restricts the match to a single field. URI-encoding is required in this example.", - "validation": "" - }, - "sort_dir": { - "datatype": "Enum", - "default": "asc", - "required": "false", - "summary": "Valid values: (asc | desc)\n\nIndicates whether to sort returned entries in ascending or descending order.", - "validation": "" - }, - "sort_key": { - "datatype": "String", - "default": "name", - "required": "false", - "summary": "Field to use for sorting.", - "validation": "" - }, - "sort_mode": { - "datatype": "Enum", - "default": "auto", - "required": "false", - "summary": "Valid values: (auto | alpha | alpha_case | num)\n\nIndicates the collating sequence for sorting the returned entries.\nauto: If all values of the field are numbers, collate numerically. Otherwise, collate alphabetically.\nalpha: Collate alphabetically.\nalpha_case: Collate alphabetically, case-sensitive.\nnum: Collate numerically.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view monitored input." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "List enabled and disabled monitor inputs.", - "urlParams": {} - }, - "POST": { - "config": "inputs", - "params": { - "blacklist": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Specify a regular expression for a file path. The file path that matches this regular expression is not indexed.", - "validation": "" - }, - "check-index": { - "datatype": "Boolean", - "default": "", - "required": "false", - "summary": "If set to true, the \"index\" value will be checked to ensure that it is the name of a valid index.", - "validation": "is_bool('check-index')" - }, - "check-path": { - "datatype": "Boolean", - "default": "", - "required": "false", - "summary": "If set to true, the \"name\" value will be checked to ensure that it exists.", - "validation": "is_bool('check-path')" - }, - "crc-salt": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "A string that modifies the file tracking identity for files in this input. 
The magic value \"\" invokes special behavior (see admin documentation).", - "validation": "" - }, - "followTail": { - "datatype": "Boolean", - "default": "", - "required": "false", - "summary": "If set to true, files that are seen for the first time will be read from the end.", - "validation": "is_bool('followTail')" - }, - "host": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "The value to populate in the host field for events from this data input.", - "validation": "" - }, - "host_regex": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Specify a regular expression for a file path. If the path for a file matches this regular expression, the captured value is used to populate the host field for events from this data input. The regular expression must have one capture group.", - "validation": "" - }, - "host_segment": { - "datatype": "Number", - "default": "", - "required": "false", - "summary": "Use the specified slash-separated segment of the filepath as the host field value.", - "validation": "is_pos_int('host_segment')" - }, - "ignore-older-than": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Specify a time value. If the modification time of a file being monitored falls outside of this rolling time window, the file is no longer being monitored.", - "validation": "validate(match('ignore-older-than', \"^\\\\d+[dms]$\"),\"'Ignore older than' must be a number immediately followed by d(ays), m(inutes), or s(econds).\")" - }, - "index": { - "datatype": "String", - "default": "default", - "required": "false", - "summary": "The index in which events from this input should be stored.", - "validation": "is_index('index')" - }, - "name": { - "datatype": "String", - "default": "", - "required": "true", - "summary": "The file or directory path to monitor on the system.", - "validation": "validate(len(name) < 4096, 'Must be less than 4096 characters.')" - }, - "recursive": { - "datatype": "Boolean", - "default": "", - "required": "false", - "summary": "Setting this to \"false\" will prevent monitoring of any subdirectories encountered within this data input.", - "validation": "is_bool('recursive')" - }, - "rename-source": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "The value to populate in the source field for events from this data input. The same source should not be used for multiple data inputs.", - "validation": "" - }, - "sourcetype": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "The value to populate in the sourcetype field for incoming events.", - "validation": "" - }, - "time-before-close": { - "datatype": "Number", - "default": "", - "required": "false", - "summary": "When Splunk reaches the end of a file that is being read, the file will be kept open for a minimum of the number of seconds specified in this value. After this period has elapsed, the file will be checked again for more data.", - "validation": "is_pos_int('time-before-close')" - }, - "whitelist": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Specify a regular expression for a file path. Only file paths that match this regular expression are indexed.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "201": { - "summary": "Created successfully." - }, - "400": { - "summary": "Request error. See response body for details."
- }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "402": { - "summary": "The Splunk license in use has disabled this feature." - }, - "403": { - "summary": "Insufficient permissions to create monitored input." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - }, - "503": { - "summary": "This feature has been disabled in Splunk configuration files." - } - }, - "summary": "Create a new file or directory monitor input.", - "urlParams": {} - } - }, - "summary": "Provides access to monitor inputs." - }, - "data/inputs/monitor/{name}": { - "methods": { - "DELETE": { - "config": "inputs", - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Deleted successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to delete monitored input." - }, - "404": { - "summary": "Monitored input does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Disable the named monitor data input and remove it from the configuration.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - }, - "GET": { - "config": "inputs", - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view monitored input." - }, - "404": { - "summary": "Monitored input does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." 
- } - }, - "summary": "List the properties of a single monitor data input.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - }, - "POST": { - "config": "inputs", - "params": { - "blacklist": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "check-index": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "is_bool('check-index')" - }, - "check-path": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "is_bool('check-path')" - }, - "crc-salt": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "followTail": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "is_bool('followTail')" - }, - "host": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "host_regex": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "host_segment": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "is_pos_int('host_segment')" - }, - "ignore-older-than": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "validate(match('ignore-older-than', \"^\\\\d+[dms]$\"),\"'Ignore older than' must be a number immediately followed by d(ays), m(inutes), or s(econds).\")" - }, - "index": { - "datatype": "INHERITED", - "default": "default", - "required": "false", - "summary": "INHERITED", - "validation": "is_index('index')" - }, - "recursive": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "is_bool('recursive')" - }, - "rename-source": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "sourcetype": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "time-before-close": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "is_pos_int('time-before-close')" - }, - "whitelist": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Updated successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "402": { - "summary": "The Splunk license in use has disabled this feature." - }, - "403": { - "summary": "Insufficient permissions to edit monitored input." - }, - "404": { - "summary": "Monitored input does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - }, - "503": { - "summary": "This feature has been disabled in Splunk configuration files." 
- } - }, - "summary": "Update properties of the named monitor input.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - } - } - }, - "data/inputs/monitor/{name}/members": { - "methods": { - "GET": { - "config": "inputs", - "params": { - "count": { - "datatype": "Number", - "default": "30", - "required": "false", - "summary": "Indicates the maximum number of entries to return. To return all entries, specify 0.", - "validation": "" - }, - "offset": { - "datatype": "Number", - "default": "0", - "required": "false", - "summary": "Index for first item to return.", - "validation": "" - }, - "search": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Search expression to filter the response. The response matches field values against the search expression. For example:\n\nsearch=foo matches any object that has \"foo\" as a substring in a field.\nsearch=field_name%3Dfield_value restricts the match to a single field. URI-encoding is required in this example.", - "validation": "" - }, - "sort_dir": { - "datatype": "Enum", - "default": "asc", - "required": "false", - "summary": "Valid values: (asc | desc)\n\nIndicates whether to sort returned entries in ascending or descending order.", - "validation": "" - }, - "sort_key": { - "datatype": "String", - "default": "name", - "required": "false", - "summary": "Field to use for sorting.", - "validation": "" - }, - "sort_mode": { - "datatype": "Enum", - "default": "auto", - "required": "false", - "summary": "Valid values: (auto | alpha | alpha_case | num)\n\nIndicates the collating sequence for sorting the returned entries.\nauto: If all values of the field are numbers, collate numerically. Otherwise, collate alphabetically.\nalpha: Collate alphabetically.\nalpha_case: Collate alphabetically, case-sensitive.\nnum: Collate numerically.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view monitored input's files." - }, - "404": { - "summary": "Monitor input does not exist or does not have any members." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Lists all files monitored under the named monitor input.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - } - } - }, - "data/inputs/oneshot": { - "methods": { - "GET": { - "config": "", - "params": { - "count": { - "datatype": "Number", - "default": "30", - "required": "false", - "summary": "Indicates the maximum number of entries to return. To return all entries, specify 0.", - "validation": "" - }, - "offset": { - "datatype": "Number", - "default": "0", - "required": "false", - "summary": "Index for first item to return.", - "validation": "" - }, - "search": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Search expression to filter the response. The response matches field values against the search expression. For example:\n\nsearch=foo matches any object that has \"foo\" as a substring in a field.\nsearch=field_name%3Dfield_value restricts the match to a single field. 
URI-encoding is required in this example.", - "validation": "" - }, - "sort_dir": { - "datatype": "Enum", - "default": "asc", - "required": "false", - "summary": "Valid values: (asc | desc)\n\nIndicates whether to sort returned entries in ascending or descending order.", - "validation": "" - }, - "sort_key": { - "datatype": "String", - "default": "name", - "required": "false", - "summary": "Field to use for sorting.", - "validation": "" - }, - "sort_mode": { - "datatype": "Enum", - "default": "auto", - "required": "false", - "summary": "Valid values: (auto | alpha | alpha_case | num)\n\nIndicates the collating sequence for sorting the returned entries.\nauto: If all values of the field are numbers, collate numerically. Otherwise, collate alphabetically.\nalpha: Collate alphabetically.\nalpha_case: Collate alphabetically, case-sensitive.\nnum: Collate numerically.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view inputs." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Enumerates in-progress oneshot inputs. As soon as an input is complete, it is removed from this list.", - "urlParams": {} - }, - "POST": { - "config": "", - "params": { - "host": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "The value of the \"host\" field to be applied to data from this file.", - "validation": "" - }, - "host_regex": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "A regex to be used to extract a \"host\" field from the path.\n\nIf the path matches this regular expression, the captured value is used to populate the host field for events from this data input. The regular expression must have one capture group.", - "validation": "" - }, - "host_segment": { - "datatype": "Number", - "default": "", - "required": "false", - "summary": "Use the specified slash-separated segment of the path as the host field value.", - "validation": "" - }, - "index": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "The destination index for data processed from this file.", - "validation": "" - }, - "name": { - "datatype": "String", - "default": "", - "required": "true", - "summary": "The path to the file to be indexed. The file must be locally accessible by the server.", - "validation": "" - }, - "rename-source": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "The value of the \"source\" field to be applied to data from this file.", - "validation": "" - }, - "sourcetype": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "The value of the \"sourcetype\" field to be applied to data from this file.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "201": { - "summary": "Created successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "402": { - "summary": "The Splunk license in use has disabled this feature."
- }, - "403": { - "summary": "Insufficient permissions to create input." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - }, - "503": { - "summary": "This feature has been disabled in Splunk configuration files." - } - }, - "summary": "Queues a file for immediate indexing by the file input subsystem. The file must be locally accessible from the server.\n\nThis endpoint can handle any single file: plain, compressed or archive. The file is indexed in full, regardless of whether it has been indexed before.", - "urlParams": {} - } - }, - "summary": "Provides access to oneshot inputs." - }, - "data/inputs/oneshot/{name}": { - "methods": { - "GET": { - "config": "", - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view input." - }, - "404": { - "summary": "Input does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Finds information about a single in-flight one shot input. This is a subset of the information in the full enumeration.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - } - } - }, - "data/inputs/registry": { - "methods": { - "GET": { - "config": "regmon-filters", - "params": { - "count": { - "datatype": "Number", - "default": "30", - "required": "false", - "summary": "Indicates the maximum number of entries to return. To return all entries, specify 0.", - "validation": "" - }, - "offset": { - "datatype": "Number", - "default": "0", - "required": "false", - "summary": "Index for first item to return.", - "validation": "" - }, - "search": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Boolean predicate to filter results.", - "validation": "" - }, - "sort_dir": { - "datatype": "Enum", - "default": "asc", - "required": "false", - "summary": "Valid values: (asc | desc)\n\nIndicates whether to sort the entries returned in ascending or descending order.", - "validation": "" - }, - "sort_key": { - "datatype": "String", - "default": "name", - "required": "false", - "summary": "Field to sort by.", - "validation": "" - }, - "sort_mode": { - "datatype": "Enum", - "default": "auto", - "required": "false", - "summary": "Valid values: (auto | alpha | alpha_case | num)\n\nIndicates the collating sequence for sorting the returned entries.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view registry monitoring configuration." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." 
- } - }, - "summary": "Gets current registry monitoring configuration.", - "urlParams": {} - }, - "POST": { - "config": "regmon-filters", - "params": { - "baseline": { - "datatype": "Number", - "default": "0", - "required": "true", - "summary": "Specifies whether or not to establish a baseline value for the registry keys. 1 means yes, 0 no.", - "validation": "" - }, - "disabled": { - "datatype": "Boolean", - "default": "1", - "required": "false", - "summary": "Indicates whether the monitoring is disabled.", - "validation": "" - }, - "hive": { - "datatype": "String", - "default": "", - "required": "true", - "summary": "Specifies the registry hive under which to monitor for changes.", - "validation": "" - }, - "index": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "The index in which to store the gathered data.", - "validation": "" - }, - "monitorSubnodes": { - "datatype": "Number", - "default": "", - "required": "false", - "summary": "If set to '1', will monitor all sub-nodes under a given hive.", - "validation": "" - }, - "name": { - "datatype": "String", - "default": "", - "required": "true", - "summary": "Name of the configuration stanza.", - "validation": "" - }, - "proc": { - "datatype": "String", - "default": "", - "required": "true", - "summary": "Specifies a regex. If specified, will only collected changes if a process name matches that regex.", - "validation": "" - }, - "type": { - "datatype": "String", - "default": "", - "required": "true", - "summary": "A regular expression that specifies the type(s) of Registry event(s) that you want to monitor.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "201": { - "summary": "Created successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "402": { - "summary": "The Splunk license in use has disabled this feature." - }, - "403": { - "summary": "Insufficient permissions to create registry monitoring stanza." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - }, - "503": { - "summary": "This feature has been disabled in Splunk configuration files." - } - }, - "summary": "Creates new or modifies existing registry monitoring settings.", - "urlParams": {} - } - }, - "summary": "Provides access to Windows registry monitoring input." - }, - "data/inputs/registry/{name}": { - "methods": { - "DELETE": { - "config": "regmon-filters", - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Deleted successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to delete registry configuration stanza." - }, - "404": { - "summary": "Registry monitoring configuration stanza does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." 
- } - }, - "summary": "Deletes registry monitoring configuration stanza.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - }, - "GET": { - "config": "regmon-filters", - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view registry monitoring configuration stanza." - }, - "404": { - "summary": "Registry monitoring stanza does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Gets current registry monitoring configuration stanza.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - }, - "POST": { - "config": "regmon-filters", - "params": { - "baseline": { - "datatype": "INHERITED", - "default": "0", - "required": "true", - "summary": "INHERITED", - "validation": "" - }, - "disabled": { - "datatype": "INHERITED", - "default": "1", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "hive": { - "datatype": "INHERITED", - "default": "", - "required": "true", - "summary": "INHERITED", - "validation": "" - }, - "index": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "monitorSubnodes": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "proc": { - "datatype": "INHERITED", - "default": "", - "required": "true", - "summary": "INHERITED", - "validation": "" - }, - "type": { - "datatype": "INHERITED", - "default": "", - "required": "true", - "summary": "INHERITED", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Updated successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "402": { - "summary": "The Splunk license in use has disabled this feature." - }, - "403": { - "summary": "Insufficient permissions to edit registry monitoring stanza." - }, - "404": { - "summary": "Registry monitoring stanza does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - }, - "503": { - "summary": "This feature has been disabled in Splunk configuration files." - } - }, - "summary": "Modifies given registry monitoring stanza.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - } - } - }, - "data/inputs/script": { - "methods": { - "GET": { - "config": "inputs", - "params": { - "count": { - "datatype": "Number", - "default": "30", - "required": "false", - "summary": "Indicates the maximum number of entries to return. To return all entries, specify 0.", - "validation": "" - }, - "offset": { - "datatype": "Number", - "default": "0", - "required": "false", - "summary": "Index for first item to return.", - "validation": "" - }, - "search": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Search expression to filter the response. 
The response matches field values against the search expression. For example:\n\nsearch=foo matches any object that has \"foo\" as a substring in a field.\nsearch=field_name%3Dfield_value restricts the match to a single field. URI-encoding is required in this example.", - "validation": "" - }, - "sort_dir": { - "datatype": "Enum", - "default": "asc", - "required": "false", - "summary": "Valid values: (asc | desc)\n\nIndicates whether to sort returned entries in ascending or descending order.", - "validation": "" - }, - "sort_key": { - "datatype": "String", - "default": "name", - "required": "false", - "summary": "Field to use for sorting.", - "validation": "" - }, - "sort_mode": { - "datatype": "Enum", - "default": "auto", - "required": "false", - "summary": "Valid values: (auto | alpha | alpha_case | num)\n\nIndicates the collating sequence for sorting the returned entries.\nauto: If all values of the field are numbers, collate numerically. Otherwise, collate alphabetically.\nalpha: Collate alphabetically.\nalpha_case: Collate alphabetically, case-sensitive.\nnum: Collate numerically.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view script." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Gets the configuration settings for scripted inputs.", - "urlParams": {} - }, - "POST": { - "config": "inputs", - "params": { - "disabled": { - "datatype": "Boolean", - "default": "", - "required": "false", - "summary": "Specifies whether the input script is disabled.", - "validation": "" - }, - "host": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Sets the host for events from this input. Defaults to whatever host sent the event.", - "validation": "" - }, - "index": { - "datatype": "String", - "default": "default", - "required": "false", - "summary": "Sets the index for events from this input. Defaults to the main index.", - "validation": "is_index(index)" - }, - "interval": { - "datatype": "Number", - "default": "60", - "required": "true", - "summary": "Specify an integer or cron schedule. This parameter specifies how often to execute the specified script, in seconds or a valid cron schedule. If you specify a cron schedule, the script is not executed on start-up.", - "validation": "isint(interval)OR is_cron(interval)" - }, - "name": { - "datatype": "String", - "default": "", - "required": "true", - "summary": "Specify the name of the scripted input.", - "validation": "" - }, - "passAuth": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "User to run the script as.\n\nIf you provide a username, Splunk generates an auth token for that user and passes it to the script.", - "validation": "" - }, - "rename-source": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Specify a new name for the source field for the script.", - "validation": "" - }, - "source": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Sets the source key/field for events from this input. 
Defaults to the input file path.\n\nSets the source key's initial value. The key is used during parsing/indexing, in particular to set the source field during indexing. It is also the source field used at search time. As a convenience, the chosen string is prepended with 'source::'.\n\nNote: Overriding the source key is generally not recommended. Typically, the input layer provides a more accurate string to aid in problem analysis and investigation, accurately recording the file from which the data was retrieved. Consider use of source types, tagging, and search wildcards before overriding this value.\n\n", - "validation": "" - }, - "sourcetype": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Sets the sourcetype key/field for events from this input. If unset, Splunk picks a source type based on various aspects of the data. As a convenience, the chosen string is prepended with 'sourcetype::'. There is no hard-coded default.\n\nSets the sourcetype key's initial value. The key is used during parsing/indexing, in particular to set the source type field during indexing. It is also the source type field used at search time.\n\nPrimarily used to explicitly declare the source type for this data, as opposed to allowing it to be determined via automated methods. This is typically important both for searchability and for applying the relevant configuration for this type of data during parsing and indexing.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "201": { - "summary": "Created successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "402": { - "summary": "The Splunk license in use has disabled this feature." - }, - "403": { - "summary": "Insufficient permissions to create script." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - }, - "503": { - "summary": "This feature has been disabled in Splunk configuration files." - } - }, - "summary": "Configures settings for new scripted inputs.", - "urlParams": {} - } - }, - "summary": "Provides access to scripted inputs." - }, - "data/inputs/script/restart": { - "methods": { - "POST": { - "config": "inputs", - "params": { - "script": { - "datatype": "String", - "default": "", - "required": "true", - "summary": "Path to the script to be restarted. This path must match an already-configured existing scripted input.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Scripted input restarted successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "402": { - "summary": "The Splunk license in use has disabled this feature." - }, - "403": { - "summary": "Insufficient permissions to restart scripted input." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - }, - "503": { - "summary": "This feature has been disabled in Splunk configuration files."
- } - }, - "summary": "Causes a restart on a given scripted input.", - "urlParams": {} - } - }, - "summary": "Allows for restarting scripted inputs." - }, - "data/inputs/script/{name}": { - "methods": { - "DELETE": { - "config": "inputs", - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Deleted successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to delete script." - }, - "404": { - "summary": "Script does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Removes the scripted input specified by {name}.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - }, - "GET": { - "config": "inputs", - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view script." - }, - "404": { - "summary": "Script does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Returns the configuration settings for the scripted input specified by {name}.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - }, - "POST": { - "config": "inputs", - "params": { - "disabled": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "host": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "index": { - "datatype": "INHERITED", - "default": "default", - "required": "false", - "summary": "INHERITED", - "validation": "is_index(index)" - }, - "interval": { - "datatype": "INHERITED", - "default": "60", - "required": "false", - "summary": "INHERITED", - "validation": "isint(interval)OR is_cron(interval)" - }, - "passAuth": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "rename-source": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "source": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "sourcetype": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Updated successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "402": { - "summary": "The Splunk license in use has disabled this feature." - }, - "403": { - "summary": "Insufficient permissions to edit script." - }, - "404": { - "summary": "Script does not exist." 
- }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - }, - "503": { - "summary": "This feature has been disabled in Splunk configuration files." - } - }, - "summary": "Configures settings for scripted input specified by {name}.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - } - } - }, - "data/inputs/tcp/cooked": { - "methods": { - "GET": { - "config": "inputs", - "params": { - "count": { - "datatype": "Number", - "default": "30", - "required": "false", - "summary": "Indicates the maximum number of entries to return. To return all entries, specify 0.", - "validation": "" - }, - "offset": { - "datatype": "Number", - "default": "0", - "required": "false", - "summary": "Index for first item to return.", - "validation": "" - }, - "search": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Search expression to filter the response. The response matches field values against the search expression. For example:\n\nsearch=foo matches any object that has \"foo\" as a substring in a field.\nsearch=field_name%3Dfield_value restricts the match to a single field. URI-encoding is required in this example.", - "validation": "" - }, - "sort_dir": { - "datatype": "Enum", - "default": "asc", - "required": "false", - "summary": "Valid values: (asc | desc)\n\nIndicates whether to sort returned entries in ascending or descending order.", - "validation": "" - }, - "sort_key": { - "datatype": "String", - "default": "name", - "required": "false", - "summary": "Field to use for sorting.", - "validation": "" - }, - "sort_mode": { - "datatype": "Enum", - "default": "auto", - "required": "false", - "summary": "Valid values: (auto | alpha | alpha_case | num)\n\nIndicates the collating sequence for sorting the returned entries.\nauto: If all values of the field are numbers, collate numerically. Otherwise, collate alphabetically.\nalpha: Collate alphabetically.\nalpha_case: Collate alphabetically, case-sensitive.\nnum: Collate numerically.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view inputs." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Returns information about all cooked TCP inputs.", - "urlParams": {} - }, - "POST": { - "config": "inputs", - "params": { - "SSL": { - "datatype": "Boolean", - "default": "", - "required": "false", - "summary": "If SSL is not already configured, error is returned", - "validation": "" - }, - "connection_host": { - "datatype": "Enum", - "default": "", - "required": "false", - "summary": "Valid values: (ip | dns | none)\n\nSet the host for the remote server that is sending data.\n\nip sets the host to the IP address of the remote server sending data.\n\ndns sets the host to the reverse DNS entry for the IP address of the remote server sending data. 
\n\nnone leaves the host as specified in inputs.conf, which is typically the Splunk system hostname.\n\nDefault value is ip.", - "validation": "" - }, - "disabled": { - "datatype": "Boolean", - "default": "", - "required": "false", - "summary": "Indicates whether the input is disabled.", - "validation": "" - }, - "host": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "The default value to fill in for events lacking a host value.", - "validation": "" - }, - "port": { - "datatype": "Number", - "default": "", - "required": "true", - "summary": "The port number of this input.", - "validation": "" - }, - "restrictToHost": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Restrict incoming connections on this port to the host specified here.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "201": { - "summary": "Created successfully." - }, - "400": { - "summary": "Some arguments were invalid" - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "402": { - "summary": "The Splunk license in use has disabled this feature." - }, - "403": { - "summary": "Insufficient permissions to create input." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "There was an error; see body contents for messages" - }, - "503": { - "summary": "This feature has been disabled in Splunk configuration files." - } - }, - "summary": "Creates a new container for managing cooked data.", - "urlParams": {} - } - }, - "summary": "Provides access to tcp inputs from forwarders.\n\nForwarders can transmit three types of data: raw, unparsed, or parsed. Cooked data refers to parsed and unparsed formats." - }, - "data/inputs/tcp/cooked/{name}": { - "methods": { - "DELETE": { - "config": "inputs", - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Deleted successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to delete input." - }, - "404": { - "summary": "Input does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Removes the cooked TCP inputs for port or host:port specified by {name}", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - }, - "GET": { - "config": "inputs", - "request": "", - "response": "", - "returns": { - "200": { - "summary": "OK" - }, - "400": { - "summary": "''TO DO: provide the rest of the status codes''" - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view input." - }, - "404": { - "summary": "Input does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." 
- } - }, - "summary": "Returns information for the cooked TCP input specified by {name}.\n\nIf port is restricted to a host, name should be URI-encoded host:port.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - }, - "POST": { - "config": "inputs", - "params": { - "SSL": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "connection_host": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "disabled": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "host": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "restrictToHost": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Updated successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "402": { - "summary": "The Splunk license in use has disabled this feature." - }, - "403": { - "summary": "Insufficient permissions to edit input." - }, - "404": { - "summary": "Input does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - }, - "503": { - "summary": "This feature has been disabled in Splunk configuration files." - } - }, - "summary": "Updates the container for managing cooked data.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - } - } - }, - "data/inputs/tcp/cooked/{name}/connections": { - "methods": { - "GET": { - "config": "inputs", - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed connections successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view input's connections." - }, - "404": { - "summary": "TCP input does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Retrieves a list of active connections to the named port.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - } - } - }, - "data/inputs/tcp/raw": { - "methods": { - "GET": { - "config": "inputs", - "params": { - "count": { - "datatype": "Number", - "default": "30", - "required": "false", - "summary": "Indicates the maximum number of entries to return. To return all entries, specify 0.", - "validation": "" - }, - "offset": { - "datatype": "Number", - "default": "0", - "required": "false", - "summary": "Index for first item to return.", - "validation": "" - }, - "search": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Search expression to filter the response. The response matches field values against the search expression. 
For example:\n\nsearch=foo matches any object that has \"foo\" as a substring in a field.\nsearch=field_name%3Dfield_value restricts the match to a single field. URI-encoding is required in this example.", - "validation": "" - }, - "sort_dir": { - "datatype": "Enum", - "default": "asc", - "required": "false", - "summary": "Valid values: (asc | desc)\n\nIndicates whether to sort returned entries in ascending or descending order.", - "validation": "" - }, - "sort_key": { - "datatype": "String", - "default": "name", - "required": "false", - "summary": "Field to use for sorting.", - "validation": "" - }, - "sort_mode": { - "datatype": "Enum", - "default": "auto", - "required": "false", - "summary": "Valid values: (auto | alpha | alpha_case | num)\n\nIndicates the collating sequence for sorting the returned entries.\nauto: If all values of the field are numbers, collate numerically. Otherwise, collate alphabetically.\nalpha: Collate alphabetically.\nalpha_case: Collate alphabetically, case-sensitive.\nnum: Collate numerically.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view input." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Returns information about all raw TCP inputs.", - "urlParams": {} - }, - "POST": { - "config": "inputs", - "params": { - "SSL": { - "datatype": "Boolean", - "default": "", - "required": "false", - "summary": "", - "validation": "" - }, - "connection_host": { - "datatype": "Enum", - "default": "", - "required": "false", - "summary": "Valid values: (ip | dns | none)\n\nSet the host for the remote server that is sending data.\n\nip sets the host to the IP address of the remote server sending data.\n\ndns sets the host to the reverse DNS entry for the IP address of the remote server sending data. \n\nnone leaves the host as specified in inputs.conf, which is typically the Splunk system hostname.\n\nDefault value is ip.", - "validation": "" - }, - "disabled": { - "datatype": "Boolean", - "default": "", - "required": "false", - "summary": "Indicates whether the inputs are disabled.", - "validation": "" - }, - "host": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "The host from which the indexer gets data.", - "validation": "" - }, - "index": { - "datatype": "String", - "default": "default", - "required": "false", - "summary": "The index in which to store all generated events.", - "validation": "" - }, - "name": { - "datatype": "String", - "default": "", - "required": "true", - "summary": "The input port on which Splunk receives raw data.", - "validation": "" - }, - "queue": { - "datatype": "Enum", - "default": "", - "required": "false", - "summary": "Valid values: (parsingQueue | indexQueue)\n\nSpecifies where the input processor should deposit the events it reads. Defaults to parsingQueue.\n\nSet queue to parsingQueue to apply props.conf and other parsing rules to your data. 
For more information about props.conf and rules for timestamping and linebreaking, refer to props.conf and the online documentation at [[Documentation:Splunk:Data:Editinputs.conf Edit inputs.conf]]\n\nSet queue to indexQueue to send your data directly into the index.", - "validation": "" - }, - "rawTcpDoneTimeout": { - "datatype": "Number", - "default": "", - "required": "false", - "summary": "Specifies in seconds the timeout value for adding a Done-key. Default value is 10 seconds.\n\nIf a connection over the port specified by name remains idle after receiving data for specified number of seconds, it adds a Done-key. This implies the last event has been completely received.", - "validation": "" - }, - "restrictToHost": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Allows for restricting this input to only accept data from the host specified here.", - "validation": "" - }, - "source": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Sets the source key/field for events from this input. Defaults to the input file path.\n\nSets the source key's initial value. The key is used during parsing/indexing, in particular to set the source field during indexing. It is also the source field used at search time. As a convenience, the chosen string is prepended with 'source::'.\n\n'''Note:''' Overriding the source key is generally not recommended. Typically, the input layer provides a more accurate string to aid in problem analysis and investigation, accurately recording the file from which the data was retrieved. Consider use of source types, tagging, and search wildcards before overriding this value.", - "validation": "" - }, - "sourcetype": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Set the source type for events from this input.\n\n\"sourcetype=\" is automatically prepended to .\n\nDefaults to audittrail (if signedaudit=true) or fschange (if signedaudit=false).", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "201": { - "summary": "Created successfully." - }, - "400": { - "summary": "Some arguments were invalid" - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "402": { - "summary": "The Splunk license in use has disabled this feature." - }, - "403": { - "summary": "Insufficient permissions to create input." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "There was an error; see body contents for messages" - }, - "503": { - "summary": "This feature has been disabled in Splunk configuration files." - } - }, - "summary": "Creates a new data input for accepting raw TCP data.", - "urlParams": {} - } - }, - "summary": "Container for managing raw tcp inputs from forwarders.\n\nForwarders can transmit three types of data: raw, unparsed, or parsed. Cooked data refers to parsed and unparsed formats." - }, - "data/inputs/tcp/raw/{name}": { - "methods": { - "DELETE": { - "config": "inputs", - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Deleted successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to delete input." - }, - "404": { - "summary": "Input does not exist." 
- }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Removes the raw inputs for port or host:port specified by {name}", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - }, - "GET": { - "config": "inputs", - "request": "", - "response": "", - "returns": { - "200": { - "summary": "OK" - }, - "400": { - "summary": "''TO DO: provide the rest of the status codes''" - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view input." - }, - "404": { - "summary": "Input does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Returns information about raw TCP input port {name}.\n\nIf port is restricted to a host, name should be URI-encoded host:port.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - }, - "POST": { - "config": "inputs", - "params": { - "SSL": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "connection_host": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "disabled": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "host": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "index": { - "datatype": "INHERITED", - "default": "default", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "queue": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "rawTcpDoneTimeout": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "restrictToHost": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "source": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "sourcetype": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Updated successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "402": { - "summary": "The Splunk license in use has disabled this feature." - }, - "403": { - "summary": "Insufficient permissions to edit input." - }, - "404": { - "summary": "Input does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - }, - "503": { - "summary": "This feature has been disabled in Splunk configuration files." 
- } - }, - "summary": "Updates the container for managing raw data.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - } - } - }, - "data/inputs/tcp/raw/{name}/connections": { - "methods": { - "GET": { - "config": "inputs", - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed connections successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view input's connections." - }, - "404": { - "summary": "TCP input does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "View all connections to the named data input.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - } - } - }, - "data/inputs/tcp/ssl": { - "methods": { - "GET": { - "config": "", - "params": { - "count": { - "datatype": "Number", - "default": "30", - "required": "false", - "summary": "Indicates the maximum number of entries to return. To return all entries, specify 0.", - "validation": "" - }, - "offset": { - "datatype": "Number", - "default": "0", - "required": "false", - "summary": "Index for first item to return.", - "validation": "" - }, - "search": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Search expression to filter the response. The response matches field values against the search expression. For example:\n\nsearch=foo matches any object that has \"foo\" as a substring in a field.\nsearch=field_name%3Dfield_value restricts the match to a single field. URI-encoding is required in this example.", - "validation": "" - }, - "sort_dir": { - "datatype": "Enum", - "default": "asc", - "required": "false", - "summary": "Valid values: (asc | desc)\n\nIndicates whether to sort returned entries in ascending or descending order.", - "validation": "" - }, - "sort_key": { - "datatype": "String", - "default": "name", - "required": "false", - "summary": "Field to use for sorting.", - "validation": "" - }, - "sort_mode": { - "datatype": "Enum", - "default": "auto", - "required": "false", - "summary": "Valid values: (auto | alpha | alpha_case | num)\n\nIndicates the collating sequence for sorting the returned entries.\nauto: If all values of the field are numbers, collate numerically. Otherwise, collate alphabetically.\nalpha: Collate alphabetically.\nalpha_case: Collate alphabetically, case-sensitive.\nnum: Collate numerically.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view inputs." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Returns SSL configuration. There is only one SSL configuration for all input ports.", - "urlParams": {} - } - }, - "summary": "Provides access to the SSL configuration of a Splunk server." 
- }, - "data/inputs/tcp/ssl/{name}": { - "methods": { - "GET": { - "config": "", - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view input." - }, - "404": { - "summary": "Input does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Returns the SSL configuration for the host {name}.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - }, - "POST": { - "config": "", - "params": { - "disabled": { - "datatype": "Boolean", - "default": "", - "required": "false", - "summary": "Indicates whether the inputs are disabled.", - "validation": "" - }, - "password": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Server certificate password, if any.", - "validation": "" - }, - "requireClientCert": { - "datatype": "Boolean", - "default": "", - "required": "false", - "summary": "Determines whether a client must authenticate.", - "validation": "" - }, - "rootCA": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Certificate authority list (root file)", - "validation": "" - }, - "serverCert": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Full path to the server certificate.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Updated successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "402": { - "summary": "The Splunk license in use has disabled this feature." - }, - "403": { - "summary": "Insufficient permissions to edit input." - }, - "404": { - "summary": "Input does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - }, - "503": { - "summary": "This feature has been disabled in Splunk configuration files." - } - }, - "summary": "Configures SSL attributes for the host {name}.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - } - } - }, - "data/inputs/udp": { - "methods": { - "GET": { - "config": "inputs", - "params": { - "count": { - "datatype": "Number", - "default": "30", - "required": "false", - "summary": "Indicates the maximum number of entries to return. To return all entries, specify 0.", - "validation": "" - }, - "offset": { - "datatype": "Number", - "default": "0", - "required": "false", - "summary": "Index for first item to return.", - "validation": "" - }, - "search": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Search expression to filter the response. The response matches field values against the search expression. For example:\n\nsearch=foo matches any object that has \"foo\" as a substring in a field.\nsearch=field_name%3Dfield_value restricts the match to a single field. 
URI-encoding is required in this example.", - "validation": "" - }, - "sort_dir": { - "datatype": "Enum", - "default": "asc", - "required": "false", - "summary": "Valid values: (asc | desc)\n\nIndicates whether to sort returned entries in ascending or descending order.", - "validation": "" - }, - "sort_key": { - "datatype": "String", - "default": "name", - "required": "false", - "summary": "Field to use for sorting.", - "validation": "" - }, - "sort_mode": { - "datatype": "Enum", - "default": "auto", - "required": "false", - "summary": "Valid values: (auto | alpha | alpha_case | num)\n\nIndicates the collating sequence for sorting the returned entries.\nauto: If all values of the field are numbers, collate numerically. Otherwise, collate alphabetically.\nalpha: Collate alphabetically.\nalpha_case: Collate alphabetically, case-sensitive.\nnum: Collate numerically.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view inputs." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "List enabled and disabled UDP data inputs.", - "urlParams": {} - }, - "POST": { - "config": "inputs", - "params": { - "connection_host": { - "datatype": "Enum", - "default": "", - "required": "false", - "summary": "Valid values: (ip | dns | none)\n\nSet the host for the remote server that is sending data.\n\nip sets the host to the IP address of the remote server sending data.\n\ndns sets the host to the reverse DNS entry for the IP address of the remote server sending data. \n\nnone leaves the host as specified in inputs.conf, which is typically the Splunk system hostname.\n\nDefault value is ip.", - "validation": "" - }, - "host": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "The value to populate in the host field for incoming events. \n\nThis is used during parsing/indexing, in particular to set the host field. It is also the host field used at search time.", - "validation": "" - }, - "index": { - "datatype": "String", - "default": "default", - "required": "false", - "summary": "Which index events from this input should be stored in.", - "validation": "" - }, - "name": { - "datatype": "String", - "default": "", - "required": "true", - "summary": "The UDP port that this input should listen on.", - "validation": "is_avail_udp_port(name)" - }, - "no_appending_timestamp": { - "datatype": "Boolean", - "default": "", - "required": "false", - "summary": "If set to true, prevents Splunk from prepending a timestamp and hostname to incoming events.", - "validation": "" - }, - "no_priority_stripping": { - "datatype": "Boolean", - "default": "", - "required": "false", - "summary": "If set to true, Splunk will not remove the priority field from incoming syslog events.", - "validation": "" - }, - "queue": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Which queue events from this input should be sent to. 
Generally this does not need to be changed.", - "validation": "" - }, - "restrictToHost": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Restrict incoming connections on this port to the host specified here.\n\nIf this is not set, the value specified in [udp://:] in inputs.conf is used.", - "validation": "" - }, - "source": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "The value to populate in the source field for incoming events. The same source should not be used for multiple data inputs.", - "validation": "" - }, - "sourcetype": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "The value to populate in the sourcetype field for incoming events.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "201": { - "summary": "Created successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "402": { - "summary": "The Splunk license in use has disabled this feature." - }, - "403": { - "summary": "Insufficient permissions to create input." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - }, - "503": { - "summary": "This feature has been disabled in Splunk configuration files." - } - }, - "summary": "Create a new UDP data input.", - "urlParams": {} - } - }, - "summary": "Provides access to UDP data inputs." - }, - "data/inputs/udp/{name}": { - "methods": { - "DELETE": { - "config": "inputs", - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Deleted successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to delete input." - }, - "404": { - "summary": "Input does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Disable the named UDP data input and remove it from the configuration.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - }, - "GET": { - "config": "inputs", - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view input configuration." - }, - "404": { - "summary": "Input does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." 
- } - }, - "summary": "List the properties of a single UDP data input port or host:port {name}.\nIf port is restricted to a host, name should be URI-encoded host:port.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - }, - "POST": { - "config": "inputs", - "params": { - "connection_host": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "host": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "index": { - "datatype": "INHERITED", - "default": "default", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "no_appending_timestamp": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "no_priority_stripping": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "queue": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "restrictToHost": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "source": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "sourcetype": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Updated successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "402": { - "summary": "The Splunk license in use has disabled this feature." - }, - "403": { - "summary": "Insufficient permissions to edit input." - }, - "404": { - "summary": "Input does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - }, - "503": { - "summary": "This feature has been disabled in Splunk configuration files." - } - }, - "summary": "Edit properties of the named UDP data input.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - } - } - }, - "data/inputs/udp/{name}/connections": { - "methods": { - "GET": { - "config": "inputs", - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed connections successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view input connections." - }, - "404": { - "summary": "UDP input does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." 
- } - }, - "summary": "Lists connections to the named UDP input.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - } - } - }, - "data/inputs/win-event-log-collections": { - "methods": { - "GET": { - "config": "inputs", - "params": { - "count": { - "datatype": "Number", - "default": "30", - "required": "false", - "summary": "Indicates the maximum number of entries to return. To return all entries, specify 0.", - "validation": "" - }, - "lookup_host": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "For internal use. Used by the UI when editing the initial host from which we gather event log data.", - "validation": "" - }, - "offset": { - "datatype": "Number", - "default": "0", - "required": "false", - "summary": "Index for first item to return.", - "validation": "" - }, - "search": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Boolean predicate to filter results.", - "validation": "" - }, - "sort_dir": { - "datatype": "Enum", - "default": "asc", - "required": "false", - "summary": "Valid values: (asc | desc)\n\nIndicates whether to sort the entries returned in ascending or descending order.", - "validation": "" - }, - "sort_key": { - "datatype": "String", - "default": "name", - "required": "false", - "summary": "Field to sort by.", - "validation": "" - }, - "sort_mode": { - "datatype": "Enum", - "default": "auto", - "required": "false", - "summary": "Valid values: (auto | alpha | alpha_case | num)\n\nIndicates the collating sequence for sorting the returned entries.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view event log collections." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Retrieves a list of configured event log collections.", - "urlParams": {} - }, - "POST": { - "config": "inputs", - "params": { - "hosts": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "A comma-separated list of additional hosts to be used for monitoring. The first host should be specified with \"lookup_host\", and the additional ones using this parameter.", - "validation": "" - }, - "index": { - "datatype": "String", - "default": "default", - "required": "false", - "summary": "The index in which to store the gathered data.", - "validation": "" - }, - "logs": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "A comma-separated list of event log names to gather data from.", - "validation": "" - }, - "lookup_host": { - "datatype": "String", - "default": "", - "required": "true", - "summary": "This is a host from which we will monitor log events. To specify additional hosts to be monitored via WMI, use the \"hosts\" parameter.", - "validation": "" - }, - "name": { - "datatype": "String", - "default": "", - "required": "true", - "summary": "This is the name of the collection. This name will appear in configuration file, as well as the source and the sourcetype of the indexed data. 
If the value is \"localhost\", it will use native event log collection; otherwise, it will use WMI.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "201": { - "summary": "Created successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "402": { - "summary": "The Splunk license in use has disabled this feature." - }, - "403": { - "summary": "Insufficient permissions to create event log collections." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - }, - "503": { - "summary": "This feature has been disabled in Splunk configuration files." - } - }, - "summary": "Creates or modifies existing event log collection settings. You can configure both native and WMI collection with this endpoint.", - "urlParams": {} - } - }, - "summary": "Provides access to all configured event log collections." - }, - "data/inputs/win-event-log-collections/{name}": { - "methods": { - "DELETE": { - "config": "inputs", - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Deleted successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to delete event log collections." - }, - "404": { - "summary": "Event log collection does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Deletes a given event log collection.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - }, - "GET": { - "config": "wmi", - "params": { - "lookup_host": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "For internal use. Used by the UI when editing the initial host from which we gather event log data.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view event log collections." - }, - "404": { - "summary": "Event log collection does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." 
- } - }, - "summary": "Gets the configuration settings for a given event log collection.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - }, - "POST": { - "config": "inputs", - "params": { - "hosts": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "index": { - "datatype": "INHERITED", - "default": "default", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "logs": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "lookup_host": { - "datatype": "INHERITED", - "default": "", - "required": "true", - "summary": "INHERITED", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Updated successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "402": { - "summary": "The Splunk license in use has disabled this feature." - }, - "403": { - "summary": "Insufficient permissions to edit event log collections." - }, - "404": { - "summary": "Event log collection does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - }, - "503": { - "summary": "This feature has been disabled in Splunk configuration files." - } - }, - "summary": "Modifies existing event log collection.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - } - } - }, - "data/inputs/win-perfmon": { - "methods": { - "GET": { - "config": "perfmon", - "params": { - "count": { - "datatype": "Number", - "default": "30", - "required": "false", - "summary": "Indicates the maximum number of entries to return. To return all entries, specify 0.", - "validation": "" - }, - "offset": { - "datatype": "Number", - "default": "0", - "required": "false", - "summary": "Index for first item to return.", - "validation": "" - }, - "search": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Boolean predicate to filter results.", - "validation": "" - }, - "sort_dir": { - "datatype": "Enum", - "default": "asc", - "required": "false", - "summary": "Valid values: (asc | desc)\n\nIndicates whether to sort the entries returned in ascending or descending order.", - "validation": "" - }, - "sort_key": { - "datatype": "String", - "default": "name", - "required": "false", - "summary": "Field to sort by.", - "validation": "" - }, - "sort_mode": { - "datatype": "Enum", - "default": "auto", - "required": "false", - "summary": "Valid values: (auto | alpha | alpha_case | num)\n\nIndicates the collating sequence for sorting the returned entries.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view performance monitoring configuration." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." 
- } - }, - "summary": "Gets current performance monitoring configuration.", - "urlParams": {} - }, - "POST": { - "config": "perfmon", - "params": { - "counters": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "A comma-separated list of all counters to monitor. A '*' is equivalent to all counters.", - "validation": "" - }, - "disabled": { - "datatype": "Boolean", - "default": "", - "required": "false", - "summary": "Disables a given monitoring stanza.", - "validation": "" - }, - "index": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "The index in which to store the gathered data.", - "validation": "" - }, - "instances": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Comma-separated list of counter instances. A '*' is equivalent to all instances.", - "validation": "" - }, - "interval": { - "datatype": "Number", - "default": "", - "required": "true", - "summary": "How frequently to poll the performance counters.", - "validation": "" - }, - "name": { - "datatype": "String", - "default": "", - "required": "true", - "summary": "This is the name of the collection. This name will appear in configuration file, as well as the source and the sourcetype of the indexed data.", - "validation": "" - }, - "object": { - "datatype": "String", - "default": "", - "required": "true", - "summary": "A valid performance monitor object (for example, 'Process,' 'Server,' 'PhysicalDisk.')", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "201": { - "summary": "Created successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "402": { - "summary": "The Splunk license in use has disabled this feature." - }, - "403": { - "summary": "Insufficient permissions to create monitoring stanza." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - }, - "503": { - "summary": "This feature has been disabled in Splunk configuration files." - } - }, - "summary": "Creates new or modifies existing performance monitoring collection settings.", - "urlParams": {} - } - }, - "summary": "Provides access to performance monitoring configuration. This input allows you to poll Windows performance monitor counters." - }, - "data/inputs/win-perfmon/{name}": { - "methods": { - "DELETE": { - "config": "perfmon", - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Deleted successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to delete monitoring stanza." - }, - "404": { - "summary": "Monitoring stanza does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Deletes a given monitoring stanza.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - }, - "GET": { - "config": "perfmon", - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." 
- }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view configuration settings." - }, - "404": { - "summary": "Performance stanza does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Gets settings for a given perfmon stanza.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - }, - "POST": { - "config": "perfmon", - "params": { - "counters": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "disabled": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "index": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "instances": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "interval": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "object": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Updated successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "402": { - "summary": "The Splunk license in use has disabled this feature." - }, - "403": { - "summary": "Insufficient permissions to edit monitoring stanza." - }, - "404": { - "summary": "Monitoring stanza does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - }, - "503": { - "summary": "This feature has been disabled in Splunk configuration files." - } - }, - "summary": "Modifies an existing monitoring stanza.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - } - } - }, - "data/inputs/win-wmi-collections": { - "methods": { - "GET": { - "config": "wmi", - "params": { - "count": { - "datatype": "Number", - "default": "30", - "required": "false", - "summary": "Indicates the maximum number of entries to return. 
To return all entries, specify 0.", - "validation": "" - }, - "offset": { - "datatype": "Number", - "default": "0", - "required": "false", - "summary": "Index for first item to return.", - "validation": "" - }, - "search": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Boolean predicate to filter results.", - "validation": "" - }, - "sort_dir": { - "datatype": "Enum", - "default": "asc", - "required": "false", - "summary": "Valid values: (asc | desc)\n\nIndicates whether to sort the entries returned in ascending or descending order.", - "validation": "" - }, - "sort_key": { - "datatype": "String", - "default": "name", - "required": "false", - "summary": "Field to sort by.", - "validation": "" - }, - "sort_mode": { - "datatype": "Enum", - "default": "auto", - "required": "false", - "summary": "Valid values: (auto | alpha | alpha_case | num)\n\nIndicates the collating sequence for sorting the returned entries.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view collections." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Provides access to all configured WMI collections.", - "urlParams": {} - }, - "POST": { - "config": "wmi", - "params": { - "classes": { - "datatype": "String", - "default": "", - "required": "true", - "summary": "A valid WMI class name.", - "validation": "" - }, - "disabled": { - "datatype": "Boolean", - "default": "", - "required": "false", - "summary": "Disables the given collection.", - "validation": "" - }, - "fields": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "A comma-separated list of all properties that you want to gather from the given class.", - "validation": "" - }, - "index": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "The index in which to store the gathered data.", - "validation": "" - }, - "instances": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Instances of a given class for which data is gathered.\n\nSpecify each instance as a separate argument to the POST operation.", - "validation": "" - }, - "interval": { - "datatype": "Number", - "default": "", - "required": "true", - "summary": "The interval at which the WMI provider(s) will be queried.", - "validation": "" - }, - "lookup_host": { - "datatype": "String", - "default": "", - "required": "true", - "summary": "This is the server from which we will be gathering WMI data. If you need to gather data from more than one machine, additional servers can be specified in the 'server' parameter.", - "validation": "" - }, - "name": { - "datatype": "String", - "default": "", - "required": "true", - "summary": "This is the name of the collection. This name will appear in configuration file, as well as the source and the sourcetype of the indexed data.", - "validation": "" - }, - "server": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "A comma-separated list of additional servers that you want to gather data from. 
Use this if you need to gather from more than a single machine. See also lookup_host parameter.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "201": { - "summary": "Created successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "402": { - "summary": "The Splunk license in use has disabled this feature." - }, - "403": { - "summary": "Insufficient permissions to create this collection." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - }, - "503": { - "summary": "This feature has been disabled in Splunk configuration files." - } - }, - "summary": "Creates or modifies existing WMI collection settings.", - "urlParams": {} - } - }, - "summary": "Provides access to all configured WMI collections." - }, - "data/inputs/win-wmi-collections/{name}": { - "methods": { - "DELETE": { - "config": "wmi", - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Deleted successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to delete a given collection." - }, - "404": { - "summary": "Given collection does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Deletes a given collection.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - }, - "GET": { - "config": "wmi", - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view WMI collections." - }, - "404": { - "summary": "Given collection does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." 
- } - }, - "summary": "Gets information about a single collection.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - }, - "POST": { - "config": "wmi", - "params": { - "classes": { - "datatype": "INHERITED", - "default": "", - "required": "true", - "summary": "INHERITED", - "validation": "" - }, - "disabled": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "fields": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "index": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "instances": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "interval": { - "datatype": "INHERITED", - "default": "", - "required": "true", - "summary": "INHERITED", - "validation": "" - }, - "lookup_host": { - "datatype": "INHERITED", - "default": "", - "required": "true", - "summary": "INHERITED", - "validation": "" - }, - "server": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Updated successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "402": { - "summary": "The Splunk license in use has disabled this feature." - }, - "403": { - "summary": "Insufficient permissions to edit collection." - }, - "404": { - "summary": "Collection does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - }, - "503": { - "summary": "This feature has been disabled in Splunk configuration files." - } - }, - "summary": "Modifies a given WMI collection.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - } - } - }, - "data/lookup-table-files": { - "methods": { - "GET": { - "config": "lookups", - "params": { - "count": { - "datatype": "Number", - "default": "30", - "required": "false", - "summary": "Indicates the maximum number of entries to return. To return all entries, specify 0.", - "validation": "" - }, - "offset": { - "datatype": "Number", - "default": "0", - "required": "false", - "summary": "Index for first item to return.", - "validation": "" - }, - "search": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Search expression to filter the response. The response matches field values against the search expression. For example:\n\nsearch=foo matches any object that has \"foo\" as a substring in a field.\nsearch=field_name%3Dfield_value restricts the match to a single field. 
URI-encoding is required in this example.", - "validation": "" - }, - "sort_dir": { - "datatype": "Enum", - "default": "asc", - "required": "false", - "summary": "Valid values: (asc | desc)\n\nIndicates whether to sort returned entries in ascending or descending order.", - "validation": "" - }, - "sort_key": { - "datatype": "String", - "default": "name", - "required": "false", - "summary": "Field to use for sorting.", - "validation": "" - }, - "sort_mode": { - "datatype": "Enum", - "default": "auto", - "required": "false", - "summary": "Valid values: (auto | alpha | alpha_case | num)\n\nIndicates the collating sequence for sorting the returned entries.\nauto: If all values of the field are numbers, collate numerically. Otherwise, collate alphabetically.\nalpha: Collate alphabetically.\nalpha_case: Collate alphabetically, case-sensitive.\nnum: Collate numerically.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view lookup-table file." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "List lookup table files.", - "urlParams": {} - }, - "POST": { - "config": "lookups", - "params": { - "eai:data": { - "datatype": "String", - "default": "", - "required": "true", - "summary": "Move a lookup table file from the given path into $SPLUNK_HOME. This path must have the lookup staging area as an ancestor.", - "validation": "" - }, - "name": { - "datatype": "String", - "default": "", - "required": "true", - "summary": "The lookup table filename.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "201": { - "summary": "Created successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "402": { - "summary": "The Splunk license in use has disabled this feature." - }, - "403": { - "summary": "Insufficient permissions to create lookup-table file." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - }, - "503": { - "summary": "This feature has been disabled in Splunk configuration files." - } - }, - "summary": "Create a lookup table file by moving a file from the upload staging area into $SPLUNK_HOME.", - "urlParams": {} - } - }, - "summary": "Provides access to lookup table files." - }, - "data/lookup-table-files/{name}": { - "methods": { - "DELETE": { - "config": "lookups", - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Deleted successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to delete look-up table file." - }, - "404": { - "summary": "Look-up table file does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." 
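As a quick illustration of the paging and filtering parameters that recur throughout these GET endpoints (`count`, `offset`, `search`, `sort_*`), here is a sketch that lists lookup table files, reusing the `service` handle from the WMI example above; the `search` value is hypothetical.

```python
# GET data/lookup-table-files; count=0 returns all entries, and the search
# expression filters on any field containing "csv" (per the spec above).
response = service.get("data/lookup-table-files", count=0, search="csv")
print(response.body.read())
```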
- }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Delete the named lookup table file.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - }, - "GET": { - "config": "lookups", - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view look-up table files." - }, - "404": { - "summary": "Look-up table file does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "List a single lookup table file.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - }, - "POST": { - "config": "lookups", - "params": { - "eai:data": { - "datatype": "INHERITED", - "default": "", - "required": "true", - "summary": "INHERITED", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Updated successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "402": { - "summary": "The Splunk license in use has disabled this feature." - }, - "403": { - "summary": "Insufficient permissions to edit look-up tble file." - }, - "404": { - "summary": "Look-up table file does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - }, - "503": { - "summary": "This feature has been disabled in Splunk configuration files." - } - }, - "summary": "Modify a lookup table file by replacing it with a file from the upload staging area.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - } - } - }, - "data/outputs/tcp/default": { - "methods": { - "GET": { - "config": "outputs", - "params": { - "count": { - "datatype": "Number", - "default": "30", - "required": "false", - "summary": "Indicates the maximum number of entries to return. To return all entries, specify 0.", - "validation": "" - }, - "offset": { - "datatype": "Number", - "default": "0", - "required": "false", - "summary": "Index for first item to return.", - "validation": "" - }, - "search": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Search expression to filter the response. The response matches field values against the search expression. For example:\n\nsearch=foo matches any object that has \"foo\" as a substring in a field.\nsearch=field_name%3Dfield_value restricts the match to a single field. 
URI-encoding is required in this example.", - "validation": "" - }, - "sort_dir": { - "datatype": "Enum", - "default": "asc", - "required": "false", - "summary": "Valid values: (asc | desc)\n\nIndicates whether to sort returned entries in ascending or descending order.", - "validation": "" - }, - "sort_key": { - "datatype": "String", - "default": "name", - "required": "false", - "summary": "Field to use for sorting.", - "validation": "" - }, - "sort_mode": { - "datatype": "Enum", - "default": "auto", - "required": "false", - "summary": "Valid values: (auto | alpha | alpha_case | num)\n\nIndicates the collating sequence for sorting the returned entries.\nauto: If all values of the field are numbers, collate numerically. Otherwise, collate alphabetically.\nalpha: Collate alphabetically.\nalpha_case: Collate alphabetically, case-sensitive.\nnum: Collate numerically.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view outputs." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Returns the current tcpout properties.", - "urlParams": {} - }, - "POST": { - "config": "outputs", - "params": { - "defaultGroup": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Comma-separated list of one or more target group names, specified later in [tcpout:] stanzas of outputs.conf.spec file.\n\nThe forwarder sends all data to the specified groups. If you don't want to forward data automatically, don't set this attribute. Can be overridden by an inputs.conf _TCP_ROUTING setting, which in turn can be overridden by a props.conf/transforms.conf modifier.\n\nStarting with 4.2, this attribute is no longer required.", - "validation": "" - }, - "disabled": { - "datatype": "Boolean", - "default": "", - "required": "false", - "summary": "Disables default tcpout settings", - "validation": "" - }, - "dropEventsOnQueueFull": { - "datatype": "Number", - "default": "", - "required": "false", - "summary": "If set to a positive number, wait the specified number of seconds before throwing out all new events until the output queue has space. Defaults to -1 (do not drop events).\n\nCAUTION: Do not set this value to a positive integer if you are monitoring files.\n\nSetting this to -1 or 0 causes the output queue to block when it gets full, which causes further blocking up the processing chain. If any target group's queue is blocked, no more data reaches any other target group.\n\nUsing auto load-balancing is the best way to minimize this condition, because, in that case, multiple receivers must be down (or jammed up) before queue blocking can occur.", - "validation": "" - }, - "heartbeatFrequency": { - "datatype": "Number", - "default": "", - "required": "false", - "summary": "How often (in seconds) to send a heartbeat packet to the receiving server.\n\nHeartbeats are only sent if sendCookedData=true. Defaults to 30 seconds.", - "validation": "" - }, - "indexAndForward": { - "datatype": "Boolean", - "default": "", - "required": "false", - "summary": "Specifies whether to index all data locally, in addition to forwarding it.
Defaults to false.\n\nThis is known as an \"index-and-forward\" configuration. This attribute is only available for heavy forwarders. It is available only at the top level [tcpout] stanza in outputs.conf. It cannot be overridden in a target group.", - "validation": "" - }, - "maxQueueSize": { - "datatype": "Number", - "default": "", - "required": "false", - "summary": "Specify an integer or integer[KB|MB|GB].\n\nSets the maximum size of the forwarder's output queue. It also sets the maximum size of the wait queue to 3x this value, if you have enabled indexer acknowledgment (useACK=true).\n\nAlthough the wait queue and the output queues are both configured by this attribute, they are separate queues. The setting determines the maximum size of the queue's in-memory (RAM) buffer.\n\nFor heavy forwarders sending parsed data, maxQueueSize is the maximum number of events. Since events are typically much shorter than data blocks, the memory consumed by the queue on a parsing forwarder will likely be much smaller than on a non-parsing forwarder, if you use this version of the setting.\n\nIf specified as a lone integer (for example, maxQueueSize=100), maxQueueSize indicates the maximum number of queued events (for parsed data) or blocks of data (for unparsed data). A block of data is approximately 64KB. For non-parsing forwarders, such as universal forwarders, that send unparsed data, maxQueueSize is the maximum number of data blocks.\n\nIf specified as an integer followed by KB, MB, or GB (for example, maxQueueSize=100MB), maxQueueSize indicates the maximum RAM allocated to the queue buffer. Defaults to 500KB (which means a maximum size of 500KB for the output queue and 1500KB for the wait queue, if any).", - "validation": "" - }, - "name": { - "datatype": "String", - "default": "", - "required": "true", - "summary": "Configuration to be edited. The only valid value is \"tcpout\".", - "validation": "" - }, - "sendCookedData": { - "datatype": "Boolean", - "default": "", - "required": "false", - "summary": "If true, events are cooked (have been processed by Splunk). If false, events are raw and untouched prior to sending. Defaults to true.\n\nSet to false if you are sending to a third-party system.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "201": { - "summary": "Created successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "402": { - "summary": "The Splunk license in use has disabled this feature." - }, - "403": { - "summary": "Insufficient permissions to create output." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - }, - "503": { - "summary": "This feature has been disabled in Splunk configuration files." - } - }, - "summary": "Configures global tcpout properties.", - "urlParams": {} - } - }, - "summary": "Provides access to global TCP out properties." - }, - "data/outputs/tcp/default/{name}": { - "methods": { - "DELETE": { - "config": "outputs", - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Deleted successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." 
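A sketch of setting the global tcpout properties via the `data/outputs/tcp/default` endpoint above, reusing the same `service` handle; per the spec, `name` must be "tcpout", and the target group is hypothetical.

```python
# POST data/outputs/tcp/default configures global tcpout properties.
service.post("data/outputs/tcp/default",
             name="tcpout",            # the only valid value per the spec
             defaultGroup="indexers",  # hypothetical target group
             sendCookedData=1)         # boolean, passed as 1/0
```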
- }, - "403": { - "summary": "Insufficient permissions to disable forwarding settings." - }, - "404": { - "summary": "Forwarding settings do not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Disable the default forwarding settings.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - }, - "GET": { - "config": "outputs", - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view forwaring settings." - }, - "404": { - "summary": "Forwarding settings do not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Retrieve the named configuration. The only valid name here is \"tcpout\".", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - }, - "POST": { - "config": "outputs", - "params": { - "defaultGroup": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "disabled": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "dropEventsOnQueueFull": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "heartbeatFrequency": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "indexAndForward": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "maxQueueSize": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "sendCookedData": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Updated successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "402": { - "summary": "The Splunk license in use has disabled this feature." - }, - "403": { - "summary": "Insufficient permissions to edit forwarding settings." - }, - "404": { - "summary": "Forwarding settings do not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - }, - "503": { - "summary": "This feature has been disabled in Splunk configuration files." 
- } - }, - "summary": "Configure global forwarding properties.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - } - } - }, - "data/outputs/tcp/group": { - "methods": { - "GET": { - "config": "outputs", - "params": { - "count": { - "datatype": "Number", - "default": "30", - "required": "false", - "summary": "Indicates the maximum number of entries to return. To return all entries, specify 0.", - "validation": "" - }, - "offset": { - "datatype": "Number", - "default": "0", - "required": "false", - "summary": "Index for first item to return.", - "validation": "" - }, - "search": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Search expression to filter the response. The response matches field values against the search expression. For example:\n\nsearch=foo matches any object that has \"foo\" as a substring in a field.\nsearch=field_name%3Dfield_value restricts the match to a single field. URI-encoding is required in this example.", - "validation": "" - }, - "sort_dir": { - "datatype": "Enum", - "default": "asc", - "required": "false", - "summary": "Valid values: (asc | desc)\n\nIndicates whether to sort returned entries in ascending or descending order.", - "validation": "" - }, - "sort_key": { - "datatype": "String", - "default": "name", - "required": "false", - "summary": "Field to use for sorting.", - "validation": "" - }, - "sort_mode": { - "datatype": "Enum", - "default": "auto", - "required": "false", - "summary": "Valid values: (auto | alpha | alpha_case | num)\n\nIndicates the collating sequence for sorting the returned entries.\nauto: If all values of the field are numbers, collate numerically. Otherwise, collate alphabetically.\nalpha: Collate alphabetically.\nalpha_case: Collate alphabetically, case-sensitive.\nnum: Collate numerically.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view group." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Returns configuration information about target groups. ", - "urlParams": {} - }, - "POST": { - "config": "outputs", - "params": { - "autoLB": { - "datatype": "Boolean", - "default": "true", - "required": "false", - "summary": "If set to true, forwarder performs automatic load balancing. In automatic mode, the forwarder selects a new indexer every autoLBFrequency seconds. If the connection to the current indexer is lost, the forwarder selects a new live indexer to forward data to.\n\nDo not alter the default setting, unless you have some overriding need to use round-robin load balancing. Round-robin load balancing (autoLB=false) was previously the default load balancing method. 
Starting with release 4.2, however, round-robin load balancing has been deprecated, and the default has been changed to automatic load balancing (autoLB=true).", - "validation": "" - }, - "compressed": { - "datatype": "Boolean", - "default": "false", - "required": "false", - "summary": "If true, forwarder sends compressed data.\n\nIf set to true, the receiver port must also have compression turned on.", - "validation": "" - }, - "disabled": { - "datatype": "Boolean", - "default": "false", - "required": "false", - "summary": "If true, disables the group.", - "validation": "" - }, - "dropEventsOnQueueFull": { - "datatype": "Number", - "default": "-1", - "required": "false", - "summary": "If set to a positive number, wait the specified number of seconds before throwing out all new events until the output queue has space. Defaults to -1 (do not drop events).\n\nCAUTION: Do not set this value to a positive integer if you are monitoring files.\n\nSetting this to -1 or 0 causes the output queue to block when it gets full, which causes further blocking up the processing chain. If any target group's queue is blocked, no more data reaches any other target group.\n\nUsing auto load-balancing is the best way to minimize this condition, because, in that case, multiple receivers must be down (or jammed up) before queue blocking can occur.", - "validation": "" - }, - "heartbeatFrequency": { - "datatype": "Number", - "default": "30", - "required": "false", - "summary": "How often (in seconds) to send a heartbeat packet to the group.\n\nHeartbeats are only sent if sendCookedData=true. Defaults to 30 seconds.", - "validation": "" - }, - "maxQueueSize": { - "datatype": "Number", - "default": "500KB", - "required": "false", - "summary": "Specify either an integer or integer[KB|MB|GB].\n\nSets the maximum size of the forwarder's output queue. It also sets the maximum size of the wait queue to 3x this value, if you have enabled indexer acknowledgment (useACK=true).\n\nAlthough the wait queue and the output queues are both configured by this attribute, they are separate queues. The setting determines the maximum size of the queue's in-memory (RAM) buffer.\n\nFor heavy forwarders sending parsed data, maxQueueSize is the maximum number of events. Since events are typically much shorter than data blocks, the memory consumed by the queue on a parsing forwarder will likely be much smaller than on a non-parsing forwarder, if you use this version of the setting.\n\nIf specified as a lone integer (for example, maxQueueSize=100), maxQueueSize indicates the maximum number of queued events (for parsed data) or blocks of data (for unparsed data). A block of data is approximately 64KB. For non-parsing forwarders, such as universal forwarders, that send unparsed data, maxQueueSize is the maximum number of data blocks.\n\nIf specified as an integer followed by KB, MB, or GB (for example, maxQueueSize=100MB), maxQueueSize indicates the maximum RAM allocated to the queue buffer. 
Defaults to 500KB (which means a maximum size of 500KB for the output queue and 1500KB for the wait queue, if any).", - "validation": "" - }, - "method": { - "datatype": "Enum", - "default": "", - "required": "false", - "summary": "Valid values: (tcpout | syslog)\n\nSpecifies the type of output processor.", - "validation": "" - }, - "name": { - "datatype": "String", - "default": "", - "required": "true", - "summary": "The name of the group of receivers.", - "validation": "" - }, - "sendCookedData": { - "datatype": "Boolean", - "default": "true", - "required": "false", - "summary": "If true, send cooked events (events that have been processed by Splunk).\n\nIf false, events are raw and untouched prior to sending. Set to false if you are sending to a third-party system.\n\nDefaults to true.", - "validation": "" - }, - "servers": { - "datatype": "String", - "default": "", - "required": "true", - "summary": "Comma-separated list of servers to include in the group.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "201": { - "summary": "Created successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "402": { - "summary": "The Splunk license in use has disabled this feature." - }, - "403": { - "summary": "Insufficient permissions to create group." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - }, - "503": { - "summary": "This feature has been disabled in Splunk configuration files." - } - }, - "summary": "Configures a group of one or more data forwarding destinations.", - "urlParams": {} - } - }, - "summary": "Provides access to the configuration of a group of one or more data forwarding destinations." - }, - "data/outputs/tcp/group/{name}": { - "methods": { - "DELETE": { - "config": "outputs", - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Deleted successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to delete group." - }, - "404": { - "summary": "Group does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Deletes the target group specified by {name}.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - }, - "GET": { - "config": "outputs", - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view group." - }, - "404": { - "summary": "Group does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." 
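Creating a target group through the `data/outputs/tcp/group` endpoint above needs only `name` and `servers`; the group name and receiver addresses below are hypothetical.

```python
# POST data/outputs/tcp/group configures a group of forwarding destinations.
service.post("data/outputs/tcp/group",
             name="indexers",                        # hypothetical group name
             servers="10.0.0.1:9997,10.0.0.2:9997",  # hypothetical receivers
             method="tcpout")                        # output processor type per the spec
```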
- } - }, - "summary": "Returns configuration information about the target group specified by {name}.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - }, - "POST": { - "config": "outputs", - "params": { - "autoLB": { - "datatype": "INHERITED", - "default": "true", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "compressed": { - "datatype": "INHERITED", - "default": "false", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "disabled": { - "datatype": "INHERITED", - "default": "false", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "dropEventsOnQueueFull": { - "datatype": "INHERITED", - "default": "-1", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "heartbeatFrequency": { - "datatype": "INHERITED", - "default": "30", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "maxQueueSize": { - "datatype": "INHERITED", - "default": "500KB", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "method": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "sendCookedData": { - "datatype": "INHERITED", - "default": "true", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "servers": { - "datatype": "INHERITED", - "default": "", - "required": "true", - "summary": "INHERITED", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Updated successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "402": { - "summary": "The Splunk license in use has disabled this feature." - }, - "403": { - "summary": "Insufficient permissions to edit group." - }, - "404": { - "summary": "Group does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - }, - "503": { - "summary": "This feature has been disabled in Splunk configuration files." - } - }, - "summary": "Updates the configuration of the target group.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - } - } - }, - "data/outputs/tcp/server": { - "methods": { - "GET": { - "config": "outputs", - "params": { - "count": { - "datatype": "Number", - "default": "30", - "required": "false", - "summary": "Indicates the maximum number of entries to return. To return all entries, specify 0.", - "validation": "" - }, - "offset": { - "datatype": "Number", - "default": "0", - "required": "false", - "summary": "Index for first item to return.", - "validation": "" - }, - "search": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Search expression to filter the response. The response matches field values against the search expression. For example:\n\nsearch=foo matches any object that has \"foo\" as a substring in a field.\nsearch=field_name%3Dfield_value restricts the match to a single field. 
URI-encoding is required in this example.", - "validation": "" - }, - "sort_dir": { - "datatype": "Enum", - "default": "asc", - "required": "false", - "summary": "Valid values: (asc | desc)\n\nIndicates whether to sort returned entries in ascending or descending order.", - "validation": "" - }, - "sort_key": { - "datatype": "String", - "default": "name", - "required": "false", - "summary": "Field to use for sorting.", - "validation": "" - }, - "sort_mode": { - "datatype": "Enum", - "default": "auto", - "required": "false", - "summary": "Valid values: (auto | alpha | alpha_case | num)\n\nIndicates the collating sequence for sorting the returned entries.\nauto: If all values of the field are numbers, collate numerically. Otherwise, collate alphabetically.\nalpha: Collate alphabetically.\nalpha_case: Collate alphabetically, case-sensitive.\nnum: Collate numerically.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view forwarded servers." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Lists existing forwarded servers.", - "urlParams": {} - }, - "POST": { - "config": "outputs", - "params": { - "backoffAtStartup": { - "datatype": "Number", - "default": "", - "required": "false", - "summary": "Sets in seconds how long to wait to retry the first time a retry is needed. Compare to initialBackoff.", - "validation": "" - }, - "disabled": { - "datatype": "Boolean", - "default": "", - "required": "false", - "summary": "If true, disables the forwarder.", - "validation": "" - }, - "initialBackoff": { - "datatype": "Number", - "default": "", - "required": "false", - "summary": "Sets how long, in seconds, to wait to retry every time after the first retry. Compare to backoffAtStartup.", - "validation": "" - }, - "maxBackoff": { - "datatype": "Number", - "default": "", - "required": "false", - "summary": "Specifies the number of times in seconds before reaching the maximum backoff frequency.", - "validation": "" - }, - "maxNumberOfRetriesAtHighestBackoff": { - "datatype": "Number", - "default": "", - "required": "false", - "summary": "Specifies the number of times the system should retry after reaching the highest back-off period, before stopping completely. -1 (default value) means to try forever.\n\nCaution: Splunk recommends that you not change this from the default, or the forwarder will completely stop forwarding to a downed URI at some point.\n", - "validation": "" - }, - "method": { - "datatype": "Enum", - "default": "", - "required": "false", - "summary": "Valid values: (clone | balance | autobalance)\n\nThe data distribution method used when two or more servers exist in the same forwarder group. ", - "validation": "" - }, - "name": { - "datatype": "String", - "default": "", - "required": "true", - "summary": "host:port of the Splunk receiver. host can be either an ip address or server name.
port is the port that the Splunk receiver is listening on.", - "validation": "" - }, - "sslAltNameToCheck": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "The alternate name to match in the remote server's SSL certificate.", - "validation": "" - }, - "sslCertPath": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Path to the client certificate. If specified, connection uses SSL.", - "validation": "" - }, - "sslCipher": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "SSL Cipher in the form ALL:!aNULL:!eNULL:!LOW:!EXP:RC4+RSA:+HIGH:+MEDIUM", - "validation": "" - }, - "sslCommonNameToCheck": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Check the common name of the server's certificate against this name.\n\nIf there is no match, assume that Splunk is not authenticated against this server. You must specify this setting if sslVerifyServerCert is true.", - "validation": "" - }, - "sslPassword": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "The password associated with the CAcert.\n\nThe default Splunk CAcert uses the password \"password.\"", - "validation": "" - }, - "sslRootCAPath": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "The path to the root certificate authority file (optional).", - "validation": "" - }, - "sslVerifyServerCert": { - "datatype": "Boolean", - "default": "", - "required": "false", - "summary": " If true, make sure that the server you are connecting to is a valid one (authenticated). Both the common name and the alternate name of the server are then checked for a match.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "201": { - "summary": "Created successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "402": { - "summary": "The Splunk license in use has disabled this feature." - }, - "403": { - "summary": "Insufficient permissions to create a forwarded server." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - }, - "503": { - "summary": "This feature has been disabled in Splunk configuration files." - } - }, - "summary": "Creates a new forwarder output.", - "urlParams": {} - } - }, - "summary": "Provides access to data forwarding configurations." - }, - "data/outputs/tcp/server/{name}": { - "methods": { - "DELETE": { - "config": "outputs", - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Deleted successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to delete forwarded server configuration." - }, - "404": { - "summary": "Forwarded server does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details."
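A sketch of creating a forwarder output via the `data/outputs/tcp/server` endpoint above; the receiver address and certificate path are hypothetical, and the ssl* parameters are optional per the spec.

```python
# POST data/outputs/tcp/server; name is host:port of the Splunk receiver.
service.post("data/outputs/tcp/server",
             name="receiver.example.com:9997",  # hypothetical receiver
             method="autobalance",
             sslCertPath="$SPLUNK_HOME/etc/auth/client.pem",  # hypothetical cert path
             sslVerifyServerCert=1,             # boolean, passed as 1/0
             sslCommonNameToCheck="receiver.example.com")
```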
- } - }, - "summary": "Deletes the configuration for the forwarded server specified by {name}.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - }, - "GET": { - "config": "outputs", - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view forwarded server." - }, - "404": { - "summary": "Forwarded server does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Lists information aobut the forwarded server specified by {name}.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - }, - "POST": { - "config": "outputs", - "params": { - "backoffAtStartup": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "disabled": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "initialBackoff": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "maxBackoff": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "maxNumberOfRetriesAtHighestBackoff": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "method": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "sslAltNameToCheck": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "sslCertPath": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "sslCipher": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "sslCommonNameToCheck": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "sslPassword": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "sslRootCAPath": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "sslVerifyServerCert": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Updated successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "402": { - "summary": "The Splunk license in use has disabled this feature." - }, - "403": { - "summary": "Insufficient permissions to edit configuratin for forwarded server." - }, - "404": { - "summary": "Forwarded server does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." 
- }, - "500": { - "summary": "Internal server error. See response body for details." - }, - "503": { - "summary": "This feature has been disabled in Splunk configuration files." - } - }, - "summary": "Configures the forwarded server specified by {name}.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - } - } - }, - "data/outputs/tcp/server/{name}/allconnections": { - "methods": { - "GET": { - "config": "outputs", - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed connections successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to list ouput connections." - }, - "404": { - "summary": "Output server does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "List current connections to forwarded server specified by {name} ", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - } - } - }, - "data/outputs/tcp/syslog": { - "methods": { - "GET": { - "config": "", - "params": { - "count": { - "datatype": "Number", - "default": "30", - "required": "false", - "summary": "Indicates the maximum number of entries to return. To return all entries, specify 0.", - "validation": "" - }, - "offset": { - "datatype": "Number", - "default": "0", - "required": "false", - "summary": "Index for first item to return.", - "validation": "" - }, - "search": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Search expression to filter the response. The response matches field values against the search expression. For example:\n\nsearch=foo matches any object that has \"foo\" as a substring in a field.\nsearch=field_name%3Dfield_value restricts the match to a single field. URI-encoding is required in this example.", - "validation": "" - }, - "sort_dir": { - "datatype": "Enum", - "default": "asc", - "required": "false", - "summary": "Valid values: (asc | desc)\n\nIndicates whether to sort returned entries in ascending or descending order.", - "validation": "" - }, - "sort_key": { - "datatype": "String", - "default": "name", - "required": "false", - "summary": "Field to use for sorting.", - "validation": "" - }, - "sort_mode": { - "datatype": "Enum", - "default": "auto", - "required": "false", - "summary": "Valid values: (auto | alpha | alpha_case | num)\n\nIndicates the collating sequence for sorting the returned entries.\nauto: If all values of the field are numbers, collate numerically. Otherwise, collate alphabetically.\nalpha: Collate alphabetically.\nalpha_case: Collate alphabetically, case-sensitive.\nnum: Collate numerically.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view configuration of forwarded servers." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." 
- } - }, - "summary": "Provides access to syslog data forwarding configurations.", - "urlParams": {} - }, - "POST": { - "config": "", - "params": { - "disabled": { - "datatype": "Boolean", - "default": "", - "required": "false", - "summary": "If true, disables global syslog settings.", - "validation": "" - }, - "name": { - "datatype": "String", - "default": "", - "required": "true", - "summary": "Name of the forwarder to send data in standard syslog format.", - "validation": "" - }, - "priority": { - "datatype": "Number", - "default": "", - "required": "false", - "summary": "Sets syslog priority value.", - "validation": "" - }, - "server": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "host:port of the server where syslog data should be sent", - "validation": "" - }, - "timestampformat": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Format of timestamp to add at start of the events to be forwarded.", - "validation": "" - }, - "type": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Protocol to use to send syslog data. Valid values: (tcp | udp ).", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "201": { - "summary": "Created successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "402": { - "summary": "The Splunk license in use has disabled this feature." - }, - "403": { - "summary": "Insufficient permissions to configure a forwarded server." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - }, - "503": { - "summary": "This feature has been disabled in Splunk configuration files." - } - }, - "summary": "Configures a forwarder to send data in standard syslog format.", - "urlParams": {} - } - }, - "summary": "Provides access to the configuration of a forwarded server configured to provide data in standard syslog format." - }, - "data/outputs/tcp/syslog/{name}": { - "methods": { - "DELETE": { - "config": "", - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Deleted successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to delete forwarded server configuration." - }, - "404": { - "summary": "Forwarded server configuration does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Deletes the configuration for the forwarder specified by {name} that sends data in syslog format.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - }, - "GET": { - "config": "", - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view forwarded server configuration." 
- }, - "404": { - "summary": "Forwarded server does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Returns configuration information for the forwarder specified by {name} that sends data in standard syslog format.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - }, - "POST": { - "config": "", - "params": { - "disabled": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "priority": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "server": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "timestampformat": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "type": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Updated successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "402": { - "summary": "The Splunk license in use has disabled this feature." - }, - "403": { - "summary": "Insufficient permissions to edit forwarded server configuration." - }, - "404": { - "summary": "Forwarded server does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - }, - "503": { - "summary": "This feature has been disabled in Splunk configuration files." - } - }, - "summary": "Updates the configuration of the forwarder specified by {name} that sends data in syslog format.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - } - } - }, - "data/props/extractions": { - "methods": { - "GET": { - "config": "", - "params": { - "count": { - "datatype": "Number", - "default": "30", - "required": "false", - "summary": "Indicates the maximum number of entries to return. To return all entries, specify 0.", - "validation": "" - }, - "offset": { - "datatype": "Number", - "default": "0", - "required": "false", - "summary": "Index for first item to return.", - "validation": "" - }, - "search": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Search expression to filter the response. The response matches field values against the search expression. For example:\n\nsearch=foo matches any object that has \"foo\" as a substring in a field.\nsearch=field_name%3Dfield_value restricts the match to a single field. 
URI-encoding is required in this example.", - "validation": "" - }, - "sort_dir": { - "datatype": "Enum", - "default": "asc", - "required": "false", - "summary": "Valid values: (asc | desc)\n\nIndicates whether to sort returned entries in ascending or descending order.", - "validation": "" - }, - "sort_key": { - "datatype": "String", - "default": "name", - "required": "false", - "summary": "Field to use for sorting.", - "validation": "" - }, - "sort_mode": { - "datatype": "Enum", - "default": "auto", - "required": "false", - "summary": "Valid values: (auto | alpha | alpha_case | num)\n\nIndicates the collating sequence for sorting the returned entries.\nauto: If all values of the field are numbers, collate numerically. Otherwise, collate alphabetically.\nalpha: Collate alphabetically.\nalpha_case: Collate alphabetically, case-sensitive.\nnum: Collate numerically.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view extractions." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "List field extractions.", - "urlParams": {} - }, - "POST": { - "config": "", - "params": { - "name": { - "datatype": "String", - "default": "", - "required": "true", - "summary": "The user-specified part of the field extraction name. The full name of the field extraction includes this identifier as a suffix.", - "validation": "" - }, - "stanza": { - "datatype": "String", - "default": "", - "required": "true", - "summary": "The props.conf stanza to which this field extraction applies, e.g. the sourcetype or source that triggers this field extraction. The full name of the field extraction includes this stanza name as a prefix.", - "validation": "validate(len(trim($stanza$)) > 0, \"Value of argument 'stanza' may not be empty\")" - }, - "type": { - "datatype": "Enum", - "default": "", - "required": "true", - "summary": "Valid values: (REPORT | EXTRACT)\n\nAn EXTRACT-type field extraction is defined with an \"inline\" regular expression. A REPORT-type field extraction refers to a transforms.conf stanza.", - "validation": "validate(($type$ == 'REPORT') OR ($type$ == 'EXTRACT'), \"Value of 'type' must be one of { REPORT, EXTRACT }\")" - }, - "value": { - "datatype": "String", - "default": "", - "required": "true", - "summary": "If this is an EXTRACT-type field extraction, specify a regular expression with named capture groups that define the desired fields. If this is a REPORT-type field extraction, specify a comma- or space-delimited list of transforms.conf stanza names that define the field transformations to apply.", - "validation": "validate(len(trim($value$)) > 0, \"Value of argument 'value' may not be empty\")" - } - }, - "request": "", - "response": "", - "returns": { - "201": { - "summary": "Created successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "402": { - "summary": "The Splunk license in use has disabled this feature." 
- }, - "403": { - "summary": "Insufficient permissions to create extraction." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - }, - "503": { - "summary": "This feature has been disabled in Splunk configuration files." - } - }, - "summary": "Create a new field extraction.", - "urlParams": {} - } - }, - "summary": "Provides access to search-time field extractions in props.conf." - }, - "data/props/extractions/{name}": { - "methods": { - "DELETE": { - "config": "", - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Deleted successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to delete named extraction." - }, - "404": { - "summary": "Named extraction does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Delete the named field extraction.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - }, - "GET": { - "config": "", - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view named extraction." - }, - "404": { - "summary": "Named extraction does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "List a single field extraction.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - }, - "POST": { - "config": "", - "params": { - "value": { - "datatype": "INHERITED", - "default": "", - "required": "true", - "summary": "INHERITED", - "validation": "validate(len(trim($value$)) > 0, \"Value of argument 'value' may not be empty\")" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Updated successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "402": { - "summary": "The Splunk license in use has disabled this feature." - }, - "403": { - "summary": "Insufficient permissions to edit named extraction." - }, - "404": { - "summary": "Named extraction does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - }, - "503": { - "summary": "This feature has been disabled in Splunk configuration files." 
- } - }, - "summary": "Modify the named field extraction.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - } - } - }, - "data/props/fieldaliases": { - "methods": { - "GET": { - "config": "", - "params": { - "count": { - "datatype": "Number", - "default": "30", - "required": "false", - "summary": "Indicates the maximum number of entries to return. To return all entries, specify 0.", - "validation": "" - }, - "offset": { - "datatype": "Number", - "default": "0", - "required": "false", - "summary": "Index for first item to return.", - "validation": "" - }, - "search": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Search expression to filter the response. The response matches field values against the search expression. For example:\n\nsearch=foo matches any object that has \"foo\" as a substring in a field.\nsearch=field_name%3Dfield_value restricts the match to a single field. URI-encoding is required in this example.", - "validation": "" - }, - "sort_dir": { - "datatype": "Enum", - "default": "asc", - "required": "false", - "summary": "Valid values: (asc | desc)\n\nIndicates whether to sort returned entries in ascending or descending order.", - "validation": "" - }, - "sort_key": { - "datatype": "String", - "default": "name", - "required": "false", - "summary": "Field to use for sorting.", - "validation": "" - }, - "sort_mode": { - "datatype": "Enum", - "default": "auto", - "required": "false", - "summary": "Valid values: (auto | alpha | alpha_case | num)\n\nIndicates the collating sequence for sorting the returned entries.\nauto: If all values of the field are numbers, collate numerically. Otherwise, collate alphabetically.\nalpha: Collate alphabetically.\nalpha_case: Collate alphabetically, case-sensitive.\nnum: Collate numerically.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view filed aliases." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "List field aliases.", - "urlParams": {} - }, - "POST": { - "config": "", - "params": { - "alias.*": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "The alias for a given field. For example, supply a value of \"bar\" for an argument \"alias.foo\" to alias \"foo\" to \"bar\".", - "validation": "" - }, - "name": { - "datatype": "String", - "default": "", - "required": "true", - "summary": "The user-specified part of the field alias name. The full name of the field alias includes this identifier as a suffix.", - "validation": "" - }, - "stanza": { - "datatype": "String", - "default": "", - "required": "true", - "summary": "The props.conf stanza to which this field alias applies, e.g. the sourcetype or source that causes this field alias to be applied. The full name of the field alias includes this stanza name as a prefix.", - "validation": "validate(len(trim($stanza$)) > 0, \"Value of argument 'stanza' may not be empty\")" - } - }, - "request": "", - "response": "", - "returns": { - "201": { - "summary": "Created successfully." - }, - "400": { - "summary": "Request error. 
See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "402": { - "summary": "The Splunk license in use has disabled this feature." - }, - "403": { - "summary": "Insufficient permissions to create field alias." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - }, - "503": { - "summary": "This feature has been disabled in Splunk configuration files." - } - }, - "summary": "Create a new field alias.", - "urlParams": {} - } - }, - "summary": "Provides access to field aliases in props.conf." - }, - "data/props/fieldaliases/{name}": { - "methods": { - "DELETE": { - "config": "", - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Deleted successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to delete field alias." - }, - "404": { - "summary": "Field alias does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Delete the named field alias.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - }, - "GET": { - "config": "", - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view field alias." - }, - "404": { - "summary": "Field alias does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "List a single field alias.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - }, - "POST": { - "config": "", - "params": { - "alias.*": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Updated successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "402": { - "summary": "The Splunk license in use has disabled this feature." - }, - "403": { - "summary": "Insufficient permissions to edit field alias." - }, - "404": { - "summary": "Field alias does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - }, - "503": { - "summary": "This feature has been disabled in Splunk configuration files." 
- } - }, - "summary": "Modify the named field alias.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - } - } - }, - "data/props/lookups": { - "methods": { - "GET": { - "config": "", - "params": { - "count": { - "datatype": "Number", - "default": "30", - "required": "false", - "summary": "Indicates the maximum number of entries to return. To return all entries, specify 0.", - "validation": "" - }, - "offset": { - "datatype": "Number", - "default": "0", - "required": "false", - "summary": "Index for first item to return.", - "validation": "" - }, - "search": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Search expression to filter the response. The response matches field values against the search expression. For example:\n\nsearch=foo matches any object that has \"foo\" as a substring in a field.\nsearch=field_name%3Dfield_value restricts the match to a single field. URI-encoding is required in this example.", - "validation": "" - }, - "sort_dir": { - "datatype": "Enum", - "default": "asc", - "required": "false", - "summary": "Valid values: (asc | desc)\n\nIndicates whether to sort returned entries in ascending or descending order.", - "validation": "" - }, - "sort_key": { - "datatype": "String", - "default": "name", - "required": "false", - "summary": "Field to use for sorting.", - "validation": "" - }, - "sort_mode": { - "datatype": "Enum", - "default": "auto", - "required": "false", - "summary": "Valid values: (auto | alpha | alpha_case | num)\n\nIndicates the collating sequence for sorting the returned entries.\nauto: If all values of the field are numbers, collate numerically. Otherwise, collate alphabetically.\nalpha: Collate alphabetically.\nalpha_case: Collate alphabetically, case-sensitive.\nnum: Collate numerically.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view lookups." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "List automatic lookups.", - "urlParams": {} - }, - "POST": { - "config": "", - "params": { - "lookup.field.input.*": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "A column in the lookup table to match against. Supply a non-empty value if the corresponding field has a different name in your actual events.\n\n'''Note:''' This parameter is new in Splunk 4.3.", - "validation": "" - }, - "lookup.field.output.*": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "A column in the lookup table to output. Supply a non-empty value if the field should have a different name in your actual events.\n\n'''Note:''' This parameter is new in Splunk 4.3.", - "validation": "" - }, - "name": { - "datatype": "String", - "default": "", - "required": "true", - "summary": "The user-specified part of the automatic lookup name. The full name of the automatic lookup includes this identifier as a suffix.", - "validation": "" - }, - "overwrite": { - "datatype": "Boolean", - "default": "", - "required": "true", - "summary": "If set to true, output fields are always overridden. 
If set to false, output fields are only written out if they do not already exist.", - "validation": "" - }, - "stanza": { - "datatype": "String", - "default": "", - "required": "true", - "summary": "The props.conf stanza to which this automatic lookup applies, e.g. the sourcetype or source that automatically triggers this lookup. The full name of the automatic lookup includes this stanza name as a prefix.", - "validation": "validate(len(trim($stanza$)) > 0, \"Value of argument 'stanza' may not be empty\")" - }, - "transform": { - "datatype": "String", - "default": "", - "required": "true", - "summary": "The transforms.conf stanza that defines the lookup to apply.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "201": { - "summary": "Created successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "402": { - "summary": "The Splunk license in use has disabled this feature." - }, - "403": { - "summary": "Insufficient permissions to create a lookup." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - }, - "503": { - "summary": "This feature has been disabled in Splunk configuration files." - } - }, - "summary": "Create a new automatic lookup.", - "urlParams": {} - } - }, - "summary": "Provides access to automatic lookups in props.conf." - }, - "data/props/lookups/{name}": { - "methods": { - "DELETE": { - "config": "", - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Deleted successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to delete lookup." - }, - "404": { - "summary": "Lookup does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Delete the named automatic lookup.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - }, - "GET": { - "config": "", - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view lookup." - }, - "404": { - "summary": "Lookup does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." 
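These props.conf endpoints are plain REST collections, reachable through the SDK's generic `Service.post()` passthrough. A minimal sketch, assuming a local test instance; the credentials, stanza, and transform names below are illustrative, not taken from this file:

```python
import splunklib.client as client

# Assumed local test credentials; adjust for your deployment.
service = client.connect(host="localhost", port=8089,
                         username="admin", password="changeme")

# POST data/props/fieldaliases: 'name' and 'stanza' are required;
# 'alias.<field>' supplies the alias for an existing field.
service.post("data/props/fieldaliases",
             name="my_alias",
             stanza="sourcetype::access_combined",
             **{"alias.foo": "bar"})

# POST data/props/lookups: 'stanza', 'transform', and 'overwrite' are
# required; 'transform' names an (assumed) transforms.conf stanza.
service.post("data/props/lookups",
             name="my_auto_lookup",
             stanza="sourcetype::access_combined",
             transform="my_lookup_transform",
             overwrite="false")
```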
- } - }, - "summary": "List a single automatic lookup.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - }, - "POST": { - "config": "", - "params": { - "lookup.field.input.*": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "lookup.field.output.*": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "overwrite": { - "datatype": "INHERITED", - "default": "", - "required": "true", - "summary": "INHERITED", - "validation": "" - }, - "transform": { - "datatype": "INHERITED", - "default": "", - "required": "true", - "summary": "INHERITED", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Updated successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "402": { - "summary": "The Splunk license in use has disabled this feature." - }, - "403": { - "summary": "Insufficient permissions to edit lookup." - }, - "404": { - "summary": "Lookup does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - }, - "503": { - "summary": "This feature has been disabled in Splunk configuration files." - } - }, - "summary": "Modify the named automatic lookup.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - } - } - }, - "data/props/sourcetype-rename": { - "methods": { - "GET": { - "config": "", - "params": { - "count": { - "datatype": "Number", - "default": "30", - "required": "false", - "summary": "Indicates the maximum number of entries to return. To return all entries, specify 0.", - "validation": "" - }, - "offset": { - "datatype": "Number", - "default": "0", - "required": "false", - "summary": "Index for first item to return.", - "validation": "" - }, - "search": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Search expression to filter the response. The response matches field values against the search expression. For example:\n\nsearch=foo matches any object that has \"foo\" as a substring in a field.\nsearch=field_name%3Dfield_value restricts the match to a single field. URI-encoding is required in this example.", - "validation": "" - }, - "sort_dir": { - "datatype": "Enum", - "default": "asc", - "required": "false", - "summary": "Valid values: (asc | desc)\n\nIndicates whether to sort returned entries in ascending or descending order.", - "validation": "" - }, - "sort_key": { - "datatype": "String", - "default": "name", - "required": "false", - "summary": "Field to use for sorting.", - "validation": "" - }, - "sort_mode": { - "datatype": "Enum", - "default": "auto", - "required": "false", - "summary": "Valid values: (auto | alpha | alpha_case | num)\n\nIndicates the collating sequence for sorting the returned entries.\nauto: If all values of the field are numbers, collate numerically. Otherwise, collate alphabetically.\nalpha: Collate alphabetically.\nalpha_case: Collate alphabetically, case-sensitive.\nnum: Collate numerically.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. 
See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view sourcetype renames." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "List renamed sourcetypes.", - "urlParams": {} - }, - "POST": { - "config": "", - "params": { - "name": { - "datatype": "String", - "default": "", - "required": "true", - "summary": "The original sourcetype name.", - "validation": "" - }, - "value": { - "datatype": "String", - "default": "", - "required": "true", - "summary": "The new sourcetype name.", - "validation": "validate(len(trim($value$)) > 0, \"Value of argument 'value' may not be empty\")" - } - }, - "request": "", - "response": "", - "returns": { - "201": { - "summary": "Created successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "402": { - "summary": "The Splunk license in use has disabled this feature." - }, - "403": { - "summary": "Insufficient permissions to create a rename for a sourcetype." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - }, - "503": { - "summary": "This feature has been disabled in Splunk configuration files." - } - }, - "summary": "Rename a sourcetype.", - "urlParams": {} - } - }, - "summary": "Provides access to renamed sourcetypes which are configured in props.conf." - }, - "data/props/sourcetype-rename/{name}": { - "methods": { - "DELETE": { - "config": "", - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Deleted successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to delete the rename for the sourcetype." - }, - "404": { - "summary": "Rename for the sourcetype does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Restore a sourcetype's original name.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - }, - "GET": { - "config": "", - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view renames for sourcetypes." - }, - "404": { - "summary": "Rename for sourcetype does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." 
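Renaming a sourcetype takes only the two required parameters. A hedged sketch reusing the `service` handle from the previous example; both sourcetype names are placeholders:

```python
# POST data/props/sourcetype-rename: 'name' is the original sourcetype,
# 'value' is the new name (non-empty, per the validation rule).
service.post("data/props/sourcetype-rename",
             name="old_sourcetype", value="new_sourcetype")
```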
- } - }, - "summary": "List a single renamed sourcetype.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - }, - "POST": { - "config": "", - "params": { - "value": { - "datatype": "INHERITED", - "default": "", - "required": "true", - "summary": "INHERITED", - "validation": "validate(len(trim($value$)) > 0, \"Value of argument 'value' may not be empty\")" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Updated successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "402": { - "summary": "The Splunk license in use has disabled this feature." - }, - "403": { - "summary": "Insufficient permissions to edit renames for the sourcetype." - }, - "404": { - "summary": "Rename for the sourcetype does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - }, - "503": { - "summary": "This feature has been disabled in Splunk configuration files." - } - }, - "summary": "Rename a sourcetype again, i.e. modify a sourcetype's new name.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - } - } - }, - "data/transforms/extractions": { - "methods": { - "GET": { - "config": "transforms", - "params": { - "count": { - "datatype": "Number", - "default": "30", - "required": "false", - "summary": "Indicates the maximum number of entries to return. To return all entries, specify 0.", - "validation": "" - }, - "offset": { - "datatype": "Number", - "default": "0", - "required": "false", - "summary": "Index for first item to return.", - "validation": "" - }, - "search": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Search expression to filter the response. The response matches field values against the search expression. For example:\n\nsearch=foo matches any object that has \"foo\" as a substring in a field.\nsearch=field_name%3Dfield_value restricts the match to a single field. URI-encoding is required in this example.", - "validation": "" - }, - "sort_dir": { - "datatype": "Enum", - "default": "asc", - "required": "false", - "summary": "Valid values: (asc | desc)\n\nIndicates whether to sort returned entries in ascending or descending order.", - "validation": "" - }, - "sort_key": { - "datatype": "String", - "default": "name", - "required": "false", - "summary": "Field to use for sorting.", - "validation": "" - }, - "sort_mode": { - "datatype": "Enum", - "default": "auto", - "required": "false", - "summary": "Valid values: (auto | alpha | alpha_case | num)\n\nIndicates the collating sequence for sorting the returned entries.\nauto: If all values of the field are numbers, collate numerically. Otherwise, collate alphabetically.\nalpha: Collate alphabetically.\nalpha_case: Collate alphabetically, case-sensitive.\nnum: Collate numerically.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view field transformations." 
- }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "List field transformations.", - "urlParams": {} - }, - "POST": { - "config": "transforms", - "params": { - "CAN_OPTIMIZE": { - "datatype": "Bool", - "default": "True", - "required": "false", - "summary": "Controls whether Splunk can optimize this extraction out (another way of saying the extraction is disabled). You might use this when you have field discovery turned off--it ensures that certain fields are *always* discovered. Splunk only disables an extraction if it can determine that none of the fields identified by the extraction will ever be needed for the successful evaluation of a search.\n\nNOTE: This option should rarely be set to false.", - "validation": "validate(is_bool($CAN_OPTIMIZE$), \"Value of argument 'CAN_OPTIMIZE' must be a boolean\")" - }, - "CLEAN_KEYS": { - "datatype": "Boolean", - "default": "True", - "required": "false", - "summary": "If set to true, Splunk \"cleans\" the field names extracted at search time by replacing non-alphanumeric characters with underscores and stripping leading underscores.", - "validation": "validate(is_bool($CLEAN_KEYS$), \"Value of argument 'CLEAN_KEYS' must be a boolean\")" - }, - "FORMAT": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "This option is valid for both index-time and search-time field extractions. However, FORMAT behaves differently depending on whether the extraction is performed at index time or search time.\n\nThis attribute specifies the format of the event, including any field names or values you want to add.\n\nFORMAT for index-time extractions:\n\nUse $n (for example $1, $2, etc) to specify the output of each REGEX match.\n\nIf REGEX does not have n groups, the matching fails.\n\nThe special identifier $0 represents what was in the DEST_KEY before the REGEX was performed.\n\nAt index-time only, you can use FORMAT to create concatenated fields: FORMAT = ipaddress::$1.$2.$3.$4\n\nWhen you create concatenated fields with FORMAT, \"$\" is the only special character. It is treated as a prefix for regex-capturing groups only if it is followed by a number and only if the number applies to an existing capturing group. So if REGEX has only one capturing group and its value is \"bar\", then:\n\\t\"FORMAT = foo$1\" yields \"foobar\"\n\\t\"FORMAT = foo$bar\" yields \"foo$bar\"\n\\t\"FORMAT = foo$1234\" yields \"foo$1234\"\n\\t\"FORMAT = foo$1\\\\$2\" yields \"foobar\\\\$2\"\n\nAt index-time, FORMAT defaults to ::$1\n\nFORMAT for search-time extractions:\n\nThe format of this field as used during search time extractions is as follows:\n\\tFORMAT = ::( ::)*\n\\tfield-name = [|$]\n\\tfield-value = [|$]\n\nSearch-time extraction examples:\n\\tFORMAT = first::$1 second::$2 third::other-value\n\\tFORMAT = $1::$2\n\nYou cannot create concatenated fields with FORMAT at search time. 
That functionality is only available at index time.\n\nAt search-time, FORMAT defaults to an empty string.", - "validation": "" - }, - "KEEP_EMPTY_VALS": { - "datatype": "Boolean", - "default": "False", - "required": "false", - "summary": "If set to true, Splunk preserves extracted fields with empty values.", - "validation": "validate(is_bool($KEEP_EMPTY_VALS$), \"Value of argument 'KEEP_EMPTY_VALS' must be a boolean\")" - }, - "MV_ADD": { - "datatype": "Boolean", - "default": "False", - "required": "false", - "summary": "If Splunk extracts a field that already exists and MV_ADD is set to true, the field becomes multivalued, and the newly-extracted value is appended. If MV_ADD is set to false, the newly-extracted value is discarded.", - "validation": "validate(is_bool($MV_ADD$), \"Value of argument 'MV_ADD' must be a boolean\")" - }, - "REGEX": { - "datatype": "String", - "default": "", - "required": "true", - "summary": "Specify a regular expression to operate on your data.\n\nThis attribute is valid for both index-time and search-time field extractions:\n\\tREGEX is required for all search-time transforms unless you are setting up a delimiter-based field extraction, in which case you use DELIMS (see the DELIMS attribute description, below).\n\\tREGEX is required for all index-time transforms.\n\nREGEX and the FORMAT attribute:\n\nName-capturing groups in the REGEX are extracted directly to fields. This means that you do not need to specify the FORMAT attribute for simple field extraction cases.\n\nIf the REGEX extracts both the field name and its corresponding field value, you can use the following special capturing groups if you want to skip specifying the mapping in FORMAT: _KEY_, _VAL_.\n\nFor example, the following are equivalent:\n\\tUsing FORMAT:\n\\t\\tREGEX = ([a-z]+)=([a-z]+)\n\\t\\tFORMAT = $1::$2\n\\tWithout using FORMAT\n\\t\\tREGEX = (?<_KEY_1>[a-z]+)=(?<_VAL_1>[a-z]+)\n\nREGEX defaults to an empty string.", - "validation": "" - }, - "SOURCE_KEY": { - "datatype": "String", - "default": "_raw", - "required": "true", - "summary": "Specify the KEY to which Splunk applies REGEX.", - "validation": "" - }, - "disabled": { - "datatype": "Boolean", - "default": "", - "required": "false", - "summary": "Specifies whether the field transformation is disabled.", - "validation": "validate(is_bool($disabled$), \"Value of argument 'disabled' must be a boolean\")" - }, - "name": { - "datatype": "String", - "default": "", - "required": "true", - "summary": "The name of the field transformation.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "201": { - "summary": "Created successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "402": { - "summary": "The Splunk license in use has disabled this feature." - }, - "403": { - "summary": "Insufficient permissions to create field transformation." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - }, - "503": { - "summary": "This feature has been disabled in Splunk configuration files." - } - }, - "summary": "Create a new field transformation.", - "urlParams": {} - } - }, - "summary": "Provides access to field transformations, i.e. field extraction definitions." 
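Because name-capturing groups in REGEX are extracted directly to fields, FORMAT can often be omitted when creating a transformation. A minimal sketch; the transformation name and pattern are illustrative:

```python
# POST data/transforms/extractions: REGEX and SOURCE_KEY are required.
# The named groups use Splunk-side PCRE syntax ((?<name>...)), not
# Python's re syntax; the string is passed through to splunkd as-is.
service.post("data/transforms/extractions",
             name="extract_kv",
             REGEX=r"(?<key>[a-z]+)=(?<value>[a-z]+)",
             SOURCE_KEY="_raw")
```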
- }, - "data/transforms/extractions/{name}": { - "methods": { - "DELETE": { - "config": "transforms", - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Deleted successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to delete named field transformation." - }, - "404": { - "summary": "Named field transformation does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Delete the named field transformation.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - }, - "GET": { - "config": "transforms", - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view named field transformation." - }, - "404": { - "summary": "Named field transformation does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "List a single field transformation.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - }, - "POST": { - "config": "transforms", - "params": { - "CAN_OPTIMIZE": { - "datatype": "INHERITED", - "default": "True", - "required": "false", - "summary": "INHERITED", - "validation": "validate(is_bool($CAN_OPTIMIZE$), \"Value of argument 'CAN_OPTIMIZE' must be a boolean\")" - }, - "CLEAN_KEYS": { - "datatype": "INHERITED", - "default": "True", - "required": "false", - "summary": "INHERITED", - "validation": "validate(is_bool($CLEAN_KEYS$), \"Value of argument 'CLEAN_KEYS' must be a boolean\")" - }, - "FORMAT": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "KEEP_EMPTY_VALS": { - "datatype": "INHERITED", - "default": "False", - "required": "false", - "summary": "INHERITED", - "validation": "validate(is_bool($KEEP_EMPTY_VALS$), \"Value of argument 'KEEP_EMPTY_VALS' must be a boolean\")" - }, - "MV_ADD": { - "datatype": "INHERITED", - "default": "False", - "required": "false", - "summary": "INHERITED", - "validation": "validate(is_bool($MV_ADD$), \"Value of argument 'MV_ADD' must be a boolean\")" - }, - "REGEX": { - "datatype": "INHERITED", - "default": "", - "required": "true", - "summary": "INHERITED", - "validation": "" - }, - "SOURCE_KEY": { - "datatype": "INHERITED", - "default": "_raw", - "required": "true", - "summary": "INHERITED", - "validation": "" - }, - "disabled": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "validate(is_bool($disabled$), \"Value of argument 'disabled' must be a boolean\")" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Updated successfully." - }, - "400": { - "summary": "Request error. See response body for details." 
- }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "402": { - "summary": "The Splunk license in use has disabled this feature." - }, - "403": { - "summary": "Insufficient permissions to edit named field transformation." - }, - "404": { - "summary": "Named field transformation does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - }, - "503": { - "summary": "This feature has been disabled in Splunk configuration files." - } - }, - "summary": "Modify the named field transformation.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - } - } - }, - "data/transforms/lookups": { - "methods": { - "GET": { - "config": "transforms", - "params": { - "count": { - "datatype": "Number", - "default": "30", - "required": "false", - "summary": "Indicates the maximum number of entries to return. To return all entries, specify 0.", - "validation": "" - }, - "offset": { - "datatype": "Number", - "default": "0", - "required": "false", - "summary": "Index for first item to return.", - "validation": "" - }, - "search": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Search expression to filter the response. The response matches field values against the search expression. For example:\n\nsearch=foo matches any object that has \"foo\" as a substring in a field.\nsearch=field_name%3Dfield_value restricts the match to a single field. URI-encoding is required in this example.", - "validation": "" - }, - "sort_dir": { - "datatype": "Enum", - "default": "asc", - "required": "false", - "summary": "Valid values: (asc | desc)\n\nIndicates whether to sort returned entries in ascending or descending order.", - "validation": "" - }, - "sort_key": { - "datatype": "String", - "default": "name", - "required": "false", - "summary": "Field to use for sorting.", - "validation": "" - }, - "sort_mode": { - "datatype": "Enum", - "default": "auto", - "required": "false", - "summary": "Valid values: (auto | alpha | alpha_case | num)\n\nIndicates the collating sequence for sorting the returned entries.\nauto: If all values of the field are numbers, collate numerically. Otherwise, collate alphabetically.\nalpha: Collate alphabetically.\nalpha_case: Collate alphabetically, case-sensitive.\nnum: Collate numerically.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view lookups." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." 
- } - }, - "summary": "List lookup definitions.", - "urlParams": {} - }, - "POST": { - "config": "transforms", - "params": { - "default_match": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "If min_matches is greater than zero and Splunk has less than min_matches for any given input, it provides this default_match value one or more times until the min_matches threshold is reached.", - "validation": "" - }, - "disabled": { - "datatype": "Boolean", - "default": "", - "required": "false", - "summary": "Specifies whether the lookup definition is disabled.", - "validation": "validate(is_bool($disabled$), \"Value of argument 'disabled' must be a boolean\")" - }, - "external_cmd": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Provides the command and arguments to invoke to perform a lookup. Use this for external (or \"scripted\") lookups, where you interface with with an external script rather than a lookup table.", - "validation": "" - }, - "fields_list": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "A comma- and space-delimited list of all fields that are supported by the external command. Use this for external (or \"scripted\") lookups.", - "validation": "" - }, - "filename": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "The name of the static lookup table file.", - "validation": "" - }, - "max_matches": { - "datatype": "Number", - "default": "", - "required": "false", - "summary": "The maximum number of possible matches for each input lookup value.", - "validation": "" - }, - "max_offset_secs": { - "datatype": "Number", - "default": "", - "required": "false", - "summary": "For temporal lookups, this is the maximum time (in seconds) that the event timestamp can be later than the lookup entry time for a match to occur.", - "validation": "" - }, - "min_matches": { - "datatype": "Number", - "default": "", - "required": "false", - "summary": "The minimum number of possible matches for each input lookup value.", - "validation": "" - }, - "min_offset_secs": { - "datatype": "Number", - "default": "", - "required": "false", - "summary": "For temporal lookups, this is the minimum time (in seconds) that the event timestamp can be later than the lookup entry timestamp for a match to occur.", - "validation": "" - }, - "name": { - "datatype": "String", - "default": "", - "required": "true", - "summary": "The name of the lookup definition.", - "validation": "" - }, - "time_field": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "For temporal lookups, this is the field in the lookup table that represents the timestamp.", - "validation": "" - }, - "time_format": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "For temporal lookups, this specifies the \"strptime\" format of the timestamp field.", - "validation": "validate(is_time_format($time_format$), \"Value of argument 'time_format' must be a time format string\")" - } - }, - "request": "", - "response": "", - "returns": { - "201": { - "summary": "Created successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "402": { - "summary": "The Splunk license in use has disabled this feature." - }, - "403": { - "summary": "Insufficient permissions to create lookup." 
- }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - }, - "503": { - "summary": "This feature has been disabled in Splunk configuration files." - } - }, - "summary": "Create a new lookup definition.", - "urlParams": {} - } - }, - "summary": "Provides access to lookup definitions in transforms.conf." - }, - "data/transforms/lookups/{name}": { - "methods": { - "DELETE": { - "config": "transforms", - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Deleted successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to delete named lookup." - }, - "404": { - "summary": "Named lookup does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Delete the named lookup definition.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - }, - "GET": { - "config": "transforms", - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view named lookup." - }, - "404": { - "summary": "Named lookup does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." 
- } - }, - "summary": "List a single lookup definition.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - }, - "POST": { - "config": "transforms", - "params": { - "default_match": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "disabled": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "validate(is_bool($disabled$), \"Value of argument 'disabled' must be a boolean\")" - }, - "external_cmd": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "fields_list": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "filename": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "max_matches": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "max_offset_secs": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "min_matches": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "min_offset_secs": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "time_field": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "time_format": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "validate(is_time_format($time_format$), \"Value of argument 'time_format' must be a time format string\")" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Updated successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "402": { - "summary": "The Splunk license in use has disabled this feature." - }, - "403": { - "summary": "Insufficient permissions to edit named lookup." - }, - "404": { - "summary": "Named lookup does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - }, - "503": { - "summary": "This feature has been disabled in Splunk configuration files." - } - }, - "summary": "Modify the named lookup definition.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - } - } - }, - "deployment/client": { - "methods": { - "GET": { - "config": "", - "params": { - "count": { - "datatype": "Number", - "default": "30", - "required": "false", - "summary": "Indicates the maximum number of entries to return. To return all entries, specify 0.", - "validation": "" - }, - "offset": { - "datatype": "Number", - "default": "0", - "required": "false", - "summary": "Index for first item to return.", - "validation": "" - }, - "search": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Search expression to filter the response. The response matches field values against the search expression. 
For example:\n\nsearch=foo matches any object that has \"foo\" as a substring in a field.\nsearch=field_name%3Dfield_value restricts the match to a single field. URI-encoding is required in this example.", - "validation": "" - }, - "sort_dir": { - "datatype": "Enum", - "default": "asc", - "required": "false", - "summary": "Valid values: (asc | desc)\n\nIndicates whether to sort returned entries in ascending or descending order.", - "validation": "" - }, - "sort_key": { - "datatype": "String", - "default": "name", - "required": "false", - "summary": "Field to use for sorting.", - "validation": "" - }, - "sort_mode": { - "datatype": "Enum", - "default": "auto", - "required": "false", - "summary": "Valid values: (auto | alpha | alpha_case | num)\n\nIndicates the collating sequence for sorting the returned entries.\nauto: If all values of the field are numbers, collate numerically. Otherwise, collate alphabetically.\nalpha: Collate alphabetically.\nalpha_case: Collate alphabetically, case-sensitive.\nnum: Collate numerically.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view deployment client status." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Returns the status of the deployment client in this Splunk instance, including the host/port of its deployment server, and which server classes it is a part of.\n\nA deployment client is a Splunk instance remotely configured by a deployment server. A Splunk instance can be both a deployment server and client at the same time. A Splunk deployment client belongs to one or more server classes.", - "urlParams": {} - } - }, - "summary": "Provides access to deployment client configuration and status." - }, - "deployment/client/{name}": { - "methods": { - "GET": { - "config": "", - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view deployment client." - }, - "404": { - "summary": "Deployment client does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Returns the configuration for the named deployment client. The only valid name here is \"deployment-client\". 
This is identical to accessing deployment/client without specifying a name.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - }, - "POST": { - "config": "", - "params": { - "disabled": { - "datatype": "Boolean", - "default": "", - "required": "false", - "summary": "If true, disables this deployment client.", - "validation": "" - }, - "targetUri": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "URI of the deployment server for this deployment client.\n\nInclude the management port the server is listening on. For example:\n\ndeployment_server_uri:mgmtPort\n\nThe default management port is 8089.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Updated successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "402": { - "summary": "The Splunk license in use has disabled this feature." - }, - "403": { - "summary": "Insufficient permissions to edit deployment client." - }, - "404": { - "summary": "Deployment client does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - }, - "503": { - "summary": "This feature has been disabled in Splunk configuration files." - } - }, - "summary": "Updates the configuration for this deployment client.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - } - } - }, - "deployment/client/{name}/reload": { - "methods": { - "GET": { - "config": "", - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Deployment client restarted successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to restart deployment client." - }, - "404": { - "summary": "Deployment client does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Restarts the deployment client, reloading configuration from disk.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - } - } - }, - "deployment/server": { - "methods": { - "GET": { - "config": "", - "params": { - "count": { - "datatype": "Number", - "default": "30", - "required": "false", - "summary": "Indicates the maximum number of entries to return. To return all entries, specify 0.", - "validation": "" - }, - "offset": { - "datatype": "Number", - "default": "0", - "required": "false", - "summary": "Index for first item to return.", - "validation": "" - }, - "search": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Search expression to filter the response. The response matches field values against the search expression. For example:\n\nsearch=foo matches any object that has \"foo\" as a substring in a field.\nsearch=field_name%3Dfield_value restricts the match to a single field. 
URI-encoding is required in this example.", - "validation": "" - }, - "sort_dir": { - "datatype": "Enum", - "default": "asc", - "required": "false", - "summary": "Valid values: (asc | desc)\n\nIndicates whether to sort returned entries in ascending or descending order.", - "validation": "" - }, - "sort_key": { - "datatype": "String", - "default": "name", - "required": "false", - "summary": "Field to use for sorting.", - "validation": "" - }, - "sort_mode": { - "datatype": "Enum", - "default": "auto", - "required": "false", - "summary": "Valid values: (auto | alpha | alpha_case | num)\n\nIndicates the collating sequence for sorting the returned entries.\nauto: If all values of the field are numbers, collate numerically. Otherwise, collate alphabetically.\nalpha: Collate alphabetically.\nalpha_case: Collate alphabetically, case-sensitive.\nnum: Collate numerically.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view all deployment server configurations." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Returns the configurations of all deployment servers.\n\nA deployment server is a Splunk instance that acts as a centralized configuration manager.\nDeployment clients poll the server periodically to retrieve configurations.", - "urlParams": {} - } - }, - "summary": "Provides access to the configurations of all deployment servers." - }, - "deployment/server/{name}": { - "methods": { - "GET": { - "config": "", - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view this deployment server configuration." - }, - "404": { - "summary": "Requested deployment server does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Get the configuration information for this deployment server.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - }, - "POST": { - "config": "", - "params": { - "check-new": { - "datatype": "Boolean", - "default": "", - "required": "false", - "summary": "If true, this deployment server reviews the information in its configuration to find out if there is something new or updated to push out to a deployment client.", - "validation": "" - }, - "disabled": { - "datatype": "Boolean", - "default": "", - "required": "false", - "summary": "If true, disables this deployment server.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Updated successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." 
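The deployment endpoints are mostly read-only status queries, and "deployment-client" is the only valid entity name under deployment/client. A sketch, assuming the instance is actually configured as a deployment client and/or server:

```python
# Status of this instance's deployment client (fixed entity name).
status = service.get("deployment/client/deployment-client")

# Restart the deployment client, reloading its configuration from disk.
service.get("deployment/client/deployment-client/reload")

# Configurations of all deployment servers known to this instance.
servers = service.get("deployment/server")
```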
- }, - "402": { - "summary": "The Splunk license in use has disabled this feature." - }, - "403": { - "summary": "Insufficient permissions to edit this deployment server configuration." - }, - "404": { - "summary": "Requested deployment server does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - }, - "503": { - "summary": "This feature has been disabled in Splunk configuration files." - } - }, - "summary": "Updates deployment server instance configuration", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - } - } - }, - "deployment/serverclass": { - "methods": { - "GET": { - "config": "", - "params": { - "count": { - "datatype": "Number", - "default": "30", - "required": "false", - "summary": "Indicates the maximum number of entries to return. To return all entries, specify 0.", - "validation": "" - }, - "offset": { - "datatype": "Number", - "default": "0", - "required": "false", - "summary": "Index for first item to return.", - "validation": "" - }, - "search": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Search expression to filter the response. The response matches field values against the search expression. For example:\n\nsearch=foo matches any object that has \"foo\" as a substring in a field.\nsearch=field_name%3Dfield_value restricts the match to a single field. URI-encoding is required in this example.", - "validation": "" - }, - "sort_dir": { - "datatype": "Enum", - "default": "asc", - "required": "false", - "summary": "Valid values: (asc | desc)\n\nIndicates whether to sort returned entries in ascending or descending order.", - "validation": "" - }, - "sort_key": { - "datatype": "String", - "default": "name", - "required": "false", - "summary": "Field to use for sorting.", - "validation": "" - }, - "sort_mode": { - "datatype": "Enum", - "default": "auto", - "required": "false", - "summary": "Valid values: (auto | alpha | alpha_case | num)\n\nIndicates the collating sequence for sorting the returned entries.\nauto: If all values of the field are numbers, collate numerically. Otherwise, collate alphabetically.\nalpha: Collate alphabetically.\nalpha_case: Collate alphabetically, case-sensitive.\nnum: Collate numerically.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view deployment server classes." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." 
- } - }, - "summary": "Lists all server classes defined for a deployment server.", - "urlParams": {} - }, - "POST": { - "config": "", - "params": { - "blacklist": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "used to blacklist hosts for this serverclass", - "validation": "" - }, - "blacklist.": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "used to blacklist hosts for this serverclass", - "validation": "" - }, - "blacklist.0": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Criteria used to identify deployment clients to disallow this server class", - "validation": "" - }, - "blacklist.1": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Criteria used to identify deployment clients to disallow this server class", - "validation": "" - }, - "blacklist.2": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Criteria used to identify deployment clients to disallow this server class", - "validation": "" - }, - "blacklist.3": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Criteria used to identify deployment clients to disallow this server class", - "validation": "" - }, - "blacklist.4": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Criteria used to identify deployment clients to disallow this server class", - "validation": "" - }, - "blacklist.5": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Criteria used to identify deployment clients to disallow this server class", - "validation": "" - }, - "blacklist.6": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Criteria used to identify deployment clients to disallow this server class", - "validation": "" - }, - "blacklist.7": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Criteria used to identify deployment clients to disallow this server class", - "validation": "" - }, - "blacklist.8": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Criteria used to identify deployment clients to disallow this server class", - "validation": "" - }, - "blacklist.9": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Criteria used to identify deployment clients to disallow this server class", - "validation": "" - }, - "continueMatching": { - "datatype": "Boolean", - "default": "", - "required": "false", - "summary": " Controls how configuration is layered across classes and server-specific settings.\n\nIf true, configuration lookups continue matching server classes, beyond the first match. If false, only the first match is used. Matching is done in the order that server classes are defined. Defaults to true.\n\nA serverClass can override this property and stop the matching.\n", - "validation": "" - }, - "endpoint": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Specify a URL template string, which specifies the endpoint from which content can be downloaded by a deployment client. The deployment client knows how to substitute the values of the variables in the URL. 
Any custom URL can also be supplied here as long as it uses the specified variables.\n\nThis attribute does not need to be specified unless you have a very specific need, for example: to acquire deployment application files from a third-party httpd, for extremely large environments.\n\nCan be overridden at the serverClass level.\n\nDefaults to $deploymentServerUri$/services/streams/deployment?name=$serverClassName$:$appName$", - "validation": "" - }, - "filterType": { - "datatype": "Enum", - "default": "", - "required": "false", - "summary": "Valid values: (whitelist | blacklist)\n\nDetermines the order of execution of filters. If filterType is whitelist, all whitelist filters are applied first, followed by blacklist filters. If filterType is blacklist, all blacklist filters are applied first, followed by whitelist filters.\n\nThe whitelist setting indicates a filtering strategy that pulls in a subset:\n\n* Items are not considered to match the server class by default.\n* Items that match any whitelist entry, and do not match any blacklist entry, are considered to match the server class.\n* Items that match any blacklist entry are not considered to match the server class, regardless of whitelist.\n\nThe blacklist setting indicates a filtering strategy that rules out a subset:\n\n* Items are considered to match the server class by default.\n* Items that match any blacklist entry, and do not match any whitelist entry, are considered to not match the server class.\n* Items that match any whitelist entry are considered to match the server class.\n\nMore briefly:\n\nwhitelist: default no-match -> whitelists enable -> blacklists disable
\nblacklist: default match -> blacklists disable-> whitelists enable\n\nYou can override this value at the serverClass and serverClass:app levels. If you specify whitelist at the global level, and then specify blacklist for an individual server class, the setting becomes blacklist for that server class, and you have to provide another filter in that server class definition to replace the one you overrode.", - "validation": "" - }, - "name": { - "datatype": "String", - "default": "", - "required": "true", - "summary": "The name of the server class.", - "validation": "" - }, - "repositoryLocation": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "The location on the deployment server to store the content that is to be deployed for this server class.\n\nFor example: $SPLUNK_HOME/etc/deployment-apps", - "validation": "" - }, - "targetRepositoryLocation": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "The location on the deployment client where the content to be deployed for this server class should be installed. \n\nYou can override this in deploymentclient.conf on the deployment client.", - "validation": "" - }, - "tmpFolder": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Working folder used by the deployment server.\n\nDefaults to $SPLUNK_HOME@OsDirSep@var@OsDirSep@run@OsDirSep@tmp", - "validation": "" - }, - "whitelist": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "list of hosts to accept for this serverclass", - "validation": "" - }, - "whitelist.": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "list of hosts to accept for this serverclass", - "validation": "" - }, - "whitelist.0": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Criteria used to identify deployment clients to allow access to this server class", - "validation": "" - }, - "whitelist.1": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Criteria used to identify deployment clients to allow access to this server class", - "validation": "" - }, - "whitelist.2": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Criteria used to identify deployment clients to allow access to this server class", - "validation": "" - }, - "whitelist.3": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Criteria used to identify deployment clients to allow access to this server class", - "validation": "" - }, - "whitelist.4": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Criteria used to identify deployment clients to allow access to this server class", - "validation": "" - }, - "whitelist.5": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Criteria used to identify deployment clients to allow access to this server class", - "validation": "" - }, - "whitelist.6": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Criteria used to identify deployment clients to allow access to this server class", - "validation": "" - }, - "whitelist.7": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Criteria used to identify deployment clients to allow access to this server class", - "validation": "" - }, - "whitelist.8": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Criteria used to identify deployment clients to allow 
access to this server class", - "validation": "" - }, - "whitelist.9": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Criteria used to identify deployment clients to allow access to this server class", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "201": { - "summary": "Created successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "402": { - "summary": "The Splunk license in use has disabled this feature." - }, - "403": { - "summary": "Insufficient permissions to create a deployment server class." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - }, - "503": { - "summary": "This feature has been disabled in Splunk configuration files." - } - }, - "summary": "Creates a server class.", - "urlParams": {} - } - }, - "summary": "Provides access to the configuration of a server class.\n\nA server class defines a deployment configuration shared by a group of deployment clients. It defines both the criteria for being a member of the class and the set of content to deploy to members of the class. This content (encapsulated as \"deployment apps\") can consist of Splunk apps, Splunk configurations, and other related content, such as scripts, images, and supporting material. You can define different server classes to reflect the different requirements, OSes, machine types, or functions of your deployment clients." - }, - "deployment/serverclass/{name}": { - "methods": { - "GET": { - "config": "", - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view deployment server class." - }, - "404": { - "summary": "Deployment server class does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." 
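Taken together, the GET and POST operations above are enough to script server class management. A minimal sketch using the SDK's generic REST methods, assuming a local instance; the credentials, class name, and whitelist pattern are placeholders, not values from this reference:

```python
import splunklib.client as client

# Placeholder connection details for a local Splunk instance.
service = client.connect(host="localhost", port=8089,
                         username="admin", password="changeme")

# GET deployment/serverclass: list every server class (count=0 removes the cap).
response = service.get("deployment/serverclass", count=0)
print(response.body.read())

# POST deployment/serverclass: create a class whose membership is driven by a
# whitelist entry. Dotted field names are not valid Python keywords, so they
# are passed via dictionary expansion.
service.post("deployment/serverclass",
             name="linux_forwarders",             # hypothetical class name
             filterType="whitelist",
             **{"whitelist.0": "*.example.com"})  # hypothetical client pattern
```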
- } - }, - "summary": "Returns information about this server class.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - }, - "POST": { - "config": "", - "params": { - "blacklist": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "blacklist.": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "blacklist.0": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "blacklist.1": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "blacklist.2": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "blacklist.3": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "blacklist.4": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "blacklist.5": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "blacklist.6": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "blacklist.7": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "blacklist.8": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "blacklist.9": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "continueMatching": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "endpoint": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "filterType": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "repositoryLocation": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "targetRepositoryLocation": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "tmpFolder": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "whitelist": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "whitelist.": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "whitelist.0": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "whitelist.1": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "whitelist.2": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "whitelist.3": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "whitelist.4": { - "datatype": "INHERITED", - "default": "", - "required": 
"false", - "summary": "INHERITED", - "validation": "" - }, - "whitelist.5": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "whitelist.6": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "whitelist.7": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "whitelist.8": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "whitelist.9": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Updated successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "402": { - "summary": "The Splunk license in use has disabled this feature." - }, - "403": { - "summary": "Insufficient permissions to edit deployment server class." - }, - "404": { - "summary": "Deployment server class does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - }, - "503": { - "summary": "This feature has been disabled in Splunk configuration files." - } - }, - "summary": "Creates a new server class.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - } - } - }, - "deployment/tenants": { - "methods": { - "GET": { - "config": "", - "params": { - "count": { - "datatype": "Number", - "default": "30", - "required": "false", - "summary": "Indicates the maximum number of entries to return. To return all entries, specify 0.", - "validation": "" - }, - "offset": { - "datatype": "Number", - "default": "0", - "required": "false", - "summary": "Index for first item to return.", - "validation": "" - }, - "search": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Search expression to filter the response. The response matches field values against the search expression. For example:\n\nsearch=foo matches any object that has \"foo\" as a substring in a field.\nsearch=field_name%3Dfield_value restricts the match to a single field. URI-encoding is required in this example.", - "validation": "" - }, - "sort_dir": { - "datatype": "Enum", - "default": "asc", - "required": "false", - "summary": "Valid values: (asc | desc)\n\nIndicates whether to sort returned entries in ascending or descending order.", - "validation": "" - }, - "sort_key": { - "datatype": "String", - "default": "name", - "required": "false", - "summary": "Field to use for sorting.", - "validation": "" - }, - "sort_mode": { - "datatype": "Enum", - "default": "auto", - "required": "false", - "summary": "Valid values: (auto | alpha | alpha_case | num)\n\nIndicates the collating sequence for sorting the returned entries.\nauto: If all values of the field are numbers, collate numerically. Otherwise, collate alphabetically.\nalpha: Collate alphabetically.\nalpha_case: Collate alphabetically, case-sensitive.\nnum: Collate numerically.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. 
See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view deployment tenants configuration." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Lists the multi-tenants configuration for this Splunk instance.\n\nMulti-tenants configuration is a type of deployment server topology where more than one deployment server is running on the same Splunk instance, and each of those deployment servers serves content to its own set of deployment clients.", - "urlParams": {} - } - }, - "summary": "Provides access to the multi-tenants configuration for this Splunk instance." - }, - "deployment/tenants/{name}": { - "methods": { - "GET": { - "config": "", - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view the deployment tenants configuration." - }, - "404": { - "summary": "Deployment tenants configuration does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Lists the configuration for this deployment server in a multi-tenant configuration.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - }, - "POST": { - "config": "", - "params": { - "check-new": { - "datatype": "Boolean", - "default": "", - "required": "false", - "summary": "If true, this deployment server in a multi-tenant configuration reviews the information in its configuration to find out if there is something new or updated to push out to a deployment client.", - "validation": "" - }, - "disabled": { - "datatype": "Boolean", - "default": "", - "required": "false", - "summary": "If true, disables this deployment server, which is in a multi-tenant configuration.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Updated successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "402": { - "summary": "The Splunk license in use has disabled this feature." - }, - "403": { - "summary": "Insufficient permissions to edit the deployment tenants configuration." - }, - "404": { - "summary": "Deployment tenants configuration does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - }, - "503": { - "summary": "This feature has been disabled in Splunk configuration files." 
- } - }, - "summary": "Updates the configuration for this deployment server in a multi-tenant configuration.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - } - } - }, - "directory": { - "methods": { - "GET": { - "config": "", - "params": { - "count": { - "datatype": "Number", - "default": "30", - "required": "false", - "summary": "Indicates the maximum number of entries to return. To return all entries, specify 0.", - "validation": "" - }, - "offset": { - "datatype": "Number", - "default": "0", - "required": "false", - "summary": "Index for first item to return.", - "validation": "" - }, - "search": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Search expression to filter the response. The response matches field values against the search expression. For example:\n\nsearch=foo matches any object that has \"foo\" as a substring in a field.\nsearch=field_name%3Dfield_value restricts the match to a single field. URI-encoding is required in this example.", - "validation": "" - }, - "sort_dir": { - "datatype": "Enum", - "default": "asc", - "required": "false", - "summary": "Valid values: (asc | desc)\n\nIndicates whether to sort returned entries in ascending or descending order.", - "validation": "" - }, - "sort_key": { - "datatype": "String", - "default": "name", - "required": "false", - "summary": "Field to use for sorting.", - "validation": "" - }, - "sort_mode": { - "datatype": "Enum", - "default": "auto", - "required": "false", - "summary": "Valid values: (auto | alpha | alpha_case | num)\n\nIndicates the collating sequence for sorting the returned entries.\nauto: If all values of the field are numbers, collate numerically. Otherwise, collate alphabetically.\nalpha: Collate alphabetically.\nalpha_case: Collate alphabetically, case-sensitive.\nnum: Collate numerically.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view user configurable objects." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Provides an enumeration of the following app scoped objects:\n\n* event types\n* saved searches\n* time configurations\n* views\n* navs\n* manager XML\n* quickstart XML\n* search commands\n* macros\n* tags\n* field extractions\n* lookups\n* workflow actions\n* field aliases\n* sourcetype renames\n\nThis is useful to see which apps provide which objects, or all the objects provided by a specific app. To change the visibility of an object type in this listing, use the showInDirSvc in restmap.conf.", - "urlParams": {} - } - }, - "summary": "Provides access to user configurable objects.\n\nThese objects includes search commands, UI views, UI navigation, saved searches and event types. This is useful to see which objects are provided by all apps, or a specific app when the call is namespaced. The specific configuration in restmap.conf is showInDirSvc.\n\n'''Note:''' This endpoint is new for Splunk 4.3. It replaces the deprecated endpoint accessible from /admin/directory." 
- }, - "directory/{name}": { - "methods": { - "GET": { - "config": "", - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view the user configurable object." - }, - "404": { - "summary": "User configurable object does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Displays information about a single entity in the directory service enumeration.\n\nThis is rarely used. Typically after using the directory service enumeration, a client follows the specific link for an object in an enumeration.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - } - } - }, - "indexing/preview": { - "methods": { - "GET": { - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - } - }, - "summary": "Return a list of all data preview jobs. Data returned includes the Splunk management URI to access each preview job.\n\nUse the data preview job ID as the search_id parameter in [[Documentation:Splunk:RESTAPI:RESTsearch#GET_search.2Fjobs.2F.7Bsearch_id.7D.2Fresults_preview|GET /search/jobs/{search_id}/results_preview]] to preview events from the source file.\n\n'''Note: ''' Use the POST operation of this endpoint to create a data preview job and return the corresponding data preview job ID.", - "urlParams": {} - }, - "POST": { - "params": { - "input.path": { - "datatype": "String", - "default": "", - "required": "True", - "summary": "The absolute file path to a local file that you want to preview data returned from indexing.", - "validation": "" - }, - "props.<props_attr>": { - "datatype": "String", - "default": "", - "required": "False", - "summary": "Define a new sourcetype in props.conf for preview data that you are indexing.\n\nTypically, you first examine preveiw data events returned from GET /search/jobs/{job_id}events. Then you define new sourcetypes as needed with this endpoint.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "201": { - "summary": "Created successfully." - } - }, - "summary": "Create a preview data job for the specified source file, returning the preview data job ID. Use the preview job ID as the search_id parameter in [[Documentation:Splunk:RESTAPI:RESTsearch#GET_search.2Fjobs.2F.7Bsearch_id.7D.2Fresults_preview|GET /search/jobs/{search_id}/results_preview]] to obtain a data preview.\n\nYou can optionally define sourcetypes for preview data job in props.conf.", - "urlParams": {} - } - }, - "summary": "Preview events from a source file before you index the file.\n\nTypically, you create a data preview job for a source file. Use the resulting data preview job ID as the search_id parameter in [[Documentation:Splunk:RESTAPI:RESTsearch#GET_search.2Fjobs.2F.7Bsearch_id.7D.2Fresults_preview|GET /search/jobs/{search_id}/results_preview]] to preview events that would be generated from indexing the source file.\n\nYou can also check the status of a data preview job with GET /search/jobs/{search_id} to obtain information such as the dispatchState, doneProgress, and eventCount. 
For more information, see [[Documentation:Splunk:RESTAPI:RESTsearch#GET_search.2Fjobs.2F.7Bsearch_id.7D|GET /search/jobs/{search_id}]].\n\n'''Note:''' This endpoint is new in Splunk 4.3." - }, - "indexing/preview/{job_id}": { - "methods": { - "GET": { - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "404": { - "summary": "Specified job ID does not exist." - } - }, - "summary": "Returns the props.conf settings for the data preview job specified by {job_id}.", - "urlParams": { - "job_id": { - "required": "true", - "summary": "job_id" - } - } - } - } - }, - "licenser/groups": { - "methods": { - "GET": { - "config": "", - "params": { - "count": { - "datatype": "Number", - "default": "30", - "required": "false", - "summary": "Indicates the maximum number of entries to return. To return all entries, specify 0.", - "validation": "" - }, - "offset": { - "datatype": "Number", - "default": "0", - "required": "false", - "summary": "Index for first item to return.", - "validation": "" - }, - "search": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Search expression to filter the response. The response matches field values against the search expression. For example:\n\nsearch=foo matches any object that has \"foo\" as a substring in a field.\nsearch=field_name%3Dfield_value restricts the match to a single field. URI-encoding is required in this example.", - "validation": "" - }, - "sort_dir": { - "datatype": "Enum", - "default": "asc", - "required": "false", - "summary": "Valid values: (asc | desc)\n\nIndicates whether to sort returned entries in ascending or descending order.", - "validation": "" - }, - "sort_key": { - "datatype": "String", - "default": "name", - "required": "false", - "summary": "Field to use for sorting.", - "validation": "" - }, - "sort_mode": { - "datatype": "Enum", - "default": "auto", - "required": "false", - "summary": "Valid values: (auto | alpha | alpha_case | num)\n\nIndicates the collating sequence for sorting the returned entries.\nauto: If all values of the field are numbers, collate numerically. Otherwise, collate alphabetically.\nalpha: Collate alphabetically.\nalpha_case: Collate alphabetically, case-sensitive.\nnum: Collate numerically.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view licenser groups." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Lists all licenser groups.", - "urlParams": {} - } - }, - "summary": "Provides access to the configuration of licenser groups.\n\nA licenser group contains one or more licenser stacks that can operate concurrently. Only one licenser group is active at any given time" - }, - "licenser/groups/{name}": { - "methods": { - "GET": { - "config": "", - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." 
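The preview workflow described above is a two-step round trip: POST indexing/preview returns a data preview job ID, which then serves as the search_id for GET /search/jobs/{search_id}/results_preview. A minimal sketch; the file path and the sourcetype override are placeholders:

```python
import splunklib.client as client

service = client.connect(host="localhost", port=8089,
                         username="admin", password="changeme")

# POST indexing/preview: create a preview job for a local file, overriding the
# sourcetype through a props.<props_attr> parameter.
response = service.post(
    "indexing/preview",
    **{"input.path": "/var/log/sample.log",   # placeholder file path
       "props.sourcetype": "my_sourcetype"})  # hypothetical sourcetype
# The Atom response carries the preview job ID; parse it out, then fetch
# preview events with GET search/jobs/{search_id}/results_preview.
print(response.body.read())
```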
- }, - "403": { - "summary": "Insufficient permissions to view licenser groups." - }, - "404": { - "summary": "Licenser groups does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Lists a specific licenser group. A licenser group contains one or more licenser stacks that can operate concurrently. Only one licenser group is active at any given time", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - }, - "POST": { - "config": "", - "params": { - "is_active": { - "datatype": "Boolean", - "default": "", - "required": "true", - "summary": "Active specific licenser group", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Updated successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "402": { - "summary": "The Splunk license in use has disabled this feature." - }, - "403": { - "summary": "Insufficient permissions to edit licenser group." - }, - "404": { - "summary": "Licenser group does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - }, - "503": { - "summary": "This feature has been disabled in Splunk configuration files." - } - }, - "summary": "Activates specific licenser group with the side effect of deactivating the previously active one.\n\nThere can only be a single active licenser group for a given instance of Splunk. Use this to switch between, for example, free to enterprise, or download-trial to free.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - } - } - }, - "licenser/licenses": { - "methods": { - "GET": { - "config": "", - "params": { - "count": { - "datatype": "Number", - "default": "30", - "required": "false", - "summary": "Indicates the maximum number of entries to return. To return all entries, specify 0.", - "validation": "" - }, - "offset": { - "datatype": "Number", - "default": "0", - "required": "false", - "summary": "Index for first item to return.", - "validation": "" - }, - "search": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Search expression to filter the response. The response matches field values against the search expression. For example:\n\nsearch=foo matches any object that has \"foo\" as a substring in a field.\nsearch=field_name%3Dfield_value restricts the match to a single field. URI-encoding is required in this example.", - "validation": "" - }, - "sort_dir": { - "datatype": "Enum", - "default": "asc", - "required": "false", - "summary": "Valid values: (asc | desc)\n\nIndicates whether to sort returned entries in ascending or descending order.", - "validation": "" - }, - "sort_key": { - "datatype": "String", - "default": "name", - "required": "false", - "summary": "Field to use for sorting.", - "validation": "" - }, - "sort_mode": { - "datatype": "Enum", - "default": "auto", - "required": "false", - "summary": "Valid values: (auto | alpha | alpha_case | num)\n\nIndicates the collating sequence for sorting the returned entries.\nauto: If all values of the field are numbers, collate numerically. 
Otherwise, collate alphabetically.\nalpha: Collate alphabetically.\nalpha_case: Collate alphabetically, case-sensitive.\nnum: Collate numerically.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view licenses." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Lists all licenses that have been added. Only a subset of these licenses may be active; however, this simply lists all licenses in every stack/group, regardless of which group is active.", - "urlParams": {} - }, - "POST": { - "config": "", - "params": { - "name": { - "datatype": "string", - "default": "", - "required": "true", - "summary": "Path to license file on server. If the payload parameter is specified, the name parameter is ignored.", - "validation": "" - }, - "payload": { - "datatype": "string", - "default": "", - "required": "false", - "summary": "String representation of license, encoded in XML", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "201": { - "summary": "Created successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "402": { - "summary": "The Splunk license in use has disabled this feature." - }, - "403": { - "summary": "Insufficient permissions to add a license." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - }, - "503": { - "summary": "This feature has been disabled in Splunk configuration files." - } - }, - "summary": "Add a license entitlement to this instance.", - "urlParams": {} - } - }, - "summary": "Provides access to the licenses for this Splunk instance.\n\nA license enables various features for a Splunk instance, including but not limited to indexing quota, auth, search, forwarding, and so forth." - }, - "licenser/licenses/{name}": { - "methods": { - "DELETE": { - "config": "", - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Deleted successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to delete license." - }, - "404": { - "summary": "License does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Delete the license with hash corresponding to {name}.\n\nNOTE: You cannot delete the last license out of an active group.
First, deactivate the group (by switching to another group) and then perform the delete.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - }, - "GET": { - "config": "", - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view license." - }, - "404": { - "summary": "License does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "List attributes of specific license. The {name} portion of URL is actually the hash of the license payload.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - } - } - }, - "licenser/messages": { - "methods": { - "GET": { - "config": "", - "params": { - "count": { - "datatype": "Number", - "default": "30", - "required": "false", - "summary": "Indicates the maximum number of entries to return. To return all entries, specify 0.", - "validation": "" - }, - "offset": { - "datatype": "Number", - "default": "0", - "required": "false", - "summary": "Index for first item to return.", - "validation": "" - }, - "search": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Search expression to filter the response. The response matches field values against the search expression. For example:\n\nsearch=foo matches any object that has \"foo\" as a substring in a field.\nsearch=field_name%3Dfield_value restricts the match to a single field. URI-encoding is required in this example.", - "validation": "" - }, - "sort_dir": { - "datatype": "Enum", - "default": "asc", - "required": "false", - "summary": "Valid values: (asc | desc)\n\nIndicates whether to sort returned entries in ascending or descending order.", - "validation": "" - }, - "sort_key": { - "datatype": "String", - "default": "name", - "required": "false", - "summary": "Field to use for sorting.", - "validation": "" - }, - "sort_mode": { - "datatype": "Enum", - "default": "auto", - "required": "false", - "summary": "Valid values: (auto | alpha | alpha_case | num)\n\nIndicates the collating sequence for sorting the returned entries.\nauto: If all values of the field are numbers, collate numerically. Otherwise, collate alphabetically.\nalpha: Collate alphabetically.\nalpha_case: Collate alphabetically, case-sensitive.\nnum: Collate numerically.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view licenser messages." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." 
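The license endpoints above follow a single pattern: GET enumerates every installed license regardless of which group is active, POST installs one from a file already on the server, and DELETE (keyed by payload hash) removes one. A minimal sketch with placeholder credentials and license path:

```python
import splunklib.client as client

service = client.connect(host="localhost", port=8089,
                         username="admin", password="changeme")

# GET licenser/licenses: list all licenses across every stack/group.
print(service.get("licenser/licenses", count=0).body.read())

# POST licenser/licenses: add a license from a file on the server.
service.post("licenser/licenses",
             name="/opt/splunk/etc/licenses/enterprise.lic")  # placeholder path
```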
- } - }, - "summary": "Lists all messages/alerts/persisted warnings for this node.", - "urlParams": {} - } - }, - "summary": "Provides access to licenser messages.\n\nMessages may range from helpful warnings about being close to violations, licenses expiring or more severe alerts regarding overages and exceeding license warning window." - }, - "licenser/messages/{name}": { - "methods": { - "GET": { - "config": "", - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view licenser messages." - }, - "404": { - "summary": "Licenser message does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "List specific message whose msgId corresponds to {name} component.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - } - } - }, - "licenser/pools": { - "methods": { - "GET": { - "config": "server", - "params": { - "count": { - "datatype": "Number", - "default": "30", - "required": "false", - "summary": "Indicates the maximum number of entries to return. To return all entries, specify 0.", - "validation": "" - }, - "offset": { - "datatype": "Number", - "default": "0", - "required": "false", - "summary": "Index for first item to return.", - "validation": "" - }, - "search": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Search expression to filter the response. The response matches field values against the search expression. For example:\n\nsearch=foo matches any object that has \"foo\" as a substring in a field.\nsearch=field_name%3Dfield_value restricts the match to a single field. URI-encoding is required in this example.", - "validation": "" - }, - "sort_dir": { - "datatype": "Enum", - "default": "asc", - "required": "false", - "summary": "Valid values: (asc | desc)\n\nIndicates whether to sort returned entries in ascending or descending order.", - "validation": "" - }, - "sort_key": { - "datatype": "String", - "default": "name", - "required": "false", - "summary": "Field to use for sorting.", - "validation": "" - }, - "sort_mode": { - "datatype": "Enum", - "default": "auto", - "required": "false", - "summary": "Valid values: (auto | alpha | alpha_case | num)\n\nIndicates the collating sequence for sorting the returned entries.\nauto: If all values of the field are numbers, collate numerically. Otherwise, collate alphabetically.\nalpha: Collate alphabetically.\nalpha_case: Collate alphabetically, case-sensitive.\nnum: Collate numerically.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view licenser pools." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Enumerates all pools. 
A pool logically partitions the daily volume entitlements of a stack. You can use a pool to divide license privileges amongst multiple slaves", - "urlParams": {} - }, - "POST": { - "config": "server", - "params": { - "description": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "description of this pool", - "validation": "" - }, - "name": { - "datatype": "String", - "default": "", - "required": "true", - "summary": "Edit the properties of the specified pool", - "validation": "" - }, - "quota": { - "datatype": "String", - "default": "", - "required": "true", - "summary": "Defines the byte quota of this pool.\n\nValid values:\n\nMAX: maximum amount allowed by the license. You can only have one pool with MAX size in a stack.\n\nNumber[MB|GB]: Specify a specific size. For example, 552428800, or simply specify 50MB.", - "validation": "" - }, - "slaves": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Comma-separated list of slaveids that are members of this pool, or '*' to accept all slaves.\n\nYou can also specify a comma-separated list guids to specify slaves that can connect to this pool.", - "validation": "" - }, - "stack_id": { - "datatype": "Enum", - "default": "", - "required": "true", - "summary": "Valid values: (download-trial | enterprise | forwarder | free)\n\nStack ID of the stack corresponding to this pool", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "201": { - "summary": "Created successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "402": { - "summary": "The Splunk license in use has disabled this feature." - }, - "403": { - "summary": "Insufficient permissions to create licenser pools." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - }, - "503": { - "summary": "This feature has been disabled in Splunk configuration files." - } - }, - "summary": "Create a license pool.", - "urlParams": {} - } - }, - "summary": "Provides access to the licenser pools configuration.\n\nA pool logically partitions the daily volume entitlements of a stack. You can use a license pool to divide license privileges amongst multiple slaves" - }, - "licenser/pools/{name}": { - "methods": { - "DELETE": { - "config": "server", - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Deleted successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to delete licenser pool." - }, - "404": { - "summary": "Licenser pool does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Delete specified pool. Deleting pools is not supported for every pool. Certain stacks have fixed pools which cannot be deleted.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - }, - "GET": { - "config": "server", - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." 
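Creating a pool therefore needs only a name, a quota, and the stack_id of the stack being partitioned; the slaves parameter widens or narrows who may draw from it. A minimal sketch; the pool name and quota are placeholders:

```python
import splunklib.client as client

service = client.connect(host="localhost", port=8089,
                         username="admin", password="changeme")

# POST licenser/pools: carve a 50 MB pool out of the enterprise stack and
# let any slave draw from it.
service.post("licenser/pools",
             name="dev_pool",        # hypothetical pool name
             quota="50MB",
             stack_id="enterprise",
             slaves="*")
```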
- }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view licenser pools." - }, - "404": { - "summary": "Licenser pool does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Lists details of the pool specified by {name}.\n\nA pool logically partitions the daily volume entitlements of a stack. A pool can be used to divide license privileges amongst multiple slaves", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - }, - "POST": { - "config": "server", - "params": { - "append_slaves": { - "datatype": "Boolean", - "default": "", - "required": "false", - "summary": "Flag which controls whether newly specified slaves will be appended to existing slaves list or overwritten", - "validation": "" - }, - "description": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "quota": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "slaves": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Updated successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "402": { - "summary": "The Splunk license in use has disabled this feature." - }, - "403": { - "summary": "Insufficient permissions to edit licenser pool." - }, - "404": { - "summary": "Licenser pool does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - }, - "503": { - "summary": "This feature has been disabled in Splunk configuration files." - } - }, - "summary": "Edit properties of the pool specified by {name}.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - } - } - }, - "licenser/slaves": { - "methods": { - "GET": { - "config": "", - "params": { - "count": { - "datatype": "Number", - "default": "30", - "required": "false", - "summary": "Indicates the maximum number of entries to return. To return all entries, specify 0.", - "validation": "" - }, - "offset": { - "datatype": "Number", - "default": "0", - "required": "false", - "summary": "Index for first item to return.", - "validation": "" - }, - "poolid": { - "datatype": "n/a", - "default": "", - "required": "false", - "summary": "Do not use.", - "validation": "" - }, - "search": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Search expression to filter the response. The response matches field values against the search expression. For example:\n\nsearch=foo matches any object that has \"foo\" as a substring in a field.\nsearch=field_name%3Dfield_value restricts the match to a single field. 
URI-encoding is required in this example.", - "validation": "" - }, - "sort_dir": { - "datatype": "Enum", - "default": "asc", - "required": "false", - "summary": "Valid values: (asc | desc)\n\nIndicates whether to sort returned entries in ascending or descending order.", - "validation": "" - }, - "sort_key": { - "datatype": "String", - "default": "name", - "required": "false", - "summary": "Field to use for sorting.", - "validation": "" - }, - "sort_mode": { - "datatype": "Enum", - "default": "auto", - "required": "false", - "summary": "Valid values: (auto | alpha | alpha_case | num)\n\nIndicates the collating sequence for sorting the returned entries.\nauto: If all values of the field are numbers, collate numerically. Otherwise, collate alphabetically.\nalpha: Collate alphabetically.\nalpha_case: Collate alphabetically, case-sensitive.\nnum: Collate numerically.", - "validation": "" - }, - "stackid": { - "datatype": "string", - "default": "", - "required": "false", - "summary": "Do not use.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view license slaves." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "List all slaves registered to this license master. Any slave that attempts to connect to master is reported, regardless of whether it is allocated to a master licenser pool.", - "urlParams": {} - } - }, - "summary": "Provides access to slaves reporting to this license master." - }, - "licenser/slaves/{name}": { - "methods": { - "GET": { - "config": "", - "params": { - "poolid": { - "datatype": "Do not use.", - "default": "", - "required": "false", - "summary": "Do not use.", - "validation": "" - }, - "stackid": { - "datatype": "string", - "default": "", - "required": "false", - "summary": "do not use", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view license slave." - }, - "404": { - "summary": "License slave does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "List attributes of slave specified by {name}.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - } - } - }, - "licenser/stacks": { - "methods": { - "GET": { - "config": "", - "params": { - "count": { - "datatype": "Number", - "default": "30", - "required": "false", - "summary": "Indicates the maximum number of entries to return. 
To return all entries, specify 0.", - "validation": "" - }, - "offset": { - "datatype": "Number", - "default": "0", - "required": "false", - "summary": "Index for first item to return.", - "validation": "" - }, - "search": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Search expression to filter the response. The response matches field values against the search expression. For example:\n\nsearch=foo matches any object that has \"foo\" as a substring in a field.\nsearch=field_name%3Dfield_value restricts the match to a single field. URI-encoding is required in this example.", - "validation": "" - }, - "sort_dir": { - "datatype": "Enum", - "default": "asc", - "required": "false", - "summary": "Valid values: (asc | desc)\n\nIndicates whether to sort returned entries in ascending or descending order.", - "validation": "" - }, - "sort_key": { - "datatype": "String", - "default": "name", - "required": "false", - "summary": "Field to use for sorting.", - "validation": "" - }, - "sort_mode": { - "datatype": "Enum", - "default": "auto", - "required": "false", - "summary": "Valid values: (auto | alpha | alpha_case | num)\n\nIndicates the collating sequence for sorting the returned entries.\nauto: If all values of the field are numbers, collate numerically. Otherwise, collate alphabetically.\nalpha: Collate alphabetically.\nalpha_case: Collate alphabetically, case-sensitive.\nnum: Collate numerically.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view license stacks." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Enumerate all license stacks.", - "urlParams": {} - } - }, - "summary": "Provides access to the license stack configuration.\n\nA license stack is comprised of one or more licenses of the same \"type\". The daily indexing quota of a license stack is additive, so a stack represents the aggregate entitlement for a collection of licenses." - }, - "licenser/stacks/{name}": { - "methods": { - "GET": { - "config": "", - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view license stacks." - }, - "404": { - "summary": "License stack does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Retrieve details of specific license stacks. A license stack is comprised of one or more licenses of the same \"type\". 
The daily indexing quota of a license stack is additive, so a stack represents the aggregate entitlement for a collection of licenses.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - } - } - }, - "messages": { - "methods": { - "GET": { - "config": "", - "params": { - "count": { - "datatype": "Number", - "default": "30", - "required": "false", - "summary": "Indicates the maximum number of entries to return. To return all entries, specify 0.", - "validation": "" - }, - "offset": { - "datatype": "Number", - "default": "0", - "required": "false", - "summary": "Index for first item to return.", - "validation": "" - }, - "search": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Search expression to filter the response. The response matches field values against the search expression. For example:\n\nsearch=foo matches any object that has \"foo\" as a substring in a field.\nsearch=field_name%3Dfield_value restricts the match to a single field. URI-encoding is required in this example.", - "validation": "" - }, - "sort_dir": { - "datatype": "Enum", - "default": "asc", - "required": "false", - "summary": "Valid values: (asc | desc)\n\nIndicates whether to sort returned entries in ascending or descending order.", - "validation": "" - }, - "sort_key": { - "datatype": "String", - "default": "name", - "required": "false", - "summary": "Field to use for sorting.", - "validation": "" - }, - "sort_mode": { - "datatype": "Enum", - "default": "auto", - "required": "false", - "summary": "Valid values: (auto | alpha | alpha_case | num)\n\nIndicates the collating sequence for sorting the returned entries.\nauto: If all values of the field are numbers, collate numerically. Otherwise, collate alphabetically.\nalpha: Collate alphabetically.\nalpha_case: Collate alphabetically, case-sensitive.\nnum: Collate numerically.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view messages." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Enumerate all systemwide messages. This is typically used for splunkd to advertise issues such as license quotas, license expirations, misconfigured indexes, and disk space.", - "urlParams": {} - }, - "POST": { - "config": "", - "params": { - "name": { - "datatype": "String", - "default": "", - "required": "true", - "summary": "The primary key of this message.", - "validation": "" - }, - "value": { - "datatype": "String", - "default": "", - "required": "true", - "summary": "The text of the message.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "201": { - "summary": "Created successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "402": { - "summary": "The Splunk license in use has disabled this feature." - }, - "403": { - "summary": "Insufficient permissions to create message." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. 
See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - }, - "503": { - "summary": "This feature has been disabled in Splunk configuration files." - } - }, - "summary": "Create a persistent message displayed at /services/messages.", - "urlParams": {} - } - }, - "summary": "Provides access to Splunk system messages. Most messages are created by splunkd to inform the user of system problems.\n\nSplunk Web typically displays these as bulletin board messages." - }, - "messages/{name}": { - "methods": { - "DELETE": { - "config": "", - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Deleted successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to delete message." - }, - "404": { - "summary": "Message does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Deletes a message identified by {name}.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - }, - "GET": { - "config": "", - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view message." - }, - "404": { - "summary": "Message does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Get the entry corresponding to a single message identified by {name}.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - } - } - }, - "properties": { - "methods": { - "GET": { - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - } - }, - "summary": "Returns a list of configurations that are saved in configuration files.", - "urlParams": {} - }, - "POST": { - "params": { - "__conf": { - "datatype": "String", - "default": "", - "required": "True", - "summary": "The name of the configuration file to create.\n\nNote: Double underscore before conf.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "201": { - "summary": "Created successfully." - }, - "400": { - "summary": "Request error. See response body for details." - } - }, - "summary": "Creates a new configuration file.", - "urlParams": {} - } - }, - "summary": "Provides access to configuration files.\n\nRefer to [[Documentation:Splunk:RESTAPI:RESTconfigurations|Accessing and updating Splunk configurations]] for a comparison of these endpoints with the configs/conf-{file} endpoints.\n\n'''Note: ''' The DELETE operation from the properties endpoint is deprecated and will be removed from future releases. Instead, use the DELETE operation from the [[Documentation:Splunk:RESTAPI:RESTconfig#DELETE_configs.2Fconf-.7Bfile.7D.2F.7Bname.7D|configs/conf-{file}/{name} endpoint]]."
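For reference, the messages endpoints above are exposed in this SDK through the service's `messages` collection. A minimal sketch, assuming a local Splunk instance at localhost:8089 with placeholder admin credentials; the message name and text are made up for illustration:

```python
import splunklib.client as client

# Placeholder connection details for a local test instance.
service = client.connect(
    host="localhost", port=8089,
    username="admin", password="changeme")

# POST messages: create a persistent system message (name + value).
service.messages.create("sdk_test_message", value="Hello from the SDK")

# GET messages/{name}: read the message back.
print(service.messages["sdk_test_message"].value)

# DELETE messages/{name}: remove it again.
service.messages.delete("sdk_test_message")
```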
- }, - "properties/{file_name}": { - "methods": { - "GET": { - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "404": { - "summary": "Named file does not exist." - } - }, - "summary": "Returns a list of stanzas in the configuration file specified by {name}.", - "urlParams": { - "file_name": { - "required": "true", - "summary": "file_name" - } - } - }, - "POST": { - "params": { - "__stanza": { - "datatype": "String", - "default": "", - "required": "True", - "summary": "The name of the stanza to create.\n\nNote: Double underscore before stanza.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "201": { - "summary": "Stanza created successfully." - }, - "303": { - "summary": "Stanza already exists." - }, - "400": { - "summary": "Request error. See response body for details." - } - }, - "summary": "Creates a new stanza in the configuratin file specified by {name}.", - "urlParams": { - "file_name": { - "required": "true", - "summary": "file_name" - } - } - } - } - }, - "properties/{file_name}/{stanza_name}": { - "methods": { - "GET": { - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "404": { - "summary": "Stanza does not exist." - } - }, - "summary": "Returns the configuration values for the stanza represented by {stanza_name} in the configuration file specified by {file_name}.", - "urlParams": { - "file_name": { - "required": "true", - "summary": "file_name" - }, - "stanza_name": { - "required": "true", - "summary": "stanza_name" - } - } - }, - "POST": { - "params": { - "<key_name>": { - "datatype": "String", - "default": "", - "required": "true", - "summary": "Specifies a key/value pair to update.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Updated successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "404": { - "summary": "Stanza does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See returned XML for explanation." - } - }, - "summary": "Adds or updates key/value pairs in the specified stanza. One or more key/value pairs may be passed at one time to this endpoint.", - "urlParams": { - "file_name": { - "required": "true", - "summary": "file_name" - }, - "stanza_name": { - "required": "true", - "summary": "stanza_name" - } - } - } - } - }, - "properties/{file_name}/{stanza_name}/{key_name}": { - "methods": { - "GET": { - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "404": { - "summary": "Key in the stanza does not exist." - } - }, - "summary": "Returns the value of the key in plain text for specified stanza and configuration file.", - "urlParams": { - "file_name": { - "required": "true", - "summary": "file_name" - }, - "key_name": { - "required": "true", - "summary": "key_name" - }, - "stanza_name": { - "required": "true", - "summary": "stanza_name" - } - } - }, - "POST": { - "params": { - "value": { - "datatype": "String", - "default": "", - "required": "true", - "summary": "The value to set for the named key in this named stanza in the named configuration file.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Updated successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "404": { - "summary": "Key does not exist in the stanza." 
- }, - "409": { - "summary": "Request error: this operation is invalid for this item. See returned XML for explanation." - } - }, - "summary": "Update an existing key value.", - "urlParams": { - "file_name": { - "required": "true", - "summary": "file_name" - }, - "key_name": { - "required": "true", - "summary": "key_name" - }, - "stanza_name": { - "required": "true", - "summary": "stanza_name" - } - } - } - } - }, - "receivers/simple": { - "methods": { - "POST": { - "params": { - "<arbitrary_data>": { - "datatype": "String", - "default": "", - "required": "true", - "summary": "Raw event text. This will be the entirety of the HTTP request body.", - "validation": "" - }, - "host": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "The value to populate in the host field for events from this data input.", - "validation": "" - }, - "host_regex": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "A regular expression used to extract the host value from each event.", - "validation": "" - }, - "index": { - "datatype": "String", - "default": "default", - "required": "false", - "summary": "The index to send events from this input to.", - "validation": "" - }, - "source": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "The source value to fill in the metadata for this input's events.", - "validation": "" - }, - "sourcetype": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "The sourcetype to apply to events from this input.", - "validation": "" - } - }, - "request": "Note that all metadata is specified via GET parameters.", - "response": "", - "returns": { - "200": { - "summary": "Data accepted." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "404": { - "summary": "Receiver does not exist." - } - }, - "summary": "Create events from the contents contained in the HTTP body.", - "urlParams": {} - } - }, - "summary": "Allows for sending events to Splunk in an HTTP request." - }, - "receivers/stream": { - "methods": { - "POST": { - "params": { - "<data_stream>": { - "datatype": "String", - "default": "", - "required": "true", - "summary": "Raw event text. This does not need to be presented as a complete HTTP request, but can be streamed in as data is available.", - "validation": "" - }, - "host": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "The value to populate in the host field for events from this data input.", - "validation": "" - }, - "host_regex": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "A regular expression used to extract the host value from each event.", - "validation": "" - }, - "index": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "The index to send events from this input to.", - "validation": "" - }, - "source": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "The source value to fill in the metadata for this input's events.", - "validation": "" - }, - "sourcetype": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "The sourcetype to apply to events from this input.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Data accepted." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "404": { - "summary": "Receiver does not exist." 
- } - }, - "summary": "Create events from the stream of data following HTTP headers.", - "urlParams": {} - } - }, - "summary": "Opens a socket for streaming events to Splunk." - }, - "saved/eventtypes": { - "methods": { - "GET": { - "config": "eventtypes", - "params": { - "count": { - "datatype": "Number", - "default": "30", - "required": "false", - "summary": "Indicates the maximum number of entries to return. To return all entries, specify 0.", - "validation": "" - }, - "offset": { - "datatype": "Number", - "default": "0", - "required": "false", - "summary": "Index for first item to return.", - "validation": "" - }, - "search": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Search expression to filter the response. The response matches field values against the search expression. For example:\n\nsearch=foo matches any object that has \"foo\" as a substring in a field.\nsearch=field_name%3Dfield_value restricts the match to a single field. URI-encoding is required in this example.", - "validation": "" - }, - "sort_dir": { - "datatype": "Enum", - "default": "asc", - "required": "false", - "summary": "Valid values: (asc | desc)\n\nIndicates whether to sort returned entries in ascending or descending order.", - "validation": "" - }, - "sort_key": { - "datatype": "String", - "default": "name", - "required": "false", - "summary": "Field to use for sorting.", - "validation": "" - }, - "sort_mode": { - "datatype": "Enum", - "default": "auto", - "required": "false", - "summary": "Valid values: (auto | alpha | alpha_case | num)\n\nIndicates the collating sequence for sorting the returned entries.\nauto: If all values of the field are numbers, collate numerically. Otherwise, collate alphabetically.\nalpha: Collate alphabetically.\nalpha_case: Collate alphabetically, case-sensitive.\nnum: Collate numerically.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view event types." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Retrieve saved event types.", - "urlParams": {} - }, - "POST": { - "config": "eventtypes", - "params": { - "description": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Human-readable description of this event type.", - "validation": "" - }, - "disabled": { - "datatype": "Boolean", - "default": "0", - "required": "false", - "summary": "If True, disables the event type.", - "validation": "" - }, - "name": { - "datatype": "String", - "default": "", - "required": "true", - "summary": "The name for the event type.", - "validation": "" - }, - "priority": { - "datatype": "Number", - "default": "1", - "required": "false", - "summary": "Specify an integer from 1 to 10 for the value used to determine the order in which the matching event types of an event are displayed. 
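The two receiver endpoints above correspond to the SDK's `Index.submit()` (receivers/simple) and `Index.attach()` (receivers/stream). A rough sketch under the same placeholder connection assumptions; event text and metadata values are made up:

```python
import splunklib.client as client

service = client.connect(
    host="localhost", port=8089,
    username="admin", password="changeme")

index = service.indexes["main"]

# receivers/simple: one event per request, metadata passed as parameters.
index.submit("demo event via receivers/simple",
             sourcetype="sdk_demo", host="myhost")

# receivers/stream: open a socket and stream newline-delimited raw events.
sock = index.attach(sourcetype="sdk_demo", host="myhost")
try:
    sock.send(b"first streamed event\n")
    sock.send(b"second streamed event\n")
finally:
    sock.close()
```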
1 is the highest priority.", - "validation": "" - }, - "search": { - "datatype": "String", - "default": "", - "required": "true", - "summary": "Search terms for this event type.", - "validation": "" - }, - "tags": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Deprecated. Use tags.conf.spec file to assign tags to groups of events with related field values.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "201": { - "summary": "Created successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "402": { - "summary": "The Splunk license in use has disabled this feature." - }, - "403": { - "summary": "Insufficient permissions to create an event type." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - }, - "503": { - "summary": "This feature has been disabled in Splunk configuration files." - } - }, - "summary": "Creates a new event type.", - "urlParams": {} - } - }, - "summary": "Provides access to saved event types." - }, - "saved/eventtypes/{name}": { - "methods": { - "DELETE": { - "config": "eventtypes", - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Deleted successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to delete event type." - }, - "404": { - "summary": "Event type does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Deletes this event type.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - }, - "GET": { - "config": "eventtypes", - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view event type." - }, - "404": { - "summary": "Event type does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." 
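The saved/eventtypes endpoints map onto the SDK's `event_types` collection; only the name and search parameters from the POST table above are required. A minimal sketch with placeholder names:

```python
import splunklib.client as client

service = client.connect(
    host="localhost", port=8089,
    username="admin", password="changeme")

# POST saved/eventtypes: name and search are the only required parameters.
service.event_types.create(
    "sdk_demo_errors",
    search="index=_internal log_level=ERROR",
    description="Internal Splunk errors",
    priority=1)

# GET saved/eventtypes/{name}, then DELETE to clean up.
print(service.event_types["sdk_demo_errors"]["search"])
service.event_types.delete("sdk_demo_errors")
```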
- } - }, - "summary": "Returns information on this event type.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - }, - "POST": { - "config": "eventtypes", - "params": { - "description": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "disabled": { - "datatype": "INHERITED", - "default": "0", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "priority": { - "datatype": "INHERITED", - "default": "1", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "search": { - "datatype": "INHERITED", - "default": "", - "required": "true", - "summary": "INHERITED", - "validation": "" - }, - "tags": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Updated successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "402": { - "summary": "The Splunk license in use has disabled this feature." - }, - "403": { - "summary": "Insufficient permissions to edit event type." - }, - "404": { - "summary": "Event type does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - }, - "503": { - "summary": "This feature has been disabled in Splunk configuration files." - } - }, - "summary": "Updates this event type.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - } - } - }, - "saved/searches": { - "methods": { - "GET": { - "config": "savedsearches", - "params": { - "count": { - "datatype": "Number", - "default": "30", - "required": "false", - "summary": "Indicates the maximum number of entries to return. To return all entries, specify 0.", - "validation": "" - }, - "earliest_time": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "For scheduled searches display all the scheduled times starting from this time (not just the next run time)", - "validation": "" - }, - "latest_time": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "For scheduled searches display all the scheduled times until this time (not just the next run time)", - "validation": "" - }, - "offset": { - "datatype": "Number", - "default": "0", - "required": "false", - "summary": "Index for first item to return.", - "validation": "" - }, - "search": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Search expression to filter the response. The response matches field values against the search expression. For example:\n\nsearch=foo matches any object that has \"foo\" as a substring in a field.\nsearch=field_name%3Dfield_value restricts the match to a single field. 
URI-encoding is required in this example.", - "validation": "" - }, - "sort_dir": { - "datatype": "Enum", - "default": "asc", - "required": "false", - "summary": "Valid values: (asc | desc)\n\nIndicates whether to sort returned entries in ascending or descending order.", - "validation": "" - }, - "sort_key": { - "datatype": "String", - "default": "name", - "required": "false", - "summary": "Field to use for sorting.", - "validation": "" - }, - "sort_mode": { - "datatype": "Enum", - "default": "auto", - "required": "false", - "summary": "Valid values: (auto | alpha | alpha_case | num)\n\nIndicates the collating sequence for sorting the returned entries.\nauto: If all values of the field are numbers, collate numerically. Otherwise, collate alphabetically.\nalpha: Collate alphabetically.\nalpha_case: Collate alphabetically, case-sensitive.\nnum: Collate numerically.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view saved search." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Returns information on all saved searches.", - "urlParams": {} - }, - "POST": { - "config": "savedsearches", - "params": { - "action.*": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Wildcard argument that accepts any action.", - "validation": "" - }, - "action.email": { - "datatype": "Boolean", - "default": "0", - "required": "false", - "summary": "The state of the email action. Read-only attribute. Value ignored on POST. Use actions to specify a list of enabled actions.", - "validation": "" - }, - "action.email.auth_password": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "The password to use when authenticating with the SMTP server. Normally this value will be set when editing the email settings, however you can set a clear text password here and it will be encrypted on the next Splunk restart.\n\nDefaults to empty string.", - "validation": "" - }, - "action.email.auth_username": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "The username to use when authenticating with the SMTP server. If this is empty string, no authentication is attempted. Defaults to empty string.\n\nNOTE: Your SMTP server might reject unauthenticated emails.", - "validation": "" - }, - "action.email.bcc": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "BCC email address to use if action.email is enabled. ", - "validation": "" - }, - "action.email.cc": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "CC email address to use if action.email is enabled.", - "validation": "" - }, - "action.email.command": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "The search command (or pipeline) which is responsible for executing the action.\n\nGenerally the command is a template search pipeline which is realized with values from the saved search. 
To reference saved search field values wrap them in $, for example to reference the savedsearch name use $name$, to reference the search use $search$.", - "validation": "" - }, - "action.email.format": { - "datatype": "Enum", - "default": "", - "required": "false", - "summary": "Valid values: (plain | html | raw | csv)\n\nSpecify the format of text in the email. This value also applies to any attachments.", - "validation": "" - }, - "action.email.from": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Email address from which the email action originates.\n\nDefaults to splunk@$LOCALHOST or whatever value is set in alert_actions.conf.", - "validation": "" - }, - "action.email.hostname": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Sets the hostname used in the web link (url) sent in email actions.\n\nThis value accepts two forms:\n\nhostname (for example, splunkserver, splunkserver.example.com)\n\nprotocol://hostname:port (for example, http://splunkserver:8000, https://splunkserver.example.com:443)\n\nWhen this value is a simple hostname, the protocol and port which are configured within splunk are used to construct the base of the url.\n\nWhen this value begins with 'http://', it is used verbatim. NOTE: This means the correct port must be specified if it is not the default port for http or https. This is useful in cases when the Splunk server is not aware of how to construct an externally referencable url, such as SSO environments, other proxies, or when the Splunk server hostname is not generally resolvable.\n\nDefaults to current hostname provided by the operating system, or if that fails \"localhost\". When set to empty, default behavior is used.", - "validation": "" - }, - "action.email.inline": { - "datatype": "Boolean", - "default": "", - "required": "false", - "summary": "Indicates whether the search results are contained in the body of the email.\n\nResults can be either inline or attached to an email. See action.email.sendresults.", - "validation": "" - }, - "action.email.mailserver": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Set the address of the MTA server to be used to send the emails.\n\nDefaults to (or whatever is set in alert_actions.conf).", - "validation": "" - }, - "action.email.maxresults": { - "datatype": "Number", - "default": "", - "required": "false", - "summary": "Sets the global maximum number of search results to send when email.action is enabled.\n\nDefaults to 100.", - "validation": "" - }, - "action.email.maxtime": { - "datatype": "Number", - "default": "", - "required": "false", - "summary": "Valid values are Integer[m|s|h|d].\n\nSpecifies the maximum amount of time the execution of an email action takes before the action is aborted. Defaults to 5m.", - "validation": "" - }, - "action.email.pdfview": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "The name of the view to deliver if sendpdf is enabled", - "validation": "" - }, - "action.email.preprocess_results": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Search string to preprocess results before emailing them. 
Defaults to empty string (no preprocessing).\n\nUsually the preprocessing consists of filtering out unwanted internal fields.", - "validation": "" - }, - "action.email.reportPaperOrientation": { - "datatype": "Enum", - "default": "", - "required": "false", - "summary": "Valid values: (portrait | landscape)\n\nSpecifies the paper orientation: portrait or landscape. Defaults to portrait.", - "validation": "" - }, - "action.email.reportPaperSize": { - "datatype": "Enum", - "default": "", - "required": "false", - "summary": "Valid values: (letter | legal | ledger | a2 | a3 | a4 | a5)\n\nSpecifies the paper size for PDFs. Defaults to letter.", - "validation": "" - }, - "action.email.reportServerEnabled": { - "datatype": "Boolean", - "default": "", - "required": "false", - "summary": "Indicates whether the PDF server is enabled. Defaults to false.", - "validation": "" - }, - "action.email.reportServerURL": { - "datatype": "String", - "default": "", - "required": "false", - "summary": " The URL of the PDF report server, if one is set up and available on the network.\n\nFor a default locally installed report server, the URL is http://localhost:8091/", - "validation": "" - }, - "action.email.sendpdf": { - "datatype": "Boolean", - "default": "", - "required": "false", - "summary": "Indicates whether to create and send the results as a PDF. Defaults to false.", - "validation": "" - }, - "action.email.sendresults": { - "datatype": "Boolean", - "default": "", - "required": "false", - "summary": "Indicates whether to attach the search results in the email.\n\nResults can be either attached or inline. See action.email.inline. ", - "validation": "" - }, - "action.email.subject": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Specifies an alternate email subject.\n\nDefaults to SplunkAlert-.", - "validation": "" - }, - "action.email.to": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "A comma or semicolon separated list of recipient email addresses. Required if this search is scheduled and the email alert action is enabled.", - "validation": "" - }, - "action.email.track_alert": { - "datatype": "Boolean", - "default": "", - "required": "false", - "summary": "Indicates whether the execution of this action signifies a trackable alert.", - "validation": "" - }, - "action.email.ttl": { - "datatype": "Number", - "default": "", - "required": "false", - "summary": "Valid values are Integer[p].\n\nSpecifies the minimum time-to-live in seconds of the search artifacts if this action is triggered. If p follows <Integer>, int is the number of scheduled periods. 
Defaults to 86400 (24 hours).\n\nIf no actions are triggered, the artifacts have their ttl determined by dispatch.ttl in savedsearches.conf.", - "validation": "" - }, - "action.email.use_ssl": { - "datatype": "Boolean", - "default": "", - "required": "false", - "summary": "Indicates whether to use SSL when communicating with the SMTP server.\n\nDefaults to false.", - "validation": "" - }, - "action.email.use_tls": { - "datatype": "Boolean", - "default": "", - "required": "false", - "summary": "Indicates whether to use TLS (transport layer security) when communicating with the SMTP server (starttls).\n\nDefaults to false.", - "validation": "" - }, - "action.email.width_sort_columns": { - "datatype": "Boolean", - "default": "", - "required": "false", - "summary": "Indicates whether columns should be sorted from least wide to most wide, left to right.\n\nOnly valid if format=text.", - "validation": "" - }, - "action.populate_lookup": { - "datatype": "Boolean", - "default": "0", - "required": "false", - "summary": "The state of the populate lookup action. Read-only attribute. Value ignored on POST. Use actions to specify a list of enabled actions.", - "validation": "" - }, - "action.populate_lookup.command": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "The search command (or pipeline) which is responsible for executing the action.\n\nGenerally the command is a template search pipeline which is realized with values from the saved search. To reference saved search field values wrap them in $, for example to reference the savedsearch name use $name$, to reference the search use $search$.", - "validation": "" - }, - "action.populate_lookup.dest": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Lookup name or path of the lookup to populate", - "validation": "" - }, - "action.populate_lookup.hostname": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Sets the hostname used in the web link (url) sent in alert actions.\n\nThis value accepts two forms:\n\nhostname (for example, splunkserver, splunkserver.example.com)\n\nprotocol://hostname:port (for example, http://splunkserver:8000, https://splunkserver.example.com:443)\n\nSee action.email.hostname for details.", - "validation": "" - }, - "action.populate_lookup.maxresults": { - "datatype": "Number", - "default": "", - "required": "false", - "summary": "Sets the maximum number of search results sent via alerts. Defaults to 100.", - "validation": "" - }, - "action.populate_lookup.maxtime": { - "datatype": "Number", - "default": "", - "required": "false", - "summary": "Valid values are: Integer[m|s|h|d]\n\nSets the maximum amount of time the execution of an action takes before the action is aborted. Defaults to 5m.", - "validation": "" - }, - "action.populate_lookup.track_alert": { - "datatype": "Boolean", - "default": "", - "required": "false", - "summary": "Indicates whether the execution of this action signifies a trackable alert.", - "validation": "" - }, - "action.populate_lookup.ttl": { - "datatype": "Number", - "default": "", - "required": "false", - "summary": "Valid values are Integer[p]\n\nSpecifies the minimum time-to-live in seconds of the search artifacts if this action is triggered. If p follows Integer, then this specifies the number of scheduled periods.
Defaults to 10p.\n\nIf no actions are triggered, the artifacts have their ttl determined by dispatch.ttl in savedsearches.conf.", - "validation": "" - }, - "action.rss": { - "datatype": "Boolean", - "default": "0", - "required": "false", - "summary": "The state of the rss action. Read-only attribute. Value ignored on POST. Use actions to specify a list of enabled actions.", - "validation": "" - }, - "action.rss.command": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "The search command (or pipeline) which is responsible for executing the action.\n\nGenerally the command is a template search pipeline which is realized with values from the saved search. To reference saved search field values wrap them in $, for example to reference the savedsearch name use $name$, to reference the search use $search$.", - "validation": "" - }, - "action.rss.hostname": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Sets the hostname used in the web link (url) sent in alert actions.\n\nThis value accepts two forms:\n\nhostname (for example, splunkserver, splunkserver.example.com)\n\nprotocol://hostname:port (for example, http://splunkserver:8000, https://splunkserver.example.com:443)\n\nSee action.email.hostname for details.", - "validation": "" - }, - "action.rss.maxresults": { - "datatype": "Number", - "default": "", - "required": "false", - "summary": "Sets the maximum number of search results sent via alerts. Defaults to 100.", - "validation": "" - }, - "action.rss.maxtime": { - "datatype": "Number", - "default": "", - "required": "false", - "summary": "Valid values are Integer[m|s|h|d].\n\nSets the maximum amount of time the execution of an action takes before the action is aborted. Defaults to 1m.", - "validation": "" - }, - "action.rss.track_alert": { - "datatype": "Boolean", - "default": "", - "required": "false", - "summary": "Indicates whether the execution of this action signifies a trackable alert.", - "validation": "" - }, - "action.rss.ttl": { - "datatype": "Number", - "default": "", - "required": "false", - "summary": "Valid values are: Integer[p]\n\nSpecifies the minimum time-to-live in seconds of the search artifacts if this action is triggered. If p follows Integer, specifies the number of scheduled periods. Defaults to 86400 (24 hours).\n\nIf no actions are triggered, the artifacts have their ttl determined by dispatch.ttl in savedsearches.conf.", - "validation": "" - }, - "action.script": { - "datatype": "Boolean", - "default": "0", - "required": "false", - "summary": "The state of the script action. Read-only attribute. Value ignored on POST. Use actions to specify a list of enabled actions.", - "validation": "" - }, - "action.script.command": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "The search command (or pipeline) which is responsible for executing the action.\n\nGenerally the command is a template search pipeline which is realized with values from the saved search. To reference saved search field values wrap them in $, for example to reference the savedsearch name use $name$, to reference the search use $search$.", - "validation": "" - }, - "action.script.filename": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "File name of the script to call. 
Required if script action is enabled", - "validation": "" - }, - "action.script.hostname": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Sets the hostname used in the web link (url) sent in alert actions.\n\nThis value accepts two forms:\n\nhostname (for example, splunkserver, splunkserver.example.com)\n\nprotocol://hostname:port (for example, http://splunkserver:8000, https://splunkserver.example.com:443)\n\nSee action.email.hostname for details.", - "validation": "" - }, - "action.script.maxresults": { - "datatype": "Number", - "default": "", - "required": "false", - "summary": "Sets the maximum number of search results sent via alerts. Defaults to 100.", - "validation": "" - }, - "action.script.maxtime": { - "datatype": "Number", - "default": "", - "required": "false", - "summary": "Valid values are: Integer[m|s|h|d]\n\nSets the maximum amount of time the execution of an action takes before the action is aborted. Defaults to 5m.", - "validation": "" - }, - "action.script.track_alert": { - "datatype": "Boolean", - "default": "", - "required": "false", - "summary": "Indicates whether the execution of this action signifies a trackable alert.", - "validation": "" - }, - "action.script.ttl": { - "datatype": "Number", - "default": "", - "required": "false", - "summary": "Valid values are: Integer[p]\n\nSpecifies the minimum time-to-live in seconds of the search artifacts if this action is triggered. If p follows Integer, specifies the number of scheduled periods. Defaults to 600 (10 minutes).\n\nIf no actions are triggered, the artifacts have their ttl determined by dispatch.ttl in savedsearches.conf.", - "validation": "" - }, - "action.summary_index": { - "datatype": "Boolean", - "default": "0", - "required": "false", - "summary": "The state of the summary index action. Read-only attribute. Value ignored on POST. Use actions to specify a list of enabled actions.\n\nDefaults to 0", - "validation": "" - }, - "action.summary_index._name": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Specifies the name of the summary index where the results of the scheduled search are saved.\n\nDefaults to \"summary.\"", - "validation": "" - }, - "action.summary_index.command": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "The search command (or pipeline) which is responsible for executing the action.\n\nGenerally the command is a template search pipeline which is realized with values from the saved search. To reference saved search field values wrap them in $, for example to reference the savedsearch name use $name$, to reference the search use $search$.", - "validation": "" - }, - "action.summary_index.hostname": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Sets the hostname used in the web link (url) sent in alert actions.\n\nThis value accepts two forms:\n\nhostname (for example, splunkserver, splunkserver.example.com)\n\nprotocol://hostname:port (for example, http://splunkserver:8000, https://splunkserver.example.com:443)\n\nSee action.email.hostname for details.", - "validation": "" - }, - "action.summary_index.inline": { - "datatype": "Boolean", - "default": "", - "required": "false", - "summary": "Determines whether to execute the summary indexing action as part of the scheduled search. 
\n\nNOTE: This option is considered only if the summary index action is enabled and is always executed (in other words, if counttype = always).\n\nDefaults to true", - "validation": "" - }, - "action.summary_index.maxresults": { - "datatype": "Number", - "default": "", - "required": "false", - "summary": "Sets the maximum number of search results sent via alerts. Defaults to 100.", - "validation": "" - }, - "action.summary_index.maxtime": { - "datatype": "Number", - "default": "", - "required": "false", - "summary": "Valid values are: Integer[m|s|h|d]\n\nSets the maximum amount of time the execution of an action takes before the action is aborted. Defaults to 5m.", - "validation": "" - }, - "action.summary_index.track_alert": { - "datatype": "Boolean", - "default": "", - "required": "false", - "summary": "Indicates whether the execution of this action signifies a trackable alert.", - "validation": "" - }, - "action.summary_index.ttl": { - "datatype": "Number", - "default": "", - "required": "false", - "summary": "Valid values are: Integer[p]\n\nSpecifies the minimum time-to-live in seconds of the search artifacts if this action is triggered. If p follows Integer, specifies the number of scheduled periods. Defaults to 10p.\n\nIf no actions are triggered, the artifacts have their ttl determined by dispatch.ttl in savedsearches.conf.", - "validation": "" - }, - "actions": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "List of enabled actions", - "validation": "" - }, - "alert.digest_mode": { - "datatype": "Boolean", - "default": "1", - "required": "false", - "summary": "Specifies whether Splunk applies the alert actions to the entire result set or on each individual result.\n\nDefaults to true.\n", - "validation": "" - }, - "alert.expires": { - "datatype": "Number", - "default": "24h", - "required": "false", - "summary": "Valid values: [number][time-unit]\n\nSets the period of time to show the alert in the dashboard. Defaults to 24h.\n\nUse [number][time-unit] to specify a time. For example: 60 = 60 seconds, 1m = 1 minute, 1h = 60 minutes = 1 hour.", - "validation": "" - }, - "alert.severity": { - "datatype": "Enum", - "default": "3", - "required": "false", - "summary": "Valid values: (1 | 2 | 3 | 4 | 5 | 6)\n\nSets the alert severity level.\n\nValid values are:\n\n1 DEBUG\n2 INFO\n3 WARN\n4 ERROR\n5 SEVERE\n6 FATAL", - "validation": "" - }, - "alert.suppress": { - "datatype": "Boolean", - "default": "0", - "required": "false", - "summary": "Indicates whether alert suppression is enabled for this scheduled search.", - "validation": "" - }, - "alert.suppress.fields": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Comma delimited list of fields to use for suppression when doing per result alerting. Required if suppression is turned on and per result alerting is enabled.", - "validation": "" - }, - "alert.suppress.period": { - "datatype": "Number", - "default": "", - "required": "false", - "summary": "Valid values: [number][time-unit]\n\nSpecifies the suppression period. Only valid if alert.suppress is enabled.\n\nUse [number][time-unit] to specify a time.
For example: 60 = 60 seconds, 1m = 1 minute, 1h = 60 minutes = 1 hour.", - "validation": "" - }, - "alert.track": { - "datatype": "Enum", - "default": "auto", - "required": "false", - "summary": "Valid values: (true | false | auto)\n\nSpecifies whether to track the actions triggered by this scheduled search.\n\nauto - determine whether to track or not based on the tracking setting of each action, do not track scheduled searches that always trigger actions.\n\ntrue - force alert tracking.\n\nfalse - disable alert tracking for this search.\n", - "validation": "" - }, - "alert_comparator": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "One of the following strings: greater than, less than, equal to, rises by, drops by, rises by perc, drops by perc", - "validation": "" - }, - "alert_condition": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Contains a conditional search that is evaluated against the results of the saved search. Defaults to an empty string.\n\nAlerts are triggered if the specified search yields a non-empty search result list.\n\nNOTE: If you specify an alert_condition, do not set counttype, relation, or quantity.\n", - "validation": "" - }, - "alert_threshold": { - "datatype": "Number", - "default": "", - "required": "false", - "summary": "The value to compare to before triggering the alert actions. Valid values are: Integer[%]?", - "validation": "" - }, - "alert_type": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "What to base the alert on, overridden by alert_condition if it is specified. Valid values are: always, custom, number of events, number of hosts, number of sources ", - "validation": "" - }, - "args.*": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Wildcard argument that accepts any saved search template argument, such as args.username=foobar when the search is search $username$.", - "validation": "" - }, - "cron_schedule": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Valid values: cron string\n\nThe cron schedule to execute this search. For example: */5 * * * * causes the search to execute every 5 minutes.\n\ncron lets you use standard cron notation to define your scheduled search interval. In particular, cron can accept this type of notation: 00,20,40 * * * *, which runs the search every hour at hh:00, hh:20, hh:40. Along the same lines, a cron of 03,23,43 * * * * runs the search every hour at hh:03, hh:23, hh:43.\n\nSplunk recommends that you schedule your searches so that they are staggered over time. This reduces system load. Running all of them every 20 minutes (*/20) means they would all launch at hh:00 (20, 40) and might slow your system every 20 minutes.", - "validation": "" - }, - "description": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Human-readable description of this saved search.
Defaults to empty string.", - "validation": "" - }, - "disabled": { - "datatype": "Boolean", - "default": "0", - "required": "false", - "summary": "Indicates if the saved search is enabled.\n\nDisabled saved searches are not visible in Splunk Web.", - "validation": "" - }, - "dispatch.*": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Wildcard argument that accepts any dispatch related argument.", - "validation": "" - }, - "dispatch.buckets": { - "datatype": "Number", - "default": "0", - "required": "false", - "summary": "The maximum nuber of timeline buckets.", - "validation": "validate(isint($dispatch.buckets$) AND $dispatch.buckets$>=0, \"Value of argument 'dispatch.buckets' must be a non-negative integer\")" - }, - "dispatch.earliest_time": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "A time string that specifies the earliest time for this search. Can be a relative or absolute time.\n\nIf this value is an absolute time, use the dispatch.time_format to format the value.", - "validation": "" - }, - "dispatch.latest_time": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "A time string that specifies the latest time for this saved search. Can be a relative or absolute time.\n\nIf this value is an absolute time, use the dispatch.time_format to format the value.", - "validation": "" - }, - "dispatch.lookups": { - "datatype": "Boolean", - "default": "1", - "required": "false", - "summary": "Enables or disables the lookups for this search.", - "validation": "validate(is_bool($dispatch.lookups$), \"Value of argument 'dispatch.lookups' must be a boolean\")" - }, - "dispatch.max_count": { - "datatype": "Number", - "default": "500000", - "required": "false", - "summary": "The maximum number of results before finalizing the search.", - "validation": "validate(isint($dispatch.max_count$) AND $dispatch.max_count$>=0, \"Value of argument 'dispatch.max_count' must be a non-negative integer\")" - }, - "dispatch.max_time": { - "datatype": "Number", - "default": "0", - "required": "false", - "summary": "Indicates the maximum amount of time (in seconds) before finalizing the search.", - "validation": "" - }, - "dispatch.reduce_freq": { - "datatype": "Number", - "default": "10", - "required": "false", - "summary": "Specifies how frequently Splunk should run the MapReduce reduce phase on accumulated map values.", - "validation": "" - }, - "dispatch.rt_backfill": { - "datatype": "Boolean", - "default": "0", - "required": "false", - "summary": "Whether to back fill the real time window for this search. 
Parameter valid only if this is a real time search.", - "validation": "" - }, - "dispatch.spawn_process": { - "datatype": "Boolean", - "default": "1", - "required": "false", - "summary": "Specifies whether Splunk spawns a new search process when this saved search is executed.", - "validation": "validate(is_bool($dispatch.spawn_process$), \"Value of argument 'dispatch.spawn_process' must be a boolean\")" - }, - "dispatch.time_format": { - "datatype": "String", - "default": "%FT%T.%Q%:z", - "required": "false", - "summary": "A time format string that defines the time format that Splunk uses to specify the earliest and latest time.", - "validation": "validate(is_time_format($dispatch.time_format$), \"Value of argument 'dispatch.time_format' must be a time format string\")" - }, - "dispatch.ttl": { - "datatype": "Number", - "default": "2p", - "required": "false", - "summary": "Valid values: Integer[p]\n\nIndicates the time to live (in seconds) for the artifacts of the scheduled search, if no actions are triggered.\n\nIf an action is triggered Splunk changes the ttl to that action's ttl. If multiple actions are triggered, Splunk applies the maximum ttl to the artifacts. To set the action's ttl, refer to alert_actions.conf.spec.\n\nIf the integer is followed by the letter 'p' Splunk interprets the ttl as a multiple of the scheduled search's period.", - "validation": "" - }, - "displayview": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Defines the default UI view name (not label) in which to load the results. Accessibility is subject to the user having sufficient permissions.", - "validation": "" - }, - "is_scheduled": { - "datatype": "Boolean", - "default": "", - "required": "false", - "summary": "Whether this search is to be run on a schedule", - "validation": "validate(is_bool($is_scheduled$), \"Value of argument 'is_scheduled' must be a boolean\")" - }, - "is_visible": { - "datatype": "Boolean", - "default": "true", - "required": "false", - "summary": "Specifies whether this saved search should be listed in the visible saved search list.", - "validation": "validate(is_bool($is_visible$), \"Value of argument 'is_visible' must be a boolean\")" - }, - "max_concurrent": { - "datatype": "Number", - "default": "1", - "required": "false", - "summary": "The maximum number of concurrent instances of this search the scheduler is allowed to run.", - "validation": "validate(isint($max_concurrent$) AND $max_concurrent$>=0, \"Value of argument 'max_concurrent' must be a non-negative integer\")" - }, - "name": { - "datatype": "String", - "default": "", - "required": "true", - "summary": "A name for the saved search. Other parameters can be combined with it to specify multiple actions in one request.\n\nFor example, you can specify:\n\ncurl -k -u admin:pass https://localhost:8089/servicesNS/admin/search/saved/searches -d name=MySavedSearch42 --data-urlencode search=\"index=_internal source=*metrics.log\" -d action.email.cc=receiver@example.com -d action.email.bcc=receiver@example.com\n", - "validation": "" - }, - "next_scheduled_time": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Read-only attribute. Value ignored on POST. There are some old clients that still send this value", - "validation": "" - }, - "qualifiedSearch": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Read-only attribute. Value ignored on POST.
Splunk computes this value during runtime.", - "validation": "" - }, - "realtime_schedule": { - "datatype": "Boolean", - "default": "1", - "required": "false", - "summary": "Controls the way the scheduler computes the next execution time of a scheduled search. If this value is set to 1, the scheduler bases its determination of the next scheduled search execution time on the current time.\n\nIf this value is set to 0, the scheduler bases its determination of the next scheduled search on the last search execution time. This is called continuous scheduling. If set to 0, the scheduler never skips scheduled execution periods. However, the execution of the saved search might fall behind depending on the scheduler's load. Use continuous scheduling whenever you enable the summary index option.\n\nIf set to 1, the scheduler might skip some execution periods to make sure that the scheduler is executing the searches running over the most recent time range.\n\nThe scheduler tries to execute searches that have realtime_schedule set to 1 before it executes searches that have continuous scheduling (realtime_schedule = 0).", - "validation": "validate(is_bool($realtime_schedule$), \"Value of argument 'realtime_schedule' must be a boolean\")" - }, - "request.ui_dispatch_app": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Specifies a field used by Splunk UI to denote the app this search should be dispatched in.", - "validation": "" - }, - "request.ui_dispatch_view": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Specifies a field used by Splunk UI to denote the view this search should be displayed in.", - "validation": "" - }, - "restart_on_searchpeer_add": { - "datatype": "Boolean", - "default": "1", - "required": "false", - "summary": "Specifies whether to restart a real-time search managed by the scheduler when a search peer becomes available for this saved search.\n\nNOTE: The peer can be a newly added peer or a peer that has been down and has become available.", - "validation": "" - }, - "run_on_startup": { - "datatype": "Boolean", - "default": "0", - "required": "false", - "summary": "Indicates whether this search runs when Splunk starts. If it does not run on startup, it runs at the next scheduled time.\n\nSplunk recommends that you set run_on_startup to true for scheduled searches that populate lookup tables.", - "validation": "validate(is_bool($run_on_startup$), \"Value of argument 'run_on_startup' must be a boolean\")" - }, - "search": { - "datatype": "String", - "default": "", - "required": "true", - "summary": "The search to save.", - "validation": "" - }, - "vsid": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Defines the viewstate id associated with the UI view listed in 'displayview'.\n\nMust match up to a stanza in viewstates.conf.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "201": { - "summary": "Created successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "402": { - "summary": "The Splunk license in use has disabled this feature." - }, - "403": { - "summary": "Insufficient permissions to create saved search." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." 
- }, - "503": { - "summary": "This feature has been disabled in Splunk configuration files." - } - }, - "summary": "Creates a saved search.", - "urlParams": {} - } - }, - "summary": "Provides access to the configuration of saved searches." - }, - "saved/searches/{name}": { - "methods": { - "DELETE": { - "config": "savedsearches", - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Deleted successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to delete saved search." - }, - "404": { - "summary": "Saved search does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Deletes this saved search.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - }, - "GET": { - "config": "savedsearches", - "params": { - "earliest_time": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "If the search is scheduled display scheduled times starting from this time", - "validation": "" - }, - "latest_time": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "If the search is scheduled display scheduled times ending at this time", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view saved search." - }, - "404": { - "summary": "Saved search does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." 
- } - }, - "summary": "Returns information on this saved search.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - }, - "POST": { - "config": "savedsearches", - "params": { - "action.*": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "action.email": { - "datatype": "INHERITED", - "default": "0", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "action.email.auth_password": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "action.email.auth_username": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "action.email.bcc": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "action.email.cc": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "action.email.command": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "action.email.format": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "action.email.from": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "action.email.hostname": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "action.email.inline": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "action.email.mailserver": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "action.email.maxresults": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "action.email.maxtime": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "action.email.pdfview": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "action.email.preprocess_results": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "action.email.reportPaperOrientation": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "action.email.reportPaperSize": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "action.email.reportServerEnabled": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "action.email.reportServerURL": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "action.email.sendpdf": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "action.email.sendresults": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "action.email.subject": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": 
"INHERITED", - "validation": "" - }, - "action.email.to": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "action.email.track_alert": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "action.email.ttl": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "action.email.use_ssl": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "action.email.use_tls": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "action.email.width_sort_columns": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "action.populate_lookup": { - "datatype": "INHERITED", - "default": "0", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "action.populate_lookup.command": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "action.populate_lookup.dest": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "action.populate_lookup.hostname": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "action.populate_lookup.maxresults": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "action.populate_lookup.maxtime": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "action.populate_lookup.track_alert": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "action.populate_lookup.ttl": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "action.rss": { - "datatype": "INHERITED", - "default": "0", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "action.rss.command": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "action.rss.hostname": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "action.rss.maxresults": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "action.rss.maxtime": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "action.rss.track_alert": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "action.rss.ttl": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "action.script": { - "datatype": "INHERITED", - "default": "0", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "action.script.command": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "action.script.filename": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - 
"validation": "" - }, - "action.script.hostname": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "action.script.maxresults": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "action.script.maxtime": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "action.script.track_alert": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "action.script.ttl": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "action.summary_index": { - "datatype": "INHERITED", - "default": "0", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "action.summary_index._name": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "action.summary_index.command": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "action.summary_index.hostname": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "action.summary_index.inline": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "action.summary_index.maxresults": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "action.summary_index.maxtime": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "action.summary_index.track_alert": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "action.summary_index.ttl": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "actions": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "alert.digest_mode": { - "datatype": "INHERITED", - "default": "1", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "alert.expires": { - "datatype": "INHERITED", - "default": "24h", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "alert.severity": { - "datatype": "INHERITED", - "default": "3", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "alert.suppress": { - "datatype": "INHERITED", - "default": "0", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "alert.suppress.fields": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "alert.suppress.period": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "alert.track": { - "datatype": "INHERITED", - "default": "auto", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "alert_comparator": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "alert_condition": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - 
"alert_threshold": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "alert_type": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "args.*": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "cron_schedule": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "description": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "disabled": { - "datatype": "INHERITED", - "default": "0", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "dispatch.*": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "dispatch.buckets": { - "datatype": "INHERITED", - "default": "0", - "required": "false", - "summary": "INHERITED", - "validation": "validate(isint($dispatch.buckets$) AND $dispatch.buckets$>=0, \"Value of argument 'dispatch.buckets' must be a non-negative integer\")" - }, - "dispatch.earliest_time": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "dispatch.latest_time": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "dispatch.lookups": { - "datatype": "INHERITED", - "default": "1", - "required": "false", - "summary": "INHERITED", - "validation": "validate(is_bool($dispatch.lookups$), \"Value of argument 'dispatch.lookups' must be a boolean\")" - }, - "dispatch.max_count": { - "datatype": "INHERITED", - "default": "500000", - "required": "false", - "summary": "INHERITED", - "validation": "validate(isint($dispatch.max_count$) AND $dispatch.max_count$>=0, \"Value of argument 'dispatch.max_count' must be a non-negative integer\")" - }, - "dispatch.max_time": { - "datatype": "INHERITED", - "default": "0", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "dispatch.reduce_freq": { - "datatype": "INHERITED", - "default": "10", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "dispatch.rt_backfill": { - "datatype": "INHERITED", - "default": "0", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "dispatch.spawn_process": { - "datatype": "INHERITED", - "default": "1", - "required": "false", - "summary": "INHERITED", - "validation": "validate(is_bool($dispatch.spawn_process$), \"Value of argument 'dispatch.spawn_process' must be a boolean\")" - }, - "dispatch.time_format": { - "datatype": "INHERITED", - "default": "%FT%T.%Q%:z", - "required": "false", - "summary": "INHERITED", - "validation": "validate(is_time_format($dispatch.time_format$), \"Value of argument 'dispatch.time_format' must be a time format string\")" - }, - "dispatch.ttl": { - "datatype": "INHERITED", - "default": "2p", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "displayview": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "is_scheduled": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "validate(is_bool($is_scheduled$), \"Value of argument 'is_scheduled' must be a boolean\")" - }, - "is_visible": { - "datatype": 
"INHERITED", - "default": "true", - "required": "false", - "summary": "INHERITED", - "validation": "validate(is_bool($is_visible$), \"Value of argument 'is_visible' must be a boolean\")" - }, - "max_concurrent": { - "datatype": "INHERITED", - "default": "1", - "required": "false", - "summary": "INHERITED", - "validation": "validate(isint($max_concurrent$) AND $max_concurrent$>=0, \"Value of argument 'max_concurrent' must be a non-negative integer\")" - }, - "next_scheduled_time": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "qualifiedSearch": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "realtime_schedule": { - "datatype": "INHERITED", - "default": "1", - "required": "false", - "summary": "INHERITED", - "validation": "validate(is_bool($realtime_schedule$), \"Value of argument 'realtime_schedule' must be a boolean\")" - }, - "request.ui_dispatch_app": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "request.ui_dispatch_view": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "restart_on_searchpeer_add": { - "datatype": "INHERITED", - "default": "1", - "required": "false", - "summary": "INHERITED", - "validation": "" - }, - "run_on_startup": { - "datatype": "INHERITED", - "default": "0", - "required": "false", - "summary": "INHERITED", - "validation": "validate(is_bool($run_on_startup$), \"Value of argument 'run_on_startup' must be a boolean\")" - }, - "search": { - "datatype": "INHERITED", - "default": "", - "required": "true", - "summary": "INHERITED", - "validation": "" - }, - "vsid": { - "datatype": "INHERITED", - "default": "", - "required": "false", - "summary": "INHERITED", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Updated successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "402": { - "summary": "The Splunk license in use has disabled this feature." - }, - "403": { - "summary": "Insufficient permissions to edit saved search." - }, - "404": { - "summary": "Saved search does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - }, - "503": { - "summary": "This feature has been disabled in Splunk configuration files." - } - }, - "summary": "Updates this saved search.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - } - } - }, - "saved/searches/{name}/acknowledge": { - "methods": { - "POST": { - "config": "savedsearches", - "params": { - "<arbitrary_key>": { - "datatype": "UNDONE", - "default": "", - "required": "false", - "summary": "UNDONE", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Suppression was acknowledged successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "402": { - "summary": "The Splunk license in use has disabled this feature." 
- }, - "403": { - "summary": "Insufficient permissions to acknowledge the suppression." - }, - "404": { - "summary": "Named save search does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - }, - "503": { - "summary": "This feature has been disabled in Splunk configuration files." - } - }, - "summary": "Acknowledge the suppression of the alerts from this saved search and resume alerting. Action available only with POST", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - } - } - }, - "saved/searches/{name}/dispatch": { - "methods": { - "POST": { - "config": "savedsearches", - "params": { - "<arbitrary_key>": { - "datatype": "UNDONE", - "default": "", - "required": "false", - "summary": "UNDONE", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Dispatched the saved search successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "402": { - "summary": "The Splunk license in use has disabled this feature." - }, - "403": { - "summary": "Insufficient permissions to dispatch the saved search." - }, - "404": { - "summary": "Named save search does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - }, - "503": { - "summary": "This feature has been disabled in Splunk configuration files." - } - }, - "summary": "Dispatch the saved search just like the scheduler would. Action available only through POST. The following optional arguments are accepted:\ndispatch.now: [time] dispatch the search as if it this was the current time \ndispatch.*: any dispatch.* field of the search can be overriden\nnow: [time] deprecated, same as dispatch.now use that instead\ntrigger_actions: [bool] whether to trigger alert actions \nforce_dispatch: [bool] should a new search be started even if another instance of this search is already running", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - } - } - }, - "saved/searches/{name}/history": { - "methods": { - "GET": { - "config": "savedsearches", - "params": { - "<arbitrary_key>": { - "datatype": "UNDONE", - "default": "", - "required": "false", - "summary": "UNDONE", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Retrieved the dispatch history successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to retrieve dispatch history for this saved search." - }, - "404": { - "summary": "Named save search does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." 
- } - }, - "summary": "Get a list of available search jobs created from this saved search", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - } - } - }, - "saved/searches/{name}/scheduled_times": { - "methods": { - "GET": { - "config": "savedsearches", - "params": { - "<arbitrary_key>": { - "datatype": "UNDONE", - "default": "", - "required": "false", - "summary": "UNDONE", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Scheduled times returned successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to get scheduled times." - }, - "404": { - "summary": "Scheduled times do not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Returns the scheduled times for a saved search. Specify a time range for the data returned using earliest_time and latest_time parameters.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - } - } - }, - "saved/searches/{name}/suppress": { - "methods": { - "GET": { - "config": "savedsearches", - "params": { - "<arbitrary_key>": { - "datatype": "UNDONE", - "default": "", - "required": "false", - "summary": "UNDONE", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Retrieved/updated the suppression state successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to retrieve/update the suppression state." - }, - "404": { - "summary": "Named save search does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Check the suppression state of alerts from this saved search.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - } - } - }, - "scheduled/views": { - "methods": { - "GET": { - "config": "savedsearches", - "params": { - "count": { - "datatype": "Number", - "default": "30", - "required": "false", - "summary": "Indicates the maximum number of entries to return. To return all entries, specify 0.", - "validation": "" - }, - "offset": { - "datatype": "Number", - "default": "0", - "required": "false", - "summary": "Index for first item to return.", - "validation": "" - }, - "search": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Search expression to filter the response. The response matches field values against the search expression. For example:\n\nsearch=foo matches any object that has \"foo\" as a substring in a field.\nsearch=field_name%3Dfield_value restricts the match to a single field. 
URI-encoding is required in this example.", - "validation": "" - }, - "sort_dir": { - "datatype": "Enum", - "default": "asc", - "required": "false", - "summary": "Valid values: (asc | desc)\n\nIndicates whether to sort returned entries in ascending or descending order.", - "validation": "" - }, - "sort_key": { - "datatype": "String", - "default": "name", - "required": "false", - "summary": "Field to use for sorting.", - "validation": "" - }, - "sort_mode": { - "datatype": "Enum", - "default": "auto", - "required": "false", - "summary": "Valid values: (auto | alpha | alpha_case | num)\n\nIndicates the collating sequence for sorting the returned entries.\nauto: If all values of the field are numbers, collate numerically. Otherwise, collate alphabetically.\nalpha: Collate alphabetically.\nalpha_case: Collate alphabetically, case-sensitive.\nnum: Collate numerically.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view scheduled view." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Lists all scheduled view objects", - "urlParams": {} - } - }, - "summary": "Allows for management of scheduled (for pdf delivery) views. Scheduled views are dummy/noop scheduled saved searches that email a pdf version of a view" - }, - "scheduled/views/{name}": { - "methods": { - "DELETE": { - "config": "savedsearches", - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Deleted successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to delete scheduled view." - }, - "404": { - "summary": "Scheduled view does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Delete a scheduled view", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - }, - "GET": { - "config": "savedsearches", - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view scheduled view." - }, - "404": { - "summary": "Scheduled view does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." 
- } - }, - "summary": "List one scheduled view object", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - }, - "POST": { - "config": "savedsearches", - "params": { - "action.email*": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Wildcard argument that accepts any email action.", - "validation": "" - }, - "action.email.to": { - "datatype": "String", - "default": "", - "required": "true", - "summary": "Comma or semicolon separated list of email addresses to send the view to", - "validation": "" - }, - "cron_schedule": { - "datatype": "String", - "default": "", - "required": "true", - "summary": "The cron schedule to use for delivering the view", - "validation": "" - }, - "description": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "User readable description of this scheduled view object", - "validation": "" - }, - "disabled": { - "datatype": "Boolean", - "default": "0", - "required": "false", - "summary": "Whether this object is enabled or disabled", - "validation": "" - }, - "is_scheduled": { - "datatype": "Boolean", - "default": "", - "required": "true", - "summary": "Whether this pdf delivery should be scheduled", - "validation": "validate(is_bool($is_scheduled$), \"Value of argument 'is_scheduled' must be a boolean\")" - }, - "next_scheduled_time": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "The next time when the view will be delivered. Ignored on edit, here only for backwards compatability", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Updated successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "402": { - "summary": "The Splunk license in use has disabled this feature." - }, - "403": { - "summary": "Insufficient permissions to edit scheduled view." - }, - "404": { - "summary": "Scheudled view does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - }, - "503": { - "summary": "This feature has been disabled in Splunk configuration files." - } - }, - "summary": "Edit a scheduled view, e.g. change schedule, enable disable schedule etc", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - } - } - }, - "scheduled/views/{name}/dispatch": { - "methods": { - "POST": { - "config": "savedsearches", - "params": { - "<arbitrary_key>": { - "datatype": "UNDONE", - "default": "", - "required": "false", - "summary": "UNDONE", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Dispatched the scheduled view successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "402": { - "summary": "The Splunk license in use has disabled this feature." - }, - "403": { - "summary": "Insufficient permissions to dispatch a scheduled view." - }, - "404": { - "summary": "Named view does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." 
- }, - "503": { - "summary": "This feature has been disabled in Splunk configuration files." - } - }, - "summary": "Dispatch the scheduled search (powering the scheduled view) just like the scheduler would. Action available only through POST. The following optional arguments are accepted:\"dispatch.now: [time] dispatch the search as if it this was the current time\ndispatch.*: any dispatch.* field of the search can be overriden\nnow: [time] deprecated, same as dispatch.now use that instead\ntrigger_actions: [bool] whether to trigger the alert actions\nforce_dispatch: [bool] should a new search be started even if another instance of this search is already running", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - } - } - }, - "scheduled/views/{name}/history": { - "methods": { - "GET": { - "config": "savedsearches", - "params": { - "<arbitrary_key>": { - "datatype": "UNDONE", - "default": "", - "required": "false", - "summary": "UNDONE", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Retrieved scheduled view history successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to retrieve scheduled view history." - }, - "404": { - "summary": "Named view does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Get a list of search jobs used to deliver this scheduled view", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - } - } - }, - "scheduled/views/{name}/scheduled_times": { - "methods": { - "GET": { - "config": "savedsearches", - "params": { - "<arbitrary_key>": { - "datatype": "UNDONE", - "default": "", - "required": "false", - "summary": "UNDONE", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Scheduled times returned successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to access scheduled times." - }, - "404": { - "summary": "Scheudled times do not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Returns the scheduled times for a scheduled view. Specify a time range for the data returned using earliest_time and latest_time parameters.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - } - } - }, - "search/distributed/config": { - "methods": { - "GET": { - "config": "", - "params": { - "count": { - "datatype": "Number", - "default": "30", - "required": "false", - "summary": "Indicates the maximum number of entries to return. 
To return all entries, specify 0.", - "validation": "" - }, - "offset": { - "datatype": "Number", - "default": "0", - "required": "false", - "summary": "Index for first item to return.", - "validation": "" - }, - "search": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Search expression to filter the response. The response matches field values against the search expression. For example:\n\nsearch=foo matches any object that has \"foo\" as a substring in a field.\nsearch=field_name%3Dfield_value restricts the match to a single field. URI-encoding is required in this example.", - "validation": "" - }, - "sort_dir": { - "datatype": "Enum", - "default": "asc", - "required": "false", - "summary": "Valid values: (asc | desc)\n\nIndicates whether to sort returned entries in ascending or descending order.", - "validation": "" - }, - "sort_key": { - "datatype": "String", - "default": "name", - "required": "false", - "summary": "Field to use for sorting.", - "validation": "" - }, - "sort_mode": { - "datatype": "Enum", - "default": "auto", - "required": "false", - "summary": "Valid values: (auto | alpha | alpha_case | num)\n\nIndicates the collating sequence for sorting the returned entries.\nauto: If all values of the field are numbers, collate numerically. Otherwise, collate alphabetically.\nalpha: Collate alphabetically.\nalpha_case: Collate alphabetically, case-sensitive.\nnum: Collate numerically.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view configuration for distributed search." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Lists the configuration options for the distributed search system.", - "urlParams": {} - } - }, - "summary": "Provides access to Splunk's distributed search options. This option is not for adding search peers." - }, - "search/distributed/config/{name}": { - "methods": { - "DELETE": { - "config": "", - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Deleted successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to delete configuration for distributed search." - }, - "404": { - "summary": "Configuration for distributed search does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Disables the distributed search feature. Note that \"distributedSearch\" is the only valid name here.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - }, - "GET": { - "config": "", - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." 
- }, - "403": { - "summary": "Insufficient permissions to view configuration for distributed search." - }, - "404": { - "summary": "Configuration for distributed search does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Displays configuration options. Note that \"distributedSearch\" is the only valid name here.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - }, - "POST": { - "config": "", - "params": { - "autoAddServers": { - "datatype": "Boolean", - "default": "", - "required": "false", - "summary": "If true, automatically add all discovered servers.", - "validation": "" - }, - "blacklistNames": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "A comma-separated list of servers that you do not want to peer with. \n\nServers are the 'server name' that is created at startup time.", - "validation": "" - }, - "blacklistURLs": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Specify a comma separated list of server names or URIs to specify servers to blacklist.\n\nYou can blacklist on server name or server URI (x.x.x.x:port).", - "validation": "" - }, - "checkTimedOutServersFrequency": { - "datatype": "Number", - "default": "", - "required": "false", - "summary": "Rechecks servers at the specified frequency (in seconds). If this is set to 0, then no recheck occurs. Defaults to 60.\n\nThis attribute is ONLY relevant if removeTimedOutServers is set to true. If removeTimedOutServers is false, this attribute is ignored.\n", - "validation": "" - }, - "connectionTimeout": { - "datatype": "Number", - "default": "", - "required": "false", - "summary": "Amount of time, in seconds, to use as a timeout during search peer connection establishment.", - "validation": "" - }, - "disabled": { - "datatype": "Boolean", - "default": "", - "required": "false", - "summary": "If true, disables the distributed search.\n\nDefaults to false (the distributed search is enabled).", - "validation": "" - }, - "heartbeatFrequency": { - "datatype": "Number", - "default": "", - "required": "false", - "summary": "The period between heartbeat messages, in seconds. \n\nUse 0 to disable sending of heartbeats. Defaults to 0.", - "validation": "" - }, - "heartbeatMcastAddr": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Specify an IP address to set a multicast address where each Splunk server sends and listens for heart beat messages.\n\nThis allows Splunk servers to auto-discover other Splunk servers on your network. Defaults to 224.0.0.37.", - "validation": "" - }, - "heartbeatPort": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Specify a port to set the heartbeat port where each Splunk server sends and listens for heart beat messages.\n\nThis allows Splunk servers to auto-discover other Splunk servers on the network. 
Defaults to 8888.", - "validation": "" - }, - "receiveTimeout": { - "datatype": "Number", - "default": "", - "required": "false", - "summary": "Amount of time in seconds to use as a timeout while trying to read/receive data from a search peer.", - "validation": "" - }, - "removedTimedOutServers": { - "datatype": "Boolean", - "default": "", - "required": "false", - "summary": "If true, removes a server connection that cannot be made within serverTimeout.\n\nIf false, every call to that server attempts to connect. This may result in a slow user interface.\n\nDefaults to false.", - "validation": "" - }, - "sendTimeout": { - "datatype": "Number", - "default": "", - "required": "false", - "summary": "Amount of time in seconds to use as a timeout while trying to write/send data to a search peer.", - "validation": "" - }, - "serverTimeout": { - "datatype": "Number", - "default": "", - "required": "false", - "summary": "Deprected. Use connectionTimeout, sendTimeout, and receiveTimeout.", - "validation": "" - }, - "servers": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Specify a comma-separated list of server to set the initial list of servers. \n\nIf operating completely in autoAddServers mode (discovering all servers), there is no need to list any servers here.", - "validation": "" - }, - "shareBundles": { - "datatype": "Boolean", - "default": "", - "required": "false", - "summary": "Indicates whether this server uses bundle replication to share search time configuration with search peers. \n\nIf set to false, the search head assumes that the search peers can access the correct bundles using an NFS share and have correctly configured the options listed under: \"SEARCH HEAD BUNDLE MOUNTING OPTIONS.\"\n\nDefaults to true.", - "validation": "" - }, - "skipOurselves": { - "datatype": "Boolean", - "default": "", - "required": "false", - "summary": "If set to true, this server does NOT participate as a server in any search or other call.\n\nThis is used for building a node that does nothing but merge the results from other servers. \n\nDefaults to false.", - "validation": "" - }, - "statusTimeout": { - "datatype": "Number", - "default": "", - "required": "false", - "summary": "Set connection timeout when gathering a search peer's basic info (/services/server/info). Defaults to 10.\n\nNote: Read/write timeouts are automatically set to twice this value.\n", - "validation": "" - }, - "ttl": { - "datatype": "Number", - "default": "", - "required": "false", - "summary": "Time to live (ttl) of the heartbeat messages. Defaults to 1 (this subnet).\n\nIncreasing this number allows the UDP multicast packets to spread beyond the current subnet to the specified number of hops.\n\nNOTE: This only works if routers along the way are configured to pass UDP multicast packets.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Updated successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "402": { - "summary": "The Splunk license in use has disabled this feature." - }, - "403": { - "summary": "Insufficient permissions to edit configuration for distributed search." - }, - "404": { - "summary": "Configuration for distributed search does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." 
- }, - "500": { - "summary": "Internal server error. See response body for details." - }, - "503": { - "summary": "This feature has been disabled in Splunk configuration files." - } - }, - "summary": "Update the configuration for the distributed search feature. Note that \"distributedSearch\" is the only valid name here.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - } - } - }, - "search/distributed/peers": { - "methods": { - "GET": { - "config": "", - "params": { - "count": { - "datatype": "Number", - "default": "30", - "required": "false", - "summary": "Indicates the maximum number of entries to return. To return all entries, specify 0.", - "validation": "" - }, - "discoveredPeersOnly": { - "datatype": "Bool", - "default": "", - "required": "false", - "summary": "If set to true, only list peers that have been auto-discovered.", - "validation": "" - }, - "offset": { - "datatype": "Number", - "default": "0", - "required": "false", - "summary": "Index for first item to return.", - "validation": "" - }, - "search": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Search expression to filter the response. The response matches field values against the search expression. For example:\n\nsearch=foo matches any object that has \"foo\" as a substring in a field.\nsearch=field_name%3Dfield_value restricts the match to a single field. URI-encoding is required in this example.", - "validation": "" - }, - "sort_dir": { - "datatype": "Enum", - "default": "asc", - "required": "false", - "summary": "Valid values: (asc | desc)\n\nIndicates whether to sort returned entries in ascending or descending order.", - "validation": "" - }, - "sort_key": { - "datatype": "String", - "default": "name", - "required": "false", - "summary": "Field to use for sorting.", - "validation": "" - }, - "sort_mode": { - "datatype": "Enum", - "default": "auto", - "required": "false", - "summary": "Valid values: (auto | alpha | alpha_case | num)\n\nIndicates the collating sequence for sorting the returned entries.\nauto: If all values of the field are numbers, collate numerically. Otherwise, collate alphabetically.\nalpha: Collate alphabetically.\nalpha_case: Collate alphabetically, case-sensitive.\nnum: Collate numerically.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view search peer." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Returns a list of configured search peers that this search head is configured to distribute searches to. 
This includes configured search peers that have been disabled.", - "urlParams": {} - }, - "POST": { - "config": "", - "params": { - "name": { - "datatype": "String", - "default": "", - "required": "true", - "summary": "The name of the search peer.\n\nDefined as hostname:port, where port is the management port.", - "validation": "" - }, - "remotePassword": { - "datatype": "String", - "default": "", - "required": "true", - "summary": "The password of the remote user.\n\nThis is used to authenticate with the search peer to exchange certificates.", - "validation": "" - }, - "remoteUsername": { - "datatype": "String", - "default": "", - "required": "true", - "summary": "The username of a user with admin privileges in the search peer server.\n\nThis is used to exchange certificates.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "201": { - "summary": "Created successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "402": { - "summary": "The Splunk license in use has disabled this feature." - }, - "403": { - "summary": "Insufficient permissions to create search peer." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - }, - "503": { - "summary": "This feature has been disabled in Splunk configuration files." - } - }, - "summary": "Adds a new search peer.", - "urlParams": {} - } - }, - "summary": "Provides distributed peer server management.\n\nA search peer is defined as a splunk server to which another splunk server distributes searches. The splunk server where the search request originates is referred to as the search head." - }, - "search/distributed/peers/{name}": { - "methods": { - "DELETE": { - "config": "", - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Deleted successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to delete search peer." - }, - "404": { - "summary": "Search peer does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Removes the distributed search peer specified by {name}.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - }, - "GET": { - "config": "", - "params": { - "discoveredPeersOnly": { - "datatype": "Boolean", - "default": "", - "required": "false", - "summary": "If true, return only auto-discovered search peers.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view search peer." - }, - "404": { - "summary": "Search peer does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error.
See response body for details." - } - }, - "summary": "Returns information about the distributed search peer specified by {name}.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - }, - "POST": { - "config": "", - "params": { - "remotePassword": { - "datatype": "INHERITED", - "default": "", - "required": "true", - "summary": "INHERITED", - "validation": "" - }, - "remoteUsername": { - "datatype": "INHERITED", - "default": "", - "required": "true", - "summary": "INHERITED", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Updated successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "402": { - "summary": "The Splunk license in use has disabled this feature." - }, - "403": { - "summary": "Insufficient permissions to edit search peer." - }, - "404": { - "summary": "Search peer does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - }, - "503": { - "summary": "This feature has been disabled in Splunk configuration files." - } - }, - "summary": "Update the configuration of the distributed search peer specified by {name}.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - } - } - }, - "search/fields": { - "methods": { - "GET": { - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - } - }, - "summary": "Returns a list of fields registered for field configuration.", - "urlParams": {} - } - }, - "summary": "Provides management for search field configurations.\n\nField configuration is specified in $SPLUNK_HOME/etc/system/default/fields.conf, with overriden values in $SPLUNK_HOME/etc/system/local/fields.conf." - }, - "search/fields/{field_name}": { - "methods": { - "GET": { - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. See response body for details." - } - }, - "summary": "Retrieves information about the named field.", - "urlParams": { - "field_name": { - "required": "true", - "summary": "field_name" - } - } - } - } - }, - "search/fields/{field_name}/tags": { - "methods": { - "GET": { - "request": "", - "response": "Because fields exist only at search time, this endpoint returns a 200 response for any non-empty request.", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "404": { - "summary": "Named field does not exist." 
- } - }, - "summary": "Returns a list of tags that have been associated with the field specified by {field_name}.", - "urlParams": { - "field_name": { - "required": "true", - "summary": "field_name" - } - } - }, - "POST": { - "params": { - "add": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "The tag to attach to this field_name:value combination.", - "validation": "" - }, - "delete": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "The tag to remove to this field_name::value combination.", - "validation": "" - }, - "value": { - "datatype": "String", - "default": "", - "required": "true", - "summary": "The specific field value on which to bind the tags.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Tags updated." - }, - "400": { - "summary": "Request error. See response body for details." - } - }, - "summary": "Update the tags associated with the field specified by {field_name}.\n\nThe value parameter specifies the specific value on which to bind tag actions. Multiple tags can be attached by passing multiple add or delete form parameters. The server processes all of the adds first, and then processes the deletes.\n\nYou must specify at least one add or delete parameter.", - "urlParams": { - "field_name": { - "required": "true", - "summary": "field_name" - } - } - } - } - }, - "search/jobs": { - "methods": { - "GET": { - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - } - }, - "summary": "Returns a list of current searches. \n\nOptional filter arguments can be passed to specify searches. The user id is implied by the authentication to the call. See the response properties for /search/jobs/{search_id} for descriptions of the job properties.", - "urlParams": {} - }, - "POST": { - "params": { - "auto_cancel": { - "datatype": "Number", - "default": "0", - "required": "false", - "summary": "If specified, the job automatically cancels after this many seconds of inactivity. (0 means never auto-cancel)", - "validation": "" - }, - "auto_finalize_ec": { - "datatype": "Number", - "default": "0", - "required": "false", - "summary": "Auto-finalize the search after at least this many events have been processed. \n\nSpecify 0 to indicate no limit.", - "validation": "" - }, - "auto_pause": { - "datatype": "Number", - "default": "0", - "required": "false", - "summary": "If specified, the job automatically cancels after this many seconds of inactivity. (0 means never auto-pause)", - "validation": "" - }, - "earliest_time": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Specify a time string. Sets the earliest (inclusive), respectively, time bounds for the search. \n\nThe time string can be either a UTC time (with fractional seconds), a relative time specifier (to now) or a formatted time string. (Also see comment for the search_mode variable.)", - "validation": "" - }, - "enable_lookups": { - "datatype": "Boolean", - "default": "true", - "required": "false", - "summary": "Indicates whether lookups should be applied to events. \n\nSpecifying true (the default) may slow searches significantly depending on the nature of the lookups.\n", - "validation": "" - }, - "exec_mode": { - "datatype": "Enum", - "default": "normal", - "required": "false", - "summary": "Valid values: (blocking | oneshot | normal)\n\nIf set to normal, runs an asynchronous search. 
\n\nIf set to blocking, returns the sid when the job is complete. \n\nIf set to oneshot, returns results in the same call.", - "validation": "" - }, - "force_bundle_replication": { - "datatype": "Boolean", - "default": "false", - "required": "false", - "summary": "Specifies whether this search should cause (and wait depending on the value of sync_bundle_replication) for bundle synchronization with all search peers.", - "validation": "" - }, - "id": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Optional string to specify the search ID (<:sid>). If unspecified, a random ID is generated.", - "validation": "" - }, - "latest_time": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Specify a time string. Sets the latest (exclusive) time bound for the search. \n\nThe time string can be either a UTC time (with fractional seconds), a relative time specifier (to now) or a formatted time string. (Also see comment for the search_mode variable.)", - "validation": "" - }, - "max_count": { - "datatype": "Number", - "default": "10000", - "required": "false", - "summary": "The number of events that can be accessible in any given status bucket. \n\nAlso, in transforming mode, the maximum number of results to store. Specifically, in all calls, offset+count <= max_count.", - "validation": "" - }, - "max_time": { - "datatype": "Number", - "default": "0", - "required": "false", - "summary": "The number of seconds to run this search before finalizing. Specify 0 to never finalize.", - "validation": "" - }, - "namespace": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "The application namespace in which to restrict searches. \n\nThe namespace corresponds to the identifier recognized in the /services/apps/local endpoint. ", - "validation": "" - }, - "now": { - "datatype": "String", - "default": "current system time", - "required": "false", - "summary": "Specify a time string to set the absolute time used for any relative time specifier in the search. Defaults to the current system time.\n\nYou can specify a relative time modifier for this parameter. For example, specify +2d to specify the current time plus two days.\n\nIf you specify a relative time modifier both in this parameter and in the search string, the search string modifier takes precedence.\n\nRefer to [[Documentation:Splunk:SearchReference:SearchTimeModifiers|Time modifiers for search]] for details on specifying relative time modifiers.", - "validation": "" - }, - "reduce_freq": { - "datatype": "Number", - "default": "0", - "required": "false", - "summary": "Determines how frequently to run the MapReduce reduce phase on accumulated map values.", - "validation": "" - }, - "reload_macros": { - "datatype": "Boolean", - "default": "true", - "required": "false", - "summary": "Specifies whether to reload macro definitions from macros.conf. \n\nDefault is true.", - "validation": "" - }, - "remote_server_list": { - "datatype": "String", - "default": "empty list", - "required": "false", - "summary": "Comma-separated list of (possibly wildcarded) servers from which raw events should be pulled. This same server list is to be used in subsearches.", - "validation": "" - }, - "required_field_list": { - "datatype": "String", - "default": "empty list", - "required": "false", - "summary": "Deprecated. Use rf instead.
\n\nA comma-separated list of required fields that, even if not referenced or used directly by the search,is still included by the events and summary endpoints.", - "validation": "" - }, - "rf": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Adds a required field to the search. There can be multiple rf POST arguments to the search.\n\nConsider using this form of passing the required fields to the search instead of the deprecated required_field_list. If both rf and required_field_list are supplied, the union of the two lists is used.", - "validation": "" - }, - "rt_blocking": { - "datatype": "Boolean", - "default": "false", - "required": "false", - "summary": " For a realtime search, indicates if the indexer blocks if the queue for this search is full.", - "validation": "" - }, - "rt_indexfilter": { - "datatype": "Boolean", - "default": "true", - "required": "false", - "summary": "For a realtime search, indicates if the indexer prefilters events.", - "validation": "" - }, - "rt_maxblocksecs": { - "datatype": "Number", - "default": "60", - "required": "false", - "summary": "For a realtime search with rt_blocking set to true, the maximum time to block.\n\nSpecify 0 to indicate no limit.", - "validation": "" - }, - "rt_queue_size": { - "datatype": "Number", - "default": "10000 events", - "required": "false", - "summary": "For a realtime search, the queue size (in events) that the indexer should use for this search.", - "validation": "" - }, - "search": { - "datatype": "Search", - "default": "", - "required": "true", - "summary": "The search language string to execute, taking results from the local and remote servers.\n\nExamples:\n\n \"search *\"\n\n \"search * | outputcsv\"", - "validation": "" - }, - "search_listener": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Registers a search state listener with the search.\n\nUse the format search_state;results_condition;http_method;uri;\n\nFor example: search_listener=onResults;true;POST;/servicesNS/admin/search/saved/search/foobar/notify;\n", - "validation": "" - }, - "search_mode": { - "datatype": "Enum", - "default": "normal", - "required": "false", - "summary": "Valid values: (normal | realtime)\n\nIf set to realtime, search runs over live data. A realtime search may also be indicated by earliest_time and latest_time variables starting with 'rt' even if the search_mode is set to normal or is unset. For a real-time search, if both earliest_time and latest_time are both exactly 'rt', the search represents all appropriate live data received since the start of the search. \n\nAdditionally, if earliest_time and/or latest_time are 'rt' followed by a relative time specifiers then a sliding window is used where the time bounds of the window are determined by the relative time specifiers and are continuously updated based on the wall-clock time.", - "validation": "" - }, - "spawn_process": { - "datatype": "Boolean", - "default": "true", - "required": "false", - "summary": "Specifies whether the search should run in a separate spawned process. 
Default is true.", - "validation": "" - }, - "status_buckets": { - "datatype": "Number", - "default": "0", - "required": "false", - "summary": "The most status buckets to generate.\n\n0 indicates to not generate timeline information.", - "validation": "" - }, - "sync_bundle_replication": { - "datatype": "Boolean", - "default": "", - "required": "false", - "summary": "Specifies whether this search should wait for bundle replication to complete.", - "validation": "" - }, - "time_format": { - "datatype": "String", - "default": "ISO-8601", - "required": "false", - "summary": "Used to convert a formatted time string from {start,end}_time into UTC seconds. It defaults to ISO-8601.", - "validation": "" - }, - "timeout": { - "datatype": "Number", - "default": "86400", - "required": "false", - "summary": "The number of seconds to keep this search after processing has stopped.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "201": { - "summary": "Created successfully." - } - }, - "summary": "Starts a new search, returning the search ID (<sid>).\n\nThe search parameter is a search language string that specifies the search. Often you create a search specifying just the search parameter. Use the other parameters to customize a search to specific needs.\n\nUse the returned (<sid>) in the following endpoints to view and manage the search:\n\n:search/jobs/{search_id}: View the status of this search job.\n\n:search/jobs/{search_id}/control: Execute job control commands, such as pause, cancel, preview, and others.\n\n:search/jobs/{search_id}/events: View a set of untransformed events for the search.\n\n:search/jobs/{search_id}/results: View results of the search.\n\n:search/jobs/{search_id}/results_preview: Preview results of a search that has not completed\n\n:search/jobs/{search_id}/search.log: View the log file generated by the search.\n\n:search/jobs/{search_id}/summary: View field summary information\n\n:search/jobs/{search_id}/timeline: View event distribution over time.", - "urlParams": {} - } - }, - "summary": "Provides listings for search jobs." 
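For reference while reading the deleted search/jobs spec above: the SDK's own `client.Jobs` collection wraps POST search/jobs. A minimal sketch, assuming a reachable local Splunk instance; the credentials, query, time bounds, and rf value are placeholders, not taken from the removed example app:

```python
import time

import splunklib.client as client
import splunklib.results as results

# Placeholder connection details; see the README's .env-based setup.
service = client.connect(host="localhost", port=8089,
                         username="admin", password="changeme")

# POST search/jobs: exec_mode, earliest_time/latest_time, and rf map
# directly to the parameters documented above.
job = service.jobs.create("search index=_internal | head 5",
                          exec_mode="normal",
                          earliest_time="-15m", latest_time="now",
                          rf="sourcetype")

while not job.is_done():   # polls GET search/jobs/{search_id}
    time.sleep(0.5)

for result in results.ResultsReader(job.results(count=0)):
    print(result)
```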
- }, - "search/jobs/export": { - "methods": { - "GET": { - "params": { - "auto_cancel": { - "datatype": "Number", - "default": "", - "required": "false", - "summary": "Same as for POST search/jobs.", - "validation": "" - }, - "auto_finalize_ec": { - "datatype": "Number", - "default": "", - "required": "false", - "summary": "Same as for POST search/jobs.", - "validation": "" - }, - "auto_pause": { - "datatype": "Number", - "default": "", - "required": "false", - "summary": "Same as for POST search/jobs.", - "validation": "" - }, - "earliest_time": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Same as for POST search/jobs.", - "validation": "" - }, - "enable_lookups": { - "datatype": "Bool", - "default": "", - "required": "false", - "summary": "Same as for POST search/jobs.", - "validation": "" - }, - "force_bundle_replication": { - "datatype": "Bool", - "default": "", - "required": "false", - "summary": "Same as for POST search/jobs.", - "validation": "" - }, - "id": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Same as for POST search/jobs.", - "validation": "" - }, - "latest_time": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Same as for POST search/jobs.", - "validation": "" - }, - "max_time": { - "datatype": "Number", - "default": "", - "required": "false", - "summary": "Same as for POST search/jobs.", - "validation": "" - }, - "namespace": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Same as for POST search/jobs.", - "validation": "" - }, - "now": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Same as for POST search/jobs.", - "validation": "" - }, - "reduce_freq": { - "datatype": "Number", - "default": "", - "required": "false", - "summary": "Same as for POST search/jobs.", - "validation": "" - }, - "reload_macros": { - "datatype": "Bool", - "default": "", - "required": "false", - "summary": "Same as for POST search/jobs.", - "validation": "" - }, - "remote_server_list": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Same as for POST search/jobs.", - "validation": "" - }, - "required_field_list": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Same as for POST search/jobs.", - "validation": "" - }, - "rf": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Same as for POST search/jobs.", - "validation": "" - }, - "rt_blocking": { - "datatype": "Bool", - "default": "", - "required": "false", - "summary": "Same as for POST search/jobs.", - "validation": "" - }, - "rt_indexfilter": { - "datatype": "Bool", - "default": "", - "required": "false", - "summary": "Same as for POST search/jobs.", - "validation": "" - }, - "rt_maxblocksecs": { - "datatype": "Number", - "default": "", - "required": "false", - "summary": "Same as for POST search/jobs.", - "validation": "" - }, - "rt_queue_size": { - "datatype": "Number", - "default": "", - "required": "false", - "summary": "Same as for POST search/jobs.", - "validation": "" - }, - "search": { - "datatype": "String", - "default": "", - "required": "true", - "summary": "Same as for POST search/jobs.", - "validation": "" - }, - "search_listener": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Same as for POST search/jobs.", - "validation": "" - }, - "search_mode": { - "datatype": "Enum", - "default": "", - "required": "false", - "summary": "Same as for POST 
search/jobs.", - "validation": "" - }, - "sync_bundle_replication": { - "datatype": "Bool", - "default": "", - "required": "false", - "summary": "Same as for POST search/jobs.", - "validation": "" - }, - "time_format": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Same as for POST search/jobs.", - "validation": "" - }, - "timeout": { - "datatype": "Number", - "default": "", - "required": "false", - "summary": "Same as for POST search/jobs.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Searched successfully." - } - }, - "summary": "Performs a search identical to POST search/jobs, except the search does not create a search ID (<sid>) and the search streams results as they become available. Streaming of results is based on the search string.\n \nFor non-streaming searches, previews of the final results are available if preview is enabled. If preview is not enabled, it is better to use search/jobs with exec_mode=oneshot.", - "urlParams": {} - } - }, - "summary": "Allows for streaming of search results as they become available." - }, - "search/jobs/{search_id}": { - "methods": { - "DELETE": { - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Deleted successfully." - }, - "404": { - "summary": "Search job does not exist." - } - }, - "summary": "Deletes the search job specified by {search_id}.\n\n{search_id} is the <sid> field returned from the GET operation for the search/jobs endpoint.", - "urlParams": { - "search_id": { - "required": "true", - "summary": "search_id" - } - } - }, - "GET": { - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "404": { - "summary": "Search job does not exist." - } - }, - "summary": "Returns summary information about the search job specified by {search_id}.\n\nYou can get a search ID from the <sid> field returned from the GET operation for the search/jobs endpoint.", - "urlParams": { - "search_id": { - "required": "true", - "summary": "search_id" - } - } - } - } - }, - "search/jobs/{search_id}/control": { - "methods": { - "POST": { - "params": { - "action": { - "datatype": "Enum", - "default": "", - "required": "true", - "summary": "Valid values: (pause | unpause | finalize | cancel | touch | setttl | setpriority | enablepreview | disablepreview)\n\nThe control action to execute.\n\npause: Suspends the execution of the current search.\n\nunpause: Resumes the execution of the current search, if paused.\n\nfinalize: Stops the search, and provides intermediate results to the /results endpoint.\n\ncancel: Stops the current search and deletes the result cache.\n\ntouch: Extends the expiration time of the search to now + ttl\n\nsetttl: Change the ttl of the search. Arguments: ttl=<number>\n\nsetpriority: Sets the priority of the search process. Arguments: priority=<0-10>\n\nenablepreview: Enable preview generation (may slow search considerably).\n\ndisablepreview: Disable preview generation.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Updated successfully." - }, - "403": { - "summary": "Insufficient permissions to edit control action for search job." - }, - "404": { - "summary": "Search job does not exist."
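A hedged sketch of the export endpoint documented above, reusing the `service` handle and `results` import from the earlier sketch; the query and time bounds are illustrative:

```python
# GET search/jobs/export streams results without creating a server-side
# job, which suits large result sets; parameters mirror POST search/jobs.
stream = service.jobs.export("search index=_internal | head 100",
                             earliest_time="-1h", latest_time="now")
for item in results.ResultsReader(stream):
    if isinstance(item, dict):   # skip informational messages
        print(item)
```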
- } - }, - "summary": "Executes a job control command for the search specified by {search_id}.", - "urlParams": { - "search_id": { - "required": "true", - "summary": "search_id" - } - } - } - } - }, - "search/jobs/{search_id}/events": { - "methods": { - "GET": { - "params": { - "count": { - "datatype": "Number", - "default": "100", - "required": "false", - "summary": "The maximum number of results to return. If value is set to 0, then all available results are returned. Default value is 100.", - "validation": "" - }, - "earliest_time": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "A time string representing the earliest (inclusive), respectively, time bounds for the results to be returned. If not specified, the range applies to all results found.", - "validation": "" - }, - "f": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "A field to return for the event set. \n\nYou can pass multiple POST f arguments if multiple fields are required. If field_list and f are provided, the union of the lists is used.", - "validation": "" - }, - "field_list": { - "datatype": "String", - "default": "*", - "required": "false", - "summary": "Deprecated. Consider using f.\n\nA comma-separated list of the fields to return for the event set.", - "validation": "" - }, - "latest_time": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "A time string representing the latest (exclusive), respectively, time bounds for the results to be returned. If not specified, the range applies to all results found.", - "validation": "" - }, - "max_lines": { - "datatype": "Number", - "default": "0", - "required": "false", - "summary": "The maximum lines that any single event's _raw field should contain. \n\nSpecify 0 to specify no limit.", - "validation": "" - }, - "offset": { - "datatype": "Number", - "default": "0", - "required": "false", - "summary": "The first result (inclusive) from which to begin returning data. \n\nThis value is 0-indexed. Default value is 0. \n\nIn 4.1+, negative offsets are allowed and are added to count to compute the absolute offset (for example, offset=-1 is the last available offset). Offsets in the results are always absolute and never negative.", - "validation": "" - }, - "output_mode": { - "datatype": "Enum", - "default": "xml", - "required": "false", - "summary": "Valid values: (csv | raw | xml | json)\n\nSpecifies what format the output should be returned in.", - "validation": "" - }, - "output_time_format": { - "datatype": "String", - "default": "time_format", - "required": "false", - "summary": "Formats a UTC time. Defaults to what is specified in time_format.", - "validation": "" - }, - "search": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "The post processing search to apply to results. Can be any valid search language string.", - "validation": "" - }, - "segmentation": { - "datatype": "String", - "default": "raw", - "required": "false", - "summary": "The type of segmentation to perform on the data. This includes an option to perform k/v segmentation.\n", - "validation": "" - }, - "time_format": { - "datatype": "String", - "default": " %m/%d/%Y:%H:%M:%S", - "required": "false", - "summary": "Expression to convert a formatted time string from {start,end}_time into UTC seconds.
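The control actions enumerated above map one-for-one onto methods of the SDK's `Job` entity; a sketch using the `job` handle from the first example:

```python
job.pause()            # action=pause
job.unpause()          # action=unpause
job.set_ttl(600)       # action=setttl with ttl=600
job.enable_preview()   # action=enablepreview
job.finalize()         # action=finalize: stop now, keep partial results
```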
\n\nIt defaults to %m/%d/%Y:%H:%M:%S", - "validation": "" - }, - "truncation_mode": { - "datatype": "String", - "default": "abstract", - "required": "false", - "summary": "Specifies how \"max_lines\" should be achieved.\n\nValid values are {abstract, truncate}. Default value is abstract.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "204": { - "summary": "Search was found, but events are not yet ready. Retry request." - }, - "404": { - "summary": "Search job does not exist." - } - }, - "summary": "Returns the events of the search specified by {search_id}. These events are the data from the search pipeline before the first \"transforming\" search command. This is the primary method for a client to fetch a set of UNTRANSFORMED events for the search job.\n\nThis endpoint is only valid if the status_buckets > 0 or the search has no transforming commands.\n\n", - "urlParams": { - "search_id": { - "required": "true", - "summary": "search_id" - } - } - } - } - }, - "search/jobs/{search_id}/results": { - "methods": { - "GET": { - "params": { - "count": { - "datatype": "Number", - "default": "100", - "required": "false", - "summary": "The maximum number of results to return. If value is set to 0, then all available results are returned.", - "validation": "" - }, - "f": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "A field to return for the event set. \n\nYou can pass multiple POST f arguments if multiple field are required. If field_list and f are provided the union of the lists is used.", - "validation": "" - }, - "field_list": { - "datatype": "String", - "default": "*", - "required": "false", - "summary": "Specify a comma-separated list of the fields to return for the event set.", - "validation": "" - }, - "offset": { - "datatype": "Number", - "default": "0", - "required": "false", - "summary": "The first result (inclusive) from which to begin returning data. \n\nThis value is 0-indexed. Default value is 0. \n\nIn 4.1+, negative offsets are allowed and are added to count to compute the absolute offset (for example, offset=-1 is the last available offset). \n\nOffsets in the results are always absolute and never negative.", - "validation": "" - }, - "output_mode": { - "datatype": "Enum", - "default": "", - "required": "false", - "summary": "Valid values: (csv | raw | xml | json)\n\nSpecifies what format the output should be returned in.", - "validation": "" - }, - "search": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "The post processing search to apply to results. Can be any valid search language string.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "204": { - "summary": "Search was found, but events are not yet ready. Retry request." - }, - "404": { - "summary": "Search job does not exist." - } - }, - "summary": "Returns the results of the search specified by {search_id}. This is the table that exists after all processing from the search pipeline has completed.\n\nThis is the primary method for a client to fetch a set of TRANSFORMED events. 
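To make the events/results distinction above concrete, a sketch with the same `job` handle: `events()` returns the untransformed events (valid only when status_buckets > 0 or the search has no transforming command), while `results()` returns the final table; the field list is illustrative:

```python
for event in results.ResultsReader(job.events(count=10, offset=0)):
    print(event)                       # GET .../events

for row in results.ResultsReader(
        job.results(count=10, field_list="host,source")):
    print(row)                         # GET .../results
```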
If the dispatched search does not include a transforming command, the effect is the same as get_events, however with fewer options.", - "urlParams": { - "search_id": { - "required": "true", - "summary": "search_id" - } - } - } - } - }, - "search/jobs/{search_id}/results_preview": { - "methods": { - "GET": { - "params": { - "count": { - "datatype": "Number", - "default": "100", - "required": "false", - "summary": "The maximum number of results to return. \n\nIf value is set to 0, then all available results are returned.", - "validation": "" - }, - "f": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "A field to return for the event set. \n\nYou can pass multiple POST f arguments if multiple field are required. If field_list and f are provided the union of the lists is used.", - "validation": "" - }, - "field_list": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Specify a comma-separated list of the fields to return for the event set.", - "validation": "" - }, - "offset": { - "datatype": "Number", - "default": "0", - "required": "false", - "summary": "The first result (inclusive) from which to begin returning data. \n\nThis value is 0-indexed. Default value is 0. \n\nIn 4.1+, negative offsets are allowed and are added to count to compute the absolute offset (for example, offset=-1 is the last available offset). \n\nOffsets in the results are always absolute and never negative.", - "validation": "" - }, - "output_mode": { - "datatype": "String", - "default": "xml", - "required": "false", - "summary": "Specifies what format the output should be returned in.\n\nValid values are:\n\n csv\n raw\n xml\n json\n", - "validation": "" - }, - "search": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "The post processing search to apply to results. Can be any valid search language string.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "204": { - "summary": "Search was found, but events are not yet ready. Retry request." - }, - "404": { - "summary": "Search job does not exist." - } - }, - "summary": "Returns the intermediate preview results of the search specified by {search_id}. When the job is complete, this gives the same response as /search/jobs/{search_id}/results.\n\nThis endpoint is only valid if preview is enabled. ", - "urlParams": { - "search_id": { - "required": "true", - "summary": "search_id" - } - } - } - } - }, - "search/jobs/{search_id}/search.log": { - "methods": { - "GET": { - "params": { - "attachment": { - "datatype": "Boolean", - "default": "false", - "required": "false", - "summary": "If true, returns search.log as an attachment. Otherwise, streams search.log.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "204": { - "summary": "Search was found, but events are not yet ready. Retry request." - }, - "404": { - "summary": "Search log does not exist." - } - }, - "summary": "Returns the search.log for the search job specified by {search_id}.", - "urlParams": { - "search_id": { - "required": "true", - "summary": "search_id" - } - } - } - } - }, - "search/jobs/{search_id}/summary": { - "methods": { - "GET": { - "params": { - "earliest_time": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Time string representing the earliest (inclusive), respectively, time bounds for the search. 
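A sketch of the two endpoints above, again via the `Job` entity; reading only a slice of search.log is an illustrative choice, not a requirement:

```python
if not job.is_done():                  # previews only make sense mid-flight
    for row in results.ResultsReader(job.preview(count=10)):
        print(row)                     # GET .../results_preview

print(job.searchlog().read()[:500])    # GET .../search.log, first 500 bytes
```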
\n\nThe time string can be either a UTC time (with fractional seconds), a relative time specifier (to now) or a formatted time string. (Also see comment for the search_mode variable.)", - "validation": "" - }, - "f": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "A field to return for the event set.\n\nYou can pass multiple POST f arguments if multiple fields are required. If field_list and f are provided, the union of the lists is used.", - "validation": "" - }, - "field_list": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Deprecated. Consider using f.\n\nA comma-separated list of the fields to return for the event set.", - "validation": "" - }, - "latest_time": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Time string representing the latest (exclusive), respectively, time bounds for the search. \n\nThe time string can be either a UTC time (with fractional seconds), a relative time specifier (to now) or a formatted time string. (Also see comment for the search_mode variable.) ", - "validation": "" - }, - "min_freq": { - "datatype": "Number", - "default": "0", - "required": "false", - "summary": "For each key, the fraction of results this key must occur in to be displayed.\n\nExpress the fraction as a number between 0 and 1.", - "validation": "" - }, - "output_time_format": { - "datatype": "String", - "default": "%FT%T.%Q%:z", - "required": "false", - "summary": "Formats a UTC time. Defaults to what is specified in time_format.", - "validation": "" - }, - "search": { - "datatype": "String", - "default": "Empty string", - "required": "false", - "summary": "Specifies a substring that all returned events should contain either in one of their values or tags.", - "validation": "" - }, - "time_format": { - "datatype": "String", - "default": " %m/%d/%Y:%H:%M:%S", - "required": "false", - "summary": "Expression to convert a formatted time string from {start,end}_time into UTC seconds.\nIt defaults to %m/%d/%Y:%H:%M:%S", - "validation": "" - }, - "top_count": { - "datatype": "Number", - "default": "10", - "required": "false", - "summary": "For each key, specifies how many of the most frequent items to return.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "403": { - "summary": "Insufficient permissions to view summary for search job." - }, - "404": { - "summary": "Summary for search job does not exist." - } - }, - "summary": "Returns \"getFieldsAndStats\" output of the so-far-read events.\n\nThis endpoint is only valid when status_buckets > 0. To guarantee a set of fields in the summary, when creating the search, use the required_field_list or rf parameters.", - "urlParams": { - "search_id": { - "required": "true", - "summary": "search_id" - } - } - } - } - }, - "search/jobs/{search_id}/timeline": { - "methods": { - "GET": { - "params": { - "output_time_format": { - "datatype": "String", - "default": "%FT%T.%Q%:z", - "required": "false", - "summary": "Formats a UTC time. Defaults to what is specified in time_format.", - "validation": "" - }, - "time_format": { - "datatype": "String", - "default": " %m/%d/%Y:%H:%M:%S", - "required": "false", - "summary": "Expression to convert a formatted time string from {start,end}_time into UTC seconds. \n\nIt defaults to %m/%d/%Y:%H:%M:%S", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully."
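Since both the summary and timeline endpoints require status_buckets > 0, a sketch that sets it (and rf, to guarantee summary fields) at creation time; the values are illustrative:

```python
job = service.jobs.create("search index=_internal | head 100",
                          status_buckets=300, rf="sourcetype")
while not job.is_done():
    time.sleep(0.5)

print(job.summary(top_count=5).read())   # GET .../summary
print(job.timeline().read())             # GET .../timeline
```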
- }, - "204": { - "summary": "Search was found, but events are not yet ready. Retry request." - }, - "404": { - "summary": "Timeline for search job does not exist." - } - }, - "summary": "Returns event distribution over time of the so-far-read untransformed events.\n\nThis endpoint is only valid when status_buckets > 0. To guarantee a set of fields in the summary, when creating the search, use the required_field_list or rf parameters.", - "urlParams": { - "search_id": { - "required": "true", - "summary": "search_id" - } - } - } - } - }, - "search/parser": { - "methods": { - "GET": { - "params": { - "enable_lookups": { - "datatype": "Boolean", - "default": "false", - "required": "false", - "summary": "If true, reverse lookups are done to expand the search expression.", - "validation": "" - }, - "output_mode": { - "datatype": "String", - "default": "xml", - "required": "false", - "summary": "Specify output formatting. Select from either:\n\n xml: XML formatting\n json: JSON formatting\n", - "validation": "" - }, - "parse_only": { - "datatype": "Boolean", - "default": "false", - "required": "false", - "summary": "If true, disables expansion of the search due to evaluation of subsearches, time term expansion, lookups, tags, eventtypes, and sourcetype aliases.", - "validation": "" - }, - "q": { - "datatype": "String", - "default": "", - "required": "true", - "summary": "The search string to parse.", - "validation": "" - }, - "reload_macros": { - "datatype": "Boolean", - "default": "true", - "required": "false", - "summary": "If true, reload macro definitions from macros.conf.\n", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. See response body for details." - } - }, - "summary": "Parses Splunk search language and returns a semantic map.", - "urlParams": {} - } - }, - "summary": "Provides search language parsing services." - }, - "search/tags": { - "methods": { - "GET": { - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - } - }, - "summary": "Returns a list of all search time tags.", - "urlParams": {} - } - }, - "summary": "Provides management of search time tags." - }, - "search/tags/{tag_name}": { - "methods": { - "DELETE": { - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Deleted successfully." - }, - "404": { - "summary": "Search tag does not exist." - } - }, - "summary": "Deletes the tag, and its associated field:value pair assignments. The resulting change in tags.conf is to set all field:value pairs to disabled.\n", - "urlParams": { - "tag_name": { - "required": "true", - "summary": "tag_name" - } - } - }, - "GET": { - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "404": { - "summary": "Search tag does not exist."
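The SDK wraps search/parser as `Service.parse`; a minimal sketch, with an illustrative query:

```python
# GET search/parser: returns the semantic map of the query.
response = service.parse("search index=_internal | stats count by sourcetype",
                         parse_only=True)
print(response.body.read())
```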
- } - }, - "summary": "Returns a list of field:value pairs that have been associated with the tag specified by {tag_name}.", - "urlParams": { - "tag_name": { - "required": "true", - "summary": "tag_name" - } - } - }, - "POST": { - "params": { - "add": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "A field:value pair to tag with {tag_name}.", - "validation": "" - }, - "delete": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "A field:value pair to remove from {tag_name}.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Updated successfully." - }, - "201": { - "summary": "Field successfully added to tag." - }, - "400": { - "summary": "Request error. See response body for details." - } - }, - "summary": "Updates the field:value pairs associated with {tag_name}. \n\nMultiple field:value pairs can be attached by passing multiple add or delete form parameters. The server processes all of the adds first, and then processes the deletes.\n\nIf {tag_name} does not exist, then the tag is created inline. Notification is sent to the client using the HTTP 201 status.", - "urlParams": { - "tag_name": { - "required": "true", - "summary": "tag_name" - } - } - } - } - }, - "search/timeparser": { - "methods": { - "GET": { - "params": { - "now": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "The time to use as current time for relative time identifiers. \n\nCan itself either be a relative time (from the real \"now\" time) or an absolute time in the format specified by time_format.\n", - "validation": "" - }, - "output_time_format": { - "datatype": "String", - "default": "%FT%T.%Q%:z", - "required": "false", - "summary": "Used to format a UTC time. Defaults to the value of time_format.", - "validation": "" - }, - "time": { - "datatype": "String", - "default": "", - "required": "true", - "summary": "The time argument to parse. \n\nAcceptable inputs are either a relative time identifier or an absolute time. Multiple time arguments can be passed by specifying multiple time parameters.\n", - "validation": "" - }, - "time_format": { - "datatype": "String", - "default": "%FT%T.%Q%:z", - "required": "false", - "summary": "The format (strftime) of the absolute time format passed in time. \n\nThis field is not used if a relative time identifier is provided. For absolute times, the default value is the ISO-8601 format.\n", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "204": { - "summary": "No timeparser arguments given." - }, - "400": { - "summary": "Request error. See response body for details." - } - }, - "summary": "Returns a lookup table of time arguments to absolute timestamps.", - "urlParams": {} - } - }, - "summary": "Provides time argument parsing."
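splunklib has no dedicated wrappers for search/tags or search/timeparser, but the generic `Service.get` can address them; passing output_mode here is an assumption that these endpoints honor the common output parameter:

```python
tags = service.get("search/tags", output_mode="json")
print(tags.body.read())

parsed = service.get("search/timeparser", time="-24h@h", output_mode="json")
print(parsed.body.read())   # lookup table of time argument -> timestamp
```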
- }, - "search/typeahead": { - "methods": { - "GET": { - "params": { - "count": { - "datatype": "Number", - "default": "", - "required": "true", - "summary": "The number of counts to return for this term.", - "validation": "" - }, - "output_mode": { - "datatype": "String", - "default": "xml", - "required": "false", - "summary": "Valid values: (xml | json)\n\nFormat for the output.", - "validation": "" - }, - "prefix": { - "datatype": "String", - "default": "", - "required": "true", - "summary": "The term for which to return typeahead results.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "204": { - "summary": "No Content. The server successfully processed the request, but is not returning any content." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "403": { - "summary": "Insufficient permissions to view typeahead results." - }, - "405": { - "summary": "Invalid method (only GET is supported)." - } - }, - "summary": "Returns a list of words or descriptions for possible auto-complete terms.\n\ncount is a required parameter to specify how many descriptions to list. prefix is a required parameter to specify a string for terms in your index.", - "urlParams": {} - } - }, - "summary": "Provides search string auto-complete suggestions." - }, - "server/control": { - "methods": { - "GET": { - "config": "", - "params": { - "count": { - "datatype": "Number", - "default": "30", - "required": "false", - "summary": "Indicates the maximum number of entries to return. To return all entries, specify 0.", - "validation": "" - }, - "offset": { - "datatype": "Number", - "default": "0", - "required": "false", - "summary": "Index for first item to return.", - "validation": "" - }, - "search": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Search expression to filter the response. The response matches field values against the search expression. For example:\n\nsearch=foo matches any object that has \"foo\" as a substring in a field.\nsearch=field_name%3Dfield_value restricts the match to a single field. URI-encoding is required in this example.", - "validation": "" - }, - "sort_dir": { - "datatype": "Enum", - "default": "asc", - "required": "false", - "summary": "Valid values: (asc | desc)\n\nIndicates whether to sort returned entries in ascending or descending order.", - "validation": "" - }, - "sort_key": { - "datatype": "String", - "default": "name", - "required": "false", - "summary": "Field to use for sorting.", - "validation": "" - }, - "sort_mode": { - "datatype": "Enum", - "default": "auto", - "required": "false", - "summary": "Valid values: (auto | alpha | alpha_case | num)\n\nIndicates the collating sequence for sorting the returned entries.\nauto: If all values of the field are numbers, collate numerically. Otherwise, collate alphabetically.\nalpha: Collate alphabetically.\nalpha_case: Collate alphabetically, case-sensitive.\nnum: Collate numerically.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view server controls." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." 
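A sketch of the typeahead endpoint above via raw `Service.get`; both required parameters are supplied, and the prefix is illustrative:

```python
suggestions = service.get("search/typeahead",
                          prefix="index=_i", count=5,
                          output_mode="json")
print(suggestions.body.read())
```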
- }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Lists the actions that can be performed at this endpoint.", - "urlParams": {} - } - }, - "summary": "Allows access to controls, such as restarting server." - }, - "server/control/restart": { - "methods": { - "POST": { - "config": "", - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Restart requested successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "402": { - "summary": "The Splunk license in use has disabled this feature." - }, - "403": { - "summary": "Insufficient permissions to restart Splunk." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - }, - "503": { - "summary": "This feature has been disabled in Splunk configuration files." - } - }, - "summary": "Restarts the Splunk server.", - "urlParams": {} - } - }, - "summary": "Allows for restarting Splunk." - }, - "server/info": { - "methods": { - "GET": { - "config": "", - "params": { - "count": { - "datatype": "Number", - "default": "30", - "required": "false", - "summary": "Indicates the maximum number of entries to return. To return all entries, specify 0.", - "validation": "" - }, - "offset": { - "datatype": "Number", - "default": "0", - "required": "false", - "summary": "Index for first item to return.", - "validation": "" - }, - "search": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Search expression to filter the response. The response matches field values against the search expression. For example:\n\nsearch=foo matches any object that has \"foo\" as a substring in a field.\nsearch=field_name%3Dfield_value restricts the match to a single field. URI-encoding is required in this example.", - "validation": "" - }, - "sort_dir": { - "datatype": "Enum", - "default": "asc", - "required": "false", - "summary": "Valid values: (asc | desc)\n\nIndicates whether to sort returned entries in ascending or descending order.", - "validation": "" - }, - "sort_key": { - "datatype": "String", - "default": "name", - "required": "false", - "summary": "Field to use for sorting.", - "validation": "" - }, - "sort_mode": { - "datatype": "Enum", - "default": "auto", - "required": "false", - "summary": "Valid values: (auto | alpha | alpha_case | num)\n\nIndicates the collating sequence for sorting the returned entries.\nauto: If all values of the field are numbers, collate numerically. Otherwise, collate alphabetically.\nalpha: Collate alphabetically.\nalpha_case: Collate alphabetically, case-sensitive.\nnum: Collate numerically.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view server configuration info." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." 
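`Service.restart` wraps the restart endpoint above; with a timeout (in seconds) it blocks until splunkd answers again:

```python
service.restart(timeout=120)   # POST server/control/restart
```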
- } - }, - "summary": "Enumerates the following information about the running splunkd: \n\n build\n cpu_arch (CPU architecture)\n guid (GUID for this splunk instance)\n isFree\n isTrial\n licenseKeys (hashes)\n licenseSignature\n licenseState\n license_labels\n master_guid (GUID of the license master)\n mode\n os_build\n os_name\n os_version\n rtsearch_enabled\n serverName\n version", - "urlParams": {} - } - }, - "summary": "Provides access to configuration information about the server." - }, - "server/info/{name}": { - "methods": { - "GET": { - "config": "", - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view server configuration info." - }, - "404": { - "summary": "Server configuration info does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Provides the identical information as /services/server/info. The only valid {name} here is server-info.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - } - } - }, - "server/logger": { - "methods": { - "GET": { - "config": "", - "params": { - "count": { - "datatype": "Number", - "default": "30", - "required": "false", - "summary": "Indicates the maximum number of entries to return. To return all entries, specify 0.", - "validation": "" - }, - "offset": { - "datatype": "Number", - "default": "0", - "required": "false", - "summary": "Index for first item to return.", - "validation": "" - }, - "search": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Search expression to filter the response. The response matches field values against the search expression. For example:\n\nsearch=foo matches any object that has \"foo\" as a substring in a field.\nsearch=field_name%3Dfield_value restricts the match to a single field. URI-encoding is required in this example.", - "validation": "" - }, - "sort_dir": { - "datatype": "Enum", - "default": "asc", - "required": "false", - "summary": "Valid values: (asc | desc)\n\nIndicates whether to sort returned entries in ascending or descending order.", - "validation": "" - }, - "sort_key": { - "datatype": "String", - "default": "name", - "required": "false", - "summary": "Field to use for sorting.", - "validation": "" - }, - "sort_mode": { - "datatype": "Enum", - "default": "auto", - "required": "false", - "summary": "Valid values: (auto | alpha | alpha_case | num)\n\nIndicates the collating sequence for sorting the returned entries.\nauto: If all values of the field are numbers, collate numerically. Otherwise, collate alphabetically.\nalpha: Collate alphabetically.\nalpha_case: Collate alphabetically, case-sensitive.\nnum: Collate numerically.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view logger info."
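The enumerated fields above surface through the `Service.info` property; a sketch printing a few of them:

```python
info = service.info              # GET server/info
print(info["version"], info["cpu_arch"], info["serverName"])
```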
- }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Enumerates all splunkd logging categories, either specified in code or in $SPLUNK_HOME/etc/log.cfg.", - "urlParams": {} - } - }, - "summary": "Provides access to splunkd logging categories, either specified in code or in $SPLUNK_HOME/etc/log.cfg." - }, - "server/logger/{name}": { - "methods": { - "GET": { - "config": "", - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view logger info." - }, - "404": { - "summary": "Logger info does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Describes a specific splunkd logging category.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - }, - "POST": { - "config": "", - "params": { - "level": { - "datatype": "Enum", - "default": "", - "required": "true", - "summary": "Valid values: (FATAL | CRIT | WARN | INFO | DEBUG)\n\nThe desired logging level for this category.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Updated successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "402": { - "summary": "The Splunk license in use has disabled this feature." - }, - "403": { - "summary": "Insufficient permissions to edit logger configuration." - }, - "404": { - "summary": "Logger configuration does not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - }, - "503": { - "summary": "This feature has been disabled in Splunk configuration files." - } - }, - "summary": "Sets the logging level for a specific logging category.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - } - } - }, - "server/settings": { - "methods": { - "GET": { - "config": "", - "params": { - "count": { - "datatype": "Number", - "default": "30", - "required": "false", - "summary": "Indicates the maximum number of entries to return. To return all entries, specify 0.", - "validation": "" - }, - "offset": { - "datatype": "Number", - "default": "0", - "required": "false", - "summary": "Index for first item to return.", - "validation": "" - }, - "search": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Search expression to filter the response. The response matches field values against the search expression. For example:\n\nsearch=foo matches any object that has \"foo\" as a substring in a field.\nsearch=field_name%3Dfield_value restricts the match to a single field. 
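There is no dedicated logger wrapper in splunklib, so raw GET/POST calls work; the category name below is an illustrative assumption, not taken from this spec:

```python
loggers = service.get("server/logger", count=5, output_mode="json")
print(loggers.body.read())

# POST server/logger/{name}: raise one category to DEBUG.
service.post("server/logger/SearchParser", level="DEBUG")
```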
URI-encoding is required in this example.", - "validation": "" - }, - "sort_dir": { - "datatype": "Enum", - "default": "asc", - "required": "false", - "summary": "Valid values: (asc | desc)\n\nIndicates whether to sort returned entries in ascending or descending order.", - "validation": "" - }, - "sort_key": { - "datatype": "String", - "default": "name", - "required": "false", - "summary": "Field to use for sorting.", - "validation": "" - }, - "sort_mode": { - "datatype": "Enum", - "default": "auto", - "required": "false", - "summary": "Valid values: (auto | alpha | alpha_case | num)\n\nIndicates the collating sequence for sorting the returned entries.\nauto: If all values of the field are numbers, collate numerically. Otherwise, collate alphabetically.\nalpha: Collate alphabetically.\nalpha_case: Collate alphabetically, case-sensitive.\nnum: Collate numerically.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view server settings." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Returns the server configuration of an instance of Splunk.", - "urlParams": {} - } - }, - "summary": "Provides access to server configuration information for an instance of Splunk." - }, - "server/settings/{name}": { - "methods": { - "GET": { - "config": "", - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view server settings." - }, - "404": { - "summary": "Server settings do not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Returns the server configuration of this instance of Splunk.\n\n\"settings\" is the only valid value for {name} in this endpoint. This endpoint returns the same information as [[Documentation:Splunk:RESTAPI:RESTsystem#GET_server.2Fsettings|GET server/settings]].", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - }, - "POST": { - "config": "", - "params": { - "SPLUNK_DB": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Path to the default index for this instance of Splunk.\n\nThe default location is:\n\n$SPLUNK_HOME/var/lib/splunk/defaultdb/db/", - "validation": "is_dir(SPLUNK_DB)" - }, - "enableSplunkWebSSL": { - "datatype": "Boolean", - "default": "", - "required": "false", - "summary": "Toggles between https and http. If true, enables https and SSL for Splunk Web. 
", - "validation": "is_bool(enableSplunkWebSSL)" - }, - "host": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "The default hostname to use for data inputs that do not override this setting.", - "validation": "" - }, - "httpport": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Specifies the port on which Splunk Web is listening for this instance of Splunk. Defaults to 8000. If using SSL, set to the HTTPS port number.\n\nhttpport must be present for SplunkWeb to start. If omitted or 0 the server will NOT start an http listener.", - "validation": "" - }, - "mgmtHostPort": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Specify IP address:Port to set the managment port for splunkd. \n\nDefaults to 127.0.0.1:8089.", - "validation": "" - }, - "minFreeSpace": { - "datatype": "Number", - "default": "", - "required": "false", - "summary": "Specifies, in MB, a safe amount of space that must exist for splunkd to continue operating.\n\nminFreespace affects search and indexing:\n\nBefore attempting to launch a search, splunk requires this amount of free space on the filesystem where the dispatch directory is stored ($SPLUNK_HOME/var/run/splunk/dispatch).\n\nApplied similarly to the search quota values in authorize.conf and limits.conf.\n\nFor indexing, periodically, the indexer checks space on all partitions that contain splunk indexes as specified by indexes.conf. When you need to clear more disk space, indexing is paused and Splunk posts a ui banner + warning.", - "validation": "validate(isint(minFreeSpace), \"Minimum free space must be an integer.\",minFreeSpace > 0, \"Minimum free space must be greater than zero.\")" - }, - "pass4SymmKey": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Password string that is prepended to the splunk symmetric key to generate the final key that is used to sign all traffic between master/slave licenser.", - "validation": "" - }, - "serverName": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Specify an ASCII String to set the name used to identify this Splunk instance for features such as distributed search. Defaults to -.", - "validation": "" - }, - "sessionTimeout": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Specify a time range string to set the amount of time before a user session times out, expressed as a search-like time range. Default is 1h (one hour).\n\nFor example:\n\n24h: (24 hours)\n\n3d: (3 days)\n\n7200s: (7200 seconds, or two hours)\n", - "validation": "" - }, - "startwebserver": { - "datatype": "Boolean", - "default": "", - "required": "false", - "summary": "Specify 1 to enable Splunk Web. 0 disables Splunk Web. Default is 1.", - "validation": "is_bool(startwebserver)" - }, - "trustedIP": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "The IP address of the authenticating proxy. Set to a valid IP address to enable SSO.\n\nDisabled by default. Normal value is '127.0.0.1'", - "validation": "validate(match(trustedIP, \"^\\\\d{1,3}\\\\.\\\\d{1,3}\\\\.\\\\d{1,3}\\\\.\\\\d{1,3}$\"),\"Trusted IP must be an IP address (IPv4)\")" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Updated successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." 
- }, - "402": { - "summary": "The Splunk license in use has disabled this feature." - }, - "403": { - "summary": "Insufficient permissions to edit server settings." - }, - "404": { - "summary": "Server settings do not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - }, - "503": { - "summary": "This feature has been disabled in Splunk configuration files." - } - }, - "summary": "Updates the server configuration of this instance of Splunk.\n\n\"settings\" is the only valid value for {name} in this endpoint.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - } - } - }, - "storage/passwords": { - "methods": { - "GET": { - "config": "app", - "params": { - "count": { - "datatype": "Number", - "default": "30", - "required": "false", - "summary": "Indicates the maximum number of entries to return. To return all entries, specify 0.", - "validation": "" - }, - "offset": { - "datatype": "Number", - "default": "0", - "required": "false", - "summary": "Index for first item to return.", - "validation": "" - }, - "search": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "Search expression to filter the response. The response matches field values against the search expression. For example:\n\nsearch=foo matches any object that has \"foo\" as a substring in a field.\nsearch=field_name%3Dfield_value restricts the match to a single field. URI-encoding is required in this example.", - "validation": "" - }, - "sort_dir": { - "datatype": "Enum", - "default": "asc", - "required": "false", - "summary": "Valid values: (asc | desc)\n\nIndicates whether to sort returned entries in ascending or descending order.", - "validation": "" - }, - "sort_key": { - "datatype": "String", - "default": "name", - "required": "false", - "summary": "Field to use for sorting.", - "validation": "" - }, - "sort_mode": { - "datatype": "Enum", - "default": "auto", - "required": "false", - "summary": "Valid values: (auto | alpha | alpha_case | num)\n\nIndicates the collating sequence for sorting the returned entries.\nauto: If all values of the field are numbers, collate numerically. Otherwise, collate alphabetically.\nalpha: Collate alphabetically.\nalpha_case: Collate alphabetically, case-sensitive.\nnum: Collate numerically.", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view credentials." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." 
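The `Service.settings` entity wraps the endpoint above; the sessionTimeout value is illustrative:

```python
settings = service.settings            # GET server/settings
print(settings["sessionTimeout"])

settings.update(sessionTimeout="8h")   # POST server/settings/settings
settings.refresh()
```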
- } - }, - "summary": "Lists available credentials.", - "urlParams": {} - }, - "POST": { - "config": "app", - "params": { - "name": { - "datatype": "String", - "default": "", - "required": "true", - "summary": "Username for the credentials", - "validation": "" - }, - "password": { - "datatype": "String", - "default": "", - "required": "true", - "summary": "The password for the credentials - this is the only part of the credentials that will be stored securely", - "validation": "" - }, - "realm": { - "datatype": "String", - "default": "", - "required": "false", - "summary": "The credential realm", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "201": { - "summary": "Created successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "402": { - "summary": "The Splunk license in use has disabled this feature." - }, - "403": { - "summary": "Insufficient permissions to create credentials." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - }, - "503": { - "summary": "This feature has been disabled in Splunk configuration files." - } - }, - "summary": "Creates or edits credentials.", - "urlParams": {} - } - }, - "summary": "Allows for management of secure credentials. The password is encrypted with a secret key that resides on the same machine. The clear text passwords can be accessed by users that have access to this service. Only users with admin privileges can access this endpoint.\n\nNote: This endpoint is new for Splunk 4.3. It replaces the deprecated endpoint accessible from /admin/passwords/." - }, - "storage/passwords/{name}": { - "methods": { - "DELETE": { - "config": "app", - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Deleted successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to delete credentials." - }, - "404": { - "summary": "Credentials do not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - } - }, - "summary": "Deletes the identified credentials.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - }, - "GET": { - "config": "app", - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Listed successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "403": { - "summary": "Insufficient permissions to view credentials." - }, - "404": { - "summary": "Credentials do not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details."
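A sketch of the credential lifecycle documented above via `Service.storage_passwords`; the realm, username, and passwords are placeholders:

```python
cred = service.storage_passwords.create("hunter2", "svc_account",
                                        realm="myrealm")

for sp in service.storage_passwords:       # GET storage/passwords
    print(sp.name, sp.realm, sp.username)

cred.update(password="hunter3")            # POST storage/passwords/{name}
service.storage_passwords.delete("svc_account", realm="myrealm")
```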
- } - }, - "summary": "List only the credentials identified by the given id", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - }, - "POST": { - "config": "app", - "params": { - "password": { - "datatype": "INHERITED", - "default": "", - "required": "true", - "summary": "INHERITED", - "validation": "" - } - }, - "request": "", - "response": "", - "returns": { - "200": { - "summary": "Updated successfully." - }, - "400": { - "summary": "Request error. See response body for details." - }, - "401": { - "summary": "Authentication failure: must pass valid credentials with request." - }, - "402": { - "summary": "The Splunk license in use has disabled this feature." - }, - "403": { - "summary": "Insufficient permissions to edit credentials." - }, - "404": { - "summary": "Credentials do not exist." - }, - "409": { - "summary": "Request error: this operation is invalid for this item. See response body for details." - }, - "500": { - "summary": "Internal server error. See response body for details." - }, - "503": { - "summary": "This feature has been disabled in Splunk configuration files." - } - }, - "summary": "Edit the identified credentials.", - "urlParams": { - "name": { - "required": "true", - "summary": "name" - } - } - } - } - } -} diff --git a/examples/explorer/explorer.css b/examples/explorer/explorer.css deleted file mode 100644 index 6cd02a2ad..000000000 --- a/examples/explorer/explorer.css +++ /dev/null @@ -1,187 +0,0 @@ -/* This CSS file was modified from the following tutorial */ -/* Creating a form without table */ -/* Author= "AJAY TALWAR" */ -/* Email- ajayslide183@gmail.com */ - - -*{ margin:0; padding:0;} -body{ font:100% normal Arial, Helvetica, sans-serif; background:#161712;} -form,input,select,textarea{margin:0; padding:0;} - -/* API RESPONSE CSS */ - -div.result{ - margin: 0 auto; - width:95%; - background:#ddd; - border:1px solid #262626; - overflow: auto; - margin-bottom: 50px; -} - -div.result div { - text-transform:uppercase; - border-bottom:1px solid #262626; - font-size:18px; - padding: 5px; - color: #FFF5CC; - text-align:center; -} - -div.result div.prettyprint { - margin-top: 5px; - margin-bottom: 5px; -} - -/* API FORM CSS */ - -div.box{ - margin:0 auto; - width:95%; - background:#222; - border:1px solid #262626; - margin-bottom: 10px; -} - -div.box h1{ - color:#FFF5CC; - font-size:18px; - text-transform:uppercase; - padding:5px 5px 5px 5px; - border-bottom:1px solid #161712; - border-top:1px solid #161712; -} - -div.box h1 span#api-name{ - text-align:left; -} - -div.box h1 span#api-method{ - text-align:left; - vertical-align:top; - float:right; -} - -div.box h2{ - color:#FFF7D9; - font-size:14px; - text-transform:uppercase; - padding:5px 0 5px 5px; - border-bottom:1px solid #161712; - border-top:1px solid #161712; -} - -div.box label{ - width:100%; - display: block; - background:#1C1C1C; - border-top:1px solid #262626; - border-bottom:1px solid #161712; - padding:10px 0 10px 0; -} -div.box label div{ - width:100%; -} -div.box label span.title { - display: inline; - color:#ddd; - font-size:13px; - float:left; - width:20%; - text-align:left; - margin-left: 10px; - margin-top: -5px; - padding:5px 20px 0 0; -} - -div.box label div.param-required{ - display: block; - color:#888; - font-size:11px; - text-align:left; - margin-right: 5px; - margin-left: 10px; - padding:5px 0px 0 0; -} - -div.box label div.param-description{ - display: block; - color:#888; - font-size:11px; - text-align:left; - margin-right: 5px; - margin-left: 10px; - 
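The credential operations in the spec removed above are also reachable through the SDK's client layer. A minimal sketch for reference — `splunklib.client` does expose a `storage_passwords` collection with `create`/`delete` helpers, but the connection values below are placeholders, not part of the deleted file:

```python
# Sketch: exercising the storage/passwords endpoints via splunklib.client.
import splunklib.client as client

service = client.connect(host="localhost", port=8089,
                         username="admin", password="changeme")

# POST storage/passwords -- only the password itself is stored encrypted
service.storage_passwords.create(password="s3cret",
                                 username="api_user",
                                 realm="myrealm")

# GET storage/passwords -- list the credentials this user can see
for credential in service.storage_passwords:
    print(credential.name)

# DELETE storage/passwords/{name}
service.storage_passwords.delete(username="api_user", realm="myrealm")
```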
diff --git a/examples/explorer/explorer.css b/examples/explorer/explorer.css
deleted file mode 100644
index 6cd02a2ad..000000000
--- a/examples/explorer/explorer.css
+++ /dev/null
@@ -1,187 +0,0 @@
-/* This CSS file was modified from the following tutorial */
-/* Creating a form without table */
-/* Author= "AJAY TALWAR" */
-/* Email- ajayslide183@gmail.com */
-
-
-*{ margin:0; padding:0;}
-body{ font:100% normal Arial, Helvetica, sans-serif; background:#161712;}
-form,input,select,textarea{margin:0; padding:0;}
-
-/* API RESPONSE CSS */
-
-div.result{
-    margin: 0 auto;
-    width:95%;
-    background:#ddd;
-    border:1px solid #262626;
-    overflow: auto;
-    margin-bottom: 50px;
-}
-
-div.result div {
-    text-transform:uppercase;
-    border-bottom:1px solid #262626;
-    font-size:18px;
-    padding: 5px;
-    color: #FFF5CC;
-    text-align:center;
-}
-
-div.result div.prettyprint {
-    margin-top: 5px;
-    margin-bottom: 5px;
-}
-
-/* API FORM CSS */
-
-div.box{
-    margin:0 auto;
-    width:95%;
-    background:#222;
-    border:1px solid #262626;
-    margin-bottom: 10px;
-}
-
-div.box h1{
-    color:#FFF5CC;
-    font-size:18px;
-    text-transform:uppercase;
-    padding:5px 5px 5px 5px;
-    border-bottom:1px solid #161712;
-    border-top:1px solid #161712;
-}
-
-div.box h1 span#api-name{
-    text-align:left;
-}
-
-div.box h1 span#api-method{
-    text-align:left;
-    vertical-align:top;
-    float:right;
-}
-
-div.box h2{
-    color:#FFF7D9;
-    font-size:14px;
-    text-transform:uppercase;
-    padding:5px 0 5px 5px;
-    border-bottom:1px solid #161712;
-    border-top:1px solid #161712;
-}
-
-div.box label{
-    width:100%;
-    display: block;
-    background:#1C1C1C;
-    border-top:1px solid #262626;
-    border-bottom:1px solid #161712;
-    padding:10px 0 10px 0;
-}
-div.box label div{
-    width:100%;
-}
-div.box label span.title {
-    display: inline;
-    color:#ddd;
-    font-size:13px;
-    float:left;
-    width:20%;
-    text-align:left;
-    margin-left: 10px;
-    margin-top: -5px;
-    padding:5px 20px 0 0;
-}
-
-div.box label div.param-required{
-    display: block;
-    color:#888;
-    font-size:11px;
-    text-align:left;
-    margin-right: 5px;
-    margin-left: 10px;
-    padding:5px 0px 0 0;
-}
-
-div.box label div.param-description{
-    display: block;
-    color:#888;
-    font-size:11px;
-    text-align:left;
-    margin-right: 5px;
-    margin-left: 10px;
-    padding:5px 0px 0 0;
-}
-
-div.box .input_text{
-    padding:0px 10px;
-    background:#eee;
-    color:#111;
-    border-bottom: 1px double #171717;
-    border-top: 1px double #171717;
-    border-left:1px double #333;
-    border-right:1px double #333;
-    display: block;
-    height: 20px;
-    width: 75%;
-}
-
-div.box .button
-{
-    display: block;
-    padding:0.2em;
-    width: 9em;
-    margin: 0px auto;
-    background-color: #FF6600;
-    color: #000;
-    font-weight: bold;
-    border: 0.2em solid #E9692C;
-}
-
-/* SERVER INFO CSS */
-#api-dropdown-box {
-    display: block;
-    width: 95%;
-    margin: 0px auto;
-    margin-top: 8px;
-    margin-bottom: 8px;
-}
-
-#server-info-form {
-    width: 95%;
-    margin: 0px auto;
-    background: #f00;
-    padding-left: 1px;
-    padding-right: 1px;
-}
-
-#server-info-form div.server-info-field {
-    width: 10.2%;
-    height: 40px;
-    border: 0;
-    margin: 0;
-    padding: 0em;
-    float: left;
-    padding-top: 10px;
-    padding-bottom: 10px;
-    padding-left: 10px;
-    border:1px solid #262626;
-    background:#1C1C1C;
-}
-
-#server-info-form div.server-info-field h3 {
-    text-transform:uppercase;
-    font-size: 0.7em;
-    color: #fff;
-    width: 100%;
-}
-
-#server-info-form div.server-info-field input {
-    font-size: 0.8em;
-    height: 1.4em;
-    color: #222;
-    width: 95%;
-    margin: 0px auto;
-    margin-top: 5px;
-}
\ No newline at end of file
diff --git a/examples/explorer/explorer.html b/examples/explorer/explorer.html
deleted file mode 100755
index 2112b0335..000000000
--- a/examples/explorer/explorer.html
+++ /dev/null
@@ -1,524 +0,0 @@
-[explorer.html (524 lines) is garbled past recovery in this capture; the recoverable content is a "Splunk API Explorer" page with a server-info form (Scheme, Host, Port, Redirect Host, Redirect Port, owner, app, Username, Password), an API endpoint selector, and a prettified API response panel.]
diff --git a/examples/explorer/explorer.py b/examples/explorer/explorer.py
deleted file mode 100755
index 62ebf85eb..000000000
--- a/examples/explorer/explorer.py
+++ /dev/null
@@ -1,75 +0,0 @@
-#!/usr/bin/env python
-#
-# Copyright 2011-2015 Splunk, Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"): you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-from __future__ import absolute_import
-import server
-import webbrowser
-import sys
-import os
-
-sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..", ".."))
-
-try:
-    import utils
-except ImportError:
-    raise Exception("Add the SDK repository to your PYTHONPATH to run the examples "
-                    "(e.g., export PYTHONPATH=~/splunk-sdk-python).")
-
-from splunklib.six.moves import urllib
-
-PORT = 8080
-
-def main(argv):
-    usage = "usage: %prog [options]"
-
-    redirect_port_args = {
-        "redirectport": {
-            "flags": ["--redirectport"],
-            "default": PORT,
-            "help": "Port to use for redirect server (default: %s)" % PORT,
-        },
-    }
-
-    opts = utils.parse(argv, redirect_port_args, ".env", usage=usage)
-
-    args = [("scheme", opts.kwargs["scheme"]),
-            ("host", opts.kwargs["host"]),
-            ("port", opts.kwargs["port"]),
-            ("redirecthost", "localhost"),
-            ("redirectport", opts.kwargs["redirectport"]),
-            ("username", opts.kwargs["username"]),
-            ("password", opts.kwargs["password"])]
-    if 'app' in list(opts.kwargs.keys()):
-        args.append(('app', opts.kwargs['app']))
-    if 'owner' in list(opts.kwargs.keys()):
-        args.append(('owner', opts.kwargs['owner']))
-
-    # Encode these arguments
-    args = urllib.parse.urlencode(args)
-
-    # Launch the browser
-    webbrowser.open("file://%s" % os.path.join(os.getcwd(), "explorer.html?%s" % args))
-
-    # And serve the files
-    server.serve(opts.kwargs["redirectport"])
-
-if __name__ == "__main__":
-    try:
-        main(sys.argv[1:])
-    except KeyboardInterrupt:
-        pass
-    except:
-        raise
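explorer.py hands off to `server.serve(...)`, but the deleted server.py survives only as fragments in the garbled region below. As a hedged stand-in (hypothetical, not the deleted file), its job — serving the explorer's static files on the redirect port — could be approximated with the standard library:

```python
# Hypothetical stand-in for the deleted examples/explorer/server.py:
# serve explorer.html and the prettify/ assets from the working directory.
from http.server import HTTPServer, SimpleHTTPRequestHandler

def serve(port=8080):
    # Serve files until interrupted; explorer.py passes the port as a string
    # from opts.kwargs["redirectport"], hence the int() coercion.
    HTTPServer(("localhost", int(port)), SimpleHTTPRequestHandler).serve_forever()

if __name__ == "__main__":
    serve()
```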
-[The change also deletes the explorer's vendored google-code-prettify assets and its static-file server; their minified bodies are garbled here and are not reproduced: examples/explorer/prettify/lang-apollo.js, lang-clj.js, lang-css.js, lang-go.js, lang-hs.js, lang-lisp.js, lang-lua.js, lang-ml.js, lang-n.js, lang-proto.js, lang-scala.js, lang-sql.js, lang-tex.js, lang-vb.js, lang-vhdl.js, lang-wiki.js, lang-xq.js, lang-yaml.js, prettify.css, prettify.js, and examples/explorer/server.py (only fragments of its port-argument handling survive).]
diff --git a/examples/export/README.md b/examples/export/README.md
deleted file mode 100644
index 6a39aeeee..000000000
--- a/examples/export/README.md
+++ /dev/null
@@ -1,33 +0,0 @@
-# Export
-
-`export.py` is a sample application that exports some or all of the events in
-a specific index, or in all indices, to a file.
-
-The CLI arguments for the export are as follows (all arguments are of the form
-`arg=value`):
-
-    --index      specifies the index to export. Default is all indexes.
-    --progress   prints progress to stdout. Default is no progress shown.
-    --starttime  start time in SECONDS from 1970. Default is to start at the
-                 beginning of the index.
-    --endtime    end time in SECONDS from 1970. Default is to end at the end
-                 of the index.
-    --output     output file name. Default is export.out in the current
-                 working directory.
-    --limit      limits the number of events per chunk. The number actually
-                 used may be smaller than this limit. Default is 100,000.
-    --restart    restarts the export if terminated prematurely.
-    --omode      specifies the output format of the resulting export; the
-                 allowable formats are xml, json, csv.
-
-## Possible Future Work
-
-### Friendly start/end times
-
-Currently, the start/end times are given as seconds from 1970, which is not
-the most friendly/intuitive format.
-
-## Notes
-
-* When using csv or json output formats, sideband messages are not included. If
If - you wish to capture sideband messages, the xml format should be used. \ No newline at end of file diff --git a/examples/export/export.py b/examples/export/export.py deleted file mode 100755 index 3664a7691..000000000 --- a/examples/export/export.py +++ /dev/null @@ -1,355 +0,0 @@ -#!/usr/bin/env python -# -# Copyright 2012 Splunk, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"): you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -""" -This software exports a Splunk index using the streaming export endpoint -using a parameterized chunking mechanism. -""" - -# installation support files -from __future__ import absolute_import -from __future__ import print_function -import sys, os -sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..", "..")) -import time -from os import path - -# splunk support files -from splunklib.binding import connect -try: - from utils import parse -except ImportError: - raise Exception("Add the SDK repository to your PYTHONPATH to run the examples " - "(e.g., export PYTHONPATH=~/splunk-sdk-python).") - -# hidden file -OUTPUT_FILE = "./export.out" -OUTPUT_MODE = "xml" -OUTPUT_MODES = ["csv", "xml", "json"] - -CLIRULES = { - 'end': { - 'flags': ["--endtime"], - 'default': "", - 'help': "End time of export (default is end of index)" - }, - 'index': { - 'flags': ["--index"], - 'default': "*", - 'help': "Index to export (default is all user defined indices)" - }, - 'omode': { - 'flags': ["--omode"], - 'default': OUTPUT_MODE, - 'help': "output format %s default is %s" % (OUTPUT_MODES, OUTPUT_MODE) - }, - 'output': { - 'flags': ["--output"], - 'default': OUTPUT_FILE, - 'help': "Output file name (default is %s)" % OUTPUT_FILE - }, - 'recover': { - 'flags': ["--recover"], - 'default': False, - 'help': "Export attempts to recover from end of existing export" - }, - 'search': { - 'flags': ["--search"], - 'default': "search *", - 'help': "search string (default 'search *')" - }, - 'start': { - 'flags': ["--starttime"], - 'default': "", - 'help': "Start time of export (default is start of index)" - } -} - -def get_csv_next_event_start(location, event_buffer): - """ determine the event start and end of *any* valid event """ - - start = -1 - end = -1 - - event_start = event_buffer.find("\n", location + 1) - event_end = event_buffer.find('"\n', event_start + 1) - - while (event_end > 0): - parts = event_buffer[event_start:event_end].split(",") - # test parts 0 and 1 of CSV. Format should be time.qqq; anything - # else is not a timestamp, so keep moving.
- try: - int(parts[0].replace('\n',"")) - timestamp = parts[1].replace('"', "") - timeparts = timestamp.split('.') - int(timeparts[0]) - int(timeparts[1]) - return (event_start, event_end) - except: - event_start = event_buffer.find("\n", event_end + 2) - event_end = event_buffer.find('"\n', event_start + 1) - - return (start, end) - -def get_csv_event_start(event_buffer): - """ get the event start of an event that is different (in time) from the - adjoining event, in CSV format """ - - (start, end) = get_csv_next_event_start(0, event_buffer) - if start < 0: - return (-1, -1, "") - - print(event_buffer[start:end]) - - tstart = event_buffer.find(",", start) - tend = event_buffer.find(",", tstart+1) - print(event_buffer[tstart:tend]) - last_time = event_buffer[tstart+1:tend].replace('"',"") - - while end > 0: - (start, end) = get_csv_next_event_start(start, event_buffer) - if end < 0: - return (-1, -1, "") - tstart = event_buffer.find(",", start) - tend = event_buffer.find(",", tstart+1) - this_time = event_buffer[tstart+1:tend].replace('"',"") - if this_time != last_time: - return (start, end + 1, last_time) - - return (-1, -1, "") - -def get_xml_event_start(event_buffer): - """ get the event start of an event that is different (in time) from the - adjoining event, in XML format """ - - result_pattern = "<result offset='" - time_key_pattern = "<field k='_time'>" - time_start_pattern = "<value><text>" - time_end_pattern = "<" - event_end_pattern = "</result>" - - event_start = event_buffer.find(result_pattern) - event_end = event_buffer.find(event_end_pattern, event_start) + \ - len(event_end_pattern) - if event_end < 0: - return (-1, -1, "") - time_key_start = event_buffer.find(time_key_pattern, event_start) - time_start = event_buffer.find(time_start_pattern, time_key_start) + \ - len(time_start_pattern) - time_end = event_buffer.find(time_end_pattern, time_start + 1) - last_time = event_buffer[time_start:time_end] - - # walk through events until time changes - event_start = event_end - while event_end > 0: - event_start = event_buffer.find(result_pattern, event_start + 1) - event_end = event_buffer.find(event_end_pattern, event_start) + \ - len(event_end_pattern) - if event_end < 0: - return (-1, -1, "") - time_key_start = event_buffer.find(time_key_pattern, event_start) - time_start = event_buffer.find(time_start_pattern, time_key_start) - time_end = event_buffer.find(time_end_pattern, time_start) - this_time = event_buffer[time_start:time_end] - if this_time != last_time: - return (event_start, event_end, last_time) - event_start = event_end - - return (-1, -1, "") - -def get_json_event_start(event_buffer): - """ get the event start of an event that is different (in time) from the - adjoining event, in JSON format """ - - event_start_pattern = '{"_cd":"' - time_key_pattern = '"_time":"' - time_end_pattern = '"' - event_end_pattern = '"},\n' - event_end_pattern2 = '"}[]' # old json output format bug - - event_start = event_buffer.find(event_start_pattern) - event_end = event_buffer.find(event_end_pattern, event_start) + \ - len(event_end_pattern) - if event_end < 0: - event_end = event_buffer.find(event_end_pattern2, event_start) + \ - len(event_end_pattern2) - if (event_end < 0): - return (-1, -1, "") - - time_start = event_buffer.find(time_key_pattern, event_start) + \ - len(time_key_pattern) - time_end = event_buffer.find(time_end_pattern, time_start + 1) - last_time = event_buffer[time_start:time_end] - - event_start = event_end - while event_end > 0: - event_start = event_buffer.find(event_start_pattern, event_start + 1) - event_end = 
event_buffer.find(event_end_pattern, event_start) + \ - len(event_end_pattern) - if event_end < 0: - event_end = event_buffer.find(event_end_pattern2, event_start) + \ - len(event_end_pattern2) - if (event_end < 0): - return (-1, -1, "") - time_start = event_buffer.find(time_key_pattern, event_start) + \ - len(time_key_pattern) - time_end = event_buffer.find(time_end_pattern, time_start + 1) - this_time = event_buffer[time_start:time_end] - if this_time != last_time: - return (event_start-2, event_end, last_time) - event_start = event_end - - return (-1, -1, "") - -def get_event_start(event_buffer, event_format): - """ dispatch event start method based on event format type """ - - if event_format == "csv": - return get_csv_event_start(event_buffer) - elif event_format == "xml": - return get_xml_event_start(event_buffer) - else: - return get_json_event_start(event_buffer) - -def recover(options): - """ recover from an existing export run. We do this by - finding the last time change between events, truncate the file - and restart from there """ - - event_format = options.kwargs['omode'] - - buffer_size = 64*1024 - fpd = open(options.kwargs['output'], "r+") - fpd.seek(0, 2) # seek to end - fptr = max(fpd.tell() - buffer_size, 0) - fptr_eof = 0 - - while (fptr > 0): - fpd.seek(fptr) - event_buffer = fpd.read(buffer_size) - (event_start, next_event_start, last_time) = \ - get_event_start(event_buffer, event_format) - if (event_start != -1): - fptr_eof = event_start + fptr - break - fptr = fptr - buffer_size - - if fptr < 0: - # didn't find a valid event, so start over - fptr_eof = 0 - last_time = 0 - - # truncate file here - fpd.truncate(fptr_eof) - fpd.seek(fptr_eof) - fpd.write("\n") - fpd.close() - - return last_time - -def cleanup_tail(options): - """ cleanup the tail of a recovery """ - - if options.kwargs['omode'] == "csv": - options.kwargs['fd'].write("\n") - elif options.kwargs['omode'] == "xml": - options.kwargs['fd'].write("\n\n") - else: - options.kwargs['fd'].write("\n]\n") - -def export(options, service): - """ main export method: export any number of indexes """ - - start = options.kwargs['start'] - end = options.kwargs['end'] - fixtail = options.kwargs['fixtail'] - once = True - - squery = options.kwargs['search'] - squery = squery + " index=%s" % options.kwargs['index'] - if (start != ""): - squery = squery + " earliest_time=%s" % start - if (end != ""): - squery = squery + " latest_time=%s" % end - - success = False - - while not success: - # issue query to splunkd - # count=0 overrides the maximum number of events - # returned (normally 50K) regardless of what the .conf - # file for splunkd says. 
- result = service.get('search/jobs/export', - search=squery, - output_mode=options.kwargs['omode'], - timeout=60, - earliest_time="0.000", - time_format="%s.%Q", - count=0) - - if result.status != 200: - print("warning: export job failed: %d, sleep/retry" % result.status) - time.sleep(60) - else: - success = True - - # write export file - while True: - if fixtail and once: - cleanup_tail(options) - once = False - content = result.body.read() - if len(content) == 0: break - options.kwargs['fd'].write(content) - options.kwargs['fd'].write("\n") - - options.kwargs['fd'].flush() - -def main(): - """ main entry """ - options = parse(sys.argv[1:], CLIRULES, ".env") - - if options.kwargs['omode'] not in OUTPUT_MODES: - print("output mode must be one of %s, found %s" % (OUTPUT_MODES, - options.kwargs['omode'])) - sys.exit(1) - - service = connect(**options.kwargs) - - if path.exists(options.kwargs['output']): - if not options.kwargs['recover']: - print("Export file %s exists, and recover option not specified" % \ - options.kwargs['output']) - sys.exit(1) - else: - options.kwargs['end'] = recover(options) - options.kwargs['fixtail'] = True - openmode = "a" - else: - openmode = "w" - options.kwargs['fixtail'] = False - - try: - options.kwargs['fd'] = open(options.kwargs['output'], openmode) - except IOError: - print("Failed to open output file %s w/ mode %s" % \ - (options.kwargs['output'], openmode)) - sys.exit(1) - - export(options, service) - -if __name__ == '__main__': - main() diff --git a/examples/fired_alerts.py b/examples/fired_alerts.py deleted file mode 100755 index e736ea167..000000000 --- a/examples/fired_alerts.py +++ /dev/null @@ -1,51 +0,0 @@ -#!/usr/bin/env python -# -# Copyright 2011-2015 Splunk, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"): you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""A command line utility that prints out fired alerts.""" - -from __future__ import absolute_import -from __future__ import print_function -import sys, os -sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..")) - -from splunklib.client import connect - -try: - from utils import parse -except ImportError: - raise Exception("Add the SDK repository to your PYTHONPATH to run the examples " - "(e.g., export PYTHONPATH=~/splunk-sdk-python).") - -def main(): - opts = parse(sys.argv[1:], {}, ".env") - service = connect(**opts.kwargs) - - for group in service.fired_alerts: - header = "%s (count: %d)" % (group.name, group.count) - print("%s" % header) - print('='*len(header)) - alerts = group.alerts - for alert in alerts.list(): - content = alert.content - for key in sorted(content.keys()): - value = content[key] - print("%s: %s" % (key, value)) - print() - -if __name__ == "__main__": - main() - - diff --git a/examples/follow.py b/examples/follow.py deleted file mode 100755 index cbb559deb..000000000 --- a/examples/follow.py +++ /dev/null @@ -1,89 +0,0 @@ -#!/usr/bin/env python -# -# Copyright 2011-2015 Splunk, Inc.
-# -# Licensed under the Apache License, Version 2.0 (the "License"): you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Follows (aka tails) a realtime search using the job endpoints and prints - results to stdout.""" - -from __future__ import absolute_import -from __future__ import print_function -from pprint import pprint -import sys, os -sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..")) -import time - -import splunklib.client as client -import splunklib.results as results - -try: - import utils -except ImportError: - raise Exception("Add the SDK repository to your PYTHONPATH to run the examples " - "(e.g., export PYTHONPATH=~/splunk-sdk-python).") - -def follow(job, count, items): - offset = 0 # High-water mark - while True: - total = count() - if total <= offset: - time.sleep(1) # Wait for something to show up - job.refresh() - continue - stream = items(offset+1) - for event in results.JSONResultsReader(stream): - pprint(event) - offset = total - -def main(): - usage = "usage: follow.py <search>" - opts = utils.parse(sys.argv[1:], {}, ".env", usage=usage) - - if len(opts.args) != 1: - utils.error("Search expression required", 2) - search = opts.args[0] - - service = client.connect(**opts.kwargs) - - job = service.jobs.create( - search, - earliest_time="rt", - latest_time="rt", - search_mode="realtime") - - # Wait for the job to transition out of QUEUED and PARSING so that - # we can tell if it's a transforming search, or not. - while True: - job.refresh() - if job['dispatchState'] not in ['QUEUED', 'PARSING']: - break - time.sleep(2) # Wait - - if job['reportSearch'] is not None: # Is it a transforming search? - count = lambda: int(job['numPreviews']) - items = lambda _: job.preview(output_mode='json') - else: - count = lambda: int(job['eventCount']) - items = lambda offset: job.events(offset=offset, output_mode='json') - - try: - follow(job, count, items) - except KeyboardInterrupt: - print("\nInterrupted.") - finally: - job.cancel() - -if __name__ == "__main__": - main() - diff --git a/examples/genevents.py b/examples/genevents.py deleted file mode 100755 index 8b9b2d3bf..000000000 --- a/examples/genevents.py +++ /dev/null @@ -1,128 +0,0 @@ -#!/usr/bin/env python -# -# Copyright 2011-2015 Splunk, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"): you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License.
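For reference, the realtime pattern follow.py (above) implements — create a realtime job, wait for it to leave the QUEUED/PARSING states, then drain events past a high-water mark — condenses to the sketch below. The host and credential values are placeholders, not part of the original example:

```python
# Condensed sketch of follow.py's realtime-follow loop (non-transforming
# case). Connection values are placeholders; adjust for your deployment.
import time

import splunklib.client as client
import splunklib.results as results

service = client.connect(host="localhost", port=8089,
                         username="admin", password="changeme")

job = service.jobs.create("search index=_internal",
                          earliest_time="rt", latest_time="rt",
                          search_mode="realtime")

# Wait for the job to leave the QUEUED/PARSING states before reading.
while job.refresh()['dispatchState'] in ['QUEUED', 'PARSING']:
    time.sleep(2)

offset = 0  # high-water mark of events already printed
while True:
    total = int(job['eventCount'])
    if total <= offset:
        time.sleep(1)   # nothing new yet
        job.refresh()
        continue
    stream = job.events(offset=offset, output_mode='json')
    for result in results.JSONResultsReader(stream):
        print(result)
    offset = total
```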
- -"""A tool to generate event data to a named index.""" - -from __future__ import absolute_import -from __future__ import print_function -import socket -import sys, os -from six.moves import range -sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..")) -import time -import datetime -from splunklib.client import connect -try: - from utils import parse -except ImportError: - raise Exception("Add the SDK repository to your PYTHONPATH to run the examples " - "(e.g., export PYTHONPATH=~/splunk-sdk-python.") - -SPLUNK_HOST = "localhost" -SPLUNK_PORT = 9002 - -INGEST_TYPE = ["stream", "submit", "tcp"] - -RULES = { - 'ingest': { - 'flags': ["--ingest"], - 'default': 'stream', - 'help': "sets the type of ingest to one of %s" % INGEST_TYPE - }, - 'inputhost': { - 'flags': ["--inputhost"], - 'default': "127.0.0.1", - 'help': "input host when using tcp ingest, default is localhost" - }, - 'type': { - 'flags': ["--inputport"], - 'default': SPLUNK_PORT, - 'help': "input host port when using tcp ingest, default is %d" % \ - SPLUNK_PORT - }, -} - -def feed_index(service, opts): - """Feed the named index in a specific manner.""" - - indexname = opts.args[0] - itype = opts.kwargs['ingest'] - - - # get index handle - try: - index = service.indexes[indexname] - except KeyError: - print("Index %s not found" % indexname) - return - - if itype in ["stream", "submit"]: - stream = index.attach() - else: - # create a tcp input if one doesn't exist - input_host = opts.kwargs.get("inputhost", SPLUNK_HOST) - input_port = int(opts.kwargs.get("inputport", SPLUNK_PORT)) - input_name = "tcp:%s" % (input_port) - if input_name not in service.inputs.list(): - service.inputs.create("tcp", input_port, index=indexname) - # connect to socket - ingest = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - ingest.connect((input_host, input_port)) - - count = 0 - lastevent = "" - try: - for i in range(0, 10): - for j in range(0, 5000): - lastevent = "%s: event bunch %d, number %d\n" % \ - (datetime.datetime.now().isoformat(), i, j) - - if itype == "stream": - stream.write(lastevent + "\n") - elif itype == "submit": - index.submit(lastevent + "\n") - else: - ingest.send(lastevent + "\n") - - count = count + 1 - - print("submitted %d events, sleeping 1 second" % count) - time.sleep(1) - except KeyboardInterrupt: - print("^C detected, last event written:") - print(lastevent) - -def main(): - usage = "usage: %prog [options] []" - - argv = sys.argv[1:] - if len(argv) == 0: - print("must supply an index name") - sys.exit(1) - - opts = parse(argv, RULES, ".env", usage=usage) - service = connect(**opts.kwargs) - - if opts.kwargs['ingest'] not in INGEST_TYPE: - print("ingest type must be in set %s" % INGEST_TYPE) - sys.exit(1) - - feed_index(service, opts) - - -if __name__ == "__main__": - main() - diff --git a/examples/get_job.py b/examples/get_job.py deleted file mode 100755 index 3d2568154..000000000 --- a/examples/get_job.py +++ /dev/null @@ -1,53 +0,0 @@ -#!/usr/bin/env python -# -# Copyright 2011-2015 Splunk, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"): you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the -# License for the specific language governing permissions and limitations -# under the License. - -"""A simple example showing how to use the Service.job method to retrieve -a search Job by its sid. -""" - -from __future__ import absolute_import -from __future__ import print_function -import sys -import os -import time -sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..")) -import splunklib.client as client - -try: - from utils import * -except ImportError: - raise Exception("Add the SDK repository to your PYTHONPATH to run the examples " - "(e.g., export PYTHONPATH=~/splunk-sdk-python).") - -def main(argv): - opts = parse(argv, {}, ".env") - service = client.connect(**opts.kwargs) - - # Execute a simple search, and store the sid - sid = service.search("search index=_internal | head 5").sid - - # Now, we can get the `Job` - job = service.job(sid) - - # Wait for the job to complete - while not job.is_done(): - time.sleep(1) - - print("Number of events found: %d" % int(job["eventCount"])) - -if __name__ == "__main__": - main(sys.argv[1:]) - diff --git a/examples/github_commits/README.md b/examples/github_commits/README.md deleted file mode 100644 index fe7832c5e..000000000 --- a/examples/github_commits/README.md +++ /dev/null @@ -1,13 +0,0 @@ -splunk-sdk-python github_commits example -======================================== - -This app provides an example of a modular input that pulls down commit data from GitHub and creates events for each commit, which are then streamed to Splunk, based on the owner and repo_name provided by the user during setup of the input. - -To run this example locally run `SPLUNK_VERSION=latest docker compose up -d` from the root of this repository which will mount this example alongside the latest version of splunklib within `/opt/splunk/etc/apps/github_commits` and `/opt/splunk/etc/apps/github_commits/lib/splunklib` within the `splunk` container. - -Once the docker container is up and healthy, log into the Splunk UI and set up a new `Github Commits` input by visiting this page: http://localhost:8000/en-US/manager/github_commits/datainputstats and selecting the "Add new..." button next to the Local Inputs > Github Commits. Enter values for a Github Repository owner and repo_name, for example owner = `splunk` repo_name = `splunk-sdk-python`. -Optionally, provide a `token` if using a private repository and/or to avoid Github's API limits. To get a Github API token visit the [Github settings page](https://github.com/settings/tokens/new) and make sure the repo and public_repo scopes are selected. - -NOTE: If no events appear, then the script is likely not running properly; see https://docs.splunk.com/Documentation/SplunkCloud/latest/AdvancedDev/ModInputsDevTools for more details on debugging the modular input using the command line and relevant logs. - -Once the input is created, you should be able to see an event when running the following search: `source="github_commits://*"`. The event should contain commit data from the given GitHub repository. diff --git a/examples/github_commits/README/inputs.conf.spec b/examples/github_commits/README/inputs.conf.spec deleted file mode 100644 index 156e60a4d..000000000 --- a/examples/github_commits/README/inputs.conf.spec +++ /dev/null @@ -1,6 +0,0 @@ -[github_commits://<name>] -*This example modular input retrieves GitHub commits and indexes them in Splunk.
- -owner = <value> -repo_name = <value> -token = <value> diff --git a/examples/github_commits/bin/github_commits.py b/examples/github_commits/bin/github_commits.py deleted file mode 100644 index 5581b9897..000000000 --- a/examples/github_commits/bin/github_commits.py +++ /dev/null @@ -1,272 +0,0 @@ -#!/usr/bin/env python -# -# Copyright 2021 Splunk, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"): you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from __future__ import absolute_import - -import os -import re -import sys -import json -# NOTE: splunklib must exist within github_commits/lib/splunklib for this -# example to run! To run this locally use `SPLUNK_VERSION=latest docker compose up -d` -# from the root of this repo which mounts this example and the latest splunklib -# code together at /opt/splunk/etc/apps/github_commits -from datetime import datetime - -sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..", "lib")) - -from splunklib.modularinput import * -from splunklib import six -from six.moves import http_client - - -class MyScript(Script): - """All modular inputs should inherit from the abstract base class Script - from splunklib.modularinput.script. - They must override the get_scheme and stream_events functions, and, - if the scheme returned by get_scheme has Scheme.use_external_validation - set to True, the validate_input function. - """ - - def get_scheme(self): - """When Splunk starts, it looks for all the modular inputs defined by - its configuration, and tries to run them with the argument --scheme. - Splunkd expects the modular inputs to print a description of the - input in XML on stdout. The modular input framework takes care of all - the details of formatting XML and printing it. The user need only - override get_scheme and return a new Scheme object. - - :return: scheme, a Scheme object - """ - # Splunk will display "Github Commits" to users for this input - scheme = Scheme("Github Commits") - - scheme.description = "Streams events of commits in the specified Github repository (must be public, unless setting a token)." - # If you set external validation to True, without overriding validate_input, - # the script will accept anything as valid. Generally you only need external - # validation if there are relationships you must maintain among the - # parameters, such as requiring min to be less than max in this example, - # or you need to check that some resource is reachable or valid. - # Otherwise, Splunk lets you specify a validation string for each argument - # and will run validation internally using that string. - scheme.use_external_validation = True - scheme.use_single_instance = False # Set to false so an input can have an optional interval parameter. - - owner_argument = Argument("owner") - owner_argument.title = "Owner" - owner_argument.data_type = Argument.data_type_string - owner_argument.description = "Github user or organization that created the repository."
- owner_argument.required_on_create = True - # If you are not using external validation, you would add something like: - # - # scheme.validation = "owner==splunk" - scheme.add_argument(owner_argument) - - repo_name_argument = Argument("repo_name") - repo_name_argument.title = "Repo Name" - repo_name_argument.data_type = Argument.data_type_string - repo_name_argument.description = "Name of the Github repository." - repo_name_argument.required_on_create = True - scheme.add_argument(repo_name_argument) - - token_argument = Argument("token") - token_argument.title = "Token" - token_argument.data_type = Argument.data_type_string - token_argument.description = "(Optional) A Github API access token. Required for private repositories (the token must have the 'repo' and 'public_repo' scopes enabled). Recommended to avoid Github's API limit, especially if setting an interval." - token_argument.required_on_create = False - token_argument.required_on_edit = False - scheme.add_argument(token_argument) - - return scheme - - def validate_input(self, validation_definition): - """In this example we are using external validation to verify that the Github - repository exists. If validate_input does not raise an Exception, the input - is assumed to be valid. Otherwise it prints the exception as an error message - when telling splunkd that the configuration is invalid. - - When using external validation, after splunkd calls the modular input with - --scheme to get a scheme, it calls it again with --validate-arguments for - each instance of the modular input in its configuration files, feeding XML - on stdin to the modular input to do validation. It is called the same way - whenever a modular input's configuration is edited. - - :param validation_definition: a ValidationDefinition object - """ - # Get the values of the parameters, and construct a URL for the Github API - - owner = validation_definition.parameters["owner"] - repo_name = validation_definition.parameters["repo_name"] - token = None - if "token" in validation_definition.parameters: - token = validation_definition.parameters["token"] - - # Call Github to retrieve repo information - res = _get_github_commits(owner, repo_name, 1, 1, token) - - # If we get any kind of message, that's a bad sign. - if "message" in res: - raise ValueError("Some error occurred while fetching commits - " + res["message"]) - elif len(res) == 1 and "sha" in res[0]: - pass - else: - raise ValueError("Expected only the latest commit, instead found " + str(len(res)) + " commits.") - - def stream_events(self, inputs, ew): - """This function handles all the action: splunk calls this modular input - without arguments, streams XML describing the inputs to stdin, and waits - for XML on stdout describing events. - - If you set use_single_instance to True on the scheme in get_scheme, it - will pass all the instances of this input to a single instance of this - script.
- - :param inputs: an InputDefinition object - :param ew: an EventWriter object - """ - - # Go through each input for this modular input - for input_name, input_item in six.iteritems(inputs.inputs): - # Get fields from the InputDefinition object - owner = input_item["owner"] - repo_name = input_item["repo_name"] - token = None - if "token" in input_item: - token = input_item["token"] - - ''' - access metadata (like server_host, server_uri, etc) of modular inputs app from InputDefinition object - here inputs is a InputDefinition object - server_host = inputs.metadata["server_host"] - server_uri = inputs.metadata["server_uri"] - ''' - # Get the checkpoint directory out of the modular input's metadata - checkpoint_dir = inputs.metadata["checkpoint_dir"] - - checkpoint_file_path = os.path.join(checkpoint_dir, owner + "_" + repo_name + ".txt") - checkpoint_file_new_contents = "" - error_found = False - - # Set the temporary contents of the checkpoint file to an empty string - checkpoint_file_contents = "" - - try: - # read sha values from file, if exist - file = open(checkpoint_file_path, 'r') - checkpoint_file_contents = file.read() - file.close() - except: - # If there's an exception, assume the file doesn't exist - # Create the checkpoint file with an empty string - file = open(checkpoint_file_path, "a") - file.write("") - file.close() - - per_page = 100 # The maximum per page value supported by the Github API. - page = 1 - - while True: - # Get the commit count from the Github API - res = _get_github_commits(owner, repo_name, per_page, page, token) - if len(res) == 0: - break - - file = open(checkpoint_file_path, "a") - - for record in res: - if error_found: - break - - # If the file exists and doesn't contain the sha, or if the file doesn't exist. - if checkpoint_file_contents.find(record["sha"] + "\n") < 0: - try: - _stream_commit(ew, owner, repo_name, record) - # Append this commit to the string we'll write at the end - checkpoint_file_new_contents += record["sha"] + "\n" - except: - error_found = True - file.write(checkpoint_file_new_contents) - - # We had an error, die. 
- return - - file.write(checkpoint_file_new_contents) - file.close() - - page += 1 - - -def _get_display_date(date): - month_strings = ["Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec"] - date_format = "%Y-%m-%d %H:%M:%S" - date = datetime.strptime(date, date_format) - - hours = date.hour - if hours < 10: - hours = "0" + str(hours) - - mins = date.minute - if mins < 10: - mins = "0" + str(mins) - - return "{month} {day}, {year} - {hour}:{minute} {period}".format(month=month_strings[date.month - 1], day=date.day, - year=date.year, hour=hours, minute=mins, - period="AM" if date.hour < 12 else "PM") - - -def _get_github_commits(owner, repo_name, per_page=1, page=1, token=None): - # Read the response from the Github API, then parse the JSON data into an object - repo_path = "/repos/%s/%s/commits?per_page=%d&page=%d" % (owner, repo_name, per_page, page) - connection = http_client.HTTPSConnection('api.github.com') - headers = { - 'Content-type': 'application/json', - 'User-Agent': 'splunk-sdk-python' - } - if token: - headers['Authorization'] = 'token ' + token - connection.request('GET', repo_path, headers=headers) - response = connection.getresponse() - body = response.read().decode() - return json.loads(body) - - -def _stream_commit(ew, owner, repo_name, commitData): - json_data = { - "sha": commitData["sha"], - "api_url": commitData["url"], - "url": "https://github.com/" + owner + "/" + repo_name + "/commit/" + commitData["sha"] - } - commit = commitData["commit"] - - # At this point, assumed checkpoint doesn't exist. - json_data["message"] = re.sub("\n|\r", " ", commit["message"]) - json_data["author"] = commit["author"]["name"] - json_data["rawdate"] = commit["author"]["date"] - commit_date = re.sub("T|Z", " ", commit["author"]["date"]).strip() - json_data["displaydate"] = _get_display_date(commit_date) - - # Create an Event object, and set its fields - event = Event() - event.stanza = repo_name - event.sourceType = "github_commits" - event.data = json.dumps(json_data) - - # Tell the EventWriter to write this event - ew.write_event(event) - - -if __name__ == "__main__": - sys.exit(MyScript().run(sys.argv)) diff --git a/examples/github_commits/default/app.conf b/examples/github_commits/default/app.conf deleted file mode 100644 index 14086d5a2..000000000 --- a/examples/github_commits/default/app.conf +++ /dev/null @@ -1,11 +0,0 @@ -[install] -is_configured = 0 - -[ui] -is_visible = 1 -label = GitHub Commits Modular Input - -[launcher] -author=Splunk -description=This example modular input retrieves GitHub commits and indexes them in Splunk. -version = 1.0 diff --git a/examples/github_forks/README.md b/examples/github_forks/README.md deleted file mode 100644 index 1a05c862f..000000000 --- a/examples/github_forks/README.md +++ /dev/null @@ -1,12 +0,0 @@ -splunk-sdk-python github_forks example -======================================== - -This app provides an example of a modular input that generates the number of repository forks according to the Github API based on the owner and repo_name provided by the user during setup of the input. - -To run this example locally run `SPLUNK_VERSION=latest docker compose up -d` from the root of this repository which will mount this example alongside the latest version of splunklib within `/opt/splunk/etc/apps/github_forks` and `/opt/splunk/etc/apps/github_forks/lib/splunklib` within the `splunk` container. 
- -Once the docker container is up and healthy, log into the Splunk UI and set up a new `Github Repository Forks` input by visiting this page: http://localhost:8000/en-US/manager/github_forks/datainputstats and selecting the "Add new..." button next to the Local Inputs > Github Repository Forks. Enter values for a Github Repository owner and repo_name, for example owner = `splunk` repo_name = `splunk-sdk-python`. - -NOTE: If no Github Repository Forks input appears, then the script is likely not running properly; see https://docs.splunk.com/Documentation/SplunkCloud/latest/AdvancedDev/ModInputsDevTools for more details on debugging the modular input using the command line and relevant logs. - -Once the input is created, you should be able to see an event when running the following search: `source="github_forks://*"`. The event should contain fields for `owner` and `repository` matching the values you input during setup, and a `fork_count` field corresponding to the number of forks the repo has according to the Github API. \ No newline at end of file diff --git a/examples/github_forks/README/inputs.conf.spec b/examples/github_forks/README/inputs.conf.spec deleted file mode 100644 index cd3d69b19..000000000 --- a/examples/github_forks/README/inputs.conf.spec +++ /dev/null @@ -1,5 +0,0 @@ -[github_forks://<name>] -*Streams events giving the number of forks of a GitHub repository - -owner = <value> -repo_name = <value> \ No newline at end of file diff --git a/examples/github_forks/bin/github_forks.py b/examples/github_forks/bin/github_forks.py deleted file mode 100755 index 46b42a81b..000000000 --- a/examples/github_forks/bin/github_forks.py +++ /dev/null @@ -1,166 +0,0 @@ -#!/usr/bin/env python -# -# Copyright 2013 Splunk, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"): you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from __future__ import absolute_import -import os -import sys -import json -# NOTE: splunklib must exist within github_forks/lib/splunklib for this -# example to run! To run this locally use `SPLUNK_VERSION=latest docker compose up -d` -# from the root of this repo which mounts this example and the latest splunklib -# code together at /opt/splunk/etc/apps/github_forks -sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..", "lib")) - -from splunklib.modularinput import * -from splunklib import six -from six.moves import http_client - -class MyScript(Script): - """All modular inputs should inherit from the abstract base class Script - from splunklib.modularinput.script. - They must override the get_scheme and stream_events functions, and, - if the scheme returned by get_scheme has Scheme.use_external_validation - set to True, the validate_input function. - """ - def get_scheme(self): - """When Splunk starts, it looks for all the modular inputs defined by - its configuration, and tries to run them with the argument --scheme. - Splunkd expects the modular inputs to print a description of the - input in XML on stdout. The modular input framework takes care of all - the details of formatting XML and printing it.
The user need only - override get_scheme and return a new Scheme object. - - :return: scheme, a Scheme object - """ - # Splunk will display "Github Repository Forks" to users for this input - scheme = Scheme("Github Repository Forks") - - scheme.description = "Streams events giving the number of forks of a GitHub repository." - # If you set external validation to True, without overriding validate_input, - # the script will accept anything as valid. Generally you only need external - # validation if there are relationships you must maintain among the - # parameters, such as requiring min to be less than max in this example, - # or you need to check that some resource is reachable or valid. - # Otherwise, Splunk lets you specify a validation string for each argument - # and will run validation internally using that string. - scheme.use_external_validation = True - scheme.use_single_instance = True - - owner_argument = Argument("owner") - owner_argument.title = "Owner" - owner_argument.data_type = Argument.data_type_string - owner_argument.description = "Github user or organization that created the repository." - owner_argument.required_on_create = True - # If you are not using external validation, you would add something like: - # - # scheme.validation = "owner==splunk" - scheme.add_argument(owner_argument) - - repo_name_argument = Argument("repo_name") - repo_name_argument.title = "Repo Name" - repo_name_argument.data_type = Argument.data_type_string - repo_name_argument.description = "Name of the Github repository." - repo_name_argument.required_on_create = True - scheme.add_argument(repo_name_argument) - - return scheme - - def validate_input(self, validation_definition): - """In this example we are using external validation to verify that the Github - repository exists. If validate_input does not raise an Exception, the input - is assumed to be valid. Otherwise it prints the exception as an error message - when telling splunkd that the configuration is invalid. - - When using external validation, after splunkd calls the modular input with - --scheme to get a scheme, it calls it again with --validate-arguments for - each instance of the modular input in its configuration files, feeding XML - on stdin to the modular input to do validation. It is called the same way - whenever a modular input's configuration is edited. - - :param validation_definition: a ValidationDefinition object - """ - # Get the values of the parameters, and construct a URL for the Github API - owner = validation_definition.parameters["owner"] - repo_name = validation_definition.parameters["repo_name"] - - # Call Github to retrieve repo information - jsondata = _get_github_repos(owner, repo_name) - - # If there is only 1 field in the jsondata object, some kind of error occurred - # with the Github API. - # Typically, this will happen with an invalid repository. - if len(jsondata) == 1: - raise ValueError("The Github repository was not found.") - - # If the API response seems normal, validate the fork count - # If there's something wrong with getting fork_count, raise a ValueError - try: - fork_count = int(jsondata["forks_count"]) - except ValueError as ve: - raise ValueError("Invalid fork count: %s" % ve) - - def stream_events(self, inputs, ew): - """This function handles all the action: splunk calls this modular input - without arguments, streams XML describing the inputs to stdin, and waits - for XML on stdout describing events.
- - If you set use_single_instance to True on the scheme in get_scheme, it - will pass all the instances of this input to a single instance of this - script. - - :param inputs: an InputDefinition object - :param ew: an EventWriter object - """ - # Go through each input for this modular input - for input_name, input_item in six.iteritems(inputs.inputs): - # Get fields from the InputDefinition object - owner = input_item["owner"] - repo_name = input_item["repo_name"] - - # Hint: API auth required? Get a secret from passwords.conf - # self.service.namespace["app"] = input_item["__app"] - # api_token = self.service.storage_passwords["github_api_token"].clear_password - - # Get the fork count from the Github API - jsondata = _get_github_repos(owner, repo_name) - fork_count = jsondata["forks_count"] - - # Create an Event object, and set its fields - event = Event() - event.stanza = input_name - event.data = 'owner="%s" repository="%s" fork_count=%s' % \ - (owner.replace('"', '\\"'), repo_name.replace('"', '\\"'), fork_count) - - # Tell the EventWriter to write this event - ew.write_event(event) - - -def _get_github_repos(owner, repo_name): - # Read the response from the Github API, then parse the JSON data into an object - repo_path = "/repos/%s/%s" % (owner, repo_name) - connection = http_client.HTTPSConnection('api.github.com') - headers = { - 'Content-type': 'application/json', - 'User-Agent': 'splunk-sdk-python', - } - connection.request('GET', repo_path, headers=headers) - response = connection.getresponse() - body = response.read().decode() - return json.loads(body) - - -if __name__ == "__main__": - sys.exit(MyScript().run(sys.argv)) diff --git a/examples/github_forks/default/app.conf b/examples/github_forks/default/app.conf deleted file mode 100644 index d4c18dee1..000000000 --- a/examples/github_forks/default/app.conf +++ /dev/null @@ -1,11 +0,0 @@ -[install] -is_configured = 0 - -[ui] -is_visible = 1 -label = Github Repository Forks - -[launcher] -author=Splunk -description=Streams events giving the number of forks of a GitHub repository -version = 1.0 \ No newline at end of file diff --git a/examples/handlers/README.md b/examples/handlers/README.md deleted file mode 100644 index d63ef99fa..000000000 --- a/examples/handlers/README.md +++ /dev/null @@ -1,23 +0,0 @@ -# Pluggable HTTP Request Handlers - -The Splunk SDK library supports pluggable HTTP request handlers that enable -the library to be used with alternate HTTP request implementations. - -This feature can be used to supply implementations with support for features -not included in the default request handler (which is based on httplib), such -as support for HTTP proxies and server certificate validation. It can also be -used to provide implementations with additional logging or diagnostic output -for debugging. - -This directory contains a collection of examples that demonstrate various -alternative HTTP request handlers. - -* **handler_urllib2.py** is a simple request handler implemented using urllib2. - -* **handler_debug.py** wraps the default request handler and prints some - simple request information to stdout. - -* **handler_proxy.py** implements support for HTTP requests via a proxy. - -* **handler_certs.py** implements a handler that validates server certs.
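The contract these examples share is deliberately small: a handler is any callable that takes a URL and a request-message dict and returns a dict with `status`, `reason`, `headers`, and a file-like `body`, wired in via `client.connect(handler=...)`. A minimal sketch, wrapping the SDK's default handler (names and connection values are illustrative):

```python
# Minimal sketch of the pluggable-handler contract: wrap the default
# handler and pass its response through unchanged.
import splunklib.binding as binding
import splunklib.client as client

default = binding.handler()  # the SDK's default (httplib-based) handler

def my_handler(url, message, **kwargs):
    # 'message' carries 'method' plus optional 'headers' and 'body'.
    response = default(url, message, **kwargs)
    # 'response' is a dict: status, reason, headers, file-like body.
    return response

# service = client.connect(handler=my_handler, host="localhost",
#                          port=8089, username="admin", password="changeme")
```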
- diff --git a/examples/handlers/cacert.bad.pem b/examples/handlers/cacert.bad.pem deleted file mode 100644 index 48fa1ac97..000000000 --- a/examples/handlers/cacert.bad.pem +++ /dev/null @@ -1,16 +0,0 @@ ------BEGIN CERTIFICATE----- -xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx -UzELMAkGA1UECBMCQ0ExFjAUBgNVBAcTDVNhbiBGcmFuY2lzY28xDzANBgNVBAoT -BlNwbHVuazEXMBUGA1UEAxMOU3BsdW5rQ29tbW9uQ0ExITAfBgkqhkiG9w0BCQEW -EnN1cHBvcnRAc3BsdW5rLmNvbTAeFw0wNjA3MjQxNzEyMTlaFw0xNjA3MjExNzEy -MTlaMH8xCzAJBgNVBAYTAlVTMQswCQYDVQQIEwJDQTEWMBQGA1UEBxMNU2FuIEZy -YW5jaXNjbzEPMA0GA1UEChMGU3BsdW5rMRcwFQYDVQQDEw5TcGx1bmtDb21tb25D -QTEhMB8GCSqGSIb3DQEJARYSc3VwcG9ydEBzcGx1bmsuY29tMIGfMA0GCSqGSIb3 -DQEBAQUAA4GNADCBiQKBgQDJmb55yvam1GqGgTK0dfHXWJiB0Fh8fsdJFRc5dxBJ -PFaC/klmtbLFLbYuXdC2Jh4cm/uhj1/FWmA0Wbhb02roAV03Z3SX0pHyFa3Udyqr -9f5ERJ0AYFA+y5UhbMnD9zlhs7J8ucub3XvA8rn79ejkYtDX2rMQWPNZYPcrxUEh -iwIDAQABMA0GCSqGSIb3DQEBBQUAA4GBAKW37NFwTikJOMo9Z8cjmJDz9wa4yckB -MlEA1/s6k6OmzZH0gkAssLstRkBavlr1uIBPZ2Jfse6FjoJ5ekC1AoXkInwmCspW -GTVCoe8rwhU0xaj0GsC+wA3ykL+UKuXz6iE3oDcnLr0qxiNT2OxdTxz+EB9T0ynR -x/F2KL1hdfCR ------END CERTIFICATE----- diff --git a/examples/handlers/cacert.pem b/examples/handlers/cacert.pem deleted file mode 100644 index bf1366149..000000000 --- a/examples/handlers/cacert.pem +++ /dev/null @@ -1,21 +0,0 @@ ------BEGIN CERTIFICATE----- -MIIDejCCAmICCQCNHBN8tj/FwzANBgkqhkiG9w0BAQsFADB/MQswCQYDVQQGEwJV -UzELMAkGA1UECAwCQ0ExFjAUBgNVBAcMDVNhbiBGcmFuY2lzY28xDzANBgNVBAoM -BlNwbHVuazEXMBUGA1UEAwwOU3BsdW5rQ29tbW9uQ0ExITAfBgkqhkiG9w0BCQEW -EnN1cHBvcnRAc3BsdW5rLmNvbTAeFw0xNzAxMzAyMDI2NTRaFw0yNzAxMjgyMDI2 -NTRaMH8xCzAJBgNVBAYTAlVTMQswCQYDVQQIDAJDQTEWMBQGA1UEBwwNU2FuIEZy -YW5jaXNjbzEPMA0GA1UECgwGU3BsdW5rMRcwFQYDVQQDDA5TcGx1bmtDb21tb25D -QTEhMB8GCSqGSIb3DQEJARYSc3VwcG9ydEBzcGx1bmsuY29tMIIBIjANBgkqhkiG -9w0BAQEFAAOCAQ8AMIIBCgKCAQEAzB9ltVEGk73QvPlxXtA0qMW/SLDQlQMFJ/C/ -tXRVJdQsmcW4WsaETteeWZh8AgozO1LqOa3I6UmrWLcv4LmUAh/T3iZWXzHLIqFN -WLSVU+2g0Xkn43xSgQEPSvEK1NqZRZv1SWvx3+oGHgu03AZrqTj0HyLujqUDARFX -sRvBPW/VfDkomHj9b8IuK3qOUwQtIOUr+oKx1tM1J7VNN5NflLw9NdHtlfblw0Ys -5xI5Qxu3rcCxkKQuwz9KRe4iijOIRMAKX28pbakxU9Nk38Ac3PNadgIk0s7R829k -980sqGWkd06+C17OxgjpQbvLOR20FtmQybttUsXGR7Bp07YStwIDAQABMA0GCSqG -SIb3DQEBCwUAA4IBAQCxhQd6KXP2VzK2cwAqdK74bGwl5WnvsyqdPWkdANiKksr4 -ZybJZNfdfRso3fA2oK1R8i5Ca8LK3V/UuAsXvG6/ikJtWsJ9jf+eYLou8lS6NVJO -xDN/gxPcHrhToGqi1wfPwDQrNVofZcuQNklcdgZ1+XVuotfTCOXHrRoNmZX+HgkY -gEtPG+r1VwSFowfYqyFXQ5CUeRa3JB7/ObF15WfGUYplbd3wQz/M3PLNKLvz5a1z -LMNXDwN5Pvyb2epyO8LPJu4dGTB4jOGpYLUjG1UUqJo9Oa6D99rv6sId+8qjERtl -ZZc1oaC0PKSzBmq+TpbR27B8Zra3gpoA+gavdRZj ------END CERTIFICATE----- diff --git a/examples/handlers/handler_certs.py b/examples/handlers/handler_certs.py deleted file mode 100755 index 7140cd651..000000000 --- a/examples/handlers/handler_certs.py +++ /dev/null @@ -1,121 +0,0 @@ -#!/usr/bin/env python -# -# Copyright 2011-2015 Splunk, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"): you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
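One caveat for modern readers: the script below verifies the server certificate with `ssl.wrap_socket`, which was deprecated and later removed from Python. The equivalent check with today's `ssl` module looks roughly like this (a sketch, not part of the original example):

```python
# Sketch: server-certificate validation with an SSLContext instead of
# the ssl.wrap_socket call used in handler_certs.py below.
import socket
import ssl

def open_verified(host, port, ca_file):
    # CERT_REQUIRED and hostname checking are the context defaults
    context = ssl.create_default_context(cafile=ca_file)
    sock = socket.create_connection((host, port))
    return context.wrap_socket(sock, server_hostname=host)
```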
- -"""Example of a HTTP request handler that validates server certificates.""" - -# -# In order to run this sample, you need to supply the path to the server -# root cert file on the command line, eg: -# -# > python handler_certs.py --ca_file=cacert.pem -# -# For your convenience the Splunk cert file (cacert.pem) is included in this -# directory. There is also a version of the file (cacert.bad.pem) that does -# not match, so that you can check and make sure the validation fails when -# that cert file is ues. -# -# If you run this script without providing the cert file it will simply -# invoke Splunk without anycert validation. -# - -from __future__ import absolute_import - -from io import BytesIO -from pprint import pprint -import ssl -import socket -import sys -import os -sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..", "..")) - -from splunklib import six -from splunklib.six.moves import urllib -import splunklib.client as client - -try: - import utils -except ImportError: - raise Exception("Add the SDK repository to your PYTHONPATH to run the examples " - "(e.g., export PYTHONPATH=~/splunk-sdk-python.") - -RULES = { - "ca_file": { - 'flags': ["--ca_file"], - 'default': None, - 'help': "Root certs file", - } -} - -# Extend httplib's implementation of HTTPSConnection with support server -# certificate validation. -class HTTPSConnection(six.moves.http_client.HTTPSConnection): - def __init__(self, host, port=None, ca_file=None): - six.moves.http_client.HTTPSConnection.__init__(self, host, port) - self.ca_file = ca_file - - def connect(self): - sock = socket.create_connection((self.host, self.port)) - if self.ca_file is not None: - self.sock = ssl.wrap_socket( - sock, None, None, - ca_certs=self.ca_file, - cert_reqs=ssl.CERT_REQUIRED) - else: - self.sock = ssl.wrap_socket( - sock, None, None, cert_reqs=ssl.CERT_NONE) - -def spliturl(url): - parsed_url = urllib.parse.urlparse(url) - host = parsed_url.hostname - port = parsed_url.port - path = '?'.join((parsed_url.path, parsed_url.query)) if parsed_url.query else parsed_url.path - # Strip brackets if its an IPv6 address - if host.startswith('[') and host.endswith(']'): host = host[1:-1] - if port is None: port = DEFAULT_PORT - return parsed_url.scheme, host, port, path - -def handler(ca_file=None): - """Returns an HTTP request handler configured with the given ca_file.""" - - def request(url, message, **kwargs): - scheme, host, port, path = spliturl(url) - - if scheme != "https": - ValueError("unsupported scheme: %s" % scheme) - - connection = HTTPSConnection(host, port, ca_file) - try: - body = message.get('body', "") - headers = dict(message.get('headers', [])) - connection.request(message['method'], path, body, headers) - response = connection.getresponse() - finally: - connection.close() - - return { - 'status': response.status, - 'reason': response.reason, - 'headers': response.getheaders(), - 'body': BytesIO(response.read()) - } - - return request - -opts = utils.parse(sys.argv[1:], RULES, ".env") -ca_file = opts.kwargs['ca_file'] -service = client.connect(handler=handler(ca_file), **opts.kwargs) -pprint([app.name for app in service.apps]) - diff --git a/examples/handlers/handler_debug.py b/examples/handlers/handler_debug.py deleted file mode 100755 index 383428ae4..000000000 --- a/examples/handlers/handler_debug.py +++ /dev/null @@ -1,46 +0,0 @@ -#!/usr/bin/env python -# -# Copyright 2011-2015 Splunk, Inc. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"): you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Example of a debug request handler that wraps the default request handler - and prints debugging information to stdout.""" - -from __future__ import absolute_import -from __future__ import print_function -from pprint import pprint -import sys, os -sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..", "..")) - -import splunklib.binding as binding -import splunklib.client as client - -try: - import utils -except ImportError: - raise Exception("Add the SDK repository to your PYTHONPATH to run the examples " - "(e.g., export PYTHONPATH=~/splunk-sdk-python.") - -def handler(): - default = binding.handler() - def request(url, message, **kwargs): - response = default(url, message, **kwargs) - print("%s %s => %d (%s)" % ( - message['method'], url, response['status'], response['reason'])) - return response - return request - -opts = utils.parse(sys.argv[1:], {}, ".env") -service = client.connect(handler=handler(), **opts.kwargs) -pprint([app.name for app in service.apps]) diff --git a/examples/handlers/handler_proxy.py b/examples/handlers/handler_proxy.py deleted file mode 100755 index eff371541..000000000 --- a/examples/handlers/handler_proxy.py +++ /dev/null @@ -1,95 +0,0 @@ -#!/usr/bin/env python -# -# Copyright 2011-2015 Splunk, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"): you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Example of a HTTP request handler that supports requests via a HTTP proxy.""" - -# -# In order to run this sample, you will need to have a proxy available to -# relay your requests to Splunk. 
One way to do this is to run the tiny-proxy.py -# script included in this directory and then run this script using whatever -# port you bound tiny-proxy to, eg: -# -# > python tiny-proxy.py -p 8080 -# > python handler_proxy.py --proxy=localhost:8080 -# - -from __future__ import absolute_import - -from io import BytesIO -from pprint import pprint -import sys, os -import ssl -sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..", "..")) - -from splunklib.six.moves import urllib - -import splunklib.client as client - -try: - import utils -except ImportError: - raise Exception("Add the SDK repository to your PYTHONPATH to run the examples " - "(e.g., export PYTHONPATH=~/splunk-sdk-python).") - -RULES = { - "proxy": { - 'flags': ["--proxy"], - 'default': "localhost:8080", - 'help': "Use proxy at the given host:port (default localhost:8080)", - } -} - -def request(url, message, **kwargs): - method = message['method'].lower() - data = message.get('body', "") if method == 'post' else None - headers = dict(message.get('headers', [])) - req = urllib.request.Request(url, data, headers) - try: - response = urllib.request.urlopen(req) - except urllib.error.HTTPError as response: - pass # Propagate HTTP errors via the returned response message - except urllib.error.URLError as response: - # If running Python 2.7.9+, disable SSL certificate validation and try again - if sys.version_info >= (2, 7, 9): - response = urllib.request.urlopen(req, context=ssl._create_unverified_context()) - else: - raise - return { - 'status': response.code, - 'reason': response.msg, - 'headers': dict(response.info()), - 'body': BytesIO(response.read()) - } - -def handler(proxy): - proxy_handler = urllib.request.ProxyHandler({'http': proxy, 'https': proxy}) - opener = urllib.request.build_opener(proxy_handler) - urllib.request.install_opener(opener) - return request - -opts = utils.parse(sys.argv[1:], RULES, ".env") -proxy = opts.kwargs['proxy'] -try: - service = client.connect(handler=handler(proxy), **opts.kwargs) - pprint([app.name for app in service.apps]) -except urllib.error.URLError as e: - if e.reason.errno == 1 and sys.version_info < (2, 6, 3): - # There is a bug in Python < 2.6.3 that does not allow proxies with - # HTTPS. You can read more at: http://bugs.python.org/issue1424152 - pass - else: - raise - diff --git a/examples/handlers/handler_urllib2.py b/examples/handlers/handler_urllib2.py deleted file mode 100755 index d81d66d59..000000000 --- a/examples/handlers/handler_urllib2.py +++ /dev/null @@ -1,59 +0,0 @@ -#!/usr/bin/env python -# -# Copyright 2011-2015 Splunk, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"): you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License.
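- -# Note: like the other handler examples in this directory, this file follows -# the handler contract that splunklib expects: a handler is a callable of the -# form request(url, message, **kwargs) that returns a dict with 'status', -# 'reason', 'headers', and 'body' keys ('body' being a file-like object), and -# it is plugged in via client.connect(handler=...).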
- -"""Example of a urllib2 based HTTP request handler.""" - -from __future__ import absolute_import - -from io import BytesIO -from pprint import pprint -import sys, os -sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..", "..")) -from splunklib.six.moves import urllib -import ssl - -import splunklib.client as client - -try: - import utils -except ImportError: - raise Exception("Add the SDK repository to your PYTHONPATH to run the examples " - "(e.g., export PYTHONPATH=~/splunk-sdk-python.") - -def request(url, message, **kwargs): - method = message['method'].lower() - data = message.get('body', "") if method == 'post' else None - headers = dict(message.get('headers', [])) - # If running Python 2.7.9+, disable SSL certificate validation - req = urllib.request.Request(url, data, headers) - try: - if sys.version_info >= (2, 7, 9): - response = urllib.request.urlopen(req, context=ssl._create_unverified_context()) - else: - response = urllib.request.urlopen(req) - except urllib.error.HTTPError as response: - pass # Propagate HTTP errors via the returned response message - return { - 'status': response.code, - 'reason': response.msg, - 'headers': dict(response.info()), - 'body': BytesIO(response.read()) - } - -opts = utils.parse(sys.argv[1:], {}, ".env") -service = client.connect(handler=request, **opts.kwargs) -pprint([app.name for app in service.apps]) - diff --git a/examples/handlers/tiny-proxy.py b/examples/handlers/tiny-proxy.py deleted file mode 100755 index 5603f2096..000000000 --- a/examples/handlers/tiny-proxy.py +++ /dev/null @@ -1,358 +0,0 @@ -#!/usr/bin/python - -from __future__ import absolute_import -from __future__ import print_function -__doc__ = """Tiny HTTP Proxy. - -This module implements GET, HEAD, POST, PUT and DELETE methods -on BaseHTTPServer, and behaves as an HTTP proxy. The CONNECT -method is also implemented experimentally, but has not been -tested yet. - -Any help will be greatly appreciated. 
SUZUKI Hisao - -2009/11/23 - Modified by Mitko Haralanov - * Added very simple FTP file retrieval - * Added custom logging methods - * Added code to make this a standalone application - -2012/03/07 - Modified by Brad Lovering - * Added basic support for IPv6 -""" - -__version__ = "0.3.1" - -import select -import socket -from splunklib.six.moves import BaseHTTPServer -from splunklib.six.moves import socketserver -from splunklib.six.moves import urllib -import logging -import logging.handlers -import getopt -import sys -import os -import signal -import threading -from types import FrameType, CodeType -import time -import ftplib - -DEFAULT_LOG_FILENAME = "proxy.log" - -class ProxyHandler (BaseHTTPServer.BaseHTTPRequestHandler): - __base = BaseHTTPServer.BaseHTTPRequestHandler - __base_handle = __base.handle - - server_version = "TinyHTTPProxy/" + __version__ - rbufsize = 0 # self.rfile Be unbuffered - - def handle(self): - (ip, port) = self.client_address - self.server.logger.log (logging.INFO, "Request from '%s'", ip) - if hasattr(self, 'allowed_clients') and ip not in self.allowed_clients: - self.raw_requestline = self.rfile.readline() - if self.parse_request(): self.send_error(403) - else: - self.__base_handle() - - def _connect_to(self, netloc): - i = netloc.rfind(':') - j = netloc.rfind(']') - if i > j: - host = netloc[:i] - port = int(netloc[i+1:]) - else: - host = netloc - port = 80 - if host[0] == '[' and host[-1] == ']': - host = host[1:-1] - host_port = (host, port) - self.server.logger.log (logging.INFO, "connect to %s:%d", host_port[0], host_port[1]) - try: - return socket.create_connection(host_port) - except socket.error as arg: - try: msg = arg[1] - except: msg = arg - self.send_error(404, msg) - return None - - def do_CONNECT(self): - soc = None - try: - soc = self._connect_to(self.path) - if soc: - self.log_request(200) - self.wfile.write(self.protocol_version + - " 200 Connection established\r\n") - self.wfile.write("Proxy-agent: %s\r\n" % self.version_string()) - self.wfile.write("\r\n") - self._read_write(soc, 300) - finally: - if soc: soc.close() - self.connection.close() - - def do_GET(self): - (scm, netloc, path, params, query, fragment) = urllib.parse.urlparse( - self.path, 'http') - if scm not in ('http', 'ftp') or fragment or not netloc: - self.send_error(400, "bad url %s" % self.path) - return - soc = None - try: - if scm == 'http': - soc = self._connect_to(netloc) - if soc: - self.log_request() - soc.send("%s %s %s\r\n" % (self.command, - urllib.parse.urlunparse(('', '', path, - params, query, - '')), - self.request_version)) - self.headers['Connection'] = 'close' - del self.headers['Proxy-Connection'] - for key_val in self.headers.items(): - soc.send("%s: %s\r\n" % key_val) - soc.send("\r\n") - self._read_write(soc) - elif scm == 'ftp': - # fish out user and password information - i = netloc.find ('@') - if i >= 0: - login_info, netloc = netloc[:i], netloc[i+1:] - try: user, passwd = login_info.split (':', 1) - except ValueError: user, passwd = "anonymous", None - else: user, passwd ="anonymous", None - self.log_request () - try: - ftp = ftplib.FTP (netloc) - ftp.login (user, passwd) - if self.command == "GET": - ftp.retrbinary ("RETR %s"%path, self.connection.send) - ftp.quit () - except Exception as e: - self.server.logger.log (logging.WARNING, "FTP Exception: %s", - e) - finally: - if soc: soc.close() - self.connection.close() - - def _read_write(self, soc, max_idling=20, local=False): - iw = [self.connection, soc] - local_data = "" - ow = [] - count = 0 - 
while 1: - count += 1 - (ins, _, exs) = select.select(iw, ow, iw, 1) - if exs: break - if ins: - for i in ins: - if i is soc: out = self.connection - else: out = soc - data = i.recv(8192) - if data: - if local: local_data += data - else: out.send(data) - count = 0 - if count == max_idling: break - if local: return local_data - return None - - do_HEAD = do_GET - do_POST = do_GET - do_PUT = do_GET - do_DELETE=do_GET - - def log_message (self, format, *args): - self.server.logger.log (logging.INFO, "%s %s", self.address_string (), - format % args) - - def log_error (self, format, *args): - self.server.logger.log (logging.ERROR, "%s %s", self.address_string (), - format % args) - -class ThreadingHTTPServer (socketserver.ThreadingMixIn, - BaseHTTPServer.HTTPServer): - def __init__ (self, server_address, RequestHandlerClass, logger=None): - BaseHTTPServer.HTTPServer.__init__ (self, server_address, - RequestHandlerClass) - self.logger = logger - -def logSetup (filename, log_size, daemon): - logger = logging.getLogger ("TinyHTTPProxy") - logger.setLevel (logging.INFO) - if not filename: - if not daemon: - # display to the screen - handler = logging.StreamHandler () - else: - handler = logging.handlers.RotatingFileHandler (DEFAULT_LOG_FILENAME, - maxBytes=(log_size*(1<<20)), - backupCount=5) - else: - handler = logging.handlers.RotatingFileHandler (filename, - maxBytes=(log_size*(1<<20)), - backupCount=5) - fmt = logging.Formatter ("[%(asctime)-12s.%(msecs)03d] " - "%(levelname)-8s {%(name)s %(threadName)s}" - " %(message)s", - "%Y-%m-%d %H:%M:%S") - handler.setFormatter (fmt) - - logger.addHandler (handler) - return logger - -def usage (msg=None): - if msg: print(msg) - print(sys.argv[0], "[-p port] [-l logfile] [-dh] [allowed_client_name ...]]") - print() - print(" -p - Port to bind to") - print(" -l - Path to logfile. 
If not specified, STDOUT is used") - print(" -d - Run in the background") - print() - -def handler (signo, frame): - while frame and isinstance (frame, FrameType): - if frame.f_code and isinstance (frame.f_code, CodeType): - if "run_event" in frame.f_code.co_varnames: - frame.f_locals["run_event"].set () - return - frame = frame.f_back - -def daemonize_part2 (logger): - class DevNull (object): - def __init__ (self): self.fd = os.open (os.path.devnull, os.O_WRONLY) - def write (self, *args, **kwargs): return 0 - def read (self, *args, **kwargs): return 0 - def fileno (self): return self.fd - def close (self): os.close (self.fd) - class ErrorLog: - def __init__ (self, obj): self.obj = obj - def write (self, string): self.obj.log (logging.ERROR, string) - def read (self, *args, **kwargs): return 0 - def close (self): pass - - - filename = "./proxypid" - if os.name == "nt": - filename = "proxypid" - else: - os.setsid () - - fd = os.open (os.path.devnull, os.O_RDONLY) - if fd != 0: - os.dup2 (fd, 0) - os.close (fd) - null = DevNull () - log = ErrorLog (logger) - sys.stdout = null - sys.stderr = log - sys.stdin = null - fd = os.open (os.path.devnull, os.O_WRONLY) - #if fd != 1: os.dup2 (fd, 1) - os.dup2 (sys.stdout.fileno (), 1) - if fd != 2: os.dup2 (fd, 2) - if fd not in (1, 2): os.close (fd) - # write PID to pidfile - fd = open(filename, "w") - fd.write("%s" % os.getpid()) - fd.close() - -def daemonize(logger, opts): - import subprocess - - if os.name == "nt": - # Windows does not support fork, so we re-invoke this program - # without the daemonize flag - for path in sys.path: - if os.path.exists(os.path.join(path, "python.exe")) == True: - pythonExePath = os.path.join(path, "python.exe") - - cwd = os.getcwd() - cmdline = pythonExePath + " " + os.path.join(cwd, "tiny-proxy.py") - for opt, value in opts: - if opt == "-d": - pass # skip the daemonize flag - else: - cmdline = cmdline + " %s %s" % (str(opt), str(value)) - - subprocess.Popen(cmdline.split(" "), shell=True, cwd=cwd) - time.sleep(1) - sys.exit(0) - else: - if os.fork () != 0: - ## allow the child pid to instantiate the server - ## class - time.sleep (1) - sys.exit (0) - - daemonize_part2(logger) - -def main (): - logfile = None - daemon = False - max_log_size = 20 - port = 8000 - allowed = [] - run_event = threading.Event () - # hard code local host - local_hostname = "127.0.0.1" - - try: opts, args = getopt.getopt (sys.argv[1:], "l:dhp:", []) - except getopt.GetoptError as e: - usage (str (e)) - return 1 - - for opt, value in opts: - if opt == "-p": port = int (value) - if opt == "-l": logfile = value - if opt == "-d": daemon = not daemon - if opt == "-h": - usage () - return 0 - - # setup the log file - logger = logSetup (logfile, max_log_size, daemon) - - if daemon: - daemonize (logger, opts) - - if os.name == "nt": - daemonize_part2(logger) - - signal.signal (signal.SIGINT, handler) - - if args: - allowed = [] - for name in args: - client = socket.gethostbyname(name) - allowed.append(client) - logger.log (logging.INFO, "Accept: %s (%s)" % (client, name)) - ProxyHandler.allowed_clients = allowed - else: - logger.log (logging.INFO, "Any clients will be served...") - - server_address = (socket.gethostbyname (local_hostname), port) - ProxyHandler.protocol = "HTTP/1.0" - httpd = ThreadingHTTPServer (server_address, ProxyHandler, logger) - sa = httpd.socket.getsockname () - print("Servering HTTP on", sa[0], "port", sa[1]) - req_count = 0 - while not run_event.isSet (): - try: - httpd.handle_request () - req_count += 1 - if req_count 
== 1000: - logger.log (logging.INFO, "Number of active threads: %s", - threading.activeCount ()) - req_count = 0 - except select.error as e: - if e[0] == 4 and run_event.isSet (): pass - else: - logger.log (logging.CRITICAL, "Errno: %d - %s", e[0], e[1]) - logger.log (logging.INFO, "Server shutdown") - return 0 - -if __name__ == '__main__': - sys.exit (main ()) diff --git a/examples/index.py b/examples/index.py deleted file mode 100755 index 0c8da974f..000000000 --- a/examples/index.py +++ /dev/null @@ -1,194 +0,0 @@ -#!/usr/bin/env python -# -# Copyright 2011-2015 Splunk, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"): you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""A command line utility for interacting with Splunk indexes.""" - -from __future__ import absolute_import -from __future__ import print_function -import sys, os -sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..")) - -from splunklib.client import connect - -try: - from utils import * -except ImportError: - raise Exception("Add the SDK repository to your PYTHONPATH to run the examples " - "(e.g., export PYTHONPATH=~/splunk-sdk-python).") - -HELP_EPILOG = """ -Commands: - clean <index>+ - create <index> [options] - disable <index>+ - enable <index>+ - list [<index>]* - update <index> [options] - -Examples: - # Create an index called 'MyIndex' - index.py create MyIndex - - # Clean index 'MyIndex' - index.py clean MyIndex - - # Disable indexes 'MyIndex' and 'main' - index.py disable MyIndex main - - # Enable indexes 'MyIndex' and 'main' - index.py enable MyIndex main - - # List all indexes - index.py list - - # List properties of index 'MyIndex' - index.py list MyIndex -""" - -class Program: - def __init__(self, service): - self.service = service - - def clean(self, argv): - self.foreach(argv, lambda index: index.clean()) - - def create(self, argv): - """Create an index according to the given argument vector.""" - - if len(argv) == 0: - error("Command requires an index name", 2) - - name = argv[0] - - if name in self.service.indexes: - print("Index '%s' already exists" % name) - return - - # Read index metadata and construct command line parser rules that - # correspond to each editable field.
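- # For example, if itemmeta() reports an optional field such as 'coldPath', - # the "Build parser rules" step below maps it to a '--coldPath' command - # line flag, so the accepted flags always track whatever fields the server - # reports as editable ('coldPath' here is just an illustrative field name).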
- - # Request editable fields - fields = self.service.indexes.itemmeta().fields.optional - - # Build parser rules - rules = dict([(field, {'flags': ["--%s" % field]}) for field in fields]) - - # Parse the argument vector - opts = cmdline(argv, rules) - - # Execute the edit request - self.service.indexes.create(name, **opts.kwargs) - - def disable(self, argv): - self.foreach(argv, lambda index: index.disable()) - - def enable(self, argv): - self.foreach(argv, lambda index: index.enable()) - - def list(self, argv): - """List available indexes if no names provided, otherwise list the - properties of the named indexes.""" - - def read(index): - print(index.name) - for key in sorted(index.content.keys()): - value = index.content[key] - print(" %s: %s" % (key, value)) - - if len(argv) == 0: - for index in self.service.indexes: - count = index['totalEventCount'] - print("%s (%s)" % (index.name, count)) - else: - self.foreach(argv, read) - - def run(self, argv): - """Dispatch the given command & args.""" - command = argv[0] - handlers = { - 'clean': self.clean, - 'create': self.create, - 'disable': self.disable, - 'enable': self.enable, - 'list': self.list, - 'update': self.update, - } - handler = handlers.get(command, None) - if handler is None: - error("Unrecognized command: %s" % command, 2) - handler(argv[1:]) - - def foreach(self, argv, func): - """Apply the function to each index named in the argument vector.""" - opts = cmdline(argv) - if len(opts.args) == 0: - error("Command requires an index name", 2) - for name in opts.args: - if name not in self.service.indexes: - error("Index '%s' does not exist" % name, 2) - index = self.service.indexes[name] - func(index) - - def update(self, argv): - """Update an index according to the given argument vector.""" - - if len(argv) == 0: - error("Command requires an index name", 2) - name = argv[0] - - if name not in self.service.indexes: - error("Index '%s' does not exist" % name, 2) - index = self.service.indexes[name] - - # Read index metadata and construct command line parser rules that - # correspond to each editable field. - - # Request editable fields - fields = self.service.indexes.itemmeta().fields.optional - - # Build parser rules - rules = dict([(field, {'flags': ["--%s" % field]}) for field in fields]) - - # Parse the argument vector - opts = cmdline(argv, rules) - - # Execute the edit request - index.update(**opts.kwargs) - -def main(): - usage = "usage: %prog [options] []" - - argv = sys.argv[1:] - - # Locate the command - index = next((i for i, v in enumerate(argv) if not v.startswith('-')), -1) - - if index == -1: # No command - options = argv - command = ["list"] - else: - options = argv[:index] - command = argv[index:] - - opts = parse(options, {}, ".env", usage=usage, epilog=HELP_EPILOG) - service = connect(**opts.kwargs) - program = Program(service) - program.run(command) - -if __name__ == "__main__": - main() - - diff --git a/examples/info.py b/examples/info.py deleted file mode 100755 index e54349d4c..000000000 --- a/examples/info.py +++ /dev/null @@ -1,49 +0,0 @@ -#!/usr/bin/env python -# -# Copyright 2011-2015 Splunk, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"): you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""An example that prints Splunk service info & settings.""" - -from __future__ import absolute_import -from __future__ import print_function -import sys, os -sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..")) - -import splunklib.client as client - -try: - from utils import parse -except ImportError: - raise Exception("Add the SDK repository to your PYTHONPATH to run the examples " - "(e.g., export PYTHONPATH=~/splunk-sdk-python.") - -if __name__ == "__main__": - opts = parse(sys.argv[1:], {}, ".env") - service = client.connect(**opts.kwargs) - - content = service.info - for key in sorted(content.keys()): - value = content[key] - if isinstance(value, list): - print("%s:" % key) - for item in value: print(" %s" % item) - else: - print("%s: %s" % (key, value)) - - print("Settings:") - content = service.settings.content - for key in sorted(content.keys()): - value = content[key] - print(" %s: %s" % (key, value)) diff --git a/examples/inputs.py b/examples/inputs.py deleted file mode 100755 index be77d02d5..000000000 --- a/examples/inputs.py +++ /dev/null @@ -1,49 +0,0 @@ -#!/usr/bin/env python -# -# Copyright 2011-2015 Splunk, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"): you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""A command line utility for interacting with Splunk inputs.""" - -from __future__ import absolute_import -from __future__ import print_function -import sys, os -sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..")) - -from splunklib.client import connect - -try: - from utils import parse -except ImportError: - raise Exception("Add the SDK repository to your PYTHONPATH to run the examples " - "(e.g., export PYTHONPATH=~/splunk-sdk-python.") - -def main(): - opts = parse(sys.argv[1:], {}, ".env") - service = connect(**opts.kwargs) - - for item in service.inputs: - header = "%s (%s)" % (item.name, item.kind) - print(header) - print('='*len(header)) - content = item.content - for key in sorted(content.keys()): - value = content[key] - print("%s: %s" % (key, value)) - print() - -if __name__ == "__main__": - main() - - diff --git a/examples/job.py b/examples/job.py deleted file mode 100755 index 8e51ba6a7..000000000 --- a/examples/job.py +++ /dev/null @@ -1,277 +0,0 @@ -#!/usr/bin/env python -# -# Copyright 2011-2015 Splunk, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"): you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""A command line utility for interacting with Splunk search jobs.""" - -# All job commands operate on search 'specifiers' (spec). A search specifier -# is either a search-id (sid) or the index of the search job in the list of -# jobs, eg: @0 would specify the first job in the list, @1 the second, and so -# on. - -from __future__ import absolute_import -from __future__ import print_function -from pprint import pprint -import sys, os -sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..")) - -from splunklib.client import connect -try: - from utils import error, parse, cmdline -except ImportError: - raise Exception("Add the SDK repository to your PYTHONPATH to run the examples " - "(e.g., export PYTHONPATH=~/splunk-sdk-python).") - -HELP_EPILOG = """ -Commands: - cancel <sid>+ - create <query> [options] - events <sid>+ - finalize <sid>+ - list [<sid>]* - pause <sid>+ - preview <sid>+ - results <sid>+ - searchlog <sid>+ - summary <sid>+ - perf <sid>+ - timeline <sid>+ - touch <sid>+ - unpause <sid>+ - -A search can be specified either by using its 'search id' ('sid'), or by -using the index in the listing of searches. For example, @5 would refer -to the 5th search job in the list. - -Examples: - # Cancel a search - job.py cancel @0 - - # Create a search - job.py create 'search * | stats count' --search_mode=blocking - - # List all searches - job.py list - - # List properties of the specified searches - job.py list @3 scheduler__nobody__search_SW5kZXhpbmcgd29ya2xvYWQ_at_1311888600_b18031c8d8f4b4e9 - - # Get all results for the third search - job.py results @3 -""" - -FLAGS_CREATE = [ - "search", "earliest_time", "latest_time", "now", "time_format", - "exec_mode", "search_mode", "rt_blocking", "rt_queue_size", - "rt_maxblocksecs", "rt_indexfilter", "id", "status_buckets", - "max_count", "max_time", "timeout", "auto_finalize_ec", "enable_lookups", - "reload_macros", "reduce_freq", "spawn_process", "required_field_list", - "rf", "auto_cancel", "auto_pause", -] - -FLAGS_EVENTS = [ - "offset", "count", "earliest_time", "latest_time", "search", - "time_format", "output_time_format", "field_list", "f", "max_lines", - "truncation_mode", "output_mode", "segmentation" -] - -FLAGS_RESULTS = [ - "offset", "count", "search", "field_list", "f", "output_mode" -] - -FLAGS_TIMELINE = [ - "time_format", "output_time_format" -] - -FLAGS_SEARCHLOG = [ - "attachment" -] - -FLAGS_SUMMARY = [ - "earliest_time", "latest_time", "time_format", "output_time_format", - "field_list", "f", "search", "top_count", "min_freq" -] - -def cmdline(argv, flags): - """A cmdopts wrapper that takes a list of flags and builds the - corresponding cmdopts rules to match those flags.""" - rules = dict([(flag, {'flags': ["--%s" % flag]}) for flag in flags]) - return parse(argv, rules) - -def output(stream): - """Write the contents of the given stream to stdout.""" - while True: - content = stream.read(1024) - if len(content) == 0: break - sys.stdout.write(content) - -class Program: - def __init__(self, service): - self.service = service - - def cancel(self, argv): - self.foreach(argv, lambda job: job.cancel()) - - def create(self, argv): - """Create a search job.""" - opts = cmdline(argv,
FLAGS_CREATE) - if len(opts.args) != 1: - error("Command requires a search expression", 2) - query = opts.args[0] - job = self.service.jobs.create(query, **opts.kwargs) - print(job.sid) - - def events(self, argv): - """Retrieve events for the specified search jobs.""" - opts = cmdline(argv, FLAGS_EVENTS) - self.foreach(opts.args, lambda job: - output(job.events(**opts.kwargs))) - - def finalize(self, argv): - """Finalize the specified search jobs.""" - self.foreach(argv, lambda job: job.finalize()) - - def foreach(self, argv, func): - """Apply the function to each job specified in the argument vector.""" - if len(argv) == 0: - error("Command requires a search specifier.", 2) - for item in argv: - job = self.lookup(item) - if job is None: - error("Search job '%s' does not exist" % item, 2) - func(job) - - def list(self, argv): - """List all current search jobs if no jobs specified, otherwise - list the properties of the specified jobs.""" - - def read(job): - for key in sorted(job.content.keys()): - # Ignore some fields that make the output hard to read and - # that are available via other commands. - if key in ["performance"]: continue - print("%s: %s" % (key, job.content[key])) - - if len(argv) == 0: - index = 0 - for job in self.service.jobs: - print("@%d : %s" % (index, job.sid)) - index += 1 - return - - self.foreach(argv, read) - - def preview(self, argv): - """Retrieve the preview for the specified search jobs.""" - opts = cmdline(argv, FLAGS_RESULTS) - self.foreach(opts.args, lambda job: - output(job.preview(**opts.kwargs))) - - def results(self, argv): - """Retrieve the results for the specified search jobs.""" - opts = cmdline(argv, FLAGS_RESULTS) - self.foreach(opts.args, lambda job: - output(job.results(**opts.kwargs))) - - def sid(self, spec): - """Convert the given search specifier into a search-id (sid).""" - if spec.startswith('@'): - index = int(spec[1:]) - jobs = self.service.jobs.list() - if index < len(jobs): - return jobs[index].sid - return spec # Assume it was already a valid sid - - def lookup(self, spec): - """Look up a search job by search specifier.""" - return self.service.jobs[self.sid(spec)] - - def pause(self, argv): - """Pause the specified search jobs.""" - self.foreach(argv, lambda job: job.pause()) - - def perf(self, argv): - """Retrieve performance info for the specified search jobs.""" - self.foreach(argv, lambda job: pprint(job['performance'])) - - def run(self, argv): - """Dispatch the given command.""" - command = argv[0] - handlers = { - 'cancel': self.cancel, - 'create': self.create, - 'events': self.events, - 'finalize': self.finalize, - 'list': self.list, - 'pause': self.pause, - 'preview': self.preview, - 'results': self.results, - 'searchlog': self.searchlog, - 'summary': self.summary, - 'perf': self.perf, - 'timeline': self.timeline, - 'touch': self.touch, - 'unpause': self.unpause, - } - handler = handlers.get(command, None) - if handler is None: - error("Unrecognized command: %s" % command, 2) - handler(argv[1:]) - - def searchlog(self, argv): - """Retrieve the searchlog for the specified search jobs.""" - opts = cmdline(argv, FLAGS_SEARCHLOG) - self.foreach(opts.args, lambda job: - output(job.searchlog(**opts.kwargs))) - - def summary(self, argv): - opts = cmdline(argv, FLAGS_SUMMARY) - self.foreach(opts.args, lambda job: - output(job.summary(**opts.kwargs))) - - def timeline(self, argv): - opts = cmdline(argv, FLAGS_TIMELINE) - self.foreach(opts.args, lambda job: - output(job.timeline(**opts.kwargs))) - - def touch(self, argv): -
self.foreach(argv, lambda job: job.touch()) - - def unpause(self, argv): - self.foreach(argv, lambda job: job.unpause()) - -def main(): - usage = "usage: %prog [options] []" - - argv = sys.argv[1:] - - # Locate the command - index = next((i for i, v in enumerate(argv) if not v.startswith('-')), -1) - - if index == -1: # No command - options = argv - command = ["list"] - else: - options = argv[:index] - command = argv[index:] - - opts = parse(options, {}, ".env", usage=usage, epilog=HELP_EPILOG) - service = connect(**opts.kwargs) - program = Program(service) - program.run(command) - -if __name__ == "__main__": - main() - diff --git a/examples/kvstore.py b/examples/kvstore.py deleted file mode 100644 index 2ca32e5a9..000000000 --- a/examples/kvstore.py +++ /dev/null @@ -1,94 +0,0 @@ -#!/usr/bin/env python -# -# Copyright 2011-2015 Splunk, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"): you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""A command line utility for interacting with Splunk KV Store Collections.""" - -from __future__ import absolute_import -from __future__ import print_function -import sys, os, json -sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..")) - -from splunklib.client import connect - -try: - from utils import parse -except ImportError: - raise Exception("Add the SDK repository to your PYTHONPATH to run the examples " - "(e.g., export PYTHONPATH=~/splunk-sdk-python.") - -def main(): - opts = parse(sys.argv[1:], {}, ".env") - opts.kwargs["owner"] = "nobody" - opts.kwargs["app"] = "search" - service = connect(**opts.kwargs) - - print("KV Store Collections:") - for collection in service.kvstore: - print(" %s" % collection.name) - - # Let's delete a collection if it already exists, and then create it - collection_name = "example_collection" - if collection_name in service.kvstore: - service.kvstore.delete(collection_name) - - # Let's create it and then make sure it exists - service.kvstore.create(collection_name) - collection = service.kvstore[collection_name] - - # Let's make sure it doesn't have any data - print("Should be empty: %s" % json.dumps(collection.data.query())) - - # Let's add some json data - collection.data.insert(json.dumps({"_key": "item1", "somekey": 1, "otherkey": "foo"})) - #Let's add data as a dictionary object - collection.data.insert({"_key": "item2", "somekey": 2, "otherkey": "foo"}) - collection.data.insert(json.dumps({"somekey": 3, "otherkey": "bar"})) - - # Let's make sure it has the data we just entered - print("Should have our data: %s" % json.dumps(collection.data.query(), indent=1)) - - # Let's run some queries - print("Should return item1: %s" % json.dumps(collection.data.query_by_id("item1"), indent=1)) - - #Let's update some data - data = collection.data.query_by_id("item2") - data['otherkey'] = "bar" - #Passing data using 'json.dumps' - collection.data.update("item2", json.dumps(data)) - print("Should return item2 with updated data: %s" % json.dumps(collection.data.query_by_id("item2"), indent=1)) - data['otherkey'] = "foo" - # Passing data as a dictionary instance - 
collection.data.update("item2", data) - print("Should return item2 with updated data: %s" % json.dumps(collection.data.query_by_id("item2"), indent=1)) - - - query = json.dumps({"otherkey": "foo"}) - print("Should return item1 and item2: %s" % json.dumps(collection.data.query(query=query), indent=1)) - - query = json.dumps({"otherkey": "bar"}) - print("Should return third item with auto-generated _key: %s" % json.dumps(collection.data.query(query=query), indent=1)) - - # passing query data as dict - query = {"somekey": {"$gt": 1}} - print("Should return item2 and item3: %s" % json.dumps(collection.data.query(query=query), indent=1)) - - # Let's delete the collection - collection.delete() - -if __name__ == "__main__": - main() - - diff --git a/examples/loggers.py b/examples/loggers.py deleted file mode 100755 index df71af09e..000000000 --- a/examples/loggers.py +++ /dev/null @@ -1,43 +0,0 @@ -#!/usr/bin/env python -# -# Copyright 2011-2015 Splunk, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"): you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""A command line tool lists out the Splunk logging categories and their - current logging level.""" - -from __future__ import absolute_import -from __future__ import print_function -import sys, os -sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..")) - -import splunklib.client as client - -try: - from utils import parse -except ImportError: - raise Exception("Add the SDK repository to your PYTHONPATH to run the examples " - "(e.g., export PYTHONPATH=~/splunk-sdk-python.") - -def main(argv): - usage = "usage: %prog [options]" - opts = parse(argv, {}, ".env", usage=usage) - service = client.connect(**opts.kwargs) - - for logger in service.loggers: - print("%s (%s)" % (logger.name, logger['level'])) - -if __name__ == "__main__": - main(sys.argv[1:]) - diff --git a/examples/oneshot.py b/examples/oneshot.py deleted file mode 100755 index 8429aedfb..000000000 --- a/examples/oneshot.py +++ /dev/null @@ -1,54 +0,0 @@ -#!/usr/bin/env python -# -# Copyright 2011-2015 Splunk, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"): you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -"""A command line utility for executing oneshot Splunk searches.""" - -from __future__ import absolute_import -from pprint import pprint -import socket -import sys, os -sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..")) - -from splunklib.client import connect -import splunklib.results as results - -try: - import utils -except ImportError: - raise Exception("Add the SDK repository to your PYTHONPATH to run the examples " - "(e.g., export PYTHONPATH=~/splunk-sdk-python.") - -def pretty(response): - reader = results.JSONResultsReader(response) - for result in reader: - if isinstance(result, dict): - pprint(result) - -def main(): - usage = "usage: oneshot.py " - opts = utils.parse(sys.argv[1:], {}, ".env", usage=usage) - if len(opts.args) != 1: - utils.error("Search expression required", 2) - - search = opts.args[0] - service = connect(**opts.kwargs) - socket.setdefaulttimeout(None) - response = service.jobs.oneshot(search, output_mode='json') - - pretty(response) - -if __name__ == "__main__": - main() diff --git a/examples/random_numbers/README.md b/examples/random_numbers/README.md deleted file mode 100644 index 7ff4069f2..000000000 --- a/examples/random_numbers/README.md +++ /dev/null @@ -1,12 +0,0 @@ -splunk-sdk-python random_numbers example -======================================== - -This app provides an example of a modular input that generates a random number between the min and max values provided by the user during setup of the input. - -To run this example locally run `SPLUNK_VERSION=latest docker compose up -d` from the root of this repository which will mount this example alongside the latest version of splunklib within `/opt/splunk/etc/apps/random_numbers` and `/opt/splunk/etc/apps/random_numbers/lib/splunklib` within the `splunk` container. - -Once the docker container is up and healthy log into the Splunk UI and setup a new `Random Numbers` input by visiting this page: http://localhost:8000/en-US/manager/random_numbers/datainputstats and selecting the "Add new..." button next to the Local Inputs > Random Inputs. Enter values for the `min` and `max` values which the random number should be generated between. - -NOTE: If no Random Numbers input appears then the script is likely not running properly, see https://docs.splunk.com/Documentation/SplunkCloud/latest/AdvancedDev/ModInputsDevTools for more details on debugging the modular input using the command line and relevant logs. - -Once the input is created you should be able to see an event when running the following search: `source="random_numbers://*"` the event should contain a `number` field with a float between the min and max specified when the input was created. \ No newline at end of file diff --git a/examples/random_numbers/README/inputs.conf.spec b/examples/random_numbers/README/inputs.conf.spec deleted file mode 100644 index 4a1038e05..000000000 --- a/examples/random_numbers/README/inputs.conf.spec +++ /dev/null @@ -1,5 +0,0 @@ -[random_numbers://] -*Generates events containing a random floating point number. - -min = -max = \ No newline at end of file diff --git a/examples/random_numbers/bin/random_numbers.py b/examples/random_numbers/bin/random_numbers.py deleted file mode 100755 index b9673db99..000000000 --- a/examples/random_numbers/bin/random_numbers.py +++ /dev/null @@ -1,128 +0,0 @@ -#!/usr/bin/env python -# -# Copyright 2013 Splunk, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"): you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from __future__ import absolute_import -import random, sys -import os -# NOTE: splunklib must exist within random_numbers/lib/splunklib for this -# example to run! To run this locally use `SPLUNK_VERSION=latest docker compose up -d` -# from the root of this repo which mounts this example and the latest splunklib -# code together at /opt/splunk/etc/apps/random_numbers -sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..", "lib")) - -from splunklib.modularinput import * -from splunklib import six - -class MyScript(Script): - """All modular inputs should inherit from the abstract base class Script - from splunklib.modularinput.script. - They must override the get_scheme and stream_events functions, and, - if the scheme returned by get_scheme has Scheme.use_external_validation - set to True, the validate_input function. - """ - def get_scheme(self): - """When Splunk starts, it looks for all the modular inputs defined by - its configuration, and tries to run them with the argument --scheme. - Splunkd expects the modular inputs to print a description of the - input in XML on stdout. The modular input framework takes care of all - the details of formatting XML and printing it. The user need only - override get_scheme and return a new Scheme object. - - :return: scheme, a Scheme object - """ - # "random_numbers" is the name Splunk will display to users for this input. - scheme = Scheme("Random Numbers") - - scheme.description = "Streams events containing a random number." - # If you set external validation to True, without overriding validate_input, - # the script will accept anything as valid. Generally you only need external - # validation if there are relationships you must maintain among the - # parameters, such as requiring min to be less than max in this example, - # or you need to check that some resource is reachable or valid. - # Otherwise, Splunk lets you specify a validation string for each argument - # and will run validation internally using that string. - scheme.use_external_validation = True - scheme.use_single_instance = True - - min_argument = Argument("min") - min_argument.title = "Minimum" - min_argument.data_type = Argument.data_type_number - min_argument.description = "Minimum random number to be produced by this input." - min_argument.required_on_create = True - # If you are not using external validation, you would add something like: - # - # scheme.validation = "min > 0" - scheme.add_argument(min_argument) - - max_argument = Argument("max") - max_argument.title = "Maximum" - max_argument.data_type = Argument.data_type_number - max_argument.description = "Maximum random number to be produced by this input." - max_argument.required_on_create = True - scheme.add_argument(max_argument) - - return scheme - - def validate_input(self, validation_definition): - """In this example we are using external validation to verify that min is - less than max. If validate_input does not raise an Exception, the input is - assumed to be valid. Otherwise it prints the exception as an error message - when telling splunkd that the configuration is invalid. 
- - When using external validation, after splunkd calls the modular input with - --scheme to get a scheme, it calls it again with --validate-arguments for - each instance of the modular input in its configuration files, feeding XML - on stdin to the modular input to do validation. It is called the same way - whenever a modular input's configuration is edited. - - :param validation_definition: a ValidationDefinition object - """ - # Get the parameters from the ValidationDefinition object, - # then typecast the values as floats - minimum = float(validation_definition.parameters["min"]) - maximum = float(validation_definition.parameters["max"]) - - if minimum >= maximum: - raise ValueError("min must be less than max; found min=%f, max=%f" % (minimum, maximum)) - - def stream_events(self, inputs, ew): - """This function handles all the action: splunk calls this modular input - without arguments, streams XML describing the inputs to stdin, and waits - for XML on stdout describing events. - - If you set use_single_instance to True on the scheme in get_scheme, it - will pass all the instances of this input to a single instance of this - script. - - :param inputs: an InputDefinition object - :param ew: an EventWriter object - """ - # Go through each input for this modular input - for input_name, input_item in six.iteritems(inputs.inputs): - # Get the values, cast them as floats - minimum = float(input_item["min"]) - maximum = float(input_item["max"]) - - # Create an Event object, and set its data fields - event = Event() - event.stanza = input_name - event.data = "number=\"%s\"" % str(random.uniform(minimum, maximum)) - - # Tell the EventWriter to write this event - ew.write_event(event) - -if __name__ == "__main__": - sys.exit(MyScript().run(sys.argv)) diff --git a/examples/random_numbers/default/app.conf b/examples/random_numbers/default/app.conf deleted file mode 100644 index 8af3cc6c6..000000000 --- a/examples/random_numbers/default/app.conf +++ /dev/null @@ -1,11 +0,0 @@ -[install] -is_configured = 0 - -[ui] -is_visible = 1 -label = Random Numbers - -[launcher] -author=Splunk -description=Streams events containing a random number -version = 1.0 \ No newline at end of file diff --git a/examples/results.py b/examples/results.py deleted file mode 100755 index e18e8f567..000000000 --- a/examples/results.py +++ /dev/null @@ -1,37 +0,0 @@ -#!/usr/bin/env python -# -# Copyright 2011-2015 Splunk, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"): you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""A script that reads JSON search results from stdin and pretty-prints them - back to stdout.
The script is designed to be used with the search.py - example, eg: './search.py "search 404" | ./results.py'""" - -from __future__ import absolute_import -from pprint import pprint -import sys, os - -sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..")) - -import splunklib.results as results - - -def pretty(): - reader = results.JSONResultsReader(sys.stdin) - for event in reader: - pprint(event) - - -if __name__ == "__main__": - pretty() diff --git a/examples/saved_search/README.md b/examples/saved_search/README.md deleted file mode 100644 index a4e1f23c8..000000000 --- a/examples/saved_search/README.md +++ /dev/null @@ -1,18 +0,0 @@ -# Saved Search - -The saved search example supports `create`, `list`, `list-all` and `delete` -saved search actions. - -`list-all` requires no argument, and will display all saved searches. - -`list` and `delete` require the `--name` argument to either list the contents -of a specific saved search or delete a specific saved search. - -`create` requires the `--name` argument, as well as a list of any other arguments -to establish a saved search. The help output is seen below. - -Of special note are the alert actions that a saved search can fire (`--actions` and -`--action.<action_name>.<key>=<value>`). Email, RSS and scripts can be -invoked as a result of the event firing. Scripts are run out of -`$SPLUNK_HOME/bin/scripts/`. - diff --git a/examples/saved_search/saved_search.py b/examples/saved_search/saved_search.py deleted file mode 100755 index 657f6aa69..000000000 --- a/examples/saved_search/saved_search.py +++ /dev/null @@ -1,216 +0,0 @@ -#!/usr/bin/env python -# -# Copyright 2011-2015 Splunk, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"): you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""A command line utility for manipulating saved searches - (list-all/create/list/delete).""" - -from __future__ import absolute_import -from __future__ import print_function -import sys, os -sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..", "..")) - -import splunklib.binding as binding - -try: - import utils -except ImportError: - raise Exception("Add the SDK repository to your PYTHONPATH to run the examples " - "(e.g., export PYTHONPATH=~/splunk-sdk-python).") - -# these 'rules' allow for setting parameters, primarily for creating saved searches -RULES = { - "name": { - 'flags': ["--name"], - 'help': " name of the saved search to be created" - }, - "search": { - 'flags': ["--search"], - 'help': " splunk search string" - }, - "is_visible": { - 'flags': ["--is_visible"], - 'help': " Should the saved search appear under the Searches & Reports menu (defaults to true)" - }, - "is_scheduled": { - 'flags': ["--is_scheduled"], - 'help': " Does the saved search run on the saved schedule."
- }, - "max_concurrent": { - 'flags': ["--max_concurrent"], - 'help': " If the search is ran by the scheduler how many concurrent instances of this search is the scheduler allowed to run (defaults to 1)" - }, - "realtime_schedule": { - 'flags': ["--realtime_schedule"], - 'help': " Is the scheduler allowed to skip executions of this saved search, if there is not enough search bandwidtch (defaults to true), set to false only for summary index populating searches" - }, - "run_on_startup": { - 'flags': ["--run_on_startup"], - 'help': " Should the scheduler run this saved search on splunkd start up (defaults to false)" - }, - "cron_schedule": { - 'flags': ["--cron_schedule"], - 'help': " The cron formatted schedule of the saved search. Required for Alerts" - }, - "alert_type": { - 'flags': ["--alert_type"], - 'help': " The thing to count a quantity of in relation to relation. Required for Alerts. (huh?)" - }, - "alert_threshold": { - 'flags': ["--alert_threshold"], - 'help': " The quantity of counttype must exceed in relation to relation. Required for Alerts. (huh?)" - }, - "alert_comparator": { - 'flags': ["--alert_comparator"], - 'help': " The relation the count type has to the quantity. Required for Alerts. (huh?)" - }, - "actions": { - 'flags': ["--actions"], - 'help': " A list of the actions to fire on alert; supported values are {(email, rss) | script}. For example, actions = rss,email would enable both RSS feed and email sending. Or if you want to just fire a script: actions = script" - }, - "action...": { - 'flags': ["--action.."], - 'help': " A key/value pair that is specific to the action_type. For example, if actions contains email, then the following keys would be necessary: action.email.to=foo@splunk.com and action.email.sender=splunkbot. For scripts: action.script.filename=doodle.py (note: script is run from $SPLUNK_HOME/bin/scripts/)" - }, - "dispatch.ttl": { - 'flags': ["--dispatch.ttl"], - 'help': " The TTL of the search job created" - }, - "dispatch.buckets": { - 'flags': ["--dispatch.buckets"], - 'help': " The number of event buckets (huh?)" - }, - "dispatch.max_count": { - 'flags': ["--dispatch.max_count"], - 'help': " Maximum number of results" - }, - "dispatch.max_time": { - 'flags': ["--dispatch.max_time"], - 'help': " Maximum amount of time in seconds before finalizing the search" - }, - "dispatch.lookups": { - 'flags': ["--dispatch.lookups"], - 'help': " Boolean flag indicating whether to enable lookups in this search" - }, - "dispatch.spawn_process": { - 'flags': ["--dispatch.spawn_process"], - 'help': " Boolean flag whether to spawn the search as a separate process" - }, - "dispatch.time_format": { - 'flags': ["--dispatch.time_format"], - 'help': " Format string for earliest/latest times" - }, - "dispatch.earliest_time": { - 'flags': ["--dispatch.earliest_time"], - 'help': " The earliest time for the search" - }, - "dispatch.latest_time": { - 'flags': ["--dispatch.latest_time"], - 'help': " The latest time for the search" - }, - "alert.expires": { - 'flags': ["--alert.expires"], - 'help': " [time-specifier] The period of time for which the alert will be shown in the alert's dashboard" - }, - "alert.severity": { - 'flags': ["--alert.severity"], - 'help': " [int] Specifies the alert severity level, valid values are: 1-debug, 2-info, 3-warn, 4-error, 5-severe, 6-fatal" - }, - "alert.supress": { - 'flags': ["--alert.supress"], - 'help': " [bool]whether alert suppression is enabled for this scheduled search" - }, - "alert.supress_keys": { - 'flags': 
["--alert.supress_keys"], - 'help': " [string] comma delimited list of keys to use for suppress, to access result values use result. syntax" - }, - "alert.supress.period": { - 'flags': ["--alert.supress.period"], - 'help': " [time-specifier] suppression period, use ack to suppress until acknowledgment is received" - }, - "alert.digest": { - 'flags': ["--alert.digest"], - 'help': " [bool] whether the alert actions are executed on the entire result set or on each individual result (defaults to true)" - }, - "output_mode": { - 'flags': ["--output_mode"], - 'help': " type of output (atom, xml)" - }, - ## - ## special -- catch these options pre-build to perform catch post/get/delete - ## - "operation": { - 'flags': ["--operation"], - 'help': " type of splunk operation: list-all, list, create, delete (defaults to list-all)" - } -} - -def main(argv): - """ main entry """ - usage = 'usage: %prog --help for options' - opts = utils.parse(argv, RULES, ".env", usage=usage) - - context = binding.connect(**opts.kwargs) - operation = None - - # splunk.binding.debug = True # for verbose information (helpful for debugging) - - # Extract from command line and build into variable args - kwargs = {} - for key in RULES.keys(): - if key in opts.kwargs: - if key == "operation": - operation = opts.kwargs[key] - else: - kwargs[key] = opts.kwargs[key] - - # no operation? if name present, default to list, otherwise list-all - if not operation: - if 'name' in kwargs: - operation = 'list' - else: - operation = 'list-all' - - # pre-sanitize - if (operation != "list" and operation != "create" - and operation != "delete" - and operation != "list-all"): - print("operation %s not one of list-all, list, create, delete" % operation) - sys.exit(0) - - if 'name' not in kwargs and operation != "list-all": - print("operation requires a name") - sys.exit(0) - - # remove arg 'name' from passing through to operation builder, except on create - if operation != "create" and operation != "list-all": - name = kwargs['name'] - kwargs.pop('name') - - # perform operation on saved search created with args from cli - if operation == "list-all": - result = context.get("saved/searches", **kwargs) - elif operation == "list": - result = context.get("saved/searches/%s" % name, **kwargs) - elif operation == "create": - result = context.post("saved/searches", **kwargs) - else: - result = context.delete("saved/searches/%s" % name, **kwargs) - print("HTTP STATUS: %d" % result.status) - xml_data = result.body.read().decode('utf-8') - sys.stdout.write(xml_data) - -if __name__ == "__main__": - main(sys.argv[1:]) diff --git a/examples/saved_searches.py b/examples/saved_searches.py deleted file mode 100755 index 6301339f5..000000000 --- a/examples/saved_searches.py +++ /dev/null @@ -1,55 +0,0 @@ -#!/usr/bin/env python -# -# Copyright 2011-2015 Splunk, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"): you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -"""A command line utility that lists saved searches.""" - -from __future__ import absolute_import -from __future__ import print_function -import sys, os -sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..")) - -from splunklib.client import connect - -try: - from utils import parse -except ImportError: - raise Exception("Add the SDK repository to your PYTHONPATH to run the examples " - "(e.g., export PYTHONPATH=~/splunk-sdk-python.") - - -def main(): - opts = parse(sys.argv[1:], {}, ".env") - service = connect(**opts.kwargs) - - for saved_search in service.saved_searches: - header = saved_search.name - print(header) - print('='*len(header)) - content = saved_search.content - for key in sorted(content.keys()): - value = content[key] - print("%s: %s" % (key, value)) - history = saved_search.history() - if len(history) > 0: - print("history:") - for job in history: - print(" %s" % job.name) - print() - -if __name__ == "__main__": - main() - - diff --git a/examples/search.py b/examples/search.py deleted file mode 100755 index 858e92312..000000000 --- a/examples/search.py +++ /dev/null @@ -1,116 +0,0 @@ -#!/usr/bin/env python -# -# Copyright 2011-2015 Splunk, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"): you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""A command line utility for executing Splunk searches.""" - -from __future__ import absolute_import -import sys, os -sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..")) -from time import sleep - -from splunklib.binding import HTTPError -import splunklib.client as client - -try: - from utils import * -except ImportError: - raise Exception("Add the SDK repository to your PYTHONPATH to run the examples " - "(e.g., export PYTHONPATH=~/splunk-sdk-python.") - -FLAGS_TOOL = [ "verbose" ] - -FLAGS_CREATE = [ - "earliest_time", "latest_time", "now", "time_format", - "exec_mode", "search_mode", "rt_blocking", "rt_queue_size", - "rt_maxblocksecs", "rt_indexfilter", "id", "status_buckets", - "max_count", "max_time", "timeout", "auto_finalize_ec", "enable_lookups", - "reload_macros", "reduce_freq", "spawn_process", "required_field_list", - "rf", "auto_cancel", "auto_pause", -] - -FLAGS_RESULTS = [ - "offset", "count", "search", "field_list", "f", "output_mode" -] - -def cmdline(argv, flags, **kwargs): - """A cmdopts wrapper that takes a list of flags and builds the - corresponding cmdopts rules to match those flags.""" - rules = dict([(flag, {'flags': ["--%s" % flag]}) for flag in flags]) - return parse(argv, rules, ".env", **kwargs) - -def main(argv): - usage = 'usage: %prog [options] "search"' - - flags = [] - flags.extend(FLAGS_TOOL) - flags.extend(FLAGS_CREATE) - flags.extend(FLAGS_RESULTS) - opts = cmdline(argv, flags, usage=usage) - - if len(opts.args) != 1: - error("Search expression required", 2) - search = opts.args[0] - - verbose = opts.kwargs.get("verbose", 0) - - kwargs_splunk = dslice(opts.kwargs, FLAGS_SPLUNK) - kwargs_create = dslice(opts.kwargs, FLAGS_CREATE) - kwargs_results = dslice(opts.kwargs, FLAGS_RESULTS) - - service = 
client.connect(**kwargs_splunk) - - try: - service.parse(search, parse_only=True) - except HTTPError as e: - error("query '%s' is invalid:\n\t%s" % (search, str(e)), 2) - return - - job = service.jobs.create(search, **kwargs_create) - while True: - while not job.is_ready(): - pass - stats = {'isDone': job['isDone'], - 'doneProgress': job['doneProgress'], - 'scanCount': job['scanCount'], - 'eventCount': job['eventCount'], - 'resultCount': job['resultCount']} - progress = float(stats['doneProgress'])*100 - scanned = int(stats['scanCount']) - matched = int(stats['eventCount']) - results = int(stats['resultCount']) - if verbose > 0: - status = ("\r%03.1f%% | %d scanned | %d matched | %d results" % ( - progress, scanned, matched, results)) - sys.stdout.write(status) - sys.stdout.flush() - if stats['isDone'] == '1': - if verbose > 0: sys.stdout.write('\n') - break - sleep(2) - - if 'count' not in kwargs_results: kwargs_results['count'] = 0 - results = job.results(**kwargs_results) - while True: - content = results.read(1024) - if len(content) == 0: break - sys.stdout.write(content.decode('utf-8')) - sys.stdout.flush() - sys.stdout.write('\n') - - job.cancel() - -if __name__ == "__main__": - main(sys.argv[1:]) diff --git a/examples/search_modes.py b/examples/search_modes.py deleted file mode 100644 index f1d1687f2..000000000 --- a/examples/search_modes.py +++ /dev/null @@ -1,41 +0,0 @@ -import sys -import os -# import from utils/__init__.py -sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..")) -from utils import * -import time -from splunklib.client import connect -from splunklib import results -from splunklib import six - -def cmdline(argv, flags, **kwargs): - """A cmdopts wrapper that takes a list of flags and builds the - corresponding cmdopts rules to match those flags.""" - rules = dict([(flag, {'flags': ["--%s" % flag]}) for flag in flags]) - return parse(argv, rules, ".env", **kwargs) - -def modes(argv): - opts = cmdline(argv, []) - kwargs_splunk = dslice(opts.kwargs, FLAGS_SPLUNK) - service = connect(**kwargs_splunk) - - # By default the job will run in 'smart' mode, which omits events for transforming commands - job = service.jobs.create('search index=_internal | head 10 | top host') - while not job.is_ready(): - time.sleep(0.5) - reader = results.JSONResultsReader(job.events(output_mode='json')) - # Events found: 0 - print('Events found with adhoc_search_level="smart": %s' % len([e for e in reader])) - - # Now set the adhoc_search_level to 'verbose' to see the events - job = service.jobs.create('search index=_internal | head 10 | top host', adhoc_search_level='verbose') - while not job.is_ready(): - time.sleep(0.5) - reader = results.JSONResultsReader(job.events(output_mode='json')) - # Events found: 10 - print('Events found with adhoc_search_level="verbose": %s' % len([e for e in reader])) - -if __name__ == "__main__": - modes(sys.argv[1:]) \ No newline at end of file diff --git a/examples/searchcommands_app/README.md b/examples/searchcommands_app/README.md deleted file mode 100644 index b1c07311d..000000000 --- a/examples/searchcommands_app/README.md +++ /dev/null @@ -1,125 +0,0 @@ -splunk-sdk-python searchcommands_app example -============================================= - -This app provides several examples of custom search commands that illustrate each of the base command types: - - Command | Type | Description -:---------------- |:-----------|:------------------------------------------------------------------------------------------- - 
countmatches | Streaming | Counts the number of non-overlapping matches to a regular expression in a set of fields. - generatetext | Generating | Generates a specified number of events containing a specified text string. - simulate | Generating | Generates a sequence of events drawn from a CSV file using repeated random sampling with replacement. - generatehello | Generating | Generates a specified number of events containing the text string 'hello'. - sum | Reporting | Adds all of the numbers in a set of fields. - filter | Eventing | Filters records from the events stream based on user-specified criteria. - -The app is tested on Splunk 5 and 6. Here is its manifest: - -``` -├── bin -│   ├── countmatches.py .......... CountMatchesCommand implementation -│   ├── generatetext.py .......... GenerateTextCommand implementation -│   ├── simulate.py .............. SimulateCommand implementation -│   └── sum.py ................... SumCommand implementation -├── lib -│   └── splunklib ................ splunklib module -├── default -│   ├── data -│   │   └── ui -│   │   └── nav -│   │   └── default.xml .. -│   ├── app.conf ................. Used by Splunk to maintain app state [1] -│   ├── commands.conf ............ Search command configuration [2] -│   ├── logging.conf ............. Python logging[3] configuration in ConfigParser[4] format -│   └── searchbnf.conf ........... Search assistant configuration [5] -└── metadata - └── default.meta ............. Permits the search assistant to use searchbnf.conf[6] -``` -**References** -[1] [app.conf](https://docs.splunk.com/Documentation/Splunk/latest/Admin/Appconf) -[2] [commands.conf](https://docs.splunk.com/Documentation/Splunk/latest/Admin/Commandsconf) -[3] [Python Logging HOWTO](https://docs.python.org/2/howto/logging.html) -[4] [ConfigParser—Configuration file parser](https://docs.python.org/2/library/configparser.html) -[5] [searchbnf.conf](https://docs.splunk.com/Documentation/Splunk/latest/admin/Searchbnfconf) -[6] [Set permissions in the file system](https://docs.splunk.com/Documentation/Splunk/latest/AdvancedDev/SetPermissions#Set_permissions_in_the_filesystem) - -## Installation - -+ Bring up Dockerized Splunk with the app installed from the root of this repository via: - - ``` - SPLUNK_VERSION=latest docker compose up -d - ``` - -+ When the `splunk` service is healthy (`health: starting` -> `healthy`), log in and run test searches within the app via http://localhost:8000/en-US/app/searchcommands_app/search - -### Example searches - -#### countmatches -``` -| inputlookup tweets | countmatches fieldname=word_count pattern="\\w+" text -``` -Results: -text | word_count -:----|:---| -excellent review my friend loved it yours always guppyman @GGreeny62... http://t.co/fcvq7NDHxl | 14 -Tú novia te ama mucho | 5 -... | - -#### filter -``` -| generatetext text="Hello world! How the heck are you?" count=6 \ -| filter predicate="(int(_serial) & 1) == 0" update="_raw = _raw.replace('world', 'Splunk')" -``` -Results: -Event | -:-----| -2. Hello Splunk! How the heck are you? | -4. Hello Splunk! How the heck are you? | -6. Hello Splunk! How the heck are you? | - -#### generatetext -``` -| generatetext count=3 text="Hello there" -``` -Results: -Event | -:-----| -1. Hello there | -2. Hello there | -3. 
Hello there | - -#### simulate -``` -| simulate csv="/opt/splunk/etc/apps/searchcommands_app/data/population.csv" rate=10 interval=00:00:01 duration=00:00:02 seed=9 -``` -Results: -Event | -:-----| -text = Margarita (8) | -text = RT @Habibies: When you were born, you cried and the world rejoiced. Live your life so that when you die, the world will cry and you will re... | -text = @dudaribeiro_13 q engraçado em. | - -#### sum -``` -| inputlookup tweets -| countmatches fieldname=word_count pattern="\\w+" text -| sum total=word_counts word_count -``` -Results: -word_counts | -:-----| -4497.0 | - -## Optional: Set up logging using the logging.conf file -+ Inside the **default** directory of our app, we have a [logging.conf](https://github.com/splunk/splunk-sdk-python/blob/master/examples/searchcommands_app/package/default/logging.conf) file. -+ In the logging.conf file we can define the loggers, handlers, and formatters for our app. Refer to [this doc](https://docs.python.org/2/library/logging.config.html#configuration-file-format) for more details. -+ Logs are written to the files specified in the handlers defined for the respective loggers. - + For the **searchcommands_app** app, logs are written to the **searchcommands_app.log** and **splunklib.log** files defined in the respective handlers, located in the $SPLUNK_HOME/etc/apps/searchcommands_app/ directory. - + By default, logs are written to the app's root directory, but this can be overridden by specifying an absolute path for the log file in the conf file. -+ By default, the logging level is set to WARNING. -+ To see DEBUG-level and higher logs, set the level to DEBUG in the logging.conf file. - -## License - -This software is licensed under the Apache License 2.0. Details can be found in -the file LICENSE. diff --git a/examples/searchcommands_app/package/README/logging.conf.spec b/examples/searchcommands_app/package/README/logging.conf.spec deleted file mode 100644 index c9b93118a..000000000 --- a/examples/searchcommands_app/package/README/logging.conf.spec +++ /dev/null @@ -1,116 +0,0 @@ -# -# The format of this file is described in this article at Python.org: -# -# [Configuration file format](https://docs.python.org/2/library/logging.config.html#configuration-file-format) -# -# This file must contain sections called [loggers], [handlers] and [formatters] that identify by name the entities of -# each type that are defined in the file. For each such entity, there is a separate section that identifies how that -# entity is configured. Thus, for a logger named log01 in the [loggers] section, the relevant configuration details are -# held in a section [logger_log01]. Similarly, a handler called hand01 in the [handlers] section will have its -# configuration held in a section called [handler_hand01], while a formatter called form01 in the [formatters] section -# will have its configuration specified in a section called [formatter_form01]. The root logger configuration must be -# specified in a section called [logger_root]. - -[loggers] - * Specifies a list of logger keys. - -keys = - * A comma-separated list of logger keys. Each key must have a corresponding [logger_<name>] section in the - * configuration file. - * Defaults to empty. - -[logger_root] - * Specifies the configuration of the root logger. - * The root logger must specify a level and a list of handlers. - -level = [critical|error|warning|info|debug|notset] - * Can be one of debug, info, warning, error, critical, or notset. For the root logger only, notset means that all - * messages will be logged.
Level values are evaluated in the context of the logging package’s namespace. - * Defaults to warning. - -handlers = - * A comma-separated list of handler names. These names must appear in - * the [handlers] section and have corresponding sections in the configuration file. - * Defaults to stderr. - -[logger_<name>] - * Specifies the configuration of a logger. - -qualname = - * The hierarchical channel name of the logger, that is to say the name used by the application to get the logger. - * A value is required. - -level = [critical|error|warning|info|debug|notset] - * Can be one of debug, info, warning, error, critical or notset. For the root logger only, notset means that all - * messages will be logged. Level values are evaluated in the context of the logging package’s namespace. - * Defaults to warning. - -handlers = - * A comma-separated list of handler names. These names must appear in - * the [handlers] section and have corresponding sections in the configuration file. - * Defaults to stderr. - -propagate = [0|1] - * Set to 1 to indicate that messages must propagate to handlers higher up the logger hierarchy from this logger, or - * 0 to indicate that messages are not propagated to handlers up the hierarchy. - * Defaults to 1. - -[handlers] - * Specifies a list of handler keys. - * See [logging.handlers](https://docs.python.org/2/library/logging.handlers.html). - -keys = - * A comma-separated list of handler keys. Each key must have a corresponding [handler_<name>] section in the - * configuration file. - * Defaults to empty. - -[handler_<name>] - * Specifies the configuration of a handler. - -args = - * When evaluated in the context of the logging package’s namespace, is the list of arguments to the constructor for - * the handler class. - -class = - * Specifies the handler’s class as determined by eval() in the logging package’s namespace. - * Defaults to logging.FileHandler. - -level = [critical|error|warning|info|debug|notset] - * Can be one of debug, info, warning, error, critical or notset. This value is interpreted as for loggers, and - * notset is taken to mean, "log everything". - -formatter = - * Specifies the key name of the formatter for this handler. If a name is specified, it must appear in the - * [formatters] section and have a corresponding section in the configuration file. - * Defaults to logging._defaultFormatter. - -[formatters] - * Specifies a list of formatter keys. - * See [logging.formatters](https://docs.python.org/2/howto/logging.html#formatters). - -keys = - * A comma-separated list of formatter keys. Each key must have a corresponding [formatter_<name>] section in the - * configuration file. - * Defaults to empty. - -[formatter_<name>] - * Specifies the configuration of a formatter. - -class = - * The name of the formatter’s class as a dotted module and class name. This setting is useful for instantiating a - * Formatter subclass. Subclasses of Formatter can present exception tracebacks in an expanded or condensed format. - * Defaults to logging.Formatter. - -datefmt = - * The strftime-compatible date/time format string. If empty, the package substitutes ISO8601 format date/times. - * An example ISO8601 date/time is datetime(2015, 2, 6, 15, 53, 36, 786309).isoformat() == - * '2015-02-06T15:53:36.786309'.
For a complete list of formatting directives, see section [strftime() and strptime() - * Behavior](https://docs.python.org/2/library/datetime.html#strftime-strptime-behavior) - * Defaults to empty. - -format = - * The overall format string. This string uses %(<key>)s styled string substitution; the possible keys are - * documented in [LogRecord](https://docs.python.org/2/library/logging.html#logging.LogRecord) attributes. The following format string will log the time in a - * human-readable format, the severity of the message, and the contents of the message, in that order: - * format = '%(asctime)s - %(levelname)s - %(message)s' - * A value is required. diff --git a/examples/searchcommands_app/package/bin/_pydebug_conf.py b/examples/searchcommands_app/package/bin/_pydebug_conf.py deleted file mode 100644 index 0c14c9460..000000000 --- a/examples/searchcommands_app/package/bin/_pydebug_conf.py +++ /dev/null @@ -1,20 +0,0 @@ -# coding=utf-8 -# -# Copyright © 2011-2015 Splunk, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"): you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -host = 'localhost' -port = 5678 -suspend = False -is_enabled = {} diff --git a/examples/searchcommands_app/package/bin/app.py b/examples/searchcommands_app/package/bin/app.py deleted file mode 100644 index 260ab55ef..000000000 --- a/examples/searchcommands_app/package/bin/app.py +++ /dev/null @@ -1,114 +0,0 @@ -# coding=utf-8 -# -# Copyright © 2011-2015 Splunk, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"): you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -""" Sets the packages path and optionally starts the Python remote debugging client. - -The Python remote debugging client depends on the settings of the variables defined in _pydebug_conf.py. Set these -variables in _pydebug_conf.py to enable/disable debugging using either the JetBrains PyCharm or Eclipse PyDev remote -debug egg, which must be copied to your application's bin directory and renamed as _pydebug.egg.
- -""" - -from __future__ import absolute_import, division, print_function, unicode_literals - -settrace = stoptrace = lambda: NotImplemented -remote_debugging = None - - -def initialize(): - - from os import path - from sys import modules, path as python_path - - import platform - - module_dir = path.dirname(path.realpath(__file__)) - system = platform.system() - - for packages in path.join(module_dir, 'packages'), path.join(path.join(module_dir, 'packages', system)): - if not path.isdir(packages): - break - python_path.insert(0, path.join(packages)) - - configuration_file = path.join(module_dir, '_pydebug_conf.py') - - if not path.exists(configuration_file): - return - - debug_client = path.join(module_dir, '_pydebug.egg') - - if not path.exists(debug_client): - return - - _remote_debugging = { - 'client_package_location': debug_client, - 'is_enabled': False, - 'host': None, - 'port': 5678, - 'suspend': True, - 'stderr_to_server': False, - 'stdout_to_server': False, - 'overwrite_prev_trace': False, - 'patch_multiprocessing': False, - 'trace_only_current_thread': False} - - exec(compile(open(configuration_file).read(), configuration_file, 'exec'), {'__builtins__': __builtins__}, _remote_debugging) - python_path.insert(1, debug_client) - - from splunklib.searchcommands import splunklib_logger as logger - import pydevd - - def _settrace(): - host, port = _remote_debugging['host'], _remote_debugging['port'] - logger.debug('Connecting to Python debug server at %s:%d', host, port) - - try: - pydevd.settrace( - host=host, - port=port, - suspend=_remote_debugging['suspend'], - stderrToServer=_remote_debugging['stderr_to_server'], - stdoutToServer=_remote_debugging['stdout_to_server'], - overwrite_prev_trace=_remote_debugging['overwrite_prev_trace'], - patch_multiprocessing=_remote_debugging['patch_multiprocessing'], - trace_only_current_thread=_remote_debugging['trace_only_current_thread']) - except SystemExit as error: - logger.error('Failed to connect to Python debug server at %s:%d: %s', host, port, error) - else: - logger.debug('Connected to Python debug server at %s:%d', host, port) - - global remote_debugging - remote_debugging = _remote_debugging - - global settrace - settrace = _settrace - - global stoptrace - stoptrace = pydevd.stoptrace - - remote_debugging_is_enabled = _remote_debugging['is_enabled'] - - if isinstance(remote_debugging_is_enabled, (list, set, tuple)): - app_name = path.splitext(path.basename(modules['__main__'].__file__))[0] - remote_debugging_is_enabled = app_name in remote_debugging_is_enabled - - if remote_debugging_is_enabled is True: - settrace() - - return - -initialize() -del initialize diff --git a/examples/searchcommands_app/package/bin/countmatches.py b/examples/searchcommands_app/package/bin/countmatches.py deleted file mode 100755 index 24b10588f..000000000 --- a/examples/searchcommands_app/package/bin/countmatches.py +++ /dev/null @@ -1,75 +0,0 @@ -#!/usr/bin/env python -# coding=utf-8 -# -# Copyright © 2011-2015 Splunk, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"): you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the -# License for the specific language governing permissions and limitations -# under the License. - -from __future__ import absolute_import, division, print_function, unicode_literals -import app -import os,sys - -splunkhome = os.environ['SPLUNK_HOME'] -sys.path.append(os.path.join(splunkhome, 'etc', 'apps', 'searchcommands_app', 'lib')) -from splunklib.searchcommands import dispatch, StreamingCommand, Configuration, Option, validators -from splunklib import six - - -@Configuration() -class CountMatchesCommand(StreamingCommand): - """ Counts the number of non-overlapping matches to a regular expression in a set of fields. - - ##Syntax - - .. code-block:: - countmatches fieldname=<field> pattern=<regular_expression> <field-list> - - ##Description - - A count of the number of non-overlapping matches to the regular expression specified by `pattern` is computed for - each record processed. The result is stored in the field specified by `fieldname`. If `fieldname` exists, its value - is replaced. If `fieldname` does not exist, it is created. Event records are otherwise passed through to the next - pipeline processor unmodified. - - ##Example - - Count the number of words in the `text` of each tweet in tweets.csv and store the result in `word_count`. - - .. code-block:: - | inputlookup tweets | countmatches fieldname=word_count pattern="\\w+" text - - """ - fieldname = Option( - doc=''' - **Syntax:** **fieldname=***<fieldname>* - **Description:** Name of the field that will hold the match count''', - require=True, validate=validators.Fieldname()) - - pattern = Option( - doc=''' - **Syntax:** **pattern=***<regular_expression>* - **Description:** Regular expression pattern to match''', - require=True, validate=validators.RegularExpression()) - - def stream(self, records): - self.logger.debug('CountMatchesCommand: %s', self) # logs command line - pattern = self.pattern - for record in records: - count = 0 - for fieldname in self.fieldnames: - matches = pattern.findall(six.text_type(six.ensure_binary(record[fieldname]).decode("utf-8"))) - count += len(matches) - record[self.fieldname] = count - yield record - -dispatch(CountMatchesCommand, sys.argv, sys.stdin, sys.stdout, __name__) diff --git a/examples/searchcommands_app/package/bin/filter.py b/examples/searchcommands_app/package/bin/filter.py deleted file mode 100755 index 3a29ca9b2..000000000 --- a/examples/searchcommands_app/package/bin/filter.py +++ /dev/null @@ -1,101 +0,0 @@ -#!/usr/bin/env python -# coding=utf-8 -# -# Copyright 2011-2015 Splunk, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"): you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from __future__ import absolute_import, division, print_function, unicode_literals -import app -import os,sys - -splunkhome = os.environ['SPLUNK_HOME'] -sys.path.append(os.path.join(splunkhome, 'etc', 'apps', 'searchcommands_app', 'lib')) -from splunklib.searchcommands import dispatch, EventingCommand, Configuration, Option -from splunklib.searchcommands.validators import Code - -@Configuration() -class FilterCommand(EventingCommand): - """ Filters, augments, and updates records on the events stream.
- - ##Syntax - - .. code-block:: - filter predicate=<expression> update=<statements> - - ##Description - - The :code:`filter` command filters records from the events stream returning only those for which the - :code:`predicate` is true after applying :code:`update` statements. If no :code:`predicate` is specified, all - records are returned. If no :code:`update` is specified, records are returned unmodified. - - The :code:`predicate` and :code:`update` operations execute in a restricted scope that includes the standard Python - built-in module and the current record. Within this scope fields are accessible by name as local variables. - - ##Example - - Excludes odd-numbered records and replaces all occurrences of "world" with "Splunk" in the _raw field produced by - the :code:`generatetext` command. - - .. code-block:: - | generatetext text="Hello world! How the heck are you?" count=6 - | filter predicate="(int(_serial) & 1) == 0" update="_raw = _raw.replace('world', 'Splunk')" - - """ - predicate = Option(doc=''' - **Syntax:** **predicate=***<expression>* - **Description:** Filters records from the events stream returning only those for which the predicate is True. - - ''', validate=Code('eval')) - - update = Option(doc=''' - **Syntax:** **update=***<statements>* - **Description:** Augments or modifies records for which the predicate is True before they are returned. - - ''', validate=Code('exec')) - - def transform(self, records): - predicate = self.predicate - update = self.update - - if predicate and update: - predicate = predicate.object - update = update.object - - for record in records: - if eval(predicate, FilterCommand._globals, record): - exec(update, FilterCommand._globals, record) - yield record - return - - if predicate: - predicate = predicate.object - for record in records: - if eval(predicate, FilterCommand._globals, record): - yield record - return - - if update: - update = update.object - for record in records: - exec(update, FilterCommand._globals, record) - yield record - return - - for record in records: - yield record - - _globals = {'__builtins__': __builtins__} - - -dispatch(FilterCommand, sys.argv, sys.stdin, sys.stdout, __name__) diff --git a/examples/searchcommands_app/package/bin/generatehello.py b/examples/searchcommands_app/package/bin/generatehello.py deleted file mode 100755 index 572f6b740..000000000 --- a/examples/searchcommands_app/package/bin/generatehello.py +++ /dev/null @@ -1,40 +0,0 @@ -#!/usr/bin/env python -# coding=utf-8 -# -# Copyright © 2011-2015 Splunk, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"): you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License.
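The `transform` implementation above relies on one detail worth isolating: each record is a plain `dict` that is passed to `eval`/`exec` as the local namespace, so SPL fields read and write like Python variables. A standalone sketch of just that mechanism, with all names illustrative rather than part of the app:

```python
# The eval/exec-over-record pattern used by FilterCommand: the record
# dict serves as the local namespace, so fields are read and assigned
# by name. The predicate and update strings are the ones from the example.
_globals = {'__builtins__': __builtins__}

predicate = compile("(int(_serial) & 1) == 0", '<predicate>', 'eval')
update = compile("_raw = _raw.replace('world', 'Splunk')", '<update>', 'exec')

records = [{'_serial': str(i), '_raw': 'Hello world!'} for i in range(1, 7)]

for record in records:
    if eval(predicate, _globals, record):   # keep even-numbered records
        exec(update, _globals, record)      # rewrite _raw in place
        print(record['_serial'], record['_raw'])
# Prints records 2, 4, and 6 with 'world' replaced by 'Splunk'.
```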
- -from __future__ import absolute_import, division, print_function, unicode_literals -import app -import os,sys -import time - -splunkhome = os.environ['SPLUNK_HOME'] -sys.path.append(os.path.join(splunkhome, 'etc', 'apps', 'searchcommands_app', 'lib')) -from splunklib.searchcommands import dispatch, GeneratingCommand, Configuration, Option, validators -from splunklib.six.moves import range - - -@Configuration() -class GenerateHelloCommand(GeneratingCommand): - - count = Option(require=True, validate=validators.Integer(0)) - - def generate(self): - self.logger.debug("Generating %s events" % self.count) - for i in range(1, self.count + 1): - text = 'Hello World %d' % i - yield {'_time': time.time(), 'event_no': i, '_raw': text} - -dispatch(GenerateHelloCommand, sys.argv, sys.stdin, sys.stdout, __name__) diff --git a/examples/searchcommands_app/package/bin/generatetext.py b/examples/searchcommands_app/package/bin/generatetext.py deleted file mode 100755 index 8251e6571..000000000 --- a/examples/searchcommands_app/package/bin/generatetext.py +++ /dev/null @@ -1,42 +0,0 @@ -#!/usr/bin/env python -# coding=utf-8 -# -# Copyright © 2011-2015 Splunk, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"): you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from __future__ import absolute_import, division, print_function, unicode_literals -import app -import os,sys -import time - -splunkhome = os.environ['SPLUNK_HOME'] -sys.path.append(os.path.join(splunkhome, 'etc', 'apps', 'searchcommands_app', 'lib')) -from splunklib.searchcommands import dispatch, GeneratingCommand, Configuration, Option, validators -from splunklib import six -from splunklib.six.moves import range - - -@Configuration() -class GenerateTextCommand(GeneratingCommand): - - count = Option(require=True, validate=validators.Integer(0)) - text = Option(require=True) - - def generate(self): - text = self.text - self.logger.debug("Generating %d events with text %s" % (self.count, self.text)) - for i in range(1, self.count + 1): - yield {'_serial': i, '_time': time.time(), '_raw': six.text_type(i) + '. ' + text} - -dispatch(GenerateTextCommand, sys.argv, sys.stdin, sys.stdout, __name__) diff --git a/examples/searchcommands_app/package/bin/simulate.py b/examples/searchcommands_app/package/bin/simulate.py deleted file mode 100755 index db223c71b..000000000 --- a/examples/searchcommands_app/package/bin/simulate.py +++ /dev/null @@ -1,102 +0,0 @@ -#!/usr/bin/env python -# coding=utf-8 -# -# Copyright 2011-2015 Splunk, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"): you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
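Both generating commands above follow the same contract: `generate()` yields one `dict` per event, combining Splunk's reserved `_time` (epoch seconds) and `_raw` (event text) fields with any custom fields such as `_serial` or `event_no`. A stripped-down sketch of that record shape outside the command framework:

```python
import time

def generate(count, text):
    # Mirrors GenerateTextCommand.generate: one dict per event, with the
    # reserved _time and _raw fields plus a custom _serial field.
    for i in range(1, count + 1):
        yield {'_serial': i, '_time': time.time(), '_raw': '%d. %s' % (i, text)}

for record in generate(3, 'Hello there'):
    print(record['_raw'])  # 1. Hello there / 2. Hello there / 3. Hello there
```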
- -from __future__ import absolute_import, division, print_function, unicode_literals -import app -import random -import csv -import os,sys -import time - -splunkhome = os.environ['SPLUNK_HOME'] -sys.path.append(os.path.join(splunkhome, 'etc', 'apps', 'searchcommands_app', 'lib')) -from splunklib.searchcommands import dispatch, GeneratingCommand, Configuration, Option, validators - - -@Configuration() -class SimulateCommand(GeneratingCommand): - """ Generates a sequence of events drawn from a CSV file using repeated random sampling - - ##Syntax - - .. code-block:: - simulate csv=<path> rate=<expected_event_count> interval=<sampling_interval> duration=<simulation_duration> - [seed=<string>] - - ##Description - - The :code:`simulate` command uses repeated random samples of the event records in :code:`csv` for the execution - period of :code:`duration`. Sample sizes are determined for each time :code:`interval` in :code:`duration` - using a Poisson distribution with an average :code:`rate` specifying the expected event count during - :code:`interval`. - - ##Example - - .. code-block:: - | simulate csv="/opt/splunk/etc/apps/searchcommands_app/data/population.csv" rate=10 interval=00:00:01 duration=00:00:02 seed=1 - - This example generates events drawn from repeated random sampling of the events in :code:`population.csv`. Events - are drawn at an average rate of 10 per second for a duration of 2 seconds, using a fixed seed so the run is - reproducible. - - """ - csv_file = Option( - doc='''**Syntax:** **csv=***<path>* - **Description:** CSV file from which repeated random samples will be - drawn''', - name='csv', require=True, validate=validators.File()) - - duration = Option( - doc='''**Syntax:** **duration=***<time_interval>* - **Description:** Duration of simulation''', - require=True, validate=validators.Duration()) - - interval = Option( - doc='''**Syntax:** **interval=***<time_interval>* - **Description:** Sampling interval''', - require=True, validate=validators.Duration()) - - rate = Option( - doc='''**Syntax:** **rate=***<expected_event_count>* - **Description:** Average event count during sampling `interval`''', - require=True, validate=validators.Integer(1)) - - seed = Option( - doc='''**Syntax:** **seed=***<string>* - **Description:** Value for initializing the random number generator ''') - - def generate(self): - if self.seed is not None: - random.seed(self.seed) - records = [record for record in csv.DictReader(self.csv_file)] - lambda_value = 1.0 / (self.rate / float(self.interval)) - - duration = self.duration - while duration > 0: - count = int(round(random.expovariate(lambda_value))) - start_time = time.time() - for record in random.sample(records, count): - yield record - interval = time.time() - start_time - if interval < self.interval: - time.sleep(self.interval - interval) - duration -= max(interval, self.interval) - -dispatch(SimulateCommand, sys.argv, sys.stdin, sys.stdout, __name__) diff --git a/examples/searchcommands_app/package/bin/sum.py b/examples/searchcommands_app/package/bin/sum.py deleted file mode 100755 index a714699db..000000000 --- a/examples/searchcommands_app/package/bin/sum.py +++ /dev/null @@ -1,79 +0,0 @@ -#!/usr/bin/env python -# coding=utf-8 -# -# Copyright 2011-2015 Splunk, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"): you may -# not use this file except in compliance with the License.
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from __future__ import absolute_import, division, print_function, unicode_literals -import app -import os,sys - -splunkhome = os.environ['SPLUNK_HOME'] -sys.path.append(os.path.join(splunkhome, 'etc', 'apps', 'searchcommands_app', 'lib')) -from splunklib.searchcommands import dispatch, ReportingCommand, Configuration, Option, validators - - -@Configuration(requires_preop=True) -class SumCommand(ReportingCommand): - """ Computes the sum of a set of fields. - - ##Syntax - - .. code-block:: - sum total=<fieldname> <field-list> - - ##Description - - The total produced is sum(sum(fieldname, 1, n), 1, N) where n = number of fields, N = number of records. - - ##Example - - .. code-block:: - index = _internal | head 200 | sum total=lines linecount - - This example computes the total linecount in the first 200 records in the - :code:`_internal` index. - - """ - total = Option( - doc=''' - **Syntax:** **total=***<fieldname>* - **Description:** Name of the field that will hold the computed sum''', - require=True, validate=validators.Fieldname()) - - @Configuration() - def map(self, records): - """ Computes sum(fieldname, 1, n) and stores the result in 'total' """ - self.logger.debug('SumCommand.map') - fieldnames = self.fieldnames - total = 0.0 - for record in records: - for fieldname in fieldnames: - total += float(record[fieldname]) - yield {self.total: total} - - def reduce(self, records): - """ Computes sum(total, 1, N) and stores the result in 'total' """ - self.logger.debug('SumCommand.reduce') - fieldname = self.total - total = 0.0 - for record in records: - value = record[fieldname] - try: - total += float(value) - except ValueError: - self.logger.debug(' could not convert %s value to float: %s', fieldname, repr(value)) - yield {self.total: total} - -dispatch(SumCommand, sys.argv, sys.stdin, sys.stdout, __name__) diff --git a/examples/searchcommands_app/package/data/population.csv b/examples/searchcommands_app/package/data/population.csv deleted file mode 100644 index 5a0b016be..000000000 --- a/examples/searchcommands_app/package/data/population.csv +++ /dev/null @@ -1,629 +0,0 @@ -"_serial","_time",text -0,1380899494,"excellent review my friend loved it yours always guppyman @GGreeny62... http://t.co/fcvq7NDHxl" -1,1380899494,"Tú novia te ama mucho" -2,1380899494,"RT @Cindystaysjdm: @MannyYHT girls are like the Feds, they always watching 👀" -3,1380899494,"no me alcanza las palabras para el verbo amar..♫" -4,1380899494,"@__AmaT 요즘은 곡안쓰시고 귀농하시는군요 ㅋㅋ" -5,1380899494,"melhor geração #DiaMundialDeRBD" -6,1380899494,"@mariam_n_k من أي ناحية مين أنا ؟ ، إذا كان السؤال هل اعرفك او لا الجواب : لا ." -7,1380899494,"Oreka Sud lance #DEMplus un logiciel de simulation du démantèlement d'un réacteur #nucléaire http://t.co/lyC9nWxnWk" -8,1380899494,"@gusosama そんなことないですよ(。•́︿•̀。)でも有難うございます♡" -9,1380899494,"11:11 pwede pwends ta? 
HAHAHA" -10,1380899494,"RT @royalTee_x3: Football players >>> 😍😎" -11,1380899494,"#FF Belles lettres @ChTwDe In est comme in est, in s'arfait nin Ben lui y'a rien à changer Poèsie, amitié, tendresse SUIVEZ Un chou ce ch'ti" -12,1380899494,"@_AbdullaS @Hawazn1993 @bntmisfr1 @prh00M @nhLa_30 هههههههههههههههههههههههههههههههههههههههههههههه." -13,1380899494,"RT @alrweili12: #متابعين -✳ اضفني @alrweili12✅ -✳ رتويـت ✅ -✳ أضف مـن يقـوم بالرتويـــت ✅ -✳أضف مـن يضيفـك ✅ -#زيادة_متابعين -1" -14,1380899494,"RT @CHSExplorer: Monzon with a 20 yard rushing TD off an option play. T-Birds up 37-21 with 30 seconds left in the game" -15,1380899494,"Margarita (8)" -16,1380899494,"RT @chikichikiko: ぶふぁっ! なんぞ、これ!?(^0^;) しかもNHKって、、。RT 【祝】NHKで跡部様が紹介される http://t.co/i7WB0pMHrj" -17,1380899494,"#fact directioners love one direction" -18,1380899494,"https://t.co/2b10ScKlAo cuanto? — 5 http://t.co/ldtoRMvpnB" -19,1380899494,"Still make 11:11 wishes.." -20,1380899494,"Estar tan cansada y agotada que no te queda energía ni para abrir los ojos mas de 5 segundos seguidos." -21,1380899494,"The man of the night #killem #otp #lastshot http://t.co/EFrJ7upMu1" -22,1380899494,"@MaintainNGain so I've had just a bad/frustrating morning, but then I saw this on my feed which made me smile! Thanks! #neededadvice #smile" -23,1380899494,"RT @1yuki1yuki9: 日経エンタはエイターを殺す気。 http://t.co/MyzxDZJOGD" -24,1380899494,"@michael_snape Oi, what the fuck happened last night! I know I was in town but I do not remember one place we went! Just know I was with you" -25,1380899494,"@taku_is_ahoo 苦しかったわわら。" -26,1380899494,"“@pinulbilang: Iklan tvm yg baru ada @apriliokevin sama @Princess_Ind masa :* :D *poke @AprilioKingdom”" -27,1380899494,"RT @ArsenalNewsUK: WEST BROM v ARSENAL: Latest team news and stats http://t.co/u9BsfrGF45" -28,1380899494,"Se siente tocada Terenzano.-" -29,1380899494,"أحياناً العقلانيه تكون سيئه وتجعلك تتحفظ وتنظر للحياة بواقعيتها ، -بينما الجنون يرفع من سقف أفكارك ويجعلك لا تعرف معنى المستحيل .!" -30,1380899494,"RT @TweetUstazAzhar: Cinta itu bukannya suatu permainan . Cinta adalah suatu anugerah dari Allah . Jagalah anugerah Allah ini dengan sebaik…" -31,1380899494,"I hope I don't have to take my child care test today" -32,1380899494,"RT @chingjoyce: Kaya naman palaaaaaaaaaa!! My goodness!" -33,1380899494,"たのしかったww -けどくっそねむいし - -あしたおきれんw" -34,1380899494,"RT @LeVraiHoroscope: La #Balance est toujours là pour aider ceux qu'elle aime vraiment." -35,1380899494,"RT @KertomTorres: La gente dice que ''odiar'' es una palabra muy fuerte, pero van por ahí diciendo ""te amo"" como si eso no significara nada." -36,1380899494,"RT @samkinah: ""@TimmyAisha: Are you Copper? - -Because I Cu in my dreams!"" Hehehe" -37,1380899494,"In here tryin think wat ima eat" -38,1380899494,"Yeah, after I thank The Lord 4 wakin me 🙌🙏" -39,1380899494, -40,1380899494,"RT @tryna_be_famous: RT @tryna_be_famous Nigga look like a microwaved hot dog http://t.co/T6IQpYrzCh" -41,1380899494,"RT @9493_room: 1004 에인줠Day..... http://t.co/mwVnEREljF" -42,1380899494,"@dudaribeiro_13 q engraçado em." -43,1380899494,"RT @Mzhs81: この雑コラが個人的にツボ #艦これ http://t.co/0OIUkfj8FR" -44,1380899494,"【PCMAX】サイトに登録するだけで女性からメールが来ると思っているあなた!女の子は奪うものですよ!気合でいきしょう!\(^0^)/ -◎http://t.co/zZjw8KLUsB(登録無料)" -45,1380899494,"http://t.co/8Yq0AHnoDd -「枯れずの花」更新しました! -#narou #narouN5047BT -少し日付をオーバーしましたが、第七話「薔花、散る。」を投稿しました。 -これにて、第一次薔藤時代編は終わりです。" -46,1380899494,"@u2w3c_ 譲りますヽ(`・ω・´)ノどちらに住んでますかね?" 
-47,1380899494,"RT @IamLEGIT: @mizzaaaa_ @ahaiqall aku handsome lagiii" -48,1380899494, -49,1380899494,"紙が若干ペロンって曲がってしまったせいかチビ信乃の背景が歪んでてワロタ" -50,1380899494,"Don't act like it is a bad thing to be in love with me. You might find out your dreams come true." -51,1380899494,"RT @ahmethc: basgan'a ""sakin ol şampiyon"" derken http://t.co/Q2YNjKV8P7" -52,1380899494,"明日ひーろー行く人?(^o^)" -53,1380899494,". http://t.co/bMgug5LdP2" -54,1380899494,"越谷EASYGOINGSに行ってきた。 -江崎さん、松崎さん、絵かきの手、パプリカン -素晴らしかった。久々に完全客でのライブハウス。リフレッシュできた。 -あまり酒飲まないと決めたのに結局へろへ。 - -さて、明後日は浅草で僕の企画、明々後日は越谷で乗り込みPAです。 -楽しみワクワク。" -55,1380899494,"【イククル】会員登録前にモチベーションを上げてからいきましょう!男性の場合は「超モーレツアタックするぞー!」、女性の場合は「プロフィール超充実させちゃうー!」ですね。\(^^)/ -◎http://t.co/jNcIgBoS2W【登録無料】4" -56,1380899494,"常に呼ばれている陽菜です(ノシ・ω・)ノシ(ノシ・ω・)ノシ" -57,1380899494,"@nflhqm yesssss. Hahahahaha" -58,1380899494,"RT @nobunaga_s: 跡部様がNHKに出演されたというのは誠ですか!?…流石です!" -59,1380899494,"There are screaming children RIGHT outside my window. Make it stop." -60,1380899494,"*fly*" -61,1380899494,"Ah shit! I'm just waking up from what can only be describe as a comma. I hope I won't be up all night because of this." -62,1380899494,"BBQの追い込みTL合間のシット君に癒されたwwwww" -63,1380899493, -64,1380899493, -65,1380899493, -66,1380899493, -67,1380899493, -68,1380899493,"RT @LeVraiHoroscope: Ce que le #Cancer aime en automne : regarder des films d'horreur et faire la fête avec ses amis." -69,1380899493, -70,1380899493, -71,1380899493,"@emunmun @crnpi32 そー中毒なるねん! やめられへん (笑)" -72,1380899493,"RT @TOWER_Revo: 【あと3日】10/7(月)21時~初音階段『生初音ミク降臨!?ボーカロイドとノイズの融合!』開催&配信まであと3日となりました!月曜日からノイズの世界を楽しみましょう! http://t.co/k0zn9J6tQ5 詳細⇒http://t.co/…" -73,1380899493,"BOA TARDE A TODOS CLIENTES E AMIGOS!!!! O PERFIL DE NOSSA EMPRESA NO FACEBOOK AGORA SE TORNOU UMA FÃ PAGE! ABRAÇOS http://t.co/kroqZuJYi5" -74,1380899493,"これうまい http://t.co/YlT8pAMxse" -75,1380899493,"@LMurilloV de estos? http://t.co/uZ2s8jYRZE" -76,1380899493, -77,1380899493,"@rikaaaa714 てか、どうせなら一緒に写ろう!" -78,1380899493,"@Mesho_2002 لآ تحتكك :) هههههههههههه آمزح" -79,1380899493,"RT @Axwell: @Palmesus YEs! can't wait to party with my neighbors in your beautiful country!" -80,1380899493,"http://t.co/CNvqHVecpf #про ститутки в челябинске" -81,1380899493,"@MileyCyrus Oh yes Miley, I love taking selfies in bed also, you look so happy, your happiness in this picture just radiates off" -82,1380899493,"@community_kpop Sone , Baby :)" -83,1380899493,"cowok gak boleh cengeng ah.. RT @Amberrlliu92: [] ini gue ragu -.- nangis gara2 masalah RP, atau nangis gara2 denger lagu ini berulang2 T_T" -84,1380899493,"Vova что?! RT @engpravda: Putin calls professor of Higher School of Economics a jerk http://t.co/GOx4jfdfND" -85,1380899493,"RT @gtapics: Drake is probably playing GTA V right now picking up prostitutes and driving them to safer cities" -86,1380899493,"The Byte Me Daily is out! http://t.co/yaIpTnubC8 ▸ Top stories today via @Bitdefender @billnelson @misterfergusson" -87,1380899493,"RT @BornOfEternity: Jonathan Rhys Meyers con el que hizo del Jace pequeño, y el halcón. A mi este hombre me mata. http://t.co/nxdk1uZbdD" -88,1380899493,"@_lonyma وين راح الم راسك هاإاإاه" -89,1380899493, -90,1380899493,"RT @SenRandPaul: . @BarackObama sent 7 security guards to #WWIIMemorial this AM to keep out our vets. Sadly, that is 2 more than were prese…" -91,1380899493,"Los odio . @MJSantorelli" -92,1380899493,"I've harvested 967 of food! 
http://t.co/VjlsTijdQc #ipad, #ipadgames, #gameinsight" -93,1380899493,"My boy Thor is a Sore loser https://t.co/KTtwAlHqr2" -94,1380899493,"@bibikunhiy だあああ‼またですか!" -95,1380899493,"@_desytriana beneran kok, gak sepik.-." -96,1380899493,"Oq q era aquela cena do Matt da Rebekah e da outra desconhecida lá, já suspeitava q a Rebekah cortava pros dois lado" -97,1380899493,"RT @SastraRevolusi: Seandainya pria tahu, perempuan yang menanyakan status adalah perempuan yang tidak ingin kehilangan, bukan malah ingin …" -98,1380899493,"serious selekeh sangat! badan mcm kayu nak pakai baju ketat ketat. dengan tangan mcm sotong klau bercakap. wuuuuu --'" -99,1380899493,"رب أني مسني الضر و انت ارحم الراحمين.. - شاهد: http://t.co/MIc0UNNkaQ -#غرد_بذكر_الله -#دعاء_لربي" -100,1380899493,"@ellzaamay ok" -101,1380899493,"흐아ㅜ래으루ㅏ이닭발... #소연아생일축하해" -102,1380899493,"RT @OhTheFameGaga: Put your hands up, make ‘em touch! Make it real loud!" -103,1380899493,"12 12" -104,1380899493,"RT @Keenzah_: ""@lesxviezvous: Au Portugal, dans les fêtes foraines, on trouve de la barbe à Maman."" PTTTTTTTTTTTTTDR JAI RIGOLÉE 6FOIS" -105,1380899493,"RT @kozara: 透明飲んでも隠し切れないイケメン ぽぺん" -106,1380899493,"RT @AfifSyakir_: Saya harap saya jadi yang terakhir buat ibu bapa ku di saat-saat mereka perlukan ku untuk membacakan syahadah untuk mereka…" -107,1380899493,"Especially loads of the gay men who bizarrely feel they have a right to tut at a 20 yo woman for being too sexy or whatever it is." -108,1380899493,"@berry_berryss めーーーん!!! -おめでとおめでとおめでと♡" -109,1380899493,"RT @imas_anime: この後、24:00〜東京MXにて第1話が再放送です。同時にバンダイチャンネルでも配信します。 -http://t.co/1KdQhC6aNm -久しぶりに765プロのアイドル達とアニメで再会できます!楽しみにお待ち下さい。 #imas #projec…" -110,1380899493,"RT @_OfficialAkim: ♬ Rokok Yang Dulu Bukanlah Yang Sekarang, Dulu RM10 , Sekarang Up 12 Ringgit. Dulu Dulu Dulu Perokok Bahagia, Sekarang M…" -111,1380899493,"Libtards blame Tea Party for shutdown. Yer welcome America! #RiseUp #PatriotsUnite #StopLibtards #ImCute #ncot #tcot #!" -112,1380899493,"RT @himybradfordboy: @_Gr_in_ szczerze to nic się nie zgadza xD wiek -14, kolor oczu- brązowe, ulubiony kolor - czarny, ulubiona gwiazda - …" -113,1380899493,"RT @TwerkForJustin: FOLLOW TRICK -RT TO GAIN -FOLLOW @ACIDICVODCA -FOLLOW EVERYONE WHO RTS -GAIN LIKE CRAZY -#twerkforjustinfollowtrick" -114,1380899493,"RT @Habibies: When you were born, you cried and the world rejoiced. Live your life so that when you die, the world will cry and you will re…" -115,1380899493,"@aaaaasukaaaaaa -じゃあサイゼ行く?(^_^)笑" -116,1380899493,"@RGH0DY @jana_abdullah ههههههههههههههه" -117,1380899493,"みんなくん付けなのか かわいい" -118,1380899493,"@fishaessi follback" -119,1380899493,"おぽぽぽぽぽぽぽう!!!ーー!ぴぽーおおおぽ!!!!" -120,1380899493,"รู้ป่าวใคร http://t.co/Nq101xcU82" -121,1380899493,"luthfinya iya dhiya salsabilanya enggak""@itceem: Salsaaawrs dhiyasalsabilaluthfi hehehe""" -122,1380899493,"The rioting youths in Mbsa should use their brains not emotions." -123,1380899493,"多分威圧感のあるくしゃみなんだろうな" -124,1380899493,"inuejulawo taye replied to Samuel Date360's discussion I Gave Him A BJ On Our First Date, Would He Still Respe... 
http://t.co/oOCx1IaXES" -125,1380899493,"me separo do amor da minha vida mas não me separo do meu celular" -126,1380899492, -127,1380899492, -128,1380899492, -129,1380899492, -130,1380899492, -131,1380899492,"@Njr92 :) http://t.co/W7nnZqSEo2" -132,1380899492,"Probably going to hell for that one time that nun substitute teacher yelled at me and sent me to the office LOL #memories" -133,1380899492,"http://t.co/RlSuI4KxLT" -134,1380899492,"@rachel_abby15 we make your day baby girl ? http://t.co/F1y9SgYhYP" -135,1380899492,"RT @__mur_____: . - -. - -. - -    》    三.浦.翔.平 NrKr - -    俺が君の居場所に為る -    寶絶対に離れん麝無えよ ? - -    ! ..    Rt呉れた奴迎え - -. - -. - -." -136,1380899492,"RT @discasp: @HWoodEnding CAN YOU PLEASE WISH MY FRIEND @glenroyjls A HAPPY 14TH BIRTHDAY PLEASE?!!XX @HollywoodTyler @HollywoodCamB @Holly…" -137,1380899492,"@soumar1991 مساء الأنوار" -138,1380899492,MAYBE -139,1380899492,"@VasundharaBJP @drramansingh @ChouhanShivraj @VijayGoelBJP @CVoter just indication of trend.With @narendramodi's support BJP landslide win" -140,1380899492,"寒い寒い。暖かいシャワー浴びたのに。寒い寒い。" -141,1380899492,"@littleofharold pronto" -142,1380899492,"This is not a list of reasons to read the bible http://t.co/o1np7jd8WI #bible" -143,1380899492, -144,1380899492,"もう1回ききたい!笑" -145,1380899492,"la tua celebrity crush? — ian somerhalder. http://t.co/jikyDEWoON" -146,1380899492,"Np : Best song ever - One Direction :)))))))" -147,1380899492,"RT @BuketOzdmr: Beyler bugün eve gidemiyoz hayırlı olsun @almancik @bbkanikli" -148,1380899492,"야갤중계 ㅋㅋㅋㅋㅋㅋㅋㅋㅋㅋㅋㅋㅋㅋㅋㅋㅋㅋㅋㅋㅋㅋ" -149,1380899492,"Lmao!!! RT @miskoom: They have put my guy in camera zone. Lmao" -150,1380899492,"Got my first referral woho senior year" -151,1380899492,"@myjkys_08sf おお?" -152,1380899492,"@VeraVonMonika even UK has sun today :-) @geoff_deweaver @ThitiaOfficial @DonDraper_NY @wade_corrina @MarlenaWells @josephjett @JZspeaks" -153,1380899492,"I duno what it is but you just my type 😋" -154,1380899492,"@xxsanox 豪快なのにお肉はちっちゃいってのがまたステキね♥︎" -155,1380899492,"Yayyyy I just bought my mom and dad so much gear 😍💜💛 #lovethem" -156,1380899492,"Ostéopathe de merde grouille toi" -157,1380899492,"@IsmiFadillahRzy sampai bertemu di alam mimpi yah..haha" -158,1380899492,"RT @untidm: コーナーキックの時マークついてた奴に点を決められた時に、みんなの視線が怖い。 -#サッカー部あるある" -159,1380899492,"http://t.co/JUifcH9fXe где купить экстракт зеленого кофе" -160,1380899492,"I got da moneeeyyyyyy" -161,1380899492,"@vvip_jihyung omg?" -162,1380899492,"どうせ行くなら一番美味しいもの食べたい!デート!合コン!女子会!での注文の参考に!「金の蔵jr」人気メニューランキングBEST10 -http://t.co/XCiXxigsBC" -163,1380899492,"@ria_ash1217 多分知らないかなー? -大丈夫だよ〜聞き専門でも! -一応俺の rain-t ねー(´ω`)" -164,1380899492,"@A_xoxo_red - -チョンスジョンのお迎え" -165,1380899492,"RT @alajavivi7: Os espero esta noche en el Voy Bien señores!!!! http://t.co/c306QYYh7U" -166,1380899492,"RT @perfxctpayne: poseeeeey en perm avec juliette" -167,1380899492,"RT @bLoOdyBeEtRut85: Πήγα για τσιγάρα, και γύρισα. Τέτοιος μαλάκας." -168,1380899492,"القبض على اللاجئين الفلسطينيين في الإسكندرية و قتلهم في البحر -#وبكرة_تشوفوا_مصر -#السيسي_خائن" -169,1380899492,"@narryykissme thank you so much babe, please can u send my username to niall? it would mean everything to me♥" -170,1380899492,"RT @ActorLeeMinHo: On air. http://t.co/6cJGMoYCD9 http://t.co/7evlV6m5Ua" -171,1380899492,"@mdr58dncdm うぇーーーーーい!!!観よう!観たい!" -172,1380899492,"RT @RT_ARAB_RT: 🔲〰◾〰◾〰◾〰🔲 - -➊ فرصتك ✔ -➋ لزيادة متابعينك✔ -➌ رتويت✔ -➍ فولومي @RT_ARAB_RT ✔ -➎ فولوباك✔ -➏ اضافة من عمل رتويت✔ -➐ فولوباك للجميع✔ -…" -173,1380899492,"@mafasmk so sry bro ur kerala boy gone !!" 
-174,1380899492,"RT @TheXFactorUSA: @ddlovato also... #GLEEKS + #LOVATICS = #GLOVATIKS (and will probably take over the world)" -175,1380899492,"Bazıları sosyal sorumluluklarin altinda kalmis sosyal devletten uzaklasmis;al sadaka ver oy al kaputulasyon ver oy" -176,1380899492,"RT @gamthestar: Gravity หนังดีที่กลั้นหายใจทั้งเรื่อง ดูIMAXยิ่งเพิ่มความตื่นเต้น ภาพสวยมากกกก ลุ้นมากกกก คือแนะนำมากๆ ดี๊ดีค่ะคุณผู้ชม" -177,1380899492,"RT @Mooomoo3333: : بنت المدينة أشد الإناث فتنة في لهجتها عذوبة وفي غنجها أعجوبة تعجز حروفي عن الوصف بل هُنَ أجمل من ذلك وكفى♡❤”" -178,1380899492,"Uhuk makasih uhuk RT @_Reiruki: Galah uhuk emng uhuk manis uhuk (?) RT Ricoziel: Kaga uhuk kok uhuk (cont) http://t.co/rH6dcTwu83" -179,1380899492,"相性悪いのかなぁ" -180,1380899492,"RT @DianaYourCousin: No es guapa ni na mi @EstherCabreraa :) http://t.co/Tbsxt0DYTv" -181,1380899492,"RT @EXO_FANBASE: 131004 Xiumin @ The 18th Busan International Film Festival Blue Carpet {cr. melting} http://t.co/nu9i4bxupj" -182,1380899492,"海より深く納得>RT" -183,1380899492,"@H21uw -ありがとうございます!♡" -184,1380899492,"@taigaohba -分かる。 -ほんとぐっすり寝させてください" -185,1380899492,"FC CRIADO PARA ROSA CATERINA DE ANGELIS." -186,1380899492,"Dhan :( gitu ya ? Oke @ardhankhalis: @yraudina gue udah balik beb, kenapa emg?""" -187,1380899492,"Жизнь в темпе бешеном , петли не вешали мы" -188,1380899492,"Niyaya ni DJ si Kath sa isang room para kausapin at i-comfort. Naks! 😊💕 http://t.co/CM02frV3N9 -Joche" -189,1380899492,"ชอบผช.แบบเกรท วรินทรอ่ะ ขี้เล่นๆ เจ้าชู้นิดๆ เป็นผู้ใหญ่ด้วย ดูพี่แกเล่นหนังก็เคลิ้ม หลงเบย 😘" -190,1380899492,"@AndiDarfiantoPD iyo2, sembarang ji, traning moo" -191,1380899492,"Today stats: One follower, No unfollowers via http://t.co/tmuKc0tddl" -192,1380899492,"David Beckham: I was always going to second guess decision to retire from playing football: Exclusive intervie... http://t.co/IaKf4St5B9" -193,1380899492,"@jorgeheredia85 ""EL PREPAGO"" UNICA FUNCION.HOY 20H30. FEDENADOR.ENTRADAS A LA VENTA FEDENADOR Y TEATRO DEL ANGEL. INFO:2380585. VALOR $20,o" -194,1380899492,"電車ぱんぱんすぎて腰がやべー(;_;)" -195,1380899492,"All These Exploding Cars Will Make You Feel Different About Burning Teslas: A Tesla caught fire yesterday. Thi... http://t.co/c8XlVp8uLi" -196,1380899492,"Se em 2009 nos fizesse a campanha de 2008 e de 2010 eramos campeões POR QUE DEUS POR QUE DEUSSS POR QUEEEEEEEE" -197,1380899492,"It's the 'Dark Star'/ 'Black Sun' which is Saturn. And, the Colorful band around it is Saturn's rings. http://t.co/p3975DtSlg" -198,1380899492,"Minha Mãe recebeu um Bilhete da diretora da escola '' Reação da minha mãe '' : O que eu pago uma das melhores escolas Particulares pra que" -199,1380899492,"じぶが書いた言葉からは逃げられませんって前に教授がいってたけどその通りだなー" -200,1380899492,"今夜はブランキージェットシティ聴いてますーん。" -201,1380899492,"まえぬうううううううううううう雨" -202,1380899492,"Évelin marcou seu Tweet como favorito" -203,1380899492,"동생도 좋아요. 그러니까 나만 두고 가지마." -204,1380899491, -205,1380899491, -206,1380899491, -207,1380899491, -208,1380899491, -209,1380899491, -210,1380899491, -211,1380899491, -212,1380899491,"Bush teacher exposed! Lmfao http://t.co/JWhaXLIgqM" -213,1380899491, -214,1380899491, -215,1380899491,"@KPamyu2 まほパーフェクト♡" -216,1380899491, -217,1380899491,"{ما خلقنا السماوات والأرض وما بينهما إلا بالحق وأجل مسمى والذين كفروا عما أنذروا معرضون} [الأحقاف:3] -http://t.co/fXuz2BeCx4" -218,1380899491,"We're just rlly in love http://t.co/KIwbVLBqOO" -219,1380899491,"<3 <3 <3 ""@OFFICIALBTOB #BTOB #THRILLER 마지막 방송을 시작한 #비투비 멤버들의 떼샷 ver.2 Happy미카엘1004day! 
http://t.co/6nF0a8TXeW""" -220,1380899491,"Canım canım :) @pinaruzkuc http://t.co/T3N9x9DU6E" -221,1380899491, -222,1380899491,"@MLB Cardinals Braves Tigers Red Sox #TGI4Day" -223,1380899491,"@mf_hp えー!むっちゃんの大好きな人物だよ?" -224,1380899491,"RT @mohmadbinfetais: ″خَدَعك من أخبَرك -بأنّ التّجاهُل يجذب الأنثى ويَزيد تَعلّقها بك.! -فأكثَر ما تَحتقِر المرأة ""التّجاهُل - -#كلام_جميل" -225,1380899491,"¡Viernes! Y ¡hoy toca! -#HoyToca Van Gogh Pachuca! - -Puedes reservar vía MD!" -226,1380899491,"ボスがなかなか倒せないヽ(`Д´)ノ -みんなもコレはじめて殴ったらいいよ ´∀`)≡〇)`Д゚) -【http://t.co/ntpSE5PnqV】" -227,1380899491,"They got it'$" -228,1380899491,"RT @Niken_adisti: @Salsabilathlita @muhammad13adtyo hha :D" -229,1380899491,"@seonai_ thanku gal! 💞 Xx" -230,1380899491,"@maaikewind Dank je wel! 15 oktober weet ik meer." -231,1380899491,"Y es un hecho triste, mi naturaleza. Mi destino insiste con tenerte cerca." -232,1380899491,"RT @matty_parsons: Some proper chavs in Bradford....." -233,1380899491, -234,1380899491,"RT @oursupaluv: Angels, have you wished Chunji @wowous a happy birthday yet? It seems he's online! #happy21stchunji" -235,1380899491,"@unxcorn_ did u ever cut yourself ?" -236,1380899491,"@Fatima_Haya eeecht niet... Gij straalt altijd 🙊" -237,1380899491,"@broken_star_ he hasn't been in for three days now! At least that means I didn't miss anything today ;) what happened in English!!!" -238,1380899491,"@Salgado_lb 봇주님도 감기시라니88 푹 쉬셔요...!" -239,1380899491,"Si anda rondando la felicidad, no tengas tanto temor de cambiar" -240,1380899491,"I really could walk to waffle House but no" -241,1380899491,"When I get rid of these social networks, who you gone want me to tell then ??... I'll wait on that one...😐💭" -242,1380899491,"RT @pittsavedme: #KCAARGENTINA #PETERLANZANI" -243,1380899491,"RT @_cococruz: FIESTA PROMO HRT 2013!!!! NO TE QUEDES AFUERAAA, QUEDAN LAS ULTIMAS PULSERAS" -244,1380899491,"http://t.co/MIgvnX7TW3 физикадан дипломды ж мыстар http://t.co/MIgvnX7TW3" -245,1380899491,"@wtknhey わかる" -246,1380899491,"Hamla means Attack, not pregnant wala hamla. ;-)" -247,1380899491,"A kid in my driving class just took off his pants in the middle of the room. Okay then, that's cool" -248,1380899491,"憂鬱やな〜自己嫌悪" -249,1380899491,"13 <3 blue *__* @loretun13" -250,1380899491,"@Charli_FCB are you serious?!! Omg that's ridiculous!! Didn't know the Uni was open till so late!" -251,1380899491,"DIGO MILANESAS JAJAJAJAJJAA QUE PAJERO QUE SOY" -252,1380899491,"@1125yik 気分wwww - -暇人かwww" -253,1380899491,"X Factor Noww" -254,1380899491,"@Risa_v_rock 声優陣いつもいいポジションよなw" -255,1380899491,"ショボン" -256,1380899491,"@AsNana_RM is that Kevin? :3" -257,1380899491,"oeps dierendag gauw zien dat ik Rosie kan pakken om effe te knuffelen....." -258,1380899491,"@arvachova026 ты всю дорогу шла одна ?" -259,1380899491,"@DopeAss_Chyna just texted u fat girl" -260,1380899491,"@shiina1230  いっこだけ言い方微妙にちゃうやつあってわろたww" -261,1380899491,"Omwt appie w thesie en daarna na theess." -262,1380899491,"É impressão minha ou o Twitter mudou alguma coisa??!!" -263,1380899491,"Ela olha o céu encoberto e acha graça em tudo que não pode ver.." -264,1380899491,"@Yoboth_b2st จริงนะ" -265,1380899491,"#Во Владимире предприниматели жестоко избили трех полицейских" -266,1380899491,"RT @bani_saja: ba'unggut ba'unggut ""@Ujankwara: @syirajmufti sdh""" -267,1380899491,"RT @Bailey_brown4: Why did I not know more than half of the stuff on that AP chem test!? #retakes?" 
-268,1380899491,"【ワクワク】女性の方はまず掲示板へ投稿しましょう!次に男性から届いたメールを見て、自分の理想の男性はいるか、どの男性とメールやり取りを始めるか決めましょう。(^-^)v -◎http://t.co/vlu0iRKzdR【登録無料】" -269,1380899491,"家賃が大幅値上げされるようなら引っ越しもありよね、と検索してみたものの、結構厳しいなーと思い知る。" -270,1380899491,"11:11" -271,1380899491,"#serveur restaurant 75 GARE DE LYON BERCY: EMPLOYE POLYVALENT: Vous etes disponible et pret meme à la dernière... http://t.co/4xITYPCb51" -272,1380899491,"キルラキルってやっぱグレンラガン作った人たちが作ってるのか~やっぱこのチームはいろいろとセンス感じる!!" -273,1380899491,"ah porque me rtw eso o.O" -274,1380899491,"足先の冷えがww" -275,1380899491,"あ、年くった。" -276,1380899491,"日本海のシラス(^O^)" -277,1380899491,"antonimnya :p eh yg terakhr jangan! RT @hvsyawn: -_- kok RT CIC_BebyChae: kai pesek jelek item idup, puas? wkwk RT hvsyawn: tapi" -278,1380899491,"POR CIERTO, ME HAN PUESTO UN PUTO 9 EN UN TRABAJO DE PLÁSTICA. OLE." -279,1380899491,"É #BigFollow, imagina ter mais de 20.000 followers por apenas R$ 750,00? #DEMIWentPlatinumInBrazil: -bigfollow.net" -280,1380899491,"rocio esta re triste porque nunca gana" -281,1380899491,"ながもんさん -20時間の入渠に入りました" -282,1380899490, -283,1380899490, -284,1380899490, -285,1380899490, -286,1380899490, -287,1380899490, -288,1380899490, -289,1380899490, -290,1380899490, -291,1380899490,"i officially ship krisbaek now! \O/ http://t.co/z1BB7X8RpP" -292,1380899490, -293,1380899490,"Mending berangkat deh malem ini~" -294,1380899490,"@YSJSU what's on at the SU tonight?" -295,1380899490,"@remembrance0810 ありがとう(。-_-。)" -296,1380899490, -297,1380899490,"..... #절망 -아 존못임 ㅠㅠ http://t.co/UOnpEYPsdW" -298,1380899490,"@ka_iskw 宣言したから起きれそうじゃんヽ(・∀・)ノ笑" -299,1380899490,"http://t.co/8lNH2jyjxh" -300,1380899490, -301,1380899490,"Menurut lo? ""@Lok206: Ini bukan lagu kan? ""@nuningalvia: Don't you ever forget about me when you toss and turn in your sleep I hope it's" -302,1380899490,"RT @KidSexyyRauhl: #BEAUTYANDABEAT IS A MAKE UP LINE OMG 😍 http://t.co/qLL4JEQfPW" -303,1380899490,"http://t.co/qqchmHemKP" -304,1380899490,"RT @moojmela: The study of fruits is known as Pomology." -305,1380899490,"Aww excited na ako... xD -#OneRunOnePhilippines http://t.co/H1coYMF1Kp" -306,1380899490,"¿Pocos Seguidores? [█ ̅ ̅ ̅ ̅ ̅ ̅ ̅ ̅ ̅ ̅ ̅ ̅ ̅ ̅ ̅ ̅ ̅] 17% Obten Seguidores siguiendo a ► @granhijodeperra y ganas hasta 5O Seguidores" -307,1380899490,"@thewolf6 @M_ALHMAIDANI البركة فيك اجتهد وورنا شطارتك 😉" -308,1380899490,"@kamenriderw1006 エロい" -309,1380899490,"RT @bokaled_q8: واللـّہ لو تعطيهم من الطيب أطنان تبقى ( النفوس الرديہ) رديہ" -310,1380899490,"@Giuli_liotard que sos voa" -311,1380899490,"@ControlSrk druže je l' se ti drogiraš?" -312,1380899490,"学校前の小川のやる気のなさ #二水あるある" -313,1380899490,"THE BOYS KILL ME EVERYDAY" -314,1380899490,"#Normal RT @eguierootz Ea tiraera temprano aqui" -315,1380899490,"@sukiyaki86 フハハハッ" -316,1380899490,"RT @n_almisbah: ذبح الأضاحي يتم بالتعاون مع الأمانة العامة للأوقاف وإدارة مسلخ محافظة حولي -1/5 -http://t.co/8lXe2e3FBQ" -317,1380899490,"5 Articles needed urgently | Academic Writing | Article Rewriting … http://t.co/4qaCbVNKP7 #copywriting" -318,1380899490,"@LauraneMolac t as vu !!" -319,1380899490,"まっきん&来来キョンシーズわろた" -320,1380899490,"#bridetips Lake Michigan Engagement from Kristin La Voie Photography http://t.co/I9tskzI6qI" -321,1380899490,"RT @Genesyslab: Top 5 Mistakes To Avoid When Moving Your Contact Center To the Cloud | Oct 9th 2PM ET / 11AM PT >> http://t.co/f1LH3sxB8f <…" -322,1380899490,"CGI 3D Animated Short HD: ""I, Pet Goat II"" by - Heliofant(+ 再生リスト): http://t.co/LA2zJYuWbV @youtubeさんから" -323,1380899490,"ME VIOLAN LA OREJA. 
http://t.co/TgpGfC3i94" -324,1380899490,"Piro gente." -325,1380899490,"@emdiemey solangs keine apfelpfannkuchen sind bleiben bratkartoffelz besser" -326,1380899490,"RT @JONBOOGIEE: I don't think y'all ready. #musicmonday @justinbieber http://t.co/FA0w0Z1bup" -327,1380899490,"RT @ohgirIquotes: I'm still in love with you." -328,1380899490,"RT @stargirlkah: @lloydmahoned eu te amo amiga,eu ja vou agora amo vc ♥" -329,1380899490,"Pues vamos ha hacer algo de tarea:)" -330,1380899490,"@yumeminemu レシピ教えて♡" -331,1380899490,"the bling ring" -332,1380899490,"ela ama ele ,ele ama ela , eles se amam , tudo mundo sabe , menos eles -#boa tarde" -333,1380899490,"@Atsinganoi Victimless!" -334,1380899490,"RT @shinema7253: 伝説のサスペンス映画 -アイデンティティー http://t.co/ZP5ciPB3km" -335,1380899490,"سبحان الله وبحمدهِ عدد خلقهِ ورضى نفسه وزنة عرشه ومداد كلماته." -336,1380899490,"@nyemiliamolins entra aquí https://t.co/7sG2URtcJ6 … … ve a ""ver galería"", luego, busca ""Franciel herrera de jesus"" y vota por mi. GRACIAS!" -337,1380899490,"RT @PuisiDariHati: Silap aku juga -Terlalu menyayangimu, dalam-dalam -Bukan ini mahu aku, tapi kalau ini untuk aku -Ya, terima kasih, semuanya…" -338,1380899490,"Mi madre vaya risazas." -339,1380899490,"bakit kaya ako paboritong papakin ng mga langgam" -340,1380899490,"RT @diarykecilkuu: Tuhan telah menciptakan bahagia untuk aku lewat kamu :)" -341,1380899490,"@tonia_ysmgo 私の意味不明な連想に反応ありがとうございます。toniaさんがすごいってことだったんだけど自分が読んでも意味わかんない。レス不要~^^;" -342,1380899490,"เป็นผู้หญิงที่ The badest female กันทั้งคู่เลยนะครับ 555555 #thesixthsense2" -343,1380899490,"Duit? Kaga butuh | pacar? Kaga penting | lalu? | gue lagi butuh tukang pijat karna dia lebih penting. Ahahaa" -344,1380899490,"4巻読了なので、復習にガーシュウィン「ラプソディ・イン・ブルー」とラフマニノフ「ピアノ協奏曲 2, ハ短調, Op. 18 - 1.」を聴いてみる…。" -345,1380899490,"RT @Faeez_petak: Done with fb.. thanks to all the wishes again.. hamoir 500org yg post di fb telah ku reply.. harap xde sape yg ketinggalan…" -346,1380899490,"¿Pocos Seguidores? [█ ̅ ̅ ̅ ̅ ̅ ̅ ̅ ̅ ̅ ̅ ̅ ̅ ̅ ̅ ̅ ̅ ̅] 17% Obten Seguidores siguiendo a ► @granhijodeperra y ganas hasta 5O Seguidores" -347,1380899490,"Mais quelle journée de kk. Vive le WE." -348,1380899490,"I just added this to my closet on Poshmark: Juicy Couture bracelet. http://t.co/089qVTTfK8 via @poshmarkapp #shopmycloset" -349,1380899490,"RT @medaGrumpyCat: Ghost hunters: Can you communicate with us? *Door creeks* Ghost hunters: Oh, so your name is Laura??" -350,1380899490,"RT @AFuckingPooh: @lovelyteenager2 xD pahahahahah" -351,1380899490,"RT @Ff3Raguna: #起きてる人rt" -352,1380899490,"RT @CynthiaIvette_: Happy early Birthday🎉🎈🎊@RuthlessE_ thanks for the cupcake😁👌" -353,1380899490,"http://t.co/is4V8MQxKL" -354,1380899490,"学校に泊まってたから、バスなの忘れてた。この時間、バスない\(^o^)/オワタ" -355,1380899490,"¿Pocos Seguidores? [█ ̅ ̅ ̅ ̅ ̅ ̅ ̅ ̅ ̅ ̅ ̅ ̅ ̅ ̅ ̅ ̅ ̅] 17% Obten Seguidores siguiendo a ► @granhijodeperra y ganas hasta 5O Seguidores" -356,1380899490,"@ljoeljoe1123 yahh today is your wife birthday. #happy21stchunji" -357,1380899490,"Indahnya berbagi dengan Anak Yatim untuk Pembangunan ""KOBONG ANAK YATIM"" | aksi @ Rp.10.000,- http://t.co/e37MFyK8GU" -358,1380899490,"vou me arrumar, e ir beeeeijú :*" -359,1380899490,"明日(今日)は木崎湖をに行く予定" -360,1380899490,"気持ちよかった" -361,1380899490,"esto me parecio muy tierno, fue amor a primera vista!! -10051 ByakuranxShoichi - ->Karina< http://t.co/AZiYNglm5v" -362,1380899490,"Hay que armar una bicicleteada (?) tuitera, que recorra la ciudad tomando fernet en los bares emblemáticos." 
-363,1380899490,"eating organge" -364,1380899489, -365,1380899489,"RT @MyersCorii: Home early" -366,1380899489,"Аватария в одноклассниках http://t.co/TjcB0vckIm" -367,1380899489, -368,1380899489, -369,1380899489,"RT @yuuki820: U-16の快挙を喜びつつチーム東京振り返り。スレイマンの怪我で急遽招集されたサワくん(ちなみに正しくはトカチョフ)は13得点11リバウンド。簡易だから出てないけどレイアップのブロックも上手かった。髪が伸びてるのも今日で見慣れましたw http://t…" -370,1380899489,"@03_7_3 @km_72どんなまいでもかわいいから大丈夫♪" -371,1380899489,"@fahmykun kesimpulan yg ditarik? Iya dr yg udah tjd dan/atau terbukti. - -Untuk kasus gitu, itulah gunanya pemahaman konsep sm adanya teori…" -372,1380899489,cansada -373,1380899489,"Sick and tired of you r shit I'm done" -374,1380899489,"“@GoGoHoratio: @out10emma @GoGoGorillas @AlanGorilla @_BlingKong @CatchMeWhileYo1 I'm going to live in a beautiful garden! :)” Good for you!" -375,1380899489,"Mackin' on Harry 😘 @ Oxford Street http://t.co/YG8SLWEeVM" -376,1380899489,"This lightweight read. http://t.co/3hymPoSi2R" -377,1380899489,"@vin_bio_ardoneo bienvenue merci de suivre nos news!" -378,1380899489,"Hj a prof. Eloiza quase me mato rindo" -379,1380899489,"Wkwk :D tau aja kmu din :P ""@didinfabregas: kalo si @wadiep mah penasaran itu tuh, haha jaim ajj dia nggk mau ngaku, wkwkkwkwk @himieumy""" -380,1380899489,"j'en vais le dire mtn" -381,1380899489,"3 people followed me // automatically checked by http://t.co/oMjDTMTE3s" -382,1380899489,"RT @itsnarrycrew: RT if LIAM, HARRY, NIALL, ZAYN, AND LOUIS are NOT following you! and i'll dm them to follow you! but you MUST be followin…" -383,1380899489,"RT @heyyouapp: » http://t.co/Kvu5w9Hd5j @heyyouapp Zombie Fitness PRO - aerobic,strength training workout app | #Health & Fitness #iPhone #…" -384,1380899489,"「立てよ、立て、セオデンの騎士らよ! 捨身の勇猛が眼ざめた、火と殺戮ぞ! 槍を振え、盾をくだけよ、剣の日ぞ、赤き血の日よぞ、日の上る前ぞ! いざ進め、いざ進め、ゴンドールへ乗り進め!」 ―セオデン" -385,1380899489,"Having tea cooked by Emily this evening :)" -386,1380899489,"@JBGill I dont think I've sobbed while watching a music video before. It is also a great song." -387,1380899489,"@bugyo_mi Oh…!跡部様にかっさらわれた…。そして7日は手塚誕なんで…!!" -388,1380899489,"@ilivelifedaily @CMB_Yungblack32 @Nikenando25 that nigga lips look like he having an allergic reaction. Looking like will smith in Hitch 😳." -389,1380899489,"@kituinoippattt こんばんわ #fxch #usdjpy http://t.co/IkeoJJlMxGで実況中" -390,1380899489,"اُمِي وأم من يقرأ : جَعلكم الله مِن السَبعِينْ ألفاً ؛ الذَينَ يَدخُلُونَ الجَنةّ بَلا حِسَاب ولا سابق عذاب ♥ - -#ساعة_استجابه""" -391,1380899489,"@daddy_yankee Buen día Sr. Ayala :)" -392,1380899489,"Parce que ma mere va changer de iPhone et je veux avoir son iPhone mais elle dit que je peux pas parce que je dois avoir un forfait-" -393,1380899489,"""@dianadeanfi: Jangan negative thinking atuh ih! asli gasukaa!!!""" -394,1380899489,"Mas nunca mais é 16:45?" -395,1380899489,"Tamires: ""olha lá o Pichani!"" Huehue" -396,1380899489,"アレン「あ、いたいた。」デビット「んあ?弟子じゃねーか。」ジャスデロ「ヒッ、何か用?」アレン「僕のバイト先で、ちょっと不足がありまして…短期で人材募集してるんです。よかったら来ませんか?」デビット「んー…今月割と手一杯…「まかないありの日給一万円(ぼそっ)」行く。やる。」" -397,1380899489, -398,1380899489,"kawaii desu ne :(" -399,1380899489,"الاف مبروك للامه العيناويه والاداره والاعبين وكل من ينتمي الي الصرح العيناوي ع الفوز" -400,1380899489,"@ninoyui_a 意外と田舎なんだよ〜(笑)" -401,1380899489,"Eu muito mal.. -(cólica)" -402,1380899489,"リミックスアルバムかっこよ過ぎるがなあああ!" -403,1380899489,"i hate that stupid old burgundy truck, you never let me drive. you're a redneck heartbreak whos really bad at lying." 
-404,1380899489,"アルティメットか何か忘れた、∞ランクでSランク帯のがよく出るみたいのはあったけど今作のドロ率だと悟りを開くかエリハムになるか" -405,1380899489,"graças a deus, sexta feira já çç" -406,1380899489,"#kangsomm ชอบทำให้ยิ้มตามอยู่เรื่อยเด็กบ้าเอ้ยยย >///<" -407,1380899489, -408,1380899489,"Kowangg memangggg osammmmmm :) :*" -409,1380899489,"サークルチェックしたいもん" -410,1380899489,"Target Deals: Sale Week of October 6 via http://t.co/nb367jX06n - Before you shop, check out ... http://t.co/YEIWi5ylL6" -411,1380899489,"ごっちさんいけめんんんんんん( ;∀;)" -412,1380899489,"Piction oh piction xD" -413,1380899489,"#96persen Penyelam tidak akan bisa kentut saat menyelam, pada kedalaman lebih dari 10 meter." -414,1380899488, -415,1380899488, -416,1380899488, -417,1380899488, -418,1380899488, -419,1380899488, -420,1380899488, -421,1380899488,"俺の部屋にバッタがぁぁぁあぁあ!!! -キモすぎーーーーーーー! -うぉぉぉおぉぉお!!! http://t.co/tcgHPWgKaT" -422,1380899488, -423,1380899488, -424,1380899488, -425,1380899488, -426,1380899488,"@MarelysQuintero #Viernesdebelloszapatosypies que no falte tu foto amiga mia" -427,1380899488, -428,1380899488,"Acting like I've finished the uni term! #3weeksIn" -429,1380899488,"@DiilennyDuran_ tato ;$" -430,1380899488,"@LeVraiHoroscope Les Taureau on toujours raison ! ;)" -431,1380899488, -432,1380899488,"RT @dear_my_deer: 131003 LUHAN INDEX UPDATE♥(2pics) #LUHAN 루한이 또 이러케 멋있쟈나 오빠쟈나 → http://t.co/lTMrB1swQR http://t.co/ci57MDOjca" -433,1380899488,"RT @reham54696: هل تريد السعادة ؟ دعني اضمك قليلاً وستنسى حياتك ~" -434,1380899488,"@CouniyaMamaw mdrrrrr" -435,1380899488,"RT @Fun_Beard: A year ago today my beautiful wife attempted suicide. People love you. There IS help: -1-800-273-8255 -http://t.co/6njoVkxVba -…" -436,1380899488,"@ayakasa_36 @momota_ro そうなんだよね でもそうもいかないのが人生だからマタニティマークつけてるんじゃない?" -437,1380899488,"@KimDibbers the pillow should be nigel ;)" -438,1380899488,"RT @slam173: صاااااادوووه 🙈🙉🙉👅 http://t.co/RCFyXTJFw9" -439,1380899488,"RT @Colonos_Cs: Vean a los asistentes a la #ViaCatalana: peligrosos radicales q desean romper la convivencia y fracturar la sociedad. http:…" -440,1380899488,"""@TalaAltaweel: احب وقتي معك اكثر من اي شي ثاني..""" -441,1380899488,"@chairunnisaAG ahluu... temen lo noh ah" -442,1380899488,"Degreee kat luar negara . Start a new life hehe" -443,1380899488,"@midokon407sj ありがとうございます。本来は暑いのダメなんで涼しいのwelcome!!なんですけどね。これだけ急激に涼しくなると、それはそれでしんどいです(^^; お休みなさいませ~☆" -444,1380899488,"RT @Fact: McDonald's hamburgers contains only 15% real beef while the other 85% is meat filler & pink slime cleansed with ammonia which cau…" -445,1380899488,"RT @elsya_yonata: @reginaivanova4 @NovitaDewiXF @chelseaolivia92. Precious Moments Eau de Parfum .. ID Line : elsyayonata(msh bnyk bermacam…" -446,1380899488,"RT @TuiterHits: - ¿Es aquí la reunión de poetas violentos? - -- Bienvenido, -toma asiento -y como hagas ruido -te reviento." -447,1380899488,"@Tech_NIQ_ue Thatsssss Crazyyyyyyy " -448,1380899488,"Wkakakak,make up dlu cyiinn""@SukartiPutri: Aku cinta, tapi gengsi ~""" -449,1380899488,"@GummyRebel will pray fr you mann ! Thiss time kau cmfrm pass witb flying colours lahh .. :) where you ?" -450,1380899488,"abis ngadep laptop cuci muka jadi segerr ¤(^_^)¤" -451,1380899488,"Bence kışın en güzel yanı; kahve, yatak, film üçlüsü." -452,1380899488,"Siiiike :p" -453,1380899488,"@LaloSaenger wow yo amo a John Mayer y que te guste a ti hace tu musica perfecta" -454,1380899488,"[名古屋イベント] 動物フェスティバル2013なごや http://t.co/iFfaFxwimJ #Event_Nagoya" -455,1380899488,"RT @YldzOguz: Yargıçlar Sendikası Başk. 
Ö.Faruk Eminağaoğlu'nun da geziden dolayı meslekten ihraç ve 11 yıla kadar hapsi isteniyor http://t…" -456,1380899488,"RT @shona_0507: *はるちゃん* -・優しい -・錦戸 -・最強eighter - -雑www" -457,1380899488,"Slmtketemubskyaaaa❤!" -458,1380899488, -459,1380899488,"@yukkuri_bouto 気をつけて帰ってくださいね(´・ω・)背後から見守ってま(ry" -460,1380899488,"RT @TeamPusongBato: Swerte mo. Iniyakan kita." -461,1380899488,"Amr Diab - Odam Oyounak عمرو دياب - قدام عيونك http://t.co/dSJIM4IIaX" -462,1380899488,"#BringBackMoorman #BillsMafia" -463,1380899488,"try lah @rynnfreaxy" -464,1380899488,"RT @TitsTatsAssKink: →#PussyDayEveryDay #GreatAss #FingeringHerAss ◄ » #Ass_TitsTatsAssKink -#PicGods «Tits♦Tats♦Ass♦Kink» http://t.co/xObqL…" -465,1380899488,"@afiqahhamidi96 ohh pkul brp kau pi?" -466,1380899488,"Pharmacy Staff Pharmacist - Decatur, TX http://t.co/sZijNJnbDY" -467,1380899488,"Haaa yelaaa qiss @QJaine" -468,1380899488,"@secretakz ぜ、ぜってーかわいくねえすから 大人のなでなでっつうのは〜、女の子とかがやるよしよしみたいのじゃなくてこう、くしゃってやるやつっすよ!ほらやるじゃん男が女にさ…こう、くしゃって…あれっすよアレ" -469,1380899488,"RT @supertud: มันเป็นโมเม้นหนึ่งที่ใครๆก็เคยรู้สึก.. http://t.co/wChE3gy3kg" -470,1380899488,"♫ In time it will reveal ♫ That special love that's deep inside of us ♫ will all reveal in time ♫ #NowPlaying http://t.co/hiGI3uSejG" -471,1380899488,"RT @MonkeyJo_: @maribellymora okay! When it syops raining. Tomorrow night?" -472,1380899488,"11:11 peace of mind" -473,1380899488,"Aml ♡ - - حِينْ يسِألوُنيَ عٌنكك : سَ أقوُل سعادهہ دخلت في حياتي ولا اريدهآ أن تزول ....(=| <3" -474,1380899488,wskqwsoidkiejdoqjdijsak -475,1380899488,"@nuratiqahmad kann! Terus teringat kau hahahah 🙊" -476,1380899488,"Vi el mosco mas horrible del mundo!!!" -477,1380899488,"RT @RealGyptian: Wanna speak to @RealGyptian LIVE on Mon 7 Oct via the new #BBMChannels from @BBM & @UK_BlackBerry find out more here: http…" -478,1380899488,"@ulanwln @bratha_wide coba tanya bang rama. Ulan leh ikut tau gak" -479,1380899488,"Nuovo genius loci. Storia e antologia della letteratura latina. Con espansione online. Per le Scuole superiori: 3 http://t.co/ysW2jvctgw" -480,1380899488,"Ketemu sama lo itu kaya udah ketemu -neraka!! Bawaannya panes mulu!!" -481,1380899488,"気が付いたらよるほーでした" -482,1380899488,"I.G!うおおおお楽しみだなあああ" -483,1380899488,"Je Ne Comprends Pas Diego , Il Connait Violetta Sa Va Faire Une Heure & Il L'aime Déjà o.0 Veut-Il Rendre Jaloux Léon ? o.0" -484,1380899488,"_(┐ ノε¦)_" -485,1380899488,"はじまった!" -486,1380899488,"Kepikiran mimpi td siang....pengen bgt jd nyata :))" -487,1380899487, -488,1380899487, -489,1380899487,"@SyafiSalehan ada apa??" -490,1380899487, -491,1380899487,"Yo no soy capaz de dejarte http://t.co/KsZF4AUeqL" -492,1380899487,"1 MONTH http://t.co/DftUuaTcmB" -493,1380899487, -494,1380899487, -495,1380899487,"Polémique...? #LT" -496,1380899487,"คือวันนี้ให้เวลาทำข้อสอบ 3 ชม. ชม.แรกดูคลิปแล้ววิจารณ์ก็เสียเวลาตรงนั้นไปเยอะ ทำข้อสอบทีต้องร่างก่อนนะแล้วค่อยลงกระดาษส่งจริง แล้วก็ทำไม่ทัน" -497,1380899487,"かわいい。どうしよう。かわいい。 -にこにこしてるかわいい!" -498,1380899487,"有名なのは、この オルチャンブレスです^^ -市販のシリコンゴムなどで簡単に作れます★ -みなさんもぜひつくってみてください! - -(外国にいくときは、はずしたほうがいいです!) http://t.co/kdInkAIGnj" -499,1380899487, diff --git a/examples/searchcommands_app/package/default/app.conf b/examples/searchcommands_app/package/default/app.conf deleted file mode 100644 index 7229e82dc..000000000 --- a/examples/searchcommands_app/package/default/app.conf +++ /dev/null @@ -1,11 +0,0 @@ -[launcher] -description = {description} -author = Splunk, Inc. 
-version = {version} - -[package] -id = {name} - -[ui] -label = Custom search command examples -is_visible = 1 diff --git a/examples/searchcommands_app/package/default/commands.conf b/examples/searchcommands_app/package/default/commands.conf deleted file mode 100644 index 4ef41c556..000000000 --- a/examples/searchcommands_app/package/default/commands.conf +++ /dev/null @@ -1,27 +0,0 @@ -# [commands.conf]($SPLUNK_HOME/etc/system/README/commands.conf.spec) -# Configuration for Search Commands Protocol version 2 - -[countmatches] -filename = countmatches.py -chunked = true -python.version = python3 - -[filter] -filename = filter.py -chunked = true -python.version = python3 - -[generatetext] -filename = generatetext.py -chunked = true -python.version = python3 - -[simulate] -filename = simulate.py -chunked = true -python.version = python3 - -[sum] -filename = sum.py -chunked = true -python.version = python3 diff --git a/examples/searchcommands_app/package/default/distsearch.conf b/examples/searchcommands_app/package/default/distsearch.conf deleted file mode 100644 index 1c13e5414..000000000 --- a/examples/searchcommands_app/package/default/distsearch.conf +++ /dev/null @@ -1,7 +0,0 @@ -# Valid in <=8.2 -[replicationWhitelist] -searchcommands_app = apps/searchcommands_app/lib/... - -# Valid in >=8.3 -[replicationAllowlist] -searchcommands_app = apps/searchcommands_app/lib/... diff --git a/examples/searchcommands_app/package/default/logging.conf b/examples/searchcommands_app/package/default/logging.conf deleted file mode 100644 index f3220a63d..000000000 --- a/examples/searchcommands_app/package/default/logging.conf +++ /dev/null @@ -1,99 +0,0 @@ -# -# The format and semantics of this file are described in this article at Python.org: -# -# [Configuration file format](https://docs.python.org/2/library/logging.config.html#configuration-file-format) -# -[loggers] -keys = root, splunklib, CountMatchesCommand, GenerateHelloCommand, GenerateTextCommand, SimulateCommand, SumCommand - -[logger_root] -# Default: WARNING -level = WARNING -# Default: stderr -handlers = stderr - -[logger_splunklib] -qualname = splunklib -# Default: WARNING -level = NOTSET -# Default: stderr -handlers = splunklib -# Default: 1 -propagate = 0 - -[logger_CountMatchesCommand] -qualname = CountMatchesCommand -# Default: WARNING -level = NOTSET -# Default: stderr -handlers = app -# Default: 1 -propagate = 0 - -[logger_GenerateHelloCommand] -qualname = GenerateHelloCommand -# Default: WARNING -level = DEBUG -# Default: stderr -handlers = app -# Default: 1 -propagate = 0 - -[logger_GenerateTextCommand] -qualname = GenerateTextCommand -# Default: WARNING -level = DEBUG -# Default: stderr -handlers = app -# Default: 1 -propagate = 0 - -[logger_SimulateCommand] -qualname = SimulateCommand -# Default: WARNING -level = NOTSET -# Default: stderr -handlers = app -# Default: 1 -propagate = 0 - -[logger_SumCommand] -qualname = SumCommand -# Default: WARNING -level = NOTSET -# Default: stderr -handlers = splunklib -# Default: 1 -propagate = 0 - -[handlers] -# See [logging.handlers](https://docs.python.org/2/library/logging.handlers.html) -keys = app, splunklib, stderr - -[handler_app] -# Select this handler to log events to searchcommands_app.log -class = logging.handlers.RotatingFileHandler -level = NOTSET -args = ('searchcommands_app.log', 'a', 524288000, 9, 'utf-8', True) -formatter = searchcommands - -[handler_splunklib] -# Select this handler to log events to splunklib.log -class = logging.handlers.RotatingFileHandler -args = 
('splunklib.log', 'a', 524288000, 9, 'utf-8', True) -level = NOTSET -formatter = searchcommands - -[handler_stderr] -# Select this handler to log events to stderr which splunkd redirects to the associated job's search.log file -class = logging.StreamHandler -level = NOTSET -args = (sys.stderr,) -formatter = searchcommands - -[formatters] -keys = searchcommands - -[formatter_searchcommands] -format = %(asctime)s, Level=%(levelname)s, Pid=%(process)s, Logger=%(name)s, File=%(filename)s, Line=%(lineno)s, %(message)s -datefmt = %Y-%m-%d %H:%M:%S %Z diff --git a/examples/searchcommands_app/package/default/searchbnf.conf b/examples/searchcommands_app/package/default/searchbnf.conf deleted file mode 100644 index 8254f027d..000000000 --- a/examples/searchcommands_app/package/default/searchbnf.conf +++ /dev/null @@ -1,99 +0,0 @@ -# [searchbnf.conf](http://docs.splunk.com/Documentation/Splunk/latest/Admin/Searchbnfconf) - -[countmatches-command] -syntax = COUNTMATCHES FIELDNAME= PATTERN= -alias = -shortdesc = Counts the number of non-overlapping matches to a regular expression in a search result. -description = \ - This command augments records with a count of the number of non-overlapping matches to the regular expression \ - specified by PATTERN. The result is stored in the field specified by FIELDNAME. If FIELDNAME exists, its value is \ - replaced. If FIELDNAME does not exist, it is created. Results are otherwise passed through to the next pipeline \ - processor unmodified. -comment1 = \ - This example counts the number of words in the text of each tweet in the tweets lookup table and puts the result \ - in word_count. -example1 = \ - | inputlookup tweets | countmatches fieldname=word_count pattern="\\w+" text -category = streaming -appears-in = 1.2 -maintainer = dnoble -usage = public -tags = searchcommands_app - -[filter-command] -syntax = FILTER PREDICATE= UPDATE= -alias = -shortdesc = Filters, augments, and updates records on the events pipeline. -description = \ - This command filters records on the events pipeline returning only those for which the PREDICATE is true after \ - applying UPDATE statements. If no PREDICATE is specified, all records are returned. If no UPDATE is specified, \ - records are returned unmodified.\ - The predicate and update operations execute in a restricted scope that includes the standard Python built-in \ - module and the current record. Fields in the record are accessible by name as local variables. -comment1 = \ - This example excludes odd-numbered records and replaces all occurrences of "world" with "Splunk" in the _raw field \ - of the records produced by the generatetext command. -example1 = \ - | generatetext text="Hello world! How the heck are you?" count=6 \ - | filter predicate="(int(_serial) & 1) == 0" update="_raw = _raw.replace('world', 'Splunk')" -category = events -appears-in = 1.5 -maintainer = dnoble -usage = public -tags = searchcommands_app - -[generatetext-command] -syntax = GENERATETEXT COUNT= TEXT= -alias = -shortdesc = Generates a sequence of occurrences of a text string on the streams pipeline. -description = \ - This command generates COUNT occurrences of a TEXT string. Each occurrence is prefixed by its _SERIAL number and \ - stored in the _RAW field of each record. -comment1 = \ - This example generates 10 occurrences of the string "Hello world!". -example1 = | generatetext count=10 text="Hello world!" 
-category = generating -appears-in = 1.5 -maintainer = dnoble -usage = public -tags = searchcommands_app - -[simulate-command] -syntax = SIMULATE CSV= RATE= INTERVAL= DURATION= \ - [SEED=]? -alias = -shortdesc = Generates a sequence of events drawn from a csv file using repeated random sampling. -description = \ - This command uses repeated random samples of the event records in CSV for the execution period of DURATION. Sample \ - sizes are determined for each time INTERVAL in DURATION using a Poisson distribution with an average RATE \ - specifying the expected event count during INTERVAL. -comment1 = \ - This example generates events drawn by repeated random sampling of events from population.csv. Events are \ - drawn at an average rate of 50 per second for a duration of 5 seconds. Events are piped to the example \ - countmatches command which adds a word_count field containing the number of words in the text field of each event. \ - The mean and standard deviation of the word_count are then computed by the builtin stats command. -example1 = \ - | simulate csv="/opt/splunk/etc/apps/searchcommands_app/data/population.csv" rate=10 interval=00:00:01 duration=00:00:02 seed=1 -category = generating -appears-in = 1.2 -maintainer = dnoble -usage = public -tags = searchcommands_app - -[sum-command] -syntax = SUM TOTAL= -alias = -shortdesc = Computes the sum of a set of numeric fields. -description = \ - This command computes the sum of a set of numeric fields. The TOTAL produced is sum(sum(fieldname, 1, n), 1, N) \ - where n = number of fields in , N = number of records processed. -comment1 = This example computes the total number of words in the text field of the tweets lookup table. -example1 = \ - | inputlookup tweets \ - | countmatches fieldname=word_count pattern="\\w+" text \ - | sum total=word_counts word_count -category = reporting -appears-in = 1.2 -maintainer = dnoble -usage = public -tags = searchcommands_app diff --git a/examples/searchcommands_app/package/default/transforms.conf b/examples/searchcommands_app/package/default/transforms.conf deleted file mode 100644 index 5c08de91b..000000000 --- a/examples/searchcommands_app/package/default/transforms.conf +++ /dev/null @@ -1,2 +0,0 @@ -[tweets] -filename = tweets.csv.gz \ No newline at end of file diff --git a/examples/searchcommands_app/package/lookups/tweets.csv.gz b/examples/searchcommands_app/package/lookups/tweets.csv.gz deleted file mode 100644 index 82f1a7403..000000000 Binary files a/examples/searchcommands_app/package/lookups/tweets.csv.gz and /dev/null differ diff --git a/examples/searchcommands_app/package/metadata/default.meta b/examples/searchcommands_app/package/metadata/default.meta deleted file mode 100644 index 942c2219c..000000000 --- a/examples/searchcommands_app/package/metadata/default.meta +++ /dev/null @@ -1,2 +0,0 @@ -[] -access = read: [ * ], write : [ admin ] diff --git a/examples/searchcommands_template/bin/filter.py b/examples/searchcommands_template/bin/filter.py deleted file mode 100644 index 153c76a69..000000000 --- a/examples/searchcommands_template/bin/filter.py +++ /dev/null @@ -1,28 +0,0 @@ -#!/usr/bin/env python - -import sys -import os - -sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..", "lib")) -from splunklib.searchcommands import \ - dispatch, EventingCommand, Configuration, Option, validators - - -@Configuration() -class %(command.title())Command(EventingCommand): - """ %(synopsis) - - ##Syntax - - %(syntax) - - ##Description - - %(description) - - """ - def transform(self, 
events): - # Put your event transformation code here - pass - -dispatch(%(command.title())Command, sys.argv, sys.stdin, sys.stdout, __name__) diff --git a/examples/searchcommands_template/bin/generate.py b/examples/searchcommands_template/bin/generate.py deleted file mode 100644 index 4622b3c95..000000000 --- a/examples/searchcommands_template/bin/generate.py +++ /dev/null @@ -1,27 +0,0 @@ -#!/usr/bin/env python - -import sys -import os - -sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..", "lib")) -from splunklib.searchcommands import \ - dispatch, GeneratingCommand, Configuration, Option, validators - -@Configuration() -class %(command.title())Command(GeneratingCommand): - """ %(synopsis) - - ##Syntax - - %(syntax) - - ##Description - - %(description) - - """ - def generate(self): - # Put your event code here - pass - -dispatch(%(command.title())Command, sys.argv, sys.stdin, sys.stdout, __name__) diff --git a/examples/searchcommands_template/bin/report.py b/examples/searchcommands_template/bin/report.py deleted file mode 100644 index 2d5269878..000000000 --- a/examples/searchcommands_template/bin/report.py +++ /dev/null @@ -1,34 +0,0 @@ -#!/usr/bin/env python - -import sys -import os - -sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..", "lib")) -from splunklib.searchcommands import \ - dispatch, ReportingCommand, Configuration, Option, validators - - -@Configuration() -class %(command.title())Command(ReportingCommand): - """ %(synopsis) - - ##Syntax - - %(syntax) - - ##Description - - %(description) - - """ - @Configuration() - def map(self, events): - # Put your streaming preop implementation here, or remove the map method, - # if you have no need for a streaming preop - pass - - def reduce(self, events): - # Put your reporting implementation - pass - -dispatch(%(command.title())Command, sys.argv, sys.stdin, sys.stdout, __name__) diff --git a/examples/searchcommands_template/bin/stream.py b/examples/searchcommands_template/bin/stream.py deleted file mode 100644 index fa946a02c..000000000 --- a/examples/searchcommands_template/bin/stream.py +++ /dev/null @@ -1,29 +0,0 @@ -#!/usr/bin/env python - -import sys -import os - -sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..", "lib")) -from splunklib.searchcommands import \ - dispatch, StreamingCommand, Configuration, Option, validators - - -@Configuration() -class %(command.title())Command(StreamingCommand): - """ %(synopsis) - - ##Syntax - - %(syntax) - - ##Description - - %(description) - - """ - def stream(self, events): - # Put your event transformation code here - for event in events: - yield event - -dispatch(%(command.title())Command, sys.argv, sys.stdin, sys.stdout, __name__) diff --git a/examples/searchcommands_template/default/app.conf b/examples/searchcommands_template/default/app.conf deleted file mode 100644 index 86f324e51..000000000 --- a/examples/searchcommands_template/default/app.conf +++ /dev/null @@ -1,16 +0,0 @@ -# Splunk app configuration file - -[ui] -label = %(app_label) -is_visible = 1 - -[launcher] -description = %(app_description) -author = %(app_author) -version = %(app_version) - -[package] -id = %(app_id) - -[install] -is_configured = 0 diff --git a/examples/searchcommands_template/default/commands-scpv1.conf b/examples/searchcommands_template/default/commands-scpv1.conf deleted file mode 100644 index 30f4571ca..000000000 --- a/examples/searchcommands_template/default/commands-scpv1.conf +++ /dev/null @@ -1,12 +0,0 @@ -# 
[commands.conf]($SPLUNK_HOME/etc/system/README/commands.conf.spec)
-# Configuration for Search Commands Protocol version 1
-
-[%(command.lower())]
-filename = %(command.lower()).py
-enableheader = true
-outputheader = true
-requires_srinfo = true
-stderr_dest = message
-supports_getinfo = true
-supports_rawargs = true
-supports_multivalues = true
diff --git a/examples/searchcommands_template/default/commands-scpv2.conf b/examples/searchcommands_template/default/commands-scpv2.conf
deleted file mode 100644
index 79b7e3fc1..000000000
--- a/examples/searchcommands_template/default/commands-scpv2.conf
+++ /dev/null
@@ -1,6 +0,0 @@
-# [commands.conf]($SPLUNK_HOME/etc/system/README/commands.conf.spec)
-# Configuration for Search Commands Protocol version 2
-
-[%(command.lower())]
-filename = %(command.lower()).py
-chunked = true
diff --git a/examples/searchcommands_template/default/commands.conf b/examples/searchcommands_template/default/commands.conf
deleted file mode 100644
index 8e6d9fa7c..000000000
--- a/examples/searchcommands_template/default/commands.conf
+++ /dev/null
@@ -1,13 +0,0 @@
-# [commands.conf]($SPLUNK_HOME/etc/system/README/commands.conf.spec)
-# Configured for Search Command Protocol version 1 by default
-# Replace the contents of this file with commands-scpv2.conf to enable Search Command Protocol version 2
-
-[%(command.lower())]
-filename = %(command.lower()).py
-enableheader = true
-outputheader = true
-requires_srinfo = true
-stderr_dest = message
-supports_getinfo = true
-supports_rawargs = true
-supports_multivalues = true
diff --git a/examples/searchcommands_template/default/data/ui/nav/default.xml b/examples/searchcommands_template/default/data/ui/nav/default.xml
deleted file mode 100644
index c2128a6f3..000000000
--- a/examples/searchcommands_template/default/data/ui/nav/default.xml
+++ /dev/null
@@ -1,18 +0,0 @@
- 
diff --git a/examples/searchcommands_template/default/distsearch.conf b/examples/searchcommands_template/default/distsearch.conf
deleted file mode 100644
index 8abbe3b9e..000000000
--- a/examples/searchcommands_template/default/distsearch.conf
+++ /dev/null
@@ -1,7 +0,0 @@
-# Valid in <=8.2
-[replicationWhitelist]
-searchcommands_template = apps/searchcommands_template/lib/...
-
-# Valid in >=8.3
-[replicationAllowlist]
-searchcommands_template = apps/searchcommands_template/lib/...
diff --git a/examples/searchcommands_template/default/logging.conf b/examples/searchcommands_template/default/logging.conf deleted file mode 100644 index 4efb7e40c..000000000 --- a/examples/searchcommands_template/default/logging.conf +++ /dev/null @@ -1,64 +0,0 @@ -# -# The format of this file is described in this article at Python.org: -# -# [Configuration file format](https://docs.python.org/2/library/logging.config.html#configuration-file-format) -# -[loggers] -keys = root, splunklib, %(command.title())Command - -[logger_root] -# Default: WARNING -level = WARNING -# Default: stderr -handlers = stderr - -[logger_splunklib] -qualname = splunklib -# Default: WARNING -level = NOTSET -# Default: stderr -handlers = splunklib -# Default: 1 -propagate = 0 - -[logger_SearchCommand] -qualname = SearchCommand - -[logger_%(command.title())Command] -qualname = %(command.title())Command -# Default: WARNING -level = NOTSET -# Default: stderr -handlers = app -# Default: 1 -propagate = 0 - -[handlers] -keys = app, splunklib, stderr - -[handler_app] -# Select this handler to log events to $SPLUNK_HOME/var/log/splunk/searchcommands_app.log -class = logging.handlers.RotatingFileHandler -level = NOTSET -args = ('%(SPLUNK_HOME)s/var/log/splunk/searchcommands_app.log', 'a', 524288000, 9, 'utf-8', True) -formatter = searchcommands - -[handler_splunklib] -# Select this handler to log events to $SPLUNK_HOME/var/log/splunk/splunklib.log -class = logging.handlers.RotatingFileHandler -args = ('%(SPLUNK_HOME)s/var/log/splunk/splunklib.log', 'a', 524288000, 9, 'utf-8', True) -level = NOTSET -formatter = searchcommands - -[handler_stderr] -# Select this handler to log events to stderr which splunkd redirects to the associated job's search.log file -class = logging.StreamHandler -level = NOTSET -args = (sys.stderr,) -formatter = searchcommands - -[formatters] -keys = searchcommands - -[formatter_searchcommands] -format = %(asctime)s, Level=%(levelname)s, Pid=%(process)s, Logger=%(name)s, File=%(filename)s, Line=%(lineno)s, %(message)s diff --git a/examples/searchcommands_template/metadata/default.meta b/examples/searchcommands_template/metadata/default.meta deleted file mode 100644 index 942c2219c..000000000 --- a/examples/searchcommands_template/metadata/default.meta +++ /dev/null @@ -1,2 +0,0 @@ -[] -access = read: [ * ], write : [ admin ] diff --git a/examples/spcmd.py b/examples/spcmd.py deleted file mode 100755 index 28b4e9a93..000000000 --- a/examples/spcmd.py +++ /dev/null @@ -1,141 +0,0 @@ -#!/usr/bin/env python -# -# Copyright 2011-2015 Splunk, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"): you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -# This tool basically provides a little sugar on top of the Python interactive -# command interpreter. It establishes a "default" connection and makes the -# properties of that connection ambient. It also picks up known local variables -# and passes those values as options to various commands. 
For example, you can
-# set the default output_mode for a session by simply setting a local variable
-# 'output_mode' to a legal output_mode value.
-
-"""An interactive command shell for Splunk."""
-
-from __future__ import absolute_import
-from __future__ import print_function
-from code import compile_command, InteractiveInterpreter
-try:
-    import readline # Activates readline editing, ignore for Windows
-except ImportError:
-    pass
-import sys, os
-sys.path.insert(0, os.path.join(os.path.dirname(__file__), ".."))
-
-from splunklib.six.moves import input as raw_input
-import splunklib.client as client
-
-try:
-    import utils
-except ImportError:
-    raise Exception("Add the SDK repository to your PYTHONPATH to run the examples "
-                    "(e.g., export PYTHONPATH=~/splunk-sdk-python).")
-
-class Session(InteractiveInterpreter):
-    def __init__(self, **kwargs):
-        self.service = client.connect(**kwargs)
-        self.delete = self.service.delete
-        self.get = self.service.get
-        self.post = self.service.post
-        locals = {
-            'service': self.service,
-            'connect': client.connect,
-            'delete': self.delete,
-            'get': self.get,
-            'post': self.post,
-            'load': self.load,
-        }
-        InteractiveInterpreter.__init__(self, locals)
-
-    def eval(self, expression):
-        return self.runsource(expression)
-
-    def load(self, filename):
-        exec(open(filename).read(), self.locals, self.locals)
-
-    # Run the interactive interpreter
-    def run(self):
-        print("Welcome to Splunk SDK's Python interactive shell")
-        print("%s connected to %s:%s" % (
-            self.service.username,
-            self.service.host,
-            self.service.port))
-
-        while True:
-            try:
-                input = raw_input("> ")
-            except EOFError:
-                print("\n\nThanks for using Splunk>.\n")
-                return
-
-            if input is None:
-                return
-
-            if len(input) == 0:
-                continue # Ignore
-
-            try:
-                # Gather up lines until we have a fragment that compiles
-                while True:
-                    co = compile_command(input)
-                    if co is not None: break
-                    input = input + '\n' + raw_input(". ") # Keep trying
-            except SyntaxError:
-                self.showsyntaxerror()
-                continue
-            except Exception as e:
-                print("Error: %s" % e)
-                continue
-
-            self.runcode(co)
-
-RULES = {
-    "eval": {
-        'flags': ["-e", "--eval"],
-        'action': "append",
-        'help': "Evaluate the given expression",
-    },
-    "interactive": {
-        'flags': ["-i", "--interactive"],
-        'action': "store_true",
-        'help': "Enter interactive mode",
-    }
-}
-
-def actions(opts):
-    """Answers whether the given command line options specify any 'actions'."""
-    return len(opts.args) > 0 or 'eval' in opts.kwargs
-
-def main():
-    opts = utils.parse(sys.argv[1:], RULES, ".env")
-
-    # Connect and initialize the command session
-    session = Session(**opts.kwargs)
-
-    # Load any non-option args as script files
-    for arg in opts.args:
-        session.load(arg)
-
-    # Process any command line evals
-    for arg in opts.kwargs.get('eval', []):
-        session.eval(arg)
-
-    # Enter interactive mode automatically if no actions were specified
-    # or if interactive mode was specifically requested.
-    if not actions(opts) or "interactive" in opts.kwargs:
-        session.run()
-
-if __name__ == "__main__":
-    main()
-
diff --git a/examples/spurl.py b/examples/spurl.py
deleted file mode 100755
index 748b56d9c..000000000
--- a/examples/spurl.py
+++ /dev/null
@@ -1,56 +0,0 @@
-#!/usr/bin/env python
-#
-# Copyright 2011-2015 Splunk, Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"): you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-"""A simple command line interface for the Splunk REST APIs."""
-
-from __future__ import absolute_import
-from __future__ import print_function
-import sys, os
-sys.path.insert(0, os.path.join(os.path.dirname(__file__), ".."))
-from xml.etree import ElementTree
-
-import splunklib.binding as binding
-
-try:
-    import utils
-except ImportError:
-    raise Exception("Add the SDK repository to your PYTHONPATH to run the examples "
-                    "(e.g., export PYTHONPATH=~/splunk-sdk-python).")
-
-# Invoke the URL using the given opts parameters
-def invoke(path, **kwargs):
-    method = kwargs.get("method", "GET")
-    return binding.connect(**kwargs).request(path, method=method)
-
-def print_response(response):
-    if response.status != 200:
-        print("%d %s" % (response.status, response.reason))
-        return
-    body = response.body.read()
-    try:
-        root = ElementTree.XML(body)
-        print(ElementTree.tostring(root))
-    except Exception:
-        print(body)
-
-def main():
-    opts = utils.parse(sys.argv[1:], {}, ".env")
-    for arg in opts.args:
-        print_response(invoke(arg, **opts.kwargs))
-
-if __name__ == "__main__":
-    main()
-
diff --git a/examples/stail.py b/examples/stail.py
deleted file mode 100755
index 6ba4ee54e..000000000
--- a/examples/stail.py
+++ /dev/null
@@ -1,64 +0,0 @@
-#!/usr/bin/env python
-#
-# Copyright 2011-2015 Splunk, Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"): you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-"""Tails a realtime search using the export endpoint and prints results to
-   stdout."""
-
-from __future__ import absolute_import
-from __future__ import print_function
-import sys, os
-sys.path.insert(0, os.path.join(os.path.dirname(__file__), ".."))
-
-from pprint import pprint
-
-from splunklib.client import connect
-from splunklib.results import JSONResultsReader
-
-try:
-    import utils
-except ImportError:
-    raise Exception("Add the SDK repository to your PYTHONPATH to run the examples "
-                    "(e.g., export PYTHONPATH=~/splunk-sdk-python).")
-
-def main():
-    usage = "usage: %prog "
-    opts = utils.parse(sys.argv[1:], {}, ".env", usage=usage)
-
-    if len(opts.args) != 1:
-        utils.error("Search expression required", 2)
-    search = opts.args[0]
-
-    service = connect(**opts.kwargs)
-
-    try:
-        result = service.get(
-            "search/jobs/export",
-            search=search,
-            earliest_time="rt",
-            latest_time="rt",
-            search_mode="realtime",
-            output_mode="json")
-
-        for result in JSONResultsReader(result.body):
-            if result is not None:
-                pprint(result)
-
-    except KeyboardInterrupt:
-        print("\nInterrupted.")
-
-if __name__ == "__main__":
-    main()
-
diff --git a/examples/submit.py b/examples/submit.py
deleted file mode 100755
index 1e74e7a49..000000000
--- a/examples/submit.py
+++ /dev/null
@@ -1,85 +0,0 @@
-#!/usr/bin/env python
-#
-# Copyright 2011-2015 Splunk, Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"): you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-"""A command line utility that submits event data to Splunk from stdin."""
-
-from __future__ import absolute_import
-import sys, os
-sys.path.insert(0, os.path.join(os.path.dirname(__file__), ".."))
-
-import splunklib.client as client
-
-try:
-    from utils import *
-except ImportError:
-    raise Exception("Add the SDK repository to your PYTHONPATH to run the examples "
-                    "(e.g., export PYTHONPATH=~/splunk-sdk-python).")
-
-RULES = {
-    "eventhost": {
-        'flags': ["--eventhost"],
-        'help': "The event's host value"
-    },
-    "source": {
-        'flags': ["--eventsource"],
-        'help': "The event's source value"
-    },
-    "sourcetype": {
-        'flags': ["--sourcetype"],
-        'help': "The event's sourcetype"
-    }
-}
-
-def main(argv):
-    usage = 'usage: %prog [options] '
-    opts = parse(argv, RULES, ".env", usage=usage)
-
-    if len(opts.args) == 0: error("Index name required", 2)
-    index = opts.args[0]
-
-    kwargs_splunk = dslice(opts.kwargs, FLAGS_SPLUNK)
-    service = client.connect(**kwargs_splunk)
-
-    if index not in service.indexes:
-        error("Index '%s' does not exist." % index, 2)
-
-    kwargs_submit = dslice(opts.kwargs,
-        {'eventhost':'host'}, 'source', 'sourcetype')
-
-    #
-    # The following code uses the Splunk streaming receiver in order
-    # to reduce the buffering of event data read from stdin, which makes
-    # this tool a little friendlier for submitting large event streams.
-    # However, if the buffering is not a concern, you can achieve the
-    # submit somewhat more directly using Splunk's 'simple' receiver,
-    # as follows:
-    #
-    #     event = sys.stdin.read()
-    #     service.indexes[index].submit(event, **kwargs_submit)
-    #
-
-    cn = service.indexes[index].attach(**kwargs_submit)
-    try:
-        while True:
-            line = sys.stdin.readline().rstrip('\r\n')
-            if len(line) == 0: break
-            cn.write(line)
-    finally:
-        cn.close()
-
-if __name__ == "__main__":
-    main(sys.argv[1:])
-
diff --git a/examples/twitted/README.md b/examples/twitted/README.md
deleted file mode 100644
index 7a82bdf5a..000000000
--- a/examples/twitted/README.md
+++ /dev/null
@@ -1,31 +0,0 @@
-# Twitted
-
-This is a simple Splunk application that indexes the output of the Twitter
-"spritzer" and provides a collection of saved searches for inspecting the
-resulting Twitter data, and also two sample custom search commands.
-
-This sample serves two purposes: first, it's a fun and readily available data
-source to use to learn and explore Splunk, and second, the input script
-demonstrates how to use the SDK to "push" data into Splunk using a TCP input.
-
-Note that the input script is not implemented as a Splunk scripted input. It's
-designed to run standalone so that it's convenient for you to experiment with.
-If this were a real Splunk app, the input script would be written as a full
-Splunk scripted input so that Splunk could manage its execution.
-
-To deploy the application, all you need to do is copy (or link) the
-twitted subdirectory (that is, .../splunk-sdk-python/examples/twitted/twitted) to
-the Splunk app directory at $SPLUNK_HOME/etc/apps/twitted.
-
-Then, to run the app, all you have to do is type:
-
-    python ./input.py
-
-and the script will prompt you for your Twitter credentials. The script takes a
---verbose={0..2} flag so that you can specify how much info is written to
-stdout. Note that the verbosity level does not change what the script feeds
-to Splunk for indexing.
-
-Once the input script is up and running, you can start exploring the data using
-Splunk, the splunk CLI, or any of the SDK command line tools.
-
diff --git a/examples/twitted/clean b/examples/twitted/clean
deleted file mode 100755
index 29334d915..000000000
--- a/examples/twitted/clean
+++ /dev/null
@@ -1,4 +0,0 @@
-#!/bin/bash
-
-../index.py clean twitter
-
diff --git a/examples/twitted/input.py b/examples/twitted/input.py
deleted file mode 100755
index e907cc55d..000000000
--- a/examples/twitted/input.py
+++ /dev/null
@@ -1,286 +0,0 @@
-#!/usr/bin/env python
-#
-# Copyright 2011-2015 Splunk, Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"): you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-from __future__ import absolute_import
-from __future__ import print_function
-from pprint import pprint
-
-import base64
-from getpass import getpass
-import splunklib.six.moves.http_client
-import json
-import socket
-import sys
-import os
-from splunklib import six
-from splunklib.six.moves import input
-sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..", ".."))
-
-
-import splunklib.client as client
-
-from utils import error, parse
-
-TWITTER_STREAM_HOST = "stream.twitter.com"
-TWITTER_STREAM_PATH = "/1/statuses/sample.json"
-
-DEFAULT_SPLUNK_HOST = "localhost"
-DEFAULT_SPLUNK_PORT = 9001
-
-ingest = None # The splunk ingest socket
-verbose = 1
-
-class Twitter:
-    def __init__(self, username, password):
-        self.buffer = ""
-        self.username = username
-        self.password = password
-
-    def connect(self):
-        # Login using basic auth
-        login = "%s:%s" % (self.username, self.password)
-        token = "Basic " + str.strip(base64.encodestring(login))
-        headers = {
-            'Content-Length': "0",
-            'Authorization': token,
-            'Host': "stream.twitter.com",
-            'User-Agent': "twitted.py/0.1",
-            'Accept': "*/*",
-        }
-        connection = six.moves.http_client.HTTPSConnection(TWITTER_STREAM_HOST)
-        connection.request("GET", TWITTER_STREAM_PATH, "", headers)
-        response = connection.getresponse()
-        if response.status != 200:
-            raise Exception("HTTP Error %d (%s)" % (
-                response.status, response.reason))
-        return response
-
-RULES = {
-    'tusername': {
-        'flags': ["--twitter:username"],
-        'help': "Twitter username",
-    },
-    'tpassword': {
-        'flags': ["--twitter:password"],
-        'help': "Twitter password",
-    },
-    'inputhost': {
-        'flags': ["--input:host"],
-        'help': "Host address for Splunk (default: localhost)",
-    },
-    'inputport': {
-        'flags': ["--input:port"],
-        'help': "Port to use for Splunk TCP input (default: 9001)",
-    },
-    'verbose': {
-        'flags': ["--verbose"],
-        'default': 1,
-        'type': "int",
-        'help': "Verbosity level (0-2, default 1)",
-    }
-}
-
-def cmdline():
-    kwargs = parse(sys.argv[1:], RULES, ".env").kwargs
-
-    # Prompt for Twitter username/password if not provided on command line
-    if 'tusername' not in kwargs:
-        kwargs['tusername'] = input("Twitter username: ")
-    if 'tpassword' not in kwargs:
-        kwargs['tpassword'] = getpass("Twitter password:")
-
-    # Prompt for Splunk username/password if not provided on command line
-    if 'username' not in kwargs:
-        kwargs['username'] = input("Splunk username: ")
-    if 'password' not in kwargs:
-        kwargs['password'] = getpass("Splunk password:")
-
-    return kwargs
-
-# Returns a str, dict or simple list
-def flatten(value, prefix=None):
-    """Takes an arbitrary JSON(ish) object and 'flattens' it into a dict
-    with values consisting of either simple types or lists of simple
-    types."""
-
-    def issimple(value): # foldr(True, or, value)?
-        for item in value:
-            if isinstance(item, dict) or isinstance(item, list):
-                return False
-        return True
-
-    if isinstance(value, six.text_type):
-        return value.encode("utf8")
-
-    if isinstance(value, list):
-        if issimple(value): return value
-        offset = 0
-        result = {}
-        prefix = "%d" if prefix is None else "%s_%%d" % prefix
-        for item in value:
-            k = prefix % offset
-            v = flatten(item, k)
-            if not isinstance(v, dict): v = {k:v}
-            result.update(v)
-            offset += 1
-        return result
-
-    if isinstance(value, dict):
-        result = {}
-        prefix = "%s" if prefix is None else "%s_%%s" % prefix
-        for k, v in six.iteritems(value):
-            k = prefix % str(k)
-            v = flatten(v, k)
-            if not isinstance(v, dict): v = {k:v}
-            result.update(v)
-        return result
-
-    return value
-
-# Sometimes Twitter just stops sending us data on the HTTP connection.
-# In these cases, we'll try up to MAX_TRIES times to read 2048 bytes, and if
-# that fails we bail out.
-MAX_TRIES = 100
-
-def listen(username, password):
-    try:
-        twitter = Twitter(username, password)
-        stream = twitter.connect()
-    except Exception as e:
-        error("There was an error logging in to Twitter:\n%s" % str(e), 2)
-
-    buffer = ""
-    tries = 0
-    while tries < MAX_TRIES:
-        offset = buffer.find("\r\n")
-        if offset != -1:
-            status = buffer[:offset]
-            buffer = buffer[offset+2:]
-            process(status)
-            tries = 0
-            continue # Consume all statuses in buffer before reading more
-        buffer += stream.read(2048)
-        tries += 1
-
-    if tries == MAX_TRIES:
-        error("""Twitter seems to have closed the connection. Make sure
-you don't have any other open instances of the 'twitted' sample app.""", 2)
-
-def output(record):
-    print_record(record)
-
-    for k in sorted(record.keys()):
-        if k.endswith("_str"):
-            continue # Ignore
-
-        v = record[k]
-
-        if v is None:
-            continue # Ignore
-
-        if isinstance(v, list):
-            if len(v) == 0: continue
-            v = ','.join([str(item) for item in v])
-
-        # Field renames
-        k = { 'source': "status_source" }.get(k, k)
-
-        if isinstance(v, str):
-            format = '%s="%s" '
-            v = v.replace('"', "'")
-        else:
-            format = "%s=%r "
-        result = format % (k, v)
-
-        ingest.send(result)
-
-    end = "\r\n---end-status---\r\n"
-    try:
-        ingest.send(end)
-    except:
-        error("There was an error with the TCP connection to Splunk.", 2)
-
-# Print some info to stdout, depending on verbosity level.
-def print_record(record): - if verbose == 0: - return - - if verbose > 1: - pprint(record) # Very chatty - return - - # Otherwise print a nice summary of the record - if 'delete_status_id' in record: - print("delete %d %d" % ( - record['delete_status_id'], - record['delete_status_user_id'])) - else: - print("status %s %d %d" % ( - record['created_at'], - record['id'], - record['user_id'])) - -def process(status): - status = json.loads(status) - record = flatten(status) - output(record) - -def main(): - kwargs = cmdline() - - global verbose - verbose = kwargs['verbose'] - - # Force the owner namespace, if not provided - if 'owner' not in list(kwargs.keys()): - kwargs['owner'] = kwargs['username'] - - if verbose > 0: print("Initializing Splunk ..") - service = client.connect(**kwargs) - - # Create the index if it doesn't exist - if 'twitter' not in service.indexes: - if verbose > 0: print("Creating index 'twitter' ..") - service.indexes.create("twitter") - - # Create the TCP input if it doesn't exist - input_host = kwargs.get("inputhost", DEFAULT_SPLUNK_HOST) - input_port = kwargs.get("inputport", DEFAULT_SPLUNK_PORT) - input_name = str(input_port) - if input_name not in service.inputs: - if verbose > 0: print("Creating input '%s'" % input_name) - service.inputs.create( - input_port, "tcp", index="twitter", sourcetype="twitter") - - global ingest - ingest = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - ingest.connect((input_host, input_port)) - - if verbose > 0: - print("Listening (and sending data to %s:%s).." % (input_host, input_port)) - try: - listen(kwargs['tusername'], kwargs['tpassword']) - except KeyboardInterrupt: - pass - except Exception as e: - error("""There was an error with the connection to Twitter. Make sure -you don't have other running instances of the 'twitted' sample app, and try -again.""", 2) - print(e) - -if __name__ == "__main__": - main() - diff --git a/examples/twitted/reload b/examples/twitted/reload deleted file mode 100755 index f07ff2b7b..000000000 --- a/examples/twitted/reload +++ /dev/null @@ -1,6 +0,0 @@ -#!/bin/bash - -# Reload the twitted app - -../spurl.py /services/apps/local/twitted/_reload - diff --git a/examples/twitted/run b/examples/twitted/run deleted file mode 100755 index ce4324697..000000000 --- a/examples/twitted/run +++ /dev/null @@ -1,4 +0,0 @@ -#!/bin/bash - -./input.py $* - diff --git a/examples/twitted/search b/examples/twitted/search deleted file mode 100755 index 29add4bca..000000000 --- a/examples/twitted/search +++ /dev/null @@ -1,4 +0,0 @@ -#!/bin/bash - -# Top Sources -../search.py "search index=twitter status_source=* | stats count(status_source) as count by status_source | sort -count | head 20" $* diff --git a/examples/twitted/twitted/bin/hashtags.py b/examples/twitted/twitted/bin/hashtags.py deleted file mode 100755 index bd2c02952..000000000 --- a/examples/twitted/twitted/bin/hashtags.py +++ /dev/null @@ -1,191 +0,0 @@ -#!/usr/bin/env python -# -# Copyright 2011-2015 Splunk, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"): you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the -# License for the specific language governing permissions and limitations -# under the License. - -from __future__ import absolute_import -import csv, sys, re -import os - -sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir, os.pardir, os.pardir))) - -from splunklib.six.moves import zip -from splunklib.six.moves import urllib - -# Tees output to a logfile for debugging -class Logger: - def __init__(self, filename, buf = None): - self.log = open(filename, 'w') - self.buf = buf - - def flush(self): - self.log.flush() - - if self.buf is not None: - self.buf.flush() - - def write(self, message): - self.log.write(message) - self.log.flush() - - if self.buf is not None: - self.buf.write(message) - self.buf.flush() - -# Tees input as it is being read, also logging it to a file -class Reader: - def __init__(self, buf, filename = None): - self.buf = buf - if filename is not None: - self.log = open(filename, 'w') - else: - self.log = None - - def __iter__(self): - return self - - def next(self): - return self.readline() - - __next__ = next - - def readline(self): - line = self.buf.readline() - - if not line: - raise StopIteration - - # Log to a file if one is present - if self.log is not None: - self.log.write(line) - self.log.flush() - - # Return to the caller - return line - -def output_results(results, mvdelim = '\n', output = sys.stdout): - """Given a list of dictionaries, each representing - a single result, and an optional list of fields, - output those results to stdout for consumption by the - Splunk pipeline""" - - # We collect all the unique field names, as well as - # convert all multivalue keys to the right form - fields = set() - for result in results: - for key in list(result.keys()): - if(isinstance(result[key], list)): - result['__mv_' + key] = encode_mv(result[key]) - result[key] = mvdelim.join(result[key]) - fields.update(list(result.keys())) - - # convert the fields into a list and create a CSV writer - # to output to stdout - fields = sorted(list(fields)) - - writer = csv.DictWriter(output, fields) - - # Write out the fields, and then the actual results - writer.writerow(dict(list(zip(fields, fields)))) - writer.writerows(results) - -def read_input(buf, has_header = True): - """Read the input from the given buffer (or stdin if no buffer) - is supplied. 
An optional header may be present as well""" - - # Use stdin if there is no supplied buffer - if buf == None: - buf = sys.stdin - - # Attempt to read a header if necessary - header = {} - if has_header: - # Until we get a blank line, read "attr:val" lines, - # setting the values in 'header' - last_attr = None - while True: - line = buf.readline() - - # remove lastcharacter (which is a newline) - line = line[:-1] - - # When we encounter a newline, we are done with the header - if len(line) == 0: - break - - colon = line.find(':') - - # If we can't find a colon, then it might be that we are - # on a new line, and it belongs to the previous attribute - if colon < 0: - if last_attr: - header[last_attr] = header[last_attr] + '\n' + urllib.parse.unquote(line) - else: - continue - - # extract it and set value in settings - last_attr = attr = line[:colon] - val = urllib.parse.unquote(line[colon+1:]) - header[attr] = val - - return buf, header - -def encode_mv(vals): - """For multivalues, values are wrapped in '$' and separated using ';' - Literal '$' values are represented with '$$'""" - s = "" - for val in vals: - val = val.replace('$', '$$') - if len(s) > 0: - s += ';' - s += '$' + val + '$' - - return s - -def main(argv): - stdin_wrapper = Reader(sys.stdin) - buf, settings = read_input(stdin_wrapper, has_header = True) - events = csv.DictReader(buf) - - results = [] - - for event in events: - # For each event, - text = event["text"] - hashtags = set() - - hash_regex = re.compile(r'\s+(#[0-9a-zA-Z+_]+)', re.IGNORECASE) - for hashtag_match in hash_regex.finditer(text): - # Get the hashtag - hashtag = hashtag_match.group(0).strip().lower() - - # Append the hashtag to the list - hashtags.add(hashtag) - - # Now that we have the hashtags, we can add them to our event - hashtags = list(hashtags) - hashtags.sort() - event["hashtags"] = hashtags - - results.append(event) - - # And output it to the next stage of the pipeline - output_results(results) - -if __name__ == "__main__": - try: - main(sys.argv) - except Exception: - import traceback - traceback.print_exc(file=sys.stdout) diff --git a/examples/twitted/twitted/bin/tophashtags.py b/examples/twitted/twitted/bin/tophashtags.py deleted file mode 100755 index 499f9f389..000000000 --- a/examples/twitted/twitted/bin/tophashtags.py +++ /dev/null @@ -1,205 +0,0 @@ -#!/usr/bin/env python -# -# Copyright 2011-2015 Splunk, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"): you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
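For context on the multivalue convention that both of these command scripts rely on: `encode_mv` wraps each value in `$`, joins values with `;`, and escapes a literal `$` as `$$`, and `output_results` emits the encoded form under a `__mv_`-prefixed column. Below is a minimal, self-contained sketch of that convention; the `decode_mv` inverse is hypothetical, included only to illustrate the round trip:

```python
def encode_mv(vals):
    # Wrap each value in '$', escape literal '$' as '$$', join with ';'.
    return ';'.join('$' + val.replace('$', '$$') + '$' for val in vals)

def decode_mv(encoded):
    # Hypothetical inverse, assuming well-formed output of encode_mv:
    # walk the string, unescaping '$$' and splitting on '$;$' boundaries.
    values, current, i = [], '', 1   # start past the opening '$'
    while i < len(encoded) - 1:      # stop before the closing '$'
        if encoded[i] == '$':
            if encoded[i + 1] == '$':    # escaped literal '$'
                current += '$'
                i += 2
            else:                        # '$;$' separator between values
                values.append(current)
                current = ''
                i += 3
        else:
            current += encoded[i]
            i += 1
    values.append(current)
    return values

assert encode_mv(['a', 'b$c']) == '$a$;$b$$c$'
assert decode_mv('$a$;$b$$c$') == ['a', 'b$c']
```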
- -from __future__ import absolute_import -import csv, sys, urllib, re -import os - -sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir, os.pardir, os.pardir))) -from collections import OrderedDict - -from splunklib import six -from splunklib.six.moves import zip -from splunklib.six.moves import urllib - - -# Tees output to a logfile for debugging -class Logger: - def __init__(self, filename, buf = None): - self.log = open(filename, 'w') - self.buf = buf - - def flush(self): - self.log.flush() - - if self.buf is not None: - self.buf.flush() - - def write(self, message): - self.log.write(message) - self.log.flush() - - if self.buf is not None: - self.buf.write(message) - self.buf.flush() - - -# Tees input as it is being read, also logging it to a file -class Reader: - def __init__(self, buf, filename = None): - self.buf = buf - if filename is not None: - self.log = open(filename, 'w') - else: - self.log = None - - def __iter__(self): - return self - - def next(self): - return self.readline() - - __next__ = next - - def readline(self): - line = self.buf.readline() - - if not line: - raise StopIteration - - # Log to a file if one is present - if self.log is not None: - self.log.write(line) - self.log.flush() - - # Return to the caller - return line - - -def output_results(results, mvdelim = '\n', output = sys.stdout): - """Given a list of dictionaries, each representing - a single result, and an optional list of fields, - output those results to stdout for consumption by the - Splunk pipeline""" - - # We collect all the unique field names, as well as - # convert all multivalue keys to the right form - fields = set() - for result in results: - for key in list(result.keys()): - if(isinstance(result[key], list)): - result['__mv_' + key] = encode_mv(result[key]) - result[key] = mvdelim.join(result[key]) - fields.update(list(result.keys())) - - # convert the fields into a list and create a CSV writer - # to output to stdout - fields = sorted(list(fields)) - - writer = csv.DictWriter(output, fields) - - # Write out the fields, and then the actual results - writer.writerow(dict(list(zip(fields, fields)))) - writer.writerows(results) - - -def read_input(buf, has_header = True): - """Read the input from the given buffer (or stdin if no buffer) - is supplied. 
An optional header may be present as well""" - - # Use stdin if there is no supplied buffer - if buf == None: - buf = sys.stdin - - # Attempt to read a header if necessary - header = {} - if has_header: - # Until we get a blank line, read "attr:val" lines, - # setting the values in 'header' - last_attr = None - while True: - line = buf.readline() - - # remove lastcharacter (which is a newline) - line = line[:-1] - - # When we encounter a newline, we are done with the header - if len(line) == 0: - break - - colon = line.find(':') - - # If we can't find a colon, then it might be that we are - # on a new line, and it belongs to the previous attribute - if colon < 0: - if last_attr: - header[last_attr] = header[last_attr] + '\n' + urllib.parse.unquote(line) - else: - continue - - # extract it and set value in settings - last_attr = attr = line[:colon] - val = urllib.parse.unquote(line[colon+1:]) - header[attr] = val - - return buf, header - - -def encode_mv(vals): - """For multivalues, values are wrapped in '$' and separated using ';' - Literal '$' values are represented with '$$'""" - s = "" - for val in vals: - val = val.replace('$', '$$') - if len(s) > 0: - s += ';' - s += '$' + val + '$' - - return s - - -def main(argv): - stdin_wrapper = Reader(sys.stdin) - buf, settings = read_input(stdin_wrapper, has_header = True) - events = csv.DictReader(buf) - - hashtags = OrderedDict() - - for event in events: - # For each event, - text = event["text"] - - hash_regex = re.compile(r'\s+(#[0-9a-zA-Z+_]+)', re.IGNORECASE) - for hashtag_match in hash_regex.finditer(text): - hashtag = hashtag_match.group(0).strip().lower() - - hashtag_count = 0 - if hashtag in hashtags: - hashtag_count = hashtags[hashtag] - - hashtags[hashtag] = hashtag_count + 1 - - num_hashtags = sum(hashtags.values()) - - from decimal import Decimal - results = [] - for k, v in six.iteritems(hashtags): - results.insert(0, { - "hashtag": k, - "count": v, - "percentage": (Decimal(v) / Decimal(num_hashtags)) - }) - - # And output it to the next stage of the pipeline - output_results(results) - - -if __name__ == "__main__": - try: - main(sys.argv) - except Exception: - import traceback - traceback.print_exc(file=sys.stdout) diff --git a/examples/twitted/twitted/default/app.conf b/examples/twitted/twitted/default/app.conf deleted file mode 100644 index 4b55cee8d..000000000 --- a/examples/twitted/twitted/default/app.conf +++ /dev/null @@ -1,13 +0,0 @@ -# -# Splunk app configuration file -# - -[ui] -is_visible = 1 -label = twitted - -[launcher] -author = -description = -version = 1.2 - diff --git a/examples/twitted/twitted/default/commands.conf b/examples/twitted/twitted/default/commands.conf deleted file mode 100644 index df8cf8941..000000000 --- a/examples/twitted/twitted/default/commands.conf +++ /dev/null @@ -1,15 +0,0 @@ -[tophashtags] -filename = tophashtags.py -streaming = false -retainsevents = false -overrides_timeorder = true -enableheader = true -passauth = true - -[hashtags] -filename = hashtags.py -streaming = true -retainsevents = true -overrides_timeorder = true -enableheader = true -passauth = false \ No newline at end of file diff --git a/examples/twitted/twitted/default/data/ui/nav/default.xml b/examples/twitted/twitted/default/data/ui/nav/default.xml deleted file mode 100644 index c2128a6f3..000000000 --- a/examples/twitted/twitted/default/data/ui/nav/default.xml +++ /dev/null @@ -1,18 +0,0 @@ - diff --git a/examples/twitted/twitted/default/indexes.conf b/examples/twitted/twitted/default/indexes.conf deleted file mode 
100644 index d0e759da9..000000000 --- a/examples/twitted/twitted/default/indexes.conf +++ /dev/null @@ -1,4 +0,0 @@ -[twitter] -coldPath = $SPLUNK_DB/twitter/colddb -homePath = $SPLUNK_DB/twitter/db -thawedPath = $SPLUNK_DB/twitter/thaweddb diff --git a/examples/twitted/twitted/default/inputs.conf b/examples/twitted/twitted/default/inputs.conf deleted file mode 100644 index f44fb4ce4..000000000 --- a/examples/twitted/twitted/default/inputs.conf +++ /dev/null @@ -1,8 +0,0 @@ -[tcp://9001] -connection_host = dns -index = twitter -sourcetype = twitter - -[tcp://9002] -index = twitter -sourcetype = twitter diff --git a/examples/twitted/twitted/default/props.conf b/examples/twitted/twitted/default/props.conf deleted file mode 100644 index 13e17c7a7..000000000 --- a/examples/twitted/twitted/default/props.conf +++ /dev/null @@ -1,6 +0,0 @@ -[twitter] -LINE_BREAKER = (\r\n---end-status---\r\n) -CHARSET = UTF-8 -SHOULD_LINEMERGE = false - -REPORT-1 = twitter_text, twitter_htags, twitter_mention diff --git a/examples/twitted/twitted/default/savedsearches.conf b/examples/twitted/twitted/default/savedsearches.conf deleted file mode 100644 index e89691137..000000000 --- a/examples/twitted/twitted/default/savedsearches.conf +++ /dev/null @@ -1,135 +0,0 @@ -[Top Sources] -action.email.reportServerEnabled = 0 -alert.suppress = 0 -alert.track = 0 -displayview = flashtimeline -request.ui_dispatch_view = flashtimeline -search = index=twitter status_source=* | stats count(status_source) as count by status_source | sort -count | head 20 -vsid = gog49lc6 - -[Top Words] -action.email.reportServerEnabled = 0 -alert.track = 1 -displayview = flashtimeline -request.ui_dispatch_view = flashtimeline -search = index=twitter * | rex field=text max_match=1000 "(?\w{3,})" | top 20 word -vsid = gog49lc6 - -[Statuses, verified] -action.email.reportServerEnabled = 0 -alert.track = 1 -displayview = flashtimeline -request.ui_dispatch_view = flashtimeline -search = index=twitter | search user_verified=True -vsid = gog49lc6 - -[Statuses] -action.email.reportServerEnabled = 0 -alert.track = 1 -displayview = flashtimeline -request.ui_dispatch_view = flashtimeline -search = index=twitter -vsid = gog49lc6 - -[Users, most followers] -action.email.reportServerEnabled = 0 -alert.track = 1 -displayview = flashtimeline -request.ui_dispatch_view = flashtimeline -search = index=twitter | dedup user_id | table user_id, user_name, user_screen_name, user_followers_count, user_statuses_count, user_verified | sort -user_followers_count -vsid = gog49lc6 - -[Users, most tweets] -action.email.reportServerEnabled = 0 -alert.track = 1 -displayview = flashtimeline -request.ui_dispatch_view = flashtimeline -search = index=twitter | dedup user_id | table user_id, user_name, user_screen_name, user_followers_count, user_statuses_count, user_verified | sort -user_statuses_count -vsid = gog49lc6 - -[Users, verified, most tweets] -action.email.reportServerEnabled = 0 -alert.track = 1 -displayview = flashtimeline -request.ui_dispatch_view = flashtimeline -search = index=twitter user_verified=True | dedup user_id | table user_id, user_name, user_screen_name, user_followers_count, user_statuses_count, user_verified | sort -user_statuses_count -vsid = gog49lc6 - -[Users, verified, most followers] -action.email.reportServerEnabled = 0 -alert.track = 1 -displayview = flashtimeline -request.ui_dispatch_view = flashtimeline -search = index=twitter user_verified=True | dedup user_id | table user_id, user_name, user_screen_name, user_followers_count, 
user_statuses_count, user_verified | sort -user_followers_count -vsid = gog49lc6 - -[Users, most seen tweets] -action.email.reportServerEnabled = 0 -alert.track = 1 -displayview = flashtimeline -request.ui_dispatch_view = flashtimeline -search = index=twitter | stats count(user_id) as user_statuses_seen by user_id | table user_screen_name, user_statuses_seen, user_statuses_count, user_verified | sort -user_statuses_seen, -user_statuses_count -vsid = gog49lc6 - -[Statuses, most retweeted] -action.email.reportServerEnabled = 0 -alert.track = 1 -displayview = flashtimeline -request.ui_dispatch_view = flashtimeline -search = index=twitter retweet_count>0 | table created_at, retweet_count, user_screen_name, text | sort -retweet_count, -created_at -vsid = gopz0n46 - -[Users, most deletes] -action.email.reportServerEnabled = 0 -alert.track = 1 -displayview = flashtimeline -request.ui_dispatch_view = flashtimeline -search = index=twitter | stats count(delete_status_user_id) as deletes_seen by delete_status_user_id | sort -deletes_seen -vsid = got9p0bd - -[Statuses, real-time] -action.email.reportServerEnabled = 0 -alert.track = 1 -dispatch.earliest_time = rt-1m -dispatch.latest_time = rt -displayview = flashtimeline -request.ui_dispatch_view = flashtimeline -search = index=twitter -vsid = goxlionw - -[Top Words, version 2] -action.email.reportServerEnabled = 0 -alert.track = 1 -displayview = flashtimeline -request.ui_dispatch_view = flashtimeline -search = index=twitter * \ -| rex field=text max_match=1000 "(?\w{3,})" | fields word | mvexpand word \ -| where not (word="and" or word="com" or word="http" or word="that" or word="the" or word="you" or word="with")\ -| top 50 word -vsid = gp1rbo5g - -[Most mentioned] -action.email.reportServerEnabled = 0 -alert.track = 1 -displayview = flashtimeline -request.ui_dispatch_view = flashtimeline -search = index=twitter mention=* | fields mention | mvexpand mention | stats count(mention) as count by mention | sort - count | head 50 -vsid = gp3htyye - -[Popular hashtags] -action.email.reportServerEnabled = 0 -alert.track = 1 -displayview = flashtimeline -request.ui_dispatch_view = flashtimeline -search = index=twitter hashtag=* | fields hashtag | mvexpand hashtag | stats count(hashtag) as count by hashtag | sort - count | head 50 -vsid = gp3hzuqr - -[Top Tags] -action.email.reportServerEnabled = 0 -alert.track = 1 -displayview = flashtimeline -request.ui_dispatch_view = flashtimeline -search = index=twitter * \ -| rex field=text max_match=1000 "(?#\w{1,})" | fields word | mvexpand word \ -| top 50 word -vsid = gpsrhije diff --git a/examples/twitted/twitted/default/transforms.conf b/examples/twitted/twitted/default/transforms.conf deleted file mode 100644 index 15c76f3f4..000000000 --- a/examples/twitted/twitted/default/transforms.conf +++ /dev/null @@ -1,14 +0,0 @@ -[twitter_text] -REGEX = text=\"(?[^"]*) - -[twitter_htags] -SOURCE_KEY = text -MV_ADD = 1 -REGEX = \#(?[^#:\s]+) - -[twitter_mention] -SOURCE_KEY = text -MV_ADD = 1 -REGEX = @(?[^@:\s]+) - - diff --git a/examples/twitted/twitted/default/viewstates.conf b/examples/twitted/twitted/default/viewstates.conf deleted file mode 100644 index 5460d974f..000000000 --- a/examples/twitted/twitted/default/viewstates.conf +++ /dev/null @@ -1,175 +0,0 @@ -[flashtimeline:gog49lc6] -Count_0_8_1.default = 50 -DataOverlay_0_13_0.dataOverlayMode = none -DataOverlay_0_13_0.default = none -DataOverlay_1_14_0.dataOverlayMode = none -DataOverlay_1_14_0.default = none -FieldPicker_0_6_1.fields = user_screen_name,text 
-FieldPicker_0_6_1.sidebarDisplay = true -FlashTimeline_0_5_0.height = 106px -FlashTimeline_0_5_0.minimized = false -MaxLines_0_14_0.default = 10 -MaxLines_0_14_0.maxLines = 10 -RowNumbers_0_13_0.default = true -RowNumbers_0_13_0.displayRowNumbers = true -RowNumbers_1_12_0.default = true -RowNumbers_1_12_0.displayRowNumbers = true -RowNumbers_2_13_0.default = true -RowNumbers_2_13_0.displayRowNumbers = true -Segmentation_0_15_0.default = full -Segmentation_0_15_0.segmentation = full -SoftWrap_0_12_0.enable = True - -[flashtimeline:gopz0n46] -Count_0_8_1.default = 50 -DataOverlay_0_13_0.dataOverlayMode = none -DataOverlay_0_13_0.default = none -DataOverlay_1_14_0.dataOverlayMode = none -DataOverlay_1_14_0.default = none -FieldPicker_0_6_1.fields = retweet_count,text -FieldPicker_0_6_1.sidebarDisplay = true -FlashTimeline_0_5_0.height = 122px -FlashTimeline_0_5_0.minimized = false -MaxLines_0_14_0.default = 10 -MaxLines_0_14_0.maxLines = 10 -RowNumbers_0_13_0.default = true -RowNumbers_0_13_0.displayRowNumbers = true -RowNumbers_1_12_0.default = true -RowNumbers_1_12_0.displayRowNumbers = true -RowNumbers_2_13_0.default = true -RowNumbers_2_13_0.displayRowNumbers = true -Segmentation_0_15_0.default = full -Segmentation_0_15_0.segmentation = full -SoftWrap_0_12_0.enable = True - -[flashtimeline:got9p0bd] -Count_0_8_1.default = 50 -DataOverlay_0_13_0.dataOverlayMode = none -DataOverlay_0_13_0.default = none -DataOverlay_1_14_0.dataOverlayMode = none -DataOverlay_1_14_0.default = none -FieldPicker_0_6_1.fields = user_screen_name,text -FieldPicker_0_6_1.sidebarDisplay = true -FlashTimeline_0_5_0.height = 106px -FlashTimeline_0_5_0.minimized = false -MaxLines_0_14_0.default = 10 -MaxLines_0_14_0.maxLines = 10 -RowNumbers_0_13_0.default = true -RowNumbers_0_13_0.displayRowNumbers = true -RowNumbers_1_12_0.default = true -RowNumbers_1_12_0.displayRowNumbers = true -RowNumbers_2_13_0.default = true -RowNumbers_2_13_0.displayRowNumbers = true -Segmentation_0_15_0.default = full -Segmentation_0_15_0.segmentation = full -SoftWrap_0_12_0.enable = True - -[flashtimeline:goxlionw] -Count_0_8_1.default = 50 -DataOverlay_0_13_0.dataOverlayMode = none -DataOverlay_0_13_0.default = none -DataOverlay_1_14_0.dataOverlayMode = none -DataOverlay_1_14_0.default = none -FieldPicker_0_6_1.fields = user_screen_name,text -FieldPicker_0_6_1.sidebarDisplay = true -FlashTimeline_0_5_0.height = 106px -FlashTimeline_0_5_0.minimized = false -MaxLines_0_14_0.default = 10 -MaxLines_0_14_0.maxLines = 10 -RowNumbers_0_13_0.default = true -RowNumbers_0_13_0.displayRowNumbers = true -RowNumbers_1_12_0.default = true -RowNumbers_1_12_0.displayRowNumbers = true -RowNumbers_2_13_0.default = true -RowNumbers_2_13_0.displayRowNumbers = true -Segmentation_0_15_0.default = full -Segmentation_0_15_0.segmentation = full -SoftWrap_0_12_0.enable = True - -[flashtimeline:gp1rbo5g] -Count_0_8_1.default = 50 -DataOverlay_0_13_0.dataOverlayMode = none -DataOverlay_0_13_0.default = none -DataOverlay_1_14_0.dataOverlayMode = none -DataOverlay_1_14_0.default = none -FieldPicker_0_6_1.fields = user_screen_name,text -FieldPicker_0_6_1.sidebarDisplay = true -FlashTimeline_0_5_0.height = 106px -FlashTimeline_0_5_0.minimized = false -MaxLines_0_14_0.default = 10 -MaxLines_0_14_0.maxLines = 10 -RowNumbers_0_13_0.default = true -RowNumbers_0_13_0.displayRowNumbers = true -RowNumbers_1_12_0.default = true -RowNumbers_1_12_0.displayRowNumbers = true -RowNumbers_2_13_0.default = true -RowNumbers_2_13_0.displayRowNumbers = true -Segmentation_0_15_0.default = 
full -Segmentation_0_15_0.segmentation = full -SoftWrap_0_12_0.enable = True - -[flashtimeline:gp3htyye] -Count_0_8_1.default = 50 -DataOverlay_0_13_0.dataOverlayMode = none -DataOverlay_0_13_0.default = none -DataOverlay_1_14_0.dataOverlayMode = none -DataOverlay_1_14_0.default = none -FieldPicker_0_6_1.fields = user_screen_name,text -FieldPicker_0_6_1.sidebarDisplay = true -FlashTimeline_0_5_0.height = 106px -FlashTimeline_0_5_0.minimized = false -MaxLines_0_14_0.default = 10 -MaxLines_0_14_0.maxLines = 10 -RowNumbers_0_13_0.default = true -RowNumbers_0_13_0.displayRowNumbers = true -RowNumbers_1_12_0.default = true -RowNumbers_1_12_0.displayRowNumbers = true -RowNumbers_2_13_0.default = true -RowNumbers_2_13_0.displayRowNumbers = true -Segmentation_0_15_0.default = full -Segmentation_0_15_0.segmentation = full -SoftWrap_0_12_0.enable = True - -[flashtimeline:gp3hzuqr] -Count_0_8_1.default = 50 -DataOverlay_0_13_0.dataOverlayMode = none -DataOverlay_0_13_0.default = none -DataOverlay_1_14_0.dataOverlayMode = none -DataOverlay_1_14_0.default = none -FieldPicker_0_6_1.fields = user_screen_name,text -FieldPicker_0_6_1.sidebarDisplay = true -FlashTimeline_0_5_0.height = 106px -FlashTimeline_0_5_0.minimized = false -MaxLines_0_14_0.default = 10 -MaxLines_0_14_0.maxLines = 10 -RowNumbers_0_13_0.default = true -RowNumbers_0_13_0.displayRowNumbers = true -RowNumbers_1_12_0.default = true -RowNumbers_1_12_0.displayRowNumbers = true -RowNumbers_2_13_0.default = true -RowNumbers_2_13_0.displayRowNumbers = true -Segmentation_0_15_0.default = full -Segmentation_0_15_0.segmentation = full -SoftWrap_0_12_0.enable = True - -[flashtimeline:gpsrhije] -Count_0_8_1.default = 50 -DataOverlay_0_13_0.dataOverlayMode = none -DataOverlay_0_13_0.default = none -DataOverlay_1_14_0.dataOverlayMode = none -DataOverlay_1_14_0.default = none -FieldPicker_0_6_1.fields = user_screen_name,text -FieldPicker_0_6_1.sidebarDisplay = true -FlashTimeline_0_5_0.height = 106px -FlashTimeline_0_5_0.minimized = false -MaxLines_0_14_0.default = 10 -MaxLines_0_14_0.maxLines = 10 -RowNumbers_0_13_0.default = true -RowNumbers_0_13_0.displayRowNumbers = true -RowNumbers_1_12_0.default = true -RowNumbers_1_12_0.displayRowNumbers = true -RowNumbers_2_13_0.default = true -RowNumbers_2_13_0.displayRowNumbers = true -Segmentation_0_15_0.default = full -Segmentation_0_15_0.segmentation = full -SoftWrap_0_12_0.enable = True diff --git a/examples/twitted/twitted/metadata/default.meta b/examples/twitted/twitted/metadata/default.meta deleted file mode 100644 index ad9ff9361..000000000 --- a/examples/twitted/twitted/metadata/default.meta +++ /dev/null @@ -1,29 +0,0 @@ - -# Application-level permissions - -[] -access = read : [ * ], write : [ admin, power ] - -### EVENT TYPES - -[eventtypes] -export = system - - -### PROPS - -[props] -export = system - - -### TRANSFORMS - -[transforms] -export = system - - -### VIEWSTATES: even normal users should be able to create shared viewstates - -[viewstates] -access = read : [ * ], write : [ * ] -export = system diff --git a/examples/upload.py b/examples/upload.py deleted file mode 100755 index af592b949..000000000 --- a/examples/upload.py +++ /dev/null @@ -1,83 +0,0 @@ -#!/usr/bin/env python -# -# Copyright 2011-2015 Splunk, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"): you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""A command line utility that uploads a file to Splunk for indexing.""" - -from __future__ import absolute_import -from os import path -import sys, os -sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..")) -import splunklib.client as client - -try: - from utils import * -except ImportError: - raise Exception("Add the SDK repository to your PYTHONPATH to run the examples " - "(e.g., export PYTHONPATH=~/splunk-sdk-python.") - -RULES = { - "eventhost": { - 'flags': ["--eventhost"], - 'help': "The event's host value" - }, - "host_regex": { - 'flags': ["--host_regex"], - 'help': "A regex to use to extract the host value from the file path" - }, - "host_segment": { - 'flags': ["--host_segment"], - 'help': "The number of the path segment to use for the host value" - }, - "index": { - 'flags': ["--index"], - 'default': "main", - 'help': "The index name (default main)" - }, - "rename-source": { - 'flags': ["--source"], - 'help': "The event's source value" - }, - "sourcetype": { - 'flags': ["--sourcetype"], - 'help': "The event's sourcetype" - } -} - -def main(argv): - usage = 'usage: %prog [options] *' - opts = parse(argv, RULES, ".env", usage=usage) - - kwargs_splunk = dslice(opts.kwargs, FLAGS_SPLUNK) - service = client.connect(**kwargs_splunk) - - name = opts.kwargs['index'] - if name not in service.indexes: - error("Index '%s' does not exist." % name, 2) - index = service.indexes[name] - - kwargs_submit = dslice(opts.kwargs, - {'eventhost': "host"}, 'source', 'host_regex', - 'host_segment', 'rename-source', 'sourcetype') - - for arg in opts.args: - # Note that it's possible the file may not exist (if you had a typo), - # but it only needs to exist on the Splunk server, which we can't verify. - fullpath = path.abspath(arg) - index.upload(fullpath, **kwargs_submit) - -if __name__ == "__main__": - main(sys.argv[1:]) - diff --git a/tests/searchcommands/test_searchcommands_app.py b/tests/searchcommands/test_searchcommands_app.py deleted file mode 100755 index faf14abd8..000000000 --- a/tests/searchcommands/test_searchcommands_app.py +++ /dev/null @@ -1,422 +0,0 @@ -#!/usr/bin/env python -# coding=utf-8 -# -# Copyright © 2011-2015 Splunk, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"): you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -# P2 [ ] TODO: Add integration tests that, for example, verify we can use the SearchCommand.service object. -# We verify that the service object is constructed correctly, but we've got no automated tests that verify we can use -# the service object. - -# P2 [ ] TODO: Use saved dispatch dir to mock tests that depend on its contents (?) 
-# To make records more generally useful to application developers we should provide/demonstrate how to mock -# self.metadata, self.search_results_info, and self.service. Such mocks might be based on archived dispatch directories. - - -from __future__ import absolute_import, division, print_function, unicode_literals - -from collections import namedtuple -from splunklib.six.moves import cStringIO as StringIO -from datetime import datetime - -from splunklib.six.moves import filter as ifilter -from splunklib.six.moves import map as imap -from splunklib.six.moves import zip as izip - -from subprocess import PIPE, Popen -from splunklib import six - -try: - from unittest2 import main, skipUnless, TestCase -except ImportError: - from unittest import main, skipUnless, TestCase - -import gzip -import json -import csv -import io -import os -import sys - -try: - from tests.searchcommands import project_root -except ImportError: - # Python 2.6 - pass - -import pytest - -def pypy(): - try: - process = Popen(['pypy', '--version'], stderr=PIPE, stdout=PIPE) - except OSError: - return False - else: - process.communicate() - return process.returncode == 0 - - -class Recording(object): - - def __init__(self, path): - - self._dispatch_dir = path + '.dispatch_dir' - self._search = None - - if os.path.exists(self._dispatch_dir): - with io.open(os.path.join(self._dispatch_dir, 'request.csv')) as ifile: - reader = csv.reader(ifile) - for name, value in izip(next(reader), next(reader)): - if name == 'search': - self._search = value - break - assert self._search is not None - - splunk_cmd = path + '.splunk_cmd' - - try: - with io.open(splunk_cmd, 'r') as f: - self._args = f.readline().encode().split(None, 5) # ['splunk', 'cmd', , , ] - except IOError as error: - if error.errno != 2: - raise - self._args = ['splunk', 'cmd', 'python', None] - - self._input_file = path + '.input.gz' - - self._output_file = path + '.output' - - if six.PY3 and os.path.isfile(self._output_file + '.py3'): - self._output_file = self._output_file + '.py3' - - # Remove the "splunk cmd" portion - self._args = self._args[2:] - - def get_args(self, command_path): - self._args[1] = command_path - return self._args - - @property - def dispatch_dir(self): - return self._dispatch_dir - - @property - def input_file(self): - return self._input_file - - @property - def output_file(self): - return self._output_file - - @property - def search(self): - return self._search - - -class Recordings(object): - - def __init__(self, name, action, phase, protocol_version): - - basedir = Recordings._prefix + six.text_type(protocol_version) - - if not os.path.isdir(basedir): - raise ValueError('Directory "{}" containing recordings for protocol version {} does not exist'.format( - protocol_version, basedir)) - - self._basedir = basedir - self._name = '.'.join(ifilter(lambda part: part is not None, (name, action, phase))) - - def __iter__(self): - - basedir = self._basedir - name = self._name - - iterator = imap( - lambda directory: Recording(os.path.join(basedir, directory, name)), ifilter( - lambda filename: os.path.isdir(os.path.join(basedir, filename)), os.listdir(basedir))) - - return iterator - - _prefix = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'recordings', 'scpv') - -@pytest.mark.smoke -class TestSearchCommandsApp(TestCase): - app_root = os.path.join(project_root, 'examples', 'searchcommands_app', 'build', 'searchcommands_app') - - def setUp(self): - if not os.path.isdir(TestSearchCommandsApp.app_root): - build_command = 
os.path.join(project_root, 'examples', 'searchcommands_app', 'setup.py build') - self.skipTest("You must build the searchcommands_app by running " + build_command) - TestCase.setUp(self) - - @pytest.mark.skipif(six.PY3, reason="Python 2 does not treat Unicode as words for regex, so Python 3 has broken fixtures") - def test_countmatches_as_unit(self): - expected, output, errors, exit_status = self._run_command('countmatches', action='getinfo', protocol=1) - self.assertEqual(0, exit_status, msg=six.text_type(errors)) - self.assertEqual('', errors, msg=six.text_type(errors)) - self._compare_csv_files_time_sensitive(expected, output) - - expected, output, errors, exit_status = self._run_command('countmatches', action='execute', protocol=1) - self.assertEqual(0, exit_status, msg=six.text_type(errors)) - - self.assertEqual('', errors, msg=six.text_type(errors)) - self._compare_csv_files_time_sensitive(expected, output) - - expected, output, errors, exit_status = self._run_command('countmatches') - self.assertEqual(0, exit_status, msg=six.text_type(errors)) - self.assertEqual('', errors, msg=six.text_type(errors)) - self._compare_chunks(expected, output) - - return - - def test_generatehello_as_unit(self): - - expected, output, errors, exit_status = self._run_command('generatehello', action='getinfo', protocol=1) - self.assertEqual(0, exit_status, msg=six.text_type(errors)) - self.assertEqual('', errors, msg=six.text_type(errors)) - self._compare_csv_files_time_sensitive(expected, output) - - expected, output, errors, exit_status = self._run_command('generatehello', action='execute', protocol=1) - self.assertEqual(0, exit_status, msg=six.text_type(errors)) - self.assertEqual('', errors, msg=six.text_type(errors)) - self._compare_csv_files_time_insensitive(expected, output) - - expected, output, errors, exit_status = self._run_command('generatehello') - self.assertEqual(0, exit_status, msg=six.text_type(errors)) - self.assertEqual('', errors, msg=six.text_type(errors)) - self._compare_chunks(expected, output, time_sensitive=False) - - return - - def test_sum_as_unit(self): - - expected, output, errors, exit_status = self._run_command('sum', action='getinfo', phase='reduce', protocol=1) - self.assertEqual(0, exit_status, msg=six.text_type(errors)) - self.assertEqual('', errors, msg=six.text_type(errors)) - self._compare_csv_files_time_sensitive(expected, output) - - expected, output, errors, exit_status = self._run_command('sum', action='getinfo', phase='map', protocol=1) - self.assertEqual(0, exit_status, msg=six.text_type(errors)) - self.assertEqual('', errors, msg=six.text_type(errors)) - self._compare_csv_files_time_sensitive(expected, output) - - expected, output, errors, exit_status = self._run_command('sum', action='execute', phase='map', protocol=1) - self.assertEqual(0, exit_status, msg=six.text_type(errors)) - self.assertEqual('', errors, msg=six.text_type(errors)) - self._compare_csv_files_time_sensitive(expected, output) - - expected, output, errors, exit_status = self._run_command('sum', action='execute', phase='reduce', protocol=1) - self.assertEqual(0, exit_status, msg=six.text_type(errors)) - self.assertEqual('', errors, msg=six.text_type(errors)) - self._compare_csv_files_time_sensitive(expected, output) - - expected, output, errors, exit_status = self._run_command('sum', phase='map') - self.assertEqual(0, exit_status, msg=six.text_type(errors)) - self.assertEqual('', errors, msg=six.text_type(errors)) - self._compare_chunks(expected, output) - - expected, output, errors, 
exit_status = self._run_command('sum', phase='reduce') - self.assertEqual(0, exit_status, msg=six.text_type(errors)) - self.assertEqual('', errors, msg=six.text_type(errors)) - self._compare_chunks(expected, output) - - return - - def assertInfoEqual(self, output, expected): - reader = csv.reader(StringIO(output)) - self.assertEqual([], next(reader)) - fields = next(reader) - values = next(reader) - self.assertRaises(StopIteration, reader.next) - output = dict(izip(fields, values)) - - reader = csv.reader(StringIO(expected)) - self.assertEqual([], next(reader)) - fields = next(reader) - values = next(reader) - self.assertRaises(StopIteration, reader.next) - expected = dict(izip(fields, values)) - - self.assertDictEqual(expected, output) - - def _compare_chunks(self, expected, output, time_sensitive=True): - expected = expected.strip() - output = output.strip() - - if time_sensitive: - compare_csv_files = self._compare_csv_files_time_sensitive - else: - compare_csv_files = self._compare_csv_files_time_insensitive - - chunks_1 = self._load_chunks(StringIO(expected)) - chunks_2 = self._load_chunks(StringIO(output)) - - self.assertEqual(len(chunks_1), len(chunks_2)) - n = 0 - - for chunk_1, chunk_2 in izip(chunks_1, chunks_2): - self.assertDictEqual( - chunk_1.metadata, chunk_2.metadata, - 'Chunk {0}: metadata error: "{1}" != "{2}"'.format(n, chunk_1.metadata, chunk_2.metadata)) - compare_csv_files(chunk_1.body, chunk_2.body) - n += 1 - - return - - def _compare_csv_files_time_insensitive(self, expected, output): - - skip_first_row = expected[0:2] == '\r\n' - expected = StringIO(expected) - output = StringIO(output) - line_number = 1 - - if skip_first_row: - self.assertEqual(expected.readline(), output.readline()) - line_number += 1 - - expected = csv.DictReader(expected) - output = csv.DictReader(output) - - for expected_row in expected: - output_row = next(output) - - try: - timestamp = float(output_row['_time']) - datetime.fromtimestamp(timestamp) - except BaseException as error: - self.fail(error) - else: - output_row['_time'] = expected_row['_time'] - - self.assertDictEqual( - expected_row, output_row, 'Error on line {0}: expected {1}, not {2}'.format( - line_number, expected_row, output_row)) - - line_number += 1 - - if six.PY2: - self.assertRaises(StopIteration, output.next) - - return - - def _compare_csv_files_time_sensitive(self, expected, output): - self.assertEqual(len(expected), len(output)) - - skip_first_row = expected[0:2] == '\r\n' - expected = StringIO(expected) - output = StringIO(output) - line_number = 1 - - if skip_first_row: - self.assertEqual(expected.readline(), output.readline()) - line_number += 1 - - expected = csv.DictReader(expected) - output = csv.DictReader(output) - - for expected_row in expected: - output_row = next(output) - self.assertDictEqual( - expected_row, output_row, 'Error on line {0}: expected {1}, not {2}'.format( - line_number, expected_row, output_row)) - line_number += 1 - - if six.PY2: - self.assertRaises(StopIteration, output.next) - - return - - def _get_search_command_path(self, name): - path = os.path.join( - project_root, 'examples', 'searchcommands_app', 'build', 'searchcommands_app', 'bin', name + '.py') - self.assertTrue(os.path.isfile(path)) - return path - - def _load_chunks(self, ifile): - import re - - pattern = re.compile(r'chunked 1.0,(?P\d+),(?P\d+)(\n)?') - decoder = json.JSONDecoder() - - chunks = [] - - while True: - - line = ifile.readline() - - if len(line) == 0: - break - - match = pattern.match(line) - if match is None: - 
continue - - metadata_length = int(match.group('metadata_length')) - metadata = ifile.read(metadata_length) - metadata = decoder.decode(metadata) - - body_length = int(match.group('body_length')) - body = ifile.read(body_length) if body_length > 0 else '' - - chunks.append(TestSearchCommandsApp._Chunk(metadata, body)) - - return chunks - - def _run_command(self, name, action=None, phase=None, protocol=2): - - command = self._get_search_command_path(name) - - # P2 [ ] TODO: Test against the version of Python that ships with the version of Splunk used to produce each - # recording - # At present we use whatever version of splunk, if any, happens to be on PATH - - # P2 [ ] TODO: Examine the contents of the app and splunklib log files (?) - - expected, output, errors, process = None, None, None, None - - for recording in Recordings(name, action, phase, protocol): - compressed_file = recording.input_file - uncompressed_file = os.path.splitext(recording.input_file)[0] - try: - with gzip.open(compressed_file, 'rb') as ifile: - with io.open(uncompressed_file, 'wb') as ofile: - b = bytearray(io.DEFAULT_BUFFER_SIZE) - n = len(b) - while True: - count = ifile.readinto(b) - if count == 0: - break - if count < n: - ofile.write(b[:count]) - break - ofile.write(b) - - with io.open(uncompressed_file, 'rb') as ifile: - env = os.environ.copy() - env['PYTHONPATH'] = os.pathsep.join(sys.path) - process = Popen(recording.get_args(command), stdin=ifile, stderr=PIPE, stdout=PIPE, env=env) - output, errors = process.communicate() - - with io.open(recording.output_file, 'rb') as ifile: - expected = ifile.read() - finally: - os.remove(uncompressed_file) - - return six.ensure_str(expected), six.ensure_str(output), six.ensure_str(errors), process.returncode - - _Chunk = namedtuple('Chunk', 'metadata body') - - -if __name__ == "__main__": - main() diff --git a/tests/test_examples.py b/tests/test_examples.py deleted file mode 100755 index e2057ffb7..000000000 --- a/tests/test_examples.py +++ /dev/null @@ -1,343 +0,0 @@ -#!/usr/bin/env python -# -# Copyright 2011-2015 Splunk, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"): you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
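For context on what `_load_chunks` above parses: under search-command protocol version 2, each chunk exchanged with splunkd is a header line of the form `chunked 1.0,<metadata_length>,<body_length>`, followed by exactly that many bytes of JSON metadata and that many bytes of CSV body. A minimal sketch of the framing follows; `write_chunk` is a hypothetical helper mirroring the regex and JSON decoding in the deleted test, and lengths are character counts here for simplicity:

```python
import io
import json

def write_chunk(ofile, metadata, body=''):
    # Frame one chunk: a header line carrying the two payload lengths,
    # then the JSON metadata and the CSV body, back to back.
    encoded = json.dumps(metadata)
    ofile.write('chunked 1.0,%d,%d\n' % (len(encoded), len(body)))
    ofile.write(encoded)
    ofile.write(body)

buf = io.StringIO()
write_chunk(buf, {'action': 'execute', 'finished': True}, 'a,b\r\n1,2\r\n')
buf.seek(0)

# Read it back the way _load_chunks does: parse the header line, then
# consume the metadata and body by their declared lengths.
header = buf.readline()                    # 'chunked 1.0,<m_len>,<b_len>\n'
_, m_len, b_len = header.strip().split(',')
metadata = json.loads(buf.read(int(m_len)))
body = buf.read(int(b_len))
print(metadata, repr(body))
```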
- -from __future__ import absolute_import -import os -from subprocess import PIPE, Popen -import time -import sys - -import io - -try: - import unittest -except ImportError: - import unittest2 as unittest - -import pytest - -from tests import testlib - -import splunklib.client as client -from splunklib import six - -DIR_PATH = os.path.dirname(os.path.realpath(__file__)) -EXAMPLES_PATH = os.path.join(DIR_PATH, '..', 'examples') - -def check_multiline(testcase, first, second, message=None): - """Assert that two multi-line strings are equal.""" - testcase.assertTrue(isinstance(first, six.string_types), - 'First argument is not a string') - testcase.assertTrue(isinstance(second, six.string_types), - 'Second argument is not a string') - # Unix-ize Windows EOL - first = first.replace("\r", "") - second = second.replace("\r", "") - if first != second: - testcase.fail("Multiline strings are not equal: %s" % message) - - -# Run the given python script and return its exit code. -def run(script, stdin=None, stdout=PIPE, stderr=None): - process = start(script, stdin, stdout, stderr) - process.communicate() - return process.wait() - - -# Start the given python script and return the corresponding process object. -# The script can be specified as either a string or arg vector. In either case -# it will be prefixed to invoke python explicitly. -def start(script, stdin=None, stdout=PIPE, stderr=None): - if isinstance(script, str): - script = script.split() - script = ["python"] + script - return Popen(script, stdin=stdin, stdout=stdout, stderr=stderr, cwd=EXAMPLES_PATH) - - -# Rudimentary sanity check for each of the examples -class ExamplesTestCase(testlib.SDKTestCase): - def check_commands(self, *args): - for arg in args: - result = run(arg) - self.assertEqual(result, 0, '"{0}" run failed with result code {1}'.format(arg, result)) - self.service.login() # Because a Splunk restart invalidates our session - - def setUp(self): - super(ExamplesTestCase, self).setUp() - - # Ignore result, it might already exist - run("index.py create sdk-tests") - - @pytest.mark.skipif(six.PY3, reason="Async needs work to support Python 3") - def test_async(self): - result = run("async/async.py sync") - self.assertEqual(result, 0) - - try: - # Only try running the async version of the test if eventlet - # is present on the system - import eventlet - result = run("async/async.py async") - self.assertEqual(result, 0) - except: - pass - - def test_binding1(self): - result = run("binding1.py") - self.assertEqual(result, 0) - - def test_conf(self): - try: - conf = self.service.confs['server'] - if 'SDK-STANZA' in conf: - conf.delete("SDK-STANZA") - except Exception as e: - pass - - try: - self.check_commands( - "conf.py --help", - "conf.py", - "conf.py viewstates", - 'conf.py --app=search --owner=admin viewstates', - "conf.py create server SDK-STANZA", - "conf.py create server SDK-STANZA testkey=testvalue", - "conf.py delete server SDK-STANZA") - finally: - conf = self.service.confs['server'] - if 'SDK-STANZA' in conf: - conf.delete('SDK-STANZA') - - def test_event_types(self): - self.check_commands( - "event_types.py --help", - "event_types.py") - - def test_fired_alerts(self): - self.check_commands( - "fired_alerts.py --help", - "fired_alerts.py") - - def test_follow(self): - self.check_commands("follow.py --help") - - def test_handlers(self): - self.check_commands( - "handlers/handler_urllib2.py", - "handlers/handler_debug.py", - "handlers/handler_certs.py", - "handlers/handler_certs.py --ca_file=handlers/cacert.pem", - 
"handlers/handler_proxy.py --help") - - # Run the cert handler example with a bad cert file, should error. - result = run( - "handlers/handlers_certs.py --ca_file=handlers/cacert.bad.pem", - stderr=PIPE) - self.assertNotEqual(result, 0) - - # The proxy handler example requires that there be a proxy available - # to relay requests, so we spin up a local proxy using the proxy - # script included with the sample. - - # Assumes that tiny-proxy.py is in the same directory as the sample - - #This test seems to be flaky - # if six.PY2: # Needs to be fixed PY3 - # process = start("handlers/tiny-proxy.py -p 8080", stderr=PIPE) - # try: - # time.sleep(5) # Wait for proxy to finish initializing - # result = run("handlers/handler_proxy.py --proxy=localhost:8080") - # self.assertEqual(result, 0) - # finally: - # process.kill() - - # Run it again without the proxy and it should fail. - result = run( - "handlers/handler_proxy.py --proxy=localhost:80801", stderr=PIPE) - self.assertNotEqual(result, 0) - - def test_index(self): - self.check_commands( - "index.py --help", - "index.py", - "index.py list", - "index.py list sdk-tests", - "index.py disable sdk-tests", - "index.py enable sdk-tests", - "index.py clean sdk-tests") - return - - def test_info(self): - self.check_commands( - "info.py --help", - "info.py") - - def test_inputs(self): - self.check_commands( - "inputs.py --help", - "inputs.py") - - def test_job(self): - self.check_commands( - "job.py --help", - "job.py", - "job.py list", - "job.py list @0") - - def test_kvstore(self): - self.check_commands( - "kvstore.py --help", - "kvstore.py") - - def test_loggers(self): - self.check_commands( - "loggers.py --help", - "loggers.py") - - def test_oneshot(self): - self.check_commands(["oneshot.py", "search * | head 10"]) - - def test_saved_searches(self): - self.check_commands( - "saved_searches.py --help", - "saved_searches.py") - - def test_saved_search(self): - temp_name = testlib.tmpname() - self.check_commands( - "saved_search/saved_search.py", - ["saved_search/saved_search.py", "--help"], - ["saved_search/saved_search.py", "list-all"], - ["saved_search/saved_search.py", "--operation", "create", "--name", temp_name, "--search", "search * | head 5"], - ["saved_search/saved_search.py", "list", "--name", temp_name], - ["saved_search/saved_search.py", "list", "--operation", "delete", "--name", temp_name], - ["saved_search/saved_search.py", "list", "--name", "Errors in the last 24 hours"] - ) - - def test_search(self): - self.check_commands( - "search.py --help", - ["search.py", "search * | head 10"], - ["search.py", - "search * | head 10 | stats count", '--output_mode=csv']) - - def test_spcmd(self): - self.check_commands( - "spcmd.py --help", - "spcmd.py -e\"get('authentication/users')\"") - - def test_spurl(self): - self.check_commands( - "spurl.py --help", - "spurl.py", - "spurl.py /services", - "spurl.py apps/local") - - def test_submit(self): - self.check_commands("submit.py --help") - - def test_upload(self): - # Note: test must run on machine where splunkd runs, - # or a failure is expected - if "SPLUNK_HOME" not in os.environ: - self.skipTest("SPLUNK_HOME is not set, skipping") - file_to_upload = os.path.expandvars(os.environ.get("INPUT_EXAMPLE_UPLOAD", "./upload.py")) - self.check_commands( - "upload.py --help", - "upload.py --index=sdk-tests %s" % file_to_upload) - - # The following tests are for the Analytics example - def test_analytics(self): - # We have to add the current path to the PYTHONPATH, - # otherwise the import doesn't work quite 
right - sys.path.append(EXAMPLES_PATH) - import analytics - - # Create a tracker - tracker = analytics.input.AnalyticsTracker( - "sdk-test", self.opts.kwargs, index = "sdk-test") - - service = client.connect(**self.opts.kwargs) - - # Before we start, we'll clean the index - index = service.indexes["sdk-test"] - index.clean() - - tracker.track("test_event", distinct_id="abc123", foo="bar", abc="123") - tracker.track("test_event", distinct_id="123abc", abc="12345") - - # Wait until the events get indexed - self.assertEventuallyTrue(lambda: index.refresh()['totalEventCount'] == '2', timeout=200) - - # Now, we create a retriever to retrieve the events - retriever = analytics.output.AnalyticsRetriever( - "sdk-test", self.opts.kwargs, index = "sdk-test") - - # Assert applications - applications = retriever.applications() - self.assertEqual(len(applications), 1) - self.assertEqual(applications[0]["name"], "sdk-test") - self.assertEqual(applications[0]["count"], 2) - - # Assert events - events = retriever.events() - self.assertEqual(len(events), 1) - self.assertEqual(events[0]["name"], "test_event") - self.assertEqual(events[0]["count"], 2) - - # Assert properties - expected_properties = { - "abc": 2, - "foo": 1 - } - properties = retriever.properties("test_event") - self.assertEqual(len(properties), len(expected_properties)) - for prop in properties: - name = prop["name"] - count = prop["count"] - self.assertTrue(name in list(expected_properties.keys())) - self.assertEqual(count, expected_properties[name]) - - # Assert property values - expected_property_values = { - "123": 1, - "12345": 1 - } - values = retriever.property_values("test_event", "abc") - self.assertEqual(len(values), len(expected_property_values)) - for value in values: - name = value["name"] - count = value["count"] - self.assertTrue(name in list(expected_property_values.keys())) - self.assertEqual(count, expected_property_values[name]) - - # Assert event over time - over_time = retriever.events_over_time( - time_range = analytics.output.TimeRange.MONTH) - self.assertEqual(len(over_time), 1) - self.assertEqual(len(over_time["test_event"]), 1) - self.assertEqual(over_time["test_event"][0]["count"], 2) - - # Now that we're done, we'll clean the index - index.clean() - -if __name__ == "__main__": - os.chdir("../examples") - try: - import unittest2 as unittest - except ImportError: - import unittest - unittest.main() diff --git a/tests/test_utils.py b/tests/test_utils.py index 51080a29d..5b6b712ca 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -4,7 +4,7 @@ try: from utils import * except ImportError: - raise Exception("Add the SDK repository to your PYTHONPATH to run the examples " + raise Exception("Add the SDK repository to your PYTHONPATH to run the test cases " "(e.g., export PYTHONPATH=~/splunk-sdk-python.") diff --git a/tests/testlib.py b/tests/testlib.py index ae3246a21..4a99e026a 100644 --- a/tests/testlib.py +++ b/tests/testlib.py @@ -24,7 +24,6 @@ # Run the test suite on the SDK without installing it. 
 sys.path.insert(0, '../')
-sys.path.insert(0, '../examples')
 
 import splunklib.client as client
 from time import sleep
 
@@ -38,7 +37,7 @@
 try:
     from utils import parse
 except ImportError:
-    raise Exception("Add the SDK repository to your PYTHONPATH to run the examples "
+    raise Exception("Add the SDK repository to your PYTHONPATH to run the test cases "
                     "(e.g., export PYTHONPATH=~/splunk-sdk-python).")
 
 import os
diff --git a/tox.ini b/tox.ini
index 00ad22b8d..8b8bcb1b5 100644
--- a/tox.ini
+++ b/tox.ini
@@ -25,7 +25,6 @@ application-import-names = splunk-sdk-python
 [testenv]
 passenv = LANG
 setenv = SPLUNK_HOME=/opt/splunk
-    INPUT_EXAMPLE_UPLOAD=/opt/splunk/var/log/splunk/splunkd_ui_access.log
 allowlist_externals = make
 deps = pytest
     pytest-cov
diff --git a/utils/__init__.py b/utils/__init__.py
index b1bb77a50..bd0900c3d 100644
--- a/utils/__init__.py
+++ b/utils/__init__.py
@@ -12,7 +12,7 @@
 # License for the specific language governing permissions and limitations
 # under the License.
 
-"""Utility module shared by the SDK examples & unit tests."""
+"""Utility module shared by the SDK unit tests."""
 
 from __future__ import absolute_import
 from utils.cmdopts import *
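A closing note on the `utils` package these final hunks retarget: the unit tests keep using its rules-based command-line parser, the same pattern the removed example scripts followed. Below is a brief usage sketch based on the calls visible in the deleted code; the `--index` rule is illustrative only:

```python
import sys

from utils import parse  # the shared cmdopts-based parser the unit tests keep

# Illustrative rule set in the shape the removed scripts used: each entry
# names its flags plus an optional default, type, and help text.
RULES = {
    'index': {
        'flags': ["--index"],
        'default': "main",
        'help': "The index name (default main)",
    },
}

def main(argv):
    # parse() layers settings from the .env file under the command-line
    # flags; the result exposes named options as .kwargs and leftover
    # positional arguments as .args.
    opts = parse(argv, RULES, ".env")
    print(opts.kwargs['index'], opts.args)

if __name__ == "__main__":
    main(sys.argv[1:])
```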