diff --git a/.env b/.env new file mode 100644 index 000000000..0d5fabf11 --- /dev/null +++ b/.env @@ -0,0 +1,16 @@ +# Splunk host (default: localhost) +host=localhost +# Splunk admin port (default: 8089) +port=8089 +# Splunk username +username=admin +# Splunk password +password=changed! +# Access scheme (default: https) +scheme=https +# Your version of Splunk (default: 6.2) +version=8.0 +# Bearer token for authentication +#bearerToken="" +# Session key for authentication +#sessionKey="" diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 450736ec8..d588537b3 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -3,7 +3,7 @@ on: push: branches: - master - workflow_dispatch: {} + workflow_dispatch: { } jobs: find_version: @@ -21,6 +21,13 @@ jobs: - name: Get version id: get-version run: python -c 'import splunklib; print("::set-output name=version::%s" % splunklib.__version__)' + - name: Install tox + run: pip install tox + - name: Generate API docs + run: | + rm -rf ./docs/_build + tox -e docs + cd ./docs/_build/html && zip -r ../docs_html.zip . -x ".*" -x "__MACOSX" tag_version: needs: find_version name: Tag Version @@ -32,7 +39,7 @@ jobs: repo-token: ${{ secrets.GITHUB_TOKEN }} tag: ${{ needs.find_version.outputs.version }} release: - needs: [find_version, tag_version] + needs: [ find_version, tag_version ] name: Create Release runs-on: ubuntu-latest steps: @@ -52,6 +59,11 @@ jobs: **TODO: Insert CHANGELOG.md contents here.** draft: false prerelease: false + - name: Upload Artifact + uses: actions/upload-artifact@v3 + with: + name: apidocs + path: docs/_build/docs_html.zip publish: needs: release name: Deploy Release to PyPI diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 71ed1e667..42713a686 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -1,7 +1,7 @@ name: Python CI on: - [push, pull_request] + [ push, pull_request ] jobs: build: @@ -11,7 +11,7 @@ jobs: matrix: os: - ubuntu-latest - python: [2.7, 3.7] + python: [ 2.7, 3.7 ] splunk-version: - "8.0" - "latest" @@ -36,15 +36,6 @@ jobs: uses: actions/setup-python@v2 with: python-version: ${{ matrix.python }} - - name: Create .splunkrc file - run: | - cd ~ - echo host=localhost > .splunkrc - echo port=8089 >> .splunkrc - echo username=admin >> .splunkrc - echo password=changed! 
>> .splunkrc - echo scheme=https >> .splunkrc - echo version=${{ matrix.splunk }} >> .splunkrc - name: Install tox run: pip install tox - name: Test Execution diff --git a/CHANGELOG.md b/CHANGELOG.md index 7edf338d6..78d7edbc2 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,42 @@ # Splunk Enterprise SDK for Python Changelog +## Version 1.6.19 + +### New features and APIs +* [#441](https://github.com/splunk/splunk-sdk-python/pull/441) Added JSONResultsReader and deprecated ResultsReader + * Pre-requisite: Query parameter 'output_mode' must be set to 'json' + * Improves performance by approximately 80-90% + * ResultsReader is deprecated and will be removed in future releases (NOTE: Please migrate to JSONResultsReader) +* [#437](https://github.com/splunk/splunk-sdk-python/pull/437) Added setup_logging() method in splunklib for logging +* [#426](https://github.com/splunk/splunk-sdk-python/pull/426) Added new github_commit modular input example +* [#392](https://github.com/splunk/splunk-sdk-python/pull/392) Break out search argument to option parsing for v2 custom search commands +* [#384](https://github.com/splunk/splunk-sdk-python/pull/384) Added Float parameter validator for custom search commands +* [#371](https://github.com/splunk/splunk-sdk-python/pull/371) Modular inputs now preserve the 'app' context + +### Bug fixes +* [#439](https://github.com/splunk/splunk-sdk-python/pull/439) Modified POST method debug log to not log sensitive body/data +* [#431](https://github.com/splunk/splunk-sdk-python/pull/431) Added distsearch.conf to Stream Search Command examples [ [issue#418](https://github.com/splunk/splunk-sdk-python/issues/418) ] +* [#419](https://github.com/splunk/splunk-sdk-python/pull/419) Fixed HEC endpoint issue [ [issue#345](https://github.com/splunk/splunk-sdk-python/issues/345) ] +* [#416](https://github.com/splunk/splunk-sdk-python/pull/416) Removed strip() method in load_value() method from data.py file [ [issue#400](https://github.com/splunk/splunk-sdk-python/issues/400) ] +* [#148](https://github.com/splunk/splunk-sdk-python/pull/148) Fixed infinite loop caused by identical entity names + +### Minor changes +* [#440](https://github.com/splunk/splunk-sdk-python/pull/440) GitHub release workflow modified to generate docs +* [#430](https://github.com/splunk/splunk-sdk-python/pull/430) Fixed indentation in README +* [#429](https://github.com/splunk/splunk-sdk-python/pull/429) Documented how to access modular input metadata +* [#427](https://github.com/splunk/splunk-sdk-python/pull/427) Replaced .splunkrc with .env file in tests and examples +* [#424](https://github.com/splunk/splunk-sdk-python/pull/424) Float validator test fix +* [#423](https://github.com/splunk/splunk-sdk-python/pull/423) Python 3 compatibility for ResponseReader.__str__() +* [#422](https://github.com/splunk/splunk-sdk-python/pull/422) Removed ordereddict and all references to it +* [#421](https://github.com/splunk/splunk-sdk-python/pull/421) Updated README.md +* [#387](https://github.com/splunk/splunk-sdk-python/pull/387) Updated filter.py +* [#331](https://github.com/splunk/splunk-sdk-python/pull/331) Fixed a couple of warnings spotted when running Python 2.7 tests +* [#330](https://github.com/splunk/splunk-sdk-python/pull/330) client: use six.string_types instead of basestring +* [#329](https://github.com/splunk/splunk-sdk-python/pull/329) client: remove outdated comment in Index.submit +* [#262](https://github.com/splunk/splunk-sdk-python/pull/262) Properly add parameters to the request based on the request method +*
[#237](https://github.com/splunk/splunk-sdk-python/pull/237) Don't output close tags if you haven't written a start tag +* [#149](https://github.com/splunk/splunk-sdk-python/pull/149) "handlers" stanza missing in examples/searchcommands_template/default/logging.conf + ## Version 1.6.18 ### Bug fixes diff --git a/Makefile b/Makefile index 233978781..2810c6aec 100644 --- a/Makefile +++ b/Makefile @@ -56,16 +56,16 @@ test_smoke_no_app: @echo "$(ATTN_COLOR)==> test_smoke_no_app $(NO_COLOR)" @tox -e py27,py37 -- -m "smoke and not app" -.PHONY: splunkrc -splunkrc: - @echo "$(ATTN_COLOR)==> splunkrc $(NO_COLOR)" - @echo "To make a .splunkrc:" - @echo " [SPLUNK_INSTANCE_JSON] | python scripts/build-splunkrc.py ~/.splunkrc" - -.PHONY: splunkrc_default -splunkrc_default: - @echo "$(ATTN_COLOR)==> splunkrc_default $(NO_COLOR)" - @python scripts/build-splunkrc.py ~/.splunkrc +.PHONY: env +env: + @echo "$(ATTN_COLOR)==> env $(NO_COLOR)" + @echo "To make a .env:" + @echo " [SPLUNK_INSTANCE_JSON] | python scripts/build-env.py" + +.PHONY: env_default +env_default: + @echo "$(ATTN_COLOR)==> env_default $(NO_COLOR)" + @python scripts/build-env.py .PHONY: up up: diff --git a/README.md b/README.md index 1436ad240..77dedf876 100644 --- a/README.md +++ b/README.md @@ -3,7 +3,7 @@ # The Splunk Enterprise Software Development Kit for Python -#### Version 1.6.18 +#### Version 1.6.19 The Splunk Enterprise Software Development Kit (SDK) for Python contains library code and examples designed to enable developers to build applications using the Splunk platform. @@ -60,7 +60,6 @@ You'll need `docker` and `docker-compose` to get up and running using this metho ``` make up SPLUNK_VERSION=8.0 make wait_up -make splunkrc_default make test make down ``` @@ -75,7 +74,7 @@ The SDK command-line examples require a common set of arguments that specify the #### Using username/password ```python import splunklib.client as client - service = client.connect(host=, username=, password=, autoLogin=True) +service = client.connect(host=, username=, password=, autologin=True) ``` #### Using bearer token @@ -91,13 +90,13 @@ service = client.connect(host=, token=, autologin=True) ``` ### -#### Create a .splunkrc convenience file +#### Update a .env file -To connect to Splunk Enterprise, many of the SDK examples and unit tests take command-line arguments that specify values for the host, port, and login credentials for Splunk Enterprise. For convenience during development, you can store these arguments as key-value pairs in a text file named **.splunkrc**. Then, the SDK examples and unit tests use the values from the **.splunkrc** file when you don't specify them. +To connect to Splunk Enterprise, many of the SDK examples and unit tests take command-line arguments that specify values for the host, port, and login credentials for Splunk Enterprise. For convenience during development, you can store these arguments as key-value pairs in a **.env** file. Then, the SDK examples and unit tests use the values from the **.env** file when you don't specify them. ->**Note**: Storing login credentials in the **.splunkrc** file is only for convenience during development. This file isn't part of the Splunk platform and shouldn't be used for storing user credentials for production. And, if you're at all concerned about the security of your credentials, enter them at the command line rather than saving them in this file. +>**Note**: Storing login credentials in the **.env** file is only for convenience during development. 
This file isn't part of the Splunk platform and shouldn't be used for storing user credentials for production. And, if you're at all concerned about the security of your credentials, enter them at the command line rather than saving them in this file. -To use this convenience file, create a text file with the following format: +Here is an example of a .env file: # Splunk Enterprise host (default: localhost) host=localhost @@ -106,27 +105,15 @@ To use this convenience file, create a text file with the following format: # Splunk Enterprise username username=admin # Splunk Enterprise password - password=changeme + password=changed! # Access scheme (default: https) scheme=https # Your version of Splunk Enterprise version=8.0 - -Save the file as **.splunkrc** in the current user's home directory. - -* For example on OS X, save the file as: - - ~/.splunkrc - -* On Windows, save the file as: - - C:\Users\currentusername\.splunkrc - - You might get errors in Windows when you try to name the file because ".splunkrc" appears to be a nameless file with an extension. You can use the command line to create this file by going to the **C:\Users\\<currentusername>** directory and entering the following command: - - Notepad.exe .splunkrc - - Click **Yes**, then continue creating the file. + # Bearer token for authentication + #bearerToken= + # Session key for authentication + #sessionKey= #### Run the examples Using Session key python examplename.py --sessionKey="" -If you saved your login credentials in the **.splunkrc** file, you can omit those arguments: +If you saved your login credentials in the **.env** file, you can omit those arguments: python examplename.py @@ -212,19 +199,48 @@ class CustomStreamingCommand(StreamingCommand): Do ```python @Configuration() - class GeneratorTest(GeneratingCommand): - def generate(self): - yield self.gen_record(_time=time.time(), one=1) - yield self.gen_record(_time=time.time(), two=2) +class GeneratorTest(GeneratingCommand): + def generate(self): + yield self.gen_record(_time=time.time(), one=1) + yield self.gen_record(_time=time.time(), two=2) ``` Don't ```python @Configuration() - class GeneratorTest(GeneratingCommand): - def generate(self): - yield {'_time': time.time(), 'one': 1} - yield {'_time': time.time(), 'two': 2} +class GeneratorTest(GeneratingCommand): + def generate(self): + yield {'_time': time.time(), 'one': 1} + yield {'_time': time.time(), 'two': 2} +``` + +### Access modular input app metadata +* In the stream_events() method, we can access the modular input app's metadata from the InputDefinition object +* See the [GitHub Commits](https://github.com/splunk/splunk-sdk-python/blob/develop/examples/github_commits/bin/github_commits.py) modular input example for reference. +```python + def stream_events(self, inputs, ew): + # other code + + # Access metadata (like server_host, server_uri, etc.) of the modular input app from the InputDefinition object + # here 'inputs' is an InputDefinition object + server_host = inputs.metadata["server_host"] + server_uri = inputs.metadata["server_uri"] + + # Get the checkpoint directory out of the modular input's metadata + checkpoint_dir = inputs.metadata["checkpoint_dir"] +``` + +#### Optional: Set up logging for splunklib ++ The default level is WARNING, which means that only log events at this level and above will be visible ++ To change the logging level, we can call the setup_logging() method and pass the logging level as an argument.
++ Optional: we can also pass a log format and a date format string as method arguments to modify the default format + +```python +import logging +from splunklib import setup_logging + +# To see debug and above level logs +setup_logging(logging.DEBUG) ``` ### Changelog diff --git a/docker-compose.yml b/docker-compose.yml index 6885cfd5f..84c427072 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -14,6 +14,8 @@ services: - ./splunklib:/opt/splunk/etc/apps/github_forks/lib/splunklib - ./examples/random_numbers:/opt/splunk/etc/apps/random_numbers - ./splunklib:/opt/splunk/etc/apps/random_numbers/lib/splunklib + - ./examples/github_commits:/opt/splunk/etc/apps/github_commits + - ./splunklib:/opt/splunk/etc/apps/github_commits/lib/splunklib - ./examples/searchcommands_app/package:/opt/splunk/etc/apps/searchcommands_app - ./splunklib:/opt/splunk/etc/apps/searchcommands_app/lib/splunklib - ./examples/twitted/twitted:/opt/splunk/etc/apps/twitted diff --git a/docs/searchcommands.rst b/docs/searchcommands.rst index 281f755ff..e70abf49f 100644 --- a/docs/searchcommands.rst +++ b/docs/searchcommands.rst @@ -88,6 +88,10 @@ splunklib.searchcommands :members: :inherited-members: +.. autoclass:: Float + :members: + :inherited-members: + .. autoclass:: RegularExpression :members: :inherited-members: diff --git a/examples/analytics/input.py b/examples/analytics/input.py index 93432adb8..1bbd1db98 100755 --- a/examples/analytics/input.py +++ b/examples/analytics/input.py @@ -102,7 +102,7 @@ def main(): argv = sys.argv[1:] - splunk_opts = utils.parse(argv, {}, ".splunkrc", usage=usage) + splunk_opts = utils.parse(argv, {}, ".env", usage=usage) tracker = AnalyticsTracker("cli_app", splunk_opts.kwargs) #tracker.track("test_event", "abc123", foo="bar", bar="foo") diff --git a/examples/analytics/output.py b/examples/analytics/output.py index 07e0753b0..cbbb697f5 100755 --- a/examples/analytics/output.py +++ b/examples/analytics/output.py @@ -152,7 +152,7 @@ def main(): argv = sys.argv[1:] - opts = utils.parse(argv, {}, ".splunkrc", usage=usage) + opts = utils.parse(argv, {}, ".env", usage=usage) retriever = AnalyticsRetriever(opts.args[0], opts.kwargs) #events = retriever.events() diff --git a/examples/analytics/server.py b/examples/analytics/server.py index f4b849f76..a1235e52e 100755 --- a/examples/analytics/server.py +++ b/examples/analytics/server.py @@ -146,7 +146,7 @@ def application(name): def main(): argv = sys.argv[1:] - opts = utils.parse(argv, {}, ".splunkrc") + opts = utils.parse(argv, {}, ".env") global splunk_opts splunk_opts = opts.kwargs diff --git a/examples/async/async.py b/examples/async/async.py index ececa8989..097e50b3c 100755 --- a/examples/async/async.py +++ b/examples/async/async.py @@ -51,7 +51,7 @@ def main(argv): usage = "async.py " # Parse the command line args. - opts = parse(argv, {}, ".splunkrc") + opts = parse(argv, {}, ".env") # We have to see if we got either the "sync" or # "async" command line arguments.
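For context: the docs/searchcommands.rst hunk above registers the Float validator added in PR #384. A minimal sketch of a custom search command option built on it — the command, option, and field names here are illustrative, not part of this changeset:

```python
import sys

from splunklib.searchcommands import \
    dispatch, StreamingCommand, Configuration, Option, validators


@Configuration()
class ScaleCommand(StreamingCommand):
    """Hypothetical example: scales a numeric field of each event by a float factor."""

    # The Float validator converts and validates the option value at parse time,
    # raising a clear error for non-numeric input.
    factor = Option(require=True, validate=validators.Float())

    def stream(self, events):
        for event in events:
            # 'value' and 'scaled' are illustrative field names
            event['scaled'] = float(event.get('value', 0)) * self.factor
            yield event


dispatch(ScaleCommand, sys.argv, sys.stdin, sys.stdout, __name__)
```

Invoked from SPL as, e.g., `... | scale factor=2.5`, assuming the command is registered in the app's commands.conf.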
diff --git a/examples/binding1.py b/examples/binding1.py index 1dae4f927..19c850879 100755 --- a/examples/binding1.py +++ b/examples/binding1.py @@ -52,7 +52,7 @@ def search(self, query, **kwargs): return self.context.post("search/jobs/export", search=query, **kwargs) def main(argv): - opts = parse(argv, {}, ".splunkrc") + opts = parse(argv, {}, ".env") context = connect(**opts.kwargs) service = Service(context) assert service.apps().status == 200 diff --git a/examples/conf.py b/examples/conf.py index 33d9755ef..f4163be80 100755 --- a/examples/conf.py +++ b/examples/conf.py @@ -151,7 +151,7 @@ def main(): commands = ['create', 'delete', 'list'] # parse args, connect and setup - opts = parse(argv, {}, ".splunkrc", usage=usage) + opts = parse(argv, {}, ".env", usage=usage) service = connect(**opts.kwargs) program = Program(service) diff --git a/examples/dashboard/feed.py b/examples/dashboard/feed.py index 38f5fc0a2..e61f1ba72 100755 --- a/examples/dashboard/feed.py +++ b/examples/dashboard/feed.py @@ -171,7 +171,7 @@ def iterate(job): def main(argv): # Parse the command line args. - opts = parse(argv, {}, ".splunkrc") + opts = parse(argv, {}, ".env") # Connect to Splunk service = client.connect(**opts.kwargs) diff --git a/examples/event_types.py b/examples/event_types.py index eec68fa07..c7e17d123 100755 --- a/examples/event_types.py +++ b/examples/event_types.py @@ -30,7 +30,7 @@ "(e.g., export PYTHONPATH=~/splunk-sdk-python.") def main(): - opts = parse(sys.argv[1:], {}, ".splunkrc") + opts = parse(sys.argv[1:], {}, ".env") service = connect(**opts.kwargs) for item in service.event_types: diff --git a/examples/explorer/README.md b/examples/explorer/README.md index 2e5093837..e51d9a8d4 100644 --- a/examples/explorer/README.md +++ b/examples/explorer/README.md @@ -8,7 +8,7 @@ To run, simply execute: ./explorer.py -It will pick up all relevant values from your .splunkrc, or you can pass them +It will pick up all relevant values from your .env, or you can pass them in on the command line. You can see help by adding `--help` to the execution.
The API Explorer will open up a browser window that will show you a drop down diff --git a/examples/explorer/explorer.py b/examples/explorer/explorer.py index be3dc3279..62ebf85eb 100755 --- a/examples/explorer/explorer.py +++ b/examples/explorer/explorer.py @@ -43,7 +43,7 @@ def main(argv): }, } - opts = utils.parse(argv, redirect_port_args, ".splunkrc", usage=usage) + opts = utils.parse(argv, redirect_port_args, ".env", usage=usage) args = [("scheme", opts.kwargs["scheme"]), ("host", opts.kwargs["host"]), diff --git a/examples/export/export.py b/examples/export/export.py index 06b433f5f..3664a7691 100755 --- a/examples/export/export.py +++ b/examples/export/export.py @@ -320,7 +320,7 @@ def export(options, service): def main(): """ main entry """ - options = parse(sys.argv[1:], CLIRULES, ".splunkrc") + options = parse(sys.argv[1:], CLIRULES, ".env") if options.kwargs['omode'] not in OUTPUT_MODES: print("output mode must be one of %s, found %s" % (OUTPUT_MODES, diff --git a/examples/fired_alerts.py b/examples/fired_alerts.py index c70352f5d..e736ea167 100755 --- a/examples/fired_alerts.py +++ b/examples/fired_alerts.py @@ -30,7 +30,7 @@ "(e.g., export PYTHONPATH=~/splunk-sdk-python.") def main(): - opts = parse(sys.argv[1:], {}, ".splunkrc") + opts = parse(sys.argv[1:], {}, ".env") service = connect(**opts.kwargs) for group in service.fired_alerts: diff --git a/examples/follow.py b/examples/follow.py index 96652e5f3..cbb559deb 100755 --- a/examples/follow.py +++ b/examples/follow.py @@ -42,13 +42,13 @@ def follow(job, count, items): job.refresh() continue stream = items(offset+1) - for event in results.ResultsReader(stream): + for event in results.JSONResultsReader(stream): pprint(event) offset = total def main(): usage = "usage: follow.py " - opts = utils.parse(sys.argv[1:], {}, ".splunkrc", usage=usage) + opts = utils.parse(sys.argv[1:], {}, ".env", usage=usage) if len(opts.args) != 1: utils.error("Search expression required", 2) @@ -72,10 +72,10 @@ def main(): if job['reportSearch'] is not None: # Is it a transforming search? 
count = lambda: int(job['numPreviews']) - items = lambda _: job.preview() + items = lambda _: job.preview(output_mode='json') else: count = lambda: int(job['eventCount']) - items = lambda offset: job.events(offset=offset) + items = lambda offset: job.events(offset=offset, output_mode='json') try: follow(job, count, items) diff --git a/examples/genevents.py b/examples/genevents.py index b717c2ae2..8b9b2d3bf 100755 --- a/examples/genevents.py +++ b/examples/genevents.py @@ -113,7 +113,7 @@ def main(): print("must supply an index name") sys.exit(1) - opts = parse(argv, RULES, ".splunkrc", usage=usage) + opts = parse(argv, RULES, ".env", usage=usage) service = connect(**opts.kwargs) if opts.kwargs['ingest'] not in INGEST_TYPE: diff --git a/examples/get_job.py b/examples/get_job.py index 073917185..3d2568154 100755 --- a/examples/get_job.py +++ b/examples/get_job.py @@ -33,7 +33,7 @@ "(e.g., export PYTHONPATH=~/splunk-sdk-python.") def main(argv): - opts = parse(argv, {}, ".splunkrc") + opts = parse(argv, {}, ".env") service = client.connect(**opts.kwargs) # Execute a simple search, and store the sid diff --git a/examples/github_commits/README.md b/examples/github_commits/README.md new file mode 100644 index 000000000..fe7832c5e --- /dev/null +++ b/examples/github_commits/README.md @@ -0,0 +1,13 @@ +splunk-sdk-python github_commits example +======================================== + +This app provides an example of a modular input that pulls down commit data from GitHub and creates events for each commit, which are then streamed to Splunk, based on the owner and repo_name provided by the user during setup of the input. + +To run this example locally, run `SPLUNK_VERSION=latest docker compose up -d` from the root of this repository, which will mount this example at `/opt/splunk/etc/apps/github_commits` and the latest version of splunklib at `/opt/splunk/etc/apps/github_commits/lib/splunklib` within the `splunk` container. + +Once the Docker container is up and healthy, log into the Splunk UI and set up a new `Github Commits` input by visiting this page: http://localhost:8000/en-US/manager/github_commits/datainputstats and selecting the "Add new..." button next to Local Inputs > Github Commits. Enter values for the GitHub repository owner and repo_name, for example owner = `splunk`, repo_name = `splunk-sdk-python`. +(Optional) Provide a `token` if using a private repository and/or to avoid GitHub's API limits. To get a GitHub API token, visit the [GitHub settings page](https://github.com/settings/tokens/new) and make sure the repo and public_repo scopes are selected. + +NOTE: If no events appear, the script is likely not running properly; see https://docs.splunk.com/Documentation/SplunkCloud/latest/AdvancedDev/ModInputsDevTools for more details on debugging the modular input using the command line and relevant logs. + +Once the input is created, you should see events when running the following search: `source="github_commits://*"`. The events should contain commit data from the given GitHub repository. diff --git a/examples/github_commits/README/inputs.conf.spec b/examples/github_commits/README/inputs.conf.spec new file mode 100644 index 000000000..156e60a4d --- /dev/null +++ b/examples/github_commits/README/inputs.conf.spec @@ -0,0 +1,6 @@ +[github_commits://] +*This example modular input retrieves GitHub commits and indexes them in Splunk.
+ +owner = +repo_name = +token = diff --git a/examples/github_commits/bin/github_commits.py b/examples/github_commits/bin/github_commits.py new file mode 100644 index 000000000..5581b9897 --- /dev/null +++ b/examples/github_commits/bin/github_commits.py @@ -0,0 +1,272 @@ +#!/usr/bin/env python +# +# Copyright 2021 Splunk, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"): you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +from __future__ import absolute_import + +import os +import re +import sys +import json +# NOTE: splunklib must exist within github_commits/lib/splunklib for this +# example to run! To run this locally use `SPLUNK_VERSION=latest docker compose up -d` +# from the root of this repo which mounts this example and the latest splunklib +# code together at /opt/splunk/etc/apps/github_commits +from datetime import datetime + +sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..", "lib")) + +from splunklib.modularinput import * +from splunklib import six +from splunklib.six.moves import http_client + + +class MyScript(Script): + """All modular inputs should inherit from the abstract base class Script + from splunklib.modularinput.script. + They must override the get_scheme and stream_events functions, and, + if the scheme returned by get_scheme has Scheme.use_external_validation + set to True, the validate_input function. + """ + + def get_scheme(self): + """When Splunk starts, it looks for all the modular inputs defined by + its configuration, and tries to run them with the argument --scheme. + Splunkd expects the modular inputs to print a description of the + input in XML on stdout. The modular input framework takes care of all + the details of formatting XML and printing it. The user need only + override get_scheme and return a new Scheme object. + + :return: scheme, a Scheme object + """ + # Splunk will display "Github Commits" to users for this input + scheme = Scheme("Github Commits") + + scheme.description = "Streams events of commits in the specified Github repository (must be public, unless setting a token)." + # If you set external validation to True, without overriding validate_input, + # the script will accept anything as valid. Generally you only need external + # validation if there are relationships you must maintain among the + # parameters, such as requiring min to be less than max, + # or you need to check that some resource is reachable or valid (as this + # example does by querying the GitHub API). + # Otherwise, Splunk lets you specify a validation string for each argument + # and will run validation internally using that string. + scheme.use_external_validation = True + scheme.use_single_instance = False # Set to false so an input can have an optional interval parameter. + + owner_argument = Argument("owner") + owner_argument.title = "Owner" + owner_argument.data_type = Argument.data_type_string + owner_argument.description = "Github user or organization that created the repository."
+ owner_argument.required_on_create = True + # If you are not using external validation, you would add something like: + # + # scheme.validation = "owner==splunk" + scheme.add_argument(owner_argument) + + repo_name_argument = Argument("repo_name") + repo_name_argument.title = "Repo Name" + repo_name_argument.data_type = Argument.data_type_string + repo_name_argument.description = "Name of the Github repository." + repo_name_argument.required_on_create = True + scheme.add_argument(repo_name_argument) + + token_argument = Argument("token") + token_argument.title = "Token" + token_argument.data_type = Argument.data_type_string + token_argument.description = "(Optional) A Github API access token. Required for private repositories (the token must have the 'repo' and 'public_repo' scopes enabled). Recommended to avoid Github's API limit, especially if setting an interval." + token_argument.required_on_create = False + token_argument.required_on_edit = False + scheme.add_argument(token_argument) + + return scheme + + def validate_input(self, validation_definition): + """In this example we are using external validation to verify that the Github + repository exists. If validate_input does not raise an Exception, the input + is assumed to be valid. Otherwise it prints the exception as an error message + when telling splunkd that the configuration is invalid. + + When using external validation, after splunkd calls the modular input with + --scheme to get a scheme, it calls it again with --validate-arguments for + each instance of the modular input in its configuration files, feeding XML + on stdin to the modular input to do validation. It is called the same way + whenever a modular input's configuration is edited. + + :param validation_definition: a ValidationDefinition object + """ + # Get the values of the parameters, and construct a URL for the Github API + + owner = validation_definition.parameters["owner"] + repo_name = validation_definition.parameters["repo_name"] + token = None + if "token" in validation_definition.parameters: + token = validation_definition.parameters["token"] + + # Call Github to retrieve repo information + res = _get_github_commits(owner, repo_name, 1, 1, token) + + # If we get any kind of message, that's a bad sign. + if "message" in res: + raise ValueError("An error occurred while fetching commits: " + res["message"]) + elif len(res) == 1 and "sha" in res[0]: + pass + else: + raise ValueError("Expected only the latest commit, instead found " + str(len(res)) + " commits.") + + def stream_events(self, inputs, ew): + """This function handles all the action: Splunk calls this modular input + without arguments, streams XML describing the inputs to stdin, and waits + for XML on stdout describing events. + + If you set use_single_instance to True on the scheme in get_scheme, it + will pass all the instances of this input to a single instance of this + script.
+ + :param inputs: an InputDefinition object + :param ew: an EventWriter object + """ + + # Go through each input for this modular input + for input_name, input_item in six.iteritems(inputs.inputs): + # Get fields from the InputDefinition object + owner = input_item["owner"] + repo_name = input_item["repo_name"] + token = None + if "token" in input_item: + token = input_item["token"] + + ''' + Access metadata (like server_host, server_uri, etc.) of the modular input app from the InputDefinition object; + here 'inputs' is an InputDefinition object + server_host = inputs.metadata["server_host"] + server_uri = inputs.metadata["server_uri"] + ''' + # Get the checkpoint directory out of the modular input's metadata + checkpoint_dir = inputs.metadata["checkpoint_dir"] + + checkpoint_file_path = os.path.join(checkpoint_dir, owner + "_" + repo_name + ".txt") + checkpoint_file_new_contents = "" + error_found = False + + # Set the temporary contents of the checkpoint file to an empty string + checkpoint_file_contents = "" + + try: + # Read SHA values from the checkpoint file, if it exists + file = open(checkpoint_file_path, 'r') + checkpoint_file_contents = file.read() + file.close() + except: + # If there's an exception, assume the file doesn't exist + # Create the checkpoint file with an empty string + file = open(checkpoint_file_path, "a") + file.write("") + file.close() + + per_page = 100 # The maximum per page value supported by the Github API. + page = 1 + + while True: + # Get the next page of commits from the Github API + res = _get_github_commits(owner, repo_name, per_page, page, token) + if len(res) == 0: + break + + file = open(checkpoint_file_path, "a") + + for record in res: + if error_found: + break + + # If the file exists and doesn't contain the sha, or if the file doesn't exist. + if checkpoint_file_contents.find(record["sha"] + "\n") < 0: + try: + _stream_commit(ew, owner, repo_name, record) + # Append this commit to the string we'll write at the end + checkpoint_file_new_contents += record["sha"] + "\n" + except: + error_found = True + file.write(checkpoint_file_new_contents) + + # We had an error, die.
+ return + + file.write(checkpoint_file_new_contents) + file.close() + + page += 1 + + +def _get_display_date(date): + month_strings = ["Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec"] + date_format = "%Y-%m-%d %H:%M:%S" + date = datetime.strptime(date, date_format) + + hours = date.hour + if hours < 10: + hours = "0" + str(hours) + + mins = date.minute + if mins < 10: + mins = "0" + str(mins) + + return "{month} {day}, {year} - {hour}:{minute} {period}".format(month=month_strings[date.month - 1], day=date.day, + year=date.year, hour=hours, minute=mins, + period="AM" if date.hour < 12 else "PM") + + +def _get_github_commits(owner, repo_name, per_page=1, page=1, token=None): + # Read the response from the Github API, then parse the JSON data into an object + repo_path = "/repos/%s/%s/commits?per_page=%d&page=%d" % (owner, repo_name, per_page, page) + connection = http_client.HTTPSConnection('api.github.com') + headers = { + 'Content-type': 'application/json', + 'User-Agent': 'splunk-sdk-python' + } + if token: + headers['Authorization'] = 'token ' + token + connection.request('GET', repo_path, headers=headers) + response = connection.getresponse() + body = response.read().decode() + return json.loads(body) + + +def _stream_commit(ew, owner, repo_name, commitData): + json_data = { + "sha": commitData["sha"], + "api_url": commitData["url"], + "url": "https://github.com/" + owner + "/" + repo_name + "/commit/" + commitData["sha"] + } + commit = commitData["commit"] + + # At this point, assumed checkpoint doesn't exist. + json_data["message"] = re.sub("\n|\r", " ", commit["message"]) + json_data["author"] = commit["author"]["name"] + json_data["rawdate"] = commit["author"]["date"] + commit_date = re.sub("T|Z", " ", commit["author"]["date"]).strip() + json_data["displaydate"] = _get_display_date(commit_date) + + # Create an Event object, and set its fields + event = Event() + event.stanza = repo_name + event.sourceType = "github_commits" + event.data = json.dumps(json_data) + + # Tell the EventWriter to write this event + ew.write_event(event) + + +if __name__ == "__main__": + sys.exit(MyScript().run(sys.argv)) diff --git a/examples/github_commits/default/app.conf b/examples/github_commits/default/app.conf new file mode 100644 index 000000000..14086d5a2 --- /dev/null +++ b/examples/github_commits/default/app.conf @@ -0,0 +1,11 @@ +[install] +is_configured = 0 + +[ui] +is_visible = 1 +label = GitHub Commits Modular Input + +[launcher] +author=Splunk +description=This example modular input retrieves GitHub commits and indexes them in Splunk. 
+version = 1.0 diff --git a/examples/github_forks/bin/github_forks.py b/examples/github_forks/bin/github_forks.py index 5ffa4e409..46b42a81b 100755 --- a/examples/github_forks/bin/github_forks.py +++ b/examples/github_forks/bin/github_forks.py @@ -130,6 +130,10 @@ def stream_events(self, inputs, ew): owner = input_item["owner"] repo_name = input_item["repo_name"] + # Hint: API auth required?, get a secret from passwords.conf + # self.service.namespace["app"] = input_item["__app"] + # api_token = self.service.storage_passwords["github_api_token"].clear_password + # Get the fork count from the Github API jsondata = _get_github_repos(owner, repo_name) fork_count = jsondata["forks_count"] diff --git a/examples/handlers/handler_certs.py b/examples/handlers/handler_certs.py index e97e45f44..7140cd651 100755 --- a/examples/handlers/handler_certs.py +++ b/examples/handlers/handler_certs.py @@ -114,7 +114,7 @@ def request(url, message, **kwargs): return request -opts = utils.parse(sys.argv[1:], RULES, ".splunkrc") +opts = utils.parse(sys.argv[1:], RULES, ".env") ca_file = opts.kwargs['ca_file'] service = client.connect(handler=handler(ca_file), **opts.kwargs) pprint([app.name for app in service.apps]) diff --git a/examples/handlers/handler_debug.py b/examples/handlers/handler_debug.py index 1ed4b6334..383428ae4 100755 --- a/examples/handlers/handler_debug.py +++ b/examples/handlers/handler_debug.py @@ -41,6 +41,6 @@ def request(url, message, **kwargs): return response return request -opts = utils.parse(sys.argv[1:], {}, ".splunkrc") +opts = utils.parse(sys.argv[1:], {}, ".env") service = client.connect(handler=handler(), **opts.kwargs) pprint([app.name for app in service.apps]) diff --git a/examples/handlers/handler_proxy.py b/examples/handlers/handler_proxy.py index dbf36457d..eff371541 100755 --- a/examples/handlers/handler_proxy.py +++ b/examples/handlers/handler_proxy.py @@ -80,7 +80,7 @@ def handler(proxy): urllib.request.install_opener(opener) return request -opts = utils.parse(sys.argv[1:], RULES, ".splunkrc") +opts = utils.parse(sys.argv[1:], RULES, ".env") proxy = opts.kwargs['proxy'] try: service = client.connect(handler=handler(proxy), **opts.kwargs) diff --git a/examples/handlers/handler_urllib2.py b/examples/handlers/handler_urllib2.py index 359dabc0b..d81d66d59 100755 --- a/examples/handlers/handler_urllib2.py +++ b/examples/handlers/handler_urllib2.py @@ -53,7 +53,7 @@ def request(url, message, **kwargs): 'body': BytesIO(response.read()) } -opts = utils.parse(sys.argv[1:], {}, ".splunkrc") +opts = utils.parse(sys.argv[1:], {}, ".env") service = client.connect(handler=request, **opts.kwargs) pprint([app.name for app in service.apps]) diff --git a/examples/index.py b/examples/index.py index 9260e88d7..0c8da974f 100755 --- a/examples/index.py +++ b/examples/index.py @@ -183,7 +183,7 @@ def main(): options = argv[:index] command = argv[index:] - opts = parse(options, {}, ".splunkrc", usage=usage, epilog=HELP_EPILOG) + opts = parse(options, {}, ".env", usage=usage, epilog=HELP_EPILOG) service = connect(**opts.kwargs) program = Program(service) program.run(command) diff --git a/examples/info.py b/examples/info.py index da60aeaa8..e54349d4c 100755 --- a/examples/info.py +++ b/examples/info.py @@ -30,7 +30,7 @@ "(e.g., export PYTHONPATH=~/splunk-sdk-python.") if __name__ == "__main__": - opts = parse(sys.argv[1:], {}, ".splunkrc") + opts = parse(sys.argv[1:], {}, ".env") service = client.connect(**opts.kwargs) content = service.info diff --git a/examples/inputs.py b/examples/inputs.py index 
7c6436817..be77d02d5 100755 --- a/examples/inputs.py +++ b/examples/inputs.py @@ -30,7 +30,7 @@ "(e.g., export PYTHONPATH=~/splunk-sdk-python.") def main(): - opts = parse(sys.argv[1:], {}, ".splunkrc") + opts = parse(sys.argv[1:], {}, ".env") service = connect(**opts.kwargs) for item in service.inputs: diff --git a/examples/job.py b/examples/job.py index 257281e4d..8e51ba6a7 100755 --- a/examples/job.py +++ b/examples/job.py @@ -267,7 +267,7 @@ def main(): options = argv[:index] command = argv[index:] - opts = parse(options, {}, ".splunkrc", usage=usage, epilog=HELP_EPILOG) + opts = parse(options, {}, ".env", usage=usage, epilog=HELP_EPILOG) service = connect(**opts.kwargs) program = Program(service) program.run(command) diff --git a/examples/kvstore.py b/examples/kvstore.py index 7ea2cd6f4..2ca32e5a9 100644 --- a/examples/kvstore.py +++ b/examples/kvstore.py @@ -30,7 +30,7 @@ "(e.g., export PYTHONPATH=~/splunk-sdk-python.") def main(): - opts = parse(sys.argv[1:], {}, ".splunkrc") + opts = parse(sys.argv[1:], {}, ".env") opts.kwargs["owner"] = "nobody" opts.kwargs["app"] = "search" service = connect(**opts.kwargs) diff --git a/examples/loggers.py b/examples/loggers.py index 2d88b8969..df71af09e 100755 --- a/examples/loggers.py +++ b/examples/loggers.py @@ -32,7 +32,7 @@ def main(argv): usage = "usage: %prog [options]" - opts = parse(argv, {}, ".splunkrc", usage=usage) + opts = parse(argv, {}, ".env", usage=usage) service = client.connect(**opts.kwargs) for logger in service.loggers: diff --git a/examples/oneshot.py b/examples/oneshot.py index 9c28ff0e4..8429aedfb 100755 --- a/examples/oneshot.py +++ b/examples/oneshot.py @@ -32,21 +32,21 @@ "(e.g., export PYTHONPATH=~/splunk-sdk-python.") def pretty(response): - reader = results.ResultsReader(response) + reader = results.JSONResultsReader(response) for result in reader: if isinstance(result, dict): pprint(result) def main(): usage = "usage: oneshot.py " - opts = utils.parse(sys.argv[1:], {}, ".splunkrc", usage=usage) + opts = utils.parse(sys.argv[1:], {}, ".env", usage=usage) if len(opts.args) != 1: utils.error("Search expression required", 2) search = opts.args[0] service = connect(**opts.kwargs) socket.setdefaulttimeout(None) - response = service.jobs.oneshot(search) + response = service.jobs.oneshot(search, output_mode='json') pretty(response) diff --git a/examples/results.py b/examples/results.py index 9c0f18751..e18e8f567 100755 --- a/examples/results.py +++ b/examples/results.py @@ -17,18 +17,21 @@ """A script that reads XML search results from stdin and pretty-prints them back to stdout. 
The script is designed to be used with the search.py example, eg: './search.py "search 404" | ./results.py'""" - + from __future__ import absolute_import from pprint import pprint import sys, os + sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..")) import splunklib.results as results + def pretty(): - reader = results.ResultsReader(sys.stdin) + reader = results.JSONResultsReader(sys.stdin) for event in reader: pprint(event) + if __name__ == "__main__": pretty() diff --git a/examples/saved_search/saved_search.py b/examples/saved_search/saved_search.py index 91f5ef70a..657f6aa69 100755 --- a/examples/saved_search/saved_search.py +++ b/examples/saved_search/saved_search.py @@ -160,7 +160,7 @@ def main(argv): """ main entry """ usage = 'usage: %prog --help for options' - opts = utils.parse(argv, RULES, ".splunkrc", usage=usage) + opts = utils.parse(argv, RULES, ".env", usage=usage) context = binding.connect(**opts.kwargs) operation = None diff --git a/examples/saved_searches.py b/examples/saved_searches.py index 5455f2cec..6301339f5 100755 --- a/examples/saved_searches.py +++ b/examples/saved_searches.py @@ -31,7 +31,7 @@ def main(): - opts = parse(sys.argv[1:], {}, ".splunkrc") + opts = parse(sys.argv[1:], {}, ".env") service = connect(**opts.kwargs) for saved_search in service.saved_searches: diff --git a/examples/search.py b/examples/search.py index 1c5ace22e..858e92312 100755 --- a/examples/search.py +++ b/examples/search.py @@ -49,7 +49,7 @@ def cmdline(argv, flags, **kwargs): """A cmdopts wrapper that takes a list of flags and builds the corresponding cmdopts rules to match those flags.""" rules = dict([(flag, {'flags': ["--%s" % flag]}) for flag in flags]) - return parse(argv, rules, ".splunkrc", **kwargs) + return parse(argv, rules, ".env", **kwargs) def main(argv): usage = 'usage: %prog [options] "search"' diff --git a/examples/search_modes.py b/examples/search_modes.py index dbbb8442a..f1d1687f2 100644 --- a/examples/search_modes.py +++ b/examples/search_modes.py @@ -12,7 +12,7 @@ def cmdline(argv, flags, **kwargs): """A cmdopts wrapper that takes a list of flags and builds the corresponding cmdopts rules to match those flags.""" rules = dict([(flag, {'flags': ["--%s" % flag]}) for flag in flags]) - return parse(argv, rules, ".splunkrc", **kwargs) + return parse(argv, rules, ".env", **kwargs) def modes(argv): opts = cmdline(argv, []) @@ -24,7 +24,7 @@ def modes(argv): while not job.is_ready(): time.sleep(0.5) pass - reader = results.ResultsReader(job.events()) + reader = results.JSONResultsReader(job.events(output_mode='json')) # Events found: 0 print('Events found with adhoc_search_level="smart": %s' % len([e for e in reader])) @@ -33,7 +33,7 @@ def modes(argv): while not job.is_ready(): time.sleep(0.5) pass - reader = results.ResultsReader(job.events()) + reader = results.JSONResultsReader(job.events(output_mode='json')) # Events found: 10 print('Events found with adhoc_search_level="verbose": %s' % len([e for e in reader])) diff --git a/examples/searchcommands_app/README.md b/examples/searchcommands_app/README.md index 075253134..b1c07311d 100644 --- a/examples/searchcommands_app/README.md +++ b/examples/searchcommands_app/README.md @@ -35,7 +35,7 @@ The app is tested on Splunk 5 and 6. Here is its manifest: └── default.meta ............. 
Permits the search assistant to use searchbnf.conf[6] ``` **References** -[1] [app.conf](https://docs.splunk.com/Documentation/Splunk/latest/Admin/Appconf app.conf) +[1] [app.conf](https://docs.splunk.com/Documentation/Splunk/latest/Admin/Appconf) [2] [commands.conf](https://docs.splunk.com/Documentation/Splunk/latest/Admin/Commandsconf) [3] [Python Logging HOWTO](https://docs.python.org/2/howto/logging.html) [4] [ConfigParser—Configuration file parser](https://docs.python.org/2/library/configparser.html) @@ -110,6 +110,15 @@ word_counts | :-----| 4497.0 | +## Optional: Set up logging using a logging.conf file ++ Inside the **default** directory of our app, we have a [logging.conf](https://github.com/splunk/splunk-sdk-python/blob/master/examples/searchcommands_app/package/default/logging.conf) file. ++ In the logging.conf file we can define loggers, handlers, and formatters for our app; refer to [this doc](https://docs.python.org/2/library/logging.config.html#configuration-file-format) for more details ++ Logs are written to the files specified in the handlers defined for the respective loggers + + For the **'searchcommands_app'** app, logs are written to the **searchcommands_app.log** and **splunklib.log** files defined in the respective handlers, located in the $SPLUNK_HOME/etc/apps/searchcommands_app/ directory + + By default logs are written in the app's root directory, but this can be overridden by specifying an absolute path for the log file in the conf file ++ By default, the logging level is set to WARNING ++ To see debug-level logs and above, set the level to DEBUG in the logging.conf file + ## License This software is licensed under the Apache License 2.0. Details can be found in diff --git a/examples/searchcommands_app/package/default/distsearch.conf b/examples/searchcommands_app/package/default/distsearch.conf new file mode 100644 index 000000000..1c13e5414 --- /dev/null +++ b/examples/searchcommands_app/package/default/distsearch.conf @@ -0,0 +1,7 @@ +# Valid in <=8.2 +[replicationWhitelist] +searchcommands_app = apps/searchcommands_app/lib/... + +# Valid in >=8.3 +[replicationAllowlist] +searchcommands_app = apps/searchcommands_app/lib/...
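The logging.conf described in the README addition above is a standard `logging.config` file. A minimal sketch of how such a file is loaded with the Python standard library — the path below is illustrative, and splunklib's search command machinery performs comparable wiring automatically for apps that ship a logging.conf:

```python
import logging
import logging.config
import os

# Resolve the app's logging.conf relative to this script (illustrative layout:
# the script lives in bin/ and the config in default/).
conf_path = os.path.join(
    os.path.dirname(os.path.abspath(__file__)), '..', 'default', 'logging.conf')

# Apply the loggers, handlers, and formatters defined in the file, keeping any
# loggers created before this call alive.
logging.config.fileConfig(conf_path, disable_existing_loggers=False)

logger = logging.getLogger('splunklib')
logger.debug('Visible only if the splunklib logger level is DEBUG in logging.conf')
```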
diff --git a/examples/searchcommands_app/package/default/logging.conf b/examples/searchcommands_app/package/default/logging.conf index 4b2ae621e..f3220a63d 100644 --- a/examples/searchcommands_app/package/default/logging.conf +++ b/examples/searchcommands_app/package/default/logging.conf @@ -96,3 +96,4 @@ keys = searchcommands [formatter_searchcommands] format = %(asctime)s, Level=%(levelname)s, Pid=%(process)s, Logger=%(name)s, File=%(filename)s, Line=%(lineno)s, %(message)s +datefmt = %Y-%m-%d %H:%M:%S %Z diff --git a/examples/searchcommands_template/bin/filter.py b/examples/searchcommands_template/bin/filter.py index 194118af0..153c76a69 100644 --- a/examples/searchcommands_template/bin/filter.py +++ b/examples/searchcommands_template/bin/filter.py @@ -5,7 +5,7 @@ sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..", "lib")) from splunklib.searchcommands import \ - dispatch, StreamingCommand, Configuration, Option, validators + dispatch, EventingCommand, Configuration, Option, validators @Configuration() diff --git a/examples/searchcommands_template/bin/stream.py b/examples/searchcommands_template/bin/stream.py index aa7379038..fa946a02c 100644 --- a/examples/searchcommands_template/bin/stream.py +++ b/examples/searchcommands_template/bin/stream.py @@ -23,6 +23,7 @@ class %(command.title())Command(StreamingCommand): """ def stream(self, events): # Put your event transformation code here - pass + for event in events: + yield event dispatch(%(command.title())Command, sys.argv, sys.stdin, sys.stdout, __name__) diff --git a/examples/searchcommands_template/default/distsearch.conf b/examples/searchcommands_template/default/distsearch.conf new file mode 100644 index 000000000..8abbe3b9e --- /dev/null +++ b/examples/searchcommands_template/default/distsearch.conf @@ -0,0 +1,7 @@ +# Valid in <=8.2 +[replicationWhitelist] +searchcommands_template = apps/searchcommands_template/lib/... + +# Valid in >=8.3 +[replicationAllowlist] +searchcommands_template = apps/searchcommands_template/lib/... 
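The filter.py change above swaps StreamingCommand for EventingCommand in the template's imports, and the stream.py template now yields events through unchanged. A minimal sketch of a command built on EventingCommand — the class name, option, and filtering logic are illustrative, not part of this changeset:

```python
import os
import sys

sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..", "lib"))

from splunklib.searchcommands import \
    dispatch, EventingCommand, Configuration, Option, validators


@Configuration()
class DropEmptyCommand(EventingCommand):
    """Hypothetical example: drops records whose given field is empty."""

    # Fieldname validates that the option value is a well-formed field name
    fieldname = Option(require=True, validate=validators.Fieldname())

    def transform(self, records):
        for record in records:
            if record.get(self.fieldname):
                yield record


dispatch(DropEmptyCommand, sys.argv, sys.stdin, sys.stdout, __name__)
```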
diff --git a/examples/searchcommands_template/default/logging.conf b/examples/searchcommands_template/default/logging.conf index fdcd6ade8..4efb7e40c 100644 --- a/examples/searchcommands_template/default/logging.conf +++ b/examples/searchcommands_template/default/logging.conf @@ -33,6 +33,9 @@ handlers = app # Default: 1 propagate = 0 +[handlers] +keys = app, splunklib, stderr + [handler_app] # Select this handler to log events to $SPLUNK_HOME/var/log/splunk/searchcommands_app.log class = logging.handlers.RotatingFileHandler diff --git a/examples/spcmd.py b/examples/spcmd.py index f2b21378d..28b4e9a93 100755 --- a/examples/spcmd.py +++ b/examples/spcmd.py @@ -118,7 +118,7 @@ def actions(opts): return len(opts.args) > 0 or 'eval' in opts.kwargs def main(): - opts = utils.parse(sys.argv[1:], RULES, ".splunkrc") + opts = utils.parse(sys.argv[1:], RULES, ".env") # Connect and initialize the command session session = Session(**opts.kwargs) diff --git a/examples/spurl.py b/examples/spurl.py index 71c60e2ae..748b56d9c 100755 --- a/examples/spurl.py +++ b/examples/spurl.py @@ -47,7 +47,7 @@ def print_response(response): print(body) def main(): - opts = utils.parse(sys.argv[1:], {}, ".splunkrc") + opts = utils.parse(sys.argv[1:], {}, ".env") for arg in opts.args: print_response(invoke(arg, **opts.kwargs)) diff --git a/examples/stail.py b/examples/stail.py index 0f04b0d8c..6ba4ee54e 100755 --- a/examples/stail.py +++ b/examples/stail.py @@ -25,7 +25,7 @@ from pprint import pprint from splunklib.client import connect -from splunklib.results import ResultsReader +from splunklib.results import JSONResultsReader try: import utils @@ -35,7 +35,7 @@ def main(): usage = "usage: %prog " - opts = utils.parse(sys.argv[1:], {}, ".splunkrc", usage=usage) + opts = utils.parse(sys.argv[1:], {}, ".env", usage=usage) if len(opts.args) != 1: utils.error("Search expression required", 2) @@ -49,9 +49,10 @@ def main(): search=search, earliest_time="rt", latest_time="rt", - search_mode="realtime") + search_mode="realtime", + output_mode="json") - for result in ResultsReader(result.body): + for result in JSONResultsReader(result.body): if result is not None: print(pprint(result)) diff --git a/examples/submit.py b/examples/submit.py index 358ce9fb0..1e74e7a49 100755 --- a/examples/submit.py +++ b/examples/submit.py @@ -45,7 +45,7 @@ def main(argv): usage = 'usage: %prog [options] ' - opts = parse(argv, RULES, ".splunkrc", usage=usage) + opts = parse(argv, RULES, ".env", usage=usage) if len(opts.args) == 0: error("Index name required", 2) index = opts.args[0] diff --git a/examples/twitted/input.py b/examples/twitted/input.py index ececa09b1..e907cc55d 100755 --- a/examples/twitted/input.py +++ b/examples/twitted/input.py @@ -94,7 +94,7 @@ def connect(self): } def cmdline(): - kwargs = parse(sys.argv[1:], RULES, ".splunkrc").kwargs + kwargs = parse(sys.argv[1:], RULES, ".env").kwargs # Prompt for Twitter username/password if not provided on command line if 'tusername' not in kwargs: diff --git a/examples/twitted/twitted/bin/tophashtags.py b/examples/twitted/twitted/bin/tophashtags.py index 6df5765f1..499f9f389 100755 --- a/examples/twitted/twitted/bin/tophashtags.py +++ b/examples/twitted/twitted/bin/tophashtags.py @@ -19,10 +19,7 @@ import os sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir, os.pardir, os.pardir))) -try: - from collections import OrderedDict # must be python 2.7 -except ImportError: - from splunklib.ordereddict import OrderedDict +from collections import 
OrderedDict from splunklib import six from splunklib.six.moves import zip diff --git a/examples/upload.py b/examples/upload.py index 8e9137e42..af592b949 100755 --- a/examples/upload.py +++ b/examples/upload.py @@ -58,7 +58,7 @@ def main(argv): usage = 'usage: %prog [options] *' - opts = parse(argv, RULES, ".splunkrc", usage=usage) + opts = parse(argv, RULES, ".env", usage=usage) kwargs_splunk = dslice(opts.kwargs, FLAGS_SPLUNK) service = client.connect(**kwargs_splunk) diff --git a/scripts/build-splunkrc.py b/scripts/build-env.py similarity index 81% rename from scripts/build-splunkrc.py rename to scripts/build-env.py index 0d544665c..e1a153d4a 100644 --- a/scripts/build-splunkrc.py +++ b/scripts/build-env.py @@ -30,10 +30,10 @@ 'version': '8.0' } -DEFAULT_SPLUNKRC_PATH = os.path.join(str(Path.home()), '.splunkrc') +DEFAULT_ENV_PATH = os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', '.env') -SPLUNKRC_TEMPLATE_PATH = os.path.join( - os.path.dirname(os.path.realpath(__file__)), 'templates/splunkrc.template') +ENV_TEMPLATE_PATH = os.path.join( + os.path.dirname(os.path.realpath(__file__)), 'templates/env.template') # { # "server_roles": { @@ -82,31 +82,31 @@ def parse_hostport(host_port): result = urllib.parse.urlsplit('//' + host_port) return result.hostname, result.port -def run(variable, splunkrc_path=None): +def run(variable, env_path=None): # read JSON from input # parse the JSON input_config = build_config(variable) if variable else DEFAULT_CONFIG config = {**DEFAULT_CONFIG, **input_config} - # build a splunkrc file - with open(SPLUNKRC_TEMPLATE_PATH, 'r') as f: + # build a env file + with open(ENV_TEMPLATE_PATH, 'r') as f: template = Template(f.read()) - splunkrc_string = template.substitute(config) - - # if no splunkrc, dry-run - if not splunkrc_path: - print(splunkrc_string) + env_string = template.substitute(config) + env_path = DEFAULT_ENV_PATH if env_path is None else env_path + # if no env, dry-run + if not env_path: + print(env_string) return - # write the .splunkrc file - with open(splunkrc_path, 'w') as f: - f.write(splunkrc_string) + # write the .env file + with open(env_path, 'w') as f: + f.write(env_string) if sys.stdin.isatty(): DATA = None else: DATA = sys.stdin.read() -run(DATA, sys.argv[1] if len(sys.argv) > 1 else None) +run(DATA, sys.argv[1] if len(sys.argv) > 1 else None) \ No newline at end of file diff --git a/scripts/templates/splunkrc.template b/scripts/templates/env.template similarity index 93% rename from scripts/templates/splunkrc.template rename to scripts/templates/env.template index b98f93af6..a45851b6a 100644 --- a/scripts/templates/splunkrc.template +++ b/scripts/templates/env.template @@ -13,4 +13,4 @@ version=$version # Bearer token for authentication #bearerToken= # Session key for authentication -#sessionKey= +#sessionKey= \ No newline at end of file diff --git a/splunklib/__init__.py b/splunklib/__init__.py index 41c261fdc..87d26b749 100644 --- a/splunklib/__init__.py +++ b/splunklib/__init__.py @@ -16,5 +16,20 @@ from __future__ import absolute_import from splunklib.six.moves import map -__version_info__ = (1, 6, 18) +import logging + +DEFAULT_LOG_FORMAT = '%(asctime)s, Level=%(levelname)s, Pid=%(process)s, Logger=%(name)s, File=%(filename)s, ' \ + 'Line=%(lineno)s, %(message)s' +DEFAULT_DATE_FORMAT = '%Y-%m-%d %H:%M:%S %Z' + + +# To set the logging level of splunklib +# ex. 
To enable debug logs, call this method with parameter 'logging.DEBUG' +# default logging level is set to 'WARNING' +def setup_logging(level, log_format=DEFAULT_LOG_FORMAT, date_format=DEFAULT_DATE_FORMAT): + logging.basicConfig(level=level, + format=log_format, + datefmt=date_format) + +__version_info__ = (1, 6, 19) __version__ = ".".join(map(str, __version_info__)) diff --git a/splunklib/binding.py b/splunklib/binding.py index 94cc55818..6bf4f0714 100644 --- a/splunklib/binding.py +++ b/splunklib/binding.py @@ -47,6 +47,7 @@ except ImportError as e: from xml.parsers.expat import ExpatError as ParseError +logger = logging.getLogger(__name__) __all__ = [ "AuthenticationError", @@ -68,7 +69,7 @@ def new_f(*args, **kwargs): start_time = datetime.now() val = f(*args, **kwargs) end_time = datetime.now() - logging.debug("Operation took %s", end_time-start_time) + logger.debug("Operation took %s", end_time-start_time) return val return new_f @@ -616,7 +617,7 @@ def delete(self, path_segment, owner=None, app=None, sharing=None, **query): """ path = self.authority + self._abspath(path_segment, owner=owner, app=app, sharing=sharing) - logging.debug("DELETE request to %s (body: %s)", path, repr(query)) + logger.debug("DELETE request to %s (body: %s)", path, repr(query)) response = self.http.delete(path, self._auth_headers, **query) return response @@ -679,7 +680,7 @@ def get(self, path_segment, owner=None, app=None, headers=None, sharing=None, ** path = self.authority + self._abspath(path_segment, owner=owner, app=app, sharing=sharing) - logging.debug("GET request to %s (body: %s)", path, repr(query)) + logger.debug("GET request to %s (body: %s)", path, repr(query)) all_headers = headers + self.additional_headers + self._auth_headers response = self.http.get(path, all_headers, **query) return response @@ -757,14 +758,20 @@ def post(self, path_segment, owner=None, app=None, sharing=None, headers=None, * headers = [] path = self.authority + self._abspath(path_segment, owner=owner, app=app, sharing=sharing) - logging.debug("POST request to %s (body: %s)", path, repr(query)) + + # To avoid writing sensitive data in debug logs + endpoint_having_sensitive_data = ["/storage/passwords"] + if any(endpoint in path for endpoint in endpoint_having_sensitive_data): + logger.debug("POST request to %s ", path) + else: + logger.debug("POST request to %s (body: %s)", path, repr(query)) all_headers = headers + self.additional_headers + self._auth_headers response = self.http.post(path, all_headers, **query) return response @_authentication @_log_duration - def request(self, path_segment, method="GET", headers=None, body="", + def request(self, path_segment, method="GET", headers=None, body={}, owner=None, app=None, sharing=None): """Issues an arbitrary HTTP request to the REST path segment. @@ -824,13 +831,28 @@ def request(self, path_segment, method="GET", headers=None, body="", path = self.authority \ + self._abspath(path_segment, owner=owner, app=app, sharing=sharing) + all_headers = headers + self.additional_headers + self._auth_headers - logging.debug("%s request to %s (headers: %s, body: %s)", + logger.debug("%s request to %s (headers: %s, body: %s)", method, path, str(all_headers), repr(body)) - response = self.http.request(path, - {'method': method, - 'headers': all_headers, - 'body': body}) + + if body: + body = _encode(**body) + + if method == "GET": + path = path + UrlEncoded('?' 
+ body, skip_encode=True) + message = {'method': method, + 'headers': all_headers} + else: + message = {'method': method, + 'headers': all_headers, + 'body': body} + else: + message = {'method': method, + 'headers': all_headers} + + response = self.http.request(path, message) + return response def login(self): @@ -1290,7 +1312,10 @@ def __init__(self, response, connection=None): self._buffer = b'' def __str__(self): - return self.read() + if six.PY2: + return self.read() + else: + return str(self.read(), 'UTF-8') @property def empty(self): @@ -1389,7 +1414,7 @@ def request(url, message, **kwargs): head = { "Content-Length": str(len(body)), "Host": host, - "User-Agent": "splunk-sdk-python/1.6.18", + "User-Agent": "splunk-sdk-python/1.6.19", "Accept": "*/*", "Connection": "Close", } # defaults diff --git a/splunklib/client.py b/splunklib/client.py index 21d27a6e0..0979140c2 100644 --- a/splunklib/client.py +++ b/splunklib/client.py @@ -75,6 +75,8 @@ namespace) from .data import record +logger = logging.getLogger(__name__) + __all__ = [ "connect", "NotSupportedError", @@ -724,7 +726,7 @@ class Endpoint(object): """ def __init__(self, service, path): self.service = service - self.path = path if path.endswith('/') else path + '/' + self.path = path def get(self, path_segment="", owner=None, app=None, sharing=None, **query): """Performs a GET operation on the path segment relative to this endpoint. @@ -782,6 +784,8 @@ def get(self, path_segment="", owner=None, app=None, sharing=None, **query): if path_segment.startswith('/'): path = path_segment else: + if not self.path.endswith('/') and path_segment != "": + self.path = self.path + '/' path = self.service._abspath(self.path + path_segment, owner=owner, app=app, sharing=sharing) # ^-- This was "%s%s" % (self.path, path_segment). @@ -842,6 +846,8 @@ def post(self, path_segment="", owner=None, app=None, sharing=None, **query): if path_segment.startswith('/'): path = path_segment else: + if not self.path.endswith('/') and path_segment != "": + self.path = self.path + '/' path = self.service._abspath(self.path + path_segment, owner=owner, app=app, sharing=sharing) return self.service.post(path, owner=owner, app=app, sharing=sharing, **query) @@ -978,7 +984,10 @@ def __getitem__(self, key): def _load_atom_entry(self, response): elem = _load_atom(response, XNAME_ENTRY) if isinstance(elem, list): - raise AmbiguousReferenceException("Fetch from server returned multiple entries for name %s." % self.name) + apps = [ele.entry.content.get('eai:appName') for ele in elem] + + raise AmbiguousReferenceException( + "Fetch from server returned multiple entries for name '%s' in apps %s." 
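A hedged sketch of what the richer `AmbiguousReferenceException` message above buys a caller; it assumes two apps each define a saved search named 'errors':

```python
import splunklib.client as client

service = client.connect(host="localhost", port=8089,
                         username="admin", password="changed!")
try:
    search = service.saved_searches["errors"]
except client.AmbiguousReferenceException as err:
    # Previously the error only said multiple entries were returned;
    # it now also names the apps, e.g.:
    # "... for name 'errors' in apps ['search', 'my_app']."
    print(err)
```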
% (elem[0].entry.title, apps)) else: return elem.entry @@ -1469,7 +1478,7 @@ def iter(self, offset=0, count=None, pagesize=None, **kwargs): if pagesize is None or N < pagesize: break offset += N - logging.debug("pagesize=%d, fetched=%d, offset=%d, N=%d, kwargs=%s", pagesize, fetched, offset, N, kwargs) + logger.debug("pagesize=%d, fetched=%d, offset=%d, N=%d, kwargs=%s", pagesize, fetched, offset, N, kwargs) # kwargs: count, offset, search, sort_dir, sort_key, sort_mode def list(self, count=None, **kwargs): @@ -2111,10 +2120,6 @@ def submit(self, event, host=None, source=None, sourcetype=None): if source is not None: args['source'] = source if sourcetype is not None: args['sourcetype'] = sourcetype - # The reason we use service.request directly rather than POST - # is that we are not sending a POST request encoded using - # x-www-form-urlencoded (as we do not have a key=value body), - # because we aren't really sending a "form". self.service.post(PATH_RECEIVERS_SIMPLE, body=event, **args) return self @@ -2542,9 +2547,9 @@ def list(self, *kinds, **kwargs): kinds = self.kinds if len(kinds) == 1: kind = kinds[0] - logging.debug("Inputs.list taking short circuit branch for single kind.") + logger.debug("Inputs.list taking short circuit branch for single kind.") path = self.kindpath(kind) - logging.debug("Path for inputs: %s", path) + logger.debug("Path for inputs: %s", path) try: path = UrlEncoded(path, skip_encode=True) response = self.get(path, **kwargs) @@ -2762,9 +2767,8 @@ def pause(self): return self def results(self, **query_params): - """Returns a streaming handle to this job's search results. To get a - nice, Pythonic iterator, pass the handle to :class:`splunklib.results.ResultsReader`, - as in:: + """Returns a streaming handle to this job's search results. To get a nice, Pythonic iterator, pass the handle + to :class:`splunklib.results.JSONResultsReader` along with the query param "output_mode='json'", as in:: import splunklib.client as client import splunklib.results as results @@ -2773,7 +2777,7 @@ def results(self, **query_params): job = service.jobs.create("search * | head 5") while not job.is_done(): sleep(.2) - rr = results.ResultsReader(job.results()) + rr = results.JSONResultsReader(job.results(output_mode='json')) for result in rr: if isinstance(result, results.Message): # Diagnostic messages may be returned in the results @@ -2803,19 +2807,17 @@ def results(self, **query_params): def preview(self, **query_params): """Returns a streaming handle to this job's preview search results. - Unlike :class:`splunklib.results.ResultsReader`, which requires a job to - be finished to - return any results, the ``preview`` method returns any results that have - been generated so far, whether the job is running or not. The - returned search results are the raw data from the server. Pass - the handle returned to :class:`splunklib.results.ResultsReader` to get a - nice, Pythonic iterator over objects, as in:: + Unlike :class:`splunklib.results.JSONResultsReader`along with the query param "output_mode='json'", + which requires a job to be finished to return any results, the ``preview`` method returns any results that + have been generated so far, whether the job is running or not. The returned search results are the raw data + from the server. Pass the handle returned to :class:`splunklib.results.JSONResultsReader` to get a nice, + Pythonic iterator over objects, as in:: import splunklib.client as client import splunklib.results as results service = client.connect(...) 
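The docstring rewrite above is the core migration story of this release. As a runnable sketch (search string and credentials illustrative):

```python
import splunklib.client as client
import splunklib.results as results
from time import sleep

service = client.connect(host="localhost", port=8089,
                         username="admin", password="changed!")
job = service.jobs.create("search index=_internal | head 5")
while not job.is_done():
    sleep(0.2)

# output_mode='json' is the required pre-requisite for JSONResultsReader.
for result in results.JSONResultsReader(job.results(output_mode='json')):
    if isinstance(result, results.Message):
        print('%s: %s' % (result.type, result.message))  # diagnostic message
    elif isinstance(result, dict):
        print(result)                                    # normal result row
```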
job = service.jobs.create("search * | head 5") - rr = results.ResultsReader(job.preview()) + rr = results.JSONResultsReader(job.preview(output_mode='json')) for result in rr: if isinstance(result, results.Message): # Diagnostic messages may be returned in the results @@ -2970,15 +2972,15 @@ def create(self, query, **kwargs): return Job(self.service, sid) def export(self, query, **params): - """Runs a search and immediately starts streaming preview events. - This method returns a streaming handle to this job's events as an XML - document from the server. To parse this stream into usable Python objects, - pass the handle to :class:`splunklib.results.ResultsReader`:: + """Runs a search and immediately starts streaming preview events. This method returns a streaming handle to + this job's events as an XML document from the server. To parse this stream into usable Python objects, + pass the handle to :class:`splunklib.results.JSONResultsReader` along with the query param + "output_mode='json'":: import splunklib.client as client import splunklib.results as results service = client.connect(...) - rr = results.ResultsReader(service.jobs.export("search * | head 5")) + rr = results.JSONResultsReader(service.jobs.export("search * | head 5",output_mode='json')) for result in rr: if isinstance(result, results.Message): # Diagnostic messages may be returned in the results @@ -3027,14 +3029,14 @@ def itemmeta(self): def oneshot(self, query, **params): """Run a oneshot search and returns a streaming handle to the results. - The ``InputStream`` object streams XML fragments from the server. To - parse this stream into usable Python objects, - pass the handle to :class:`splunklib.results.ResultsReader`:: + The ``InputStream`` object streams XML fragments from the server. To parse this stream into usable Python + objects, pass the handle to :class:`splunklib.results.JSONResultsReader` along with the query param + "output_mode='json'" :: import splunklib.client as client import splunklib.results as results service = client.connect(...) - rr = results.ResultsReader(service.jobs.oneshot("search * | head 5")) + rr = results.JSONResultsReader(service.jobs.oneshot("search * | head 5",output_mode='json')) for result in rr: if isinstance(result, results.Message): # Diagnostic messages may be returned in the results @@ -3616,7 +3618,7 @@ def update_index(self, name, value): :return: Result of POST request """ kwargs = {} - kwargs['index.' + name] = value if isinstance(value, basestring) else json.dumps(value) + kwargs['index.' 
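A small sketch of the `six.string_types` change above, assuming a KV Store collection named 'mycollection' already exists; string values pass through unchanged while anything else is JSON-encoded into the `index.<name>` POST argument:

```python
import splunklib.client as client

service = client.connect(host="localhost", port=8089,
                         username="admin", password="changed!")
collection = service.kvstore['mycollection']   # collection name is illustrative

collection.update_index('foo', '1')            # sent as-is:  index.foo=1
collection.update_index('bar', {'bar': -1})    # JSON-dumped: index.bar={"bar": -1}
```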
+ name] = value if isinstance(value, six.string_types) else json.dumps(value) return self.post(**kwargs) def update_field(self, name, value): diff --git a/splunklib/data.py b/splunklib/data.py index dedbb3310..f9ffb8692 100644 --- a/splunklib/data.py +++ b/splunklib/data.py @@ -161,8 +161,8 @@ def load_value(element, nametable=None): text = element.text if text is None: return None - text = text.strip() - if len(text) == 0: + + if len(text.strip()) == 0: return None return text diff --git a/splunklib/modularinput/event_writer.py b/splunklib/modularinput/event_writer.py old mode 100644 new mode 100755 index b868a18ff..5f8c5aa8b --- a/splunklib/modularinput/event_writer.py +++ b/splunklib/modularinput/event_writer.py @@ -82,5 +82,6 @@ def write_xml_document(self, document): def close(self): """Write the closing tag to make this XML well formed.""" - self._out.write("") + if self.header_written: + self._out.write("") self._out.flush() diff --git a/splunklib/modularinput/utils.py b/splunklib/modularinput/utils.py index 853694a0d..3d42b6326 100644 --- a/splunklib/modularinput/utils.py +++ b/splunklib/modularinput/utils.py @@ -64,11 +64,14 @@ def parse_parameters(param_node): def parse_xml_data(parent_node, child_node_tag): data = {} for child in parent_node: + child_name = child.get("name") if child.tag == child_node_tag: if child_node_tag == "stanza": - data[child.get("name")] = {} + data[child_name] = { + "__app": child.get("app", None) + } for param in child: - data[child.get("name")][param.get("name")] = parse_parameters(param) + data[child_name][param.get("name")] = parse_parameters(param) elif "item" == parent_node.tag: - data[child.get("name")] = parse_parameters(child) + data[child_name] = parse_parameters(child) return data diff --git a/splunklib/ordereddict.py b/splunklib/ordereddict.py deleted file mode 100644 index 9495566cf..000000000 --- a/splunklib/ordereddict.py +++ /dev/null @@ -1,128 +0,0 @@ -# Copyright (c) 2009 Raymond Hettinger -# -# Permission is hereby granted, free of charge, to any person -# obtaining a copy of this software and associated documentation files -# (the "Software"), to deal in the Software without restriction, -# including without limitation the rights to use, copy, modify, merge, -# publish, distribute, sublicense, and/or sell copies of the Software, -# and to permit persons to whom the Software is furnished to do so, -# subject to the following conditions: -# -# The above copyright notice and this permission notice shall be -# included in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES -# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT -# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, -# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR -# OTHER DEALINGS IN THE SOFTWARE. 
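The `__app` key added in `parse_xml_data()` above is how a modular input can now discover which app each stanza belongs to. A hedged sketch of reading it inside `stream_events()` (the input class is hypothetical and its scheme methods are omitted):

```python
from splunklib.modularinput import Script

class GithubCommits(Script):    # illustrative modular input
    def stream_events(self, inputs, ew):
        for input_name, input_item in inputs.inputs.items():
            # "__app" is populated from the stanza's app attribute, or None.
            app = input_item.get("__app")
            ew.log("INFO", "stanza %s runs in app %s" % (input_name, app))
```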
- -from UserDict import DictMixin - - -class OrderedDict(dict, DictMixin): - - def __init__(self, *args, **kwds): - if len(args) > 1: - raise TypeError('expected at most 1 arguments, got %d' % len(args)) - try: - self.__end - except AttributeError: - self.clear() - self.update(*args, **kwds) - - def clear(self): - self.__end = end = [] - end += [None, end, end] # sentinel node for doubly linked list - self.__map = {} # key --> [key, prev, next] - dict.clear(self) - - def __setitem__(self, key, value): - if key not in self: - end = self.__end - curr = end[1] - curr[2] = end[1] = self.__map[key] = [key, curr, end] - dict.__setitem__(self, key, value) - - def __delitem__(self, key): - dict.__delitem__(self, key) - key, prev, next = self.__map.pop(key) - prev[2] = next - next[1] = prev - - def __iter__(self): - end = self.__end - curr = end[2] - while curr is not end: - yield curr[0] - curr = curr[2] - - def __reversed__(self): - end = self.__end - curr = end[1] - while curr is not end: - yield curr[0] - curr = curr[1] - - def popitem(self, last=True): - if not self: - raise KeyError('dictionary is empty') - if last: - key = reversed(self).next() - else: - key = iter(self).next() - value = self.pop(key) - return key, value - - def __reduce__(self): - items = [[k, self[k]] for k in self] - tmp = self.__map, self.__end - del self.__map, self.__end - inst_dict = vars(self).copy() - self.__map, self.__end = tmp - if inst_dict: - return (self.__class__, (items,), inst_dict) - return self.__class__, (items,) - - def keys(self): - return list(self) - - setdefault = DictMixin.setdefault - update = DictMixin.update - pop = DictMixin.pop - values = DictMixin.values - items = DictMixin.items - iterkeys = DictMixin.iterkeys - itervalues = DictMixin.itervalues - iteritems = DictMixin.iteritems - - def __repr__(self): - if not self: - return '%s()' % (self.__class__.__name__,) - return '%s(%r)' % (self.__class__.__name__, self.items()) - - def copy(self): - return self.__class__(self) - - @classmethod - def fromkeys(cls, iterable, value=None): - d = cls() - for key in iterable: - d[key] = value - return d - - def __eq__(self, other): - if isinstance(other, OrderedDict): - if len(self) != len(other): - return False - for p, q in zip(self.items(), other.items()): - if p != q: - return False - return True - return dict.__eq__(self, other) - - def __ne__(self, other): - return not self == other diff --git a/splunklib/results.py b/splunklib/results.py index 20501c5b7..5f3966859 100644 --- a/splunklib/results.py +++ b/splunklib/results.py @@ -34,18 +34,19 @@ from __future__ import absolute_import -from io import BytesIO +from io import BufferedReader, BytesIO from splunklib import six + +from splunklib.six import deprecated + try: import xml.etree.cElementTree as et except: import xml.etree.ElementTree as et -try: - from collections import OrderedDict # must be python 2.7 -except ImportError: - from .ordereddict import OrderedDict +from collections import OrderedDict +from json import loads as json_loads try: from splunklib.six.moves import cStringIO as StringIO @@ -57,6 +58,7 @@ "Message" ] + class Message(object): """This class represents informational messages that Splunk interleaves in the results stream. 
@@ -67,6 +69,7 @@ class Message(object): m = Message("DEBUG", "There's something in that variable...") """ + def __init__(self, type_, message): self.type = type_ self.message = message @@ -80,6 +83,7 @@ def __eq__(self, other): def __hash__(self): return hash((self.type, self.message)) + class _ConcatenatedStream(object): """Lazily concatenate zero or more streams into a stream. @@ -92,6 +96,7 @@ class _ConcatenatedStream(object): s = _ConcatenatedStream(StringIO("abc"), StringIO("def")) assert s.read() == "abcdef" """ + def __init__(self, *streams): self.streams = list(streams) @@ -110,6 +115,7 @@ def read(self, n=None): del self.streams[0] return response + class _XMLDTDFilter(object): """Lazily remove all XML DTDs from a stream. @@ -123,6 +129,7 @@ class _XMLDTDFilter(object): s = _XMLDTDFilter("") assert s.read() == "" """ + def __init__(self, stream): self.stream = stream @@ -153,6 +160,8 @@ def read(self, n=None): n -= 1 return response + +@deprecated("Use the JSONResultsReader function instead in conjuction with the 'output_mode' query param set to 'json'") class ResultsReader(object): """This class returns dictionaries and Splunk messages from an XML results stream. @@ -180,6 +189,7 @@ class ResultsReader(object): print "Message: %s" % result print "is_preview = %s " % reader.is_preview """ + # Be sure to update the docstrings of client.Jobs.oneshot, # client.Job.results_preview and client.Job.results to match any # changes made to ResultsReader. @@ -260,16 +270,16 @@ def _parse_results(self, stream): # So we'll define it here def __itertext(self): - tag = self.tag - if not isinstance(tag, six.string_types) and tag is not None: - return - if self.text: - yield self.text - for e in self: - for s in __itertext(e): - yield s - if e.tail: - yield e.tail + tag = self.tag + if not isinstance(tag, six.string_types) and tag is not None: + return + if self.text: + yield self.text + for e in self: + for s in __itertext(e): + yield s + if e.tail: + yield e.tail text = "".join(__itertext(elem)) values.append(text) @@ -291,5 +301,69 @@ def __itertext(self): raise +class JSONResultsReader(object): + """This class returns dictionaries and Splunk messages from a JSON results + stream. + ``JSONResultsReader`` is iterable, and returns a ``dict`` for results, or a + :class:`Message` object for Splunk messages. This class has one field, + ``is_preview``, which is ``True`` when the results are a preview from a + running search, or ``False`` when the results are from a completed search. + This function has no network activity other than what is implicit in the + stream it operates on. + :param `stream`: The stream to read from (any object that supports + ``.read()``). + **Example**:: + import results + response = ... # the body of an HTTP response + reader = results.JSONResultsReader(response) + for result in reader: + if isinstance(result, dict): + print "Result: %s" % result + elif isinstance(result, results.Message): + print "Message: %s" % result + print "is_preview = %s " % reader.is_preview + """ + + # Be sure to update the docstrings of client.Jobs.oneshot, + # client.Job.results_preview and client.Job.results to match any + # changes made to JSONResultsReader. + # + # This wouldn't be a class, just the _parse_results function below, + # except that you cannot get the current generator inside the + # function creating that generator. Thus it's all wrapped up for + # the sake of one field. 
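A quick way to observe the deprecation shim above at work: constructing a `ResultsReader` now emits a `DeprecationWarning` before delegating to the wrapped class.

```python
import io
import warnings

import splunklib.results as results

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    results.ResultsReader(io.BytesIO(b"<results preview='0'/>"))

assert any(issubclass(w.category, DeprecationWarning) for w in caught)
# "ResultsReader is a deprecated function. Use the JSONResultsReader ..."
print(caught[0].message)
```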
+ def __init__(self, stream): + # The search/jobs/exports endpoint, when run with + # earliest_time=rt and latest_time=rt, output_mode=json, streams a sequence of + # JSON documents, each containing a result, as opposed to one + # results element containing lots of results. + stream = BufferedReader(stream) + self.is_preview = None + self._gen = self._parse_results(stream) + + def __iter__(self): + return self + + def next(self): + return next(self._gen) + __next__ = next + def _parse_results(self, stream): + """Parse results and messages out of *stream*.""" + for line in stream.readlines(): + strip_line = line.strip() + if strip_line.__len__() == 0: continue + parsed_line = json_loads(strip_line) + if "preview" in parsed_line: + self.is_preview = parsed_line["preview"] + if "messages" in parsed_line and parsed_line["messages"].__len__() > 0: + for message in parsed_line["messages"]: + msg_type = message.get("type", "Unknown Message Type") + text = message.get("text") + yield Message(msg_type, text) + if "result" in parsed_line: + yield parsed_line["result"] + if "results" in parsed_line: + for result in parsed_line["results"]: + yield result diff --git a/splunklib/searchcommands/__init__.py b/splunklib/searchcommands/__init__.py index c56c510d5..8a929039c 100644 --- a/splunklib/searchcommands/__init__.py +++ b/splunklib/searchcommands/__init__.py @@ -134,9 +134,13 @@ .. topic:: References - 1. `Search command style guide `__ + 1. `Custom Search Command manual: `__ - 2. `Commands.conf.spec `_ + 2. `Create Custom Search Commands with commands.conf.spec `_ + + 3. `Configure seach assistant with searchbnf.conf `_ + + 4. `Control search distribution with distsearch.conf `_ """ diff --git a/splunklib/searchcommands/decorators.py b/splunklib/searchcommands/decorators.py index 36590a76b..d8b3f48cc 100644 --- a/splunklib/searchcommands/decorators.py +++ b/splunklib/searchcommands/decorators.py @@ -17,10 +17,7 @@ from __future__ import absolute_import, division, print_function, unicode_literals from splunklib import six -try: - from collections import OrderedDict # must be python 2.7 -except ImportError: - from ..ordereddict import OrderedDict +from collections import OrderedDict # must be python 2.7 from inspect import getmembers, isclass, isfunction from splunklib.six.moves import map as imap diff --git a/splunklib/searchcommands/internals.py b/splunklib/searchcommands/internals.py index fa32f0b1c..1ea2833db 100644 --- a/splunklib/searchcommands/internals.py +++ b/splunklib/searchcommands/internals.py @@ -19,10 +19,7 @@ from io import TextIOWrapper from collections import deque, namedtuple from splunklib import six -try: - from collections import OrderedDict # must be python 2.7 -except ImportError: - from ..ordereddict import OrderedDict +from collections import OrderedDict from splunklib.six.moves import StringIO from itertools import chain from splunklib.six.moves import map as imap diff --git a/splunklib/searchcommands/search_command.py b/splunklib/searchcommands/search_command.py index 5a626cc5c..dd11391d6 100644 --- a/splunklib/searchcommands/search_command.py +++ b/splunklib/searchcommands/search_command.py @@ -22,10 +22,7 @@ import io -try: - from collections import OrderedDict # must be python 2.7 -except ImportError: - from ..ordereddict import OrderedDict +from collections import OrderedDict from copy import deepcopy from splunklib.six.moves import StringIO from itertools import chain, islice @@ -649,6 +646,19 @@ def _process_protocol_v1(self, argv, ifile, ofile): debug('%s.process 
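The parser above is line-oriented: each line of the stream is one JSON document, as search/jobs/export emits with output_mode=json. A self-contained sketch of its behavior against a canned payload (payload contents illustrative):

```python
import io

from splunklib import results

payload = io.BytesIO(
    b'{"preview": false,'
    b' "messages": [{"type": "INFO", "text": "hello from splunkd"}],'
    b' "results": [{"count": "12"}, {"count": "7"}]}'
)

reader = results.JSONResultsReader(payload)
for item in reader:
    print(item)            # the Message first, then the two result dicts
print(reader.is_preview)   # False, set while the "preview" key is parsed
```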
finished under protocol_version=1', class_name) + def _protocol_v2_option_parser(self, arg): + """ Determines if an argument is an Option/Value pair, or just a Positional Argument. + Method so different search commands can handle parsing of arguments differently. + + :param arg: A single argument provided to the command from SPL + :type arg: str + + :return: [OptionName, OptionValue] OR [PositionalArgument] + :rtype: List[str] + + """ + return arg.split('=', 1) + def _process_protocol_v2(self, argv, ifile, ofile): """ Processes records on the `input stream optionally writing records to the output stream. @@ -719,7 +729,7 @@ def _process_protocol_v2(self, argv, ifile, ofile): if args and type(args) == list: for arg in args: - result = arg.split('=', 1) + result = self._protocol_v2_option_parser(arg) if len(result) == 1: self.fieldnames.append(str(result[0])) else: diff --git a/splunklib/searchcommands/validators.py b/splunklib/searchcommands/validators.py index 0278fbd59..22f0e16b2 100644 --- a/splunklib/searchcommands/validators.py +++ b/splunklib/searchcommands/validators.py @@ -95,10 +95,7 @@ def __call__(self, value): try: return Code.object(compile(value, 'string', self._mode), six.text_type(value)) except (SyntaxError, TypeError) as error: - if six.PY2: - message = error.message - else: - message = str(error) + message = str(error) six.raise_from(ValueError(message), error) @@ -204,6 +201,48 @@ def format(self, value): return None if value is None else six.text_type(int(value)) +class Float(Validator): + """ Validates float option values. + + """ + def __init__(self, minimum=None, maximum=None): + if minimum is not None and maximum is not None: + def check_range(value): + if not (minimum <= value <= maximum): + raise ValueError('Expected float in the range [{0},{1}], not {2}'.format(minimum, maximum, value)) + return + elif minimum is not None: + def check_range(value): + if value < minimum: + raise ValueError('Expected float in the range [{0},+∞], not {1}'.format(minimum, value)) + return + elif maximum is not None: + def check_range(value): + if value > maximum: + raise ValueError('Expected float in the range [-∞,{0}], not {1}'.format(maximum, value)) + return + else: + def check_range(value): + return + + self.check_range = check_range + return + + def __call__(self, value): + if value is None: + return None + try: + value = float(value) + except ValueError: + raise ValueError('Expected float value, not {}'.format(json_encode_string(value))) + + self.check_range(value) + return value + + def format(self, value): + return None if value is None else six.text_type(float(value)) + + class Duration(Validator): """ Validates duration option values. @@ -391,4 +430,4 @@ def format(self, value): return self.__call__(value) -__all__ = ['Boolean', 'Code', 'Duration', 'File', 'Integer', 'List', 'Map', 'RegularExpression', 'Set'] +__all__ = ['Boolean', 'Code', 'Duration', 'File', 'Integer', 'Float', 'List', 'Map', 'RegularExpression', 'Set'] diff --git a/splunklib/six.py b/splunklib/six.py index 5fe9f8e14..d13e50c93 100644 --- a/splunklib/six.py +++ b/splunklib/six.py @@ -978,3 +978,16 @@ def python_2_unicode_compatible(klass): del i, importer # Finally, add the importer to the meta path import hook. sys.meta_path.append(_importer) + +import warnings + +def deprecated(message): + def deprecated_decorator(func): + def deprecated_func(*args, **kwargs): + warnings.warn("{} is a deprecated function. 
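The new `_protocol_v2_option_parser()` hook above exists precisely so subclasses can change how v2 arguments split into option/value pairs; a hypothetical override:

```python
from splunklib.searchcommands import Configuration, StreamingCommand

@Configuration()
class LooseOptionsCommand(StreamingCommand):    # illustrative command
    def _protocol_v2_option_parser(self, arg):
        # Also accept "key:value" tokens as options, not just "key=value".
        if '=' not in arg and ':' in arg:
            return arg.split(':', 1)
        return super(LooseOptionsCommand, self)._protocol_v2_option_parser(arg)

    def stream(self, records):
        for record in records:
            yield record
```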
{}".format(func.__name__, message), + category=DeprecationWarning, + stacklevel=2) + warnings.simplefilter('default', DeprecationWarning) + return func(*args, **kwargs) + return deprecated_func + return deprecated_decorator \ No newline at end of file diff --git a/tests/modularinput/data/conf_with_2_inputs.xml b/tests/modularinput/data/conf_with_2_inputs.xml index 95c44bb2a..bcfd81204 100644 Binary files a/tests/modularinput/data/conf_with_2_inputs.xml and b/tests/modularinput/data/conf_with_2_inputs.xml differ diff --git a/tests/modularinput/data/conf_with_invalid_inputs.xml b/tests/modularinput/data/conf_with_invalid_inputs.xml index f3dd460f4..dd399dce7 100644 Binary files a/tests/modularinput/data/conf_with_invalid_inputs.xml and b/tests/modularinput/data/conf_with_invalid_inputs.xml differ diff --git a/tests/modularinput/test_input_definition.py b/tests/modularinput/test_input_definition.py index e814711dd..d0f59a04e 100644 --- a/tests/modularinput/test_input_definition.py +++ b/tests/modularinput/test_input_definition.py @@ -48,12 +48,14 @@ def test_parse_inputdef_with_two_inputs(self): "session_key": "123102983109283019283" } expectedDefinition.inputs["foobar://aaa"] = { + "__app": "search", "param1": "value1", "param2": "value2", "disabled": "0", "index": "default" } expectedDefinition.inputs["foobar://bbb"] = { + "__app": "my_app", "param1": "value11", "param2": "value22", "disabled": "0", diff --git a/tests/searchcommands/test_decorators.py b/tests/searchcommands/test_decorators.py index 84900d416..dd65aa0ab 100755 --- a/tests/searchcommands/test_decorators.py +++ b/tests/searchcommands/test_decorators.py @@ -121,6 +121,18 @@ class TestSearchCommand(SearchCommand): **Syntax:** **integer=**** **Description:** An integer value''', require=True, validate=validators.Integer()) + + float = Option( + doc=''' + **Syntax:** **float=**** + **Description:** An float value''', + validate=validators.Float()) + + required_float = Option( + doc=''' + **Syntax:** **float=**** + **Description:** An float value''', + require=True, validate=validators.Float()) map = Option( doc=''' @@ -369,6 +381,7 @@ def test_option(self): validators.Fieldname: ('some.field_name', 'non-fieldname value'), validators.File: (__file__, 'non-existent file'), validators.Integer: ('100', 'non-integer value'), + validators.Float: ('99.9', 'non-float value'), validators.List: ('a,b,c', '"non-list value'), validators.Map: ('foo', 'non-existent map entry'), validators.Match: ('123-45-6789', 'not a social security number'), @@ -408,6 +421,7 @@ def test_option(self): 'fieldname': u'some.field_name', 'file': six.text_type(repr(__file__)), 'integer': 100, + 'float': 99.9, 'logging_configuration': environment.logging_configuration, 'logging_level': u'WARNING', 'map': 'foo', @@ -421,6 +435,7 @@ def test_option(self): 'required_fieldname': u'some.field_name', 'required_file': six.text_type(repr(__file__)), 'required_integer': 100, + 'required_float': 99.9, 'required_map': 'foo', 'required_match': u'123-45-6789', 'required_optionname': u'some_option_name', @@ -445,17 +460,17 @@ def test_option(self): self.assertEqual(expected[x.name], x.value.pattern) elif isinstance(x.value, TextIOWrapper): self.assertEqual(expected[x.name], "'%s'" % x.value.name) - elif not isinstance(x.value, (bool,) + (six.text_type,) + (six.binary_type,) + tuplewrap(six.integer_types)): + elif not isinstance(x.value, (bool,) + (float,) + (six.text_type,) + (six.binary_type,) + tuplewrap(six.integer_types)): self.assertEqual(expected[x.name], repr(x.value)) 
else: self.assertEqual(expected[x.name], x.value) expected = ( 'foo="f" boolean="f" code="foo == \\"bar\\"" duration="24:59:59" fieldname="some.field_name" ' - 'file=' + json_encode_string(__file__) + ' integer="100" map="foo" match="123-45-6789" ' + 'file=' + json_encode_string(__file__) + ' float="99.9" integer="100" map="foo" match="123-45-6789" ' 'optionname="some_option_name" record="f" regularexpression="\\\\s+" required_boolean="f" ' 'required_code="foo == \\"bar\\"" required_duration="24:59:59" required_fieldname="some.field_name" ' - 'required_file=' + json_encode_string(__file__) + ' required_integer="100" required_map="foo" ' + 'required_file=' + json_encode_string(__file__) + ' required_float="99.9" required_integer="100" required_map="foo" ' 'required_match="123-45-6789" required_optionname="some_option_name" required_regularexpression="\\\\s+" ' 'required_set="bar" set="bar" show_configuration="f"') diff --git a/tests/searchcommands/test_internals_v2.py b/tests/searchcommands/test_internals_v2.py index 34e6b61c4..c221cc53c 100755 --- a/tests/searchcommands/test_internals_v2.py +++ b/tests/searchcommands/test_internals_v2.py @@ -21,10 +21,7 @@ from splunklib.searchcommands import SearchMetric from splunklib import six from splunklib.six.moves import range -try: - from collections import OrderedDict # must be python 2.7 -except ImportError: - from splunklib.ordereddict import OrderedDict +from collections import OrderedDict from collections import namedtuple, deque from splunklib.six import BytesIO as BytesIO from functools import wraps diff --git a/tests/searchcommands/test_search_command.py b/tests/searchcommands/test_search_command.py index 44b76ff79..baa8edb7d 100755 --- a/tests/searchcommands/test_search_command.py +++ b/tests/searchcommands/test_search_command.py @@ -140,7 +140,7 @@ def test_process_scpv1(self): result = BytesIO() self.assertRaises(SystemExit, command.process, argv, ofile=result) - self.assertRegexpMatches(result.getvalue().decode('UTF-8'), expected) + six.assertRegex(self, result.getvalue().decode('UTF-8'), expected) # TestCommand.process should return configuration settings on Getinfo probe @@ -307,7 +307,8 @@ def test_process_scpv1(self): command.process(argv, ifile, ofile=result) except SystemExit as error: self.assertNotEqual(error.code, 0) - self.assertRegexpMatches( + six.assertRegex( + self, result.getvalue().decode('UTF-8'), r'^error_message=RuntimeError at ".+", line \d+ : Testing\r\n\r\n$') except BaseException as error: @@ -331,7 +332,8 @@ def test_process_scpv1(self): except BaseException as error: self.fail('Expected no exception, but caught {}: {}'.format(type(error).__name__, error)) else: - self.assertRegexpMatches( + six.assertRegex( + self, result.getvalue().decode('UTF-8'), r'^\r\n' r'(' @@ -715,7 +717,8 @@ def test_process_scpv2(self): r'logging_configuration=\\\".+\\\" logging_level=\\\"WARNING\\\" record=\\\"f\\\" ' \ r'required_option_1=\\\"value_1\\\" required_option_2=\\\"value_2\\\" show_configuration=\\\"f\\\"\"\]\]\}' - self.assertRegexpMatches( + six.assertRegex( + self, result.getvalue().decode('utf-8'), r'^chunked 1.0,2,0\n' r'\{\}\n' diff --git a/tests/searchcommands/test_validators.py b/tests/searchcommands/test_validators.py index f174ad213..cc524b307 100755 --- a/tests/searchcommands/test_validators.py +++ b/tests/searchcommands/test_validators.py @@ -205,6 +205,63 @@ def test(integer): self.assertRaises(ValueError, validator.__call__, maxsize + 1) return + + def test_float(self): + # Float validator test + + maxsize = 
1.5 + minsize = -1.5 + + validator = validators.Float() + + def test(float_val): + try: + float_val = float(float_val) + except ValueError: + assert False + for s in str(float_val), six.text_type(float_val): + value = validator.__call__(s) + self.assertAlmostEqual(value, float_val) + self.assertIsInstance(value, float) + self.assertEqual(validator.format(float_val), six.text_type(float_val)) + + test(2 * minsize) + test(minsize) + test(-1) + test(0) + test(-1.12345) + test(0.0001) + test(100101.011) + test(2 * maxsize) + test('18.32123') + self.assertRaises(ValueError, validator.__call__, 'Splunk!') + + validator = validators.Float(minimum=0) + self.assertEqual(validator.__call__(0), 0) + self.assertEqual(validator.__call__(1.154), 1.154) + self.assertEqual(validator.__call__(888.51), 888.51) + self.assertEqual(validator.__call__(2 * maxsize), (2 * maxsize)) + self.assertRaises(ValueError, validator.__call__, -1) + self.assertRaises(ValueError, validator.__call__, -1111.00578) + self.assertRaises(ValueError, validator.__call__, -0.005) + + validator = validators.Float(minimum=1, maximum=maxsize) + self.assertEqual(validator.__call__(1), float(1)) + self.assertEqual(validator.__call__(maxsize), maxsize) + self.assertRaises(ValueError, validator.__call__, 0) + self.assertRaises(ValueError, validator.__call__, 0.9999) + self.assertRaises(ValueError, validator.__call__, maxsize + 1) + + validator = validators.Float(minimum=minsize, maximum=maxsize) + self.assertEqual(validator.__call__(minsize), minsize) + self.assertEqual(validator.__call__(0.123456), 0.123456) + self.assertEqual(validator.__call__(0), float(0)) + self.assertEqual(validator.__call__(-0.012), -0.012) + self.assertEqual(validator.__call__(maxsize), maxsize) + self.assertRaises(ValueError, validator.__call__, minsize - 1) + self.assertRaises(ValueError, validator.__call__, maxsize + 1) + + return def test_list(self): diff --git a/tests/test_binding.py b/tests/test_binding.py index 2d3107507..3bce0de1b 100755 --- a/tests/test_binding.py +++ b/tests/test_binding.py @@ -69,7 +69,7 @@ class BindingTestCase(unittest.TestCase): context = None def setUp(self): logging.info("%s", self.__class__.__name__) - self.opts = testlib.parse([], {}, ".splunkrc") + self.opts = testlib.parse([], {}, ".env") self.context = binding.connect(**self.opts.kwargs) logging.debug("Connected to splunkd.") @@ -512,7 +512,7 @@ def test_logout(self): class TestCookieAuthentication(unittest.TestCase): def setUp(self): - self.opts = testlib.parse([], {}, ".splunkrc") + self.opts = testlib.parse([], {}, ".env") self.context = binding.connect(**self.opts.kwargs) # Skip these tests if running below Splunk 6.2, cookie-auth didn't exist before @@ -709,7 +709,7 @@ def test_namespace_fails(self): @pytest.mark.smoke class TestBasicAuthentication(unittest.TestCase): def setUp(self): - self.opts = testlib.parse([], {}, ".splunkrc") + self.opts = testlib.parse([], {}, ".env") opts = self.opts.kwargs.copy() opts["basic"] = True opts["username"] = self.opts.kwargs["username"] diff --git a/tests/test_examples.py b/tests/test_examples.py index b187dbbbd..e2057ffb7 100755 --- a/tests/test_examples.py +++ b/tests/test_examples.py @@ -146,7 +146,7 @@ def test_handlers(self): result = run( "handlers/handlers_certs.py --ca_file=handlers/cacert.bad.pem", stderr=PIPE) - self.assertNotEquals(result, 0) + self.assertNotEqual(result, 0) # The proxy handler example requires that there be a proxy available # to relay requests, so we spin up a local proxy using the proxy @@ -167,7 +167,7 @@ 
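The tests above pin down the `Float` validator's contract; condensed into a sketch:

```python
from splunklib.searchcommands import validators

v = validators.Float(minimum=0, maximum=1.5)

assert v('0.25') == 0.25           # option strings from SPL are coerced to float
assert v(None) is None             # unset options pass through
assert v.format(0.25) == '0.25'    # values round-trip back to text

try:
    v('2.5')
except ValueError as err:
    print(err)   # Expected float in the range [0,1.5], not 2.5
```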
def test_handlers(self): # Run it again without the proxy and it should fail. result = run( "handlers/handler_proxy.py --proxy=localhost:80801", stderr=PIPE) - self.assertNotEquals(result, 0) + self.assertNotEqual(result, 0) def test_index(self): self.check_commands( diff --git a/tests/test_job.py b/tests/test_job.py index dc4c3e4e7..44326086b 100755 --- a/tests/test_job.py +++ b/tests/test_job.py @@ -54,8 +54,8 @@ def test_oneshot_with_garbage_fails(self): def test_oneshot(self): jobs = self.service.jobs - stream = jobs.oneshot("search index=_internal earliest=-1m | head 3") - result = results.ResultsReader(stream) + stream = jobs.oneshot("search index=_internal earliest=-1m | head 3", output_mode='json') + result = results.JSONResultsReader(stream) ds = list(result) self.assertEqual(result.is_preview, False) self.assertTrue(isinstance(ds[0], dict) or \ @@ -69,8 +69,8 @@ def test_export_with_garbage_fails(self): def test_export(self): jobs = self.service.jobs - stream = jobs.export("search index=_internal earliest=-1m | head 3") - result = results.ResultsReader(stream) + stream = jobs.export("search index=_internal earliest=-1m | head 3", output_mode='json') + result = results.JSONResultsReader(stream) ds = list(result) self.assertEqual(result.is_preview, False) self.assertTrue(isinstance(ds[0], dict) or \ @@ -82,7 +82,7 @@ def test_export_docstring_sample(self): import splunklib.client as client import splunklib.results as results service = self.service # cheat - rr = results.ResultsReader(service.jobs.export("search * | head 5")) + rr = results.JSONResultsReader(service.jobs.export("search * | head 5", output_mode='json')) for result in rr: if isinstance(result, results.Message): # Diagnostic messages may be returned in the results @@ -98,7 +98,7 @@ def test_results_docstring_sample(self): job = service.jobs.create("search * | head 5") while not job.is_done(): sleep(0.2) - rr = results.ResultsReader(job.results()) + rr = results.JSONResultsReader(job.results(output_mode='json')) for result in rr: if isinstance(result, results.Message): # Diagnostic messages may be returned in the results @@ -113,7 +113,7 @@ def test_preview_docstring_sample(self): import splunklib.results as results service = self.service # cheat job = service.jobs.create("search * | head 5") - rr = results.ResultsReader(job.preview()) + rr = results.JSONResultsReader(job.preview(output_mode='json')) for result in rr: if isinstance(result, results.Message): # Diagnostic messages may be returned in the results @@ -130,7 +130,7 @@ def test_oneshot_docstring_sample(self): import splunklib.client as client import splunklib.results as results service = self.service # cheat - rr = results.ResultsReader(service.jobs.oneshot("search * | head 5")) + rr = results.JSONResultsReader(service.jobs.oneshot("search * | head 5", output_mode='json')) for result in rr: if isinstance(result, results.Message): # Diagnostic messages may be returned in the results @@ -295,12 +295,12 @@ def test_get_preview_and_events(self): self.assertEventuallyTrue(self.job.is_done) self.assertLessEqual(int(self.job['eventCount']), 3) - preview_stream = self.job.preview() - preview_r = results.ResultsReader(preview_stream) + preview_stream = self.job.preview(output_mode='json') + preview_r = results.JSONResultsReader(preview_stream) self.assertFalse(preview_r.is_preview) - events_stream = self.job.events() - events_r = results.ResultsReader(events_stream) + events_stream = self.job.events(output_mode='json') + events_r = 
results.JSONResultsReader(events_stream) n_events = len([x for x in events_r if isinstance(x, dict)]) n_preview = len([x for x in preview_r if isinstance(x, dict)]) @@ -389,10 +389,7 @@ def test_results_reader(self): N_results = 0 N_messages = 0 for r in reader: - try: - from collections import OrderedDict - except: - from splunklib.ordereddict import OrderedDict + from collections import OrderedDict self.assertTrue(isinstance(r, OrderedDict) or isinstance(r, results.Message)) if isinstance(r, OrderedDict): @@ -411,10 +408,7 @@ def test_results_reader_with_streaming_results(self): N_results = 0 N_messages = 0 for r in reader: - try: - from collections import OrderedDict - except: - from splunklib.ordereddict import OrderedDict + from collections import OrderedDict self.assertTrue(isinstance(r, OrderedDict) or isinstance(r, results.Message)) if isinstance(r, OrderedDict): diff --git a/tests/test_results.py b/tests/test_results.py index 52e290f25..5fdca2b91 100755 --- a/tests/test_results.py +++ b/tests/test_results.py @@ -30,7 +30,7 @@ def test_read_from_empty_result_set(self): job = self.service.jobs.create("search index=_internal_does_not_exist | head 2") while not job.is_done(): sleep(0.5) - self.assertEqual(0, len(list(results.ResultsReader(io.BufferedReader(job.results()))))) + self.assertEqual(0, len(list(results.JSONResultsReader(io.BufferedReader(job.results(output_mode='json')))))) def test_read_normal_results(self): xml_text = """ diff --git a/tests/test_service.py b/tests/test_service.py index df78f54f7..127ce75f5 100755 --- a/tests/test_service.py +++ b/tests/test_service.py @@ -167,10 +167,20 @@ def _create_unauthenticated_service(self): 'scheme': self.opts.kwargs['scheme'] }) + #To check the HEC event endpoint using Endpoint instance + def test_hec_event(self): + import json + service_hec = client.connect(host='localhost', scheme='https', port=8088, + token="11111111-1111-1111-1111-1111111111113") + event_collector_endpoint = client.Endpoint(service_hec, "/services/collector/event") + msg = {"index": "main", "event": "Hello World"} + response = event_collector_endpoint.post("", body=json.dumps(msg)) + self.assertEqual(response.status,200) + class TestCookieAuthentication(unittest.TestCase): def setUp(self): - self.opts = testlib.parse([], {}, ".splunkrc") + self.opts = testlib.parse([], {}, ".env") self.service = client.Service(**self.opts.kwargs) if getattr(unittest.TestCase, 'assertIsNotNone', None) is None: @@ -184,7 +194,7 @@ def test_login_and_store_cookie(self): self.assertEqual(len(self.service.get_cookies()), 0) self.service.login() self.assertIsNotNone(self.service.get_cookies()) - self.assertNotEquals(self.service.get_cookies(), {}) + self.assertNotEqual(self.service.get_cookies(), {}) self.assertEqual(len(self.service.get_cookies()), 1) def test_login_with_cookie(self): diff --git a/tests/test_storage_passwords.py b/tests/test_storage_passwords.py index 59840b794..4f2fee81f 100644 --- a/tests/test_storage_passwords.py +++ b/tests/test_storage_passwords.py @@ -222,6 +222,16 @@ def test_delete(self): self.storage_passwords.delete(username + "/foo", "/myrealm") self.assertEqual(start_count, len(self.storage_passwords)) + def test_spaces_in_username(self): + start_count = len(self.storage_passwords) + realm = testlib.tmpname() + username = " user1 " + + p = self.storage_passwords.create("changeme", username, realm) + self.assertEqual(p.username, username) + + p.delete() + self.assertEqual(start_count, len(self.storage_passwords)) if __name__ == "__main__": try: diff 
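The new `test_hec_event` above doubles as usage documentation for posting to the HTTP Event Collector through a raw `Endpoint`; host, port, and token mirror the test and are illustrative:

```python
import json

import splunklib.client as client

service_hec = client.connect(host='localhost', scheme='https', port=8088,
                             token='11111111-1111-1111-1111-1111111111113')
hec = client.Endpoint(service_hec, '/services/collector/event')

response = hec.post('', body=json.dumps({'index': 'main',
                                         'event': 'Hello World'}))
assert response.status == 200
```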
--git a/tests/testlib.py b/tests/testlib.py index 984b6a94c..61be722ea 100644 --- a/tests/testlib.py +++ b/tests/testlib.py @@ -232,7 +232,7 @@ def restartSplunk(self, timeout=240): @classmethod def setUpClass(cls): - cls.opts = parse([], {}, ".splunkrc") + cls.opts = parse([], {}, ".env") # Before we start, make sure splunk doesn't need a restart. service = client.connect(**cls.opts.kwargs) diff --git a/tox.ini b/tox.ini index d9a001e25..58ee004ca 100644 --- a/tox.ini +++ b/tox.ini @@ -32,6 +32,8 @@ deps = pytest xmlrunner unittest2 unittest-xml-reporting + python-dotenv + deprecation distdir = build commands = diff --git a/utils/__init__.py b/utils/__init__.py index f38027efe..b1bb77a50 100644 --- a/utils/__init__.py +++ b/utils/__init__.py @@ -109,7 +109,8 @@ def dslice(value, *args): def parse(argv, rules=None, config=None, **kwargs): """Parse the given arg vector with the default Splunk command rules.""" parser_ = parser(rules, **kwargs) - if config is not None: parser_.loadrc(config) + if config is not None: + parser_.loadenv(config) return parser_.parse(argv).result def parser(rules=None, **kwargs): diff --git a/utils/cmdopts.py b/utils/cmdopts.py index 5938efd17..b0cbb7328 100644 --- a/utils/cmdopts.py +++ b/utils/cmdopts.py @@ -19,6 +19,7 @@ from os import path from optparse import OptionParser import sys +from dotenv import dotenv_values __all__ = [ "error", "Parser", "cmdline" ] @@ -67,22 +68,24 @@ def init(self, rules): # Remember the dest vars that we see, so that we can merge results self.dests.add(dest) - - # Load command options from given 'config' file. Long form options may omit - # the leading "--", and if so we fix that up here. + + # Load command options from '.env' file def load(self, filepath): argv = [] try: - file = open(filepath) + filedata = dotenv_values(filepath) except: error("Unable to open '%s'" % filepath, 2) - for line in file: - if line.startswith("#"): continue # Skip comment - line = line.strip() - if len(line) == 0: continue # Skip blank line - if not line.startswith("-"): line = "--" + line - argv.append(line) - self.parse(argv) + + # update result kwargs value with .env file data + for key, value in filedata.items(): + value = value.strip() + if len(value) == 0 or value is None: continue # Skip blank value + elif key in self.dests: + self.result['kwargs'][key] = value + else: + raise NameError("No such option --" + key) + return self def loadif(self, filepath): @@ -90,8 +93,9 @@ def loadif(self, filepath): if path.isfile(filepath): self.load(filepath) return self - def loadrc(self, filename): - filepath = path.expanduser(path.join("~", "%s" % filename)) + def loadenv(self, filename): + dir_path = path.dirname(path.realpath(__file__)) + filepath = path.join(dir_path, '..', filename) self.loadif(filepath) return self @@ -114,6 +118,6 @@ def cmdline(argv, rules=None, config=None, **kwargs): """Simplified cmdopts interface that does not default any parsing rules and that does not allow compounding calls to the parser.""" parser = Parser(rules, **kwargs) - if config is not None: parser.loadrc(config) + if config is not None: parser.loadenv(config) return parser.parse(argv).result
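With `loadrc()` replaced by `loadenv()`, tests and examples now resolve options from the repository-root `.env` file via python-dotenv instead of `~/.splunkrc`. A sketch of the new call path, run from the repository root:

```python
from utils import parse

# Reads <repo-root>/.env (see utils/cmdopts.py above); unknown keys raise
# NameError, and blank values are skipped.
opts = parse([], {}, ".env")
print(opts.kwargs["host"], opts.kwargs["port"])   # e.g. localhost 8089
```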