From 3dd5d75d03751bd95e506d9da7f61246efd9a6be Mon Sep 17 00:00:00 2001 From: Lingda Tang Date: Tue, 26 Jul 2016 18:01:39 +0800 Subject: [PATCH 01/58] Identical entity names will cause an infinite loop "RuntimeError: maximum recursion depth exceeded". Give a clear message about which applications have this entity. --- splunklib/client.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/splunklib/client.py b/splunklib/client.py index b5dd96561..393748706 100644 --- a/splunklib/client.py +++ b/splunklib/client.py @@ -933,7 +933,10 @@ def __getitem__(self, key): def _load_atom_entry(self, response): elem = _load_atom(response, XNAME_ENTRY) if isinstance(elem, list): - raise AmbiguousReferenceException("Fetch from server returned multiple entries for name %s." % self.name) + apps = [ele.entry.content.get('eai:appName') for ele in elem] + + raise AmbiguousReferenceException( + "Fetch from server returned multiple entries for name '%s' in apps %s." % (elem[0].entry.title, apps)) else: return elem.entry From d44c2af3bd871d51b049a872f75eb2e2128d8a72 Mon Sep 17 00:00:00 2001 From: Doug Brown Date: Sat, 30 Jul 2016 12:15:22 +0000 Subject: [PATCH 02/58] Fixing missing handlers stanza in examples/searchcommands_template/default/logging.conf --- examples/searchcommands_template/default/logging.conf | 3 +++ 1 file changed, 3 insertions(+) diff --git a/examples/searchcommands_template/default/logging.conf b/examples/searchcommands_template/default/logging.conf index 39afa6518..aeaba74e2 100644 --- a/examples/searchcommands_template/default/logging.conf +++ b/examples/searchcommands_template/default/logging.conf @@ -22,6 +22,9 @@ level = NOTSET ; Default: WARNING handlers = app ; Default: stderr propagate = 0 ; Default: 1 +[handlers] +keys = app, splunklib, stderr + [handler_app] # Select this handler to log events to $SPLUNK_HOME/var/log/splunk/searchcommands_app.log class = logging.handlers.RotatingFileHandler From e934bc8f3f1f6ebb23a559f2be8726768463c334 Mon Sep 17 00:00:00 2001 From: Doug Brown Date: Sat, 30 Jul 2016 12:47:35 +0000 Subject: [PATCH 03/58] Making stream.py work without modification --- examples/searchcommands_template/bin/stream.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/examples/searchcommands_template/bin/stream.py b/examples/searchcommands_template/bin/stream.py index 2ab2b4c1d..9277913a9 100644 --- a/examples/searchcommands_template/bin/stream.py +++ b/examples/searchcommands_template/bin/stream.py @@ -20,6 +20,7 @@ class %(command.title())Command(StreamingCommand): """ def stream(self, events): # Put your event transformation code here - pass + for event in events: + yield event dispatch(%(command.title())Command, sys.argv, sys.stdin, sys.stdout, __name__) From 20a86dabd6077dc6fa7b25fb4b2e8939f62d1235 Mon Sep 17 00:00:00 2001 From: Scott Savarese Date: Tue, 8 Jan 2019 18:02:35 +0000 Subject: [PATCH 04/58] Don't output close tags if you haven't written a start tag --- splunklib/modularinput/event_writer.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/splunklib/modularinput/event_writer.py b/splunklib/modularinput/event_writer.py index fb96c9149..1d7b3f3c9 100755 --- a/splunklib/modularinput/event_writer.py +++ b/splunklib/modularinput/event_writer.py @@ -82,4 +82,5 @@ def write_xml_document(self, document): def close(self): """Write the closing tag to make this XML well formed.""" - self._out.write(b"") + if self.header_written: + self._out.write(b"") From e7a631a2b542d34c025260eea37496c7b854164f Mon Sep 17 
00:00:00 2001 From: Bill Murrin Date: Tue, 23 Jul 2019 22:55:06 -0400 Subject: [PATCH 05/58] properly add parameters to request based on the method of the request --- splunklib/binding.py | 22 ++++++++++++++++++---- 1 file changed, 18 insertions(+), 4 deletions(-) diff --git a/splunklib/binding.py b/splunklib/binding.py index 3fe7c8495..ae732cc90 100644 --- a/splunklib/binding.py +++ b/splunklib/binding.py @@ -754,7 +754,7 @@ def post(self, path_segment, owner=None, app=None, sharing=None, headers=None, * @_authentication @_log_duration - def request(self, path_segment, method="GET", headers=None, body="", + def request(self, path_segment, method="GET", headers=None, body={}, owner=None, app=None, sharing=None): """Issues an arbitrary HTTP request to the REST path segment. @@ -814,13 +814,27 @@ def request(self, path_segment, method="GET", headers=None, body="", path = self.authority \ + self._abspath(path_segment, owner=owner, app=app, sharing=sharing) + all_headers = headers + self.additional_headers + self._auth_headers logging.debug("%s request to %s (headers: %s, body: %s)", method, path, str(all_headers), repr(body)) - response = self.http.request(path, + + if body: + body = _encode(**body) + if method == "GET": + path = path + UrlEncoded('?' + body, skip_encode=True) + response = self.http.request(path, {'method': method, - 'headers': all_headers, - 'body': body}) + 'headers': all_headers}) + else: + response = self.http.request(path, + {'method': method, + 'headers': all_headers, + 'body': body}) + else: + response = self.http.request(path, + {'method': method, + 'headers': all_headers}) return response def login(self): From 19d59c6c63149d68c4893dc7d10f87e5d88a1fb4 Mon Sep 17 00:00:00 2001 From: Bill Murrin Date: Fri, 9 Aug 2019 11:28:39 -0700 Subject: [PATCH 06/58] reduced number of api calls --- splunklib/binding.py | 21 +++++++++++---------- 1 file changed, 11 insertions(+), 10 deletions(-) diff --git a/splunklib/binding.py b/splunklib/binding.py index ae732cc90..024b4c190 100644 --- a/splunklib/binding.py +++ b/splunklib/binding.py @@ -821,20 +821,21 @@ def request(self, path_segment, method="GET", headers=None, body={}, if body: body = _encode(**body) + if method == "GET": path = path + UrlEncoded('?' 
+ body, skip_encode=True) - response = self.http.request(path, - {'method': method, - 'headers': all_headers}) + message = {'method': method, + 'headers': all_headers} else: - response = self.http.request(path, - {'method': method, - 'headers': all_headers, - 'body': body}) + message = {'method': method, + 'headers': all_headers, + 'body': body} else: - response = self.http.request(path, - {'method': method, - 'headers': all_headers}) + message = {'method': method, + 'headers': all_headers} + + response = self.http.request(path, message) + return response def login(self): From 6a0910cb44ce892dc6346a8e75501a9a8326b4fc Mon Sep 17 00:00:00 2001 From: Riccardo Magliocchetti Date: Tue, 16 Jun 2020 11:47:59 +0200 Subject: [PATCH 07/58] tests: stop using assertNotEquals --- tests/test_examples.py | 4 ++-- tests/test_service.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/test_examples.py b/tests/test_examples.py index 3b63fc6da..92aed9ca7 100755 --- a/tests/test_examples.py +++ b/tests/test_examples.py @@ -150,7 +150,7 @@ def test_handlers(self): result = run( "handlers/handlers_certs.py --ca_file=handlers/cacert.bad.pem", stderr=PIPE) - self.assertNotEquals(result, 0) + self.assertNotEqual(result, 0) # The proxy handler example requires that there be a proxy available # to relay requests, so we spin up a local proxy using the proxy @@ -171,7 +171,7 @@ def test_handlers(self): # Run it again without the proxy and it should fail. result = run( "handlers/handler_proxy.py --proxy=localhost:80801", stderr=PIPE) - self.assertNotEquals(result, 0) + self.assertNotEqual(result, 0) def test_index(self): self.check_commands( diff --git a/tests/test_service.py b/tests/test_service.py index df78f54f7..2d239afb1 100755 --- a/tests/test_service.py +++ b/tests/test_service.py @@ -184,7 +184,7 @@ def test_login_and_store_cookie(self): self.assertEqual(len(self.service.get_cookies()), 0) self.service.login() self.assertIsNotNone(self.service.get_cookies()) - self.assertNotEquals(self.service.get_cookies(), {}) + self.assertNotEqual(self.service.get_cookies(), {}) self.assertEqual(len(self.service.get_cookies()), 1) def test_login_with_cookie(self): From 99d28bcd5deeb3bf1957f8aa109efba84aa43c4b Mon Sep 17 00:00:00 2001 From: Riccardo Magliocchetti Date: Tue, 16 Jun 2020 11:50:17 +0200 Subject: [PATCH 08/58] searchcommands: don't use deprecated BaseException.message --- splunklib/searchcommands/validators.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/splunklib/searchcommands/validators.py b/splunklib/searchcommands/validators.py index 0278fbd59..62fdf6235 100644 --- a/splunklib/searchcommands/validators.py +++ b/splunklib/searchcommands/validators.py @@ -95,10 +95,7 @@ def __call__(self, value): try: return Code.object(compile(value, 'string', self._mode), six.text_type(value)) except (SyntaxError, TypeError) as error: - if six.PY2: - message = error.message - else: - message = str(error) + message = str(error) six.raise_from(ValueError(message), error) From 5aaef2970f99c09debf2006db5ce909bc231665f Mon Sep 17 00:00:00 2001 From: Riccardo Magliocchetti Date: Tue, 16 Jun 2020 11:57:18 +0200 Subject: [PATCH 09/58] tests: use six.assertRegex instead of deprecated assertRegexpMatches --- tests/searchcommands/test_search_command.py | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/tests/searchcommands/test_search_command.py b/tests/searchcommands/test_search_command.py index 1ebb29fb9..38c49bd54 100755 --- 
a/tests/searchcommands/test_search_command.py +++ b/tests/searchcommands/test_search_command.py @@ -127,7 +127,7 @@ def test_process_scpv1(self): result = BytesIO() self.assertRaises(SystemExit, command.process, argv, ofile=result) - self.assertRegexpMatches(result.getvalue().decode('UTF-8'), expected) + six.assertRegex(self, result.getvalue().decode('UTF-8'), expected) # TestCommand.process should return configuration settings on Getinfo probe @@ -294,7 +294,8 @@ def test_process_scpv1(self): command.process(argv, ifile, ofile=result) except SystemExit as error: self.assertNotEqual(error.code, 0) - self.assertRegexpMatches( + six.assertRegex( + self, result.getvalue().decode('UTF-8'), r'^error_message=RuntimeError at ".+", line \d+ : Testing\r\n\r\n$') except BaseException as error: @@ -318,7 +319,8 @@ def test_process_scpv1(self): except BaseException as error: self.fail('Expected no exception, but caught {}: {}'.format(type(error).__name__, error)) else: - self.assertRegexpMatches( + six.assertRegex( + self, result.getvalue().decode('UTF-8'), r'^\r\n' r'(' @@ -705,7 +707,8 @@ def test_process_scpv2(self): r'logging_configuration=\\\".+\\\" logging_level=\\\"WARNING\\\" record=\\\"f\\\" ' \ r'required_option_1=\\\"value_1\\\" required_option_2=\\\"value_2\\\" show_configuration=\\\"f\\\"\"\]\]\}' - self.assertRegexpMatches( + six.assertRegex( + self, result.getvalue().decode('utf-8'), r'^chunked 1.0,2,0\n' r'\{\}\n' From 3687786dadba031d698d7b2a518c7566a11edcae Mon Sep 17 00:00:00 2001 From: Vatsal Jagani Date: Mon, 14 Jun 2021 22:05:32 +0530 Subject: [PATCH 10/58] Added Float parameter validator for custom search commands. --- docs/searchcommands.rst | 4 ++ splunklib/searchcommands/validators.py | 44 +++++++++++++++++- tests/searchcommands/test_decorators.py | 18 +++++++- tests/searchcommands/test_validators.py | 59 +++++++++++++++++++++++++ 4 files changed, 122 insertions(+), 3 deletions(-) diff --git a/docs/searchcommands.rst b/docs/searchcommands.rst index a620fbb84..a2923f7b7 100644 --- a/docs/searchcommands.rst +++ b/docs/searchcommands.rst @@ -88,6 +88,10 @@ splunklib.searchcommands :members: :inherited-members: +.. autoclass:: Float + :members: + :inherited-members: + .. autoclass:: RegularExpression :members: :inherited-members: diff --git a/splunklib/searchcommands/validators.py b/splunklib/searchcommands/validators.py index 0278fbd59..df1872dbe 100644 --- a/splunklib/searchcommands/validators.py +++ b/splunklib/searchcommands/validators.py @@ -204,6 +204,48 @@ def format(self, value): return None if value is None else six.text_type(int(value)) +class Float(Validator): + """ Validates float option values. 
+ + """ + def __init__(self, minimum=None, maximum=None): + if minimum is not None and maximum is not None: + def check_range(value): + if not (minimum <= value <= maximum): + raise ValueError('Expected float in the range [{0},{1}], not {2}'.format(minimum, maximum, value)) + return + elif minimum is not None: + def check_range(value): + if value < minimum: + raise ValueError('Expected float in the range [{0},+∞], not {1}'.format(minimum, value)) + return + elif maximum is not None: + def check_range(value): + if value > maximum: + raise ValueError('Expected float in the range [-∞,{0}], not {1}'.format(maximum, value)) + return + else: + def check_range(value): + return + + self.check_range = check_range + return + + def __call__(self, value): + if value is None: + return None + try: + value = float(value) + except ValueError: + raise ValueError('Expected float value, not {}'.format(json_encode_string(value))) + + self.check_range(value) + return value + + def format(self, value): + return None if value is None else six.text_type(float(value)) + + class Duration(Validator): """ Validates duration option values. @@ -391,4 +433,4 @@ def format(self, value): return self.__call__(value) -__all__ = ['Boolean', 'Code', 'Duration', 'File', 'Integer', 'List', 'Map', 'RegularExpression', 'Set'] +__all__ = ['Boolean', 'Code', 'Duration', 'File', 'Integer', 'Float', 'List', 'Map', 'RegularExpression', 'Set'] diff --git a/tests/searchcommands/test_decorators.py b/tests/searchcommands/test_decorators.py index 84900d416..082ab184d 100755 --- a/tests/searchcommands/test_decorators.py +++ b/tests/searchcommands/test_decorators.py @@ -121,6 +121,18 @@ class TestSearchCommand(SearchCommand): **Syntax:** **integer=**** **Description:** An integer value''', require=True, validate=validators.Integer()) + + float = Option( + doc=''' + **Syntax:** **float=**** + **Description:** An float value''', + validate=validators.Float()) + + required_float = Option( + doc=''' + **Syntax:** **float=**** + **Description:** An float value''', + require=True, validate=validators.Float()) map = Option( doc=''' @@ -408,6 +420,7 @@ def test_option(self): 'fieldname': u'some.field_name', 'file': six.text_type(repr(__file__)), 'integer': 100, + 'float': 99.9, 'logging_configuration': environment.logging_configuration, 'logging_level': u'WARNING', 'map': 'foo', @@ -421,6 +434,7 @@ def test_option(self): 'required_fieldname': u'some.field_name', 'required_file': six.text_type(repr(__file__)), 'required_integer': 100, + 'required_float': 99.9, 'required_map': 'foo', 'required_match': u'123-45-6789', 'required_optionname': u'some_option_name', @@ -452,10 +466,10 @@ def test_option(self): expected = ( 'foo="f" boolean="f" code="foo == \\"bar\\"" duration="24:59:59" fieldname="some.field_name" ' - 'file=' + json_encode_string(__file__) + ' integer="100" map="foo" match="123-45-6789" ' + 'file=' + json_encode_string(__file__) + ' integer="100" float="99.9" map="foo" match="123-45-6789" ' 'optionname="some_option_name" record="f" regularexpression="\\\\s+" required_boolean="f" ' 'required_code="foo == \\"bar\\"" required_duration="24:59:59" required_fieldname="some.field_name" ' - 'required_file=' + json_encode_string(__file__) + ' required_integer="100" required_map="foo" ' + 'required_file=' + json_encode_string(__file__) + ' required_integer="100" required_float="99.9" required_map="foo" ' 'required_match="123-45-6789" required_optionname="some_option_name" required_regularexpression="\\\\s+" ' 'required_set="bar" set="bar" 
show_configuration="f"') diff --git a/tests/searchcommands/test_validators.py b/tests/searchcommands/test_validators.py index f174ad213..8532fa423 100755 --- a/tests/searchcommands/test_validators.py +++ b/tests/searchcommands/test_validators.py @@ -205,6 +205,65 @@ def test(integer): self.assertRaises(ValueError, validator.__call__, maxsize + 1) return + + def test_float(self): + # Float validator test + + maxsize = sys.maxsize + minsize = -(sys.maxsize - 1) + + validator = validators.Float() + + def test(float_val): + try: + float_val = float(float_val) + except ValueError: + assert False + for s in str(float_val), six.text_type(float_val): + value = validator.__call__(s) + self.assertEqual(value, float_val) + self.assertIsInstance(value, float) + self.assertEqual(validator.format(float_val), six.text_type(float_val)) + + test(2 * minsize) + test(minsize) + test(-1) + test(0) + test(-1.12345) + test(0.0001) + test(100101.011) + test(2 * maxsize) + test('18.32123') + self.assertRaises(ValueError, validator.__call__, 'Splunk!') + + validator = validators.Float(minimum=0) + self.assertEqual(validator.__call__(0), 0) + self.assertEqual(validator.__call__(1.154), 1.154) + self.assertEqual(validator.__call__(888.51), 888.51) + self.assertEqual(validator.__call__(2 * maxsize), float(2 * maxsize)) + self.assertRaises(ValueError, validator.__call__, -1) + self.assertRaises(ValueError, validator.__call__, -1111.00578) + self.assertRaises(ValueError, validator.__call__, -0.005) + + validator = validators.Float(minimum=1, maximum=maxsize) + self.assertEqual(validator.__call__(1), float(1)) + self.assertEqual(validator.__call__(100.111), 100.111) + self.assertEqual(validator.__call__(9999.0), 9999.0) + self.assertEqual(validator.__call__(maxsize), float(maxsize)) + self.assertRaises(ValueError, validator.__call__, 0) + self.assertRaises(ValueError, validator.__call__, 0.9999) + self.assertRaises(ValueError, validator.__call__, -199) + self.assertRaises(ValueError, validator.__call__, maxsize + 1) + + validator = validators.Float(minimum=-1, maximum=1) + self.assertEqual(validator.__call__(0), float(0)) + self.assertEqual(validator.__call__(0.123456), 0.123456) + self.assertEqual(validator.__call__(-0.012), -0.012) + self.assertRaises(ValueError, validator.__call__, -1.1) + self.assertRaises(ValueError, validator.__call__, 100.123456) + self.assertRaises(ValueError, validator.__call__, maxsize + 1) + + return def test_list(self): From 4b3aef0a6fc005fb21e20c83d4133b6618079320 Mon Sep 17 00:00:00 2001 From: Roey <33815022+SaltyHash123@users.noreply.github.com> Date: Tue, 6 Jul 2021 14:47:43 +0200 Subject: [PATCH 11/58] Update filter.py Seems the import was wrong --- examples/searchcommands_template/bin/filter.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/examples/searchcommands_template/bin/filter.py b/examples/searchcommands_template/bin/filter.py index 194118af0..153c76a69 100644 --- a/examples/searchcommands_template/bin/filter.py +++ b/examples/searchcommands_template/bin/filter.py @@ -5,7 +5,7 @@ sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..", "lib")) from splunklib.searchcommands import \ - dispatch, StreamingCommand, Configuration, Option, validators + dispatch, EventingCommand, Configuration, Option, validators @Configuration() From 8cea66fe10098f6441ffc472a281581503192f42 Mon Sep 17 00:00:00 2001 From: Charlie Huggard Date: Sun, 29 Aug 2021 17:43:58 -0500 Subject: [PATCH 12/58] Break out search argument parsing This should be a passive change, while 
providing a useful extension point. --- splunklib/searchcommands/search_command.py | 15 ++++++++++++++- 1 file changed, 14 insertions(+), 1 deletion(-) diff --git a/splunklib/searchcommands/search_command.py b/splunklib/searchcommands/search_command.py index 7383a5efa..ae31a758c 100644 --- a/splunklib/searchcommands/search_command.py +++ b/splunklib/searchcommands/search_command.py @@ -634,6 +634,19 @@ def _process_protocol_v1(self, argv, ifile, ofile): debug('%s.process finished under protocol_version=1', class_name) + def _protocol_v2_option_parser(self, arg): + """ Determines if an argument is an Option/Value pair, or just a Positional Argument. + Method so different search commands can handle parsing of arguments differently. + + :param arg: A single argument provided to the command from SPL + :type arg: str + + :return: [OptionName, OptionValue] OR [PositionalArgument] + :rtype: List[str] + + """ + return arg.split('=', 1) + def _process_protocol_v2(self, argv, ifile, ofile): """ Processes records on the `input stream optionally writing records to the output stream. @@ -704,7 +717,7 @@ def _process_protocol_v2(self, argv, ifile, ofile): if args and type(args) == list: for arg in args: - result = arg.split('=', 1) + result = self._protocol_v2_option_parser(arg) if len(result) == 1: self.fieldnames.append(str(result[0])) else: From d9a6faedd647881b4f866b711b5d2b8a4fdba878 Mon Sep 17 00:00:00 2001 From: Riccardo Magliocchetti Date: Tue, 16 Jun 2020 11:39:03 +0200 Subject: [PATCH 13/58] client: use six.string_types instead of basestring Which is gone on modern python. --- splunklib/client.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/splunklib/client.py b/splunklib/client.py index 39b1dcc34..baa212298 100644 --- a/splunklib/client.py +++ b/splunklib/client.py @@ -3591,7 +3591,7 @@ def update_index(self, name, value): :return: Result of POST request """ kwargs = {} - kwargs['index.' + name] = value if isinstance(value, basestring) else json.dumps(value) + kwargs['index.' + name] = value if isinstance(value, six.string_types) else json.dumps(value) return self.post(**kwargs) def update_field(self, name, value): From 0c3b64add2e2de4e8b869a18a31807a8350e0590 Mon Sep 17 00:00:00 2001 From: Riccardo Magliocchetti Date: Tue, 16 Jun 2020 11:23:02 +0200 Subject: [PATCH 14/58] client: remove outdated comment in Index.submit Code is not using request since 8 years :) --- splunklib/client.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/splunklib/client.py b/splunklib/client.py index 39b1dcc34..286719c5e 100644 --- a/splunklib/client.py +++ b/splunklib/client.py @@ -2086,10 +2086,6 @@ def submit(self, event, host=None, source=None, sourcetype=None): if source is not None: args['source'] = source if sourcetype is not None: args['sourcetype'] = sourcetype - # The reason we use service.request directly rather than POST - # is that we are not sending a POST request encoded using - # x-www-form-urlencoded (as we do not have a key=value body), - # because we aren't really sending a "form". 
self.service.post(PATH_RECEIVERS_SIMPLE, body=event, **args) return self From 526e5d7dc86e29d27b143a0d84a18d1726c86ef8 Mon Sep 17 00:00:00 2001 From: akaila-splunk Date: Thu, 18 Nov 2021 14:23:30 +0530 Subject: [PATCH 15/58] Removed strip() method from data.py file - removed strip() method in load_value() method, so now the response value retains leading and trailing spaces when printing data in the console --- splunklib/data.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/splunklib/data.py b/splunklib/data.py index dedbb3310..f9ffb8692 100644 --- a/splunklib/data.py +++ b/splunklib/data.py @@ -161,8 +161,8 @@ def load_value(element, nametable=None): text = element.text if text is None: return None - text = text.strip() - if len(text) == 0: + + if len(text.strip()) == 0: return None return text From ec76d1e96a342f83618ab7d0cea5d749db1bc790 Mon Sep 17 00:00:00 2001 From: akaila-splunk Date: Tue, 23 Nov 2021 11:33:11 +0530 Subject: [PATCH 16/58] Update test_storage_passwords.py - added test case to check spaces in username --- tests/test_storage_passwords.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/tests/test_storage_passwords.py b/tests/test_storage_passwords.py index 59840b794..4f2fee81f 100644 --- a/tests/test_storage_passwords.py +++ b/tests/test_storage_passwords.py @@ -222,6 +222,16 @@ def test_delete(self): self.storage_passwords.delete(username + "/foo", "/myrealm") self.assertEqual(start_count, len(self.storage_passwords)) + def test_spaces_in_username(self): + start_count = len(self.storage_passwords) + realm = testlib.tmpname() + username = " user1 " + + p = self.storage_passwords.create("changeme", username, realm) + self.assertEqual(p.username, username) + + p.delete() + self.assertEqual(start_count, len(self.storage_passwords)) if __name__ == "__main__": try: From 07f04fc5e9b57b51d02826a654c27d814495323a Mon Sep 17 00:00:00 2001 From: Abhi Shah Date: Thu, 25 Nov 2021 00:11:31 +0530 Subject: [PATCH 17/58] Update client.py --- splunklib/client.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/splunklib/client.py b/splunklib/client.py index 21d27a6e0..0f4d884b5 100644 --- a/splunklib/client.py +++ b/splunklib/client.py @@ -724,7 +724,7 @@ class Endpoint(object): """ def __init__(self, service, path): self.service = service - self.path = path if path.endswith('/') else path + '/' + self.path = path #if path.endswith('/') else path + '/' def get(self, path_segment="", owner=None, app=None, sharing=None, **query): """Performs a GET operation on the path segment relative to this endpoint. @@ -782,6 +782,8 @@ def get(self, path_segment="", owner=None, app=None, sharing=None, **query): if path_segment.startswith('/'): path = path_segment else: + if not self.path.endswith('/'): + self.path = self.path if self.path != "" and path_segment.startswith('/') else self.path + '/' path = self.service._abspath(self.path + path_segment, owner=owner, app=app, sharing=sharing) # ^-- This was "%s%s" % (self.path, path_segment). 
@@ -842,6 +844,8 @@ def post(self, path_segment="", owner=None, app=None, sharing=None, **query): if path_segment.startswith('/'): path = path_segment else: + if not self.path.endswith('/'): + self.path = self.path if self.path != "" and path_segment.startswith('/') else self.path + '/' path = self.service._abspath(self.path + path_segment, owner=owner, app=app, sharing=sharing) return self.service.post(path, owner=owner, app=app, sharing=sharing, **query) From b2d7bdee0ebc561d92eba71873306a9220600df7 Mon Sep 17 00:00:00 2001 From: Abhi Shah Date: Fri, 26 Nov 2021 21:20:47 +0530 Subject: [PATCH 18/58] Update client.py --- splunklib/client.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/splunklib/client.py b/splunklib/client.py index 0f4d884b5..84e6d0709 100644 --- a/splunklib/client.py +++ b/splunklib/client.py @@ -782,8 +782,8 @@ def get(self, path_segment="", owner=None, app=None, sharing=None, **query): if path_segment.startswith('/'): path = path_segment else: - if not self.path.endswith('/'): - self.path = self.path if self.path != "" and path_segment.startswith('/') else self.path + '/' + if not self.path.endswith('/') and path_segment != "": + self.path = self.path if path_segment.startswith('/') else self.path + '/' path = self.service._abspath(self.path + path_segment, owner=owner, app=app, sharing=sharing) # ^-- This was "%s%s" % (self.path, path_segment). @@ -844,9 +844,10 @@ def post(self, path_segment="", owner=None, app=None, sharing=None, **query): if path_segment.startswith('/'): path = path_segment else: - if not self.path.endswith('/'): - self.path = self.path if self.path != "" and path_segment.startswith('/') else self.path + '/' + if not self.path.endswith('/') and path_segment != "": + self.path = self.path if path_segment.startswith('/') else self.path + '/' path = self.service._abspath(self.path + path_segment, owner=owner, app=app, sharing=sharing) + print(path) return self.service.post(path, owner=owner, app=app, sharing=sharing, **query) From a44a8f1d12cd258f6fd1cf6516e50ae733762244 Mon Sep 17 00:00:00 2001 From: Abhi Shah Date: Fri, 26 Nov 2021 21:21:25 +0530 Subject: [PATCH 19/58] Update client.py --- splunklib/client.py | 1 - 1 file changed, 1 deletion(-) diff --git a/splunklib/client.py b/splunklib/client.py index 84e6d0709..3ed8d7295 100644 --- a/splunklib/client.py +++ b/splunklib/client.py @@ -847,7 +847,6 @@ def post(self, path_segment="", owner=None, app=None, sharing=None, **query): if not self.path.endswith('/') and path_segment != "": self.path = self.path if path_segment.startswith('/') else self.path + '/' path = self.service._abspath(self.path + path_segment, owner=owner, app=app, sharing=sharing) - print(path) return self.service.post(path, owner=owner, app=app, sharing=sharing, **query) From 2adde31aaf755274d55afa78ee2a6722a85e4646 Mon Sep 17 00:00:00 2001 From: Abhi Shah Date: Wed, 1 Dec 2021 17:52:02 +0530 Subject: [PATCH 20/58] Test case for HEC event --- splunklib/client.py | 4 ++-- tests/test_service.py | 11 +++++++++++ 2 files changed, 13 insertions(+), 2 deletions(-) diff --git a/splunklib/client.py b/splunklib/client.py index 3ed8d7295..0dbdb1689 100644 --- a/splunklib/client.py +++ b/splunklib/client.py @@ -783,7 +783,7 @@ def get(self, path_segment="", owner=None, app=None, sharing=None, **query): path = path_segment else: if not self.path.endswith('/') and path_segment != "": - self.path = self.path if path_segment.startswith('/') else self.path + '/' + self.path = self.path if path_segment != "" else 
self.path + '/' path = self.service._abspath(self.path + path_segment, owner=owner, app=app, sharing=sharing) # ^-- This was "%s%s" % (self.path, path_segment). @@ -845,7 +845,7 @@ def post(self, path_segment="", owner=None, app=None, sharing=None, **query): path = path_segment else: if not self.path.endswith('/') and path_segment != "": - self.path = self.path if path_segment.startswith('/') else self.path + '/' + self.path = self.path if path_segment != "" else self.path + '/' path = self.service._abspath(self.path + path_segment, owner=owner, app=app, sharing=sharing) return self.service.post(path, owner=owner, app=app, sharing=sharing, **query) diff --git a/tests/test_service.py b/tests/test_service.py index df78f54f7..406c47a3d 100755 --- a/tests/test_service.py +++ b/tests/test_service.py @@ -167,6 +167,17 @@ def _create_unauthenticated_service(self): 'scheme': self.opts.kwargs['scheme'] }) + #To check the HEC event endpoint using Endpoint instance + def test_hec_event(self): + import json + service_hec = client.connect(host='localhost', scheme='https', port=8088, + token="11111111-1111-1111-1111-1111111111113") + event_collector_endpoint = client.Endpoint(service_hec, "/services/collector/event") + msg = {"index": "main", "event": "Hello World"} + response = event_collector_endpoint.post("", body=json.dumps(msg)) + body = response.body.read() + self.assertEqual(body.code, 200) + class TestCookieAuthentication(unittest.TestCase): def setUp(self): From ea8dae0e49bedf5fb516c8e589e82a891cbebc74 Mon Sep 17 00:00:00 2001 From: Abhi Shah Date: Wed, 1 Dec 2021 18:14:57 +0530 Subject: [PATCH 21/58] Update test_service.py --- tests/test_service.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/tests/test_service.py b/tests/test_service.py index 406c47a3d..d1fa89118 100755 --- a/tests/test_service.py +++ b/tests/test_service.py @@ -175,8 +175,7 @@ def test_hec_event(self): event_collector_endpoint = client.Endpoint(service_hec, "/services/collector/event") msg = {"index": "main", "event": "Hello World"} response = event_collector_endpoint.post("", body=json.dumps(msg)) - body = response.body.read() - self.assertEqual(body.code, 200) + self.assertEqual(response.status,200) class TestCookieAuthentication(unittest.TestCase): From dbfd038f324390acaa5a2d3511f5f9af254983aa Mon Sep 17 00:00:00 2001 From: Abhi Shah Date: Wed, 1 Dec 2021 18:28:14 +0530 Subject: [PATCH 22/58] Update client.py --- splunklib/client.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/splunklib/client.py b/splunklib/client.py index 0dbdb1689..63b8d637a 100644 --- a/splunklib/client.py +++ b/splunklib/client.py @@ -783,7 +783,7 @@ def get(self, path_segment="", owner=None, app=None, sharing=None, **query): path = path_segment else: if not self.path.endswith('/') and path_segment != "": - self.path = self.path if path_segment != "" else self.path + '/' + self.path = self.path + '/' path = self.service._abspath(self.path + path_segment, owner=owner, app=app, sharing=sharing) # ^-- This was "%s%s" % (self.path, path_segment). 
@@ -845,7 +845,7 @@ def post(self, path_segment="", owner=None, app=None, sharing=None, **query): path = path_segment else: if not self.path.endswith('/') and path_segment != "": - self.path = self.path if path_segment != "" else self.path + '/' + self.path = self.path + '/' path = self.service._abspath(self.path + path_segment, owner=owner, app=app, sharing=sharing) return self.service.post(path, owner=owner, app=app, sharing=sharing, **query) From f054c827c7b8c63d8040b48982e1a3c3a1c6e183 Mon Sep 17 00:00:00 2001 From: Abhi Shah Date: Wed, 1 Dec 2021 18:49:09 +0530 Subject: [PATCH 23/58] Update client.py removed commented code --- splunklib/client.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/splunklib/client.py b/splunklib/client.py index 63b8d637a..860f0c859 100644 --- a/splunklib/client.py +++ b/splunklib/client.py @@ -724,7 +724,7 @@ class Endpoint(object): """ def __init__(self, service, path): self.service = service - self.path = path #if path.endswith('/') else path + '/' + self.path = path def get(self, path_segment="", owner=None, app=None, sharing=None, **query): """Performs a GET operation on the path segment relative to this endpoint. From 0fbd782fa99cd1f4a42945412b9cbc88a6c6b604 Mon Sep 17 00:00:00 2001 From: browse Date: Thu, 2 Dec 2021 14:49:16 +0800 Subject: [PATCH 24/58] Update README.md --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 1436ad240..c735e9854 100644 --- a/README.md +++ b/README.md @@ -75,7 +75,7 @@ The SDK command-line examples require a common set of arguments that specify the #### Using username/password ```python import splunklib.client as client - service = client.connect(host=, username=, password=, autoLogin=True) + service = client.connect(host=, username=, password=, autologin=True) ``` #### Using bearer token From 28e93b5a0df78ac1cf2dfa202359f8c8194cce8c Mon Sep 17 00:00:00 2001 From: vmalaviya Date: Thu, 2 Dec 2021 14:46:31 +0530 Subject: [PATCH 25/58] ordereddict and all its reference removed --- examples/twitted/twitted/bin/tophashtags.py | 5 +- splunklib/ordereddict.py | 128 -------------------- splunklib/results.py | 5 +- splunklib/searchcommands/decorators.py | 5 +- splunklib/searchcommands/internals.py | 5 +- splunklib/searchcommands/search_command.py | 5 +- tests/searchcommands/test_internals_v2.py | 5 +- tests/test_job.py | 10 +- 8 files changed, 8 insertions(+), 160 deletions(-) delete mode 100644 splunklib/ordereddict.py diff --git a/examples/twitted/twitted/bin/tophashtags.py b/examples/twitted/twitted/bin/tophashtags.py index 6df5765f1..499f9f389 100755 --- a/examples/twitted/twitted/bin/tophashtags.py +++ b/examples/twitted/twitted/bin/tophashtags.py @@ -19,10 +19,7 @@ import os sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir, os.pardir, os.pardir))) -try: - from collections import OrderedDict # must be python 2.7 -except ImportError: - from splunklib.ordereddict import OrderedDict +from collections import OrderedDict from splunklib import six from splunklib.six.moves import zip diff --git a/splunklib/ordereddict.py b/splunklib/ordereddict.py deleted file mode 100644 index 9495566cf..000000000 --- a/splunklib/ordereddict.py +++ /dev/null @@ -1,128 +0,0 @@ -# Copyright (c) 2009 Raymond Hettinger -# -# Permission is hereby granted, free of charge, to any person -# obtaining a copy of this software and associated documentation files -# (the "Software"), to deal in the Software without restriction, -# including 
without limitation the rights to use, copy, modify, merge, -# publish, distribute, sublicense, and/or sell copies of the Software, -# and to permit persons to whom the Software is furnished to do so, -# subject to the following conditions: -# -# The above copyright notice and this permission notice shall be -# included in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES -# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT -# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, -# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR -# OTHER DEALINGS IN THE SOFTWARE. - -from UserDict import DictMixin - - -class OrderedDict(dict, DictMixin): - - def __init__(self, *args, **kwds): - if len(args) > 1: - raise TypeError('expected at most 1 arguments, got %d' % len(args)) - try: - self.__end - except AttributeError: - self.clear() - self.update(*args, **kwds) - - def clear(self): - self.__end = end = [] - end += [None, end, end] # sentinel node for doubly linked list - self.__map = {} # key --> [key, prev, next] - dict.clear(self) - - def __setitem__(self, key, value): - if key not in self: - end = self.__end - curr = end[1] - curr[2] = end[1] = self.__map[key] = [key, curr, end] - dict.__setitem__(self, key, value) - - def __delitem__(self, key): - dict.__delitem__(self, key) - key, prev, next = self.__map.pop(key) - prev[2] = next - next[1] = prev - - def __iter__(self): - end = self.__end - curr = end[2] - while curr is not end: - yield curr[0] - curr = curr[2] - - def __reversed__(self): - end = self.__end - curr = end[1] - while curr is not end: - yield curr[0] - curr = curr[1] - - def popitem(self, last=True): - if not self: - raise KeyError('dictionary is empty') - if last: - key = reversed(self).next() - else: - key = iter(self).next() - value = self.pop(key) - return key, value - - def __reduce__(self): - items = [[k, self[k]] for k in self] - tmp = self.__map, self.__end - del self.__map, self.__end - inst_dict = vars(self).copy() - self.__map, self.__end = tmp - if inst_dict: - return (self.__class__, (items,), inst_dict) - return self.__class__, (items,) - - def keys(self): - return list(self) - - setdefault = DictMixin.setdefault - update = DictMixin.update - pop = DictMixin.pop - values = DictMixin.values - items = DictMixin.items - iterkeys = DictMixin.iterkeys - itervalues = DictMixin.itervalues - iteritems = DictMixin.iteritems - - def __repr__(self): - if not self: - return '%s()' % (self.__class__.__name__,) - return '%s(%r)' % (self.__class__.__name__, self.items()) - - def copy(self): - return self.__class__(self) - - @classmethod - def fromkeys(cls, iterable, value=None): - d = cls() - for key in iterable: - d[key] = value - return d - - def __eq__(self, other): - if isinstance(other, OrderedDict): - if len(self) != len(other): - return False - for p, q in zip(self.items(), other.items()): - if p != q: - return False - return True - return dict.__eq__(self, other) - - def __ne__(self, other): - return not self == other diff --git a/splunklib/results.py b/splunklib/results.py index 20501c5b7..66e9ad7d1 100644 --- a/splunklib/results.py +++ b/splunklib/results.py @@ -42,10 +42,7 @@ except: import xml.etree.ElementTree as et -try: - from collections import OrderedDict # must be python 2.7 
-except ImportError: - from .ordereddict import OrderedDict +from collections import OrderedDict try: from splunklib.six.moves import cStringIO as StringIO diff --git a/splunklib/searchcommands/decorators.py b/splunklib/searchcommands/decorators.py index 36590a76b..d8b3f48cc 100644 --- a/splunklib/searchcommands/decorators.py +++ b/splunklib/searchcommands/decorators.py @@ -17,10 +17,7 @@ from __future__ import absolute_import, division, print_function, unicode_literals from splunklib import six -try: - from collections import OrderedDict # must be python 2.7 -except ImportError: - from ..ordereddict import OrderedDict +from collections import OrderedDict # must be python 2.7 from inspect import getmembers, isclass, isfunction from splunklib.six.moves import map as imap diff --git a/splunklib/searchcommands/internals.py b/splunklib/searchcommands/internals.py index fa32f0b1c..1ea2833db 100644 --- a/splunklib/searchcommands/internals.py +++ b/splunklib/searchcommands/internals.py @@ -19,10 +19,7 @@ from io import TextIOWrapper from collections import deque, namedtuple from splunklib import six -try: - from collections import OrderedDict # must be python 2.7 -except ImportError: - from ..ordereddict import OrderedDict +from collections import OrderedDict from splunklib.six.moves import StringIO from itertools import chain from splunklib.six.moves import map as imap diff --git a/splunklib/searchcommands/search_command.py b/splunklib/searchcommands/search_command.py index 5a626cc5c..b3cba7b61 100644 --- a/splunklib/searchcommands/search_command.py +++ b/splunklib/searchcommands/search_command.py @@ -22,10 +22,7 @@ import io -try: - from collections import OrderedDict # must be python 2.7 -except ImportError: - from ..ordereddict import OrderedDict +from collections import OrderedDict from copy import deepcopy from splunklib.six.moves import StringIO from itertools import chain, islice diff --git a/tests/searchcommands/test_internals_v2.py b/tests/searchcommands/test_internals_v2.py index 34e6b61c4..c221cc53c 100755 --- a/tests/searchcommands/test_internals_v2.py +++ b/tests/searchcommands/test_internals_v2.py @@ -21,10 +21,7 @@ from splunklib.searchcommands import SearchMetric from splunklib import six from splunklib.six.moves import range -try: - from collections import OrderedDict # must be python 2.7 -except ImportError: - from splunklib.ordereddict import OrderedDict +from collections import OrderedDict from collections import namedtuple, deque from splunklib.six import BytesIO as BytesIO from functools import wraps diff --git a/tests/test_job.py b/tests/test_job.py index dc4c3e4e7..4de34b611 100755 --- a/tests/test_job.py +++ b/tests/test_job.py @@ -389,10 +389,7 @@ def test_results_reader(self): N_results = 0 N_messages = 0 for r in reader: - try: - from collections import OrderedDict - except: - from splunklib.ordereddict import OrderedDict + from collections import OrderedDict self.assertTrue(isinstance(r, OrderedDict) or isinstance(r, results.Message)) if isinstance(r, OrderedDict): @@ -411,10 +408,7 @@ def test_results_reader_with_streaming_results(self): N_results = 0 N_messages = 0 for r in reader: - try: - from collections import OrderedDict - except: - from splunklib.ordereddict import OrderedDict + from collections import OrderedDict self.assertTrue(isinstance(r, OrderedDict) or isinstance(r, results.Message)) if isinstance(r, OrderedDict): From b066cef7d33bcff49b154a2d0540dc5d2719876f Mon Sep 17 00:00:00 2001 From: vmalaviya Date: Thu, 2 Dec 2021 16:17:56 +0530 Subject: [PATCH 
26/58] Python3 compatibility for ResponseReader.__str__() --- splunklib/binding.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/splunklib/binding.py b/splunklib/binding.py index 94cc55818..73e4cad65 100644 --- a/splunklib/binding.py +++ b/splunklib/binding.py @@ -1290,7 +1290,11 @@ def __init__(self, response, connection=None): self._buffer = b'' def __str__(self): - return self.read() + import sys + if sys.version_info[0] < 3: + return self.read() + else: + return str(self.read(), 'UTF-8') @property def empty(self): From 7928675be0b6607376cb9f3fce5f3835725df135 Mon Sep 17 00:00:00 2001 From: Abhi Shah Date: Mon, 13 Dec 2021 22:23:17 +0530 Subject: [PATCH 27/58] Update test_validators.py Updated and fixed the float validator test case added by the user for the PR #384 --- tests/searchcommands/test_validators.py | 21 ++++++++++----------- 1 file changed, 10 insertions(+), 11 deletions(-) diff --git a/tests/searchcommands/test_validators.py b/tests/searchcommands/test_validators.py index 8532fa423..f9aeea804 100755 --- a/tests/searchcommands/test_validators.py +++ b/tests/searchcommands/test_validators.py @@ -208,9 +208,10 @@ def test(integer): def test_float(self): # Float validator test + import random - maxsize = sys.maxsize - minsize = -(sys.maxsize - 1) + maxsize = random.random() + 1 + minsize = random.random() - 1 validator = validators.Float() @@ -240,27 +241,25 @@ def test(float_val): self.assertEqual(validator.__call__(0), 0) self.assertEqual(validator.__call__(1.154), 1.154) self.assertEqual(validator.__call__(888.51), 888.51) - self.assertEqual(validator.__call__(2 * maxsize), float(2 * maxsize)) + self.assertEqual(validator.__call__(2 * maxsize), (2 * maxsize)) self.assertRaises(ValueError, validator.__call__, -1) self.assertRaises(ValueError, validator.__call__, -1111.00578) self.assertRaises(ValueError, validator.__call__, -0.005) validator = validators.Float(minimum=1, maximum=maxsize) self.assertEqual(validator.__call__(1), float(1)) - self.assertEqual(validator.__call__(100.111), 100.111) - self.assertEqual(validator.__call__(9999.0), 9999.0) - self.assertEqual(validator.__call__(maxsize), float(maxsize)) + self.assertEqual(validator.__call__(maxsize), maxsize) self.assertRaises(ValueError, validator.__call__, 0) self.assertRaises(ValueError, validator.__call__, 0.9999) - self.assertRaises(ValueError, validator.__call__, -199) self.assertRaises(ValueError, validator.__call__, maxsize + 1) - validator = validators.Float(minimum=-1, maximum=1) - self.assertEqual(validator.__call__(0), float(0)) + validator = validators.Float(minimum=minsize, maximum=maxsize) + self.assertEqual(validator.__call__(minsize), minsize) self.assertEqual(validator.__call__(0.123456), 0.123456) + self.assertEqual(validator.__call__(0), float(0)) self.assertEqual(validator.__call__(-0.012), -0.012) - self.assertRaises(ValueError, validator.__call__, -1.1) - self.assertRaises(ValueError, validator.__call__, 100.123456) + self.assertEqual(validator.__call__(maxsize), maxsize) + self.assertRaises(ValueError, validator.__call__, minsize - 1) self.assertRaises(ValueError, validator.__call__, maxsize + 1) return From 03bcab1bb52d619d4a4a3910bee4f7c6bd57f01e Mon Sep 17 00:00:00 2001 From: Abhi Shah Date: Mon, 13 Dec 2021 22:41:19 +0530 Subject: [PATCH 28/58] Update test_decorators.py --- tests/searchcommands/test_decorators.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/searchcommands/test_decorators.py b/tests/searchcommands/test_decorators.py index 082ab184d..b9df0ef37 
100755 --- a/tests/searchcommands/test_decorators.py +++ b/tests/searchcommands/test_decorators.py @@ -381,6 +381,7 @@ def test_option(self): validators.Fieldname: ('some.field_name', 'non-fieldname value'), validators.File: (__file__, 'non-existent file'), validators.Integer: ('100', 'non-integer value'), + validators.Float: ('99.9', 'non-float value'), validators.List: ('a,b,c', '"non-list value'), validators.Map: ('foo', 'non-existent map entry'), validators.Match: ('123-45-6789', 'not a social security number'), From 40ca0a2d8944edc12958a8c7587bd788a5d60b25 Mon Sep 17 00:00:00 2001 From: Abhi Shah Date: Mon, 13 Dec 2021 22:58:05 +0530 Subject: [PATCH 29/58] Update test_decorators.py --- tests/searchcommands/test_decorators.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/searchcommands/test_decorators.py b/tests/searchcommands/test_decorators.py index b9df0ef37..60e737c0c 100755 --- a/tests/searchcommands/test_decorators.py +++ b/tests/searchcommands/test_decorators.py @@ -460,7 +460,7 @@ def test_option(self): self.assertEqual(expected[x.name], x.value.pattern) elif isinstance(x.value, TextIOWrapper): self.assertEqual(expected[x.name], "'%s'" % x.value.name) - elif not isinstance(x.value, (bool,) + (six.text_type,) + (six.binary_type,) + tuplewrap(six.integer_types)): + elif not isinstance(x.value, (bool,) + (float,) + (six.text_type,) + (six.binary_type,) + tuplewrap(six.integer_types)): self.assertEqual(expected[x.name], repr(x.value)) else: self.assertEqual(expected[x.name], x.value) From 529094684e4f3e92af5cb86c62f439c0e35ef1b2 Mon Sep 17 00:00:00 2001 From: Abhi Shah Date: Mon, 13 Dec 2021 23:31:37 +0530 Subject: [PATCH 30/58] Update test_decorators.py --- tests/searchcommands/test_decorators.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/tests/searchcommands/test_decorators.py b/tests/searchcommands/test_decorators.py index 60e737c0c..738132517 100755 --- a/tests/searchcommands/test_decorators.py +++ b/tests/searchcommands/test_decorators.py @@ -467,13 +467,16 @@ def test_option(self): expected = ( 'foo="f" boolean="f" code="foo == \\"bar\\"" duration="24:59:59" fieldname="some.field_name" ' - 'file=' + json_encode_string(__file__) + ' integer="100" float="99.9" map="foo" match="123-45-6789" ' + 'file=' + json_encode_string(__file__) + ' float="99.9" integer="100" map="foo" match="123-45-6789" ' 'optionname="some_option_name" record="f" regularexpression="\\\\s+" required_boolean="f" ' 'required_code="foo == \\"bar\\"" required_duration="24:59:59" required_fieldname="some.field_name" ' - 'required_file=' + json_encode_string(__file__) + ' required_integer="100" required_float="99.9" required_map="foo" ' + 'required_file=' + json_encode_string(__file__) + ' required_float="99.9" required_integer="100" required_map="foo" ' 'required_match="123-45-6789" required_optionname="some_option_name" required_regularexpression="\\\\s+" ' 'required_set="bar" set="bar" show_configuration="f"') + print(command.options) + print() + print(expected) observed = six.text_type(command.options) self.assertEqual(observed, expected) From 9c8df4444da78c450b9e0b8cf12503f9617ef743 Mon Sep 17 00:00:00 2001 From: Abhi Shah Date: Tue, 14 Dec 2021 00:09:49 +0530 Subject: [PATCH 31/58] Update test_validators.py --- tests/searchcommands/test_validators.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/searchcommands/test_validators.py b/tests/searchcommands/test_validators.py index f9aeea804..38836c4aa 100755 --- 
a/tests/searchcommands/test_validators.py +++ b/tests/searchcommands/test_validators.py @@ -222,7 +222,7 @@ def test(float_val): assert False for s in str(float_val), six.text_type(float_val): value = validator.__call__(s) - self.assertEqual(value, float_val) + self.assertAlmostEqual(value, float_val) self.assertIsInstance(value, float) self.assertEqual(validator.format(float_val), six.text_type(float_val)) From 87bb9207901b3bcacafb2c52a37ff61ed8379eb2 Mon Sep 17 00:00:00 2001 From: Abhi Shah Date: Tue, 14 Dec 2021 10:41:48 +0530 Subject: [PATCH 32/58] Update test_decorators.py removed print statements --- tests/searchcommands/test_decorators.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/tests/searchcommands/test_decorators.py b/tests/searchcommands/test_decorators.py index 738132517..dd65aa0ab 100755 --- a/tests/searchcommands/test_decorators.py +++ b/tests/searchcommands/test_decorators.py @@ -474,9 +474,6 @@ def test_option(self): 'required_match="123-45-6789" required_optionname="some_option_name" required_regularexpression="\\\\s+" ' 'required_set="bar" set="bar" show_configuration="f"') - print(command.options) - print() - print(expected) observed = six.text_type(command.options) self.assertEqual(observed, expected) From 877fe64fb6145566f1b85abeff4f31135351ca84 Mon Sep 17 00:00:00 2001 From: akaila-splunk Date: Fri, 31 Dec 2021 17:55:46 +0530 Subject: [PATCH 33/58] Added new github_commit modular input example - updated docker-compose.yml file that will mount the example and splunklib directories during the docker container spin-up process - added instructions in readme.md file for how to run the example --- docker-compose.yml | 2 + examples/github_commits/README.md | 13 + .../github_commits/README/inputs.conf.spec | 6 + examples/github_commits/bin/github_commits.py | 265 ++++++++++++++++++ examples/github_commits/default/app.conf | 11 + 5 files changed, 297 insertions(+) create mode 100644 examples/github_commits/README.md create mode 100644 examples/github_commits/README/inputs.conf.spec create mode 100644 examples/github_commits/bin/github_commits.py create mode 100644 examples/github_commits/default/app.conf diff --git a/docker-compose.yml b/docker-compose.yml index 6885cfd5f..84c427072 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -14,6 +14,8 @@ services: - ./splunklib:/opt/splunk/etc/apps/github_forks/lib/splunklib - ./examples/random_numbers:/opt/splunk/etc/apps/random_numbers - ./splunklib:/opt/splunk/etc/apps/random_numbers/lib/splunklib + - ./examples/github_commits:/opt/splunk/etc/apps/github_commits + - ./splunklib:/opt/splunk/etc/apps/github_commits/lib/splunklib - ./examples/searchcommands_app/package:/opt/splunk/etc/apps/searchcommands_app - ./splunklib:/opt/splunk/etc/apps/searchcommands_app/lib/splunklib - ./examples/twitted/twitted:/opt/splunk/etc/apps/twitted diff --git a/examples/github_commits/README.md b/examples/github_commits/README.md new file mode 100644 index 000000000..fe7832c5e --- /dev/null +++ b/examples/github_commits/README.md @@ -0,0 +1,13 @@ +splunk-sdk-python github_commits example +======================================== + +This app provides an example of a modular input that pulls down commit data from GitHub and creates events for each commit, which are then streamed to Splunk, based on the owner and repo_name provided by the user during setup of the input. 
+ +To run this example locally, run `SPLUNK_VERSION=latest docker compose up -d` from the root of this repository, which will mount this example alongside the latest version of splunklib at `/opt/splunk/etc/apps/github_commits` and `/opt/splunk/etc/apps/github_commits/lib/splunklib` within the `splunk` container. + +Once the docker container is up and healthy, log into the Splunk UI and set up a new `Github Commits` input by visiting this page: http://localhost:8000/en-US/manager/github_commits/datainputstats and selecting the "Add new..." button next to the Local Inputs > Github Commits. Enter values for a Github Repository owner and repo_name, for example owner = `splunk` repo_name = `splunk-sdk-python`. +(optional) `token` if using a private repository and/or to avoid Github's API limits. To get a Github API token, visit the [Github settings page](https://github.com/settings/tokens/new) and make sure the repo and public_repo scopes are selected. + +NOTE: If no events appear, then the script is likely not running properly; see https://docs.splunk.com/Documentation/SplunkCloud/latest/AdvancedDev/ModInputsDevTools for more details on debugging the modular input using the command line and relevant logs. + +Once the input is created, you should be able to see an event when running the following search: `source="github_commits://*"`. The event should contain commit data from the given GitHub repository. diff --git a/examples/github_commits/README/inputs.conf.spec b/examples/github_commits/README/inputs.conf.spec new file mode 100644 index 000000000..156e60a4d --- /dev/null +++ b/examples/github_commits/README/inputs.conf.spec @@ -0,0 +1,6 @@ +[github_commits://] +*This example modular input retrieves GitHub commits and indexes them in Splunk. + +owner = +repo_name = +token = diff --git a/examples/github_commits/bin/github_commits.py b/examples/github_commits/bin/github_commits.py new file mode 100644 index 000000000..4e17dcb55 --- /dev/null +++ b/examples/github_commits/bin/github_commits.py @@ -0,0 +1,265 @@ +#!/usr/bin/env python +# +# Copyright 2013 Splunk, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"): you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +from __future__ import absolute_import + +import os +import re +import sys +import json +# NOTE: splunklib must exist within github_commits/lib/splunklib for this +# example to run! To run this locally use `SPLUNK_VERSION=latest docker compose up -d` +# from the root of this repo which mounts this example and the latest splunklib +# code together at /opt/splunk/etc/apps/github_commits +from datetime import datetime + +sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..", "lib")) + +from splunklib.modularinput import * +from splunklib import six +from six.moves import http_client + + +class MyScript(Script): +    """All modular inputs should inherit from the abstract base class Script + from splunklib.modularinput.script. 
+ They must override the get_scheme and stream_events functions, and, + if the scheme returned by get_scheme has Scheme.use_external_validation + set to True, the validate_input function. + """ + + def get_scheme(self): + """When Splunk starts, it looks for all the modular inputs defined by + its configuration, and tries to run them with the argument --scheme. + Splunkd expects the modular inputs to print a description of the + input in XML on stdout. The modular input framework takes care of all + the details of formatting XML and printing it. The user need only + override get_scheme and return a new Scheme object. + + :return: scheme, a Scheme object + """ + # Splunk will display "Github Commits" to users for this input + scheme = Scheme("Github Commits") + + scheme.description = "Streams events of commits in the specified Github repository (must be public, unless setting a token)." + # If you set external validation to True, without overriding validate_input, + # the script will accept anything as valid. Generally you only need external + # validation if there are relationships you must maintain among the + # parameters, such as requiring min to be less than max in this example, + # or you need to check that some resource is reachable or valid. + # Otherwise, Splunk lets you specify a validation string for each argument + # and will run validation internally using that string. + scheme.use_external_validation = True + scheme.use_single_instance = False # Set to false so an input can have an optional interval parameter. + + owner_argument = Argument("owner") + owner_argument.title = "Owner" + owner_argument.data_type = Argument.data_type_string + owner_argument.description = "Github user or organization that created the repository." + owner_argument.required_on_create = True + # If you are not using external validation, you would add something like: + # + # scheme.validation = "owner==splunk" + scheme.add_argument(owner_argument) + + repo_name_argument = Argument("repo_name") + repo_name_argument.title = "Repo Name" + repo_name_argument.data_type = Argument.data_type_string + repo_name_argument.description = "Name of the Github repository." + repo_name_argument.required_on_create = True + scheme.add_argument(repo_name_argument) + + token_argument = Argument("token") + token_argument.title = "Token" + token_argument.data_type = Argument.data_type_string + token_argument.description = "(Optional) A Github API access token. Required for private repositories (the token must have the 'repo' and 'public_repo' scopes enabled). Recommended to avoid Github's API limit, especially if setting an interval." + token_argument.required_on_create = False + token_argument.required_on_edit = False + scheme.add_argument(token_argument) + + return scheme + + def validate_input(self, validation_definition): + """In this example we are using external validation to verify that the Github + repository exists. If validate_input does not raise an Exception, the input + is assumed to be valid. Otherwise it prints the exception as an error message + when telling splunkd that the configuration is invalid. + + When using external validation, after splunkd calls the modular input with + --scheme to get a scheme, it calls it again with --validate-arguments for + each instance of the modular input in its configuration files, feeding XML + on stdin to the modular input to do validation. It is called the same way + whenever a modular input's configuration is edited. 
+ + :param validation_definition: a ValidationDefinition object + """ + # Get the values of the parameters, and construct a URL for the Github API + + owner = validation_definition.parameters["owner"] + repo_name = validation_definition.parameters["repo_name"] + token = None + if "token" in validation_definition.parameters: + token = validation_definition.parameters["token"] + + # Call Github to retrieve repo information + res = _get_github_commits(owner, repo_name, 1, 1, token) + + # If we get any kind of message, that's a bad sign. + if "message" in res: + raise ValueError("Some error occur during fetching commits. - " + res["message"]) + elif len(res) == 1 and "sha" in res[0]: + pass + else: + raise ValueError("Expected only the latest commit, instead found " + str(len(res)) + " commits.") + + def stream_events(self, inputs, ew): + """This function handles all the action: splunk calls this modular input + without arguments, streams XML describing the inputs to stdin, and waits + for XML on stdout describing events. + + If you set use_single_instance to True on the scheme in get_scheme, it + will pass all the instances of this input to a single instance of this + script. + + :param inputs: an InputDefinition object + :param ew: an EventWriter object + """ + + # Go through each input for this modular input + for input_name, input_item in six.iteritems(inputs.inputs): + # Get fields from the InputDefinition object + owner = input_item["owner"] + repo_name = input_item["repo_name"] + token = None + if "token" in input_item: + token = input_item["token"] + # Get the checkpoint directory out of the modular input's metadata + checkpoint_dir = inputs.metadata["checkpoint_dir"] + + checkpoint_file_path = os.path.join(checkpoint_dir, owner + " " + repo_name + ".txt") + checkpoint_file_new_contents = "" + error_found = False + + # Set the temporary contents of the checkpoint file to an empty string + checkpoint_file_contents = "" + + try: + # read sha values from file, if exist + file = open(checkpoint_file_path, 'r') + checkpoint_file_contents = file.read() + file.close() + except: + # If there's an exception, assume the file doesn't exist + # Create the checkpoint file with an empty string + file = open(checkpoint_file_path, "a") + file.write("") + file.close() + + per_page = 100 # The maximum per page value supported by the Github API. + page = 1 + + while True: + # Get the commit count from the Github API + res = _get_github_commits(owner, repo_name, per_page, page, token) + if len(res) == 0: + break + + file = open(checkpoint_file_path, "a") + + for record in res: + if error_found: + break + + # If the file exists and doesn't contain the sha, or if the file doesn't exist. + if checkpoint_file_contents.find(record["sha"] + "\n") < 0: + try: + _stream_commit(ew, owner, repo_name, record) + # Append this commit to the string we'll write at the end + checkpoint_file_new_contents += record["sha"] + "\n" + except: + error_found = True + file.write(checkpoint_file_new_contents) + + # We had an error, die. 
+ return + + file.write(checkpoint_file_new_contents) + file.close() + + page += 1 + + +def _get_display_date(date): + month_strings = ["Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec"] + date_format = "%Y-%m-%d %H:%M:%S" + date = datetime.strptime(date, date_format) + + hours = date.hour + if hours < 10: + hours = "0" + str(hours) + + mins = date.minute + if mins < 10: + mins = "0" + str(mins) + + return "{month} {day}, {year} - {hour}:{minute} {period}".format(month=month_strings[date.month - 1], day=date.day, + year=date.year, hour=hours, minute=mins, + period="AM" if date.hour < 12 else "PM") + + +def _get_github_commits(owner, repo_name, per_page=1, page=1, token=None): + # Read the response from the Github API, then parse the JSON data into an object + repo_path = "/repos/%s/%s/commits?per_page=%d&page=%d" % (owner, repo_name, per_page, page) + connection = http_client.HTTPSConnection('api.github.com') + headers = { + 'Content-type': 'application/json', + 'User-Agent': 'splunk-sdk-python' + } + if token: + headers['Authorization'] = 'token ' + token + connection.request('GET', repo_path, headers=headers) + response = connection.getresponse() + body = response.read().decode() + return json.loads(body) + + +def _stream_commit(ew, owner, repo_name, commitData): + json_data = { + "sha": commitData["sha"], + "api_url": commitData["url"], + "url": "https://github.com/" + owner + "/" + repo_name + "/commit/" + commitData["sha"] + } + commit = commitData["commit"] + + # At this point, assumed checkpoint doesn't exist. + json_data["message"] = re.sub("\n|\r", " ", commit["message"]) + json_data["author"] = commit["author"]["name"] + json_data["rawdate"] = commit["author"]["date"] + commit_date = re.sub("T|Z", " ", commit["author"]["date"]).strip() + json_data["displaydate"] = _get_display_date(commit_date) + + # Create an Event object, and set its fields + event = Event() + event.stanza = repo_name + event.sourceType = "github_commits" + event.data = json.dumps(json_data) + + # Tell the EventWriter to write this event + ew.write_event(event) + + +if __name__ == "__main__": + sys.exit(MyScript().run(sys.argv)) diff --git a/examples/github_commits/default/app.conf b/examples/github_commits/default/app.conf new file mode 100644 index 000000000..14086d5a2 --- /dev/null +++ b/examples/github_commits/default/app.conf @@ -0,0 +1,11 @@ +[install] +is_configured = 0 + +[ui] +is_visible = 1 +label = GitHub Commits Modular Input + +[launcher] +author=Splunk +description=This example modular input retrieves GitHub commits and indexes them in Splunk. +version = 1.0 From 72916145ac1bc123cc34c47214ea93418db3bca3 Mon Sep 17 00:00:00 2001 From: akaila-splunk Date: Wed, 5 Jan 2022 11:32:02 +0530 Subject: [PATCH 34/58] change checkpoint_filename and copyright year --- examples/github_commits/bin/github_commits.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/examples/github_commits/bin/github_commits.py b/examples/github_commits/bin/github_commits.py index 4e17dcb55..d27b61871 100644 --- a/examples/github_commits/bin/github_commits.py +++ b/examples/github_commits/bin/github_commits.py @@ -1,6 +1,6 @@ #!/usr/bin/env python # -# Copyright 2013 Splunk, Inc. +# Copyright 2021 Splunk, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"): you may # not use this file except in compliance with the License. 
You may obtain @@ -149,7 +149,7 @@ def stream_events(self, inputs, ew): # Get the checkpoint directory out of the modular input's metadata checkpoint_dir = inputs.metadata["checkpoint_dir"] - checkpoint_file_path = os.path.join(checkpoint_dir, owner + " " + repo_name + ".txt") + checkpoint_file_path = os.path.join(checkpoint_dir, owner + "_" + repo_name + ".txt") checkpoint_file_new_contents = "" error_found = False From 397f0aa4997da7a4f04d780bc976f57ee588457b Mon Sep 17 00:00:00 2001 From: Lowell Alleman Date: Fri, 12 Mar 2021 21:31:23 -0500 Subject: [PATCH 35/58] Capture 'app' context for modinput input_items - Update modularinput XML parsing to capture the 'app' tag for configuration stanza elements. This value is stored in the input dict under '__app' to avoid potential naming collisions with user defined parameters. - Updated unittest --- splunklib/modularinput/utils.py | 4 +++- .../modularinput/data/conf_with_2_inputs.xml | Bin 2332 -> 2384 bytes .../data/conf_with_invalid_inputs.xml | Bin 1332 -> 1358 bytes tests/modularinput/test_input_definition.py | 2 ++ 4 files changed, 5 insertions(+), 1 deletion(-) diff --git a/splunklib/modularinput/utils.py b/splunklib/modularinput/utils.py index 853694a0d..d731b5d55 100644 --- a/splunklib/modularinput/utils.py +++ b/splunklib/modularinput/utils.py @@ -66,7 +66,9 @@ def parse_xml_data(parent_node, child_node_tag): for child in parent_node: if child.tag == child_node_tag: if child_node_tag == "stanza": - data[child.get("name")] = {} + data[child.get("name")] = { + "__app": child.get("app", None) + } for param in child: data[child.get("name")][param.get("name")] = parse_parameters(param) elif "item" == parent_node.tag: diff --git a/tests/modularinput/data/conf_with_2_inputs.xml b/tests/modularinput/data/conf_with_2_inputs.xml index 95c44bb2a1ed46f31edf644cb1e35f2cff33b70c..bcfd8120471ce77d767d959a79d55e7f4193478e 100644 GIT binary patch delta 69 zcmbOubU|nXKeM<3Ln1=~5ZW>*F%&bT0(nIY$qX5rd6-`_qDba4R5HXv6i?=14&Cg@ HdYKsj6H^a) delta 37 ncmca0G)HIyKl5Z|4w=dEEK-}}nKv=QIFsXQuLn1=~5ZW>*F%&bT0(nIY$qX4lo*e_% Date: Fri, 12 Mar 2021 22:23:53 -0500 Subject: [PATCH 36/58] Simplified variable handing in parse_xml_data --- splunklib/modularinput/utils.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/splunklib/modularinput/utils.py b/splunklib/modularinput/utils.py index d731b5d55..3d42b6326 100644 --- a/splunklib/modularinput/utils.py +++ b/splunklib/modularinput/utils.py @@ -64,13 +64,14 @@ def parse_parameters(param_node): def parse_xml_data(parent_node, child_node_tag): data = {} for child in parent_node: + child_name = child.get("name") if child.tag == child_node_tag: if child_node_tag == "stanza": - data[child.get("name")] = { + data[child_name] = { "__app": child.get("app", None) } for param in child: - data[child.get("name")][param.get("name")] = parse_parameters(param) + data[child_name][param.get("name")] = parse_parameters(param) elif "item" == parent_node.tag: - data[child.get("name")] = parse_parameters(child) + data[child_name] = parse_parameters(child) return data From 0f65cd7da44e87ee2c9d5fce151c191c26c37889 Mon Sep 17 00:00:00 2001 From: Lowell Alleman Date: Fri, 12 Mar 2021 22:25:35 -0500 Subject: [PATCH 37/58] Add 'app' input example into github_fork.py --- examples/github_forks/bin/github_forks.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/examples/github_forks/bin/github_forks.py b/examples/github_forks/bin/github_forks.py index 5ffa4e409..46b42a81b 100755 --- 
a/examples/github_forks/bin/github_forks.py +++ b/examples/github_forks/bin/github_forks.py @@ -130,6 +130,10 @@ def stream_events(self, inputs, ew): owner = input_item["owner"] repo_name = input_item["repo_name"] + # Hint: API auth required?, get a secret from passwords.conf + # self.service.namespace["app"] = input_item["__app"] + # api_token = self.service.storage_passwords["github_api_token"].clear_password + # Get the fork count from the Github API jsondata = _get_github_repos(owner, repo_name) fork_count = jsondata["forks_count"] From 4e3cab2dea0ab57d305b982c588df1cd70b295ff Mon Sep 17 00:00:00 2001 From: vmalaviya Date: Tue, 11 Jan 2022 13:40:59 +0530 Subject: [PATCH 38/58] version checking updated for __str__ method --- splunklib/binding.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/splunklib/binding.py b/splunklib/binding.py index 73e4cad65..d6489950a 100644 --- a/splunklib/binding.py +++ b/splunklib/binding.py @@ -1290,8 +1290,7 @@ def __init__(self, response, connection=None): self._buffer = b'' def __str__(self): - import sys - if sys.version_info[0] < 3: + if six.PY2: return self.read() else: return str(self.read(), 'UTF-8') From 09551a86e2e113a2c34890f7c43a7eebd0147fa3 Mon Sep 17 00:00:00 2001 From: akaila-splunk Date: Tue, 11 Jan 2022 19:24:59 +0530 Subject: [PATCH 39/58] Replace .splunkrc with .env file in test and examples - Replace .spluncrc file with .env in examples and test cases - updated __init__.py and cmdopts classes method to support .env file --- .env | 16 ++++++++++++++ examples/analytics/input.py | 2 +- examples/analytics/output.py | 2 +- examples/analytics/server.py | 2 +- examples/async/async.py | 2 +- examples/binding1.py | 2 +- examples/conf.py | 2 +- examples/dashboard/feed.py | 2 +- examples/event_types.py | 2 +- examples/explorer/README.md | 2 +- examples/explorer/explorer.py | 2 +- examples/export/export.py | 2 +- examples/fired_alerts.py | 2 +- examples/follow.py | 2 +- examples/genevents.py | 2 +- examples/get_job.py | 2 +- examples/handlers/handler_certs.py | 2 +- examples/handlers/handler_debug.py | 2 +- examples/handlers/handler_proxy.py | 2 +- examples/handlers/handler_urllib2.py | 2 +- examples/index.py | 2 +- examples/info.py | 2 +- examples/inputs.py | 2 +- examples/job.py | 2 +- examples/kvstore.py | 2 +- examples/loggers.py | 2 +- examples/oneshot.py | 2 +- examples/saved_search/saved_search.py | 2 +- examples/saved_searches.py | 2 +- examples/search.py | 2 +- examples/search_modes.py | 2 +- examples/spcmd.py | 2 +- examples/spurl.py | 2 +- examples/stail.py | 2 +- examples/submit.py | 2 +- examples/twitted/input.py | 2 +- examples/upload.py | 2 +- tests/test_binding.py | 6 ++--- tests/test_service.py | 2 +- tests/testlib.py | 2 +- tox.ini | 1 + utils/__init__.py | 3 ++- utils/cmdopts.py | 32 +++++++++++++++------------ 43 files changed, 78 insertions(+), 56 deletions(-) create mode 100644 .env diff --git a/.env b/.env new file mode 100644 index 000000000..0d5fabf11 --- /dev/null +++ b/.env @@ -0,0 +1,16 @@ +# Splunk host (default: localhost) +host=localhost +# Splunk admin port (default: 8089) +port=8089 +# Splunk username +username=admin +# Splunk password +password=changed! 
+# Access scheme (default: https) +scheme=https +# Your version of Splunk (default: 6.2) +version=8.0 +# Bearer token for authentication +#bearerToken="" +# Session key for authentication +#sessionKey="" diff --git a/examples/analytics/input.py b/examples/analytics/input.py index 93432adb8..1bbd1db98 100755 --- a/examples/analytics/input.py +++ b/examples/analytics/input.py @@ -102,7 +102,7 @@ def main(): argv = sys.argv[1:] - splunk_opts = utils.parse(argv, {}, ".splunkrc", usage=usage) + splunk_opts = utils.parse(argv, {}, ".env", usage=usage) tracker = AnalyticsTracker("cli_app", splunk_opts.kwargs) #tracker.track("test_event", "abc123", foo="bar", bar="foo") diff --git a/examples/analytics/output.py b/examples/analytics/output.py index 07e0753b0..cbbb697f5 100755 --- a/examples/analytics/output.py +++ b/examples/analytics/output.py @@ -152,7 +152,7 @@ def main(): argv = sys.argv[1:] - opts = utils.parse(argv, {}, ".splunkrc", usage=usage) + opts = utils.parse(argv, {}, ".env", usage=usage) retriever = AnalyticsRetriever(opts.args[0], opts.kwargs) #events = retriever.events() diff --git a/examples/analytics/server.py b/examples/analytics/server.py index f4b849f76..a1235e52e 100755 --- a/examples/analytics/server.py +++ b/examples/analytics/server.py @@ -146,7 +146,7 @@ def application(name): def main(): argv = sys.argv[1:] - opts = utils.parse(argv, {}, ".splunkrc") + opts = utils.parse(argv, {}, ".env") global splunk_opts splunk_opts = opts.kwargs diff --git a/examples/async/async.py b/examples/async/async.py index ececa8989..097e50b3c 100755 --- a/examples/async/async.py +++ b/examples/async/async.py @@ -51,7 +51,7 @@ def main(argv): usage = "async.py " # Parse the command line args. - opts = parse(argv, {}, ".splunkrc") + opts = parse(argv, {}, ".env") # We have to see if we got either the "sync" or # "async" command line arguments. diff --git a/examples/binding1.py b/examples/binding1.py index 1dae4f927..19c850879 100755 --- a/examples/binding1.py +++ b/examples/binding1.py @@ -52,7 +52,7 @@ def search(self, query, **kwargs): return self.context.post("search/jobs/export", search=query, **kwargs) def main(argv): - opts = parse(argv, {}, ".splunkrc") + opts = parse(argv, {}, ".env") context = connect(**opts.kwargs) service = Service(context) assert service.apps().status == 200 diff --git a/examples/conf.py b/examples/conf.py index 33d9755ef..f4163be80 100755 --- a/examples/conf.py +++ b/examples/conf.py @@ -151,7 +151,7 @@ def main(): commands = ['create', 'delete', 'list'] # parse args, connect and setup - opts = parse(argv, {}, ".splunkrc", usage=usage) + opts = parse(argv, {}, ".env", usage=usage) service = connect(**opts.kwargs) program = Program(service) diff --git a/examples/dashboard/feed.py b/examples/dashboard/feed.py index 38f5fc0a2..e61f1ba72 100755 --- a/examples/dashboard/feed.py +++ b/examples/dashboard/feed.py @@ -171,7 +171,7 @@ def iterate(job): def main(argv): # Parse the command line args. 
- opts = parse(argv, {}, ".splunkrc") + opts = parse(argv, {}, ".env") # Connect to Splunk service = client.connect(**opts.kwargs) diff --git a/examples/event_types.py b/examples/event_types.py index eec68fa07..c7e17d123 100755 --- a/examples/event_types.py +++ b/examples/event_types.py @@ -30,7 +30,7 @@ "(e.g., export PYTHONPATH=~/splunk-sdk-python.") def main(): - opts = parse(sys.argv[1:], {}, ".splunkrc") + opts = parse(sys.argv[1:], {}, ".env") service = connect(**opts.kwargs) for item in service.event_types: diff --git a/examples/explorer/README.md b/examples/explorer/README.md index 2e5093837..e51d9a8d4 100644 --- a/examples/explorer/README.md +++ b/examples/explorer/README.md @@ -8,7 +8,7 @@ To run, simply execute: ./explorer.py -It will pick up all relevant values from your .splunkrc, or you can pass them +It will pick up all relevant values from your .env, or you can pass them in on the command line. You can see help by adding `--help` to the exectuion. The API Explorer will open up a browser window that will show you a drop down diff --git a/examples/explorer/explorer.py b/examples/explorer/explorer.py index be3dc3279..62ebf85eb 100755 --- a/examples/explorer/explorer.py +++ b/examples/explorer/explorer.py @@ -43,7 +43,7 @@ def main(argv): }, } - opts = utils.parse(argv, redirect_port_args, ".splunkrc", usage=usage) + opts = utils.parse(argv, redirect_port_args, ".env", usage=usage) args = [("scheme", opts.kwargs["scheme"]), ("host", opts.kwargs["host"]), diff --git a/examples/export/export.py b/examples/export/export.py index 06b433f5f..3664a7691 100755 --- a/examples/export/export.py +++ b/examples/export/export.py @@ -320,7 +320,7 @@ def export(options, service): def main(): """ main entry """ - options = parse(sys.argv[1:], CLIRULES, ".splunkrc") + options = parse(sys.argv[1:], CLIRULES, ".env") if options.kwargs['omode'] not in OUTPUT_MODES: print("output mode must be one of %s, found %s" % (OUTPUT_MODES, diff --git a/examples/fired_alerts.py b/examples/fired_alerts.py index c70352f5d..e736ea167 100755 --- a/examples/fired_alerts.py +++ b/examples/fired_alerts.py @@ -30,7 +30,7 @@ "(e.g., export PYTHONPATH=~/splunk-sdk-python.") def main(): - opts = parse(sys.argv[1:], {}, ".splunkrc") + opts = parse(sys.argv[1:], {}, ".env") service = connect(**opts.kwargs) for group in service.fired_alerts: diff --git a/examples/follow.py b/examples/follow.py index 96652e5f3..64b3e1ac6 100755 --- a/examples/follow.py +++ b/examples/follow.py @@ -48,7 +48,7 @@ def follow(job, count, items): def main(): usage = "usage: follow.py " - opts = utils.parse(sys.argv[1:], {}, ".splunkrc", usage=usage) + opts = utils.parse(sys.argv[1:], {}, ".env", usage=usage) if len(opts.args) != 1: utils.error("Search expression required", 2) diff --git a/examples/genevents.py b/examples/genevents.py index b717c2ae2..8b9b2d3bf 100755 --- a/examples/genevents.py +++ b/examples/genevents.py @@ -113,7 +113,7 @@ def main(): print("must supply an index name") sys.exit(1) - opts = parse(argv, RULES, ".splunkrc", usage=usage) + opts = parse(argv, RULES, ".env", usage=usage) service = connect(**opts.kwargs) if opts.kwargs['ingest'] not in INGEST_TYPE: diff --git a/examples/get_job.py b/examples/get_job.py index 073917185..3d2568154 100755 --- a/examples/get_job.py +++ b/examples/get_job.py @@ -33,7 +33,7 @@ "(e.g., export PYTHONPATH=~/splunk-sdk-python.") def main(argv): - opts = parse(argv, {}, ".splunkrc") + opts = parse(argv, {}, ".env") service = client.connect(**opts.kwargs) # Execute a simple search, and store the 
sid diff --git a/examples/handlers/handler_certs.py b/examples/handlers/handler_certs.py index e97e45f44..7140cd651 100755 --- a/examples/handlers/handler_certs.py +++ b/examples/handlers/handler_certs.py @@ -114,7 +114,7 @@ def request(url, message, **kwargs): return request -opts = utils.parse(sys.argv[1:], RULES, ".splunkrc") +opts = utils.parse(sys.argv[1:], RULES, ".env") ca_file = opts.kwargs['ca_file'] service = client.connect(handler=handler(ca_file), **opts.kwargs) pprint([app.name for app in service.apps]) diff --git a/examples/handlers/handler_debug.py b/examples/handlers/handler_debug.py index 1ed4b6334..383428ae4 100755 --- a/examples/handlers/handler_debug.py +++ b/examples/handlers/handler_debug.py @@ -41,6 +41,6 @@ def request(url, message, **kwargs): return response return request -opts = utils.parse(sys.argv[1:], {}, ".splunkrc") +opts = utils.parse(sys.argv[1:], {}, ".env") service = client.connect(handler=handler(), **opts.kwargs) pprint([app.name for app in service.apps]) diff --git a/examples/handlers/handler_proxy.py b/examples/handlers/handler_proxy.py index dbf36457d..eff371541 100755 --- a/examples/handlers/handler_proxy.py +++ b/examples/handlers/handler_proxy.py @@ -80,7 +80,7 @@ def handler(proxy): urllib.request.install_opener(opener) return request -opts = utils.parse(sys.argv[1:], RULES, ".splunkrc") +opts = utils.parse(sys.argv[1:], RULES, ".env") proxy = opts.kwargs['proxy'] try: service = client.connect(handler=handler(proxy), **opts.kwargs) diff --git a/examples/handlers/handler_urllib2.py b/examples/handlers/handler_urllib2.py index 359dabc0b..d81d66d59 100755 --- a/examples/handlers/handler_urllib2.py +++ b/examples/handlers/handler_urllib2.py @@ -53,7 +53,7 @@ def request(url, message, **kwargs): 'body': BytesIO(response.read()) } -opts = utils.parse(sys.argv[1:], {}, ".splunkrc") +opts = utils.parse(sys.argv[1:], {}, ".env") service = client.connect(handler=request, **opts.kwargs) pprint([app.name for app in service.apps]) diff --git a/examples/index.py b/examples/index.py index 9260e88d7..0c8da974f 100755 --- a/examples/index.py +++ b/examples/index.py @@ -183,7 +183,7 @@ def main(): options = argv[:index] command = argv[index:] - opts = parse(options, {}, ".splunkrc", usage=usage, epilog=HELP_EPILOG) + opts = parse(options, {}, ".env", usage=usage, epilog=HELP_EPILOG) service = connect(**opts.kwargs) program = Program(service) program.run(command) diff --git a/examples/info.py b/examples/info.py index da60aeaa8..e54349d4c 100755 --- a/examples/info.py +++ b/examples/info.py @@ -30,7 +30,7 @@ "(e.g., export PYTHONPATH=~/splunk-sdk-python.") if __name__ == "__main__": - opts = parse(sys.argv[1:], {}, ".splunkrc") + opts = parse(sys.argv[1:], {}, ".env") service = client.connect(**opts.kwargs) content = service.info diff --git a/examples/inputs.py b/examples/inputs.py index 7c6436817..be77d02d5 100755 --- a/examples/inputs.py +++ b/examples/inputs.py @@ -30,7 +30,7 @@ "(e.g., export PYTHONPATH=~/splunk-sdk-python.") def main(): - opts = parse(sys.argv[1:], {}, ".splunkrc") + opts = parse(sys.argv[1:], {}, ".env") service = connect(**opts.kwargs) for item in service.inputs: diff --git a/examples/job.py b/examples/job.py index 257281e4d..8e51ba6a7 100755 --- a/examples/job.py +++ b/examples/job.py @@ -267,7 +267,7 @@ def main(): options = argv[:index] command = argv[index:] - opts = parse(options, {}, ".splunkrc", usage=usage, epilog=HELP_EPILOG) + opts = parse(options, {}, ".env", usage=usage, epilog=HELP_EPILOG) service = connect(**opts.kwargs) 
program = Program(service) program.run(command) diff --git a/examples/kvstore.py b/examples/kvstore.py index 7ea2cd6f4..2ca32e5a9 100644 --- a/examples/kvstore.py +++ b/examples/kvstore.py @@ -30,7 +30,7 @@ "(e.g., export PYTHONPATH=~/splunk-sdk-python.") def main(): - opts = parse(sys.argv[1:], {}, ".splunkrc") + opts = parse(sys.argv[1:], {}, ".env") opts.kwargs["owner"] = "nobody" opts.kwargs["app"] = "search" service = connect(**opts.kwargs) diff --git a/examples/loggers.py b/examples/loggers.py index 2d88b8969..df71af09e 100755 --- a/examples/loggers.py +++ b/examples/loggers.py @@ -32,7 +32,7 @@ def main(argv): usage = "usage: %prog [options]" - opts = parse(argv, {}, ".splunkrc", usage=usage) + opts = parse(argv, {}, ".env", usage=usage) service = client.connect(**opts.kwargs) for logger in service.loggers: diff --git a/examples/oneshot.py b/examples/oneshot.py index 9c28ff0e4..dc34bb8cb 100755 --- a/examples/oneshot.py +++ b/examples/oneshot.py @@ -39,7 +39,7 @@ def pretty(response): def main(): usage = "usage: oneshot.py " - opts = utils.parse(sys.argv[1:], {}, ".splunkrc", usage=usage) + opts = utils.parse(sys.argv[1:], {}, ".env", usage=usage) if len(opts.args) != 1: utils.error("Search expression required", 2) diff --git a/examples/saved_search/saved_search.py b/examples/saved_search/saved_search.py index 91f5ef70a..657f6aa69 100755 --- a/examples/saved_search/saved_search.py +++ b/examples/saved_search/saved_search.py @@ -160,7 +160,7 @@ def main(argv): """ main entry """ usage = 'usage: %prog --help for options' - opts = utils.parse(argv, RULES, ".splunkrc", usage=usage) + opts = utils.parse(argv, RULES, ".env", usage=usage) context = binding.connect(**opts.kwargs) operation = None diff --git a/examples/saved_searches.py b/examples/saved_searches.py index 5455f2cec..6301339f5 100755 --- a/examples/saved_searches.py +++ b/examples/saved_searches.py @@ -31,7 +31,7 @@ def main(): - opts = parse(sys.argv[1:], {}, ".splunkrc") + opts = parse(sys.argv[1:], {}, ".env") service = connect(**opts.kwargs) for saved_search in service.saved_searches: diff --git a/examples/search.py b/examples/search.py index 1c5ace22e..858e92312 100755 --- a/examples/search.py +++ b/examples/search.py @@ -49,7 +49,7 @@ def cmdline(argv, flags, **kwargs): """A cmdopts wrapper that takes a list of flags and builds the corresponding cmdopts rules to match those flags.""" rules = dict([(flag, {'flags': ["--%s" % flag]}) for flag in flags]) - return parse(argv, rules, ".splunkrc", **kwargs) + return parse(argv, rules, ".env", **kwargs) def main(argv): usage = 'usage: %prog [options] "search"' diff --git a/examples/search_modes.py b/examples/search_modes.py index dbbb8442a..f3e05f362 100644 --- a/examples/search_modes.py +++ b/examples/search_modes.py @@ -12,7 +12,7 @@ def cmdline(argv, flags, **kwargs): """A cmdopts wrapper that takes a list of flags and builds the corresponding cmdopts rules to match those flags.""" rules = dict([(flag, {'flags': ["--%s" % flag]}) for flag in flags]) - return parse(argv, rules, ".splunkrc", **kwargs) + return parse(argv, rules, ".env", **kwargs) def modes(argv): opts = cmdline(argv, []) diff --git a/examples/spcmd.py b/examples/spcmd.py index f2b21378d..28b4e9a93 100755 --- a/examples/spcmd.py +++ b/examples/spcmd.py @@ -118,7 +118,7 @@ def actions(opts): return len(opts.args) > 0 or 'eval' in opts.kwargs def main(): - opts = utils.parse(sys.argv[1:], RULES, ".splunkrc") + opts = utils.parse(sys.argv[1:], RULES, ".env") # Connect and initialize the command session session = 
Session(**opts.kwargs) diff --git a/examples/spurl.py b/examples/spurl.py index 71c60e2ae..748b56d9c 100755 --- a/examples/spurl.py +++ b/examples/spurl.py @@ -47,7 +47,7 @@ def print_response(response): print(body) def main(): - opts = utils.parse(sys.argv[1:], {}, ".splunkrc") + opts = utils.parse(sys.argv[1:], {}, ".env") for arg in opts.args: print_response(invoke(arg, **opts.kwargs)) diff --git a/examples/stail.py b/examples/stail.py index 0f04b0d8c..85f38a853 100755 --- a/examples/stail.py +++ b/examples/stail.py @@ -35,7 +35,7 @@ def main(): usage = "usage: %prog " - opts = utils.parse(sys.argv[1:], {}, ".splunkrc", usage=usage) + opts = utils.parse(sys.argv[1:], {}, ".env", usage=usage) if len(opts.args) != 1: utils.error("Search expression required", 2) diff --git a/examples/submit.py b/examples/submit.py index 358ce9fb0..1e74e7a49 100755 --- a/examples/submit.py +++ b/examples/submit.py @@ -45,7 +45,7 @@ def main(argv): usage = 'usage: %prog [options] ' - opts = parse(argv, RULES, ".splunkrc", usage=usage) + opts = parse(argv, RULES, ".env", usage=usage) if len(opts.args) == 0: error("Index name required", 2) index = opts.args[0] diff --git a/examples/twitted/input.py b/examples/twitted/input.py index ececa09b1..e907cc55d 100755 --- a/examples/twitted/input.py +++ b/examples/twitted/input.py @@ -94,7 +94,7 @@ def connect(self): } def cmdline(): - kwargs = parse(sys.argv[1:], RULES, ".splunkrc").kwargs + kwargs = parse(sys.argv[1:], RULES, ".env").kwargs # Prompt for Twitter username/password if not provided on command line if 'tusername' not in kwargs: diff --git a/examples/upload.py b/examples/upload.py index 8e9137e42..af592b949 100755 --- a/examples/upload.py +++ b/examples/upload.py @@ -58,7 +58,7 @@ def main(argv): usage = 'usage: %prog [options] *' - opts = parse(argv, RULES, ".splunkrc", usage=usage) + opts = parse(argv, RULES, ".env", usage=usage) kwargs_splunk = dslice(opts.kwargs, FLAGS_SPLUNK) service = client.connect(**kwargs_splunk) diff --git a/tests/test_binding.py b/tests/test_binding.py index 2d3107507..3bce0de1b 100755 --- a/tests/test_binding.py +++ b/tests/test_binding.py @@ -69,7 +69,7 @@ class BindingTestCase(unittest.TestCase): context = None def setUp(self): logging.info("%s", self.__class__.__name__) - self.opts = testlib.parse([], {}, ".splunkrc") + self.opts = testlib.parse([], {}, ".env") self.context = binding.connect(**self.opts.kwargs) logging.debug("Connected to splunkd.") @@ -512,7 +512,7 @@ def test_logout(self): class TestCookieAuthentication(unittest.TestCase): def setUp(self): - self.opts = testlib.parse([], {}, ".splunkrc") + self.opts = testlib.parse([], {}, ".env") self.context = binding.connect(**self.opts.kwargs) # Skip these tests if running below Splunk 6.2, cookie-auth didn't exist before @@ -709,7 +709,7 @@ def test_namespace_fails(self): @pytest.mark.smoke class TestBasicAuthentication(unittest.TestCase): def setUp(self): - self.opts = testlib.parse([], {}, ".splunkrc") + self.opts = testlib.parse([], {}, ".env") opts = self.opts.kwargs.copy() opts["basic"] = True opts["username"] = self.opts.kwargs["username"] diff --git a/tests/test_service.py b/tests/test_service.py index d1fa89118..c86cf0ccd 100755 --- a/tests/test_service.py +++ b/tests/test_service.py @@ -180,7 +180,7 @@ def test_hec_event(self): class TestCookieAuthentication(unittest.TestCase): def setUp(self): - self.opts = testlib.parse([], {}, ".splunkrc") + self.opts = testlib.parse([], {}, ".env") self.service = client.Service(**self.opts.kwargs) if 
getattr(unittest.TestCase, 'assertIsNotNone', None) is None: diff --git a/tests/testlib.py b/tests/testlib.py index 984b6a94c..61be722ea 100644 --- a/tests/testlib.py +++ b/tests/testlib.py @@ -232,7 +232,7 @@ def restartSplunk(self, timeout=240): @classmethod def setUpClass(cls): - cls.opts = parse([], {}, ".splunkrc") + cls.opts = parse([], {}, ".env") # Before we start, make sure splunk doesn't need a restart. service = client.connect(**cls.opts.kwargs) diff --git a/tox.ini b/tox.ini index d9a001e25..227be746c 100644 --- a/tox.ini +++ b/tox.ini @@ -32,6 +32,7 @@ deps = pytest xmlrunner unittest2 unittest-xml-reporting + python-dotenv distdir = build commands = diff --git a/utils/__init__.py b/utils/__init__.py index f38027efe..b1bb77a50 100644 --- a/utils/__init__.py +++ b/utils/__init__.py @@ -109,7 +109,8 @@ def dslice(value, *args): def parse(argv, rules=None, config=None, **kwargs): """Parse the given arg vector with the default Splunk command rules.""" parser_ = parser(rules, **kwargs) - if config is not None: parser_.loadrc(config) + if config is not None: + parser_.loadenv(config) return parser_.parse(argv).result def parser(rules=None, **kwargs): diff --git a/utils/cmdopts.py b/utils/cmdopts.py index 5938efd17..b0cbb7328 100644 --- a/utils/cmdopts.py +++ b/utils/cmdopts.py @@ -19,6 +19,7 @@ from os import path from optparse import OptionParser import sys +from dotenv import dotenv_values __all__ = [ "error", "Parser", "cmdline" ] @@ -67,22 +68,24 @@ def init(self, rules): # Remember the dest vars that we see, so that we can merge results self.dests.add(dest) - - # Load command options from given 'config' file. Long form options may omit - # the leading "--", and if so we fix that up here. + + # Load command options from '.env' file def load(self, filepath): argv = [] try: - file = open(filepath) + filedata = dotenv_values(filepath) except: error("Unable to open '%s'" % filepath, 2) - for line in file: - if line.startswith("#"): continue # Skip comment - line = line.strip() - if len(line) == 0: continue # Skip blank line - if not line.startswith("-"): line = "--" + line - argv.append(line) - self.parse(argv) + + # update result kwargs value with .env file data + for key, value in filedata.items(): + value = value.strip() + if len(value) == 0 or value is None: continue # Skip blank value + elif key in self.dests: + self.result['kwargs'][key] = value + else: + raise NameError("No such option --" + key) + return self def loadif(self, filepath): @@ -90,8 +93,9 @@ def loadif(self, filepath): if path.isfile(filepath): self.load(filepath) return self - def loadrc(self, filename): - filepath = path.expanduser(path.join("~", "%s" % filename)) + def loadenv(self, filename): + dir_path = path.dirname(path.realpath(__file__)) + filepath = path.join(dir_path, '..', filename) self.loadif(filepath) return self @@ -114,6 +118,6 @@ def cmdline(argv, rules=None, config=None, **kwargs): """Simplified cmdopts interface that does not default any parsing rules and that does not allow compounding calls to the parser.""" parser = Parser(rules, **kwargs) - if config is not None: parser.loadrc(config) + if config is not None: parser.loadenv(config) return parser.parse(argv).result From fbb867cce1153b523b31255bbee91799e38527f6 Mon Sep 17 00:00:00 2001 From: akaila-splunk Date: Tue, 11 Jan 2022 19:27:53 +0530 Subject: [PATCH 40/58] removed .splunkrc support - Updated README.md file - removed scripts and makefile commands use for creating .splunkrc file - removed .splunkrc create steps from GitHub action for 
test --- .github/workflows/test.yml | 9 --- Makefile | 11 --- README.md | 39 +--------- scripts/build-splunkrc.py | 112 ---------------------------- scripts/templates/splunkrc.template | 16 ---- 5 files changed, 4 insertions(+), 183 deletions(-) delete mode 100644 scripts/build-splunkrc.py delete mode 100644 scripts/templates/splunkrc.template diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 71ed1e667..e09f04988 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -36,15 +36,6 @@ jobs: uses: actions/setup-python@v2 with: python-version: ${{ matrix.python }} - - name: Create .splunkrc file - run: | - cd ~ - echo host=localhost > .splunkrc - echo port=8089 >> .splunkrc - echo username=admin >> .splunkrc - echo password=changed! >> .splunkrc - echo scheme=https >> .splunkrc - echo version=${{ matrix.splunk }} >> .splunkrc - name: Install tox run: pip install tox - name: Test Execution diff --git a/Makefile b/Makefile index 233978781..452a47243 100644 --- a/Makefile +++ b/Makefile @@ -56,17 +56,6 @@ test_smoke_no_app: @echo "$(ATTN_COLOR)==> test_smoke_no_app $(NO_COLOR)" @tox -e py27,py37 -- -m "smoke and not app" -.PHONY: splunkrc -splunkrc: - @echo "$(ATTN_COLOR)==> splunkrc $(NO_COLOR)" - @echo "To make a .splunkrc:" - @echo " [SPLUNK_INSTANCE_JSON] | python scripts/build-splunkrc.py ~/.splunkrc" - -.PHONY: splunkrc_default -splunkrc_default: - @echo "$(ATTN_COLOR)==> splunkrc_default $(NO_COLOR)" - @python scripts/build-splunkrc.py ~/.splunkrc - .PHONY: up up: @echo "$(ATTN_COLOR)==> up $(NO_COLOR)" diff --git a/README.md b/README.md index c735e9854..10503828b 100644 --- a/README.md +++ b/README.md @@ -60,7 +60,6 @@ You'll need `docker` and `docker-compose` to get up and running using this metho ``` make up SPLUNK_VERSION=8.0 make wait_up -make splunkrc_default make test make down ``` @@ -91,42 +90,12 @@ service = client.connect(host=, token=, autologin=True) ``` ### -#### Create a .splunkrc convenience file +#### Update a .env file -To connect to Splunk Enterprise, many of the SDK examples and unit tests take command-line arguments that specify values for the host, port, and login credentials for Splunk Enterprise. For convenience during development, you can store these arguments as key-value pairs in a text file named **.splunkrc**. Then, the SDK examples and unit tests use the values from the **.splunkrc** file when you don't specify them. +To connect to Splunk Enterprise, many of the SDK examples and unit tests take command-line arguments that specify values for the host, port, and login credentials for Splunk Enterprise. For convenience during development, you can store these arguments as key-value pairs in a **.env** file. Then, the SDK examples and unit tests use the values from the **.env** file when you don't specify them. ->**Note**: Storing login credentials in the **.splunkrc** file is only for convenience during development. This file isn't part of the Splunk platform and shouldn't be used for storing user credentials for production. And, if you're at all concerned about the security of your credentials, enter them at the command line rather than saving them in this file. +>**Note**: Storing login credentials in the **.env** file is only for convenience during development. This file isn't part of the Splunk platform and shouldn't be used for storing user credentials for production. And, if you're at all concerned about the security of your credentials, enter them at the command line rather than saving them in this file. 
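With the key-value pairs above in place, the SDK examples load them through the `utils.parse` helper (as updated in the previous patch) and pass the merged values to `connect`. A minimal sketch of that pattern — the final check that lists installed apps is illustrative only:

```python
import sys

import splunklib.client as client
from utils import parse  # command-line/.env helper shipped in this repository's utils package

# Merge any command-line arguments with the values stored in the .env file at the repo root
opts = parse(sys.argv[1:], {}, ".env")

# Connect using host, port, username, password, scheme, etc. from the merged options
service = client.connect(**opts.kwargs)

# Illustrative sanity check: list the apps visible to this connection
print([app.name for app in service.apps])
```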
-To use this convenience file, create a text file with the following format: - - # Splunk Enterprise host (default: localhost) - host=localhost - # Splunk Enterprise admin port (default: 8089) - port=8089 - # Splunk Enterprise username - username=admin - # Splunk Enterprise password - password=changeme - # Access scheme (default: https) - scheme=https - # Your version of Splunk Enterprise - version=8.0 - -Save the file as **.splunkrc** in the current user's home directory. - -* For example on OS X, save the file as: - - ~/.splunkrc - -* On Windows, save the file as: - - C:\Users\currentusername\.splunkrc - - You might get errors in Windows when you try to name the file because ".splunkrc" appears to be a nameless file with an extension. You can use the command line to create this file by going to the **C:\Users\\<currentusername>** directory and entering the following command: - - Notepad.exe .splunkrc - - Click **Yes**, then continue creating the file. #### Run the examples @@ -144,7 +113,7 @@ Using Session key python examplename.py --sessionKey="" -If you saved your login credentials in the **.splunkrc** file, you can omit those arguments: +If you saved your login credentials in the **.env** file, you can omit those arguments: python examplename.py diff --git a/scripts/build-splunkrc.py b/scripts/build-splunkrc.py deleted file mode 100644 index 0d544665c..000000000 --- a/scripts/build-splunkrc.py +++ /dev/null @@ -1,112 +0,0 @@ -# Copyright 2011-2020 Splunk, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"): you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -#!/usr/bin/env python - -import sys -import json -import urllib.parse -import os -from pathlib import Path -from string import Template - -DEFAULT_CONFIG = { - 'host': 'localhost', - 'port': '8089', - 'username': 'admin', - 'password': 'changed!', - 'scheme': 'https', - 'version': '8.0' -} - -DEFAULT_SPLUNKRC_PATH = os.path.join(str(Path.home()), '.splunkrc') - -SPLUNKRC_TEMPLATE_PATH = os.path.join( - os.path.dirname(os.path.realpath(__file__)), 'templates/splunkrc.template') - -# { -# "server_roles": { -# "standalone": [ -# { -# "host": "10.224.106.158", -# "ports": { -# "8089/tcp": "10.224.106.158:55759", -# }, -# "splunk": { -# "user_roles": { -# "admin": { -# "password": "Chang3d!", -# "username": "admin" -# } -# }, -# "version": "8.1.0", -# "web_url": "http://10.224.106.158:55761" -# } -# } -# ] -# } -# } -def build_config(json_string): - try: - spec_config = json.loads(json_string) - - server_config = spec_config['server_roles']['standalone'][0] - splunk_config = server_config['splunk'] - - host, port = parse_hostport(server_config['ports']['8089/tcp']) - - return { - 'host': host, - 'port': port, - 'username': splunk_config['user_roles']['admin']['username'], - 'password': splunk_config['user_roles']['admin']['password'], - 'version': splunk_config['version'], - } - except Exception as e: - raise ValueError('Invalid configuration JSON string') from e - -# Source: https://stackoverflow.com/a/53172593 -def parse_hostport(host_port): - # urlparse() and urlsplit() insists on absolute URLs starting with "//" - result = urllib.parse.urlsplit('//' + host_port) - return result.hostname, result.port - -def run(variable, splunkrc_path=None): - # read JSON from input - # parse the JSON - input_config = build_config(variable) if variable else DEFAULT_CONFIG - - config = {**DEFAULT_CONFIG, **input_config} - - # build a splunkrc file - with open(SPLUNKRC_TEMPLATE_PATH, 'r') as f: - template = Template(f.read()) - - splunkrc_string = template.substitute(config) - - # if no splunkrc, dry-run - if not splunkrc_path: - print(splunkrc_string) - return - - # write the .splunkrc file - with open(splunkrc_path, 'w') as f: - f.write(splunkrc_string) - -if sys.stdin.isatty(): - DATA = None -else: - DATA = sys.stdin.read() - -run(DATA, sys.argv[1] if len(sys.argv) > 1 else None) diff --git a/scripts/templates/splunkrc.template b/scripts/templates/splunkrc.template deleted file mode 100644 index b98f93af6..000000000 --- a/scripts/templates/splunkrc.template +++ /dev/null @@ -1,16 +0,0 @@ -# Splunk host (default: localhost) -host=$host -# Splunk admin port (default: 8089) -port=$port -# Splunk username -username=$username -# Splunk password -password=$password -# Access scheme (default: https) -scheme=$scheme -# Your version of Splunk (default: 6.2) -version=$version -# Bearer token for authentication -#bearerToken= -# Session key for authentication -#sessionKey= From 2f0eab121fa0c5ac2f375efc15ae2d15c79363cd Mon Sep 17 00:00:00 2001 From: akaila-splunk Date: Tue, 18 Jan 2022 14:20:07 +0530 Subject: [PATCH 41/58] documented how to access modular input metadata --- README.md | 16 ++++++++++++++++ examples/github_commits/bin/github_commits.py | 7 +++++++ 2 files changed, 23 insertions(+) diff --git a/README.md b/README.md index c735e9854..edaf0d15d 100644 --- a/README.md +++ b/README.md @@ -227,6 +227,22 @@ Don't yield {'_time': time.time(), 'two': 2} ``` +### Access metadata of modular inputs app +* In stream_events() method we can access modular input app metadata from InputDefinition object +* See [GitHub 
Commit](https://github.com/splunk/splunk-sdk-python/blob/develop/examples/github_commits/bin/github_commits.py) Modular input App example for reference. +```python + def stream_events(self, inputs, ew): + # other code + + # access metadata (like server_host, server_uri, etc) of modular inputs app from InputDefinition object + # here inputs is a InputDefinition object + server_host = inputs.metadata["server_host"] + server_uri = inputs.metadata["server_uri"] + + # Get the checkpoint directory out of the modular input's metadata + checkpoint_dir = inputs.metadata["checkpoint_dir"] +``` + ### Changelog The [CHANGELOG](CHANGELOG.md) contains a description of changes for each version of the SDK. For the latest version, see the [CHANGELOG.md](https://github.com/splunk/splunk-sdk-python/blob/master/CHANGELOG.md) on GitHub. diff --git a/examples/github_commits/bin/github_commits.py b/examples/github_commits/bin/github_commits.py index d27b61871..5581b9897 100644 --- a/examples/github_commits/bin/github_commits.py +++ b/examples/github_commits/bin/github_commits.py @@ -146,6 +146,13 @@ def stream_events(self, inputs, ew): token = None if "token" in input_item: token = input_item["token"] + + ''' + access metadata (like server_host, server_uri, etc) of modular inputs app from InputDefinition object + here inputs is a InputDefinition object + server_host = inputs.metadata["server_host"] + server_uri = inputs.metadata["server_uri"] + ''' # Get the checkpoint directory out of the modular input's metadata checkpoint_dir = inputs.metadata["checkpoint_dir"] From 048aeabdb825bff638b4e3fddbe138124d7aa9da Mon Sep 17 00:00:00 2001 From: Scott Savarese <6461527+scottsavarese@users.noreply.github.com> Date: Wed, 19 Jan 2022 10:21:27 -0500 Subject: [PATCH 42/58] Fix bug in write for python3 --- splunklib/modularinput/event_writer.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/splunklib/modularinput/event_writer.py b/splunklib/modularinput/event_writer.py index ab090cc64..5f8c5aa8b 100755 --- a/splunklib/modularinput/event_writer.py +++ b/splunklib/modularinput/event_writer.py @@ -83,5 +83,5 @@ def write_xml_document(self, document): def close(self): """Write the closing tag to make this XML well formed.""" if self.header_written: - self._out.write(b"") + self._out.write("") self._out.flush() From 49a387a90a109a17e125b45d12a885731ce20c18 Mon Sep 17 00:00:00 2001 From: Artem Rys Date: Wed, 19 Jan 2022 22:36:14 +0100 Subject: [PATCH 43/58] Fix indentation in README --- README.md | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/README.md b/README.md index c735e9854..80ed84bf6 100644 --- a/README.md +++ b/README.md @@ -75,7 +75,7 @@ The SDK command-line examples require a common set of arguments that specify the #### Using username/password ```python import splunklib.client as client - service = client.connect(host=, username=, password=, autologin=True) +service = client.connect(host=, username=, password=, autologin=True) ``` #### Using bearer token @@ -212,19 +212,19 @@ class CustomStreamingCommand(StreamingCommand): Do ```python @Configuration() - class GeneratorTest(GeneratingCommand): - def generate(self): - yield self.gen_record(_time=time.time(), one=1) - yield self.gen_record(_time=time.time(), two=2) +class GeneratorTest(GeneratingCommand): + def generate(self): + yield self.gen_record(_time=time.time(), one=1) + yield self.gen_record(_time=time.time(), two=2) ``` Don't ```python @Configuration() - class GeneratorTest(GeneratingCommand): - def 
generate(self): - yield {'_time': time.time(), 'one': 1} - yield {'_time': time.time(), 'two': 2} +class GeneratorTest(GeneratingCommand): + def generate(self): + yield {'_time': time.time(), 'one': 1} + yield {'_time': time.time(), 'two': 2} ``` ### Changelog From 34421e937482227aa153d134c0e108f9eccb788a Mon Sep 17 00:00:00 2001 From: akaila-splunk Date: Tue, 25 Jan 2022 14:02:10 +0530 Subject: [PATCH 44/58] Added build-env.py file and updated Makefile commands to create an env file --- Makefile | 11 ++++ README.md | 18 ++++++ scripts/build-env.py | 112 +++++++++++++++++++++++++++++++++ scripts/templates/env.template | 16 +++++ 4 files changed, 157 insertions(+) create mode 100644 scripts/build-env.py create mode 100644 scripts/templates/env.template diff --git a/Makefile b/Makefile index 452a47243..2810c6aec 100644 --- a/Makefile +++ b/Makefile @@ -56,6 +56,17 @@ test_smoke_no_app: @echo "$(ATTN_COLOR)==> test_smoke_no_app $(NO_COLOR)" @tox -e py27,py37 -- -m "smoke and not app" +.PHONY: env +env: + @echo "$(ATTN_COLOR)==> env $(NO_COLOR)" + @echo "To make a .env:" + @echo " [SPLUNK_INSTANCE_JSON] | python scripts/build-env.py" + +.PHONY: env_default +env_default: + @echo "$(ATTN_COLOR)==> env_default $(NO_COLOR)" + @python scripts/build-env.py + .PHONY: up up: @echo "$(ATTN_COLOR)==> up $(NO_COLOR)" diff --git a/README.md b/README.md index 10503828b..e9ade60c4 100644 --- a/README.md +++ b/README.md @@ -96,6 +96,24 @@ To connect to Splunk Enterprise, many of the SDK examples and unit tests take co >**Note**: Storing login credentials in the **.env** file is only for convenience during development. This file isn't part of the Splunk platform and shouldn't be used for storing user credentials for production. And, if you're at all concerned about the security of your credentials, enter them at the command line rather than saving them in this file. +here is an example of .env file: + + # Splunk Enterprise host (default: localhost) + host=localhost + # Splunk Enterprise admin port (default: 8089) + port=8089 + # Splunk Enterprise username + username=admin + # Splunk Enterprise password + password=changed! + # Access scheme (default: https) + scheme=https + # Your version of Splunk Enterprise + version=8.0 + # Bearer token for authentication + #bearerToken= + # Session key for authentication + #sessionKey= #### Run the examples diff --git a/scripts/build-env.py b/scripts/build-env.py new file mode 100644 index 000000000..e1a153d4a --- /dev/null +++ b/scripts/build-env.py @@ -0,0 +1,112 @@ +# Copyright 2011-2020 Splunk, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"): you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +#!/usr/bin/env python + +import sys +import json +import urllib.parse +import os +from pathlib import Path +from string import Template + +DEFAULT_CONFIG = { + 'host': 'localhost', + 'port': '8089', + 'username': 'admin', + 'password': 'changed!', + 'scheme': 'https', + 'version': '8.0' +} + +DEFAULT_ENV_PATH = os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', '.env') + +ENV_TEMPLATE_PATH = os.path.join( + os.path.dirname(os.path.realpath(__file__)), 'templates/env.template') + +# { +# "server_roles": { +# "standalone": [ +# { +# "host": "10.224.106.158", +# "ports": { +# "8089/tcp": "10.224.106.158:55759", +# }, +# "splunk": { +# "user_roles": { +# "admin": { +# "password": "Chang3d!", +# "username": "admin" +# } +# }, +# "version": "8.1.0", +# "web_url": "http://10.224.106.158:55761" +# } +# } +# ] +# } +# } +def build_config(json_string): + try: + spec_config = json.loads(json_string) + + server_config = spec_config['server_roles']['standalone'][0] + splunk_config = server_config['splunk'] + + host, port = parse_hostport(server_config['ports']['8089/tcp']) + + return { + 'host': host, + 'port': port, + 'username': splunk_config['user_roles']['admin']['username'], + 'password': splunk_config['user_roles']['admin']['password'], + 'version': splunk_config['version'], + } + except Exception as e: + raise ValueError('Invalid configuration JSON string') from e + +# Source: https://stackoverflow.com/a/53172593 +def parse_hostport(host_port): + # urlparse() and urlsplit() insists on absolute URLs starting with "//" + result = urllib.parse.urlsplit('//' + host_port) + return result.hostname, result.port + +def run(variable, env_path=None): + # read JSON from input + # parse the JSON + input_config = build_config(variable) if variable else DEFAULT_CONFIG + + config = {**DEFAULT_CONFIG, **input_config} + + # build a env file + with open(ENV_TEMPLATE_PATH, 'r') as f: + template = Template(f.read()) + + env_string = template.substitute(config) + env_path = DEFAULT_ENV_PATH if env_path is None else env_path + # if no env, dry-run + if not env_path: + print(env_string) + return + + # write the .env file + with open(env_path, 'w') as f: + f.write(env_string) + +if sys.stdin.isatty(): + DATA = None +else: + DATA = sys.stdin.read() + +run(DATA, sys.argv[1] if len(sys.argv) > 1 else None) \ No newline at end of file diff --git a/scripts/templates/env.template b/scripts/templates/env.template new file mode 100644 index 000000000..a45851b6a --- /dev/null +++ b/scripts/templates/env.template @@ -0,0 +1,16 @@ +# Splunk host (default: localhost) +host=$host +# Splunk admin port (default: 8089) +port=$port +# Splunk username +username=$username +# Splunk password +password=$password +# Access scheme (default: https) +scheme=$scheme +# Your version of Splunk (default: 6.2) +version=$version +# Bearer token for authentication +#bearerToken= +# Session key for authentication +#sessionKey= \ No newline at end of file From d47f9188e7825a7c86adab9bfe7fb665e81ea418 Mon Sep 17 00:00:00 2001 From: Brett Adams Date: Fri, 28 Jan 2022 14:39:15 +1000 Subject: [PATCH 45/58] Create distsearch.conf --- examples/searchcommands_template/default/distsearch.conf | 7 +++++++ 1 file changed, 7 insertions(+) create mode 100644 examples/searchcommands_template/default/distsearch.conf diff --git a/examples/searchcommands_template/default/distsearch.conf b/examples/searchcommands_template/default/distsearch.conf new file mode 100644 index 000000000..8abbe3b9e --- /dev/null +++ 
b/examples/searchcommands_template/default/distsearch.conf @@ -0,0 +1,7 @@ +# Valid in <=8.2 +[replicationWhitelist] +searchcommands_template = apps/searchcommands_template/lib/... + +# Valid in >=8.3 +[replicationAllowlist] +searchcommands_template = apps/searchcommands_template/lib/... From 514cb4332d31efb021a4b8b6dea810b46c65acc8 Mon Sep 17 00:00:00 2001 From: Brett Adams Date: Fri, 28 Jan 2022 14:42:06 +1000 Subject: [PATCH 46/58] Create distsearch.conf --- .../searchcommands_app/package/default/distsearch.conf | 7 +++++++ 1 file changed, 7 insertions(+) create mode 100644 examples/searchcommands_app/package/default/distsearch.conf diff --git a/examples/searchcommands_app/package/default/distsearch.conf b/examples/searchcommands_app/package/default/distsearch.conf new file mode 100644 index 000000000..1c13e5414 --- /dev/null +++ b/examples/searchcommands_app/package/default/distsearch.conf @@ -0,0 +1,7 @@ +# Valid in <=8.2 +[replicationWhitelist] +searchcommands_app = apps/searchcommands_app/lib/... + +# Valid in >=8.3 +[replicationAllowlist] +searchcommands_app = apps/searchcommands_app/lib/... From 00fadcb0ee24481b921f9b0bb067b94c80630c0b Mon Sep 17 00:00:00 2001 From: akaila-splunk Date: Mon, 7 Feb 2022 17:37:20 +0530 Subject: [PATCH 47/58] Used module-specific logger in splunklib code instead of root logger --- splunklib/__init__.py | 5 +++++ splunklib/binding.py | 11 ++++++----- splunklib/client.py | 8 +++++--- 3 files changed, 16 insertions(+), 8 deletions(-) diff --git a/splunklib/__init__.py b/splunklib/__init__.py index 41c261fdc..bee2dd499 100644 --- a/splunklib/__init__.py +++ b/splunklib/__init__.py @@ -16,5 +16,10 @@ from __future__ import absolute_import from splunklib.six.moves import map +import logging + +# To enable debug logs, set the level to 'logging.DEBUG' +logging.basicConfig(level=logging.WARNING) + __version_info__ = (1, 6, 18) __version__ = ".".join(map(str, __version_info__)) diff --git a/splunklib/binding.py b/splunklib/binding.py index d1d4c3ce3..60fc5294f 100644 --- a/splunklib/binding.py +++ b/splunklib/binding.py @@ -47,6 +47,7 @@ except ImportError as e: from xml.parsers.expat import ExpatError as ParseError +logger = logging.getLogger(__name__) __all__ = [ "AuthenticationError", @@ -68,7 +69,7 @@ def new_f(*args, **kwargs): start_time = datetime.now() val = f(*args, **kwargs) end_time = datetime.now() - logging.debug("Operation took %s", end_time-start_time) + logger.debug("Operation took %s", end_time-start_time) return val return new_f @@ -616,7 +617,7 @@ def delete(self, path_segment, owner=None, app=None, sharing=None, **query): """ path = self.authority + self._abspath(path_segment, owner=owner, app=app, sharing=sharing) - logging.debug("DELETE request to %s (body: %s)", path, repr(query)) + logger.debug("DELETE request to %s (body: %s)", path, repr(query)) response = self.http.delete(path, self._auth_headers, **query) return response @@ -679,7 +680,7 @@ def get(self, path_segment, owner=None, app=None, headers=None, sharing=None, ** path = self.authority + self._abspath(path_segment, owner=owner, app=app, sharing=sharing) - logging.debug("GET request to %s (body: %s)", path, repr(query)) + logger.debug("GET request to %s (body: %s)", path, repr(query)) all_headers = headers + self.additional_headers + self._auth_headers response = self.http.get(path, all_headers, **query) return response @@ -757,7 +758,7 @@ def post(self, path_segment, owner=None, app=None, sharing=None, headers=None, * headers = [] path = self.authority + 
self._abspath(path_segment, owner=owner, app=app, sharing=sharing) - logging.debug("POST request to %s (body: %s)", path, repr(query)) + logger.debug("POST request to %s (body: %s)", path, repr(query)) all_headers = headers + self.additional_headers + self._auth_headers response = self.http.post(path, all_headers, **query) return response @@ -826,7 +827,7 @@ def request(self, path_segment, method="GET", headers=None, body={}, app=app, sharing=sharing) all_headers = headers + self.additional_headers + self._auth_headers - logging.debug("%s request to %s (headers: %s, body: %s)", + logger.debug("%s request to %s (headers: %s, body: %s)", method, path, str(all_headers), repr(body)) if body: diff --git a/splunklib/client.py b/splunklib/client.py index 22bb0fc53..7b0772f11 100644 --- a/splunklib/client.py +++ b/splunklib/client.py @@ -75,6 +75,8 @@ namespace) from .data import record +logger = logging.getLogger(__name__) + __all__ = [ "connect", "NotSupportedError", @@ -1476,7 +1478,7 @@ def iter(self, offset=0, count=None, pagesize=None, **kwargs): if pagesize is None or N < pagesize: break offset += N - logging.debug("pagesize=%d, fetched=%d, offset=%d, N=%d, kwargs=%s", pagesize, fetched, offset, N, kwargs) + logger.debug("pagesize=%d, fetched=%d, offset=%d, N=%d, kwargs=%s", pagesize, fetched, offset, N, kwargs) # kwargs: count, offset, search, sort_dir, sort_key, sort_mode def list(self, count=None, **kwargs): @@ -2545,9 +2547,9 @@ def list(self, *kinds, **kwargs): kinds = self.kinds if len(kinds) == 1: kind = kinds[0] - logging.debug("Inputs.list taking short circuit branch for single kind.") + logger.debug("Inputs.list taking short circuit branch for single kind.") path = self.kindpath(kind) - logging.debug("Path for inputs: %s", path) + logger.debug("Path for inputs: %s", path) try: path = UrlEncoded(path, skip_encode=True) response = self.get(path, **kwargs) From 313e97bd50396bbc50fabe762bf9164fa0b93338 Mon Sep 17 00:00:00 2001 From: tdhellmann Date: Tue, 22 Feb 2022 08:15:54 -0800 Subject: [PATCH 48/58] Docs updates for #434 Updating broken docs link and adding links to additional references. --- splunklib/searchcommands/__init__.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/splunklib/searchcommands/__init__.py b/splunklib/searchcommands/__init__.py index c56c510d5..8a929039c 100644 --- a/splunklib/searchcommands/__init__.py +++ b/splunklib/searchcommands/__init__.py @@ -134,9 +134,13 @@ .. topic:: References - 1. `Search command style guide `__ + 1. `Custom Search Command manual: `__ - 2. `Commands.conf.spec `_ + 2. `Create Custom Search Commands with commands.conf.spec `_ + + 3. `Configure seach assistant with searchbnf.conf `_ + + 4. `Control search distribution with distsearch.conf `_ """ From ec068498adc218296322dbc2cd7a98d53a5678a2 Mon Sep 17 00:00:00 2001 From: akaila-splunk Date: Tue, 1 Mar 2022 19:07:35 +0530 Subject: [PATCH 49/58] add method to set logging for splunklib directory --- splunklib/__init__.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/splunklib/__init__.py b/splunklib/__init__.py index bee2dd499..d85857578 100644 --- a/splunklib/__init__.py +++ b/splunklib/__init__.py @@ -18,8 +18,11 @@ from splunklib.six.moves import map import logging -# To enable debug logs, set the level to 'logging.DEBUG' -logging.basicConfig(level=logging.WARNING) +# To set the logging level of splunklib +# ex. 
To enable debug logs, call this method with parameter 'logging.DEBUG' +# default logging level is set to 'WARNING' +def setLoggingLevel(level): + logging.basicConfig(level=level) __version_info__ = (1, 6, 18) __version__ = ".".join(map(str, __version_info__)) From fbeff528f2c54d0893a2eadc1fd03ab4824fe137 Mon Sep 17 00:00:00 2001 From: akaila-splunk Date: Thu, 3 Mar 2022 19:37:59 +0530 Subject: [PATCH 50/58] add formatter for logs and update README.md file - add default time and log formatter in setup_logging() method - add details of setup_logging() method and logging.conf files in README.md file --- README.md | 13 +++++++++++++ examples/searchcommands_app/README.md | 11 ++++++++++- .../searchcommands_app/package/default/logging.conf | 1 + splunklib/__init__.py | 11 +++++++++-- 4 files changed, 33 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index 77ecfbf5a..4e522e8e4 100644 --- a/README.md +++ b/README.md @@ -243,6 +243,19 @@ class GeneratorTest(GeneratingCommand): checkpoint_dir = inputs.metadata["checkpoint_dir"] ``` +#### Optional: Set up logging for splunklib ++ The default level is WARNING, which means that only events of this level and above will be visible ++ To change the logging level, call the setup_logging() method and pass the desired logging level as an argument. ++ Optionally, a log format string and a date format string can also be passed as method arguments to modify the default format + +```python +import logging +from splunklib import setup_logging + +# To see debug and above level logs +setup_logging(logging.DEBUG) +``` + ### Changelog The [CHANGELOG](CHANGELOG.md) contains a description of changes for each version of the SDK. For the latest version, see the [CHANGELOG.md](https://github.com/splunk/splunk-sdk-python/blob/master/CHANGELOG.md) on GitHub. diff --git a/examples/searchcommands_app/README.md b/examples/searchcommands_app/README.md index 075253134..b1c07311d 100644 --- a/examples/searchcommands_app/README.md +++ b/examples/searchcommands_app/README.md @@ -35,7 +35,7 @@ The app is tested on Splunk 5 and 6. Here is its manifest: └── default.meta ............. Permits the search assistant to use searchbnf.conf[6] ``` **References** [1] [app.conf](https://docs.splunk.com/Documentation/Splunk/latest/Admin/Appconf app.conf) [2] [commands.conf](https://docs.splunk.com/Documentation/Splunk/latest/Admin/Commandsconf) [3] [Python Logging HOWTO](https://docs.python.org/2/howto/logging.html) [4] [ConfigParser—Configuration file parser](https://docs.python.org/2/library/configparser.html) @@ -110,6 +110,15 @@ word_counts | :-----| 4497.0 | +## Optional: Set up logging using the logging.conf file ++ Inside the **default** directory of our app, we have a [logging.conf](https://github.com/splunk/splunk-sdk-python/blob/master/examples/searchcommands_app/package/default/logging.conf) file. ++ In the logging.conf file we can define the loggers, handlers and formatters for our app; 
refer [this doc](https://docs.python.org/2/library/logging.config.html#configuration-file-format) for more details ++ Logs will be written in the files specified in the handlers defined for the respective loggers + + For **'searchcommands_app'** app logs will be written in **searchcommands_app.log** and **splunklib.log** files defined in respective handlers, and are present at $SPLUNK_HOME/etc/apps/searchcommands_app/ dir + + By default logs will be written in the app's root directory, but it can be overriden by specifying the absolute path for the logs file in the conf file ++ By default, logging level is set to WARNING ++ To see debug and above level logs, Set level to DEBUG in logging.conf file + ## License This software is licensed under the Apache License 2.0. Details can be found in diff --git a/examples/searchcommands_app/package/default/logging.conf b/examples/searchcommands_app/package/default/logging.conf index 4b2ae621e..f3220a63d 100644 --- a/examples/searchcommands_app/package/default/logging.conf +++ b/examples/searchcommands_app/package/default/logging.conf @@ -96,3 +96,4 @@ keys = searchcommands [formatter_searchcommands] format = %(asctime)s, Level=%(levelname)s, Pid=%(process)s, Logger=%(name)s, File=%(filename)s, Line=%(lineno)s, %(message)s +datefmt = %Y-%m-%d %H:%M:%S %Z diff --git a/splunklib/__init__.py b/splunklib/__init__.py index d85857578..5b7c32122 100644 --- a/splunklib/__init__.py +++ b/splunklib/__init__.py @@ -18,11 +18,18 @@ from splunklib.six.moves import map import logging +DEFAULT_LOG_FORMAT = '%(asctime)s, Level=%(levelname)s, Pid=%(process)s, Logger=%(name)s, File=%(filename)s, ' \ + 'Line=%(lineno)s, %(message)s' +DEFAULT_DATE_FORMAT = '%Y-%m-%d %H:%M:%S %Z' + + # To set the logging level of splunklib # ex. To enable debug logs, call this method with parameter 'logging.DEBUG' # default logging level is set to 'WARNING' -def setLoggingLevel(level): - logging.basicConfig(level=level) +def setup_logging(level, log_format=DEFAULT_LOG_FORMAT, date_format=DEFAULT_DATE_FORMAT): + logging.basicConfig(level=level, + format=log_format, + datefmt=date_format) __version_info__ = (1, 6, 18) __version__ = ".".join(map(str, __version_info__)) From 88a9869f9090f073e2cf4e8bc7a5350e708f86c5 Mon Sep 17 00:00:00 2001 From: Abhi Shah Date: Tue, 8 Mar 2022 16:59:43 +0530 Subject: [PATCH 51/58] JSONResultsReader changes --- examples/follow.py | 6 ++-- examples/oneshot.py | 4 +-- examples/search_modes.py | 4 +-- examples/stail.py | 7 +++-- splunklib/results.py | 65 +++++++++++++++++++++++++++++++++++++++- tests/test_job.py | 24 +++++++-------- tests/test_results.py | 2 +- 7 files changed, 88 insertions(+), 24 deletions(-) diff --git a/examples/follow.py b/examples/follow.py index 64b3e1ac6..cbb559deb 100755 --- a/examples/follow.py +++ b/examples/follow.py @@ -42,7 +42,7 @@ def follow(job, count, items): job.refresh() continue stream = items(offset+1) - for event in results.ResultsReader(stream): + for event in results.JSONResultsReader(stream): pprint(event) offset = total @@ -72,10 +72,10 @@ def main(): if job['reportSearch'] is not None: # Is it a transforming search? 
count = lambda: int(job['numPreviews']) - items = lambda _: job.preview() + items = lambda _: job.preview(output_mode='json') else: count = lambda: int(job['eventCount']) - items = lambda offset: job.events(offset=offset) + items = lambda offset: job.events(offset=offset, output_mode='json') try: follow(job, count, items) diff --git a/examples/oneshot.py b/examples/oneshot.py index dc34bb8cb..8429aedfb 100755 --- a/examples/oneshot.py +++ b/examples/oneshot.py @@ -32,7 +32,7 @@ "(e.g., export PYTHONPATH=~/splunk-sdk-python.") def pretty(response): - reader = results.ResultsReader(response) + reader = results.JSONResultsReader(response) for result in reader: if isinstance(result, dict): pprint(result) @@ -46,7 +46,7 @@ def main(): search = opts.args[0] service = connect(**opts.kwargs) socket.setdefaulttimeout(None) - response = service.jobs.oneshot(search) + response = service.jobs.oneshot(search, output_mode='json') pretty(response) diff --git a/examples/search_modes.py b/examples/search_modes.py index f3e05f362..66fa77cd4 100644 --- a/examples/search_modes.py +++ b/examples/search_modes.py @@ -24,7 +24,7 @@ def modes(argv): while not job.is_ready(): time.sleep(0.5) pass - reader = results.ResultsReader(job.events()) + reader = results.JSONResultsReader(job.events(output_mode='json')) # Events found: 0 print('Events found with adhoc_search_level="smart": %s' % len([e for e in reader])) @@ -33,7 +33,7 @@ def modes(argv): while not job.is_ready(): time.sleep(0.5) pass - reader = results.ResultsReader(job.events()) + reader = results.ResultsReader(job.events(output_mode='json')) # Events found: 10 print('Events found with adhoc_search_level="verbose": %s' % len([e for e in reader])) diff --git a/examples/stail.py b/examples/stail.py index 85f38a853..3df3f10d7 100755 --- a/examples/stail.py +++ b/examples/stail.py @@ -25,7 +25,7 @@ from pprint import pprint from splunklib.client import connect -from splunklib.results import ResultsReader +from splunklib.results import ResultsReader, JSONResultsReader try: import utils @@ -49,9 +49,10 @@ def main(): search=search, earliest_time="rt", latest_time="rt", - search_mode="realtime") + search_mode="realtime", + output_mode="json") - for result in ResultsReader(result.body): + for result in JSONResultsReader(result.body): if result is not None: print(pprint(result)) diff --git a/splunklib/results.py b/splunklib/results.py index 66e9ad7d1..19c8182df 100644 --- a/splunklib/results.py +++ b/splunklib/results.py @@ -34,7 +34,7 @@ from __future__ import absolute_import -from io import BytesIO +from io import BufferedReader, BytesIO from splunklib import six try: @@ -43,6 +43,7 @@ import xml.etree.ElementTree as et from collections import OrderedDict +from json import loads as json_loads try: from splunklib.six.moves import cStringIO as StringIO @@ -287,6 +288,68 @@ def __itertext(self): else: raise +class JSONResultsReader(object): + """This class returns dictionaries and Splunk messages from a JSON results + stream. + ``JSONResultsReader`` is iterable, and returns a ``dict`` for results, or a + :class:`Message` object for Splunk messages. This class has one field, + ``is_preview``, which is ``True`` when the results are a preview from a + running search, or ``False`` when the results are from a completed search. + This function has no network activity other than what is implicit in the + stream it operates on. + :param `stream`: The stream to read from (any object that supports + ``.read()``). + **Example**:: + import results + response = ... 
# the body of an HTTP response + reader = results.JSONResultsReader(response) + for result in reader: + if isinstance(result, dict): + print "Result: %s" % result + elif isinstance(result, results.Message): + print "Message: %s" % result + print "is_preview = %s " % reader.is_preview + """ + # Be sure to update the docstrings of client.Jobs.oneshot, + # client.Job.results_preview and client.Job.results to match any + # changes made to JSONResultsReader. + # + # This wouldn't be a class, just the _parse_results function below, + # except that you cannot get the current generator inside the + # function creating that generator. Thus it's all wrapped up for + # the sake of one field. + def __init__(self, stream): + # The search/jobs/exports endpoint, when run with + # earliest_time=rt and latest_time=rt, output_mode=json, streams a sequence of + # JSON documents, each containing a result, as opposed to one + # results element containing lots of results. + stream = BufferedReader(stream) + self.is_preview = None + self._gen = self._parse_results(stream) + def __iter__(self): + return self + def next(self): + return next(self._gen) + __next__ = next + + def _parse_results(self, stream): + """Parse results and messages out of *stream*.""" + for line in stream.readlines(): + strip_line = line.strip() + if strip_line.__len__() == 0 : continue + parsed_line = json_loads(strip_line) + if "preview" in parsed_line: + self.is_preview = parsed_line["preview"] + if "messages" in parsed_line and parsed_line["messages"].__len__() > 0: + for message in parsed_line["messages"]: + msg_type = message.get("type", "Unknown Message Type") + text = message.get("text") + yield Message(msg_type, text) + if "result" in parsed_line: + yield parsed_line["result"] + if "results" in parsed_line: + for result in parsed_line["results"]: + yield result \ No newline at end of file diff --git a/tests/test_job.py b/tests/test_job.py index 4de34b611..44326086b 100755 --- a/tests/test_job.py +++ b/tests/test_job.py @@ -54,8 +54,8 @@ def test_oneshot_with_garbage_fails(self): def test_oneshot(self): jobs = self.service.jobs - stream = jobs.oneshot("search index=_internal earliest=-1m | head 3") - result = results.ResultsReader(stream) + stream = jobs.oneshot("search index=_internal earliest=-1m | head 3", output_mode='json') + result = results.JSONResultsReader(stream) ds = list(result) self.assertEqual(result.is_preview, False) self.assertTrue(isinstance(ds[0], dict) or \ @@ -69,8 +69,8 @@ def test_export_with_garbage_fails(self): def test_export(self): jobs = self.service.jobs - stream = jobs.export("search index=_internal earliest=-1m | head 3") - result = results.ResultsReader(stream) + stream = jobs.export("search index=_internal earliest=-1m | head 3", output_mode='json') + result = results.JSONResultsReader(stream) ds = list(result) self.assertEqual(result.is_preview, False) self.assertTrue(isinstance(ds[0], dict) or \ @@ -82,7 +82,7 @@ def test_export_docstring_sample(self): import splunklib.client as client import splunklib.results as results service = self.service # cheat - rr = results.ResultsReader(service.jobs.export("search * | head 5")) + rr = results.JSONResultsReader(service.jobs.export("search * | head 5", output_mode='json')) for result in rr: if isinstance(result, results.Message): # Diagnostic messages may be returned in the results @@ -98,7 +98,7 @@ def test_results_docstring_sample(self): job = service.jobs.create("search * | head 5") while not job.is_done(): sleep(0.2) - rr = 
results.ResultsReader(job.results()) + rr = results.JSONResultsReader(job.results(output_mode='json')) for result in rr: if isinstance(result, results.Message): # Diagnostic messages may be returned in the results @@ -113,7 +113,7 @@ def test_preview_docstring_sample(self): import splunklib.results as results service = self.service # cheat job = service.jobs.create("search * | head 5") - rr = results.ResultsReader(job.preview()) + rr = results.JSONResultsReader(job.preview(output_mode='json')) for result in rr: if isinstance(result, results.Message): # Diagnostic messages may be returned in the results @@ -130,7 +130,7 @@ def test_oneshot_docstring_sample(self): import splunklib.client as client import splunklib.results as results service = self.service # cheat - rr = results.ResultsReader(service.jobs.oneshot("search * | head 5")) + rr = results.JSONResultsReader(service.jobs.oneshot("search * | head 5", output_mode='json')) for result in rr: if isinstance(result, results.Message): # Diagnostic messages may be returned in the results @@ -295,12 +295,12 @@ def test_get_preview_and_events(self): self.assertEventuallyTrue(self.job.is_done) self.assertLessEqual(int(self.job['eventCount']), 3) - preview_stream = self.job.preview() - preview_r = results.ResultsReader(preview_stream) + preview_stream = self.job.preview(output_mode='json') + preview_r = results.JSONResultsReader(preview_stream) self.assertFalse(preview_r.is_preview) - events_stream = self.job.events() - events_r = results.ResultsReader(events_stream) + events_stream = self.job.events(output_mode='json') + events_r = results.JSONResultsReader(events_stream) n_events = len([x for x in events_r if isinstance(x, dict)]) n_preview = len([x for x in preview_r if isinstance(x, dict)]) diff --git a/tests/test_results.py b/tests/test_results.py index 52e290f25..5fdca2b91 100755 --- a/tests/test_results.py +++ b/tests/test_results.py @@ -30,7 +30,7 @@ def test_read_from_empty_result_set(self): job = self.service.jobs.create("search index=_internal_does_not_exist | head 2") while not job.is_done(): sleep(0.5) - self.assertEqual(0, len(list(results.ResultsReader(io.BufferedReader(job.results()))))) + self.assertEqual(0, len(list(results.JSONResultsReader(io.BufferedReader(job.results(output_mode='json')))))) def test_read_normal_results(self): xml_text = """ From debd64cf863a71dcbf26149abadaa82f048b7958 Mon Sep 17 00:00:00 2001 From: Abhi Shah Date: Tue, 8 Mar 2022 17:08:25 +0530 Subject: [PATCH 52/58] deprecated annotation for ResultsReader --- splunklib/results.py | 38 ++++++++++++++++++++++++++------------ tox.ini | 1 + 2 files changed, 27 insertions(+), 12 deletions(-) diff --git a/splunklib/results.py b/splunklib/results.py index 19c8182df..1487486fc 100644 --- a/splunklib/results.py +++ b/splunklib/results.py @@ -36,7 +36,10 @@ from io import BufferedReader, BytesIO +import deprecation + from splunklib import six + try: import xml.etree.cElementTree as et except: @@ -55,6 +58,7 @@ "Message" ] + class Message(object): """This class represents informational messages that Splunk interleaves in the results stream. @@ -65,6 +69,7 @@ class Message(object): m = Message("DEBUG", "There's something in that variable...") """ + def __init__(self, type_, message): self.type = type_ self.message = message @@ -78,6 +83,7 @@ def __eq__(self, other): def __hash__(self): return hash((self.type, self.message)) + class _ConcatenatedStream(object): """Lazily concatenate zero or more streams into a stream. 
@@ -90,6 +96,7 @@ class _ConcatenatedStream(object): s = _ConcatenatedStream(StringIO("abc"), StringIO("def")) assert s.read() == "abcdef" """ + def __init__(self, *streams): self.streams = list(streams) @@ -108,6 +115,7 @@ def read(self, n=None): del self.streams[0] return response + class _XMLDTDFilter(object): """Lazily remove all XML DTDs from a stream. @@ -121,6 +129,7 @@ class _XMLDTDFilter(object): s = _XMLDTDFilter("") assert s.read() == "" """ + def __init__(self, stream): self.stream = stream @@ -151,6 +160,8 @@ def read(self, n=None): n -= 1 return response + +@deprecation.deprecated(deprecated_in="1.16.9", details="Use the JSONResultsReader function instead") class ResultsReader(object): """This class returns dictionaries and Splunk messages from an XML results stream. @@ -178,6 +189,7 @@ class ResultsReader(object): print "Message: %s" % result print "is_preview = %s " % reader.is_preview """ + # Be sure to update the docstrings of client.Jobs.oneshot, # client.Job.results_preview and client.Job.results to match any # changes made to ResultsReader. @@ -258,16 +270,16 @@ def _parse_results(self, stream): # So we'll define it here def __itertext(self): - tag = self.tag - if not isinstance(tag, six.string_types) and tag is not None: - return - if self.text: - yield self.text - for e in self: - for s in __itertext(e): - yield s - if e.tail: - yield e.tail + tag = self.tag + if not isinstance(tag, six.string_types) and tag is not None: + return + if self.text: + yield self.text + for e in self: + for s in __itertext(e): + yield s + if e.tail: + yield e.tail text = "".join(__itertext(elem)) values.append(text) @@ -288,6 +300,7 @@ def __itertext(self): else: raise + class JSONResultsReader(object): """This class returns dictionaries and Splunk messages from a JSON results stream. @@ -310,6 +323,7 @@ class JSONResultsReader(object): print "Message: %s" % result print "is_preview = %s " % reader.is_preview """ + # Be sure to update the docstrings of client.Jobs.oneshot, # client.Job.results_preview and client.Job.results to match any # changes made to JSONResultsReader. @@ -339,7 +353,7 @@ def _parse_results(self, stream): """Parse results and messages out of *stream*.""" for line in stream.readlines(): strip_line = line.strip() - if strip_line.__len__() == 0 : continue + if strip_line.__len__() == 0: continue parsed_line = json_loads(strip_line) if "preview" in parsed_line: self.is_preview = parsed_line["preview"] @@ -352,4 +366,4 @@ def _parse_results(self, stream): yield parsed_line["result"] if "results" in parsed_line: for result in parsed_line["results"]: - yield result \ No newline at end of file + yield result diff --git a/tox.ini b/tox.ini index 227be746c..58ee004ca 100644 --- a/tox.ini +++ b/tox.ini @@ -33,6 +33,7 @@ deps = pytest unittest2 unittest-xml-reporting python-dotenv + deprecation distdir = build commands = From 82bff6caba892b90bf0a66cb1eb913145cd503db Mon Sep 17 00:00:00 2001 From: akaila-splunk Date: Tue, 8 Mar 2022 19:19:32 +0530 Subject: [PATCH 53/58] Added condition check for post method debug logs - Added check to avoid writing sensitive data in debug logs - ex. 
the '/storage/passwords' endpoint has a password field in its body during POST method calls --- splunklib/binding.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/splunklib/binding.py b/splunklib/binding.py index 60fc5294f..85713a22c 100644 --- a/splunklib/binding.py +++ b/splunklib/binding.py @@ -758,7 +758,13 @@ def post(self, path_segment, owner=None, app=None, sharing=None, headers=None, * headers = [] path = self.authority + self._abspath(path_segment, owner=owner, app=app, sharing=sharing) - logger.debug("POST request to %s (body: %s)", path, repr(query)) + + # To avoid writing sensitive data in debug logs + endpoint_having_sensitive_data = ["/storage/passwords"] + if any(endpoint in path for endpoint in endpoint_having_sensitive_data): + logger.debug("POST request to %s ", path) + else: + logger.debug("POST request to %s (body: %s)", path, repr(query)) all_headers = headers + self.additional_headers + self._auth_headers response = self.http.post(path, all_headers, **query) return response From 003859754dc2e22f1550ff32c9ebb1ca614cebcc Mon Sep 17 00:00:00 2001 From: bparmar-splunk Date: Thu, 10 Mar 2022 17:32:37 +0530 Subject: [PATCH 54/58] GitHub release workflow modified Update: - API docs are generated (zip file). - The upload-artifact GitHub Action is used to upload the docs zip. --- .github/workflows/release.yml | 16 ++++++++++++++-- .github/workflows/test.yml | 4 ++-- 2 files changed, 16 insertions(+), 4 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 450736ec8..d588537b3 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -3,7 +3,7 @@ on: push: branches: - master - workflow_dispatch: {} + workflow_dispatch: { } jobs: find_version: @@ -21,6 +21,13 @@ jobs: - name: Get version id: get-version run: python -c 'import splunklib; print("::set-output name=version::%s" % splunklib.__version__)' + - name: Install tox + run: pip install tox + - name: Generate API docs + run: | + rm -rf ./docs/_build + tox -e docs + cd ./docs/_build/html && zip -r ../docs_html.zip . 
-x ".*" -x "__MACOSX" tag_version: needs: find_version name: Tag Version @@ -32,7 +39,7 @@ jobs: repo-token: ${{ secrets.GITHUB_TOKEN }} tag: ${{ needs.find_version.outputs.version }} release: - needs: [find_version, tag_version] + needs: [ find_version, tag_version ] name: Create Release runs-on: ubuntu-latest steps: @@ -52,6 +59,11 @@ jobs: **TODO: Insert CHANGELOG.md contents here.** draft: false prerelease: false + - name: Upload Artifact + uses: actions/upload-artifact@v3 + with: + name: apidocs + path: docs/_build/docs_html.zip publish: needs: release name: Deploy Release to PyPI diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index e09f04988..42713a686 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -1,7 +1,7 @@ name: Python CI on: - [push, pull_request] + [ push, pull_request ] jobs: build: @@ -11,7 +11,7 @@ jobs: matrix: os: - ubuntu-latest - python: [2.7, 3.7] + python: [ 2.7, 3.7 ] splunk-version: - "8.0" - "latest" From af44b5e52053c79f48be860bc7c5b6d6bf392088 Mon Sep 17 00:00:00 2001 From: Abhi Shah Date: Thu, 10 Mar 2022 17:55:34 +0530 Subject: [PATCH 55/58] added deprecated function annotation --- examples/results.py | 7 +++++-- examples/search_modes.py | 2 +- splunklib/client.py | 39 ++++++++++++++++++--------------------- splunklib/results.py | 6 +++--- splunklib/six.py | 13 +++++++++++++ 5 files changed, 40 insertions(+), 27 deletions(-) diff --git a/examples/results.py b/examples/results.py index 9c0f18751..e18e8f567 100755 --- a/examples/results.py +++ b/examples/results.py @@ -17,18 +17,21 @@ """A script that reads XML search results from stdin and pretty-prints them back to stdout. The script is designed to be used with the search.py example, eg: './search.py "search 404" | ./results.py'""" - + from __future__ import absolute_import from pprint import pprint import sys, os + sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..")) import splunklib.results as results + def pretty(): - reader = results.ResultsReader(sys.stdin) + reader = results.JSONResultsReader(sys.stdin) for event in reader: pprint(event) + if __name__ == "__main__": pretty() diff --git a/examples/search_modes.py b/examples/search_modes.py index 66fa77cd4..f1d1687f2 100644 --- a/examples/search_modes.py +++ b/examples/search_modes.py @@ -33,7 +33,7 @@ def modes(argv): while not job.is_ready(): time.sleep(0.5) pass - reader = results.ResultsReader(job.events(output_mode='json')) + reader = results.JSONResultsReader(job.events(output_mode='json')) # Events found: 10 print('Events found with adhoc_search_level="verbose": %s' % len([e for e in reader])) diff --git a/splunklib/client.py b/splunklib/client.py index 7b0772f11..0979140c2 100644 --- a/splunklib/client.py +++ b/splunklib/client.py @@ -2767,9 +2767,8 @@ def pause(self): return self def results(self, **query_params): - """Returns a streaming handle to this job's search results. To get a - nice, Pythonic iterator, pass the handle to :class:`splunklib.results.ResultsReader`, - as in:: + """Returns a streaming handle to this job's search results. 
To get a nice, Pythonic iterator, pass the handle + to :class:`splunklib.results.JSONResultsReader` along with the query param "output_mode='json'", as in:: import splunklib.client as client import splunklib.results as results @@ -2778,7 +2777,7 @@ def results(self, **query_params): job = service.jobs.create("search * | head 5") while not job.is_done(): sleep(.2) - rr = results.ResultsReader(job.results()) + rr = results.JSONResultsReader(job.results(output_mode='json')) for result in rr: if isinstance(result, results.Message): # Diagnostic messages may be returned in the results @@ -2808,19 +2807,17 @@ def results(self, **query_params): def preview(self, **query_params): """Returns a streaming handle to this job's preview search results. - Unlike :class:`splunklib.results.ResultsReader`, which requires a job to - be finished to - return any results, the ``preview`` method returns any results that have - been generated so far, whether the job is running or not. The - returned search results are the raw data from the server. Pass - the handle returned to :class:`splunklib.results.ResultsReader` to get a - nice, Pythonic iterator over objects, as in:: + Unlike :class:`splunklib.results.JSONResultsReader`along with the query param "output_mode='json'", + which requires a job to be finished to return any results, the ``preview`` method returns any results that + have been generated so far, whether the job is running or not. The returned search results are the raw data + from the server. Pass the handle returned to :class:`splunklib.results.JSONResultsReader` to get a nice, + Pythonic iterator over objects, as in:: import splunklib.client as client import splunklib.results as results service = client.connect(...) job = service.jobs.create("search * | head 5") - rr = results.ResultsReader(job.preview()) + rr = results.JSONResultsReader(job.preview(output_mode='json')) for result in rr: if isinstance(result, results.Message): # Diagnostic messages may be returned in the results @@ -2975,15 +2972,15 @@ def create(self, query, **kwargs): return Job(self.service, sid) def export(self, query, **params): - """Runs a search and immediately starts streaming preview events. - This method returns a streaming handle to this job's events as an XML - document from the server. To parse this stream into usable Python objects, - pass the handle to :class:`splunklib.results.ResultsReader`:: + """Runs a search and immediately starts streaming preview events. This method returns a streaming handle to + this job's events as an XML document from the server. To parse this stream into usable Python objects, + pass the handle to :class:`splunklib.results.JSONResultsReader` along with the query param + "output_mode='json'":: import splunklib.client as client import splunklib.results as results service = client.connect(...) - rr = results.ResultsReader(service.jobs.export("search * | head 5")) + rr = results.JSONResultsReader(service.jobs.export("search * | head 5",output_mode='json')) for result in rr: if isinstance(result, results.Message): # Diagnostic messages may be returned in the results @@ -3032,14 +3029,14 @@ def itemmeta(self): def oneshot(self, query, **params): """Run a oneshot search and returns a streaming handle to the results. - The ``InputStream`` object streams XML fragments from the server. To - parse this stream into usable Python objects, - pass the handle to :class:`splunklib.results.ResultsReader`:: + The ``InputStream`` object streams XML fragments from the server. 
To parse this stream into usable Python + objects, pass the handle to :class:`splunklib.results.JSONResultsReader` along with the query param + "output_mode='json'" :: import splunklib.client as client import splunklib.results as results service = client.connect(...) - rr = results.ResultsReader(service.jobs.oneshot("search * | head 5")) + rr = results.JSONResultsReader(service.jobs.oneshot("search * | head 5",output_mode='json')) for result in rr: if isinstance(result, results.Message): # Diagnostic messages may be returned in the results diff --git a/splunklib/results.py b/splunklib/results.py index 1487486fc..5f3966859 100644 --- a/splunklib/results.py +++ b/splunklib/results.py @@ -36,10 +36,10 @@ from io import BufferedReader, BytesIO -import deprecation - from splunklib import six +from splunklib.six import deprecated + try: import xml.etree.cElementTree as et except: @@ -161,7 +161,7 @@ def read(self, n=None): return response -@deprecation.deprecated(deprecated_in="1.16.9", details="Use the JSONResultsReader function instead") +@deprecated("Use the JSONResultsReader function instead in conjuction with the 'output_mode' query param set to 'json'") class ResultsReader(object): """This class returns dictionaries and Splunk messages from an XML results stream. diff --git a/splunklib/six.py b/splunklib/six.py index 5fe9f8e14..d13e50c93 100644 --- a/splunklib/six.py +++ b/splunklib/six.py @@ -978,3 +978,16 @@ def python_2_unicode_compatible(klass): del i, importer # Finally, add the importer to the meta path import hook. sys.meta_path.append(_importer) + +import warnings + +def deprecated(message): + def deprecated_decorator(func): + def deprecated_func(*args, **kwargs): + warnings.warn("{} is a deprecated function. {}".format(func.__name__, message), + category=DeprecationWarning, + stacklevel=2) + warnings.simplefilter('default', DeprecationWarning) + return func(*args, **kwargs) + return deprecated_func + return deprecated_decorator \ No newline at end of file From 9ea866224273372ecb25d0c51bc169019485206e Mon Sep 17 00:00:00 2001 From: Abhi Shah Date: Thu, 10 Mar 2022 19:00:07 +0530 Subject: [PATCH 56/58] Update test_validators.py --- tests/searchcommands/test_validators.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/tests/searchcommands/test_validators.py b/tests/searchcommands/test_validators.py index 38836c4aa..cc524b307 100755 --- a/tests/searchcommands/test_validators.py +++ b/tests/searchcommands/test_validators.py @@ -208,10 +208,9 @@ def test(integer): def test_float(self): # Float validator test - import random - maxsize = random.random() + 1 - minsize = random.random() - 1 + maxsize = 1.5 + minsize = -1.5 validator = validators.Float() From 55d46038b5b4c54e4191d11672db0ac748227492 Mon Sep 17 00:00:00 2001 From: Abhi Shah Date: Wed, 23 Mar 2022 15:56:58 +0530 Subject: [PATCH 57/58] Update stail.py --- examples/stail.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/examples/stail.py b/examples/stail.py index 3df3f10d7..6ba4ee54e 100755 --- a/examples/stail.py +++ b/examples/stail.py @@ -25,7 +25,7 @@ from pprint import pprint from splunklib.client import connect -from splunklib.results import ResultsReader, JSONResultsReader +from splunklib.results import JSONResultsReader try: import utils From 7b0b486302dfd08fb2a56fc7f9082ceadbc673fc Mon Sep 17 00:00:00 2001 From: akaila-splunk Date: Fri, 25 Mar 2022 18:20:47 +0530 Subject: [PATCH 58/58] release/1.6.19 changes --- CHANGELOG.md | 37 +++++++++++++++++++++++++++++++++++++ README.md 
| 2 +- splunklib/__init__.py | 2 +- splunklib/binding.py | 2 +- 4 files changed, 40 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 7edf338d6..78d7edbc2 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,42 @@ # Splunk Enterprise SDK for Python Changelog +## Version 1.6.19 + +### New features and APIs +* [#441](https://github.com/splunk/splunk-sdk-python/pull/441) JSONResultsReader added and deprecated ResultsReader + * Pre-requisite: Query parameter 'output_mode' must be set to 'json' + * Improves performance by approx ~80-90% + * ResultsReader is deprecated and will be removed in future releases (NOTE: Please migrate to JSONResultsReader) +* [#437](https://github.com/splunk/splunk-sdk-python/pull/437) added setup_logging() method in splunklib for logging +* [#426](https://github.com/splunk/splunk-sdk-python/pull/426) Added new github_commit modular input example +* [#392](https://github.com/splunk/splunk-sdk-python/pull/392) Break out search argument to option parsing for v2 custom search commands +* [#384](https://github.com/splunk/splunk-sdk-python/pull/384) Added Float parameter validator for custom search commands +* [#371](https://github.com/splunk/splunk-sdk-python/pull/371) Modinput preserve 'app' context + +### Bug fixes +* [#439](https://github.com/splunk/splunk-sdk-python/pull/439) Modified POST method debug log to not log sensitive body/data +* [#431](https://github.com/splunk/splunk-sdk-python/pull/431) Add distsearch.conf to Stream Search Command examples [ [issue#418](https://github.com/splunk/splunk-sdk-python/issues/418) ] +* [#419](https://github.com/splunk/splunk-sdk-python/pull/419) Hec endpoint issue[ [issue#345](https://github.com/splunk/splunk-sdk-python/issues/345) ] +* [#416](https://github.com/splunk/splunk-sdk-python/pull/416) Removed strip() method in load_value() method from data.py file [ [issue#400](https://github.com/splunk/splunk-sdk-python/issues/400) ] +* [#148](https://github.com/splunk/splunk-sdk-python/pull/148) Identical entity names will cause an infinite loop + +### Minor changes +* [#440](https://github.com/splunk/splunk-sdk-python/pull/440) Github release workflow modified to generate docs +* [#430](https://github.com/splunk/splunk-sdk-python/pull/430) Fix indentation in README +* [#429](https://github.com/splunk/splunk-sdk-python/pull/429) documented how to access modular input metadata +* [#427](https://github.com/splunk/splunk-sdk-python/pull/427) Replace .splunkrc with .env file in test and examples +* [#424](https://github.com/splunk/splunk-sdk-python/pull/424) Float validator test fix +* [#423](https://github.com/splunk/splunk-sdk-python/pull/423) Python3 compatibility for ResponseReader.__str__() +* [#422](https://github.com/splunk/splunk-sdk-python/pull/422) ordereddict and all its reference removed +* [#421](https://github.com/splunk/splunk-sdk-python/pull/421) Update README.md +* [#387](https://github.com/splunk/splunk-sdk-python/pull/387) Update filter.py +* [#331](https://github.com/splunk/splunk-sdk-python/pull/331) Fix a couple of warnings spotted when running python 2.7 tests +* [#330](https://github.com/splunk/splunk-sdk-python/pull/330) client: use six.string_types instead of basestring +* [#329](https://github.com/splunk/splunk-sdk-python/pull/329) client: remove outdated comment in Index.submit +* [#262](https://github.com/splunk/splunk-sdk-python/pull/262) properly add parameters to request based on the method of the request +* [#237](https://github.com/splunk/splunk-sdk-python/pull/237) Don't 
output close tags if you haven't written a start tag +* [#149](https://github.com/splunk/splunk-sdk-python/pull/149) "handlers" stanza missing in examples/searchcommands_template/default/logging.conf + ## Version 1.6.18 ### Bug fixes diff --git a/README.md b/README.md index 252f0231e..77dedf876 100644 --- a/README.md +++ b/README.md @@ -3,7 +3,7 @@ # The Splunk Enterprise Software Development Kit for Python -#### Version 1.6.18 +#### Version 1.6.19 The Splunk Enterprise Software Development Kit (SDK) for Python contains library code and examples designed to enable developers to build applications using the Splunk platform. diff --git a/splunklib/__init__.py b/splunklib/__init__.py index 5b7c32122..87d26b749 100644 --- a/splunklib/__init__.py +++ b/splunklib/__init__.py @@ -31,5 +31,5 @@ def setup_logging(level, log_format=DEFAULT_LOG_FORMAT, date_format=DEFAULT_DATE format=log_format, datefmt=date_format) -__version_info__ = (1, 6, 18) +__version_info__ = (1, 6, 19) __version__ = ".".join(map(str, __version_info__)) diff --git a/splunklib/binding.py b/splunklib/binding.py index 85713a22c..6bf4f0714 100644 --- a/splunklib/binding.py +++ b/splunklib/binding.py @@ -1414,7 +1414,7 @@ def request(url, message, **kwargs): head = { "Content-Length": str(len(body)), "Host": host, - "User-Agent": "splunk-sdk-python/1.6.18", + "User-Agent": "splunk-sdk-python/1.6.19", "Accept": "*/*", "Connection": "Close", } # defaults
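For readers picking up the 1.6.19 changes listed in the changelog above, a minimal sketch of migrating a consumer from the deprecated ResultsReader to JSONResultsReader, based on the docstring examples in these patches; the connection values below are hypothetical placeholders and assume a reachable Splunk instance:

```python
# Illustrative sketch only; host and credentials are placeholders, not real values.
import splunklib.client as client
import splunklib.results as results

service = client.connect(host="localhost", port=8089,
                         username="admin", password="changeme")

# Before 1.6.19 (now deprecated):
#   reader = results.ResultsReader(service.jobs.oneshot("search index=_internal | head 3"))
# From 1.6.19 on, request JSON output and parse it with JSONResultsReader:
stream = service.jobs.oneshot("search index=_internal | head 3", output_mode="json")
for item in results.JSONResultsReader(stream):
    if isinstance(item, results.Message):
        print("Message: %s" % item)   # diagnostic messages interleaved in the stream
    elif isinstance(item, dict):
        print("Result: %s" % item)    # each result is a dict of field names to values
```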