diff --git a/config/check_spidermonkey_style.py b/config/check_spidermonkey_style.py
index c8e766ef389e8..98b3f648ffb6c 100644
--- a/config/check_spidermonkey_style.py
+++ b/config/check_spidermonkey_style.py
@@ -529,7 +529,7 @@ def append_ordinary_line(self, line):
         self.kids[-1].lines.append(line)
 
     def style_relevant_kids(self):
-        """ Return a list of kids in this block that are style-relevant. """
+        """Return a list of kids in this block that are style-relevant."""
         return [kid for kid in self.kids if kid.is_style_relevant()]
 
     def sorted(self, enclosing_inclname):
@@ -623,7 +623,7 @@ def to_source(self):
 
 
 class OrdinaryCode(object):
-    """ A list of lines of code that aren't #include/#if/#else/#endif lines. """
+    """A list of lines of code that aren't #include/#if/#else/#endif lines."""
 
     def __init__(self, lines=None):
         self.lines = lines if lines is not None else []
diff --git a/config/printprereleasesuffix.py b/config/printprereleasesuffix.py
index aad6e40634df6..b7e667e870863 100644
--- a/config/printprereleasesuffix.py
+++ b/config/printprereleasesuffix.py
@@ -17,7 +17,7 @@
 
 
 def get_prerelease_suffix(version):
-    """ Returns the prerelease suffix from the version string argument """
+    """Returns the prerelease suffix from the version string argument"""
 
     def mfunc(m):
         return " {0} {1} {2}".format(
diff --git a/config/tests/unit-printprereleasesuffix.py b/config/tests/unit-printprereleasesuffix.py
index 9b68312170204..9e45ad0ea40df 100644
--- a/config/tests/unit-printprereleasesuffix.py
+++ b/config/tests/unit-printprereleasesuffix.py
@@ -72,7 +72,7 @@ def test_a_b(self):
         self.assertEqual(self.c, "")
 
     def test_plus(self):
-        """test 1.2+ version string """
+        """test 1.2+ version string"""
         self.c = get_prerelease_suffix("1.2+")
         self.assertEqual(self.c, "")
diff --git a/dom/bindings/Codegen.py b/dom/bindings/Codegen.py
index 6dfc6107653ee..63c930b8abe5c 100644
--- a/dom/bindings/Codegen.py
+++ b/dom/bindings/Codegen.py
@@ -13120,7 +13120,7 @@ def deps(self):
 
 
 class ClassItem:
-    """ Use with CGClass """
+    """Use with CGClass"""
 
     def __init__(self, name, visibility):
         self.name = name
diff --git a/js/src/builtin/intl/make_intl_data.py b/js/src/builtin/intl/make_intl_data.py
index b1d92d4441464..d3eafad7ef396 100755
--- a/js/src/builtin/intl/make_intl_data.py
+++ b/js/src/builtin/intl/make_intl_data.py
@@ -517,7 +517,7 @@ def compare_tags(language, script):
 
 
 def writeVariantTagMappings(println, variant_mappings, description, source, url):
-    """ Writes a function definition that maps variant subtags. """
+    """Writes a function definition that maps variant subtags."""
     println(
         """
 static const char* ToCharPointer(const char* str) {
@@ -649,7 +649,7 @@ def writeVariantTagMappings(println, variant_mappings, description, source, url)
 
 
 def writeLegacyMappingsFunction(println, legacy_mappings, description, source, url):
-    """ Writes a function definition that maps legacy language tags. """
+    """Writes a function definition that maps legacy language tags."""
     println("")
     writeMappingHeader(println, description, source, url)
     println(
@@ -915,7 +915,7 @@ def variant_size(m):
 def writeSignLanguageMappingsFunction(
     println, legacy_mappings, description, source, url
 ):
-    """ Writes a function definition that maps legacy sign language tags. """
+    """Writes a function definition that maps legacy sign language tags."""
     println("")
     writeMappingHeader(println, description, source, url)
     println(
@@ -1623,7 +1623,7 @@ def readSupplementalMetadata(file):
 
 
 def writeCLDRLanguageTagData(println, data, url):
-    """ Writes the language tag data to the Intl data file. """
+    """Writes the language tag data to the Intl data file."""
 
     println(generatedFileWarning)
     println("// Version: CLDR-{}".format(data["version"]))
@@ -1851,7 +1851,7 @@ def writeCLDRLanguageTagData(println, data, url):
 
 
 def writeCLDRLanguageTagLikelySubtagsTest(println, data, url):
-    """ Writes the likely-subtags test file. """
+    """Writes the likely-subtags test file."""
 
     println(generatedFileWarning)
 
@@ -2035,7 +2035,7 @@ def readCLDRVersionFromICU():
 
 
 def updateCLDRLangTags(args):
-    """ Update the LocaleGenerated.cpp file. """
+    """Update the LanguageTagGenerated.cpp file."""
     version = args.version
     url = args.url
     out = args.out
@@ -2095,7 +2095,7 @@ def readFiles(cldr_file):
 
 
 def flines(filepath, encoding="utf-8"):
-    """ Open filepath and iterate over its content. """
+    """Open filepath and iterate over its content."""
     with io.open(filepath, mode="r", encoding=encoding) as f:
         for line in f:
             yield line
@@ -2103,7 +2103,7 @@ def flines(filepath, encoding="utf-8"):
 
 @total_ordering
 class Zone(object):
-    """ Time zone with optional file name. """
+    """Time zone with optional file name."""
 
     def __init__(self, name, filename=""):
         self.name = name
@@ -2126,7 +2126,7 @@ def __repr__(self):
 
 
 class TzDataDir(object):
-    """ tzdata source from a directory. """
+    """tzdata source from a directory."""
 
     def __init__(self, obj):
         self.name = partial(os.path.basename, obj)
@@ -2138,7 +2138,7 @@ def __init__(self, obj):
 
 
 class TzDataFile(object):
-    """ tzdata source from a file (tar or gzipped). """
+    """tzdata source from a file (tar or gzipped)."""
 
     def __init__(self, obj):
         self.name = lambda: os.path.splitext(
@@ -2157,7 +2157,7 @@ def _tarlines(self, tar, m):
 
 
 def validateTimeZones(zones, links):
-    """ Validate the zone and link entries. """
+    """Validate the zone and link entries."""
     linkZones = set(links.keys())
     intersect = linkZones.intersection(zones)
     if intersect:
@@ -2195,7 +2195,7 @@ def isTzFile(d, m, f):
 
 
 def readIANAFiles(tzdataDir, files):
-    """ Read all IANA time zone files from the given iterable. """
+    """Read all IANA time zone files from the given iterable."""
     nameSyntax = "[\w/+\-]+"
     pZone = re.compile(r"Zone\s+(?P<name>%s)\s+.*" % nameSyntax)
     pLink = re.compile(
@@ -2227,7 +2227,7 @@ def createLink(line, fname):
 
 
 def readIANATimeZones(tzdataDir, ignoreBackzone, ignoreFactory):
-    """ Read the IANA time zone information from `tzdataDir`. """
+    """Read the IANA time zone information from `tzdataDir`."""
     backzoneFiles = {"backzone"}
     (bkfiles, tzfiles) = partition(listIANAFiles(tzdataDir), backzoneFiles.__contains__)
 
@@ -2524,7 +2524,7 @@ def otherICULegacyLinks():
 
 
 def icuTzDataVersion(icuTzDir):
-    """ Read the ICU time zone version from `icuTzDir`/zoneinfo64.txt. """
+    """Read the ICU time zone version from `icuTzDir`/zoneinfo64.txt."""
 
     def searchInFile(pattern, f):
         p = re.compile(pattern)
@@ -2546,7 +2546,7 @@ def searchInFile(pattern, f):
 
 
 def findIncorrectICUZones(ianaZones, ianaLinks, icuZones, icuLinks, ignoreBackzone):
-    """ Find incorrect ICU zone entries. """
+    """Find incorrect ICU zone entries."""
 
     def isIANATimeZone(zone):
         return zone in ianaZones or zone in ianaLinks
@@ -2588,7 +2588,7 @@ def isICULink(zone):
 
 
 def findIncorrectICULinks(ianaZones, ianaLinks, icuZones, icuLinks):
-    """ Find incorrect ICU link entries. """
+    """Find incorrect ICU link entries."""
 
     def isIANATimeZone(zone):
         return zone in ianaZones or zone in ianaLinks
@@ -2651,7 +2651,7 @@ def isICUZone(zone):
 def processTimeZones(
     tzdataDir, icuDir, icuTzDir, version, ignoreBackzone, ignoreFactory, out
 ):
-    """ Read the time zone info and create a new time zone cpp file. """
+    """Read the time zone info and create a new time zone cpp file."""
     print("Processing tzdata mapping...")
     (ianaZones, ianaLinks) = readIANATimeZones(tzdataDir, ignoreBackzone, ignoreFactory)
     (icuZones, icuLinks) = readICUTimeZones(icuDir, icuTzDir, ignoreFactory)
@@ -3012,7 +3012,7 @@ def generateTzDataTests(tzdataDir, version, ignoreBackzone, ignoreFactory, testD
 
 
 def updateTzdata(topsrcdir, args):
-    """ Update the time zone cpp file. """
+    """Update the time zone cpp file."""
 
     icuDir = os.path.join(topsrcdir, "intl/icu/source")
     if not os.path.isdir(icuDir):
@@ -3142,7 +3142,7 @@ def writeCurrencyFile(published, currencies, out):
 
 
 def updateCurrency(topsrcdir, args):
-    """ Update the CurrencyDataGenerated.js file. """
+    """Update the CurrencyDataGenerated.js file."""
     import xml.etree.ElementTree as ET
     from random import randint
 
@@ -3643,7 +3643,7 @@ def find_unit_type(unit):
 
 
 def writeUnitTestFiles(all_units, sanctioned_units):
-    """ Generate test files for unit number formatters. """
+    """Generate test files for unit number formatters."""
 
     js_src_builtin_intl_dir = os.path.dirname(os.path.abspath(__file__))
     test_dir = os.path.join(
diff --git a/js/src/util/make_unicode.py b/js/src/util/make_unicode.py
index 13fc354a9e035..d7d42c81c1726 100755
--- a/js/src/util/make_unicode.py
+++ b/js/src/util/make_unicode.py
@@ -198,7 +198,7 @@ def read_special_casing(special_casing):
 
 
 def int_ranges(ints):
-    """ Yields consecutive ranges (inclusive) from integer values. """
+    """Yields consecutive ranges (inclusive) from integer values."""
    (a, b) = tee(sorted(ints))
     start = next(b)
     for (curr, succ) in zip_longest(a, b):
@@ -680,7 +680,7 @@ def make_non_bmp_file(version, non_bmp_lower_map, non_bmp_upper_map, codepoint_t
 
 def write_special_casing_methods(unconditional_toupper, codepoint_table, println):
     def hexlit(n):
-        """ Returns C++ hex-literal for |n|. """
+        """Returns C++ hex-literal for |n|."""
         return "0x{:04X}".format(n)
 
     def describe_range(ranges, depth):
@@ -697,13 +697,13 @@ def describe_range(ranges, depth):
         )
 
     def out_range(start, end):
-        """ Tests if the input character isn't a member of the set {x | start <= x <= end}. """
+        """Tests if the input character isn't a member of the set {x | start <= x <= end}."""
         if start == end:
             return "ch != {}".format(hexlit(start))
         return "ch < {} || ch > {}".format(hexlit(start), hexlit(end))
 
     def in_range(start, end, parenthesize=False):
-        """ Tests if the input character is in the set {x | start <= x <= end}. """
+        """Tests if the input character is in the set {x | start <= x <= end}."""
         if start == end:
             return "ch == {}".format(hexlit(start))
         (left, right) = ("(", ")") if parenthesize else ("", "")
         return "{}{} <= ch && ch <= {}{}".format(
 
         )
 
     def in_any_range(ranges, spaces):
-        """ Tests if the input character is included in any of the given ranges. """
+        """Tests if the input character is included in any of the given ranges."""
         lines = [[]]
         for (start, end) in ranges:
             expr = in_range(start, end, parenthesize=True)
@@ -724,7 +724,7 @@ def in_any_range(ranges, spaces):
         return " ||\n{}".format(spaces).join(" || ".join(t) for t in lines)
 
     def write_range_accept(parent_list, child_list, depth):
-        """ Accepts the input character if it matches any code unit in |child_list|. """
+        """Accepts the input character if it matches any code unit in |child_list|."""
         (min_parent, max_parent) = (parent_list[0], parent_list[-1])
         (min_child, max_child) = (child_list[0], child_list[-1])
         assert min_child >= min_parent
@@ -770,7 +770,7 @@ def write_range_accept(parent_list, child_list, depth):
         println(indent, "}")
 
     def write_ChangesWhenUpperCasedSpecialCasing():
-        """ Checks if the input has a special upper case mapping. """
+        """Checks if the input has a special upper case mapping."""
         println("bool")
         println("js::unicode::ChangesWhenUpperCasedSpecialCasing(char16_t ch)")
         println("{")
@@ -829,7 +829,7 @@ def write_ChangesWhenUpperCasedSpecialCasing():
         println("}")
 
     def write_LengthUpperCaseSpecialCasing():
-        """ Slow case: Special casing character was found, returns its mapping length. """
+        """Slow case: Special casing character was found, returns its mapping length."""
         println("size_t")
         println("js::unicode::LengthUpperCaseSpecialCasing(char16_t ch)")
         println("{")
@@ -851,7 +851,7 @@ def write_LengthUpperCaseSpecialCasing():
         println("}")
 
     def write_AppendUpperCaseSpecialCasing():
-        """ Slow case: Special casing character was found, append its mapping characters. """
+        """Slow case: Special casing character was found, append its mapping characters."""
         println("void")
         println(
             "js::unicode::AppendUpperCaseSpecialCasing(char16_t ch, char16_t* elements, size_t* index)"  # NOQA: E501
         )
@@ -1378,7 +1378,7 @@ def write_supplemental_identifier_method(name, group_set, println):
 
 
 def getsize(data):
-    """ return smallest possible integer size for the given array """
+    """return smallest possible integer size for the given array"""
     maxdata = max(data)
     assert maxdata < 2 ** 32
diff --git a/layout/tools/reftest/remotereftest.py b/layout/tools/reftest/remotereftest.py
index a3c0010aec979..f721357018def 100644
--- a/layout/tools/reftest/remotereftest.py
+++ b/layout/tools/reftest/remotereftest.py
@@ -252,7 +252,7 @@ def findPath(self, paths, filename=None):
         return None
 
     def startWebServer(self, options):
-        """ Create the webserver on the host and start it up """
+        """Create the webserver on the host and start it up"""
 
         remoteXrePath = options.xrePath
         remoteUtilityPath = options.utilityPath
@@ -300,7 +300,7 @@ def stopWebServer(self, options):
             self.server.stop()
 
     def killNamedProc(self, pname, orphans=True):
-        """ Kill processes matching the given command name """
+        """Kill processes matching the given command name"""
         try:
             import psutil
         except ImportError as e:
diff --git a/python/mozboot/mozboot/base.py b/python/mozboot/mozboot/base.py
index 36209a54ff46c..843ba5e5e32ef 100644
--- a/python/mozboot/mozboot/base.py
+++ b/python/mozboot/mozboot/base.py
@@ -526,7 +526,7 @@ def prompt_int(self, prompt, low, high, default=None):
             print("ERROR! Please enter a valid option!")
 
     def prompt_yesno(self, prompt):
-        """ Prompts the user with prompt and requires a yes/no answer."""
+        """Prompts the user with prompt and requires a yes/no answer."""
         if self.no_interactive:
             print(prompt)
             print('Selecting "Y" because context is not interactive.')
diff --git a/python/mozbuild/mozbuild/backend/mach_commands.py b/python/mozbuild/mozbuild/backend/mach_commands.py
index 94f76256cce71..7c0abf6c64bb2 100644
--- a/python/mozbuild/mozbuild/backend/mach_commands.py
+++ b/python/mozbuild/mozbuild/backend/mach_commands.py
@@ -376,7 +376,7 @@ def _get_clang_tools(command_context, clang_tools_path):
 
 
 def prompt_bool(prompt, limit=5):
-    """ Prompts the user with prompt and requires a boolean value. """
+    """Prompts the user with prompt and requires a boolean value."""
     from distutils.util import strtobool
 
     for _ in range(limit):
diff --git a/python/mozbuild/mozbuild/test/test_expression.py b/python/mozbuild/mozbuild/test/test_expression.py
index 3ec23efb12242..1edbc12fae936 100644
--- a/python/mozbuild/mozbuild/test/test_expression.py
+++ b/python/mozbuild/mozbuild/test/test_expression.py
@@ -54,25 +54,25 @@ def test_not(self):
         self.assert_(not Expression("!1").evaluate(self.c))
 
     def test_equals(self):
-        """ Test for the == operator"""
+        """Test for the == operator"""
         self.assert_(Expression("FAIL == PASS").evaluate(self.c))
 
     def test_notequals(self):
-        """ Test for the != operator"""
+        """Test for the != operator"""
         self.assert_(Expression("FAIL != 1").evaluate(self.c))
 
     def test_logical_and(self):
-        """ Test for the && operator"""
+        """Test for the && operator"""
         self.assertTrue(Expression("PASS == PASS && PASS != NOTPASS").evaluate(self.c))
 
     def test_logical_or(self):
-        """ Test for the || operator"""
+        """Test for the || operator"""
         self.assertTrue(
             Expression("PASS == NOTPASS || PASS != NOTPASS").evaluate(self.c)
         )
 
     def test_logical_ops(self):
-        """ Test for the && and || operators precedence"""
+        """Test for the && and || operators precedence"""
         # Would evaluate to false if precedence was wrong
         self.assertTrue(
             Expression("PASS == PASS || PASS != NOTPASS && PASS == NOTPASS").evaluate(
@@ -81,7 +81,7 @@
             )
         )
 
     def test_defined(self):
-        """ Test for the defined() value"""
+        """Test for the defined() value"""
         self.assertTrue(Expression("defined(FAIL)").evaluate(self.c))
         self.assertTrue(Expression("!defined(PASS)").evaluate(self.c))
diff --git a/python/mozrelease/mozrelease/partner_repack.py b/python/mozrelease/mozrelease/partner_repack.py
index 7a6b51afb6eb2..b44d096eb64c3 100644
--- a/python/mozrelease/mozrelease/partner_repack.py
+++ b/python/mozrelease/mozrelease/partner_repack.py
@@ -187,7 +187,7 @@ def isValidPlatform(platform):
 
 
 def parseRepackConfig(filename, platform):
-    """ Did you hear about this cool file format called yaml ? json ? Yeah, me neither """
+    """Did you hear about this cool file format called yaml ? json ? Yeah, me neither"""
     config = {}
     config["platforms"] = []
     f = open(filename, "r")
diff --git a/security/manager/tools/getCTKnownLogs.py b/security/manager/tools/getCTKnownLogs.py
index 536d7ab0e1773..a50be4f9de140 100755
--- a/security/manager/tools/getCTKnownLogs.py
+++ b/security/manager/tools/getCTKnownLogs.py
@@ -96,7 +96,7 @@ def get_disqualification_time(time_str):
 
 
 def get_hex_lines(blob, width):
-    """ Convert a binary string to a multiline text of C escape sequences. """
+    """Convert a binary string to a multiline text of C escape sequences."""
     text = "".join(["\\x{:02x}".format(ord(c)) for c in blob])
     # When escaped, a single byte takes 4 chars (e.g. "\x00").
     # Make sure we don't break an escaped byte between the lines.
@@ -104,7 +104,7 @@ def get_hex_lines(blob, width):
 
 
 def get_operator_and_index(json_data, operator_id):
-    """ Return operator's entry from the JSON along with its array index. """
+    """Return operator's entry from the JSON along with its array index."""
     matches = [
         (operator, index)
         for (index, operator) in enumerate(json_data["operators"])
@@ -118,7 +118,7 @@ def get_operator_and_index(json_data, operator_id):
 
 
 def get_log_info_structs(json_data):
-    """ Return array of CTLogInfo initializers for the known logs. """
+    """Return array of CTLogInfo initializers for the known logs."""
     tmpl = Template(
         textwrap.dedent(
             """\
@@ -177,7 +177,7 @@ def get_log_info_structs(json_data):
 
 
 def get_log_operator_structs(json_data):
-    """ Return array of CTLogOperatorInfo initializers. """
+    """Return array of CTLogOperatorInfo initializers."""
     tmpl = Template("  { $name, $id }")
     initializers = []
     for operator in json_data["operators"]:
@@ -193,7 +193,7 @@ def get_log_operator_structs(json_data):
 
 
 def generate_cpp_header_file(json_data, out_file):
-    """ Generate the C++ header file for the known logs. """
+    """Generate the C++ header file for the known logs."""
    filename = os.path.basename(out_file.name)
     include_guard = filename.replace(".", "_").replace("/", "_")
     log_info_initializers = get_log_info_structs(json_data)
@@ -209,7 +209,7 @@ def generate_cpp_header_file(json_data, out_file):
 
 
 def patch_in_test_logs(json_data):
-    """ Insert Mozilla-specific test log data. """
+    """Insert Mozilla-specific test log data."""
     max_id = 0
     for operator in json_data["operators"]:
         if operator["id"] > max_id:
@@ -300,7 +300,7 @@ def run(args):
 
 
 def parse_arguments_and_run():
-    """ Parse the command line arguments and run the program. """
+    """Parse the command line arguments and run the program."""
     arg_parser = argparse.ArgumentParser(
         description="Parses a JSON file listing the known "
         "Certificate Transparency logs and generates "
diff --git a/taskcluster/gecko_taskgraph/actions/registry.py b/taskcluster/gecko_taskgraph/actions/registry.py
index 1f5135305b5f3..15e3cfdde066f 100644
--- a/taskcluster/gecko_taskgraph/actions/registry.py
+++ b/taskcluster/gecko_taskgraph/actions/registry.py
@@ -25,7 +25,7 @@
 
 
 def is_json(data):
-    """ Return ``True``, if ``data`` is a JSON serializable data structure. """
+    """Return ``True``, if ``data`` is a JSON serializable data structure."""
     try:
         json.dumps(data)
     except ValueError:
diff --git a/taskcluster/gecko_taskgraph/transforms/job/common.py b/taskcluster/gecko_taskgraph/transforms/job/common.py
index abcad70e29c14..d0ef54f23a1c9 100644
--- a/taskcluster/gecko_taskgraph/transforms/job/common.py
+++ b/taskcluster/gecko_taskgraph/transforms/job/common.py
@@ -63,14 +63,14 @@ def add_artifacts(config, job, taskdesc, path):
 
 
 def docker_worker_add_artifacts(config, job, taskdesc):
-    """ Adds an artifact directory to the task """
+    """Adds an artifact directory to the task"""
     path = "{workdir}/artifacts/".format(**job["run"])
     taskdesc["worker"].setdefault("env", {})["UPLOAD_DIR"] = path
     add_artifacts(config, job, taskdesc, path)
 
 
 def generic_worker_add_artifacts(config, job, taskdesc):
-    """ Adds an artifact directory to the task """
+    """Adds an artifact directory to the task"""
     # The path is the location on disk; it doesn't necessarily
     # mean the artifacts will be public or private; that is set via the name
     # attribute in add_artifacts.
diff --git a/taskcluster/gecko_taskgraph/transforms/l10n.py b/taskcluster/gecko_taskgraph/transforms/l10n.py
index 5690d0b98fae5..ec4bf19b79553 100644
--- a/taskcluster/gecko_taskgraph/transforms/l10n.py
+++ b/taskcluster/gecko_taskgraph/transforms/l10n.py
@@ -180,7 +180,7 @@ def copy_in_useful_magic(config, jobs):
 
 @transforms.add
 def setup_shippable_dependency(config, jobs):
-    """ Sets up a task dependency to the signing job this relates to """
+    """Sets up a task dependency to the signing job this relates to"""
     for job in jobs:
         job["dependencies"] = {"build": job["dependent-tasks"]["build"].label}
         if job["attributes"]["build_platform"].startswith("win") or job["attributes"][
@@ -269,7 +269,7 @@ def all_locales_attribute(config, jobs):
 
 @transforms.add
 def chunk_locales(config, jobs):
-    """ Utilizes chunking for l10n stuff """
+    """Utilizes chunking for l10n stuff"""
     for job in jobs:
         locales_per_chunk = job.get("locales-per-chunk")
         locales_with_changesets = job["attributes"]["all_locales_with_changesets"]
diff --git a/taskcluster/gecko_taskgraph/util/partners.py b/taskcluster/gecko_taskgraph/util/partners.py
index eb20ba689a2cc..cf2d4494323de 100644
--- a/taskcluster/gecko_taskgraph/util/partners.py
+++ b/taskcluster/gecko_taskgraph/util/partners.py
@@ -178,7 +178,7 @@ def get_token(params):
 
 
 def query_api(query, token):
-    """ Make a query with a Github auth header, returning the json """
+    """Make a query with a Github auth header, returning the json"""
     headers = {"Authorization": "bearer %s" % token}
     r = requests.post(GITHUB_API_ENDPOINT, json={"query": query}, headers=headers)
     r.raise_for_status()
@@ -195,7 +195,7 @@ def check_login(token):
 
 
 def get_repo_params(repo):
-    """ Parse the organisation and repo name from an https or git url for a repo """
+    """Parse the organisation and repo name from an https or git url for a repo"""
     if repo.startswith("https"):
         # eg https://github.com/mozilla-partners/mozilla-EME-free
         return repo.rsplit("/", 2)[-2:]
@@ -279,7 +279,7 @@ def parse_config(data):
 
 
 def get_repack_configs(repackRepo, token):
-    """ For a partner repository, retrieve all the repack.cfg files and parse them into a dict """
+    """For a partner repository, retrieve all the repack.cfg files and parse them into a dict"""
     log.debug("Querying for configs in %s", repackRepo)
     query = REPACK_CFG_QUERY % repackRepo
     raw_configs = query_api(query, token)
diff --git a/taskcluster/scripts/misc/fetch-chromium.py b/taskcluster/scripts/misc/fetch-chromium.py
index c7eae1df5e15c..cd530a3246fd2 100644
--- a/taskcluster/scripts/misc/fetch-chromium.py
+++ b/taskcluster/scripts/misc/fetch-chromium.py
@@ -86,7 +86,7 @@ def unzip(zippath, target):
 
 
 def fetch_chromium_revision(platform):
-    """Get the revision of the latest chromium build. """
+    """Get the revision of the latest chromium build."""
     chromium_platform = CHROMIUM_INFO[platform]["platform"]
     revision_url = LAST_CHANGE_URL.format(chromium_platform)
 
@@ -102,7 +102,7 @@ def fetch_chromium_revision(platform):
 
 
 def fetch_chromium_build(platform, revision, zippath):
-    """Download a chromium build for a given revision, or the latest. """
+    """Download a chromium build for a given revision, or the latest."""
     if not revision:
         revision = fetch_chromium_revision(platform)
 
diff --git a/testing/marionette/harness/marionette_harness/tests/harness_unit/test_marionette_test_result.py b/testing/marionette/harness/marionette_harness/tests/harness_unit/test_marionette_test_result.py
index 42e11f6f1934d..c35fd721b76a1 100644
--- a/testing/marionette/harness/marionette_harness/tests/harness_unit/test_marionette_test_result.py
+++ b/testing/marionette/harness/marionette_harness/tests/harness_unit/test_marionette_test_result.py
@@ -12,7 +12,7 @@
 
 @pytest.fixture
 def empty_marionette_testcase():
-    """ Testable MarionetteTestCase class """
+    """Testable MarionetteTestCase class"""
     from marionette_harness import MarionetteTestCase
 
     class EmptyTestCase(MarionetteTestCase):
@@ -31,7 +31,7 @@ def empty_marionette_test(mock_marionette, empty_marionette_testcase):
 
 @pytest.mark.parametrize("has_crashed", [True, False])
 def test_crash_is_recorded_as_error(empty_marionette_test, logger, has_crashed):
-    """ Number of errors is incremented by stopTest iff has_crashed is true """
+    """Number of errors is incremented by stopTest iff has_crashed is true"""
     # collect results from the empty test
     result = MarionetteTestResult(
         marionette=empty_marionette_test._marionette_weakref(),
diff --git a/testing/mochitest/runtests.py b/testing/mochitest/runtests.py
index 72464d02d16b7..38eca05cfa243 100644
--- a/testing/mochitest/runtests.py
+++ b/testing/mochitest/runtests.py
@@ -718,7 +718,7 @@ def buildConfig(self, locations, public=None):
             self.writeLocation(config, loc)
 
     def start(self):
-        """ Starts the SSL Tunnel """
+        """Starts the SSL Tunnel"""
 
         # start ssltunnel to provide https:// URLs capability
         ssltunnel = os.path.join(self.utilityPath, "ssltunnel")
@@ -737,7 +737,7 @@ def start(self):
         self.log.info("runtests.py | SSL tunnel pid: %d" % self.process.pid)
 
     def stop(self):
-        """ Stops the SSL Tunnel and cleans up """
+        """Stops the SSL Tunnel and cleans up"""
         if self.process is not None:
             self.process.kill()
         if os.path.exists(self.configFile):
@@ -994,7 +994,7 @@ def environment(self, **kwargs):
         return test_environment(**kwargs)
 
     def getFullPath(self, path):
-        " Get an absolute path relative to self.oldcwd."
+        "Get an absolute path relative to self.oldcwd."
         return os.path.normpath(os.path.join(self.oldcwd, os.path.expanduser(path)))
 
     def getLogFilePath(self, logFile):
@@ -1204,7 +1204,7 @@ def getTestsByScheme(self, options, testsToFilter=None, disabled=True):
             yield (scheme, grouped_tests)
 
     def startWebSocketServer(self, options, debuggerInfo):
-        """ Launch the websocket server """
+        """Launch the websocket server"""
         self.wsserver = WebSocketServer(options, SCRIPT_DIR, self.log, debuggerInfo)
         self.wsserver.start()
 
@@ -1868,7 +1868,7 @@ def buildBrowserEnv(self, options, debugger=False, env=None):
         return browserEnv
 
     def killNamedProc(self, pname, orphans=True):
-        """ Kill processes matching the given command name """
+        """Kill processes matching the given command name"""
         self.log.info("Checking for %s processes..." % pname)
 
         if HAVE_PSUTIL:
@@ -2060,7 +2060,7 @@ def merge_base_profiles(self, options, category):
             self.profile.merge(path, interpolation=interpolation)
 
     def buildProfile(self, options):
-        """ create the profile and add optional chrome bits and files if requested """
+        """create the profile and add optional chrome bits and files if requested"""
         # get extensions to install
         extensions = self.getExtensionsToInstall(options)
 
@@ -2202,7 +2202,7 @@ def getGMPPluginPath(self, options):
             return os.pathsep.join(gmp_paths)
 
     def cleanup(self, options, final=False):
-        """ remove temporary files and profile """
+        """remove temporary files and profile"""
         if hasattr(self, "manifest") and self.manifest is not None:
             if os.path.exists(self.manifest):
                 os.remove(self.manifest)
@@ -2854,7 +2854,7 @@ def fission_step2():
             return 0
 
     def runTests(self, options):
-        """ Prepare, configure, run tests and cleanup """
+        """Prepare, configure, run tests and cleanup"""
         self.extraPrefs = parse_preferences(options.extraPrefs)
 
         # for test manifest parsing.
diff --git a/testing/mochitest/runtestsremote.py b/testing/mochitest/runtestsremote.py
index a71c962e013b3..2193b43132b3f 100644
--- a/testing/mochitest/runtestsremote.py
+++ b/testing/mochitest/runtestsremote.py
@@ -150,7 +150,7 @@ def findPath(self, paths, filename=None):
     # ones that the base class understands. This is necessary for the web
     # server, SSL tunnel and profile building functions.
     def switchToLocalPaths(self, options):
-        """ Set local paths in the options, return a function that will restore remote values """
+        """Set local paths in the options, return a function that will restore remote values"""
         remoteXrePath = options.xrePath
         remoteProfilePath = options.profilePath
         remoteUtilityPath = options.utilityPath
@@ -207,7 +207,7 @@ def fixup():
         return fixup
 
     def startServers(self, options, debuggerInfo, public=None):
-        """ Create the servers on the host and start them up """
+        """Create the servers on the host and start them up"""
         restoreRemotePaths = self.switchToLocalPaths(options)
         MochitestDesktop.startServers(self, options, debuggerInfo, public=True)
         restoreRemotePaths()
diff --git a/testing/mozbase/mozfile/mozfile/mozfile.py b/testing/mozbase/mozfile/mozfile/mozfile.py
index b8d649ee8b7c3..694ab61b7232f 100644
--- a/testing/mozbase/mozfile/mozfile/mozfile.py
+++ b/testing/mozbase/mozfile/mozfile/mozfile.py
@@ -321,7 +321,7 @@ def move(src, dst):
 
 
 def depth(directory):
-    """returns the integer depth of a directory or path relative to '/' """
+    """returns the integer depth of a directory or path relative to '/'"""
     directory = os.path.abspath(directory)
     level = 0
diff --git a/testing/mozbase/mozhttpd/mozhttpd/handlers.py b/testing/mozbase/mozhttpd/mozhttpd/handlers.py
index dc2b8cf861b3b..920968a05152d 100644
--- a/testing/mozbase/mozhttpd/mozhttpd/handlers.py
+++ b/testing/mozbase/mozhttpd/mozhttpd/handlers.py
@@ -8,7 +8,7 @@
 
 
 def json_response(func):
-    """ Translates results of 'func' into a JSON response. """
+    """Translates results of 'func' into a JSON response."""
 
     def wrap(*a, **kw):
         (code, data) = func(*a, **kw)
diff --git a/testing/mozbase/mozlog/mozlog/formatters/html/xmlgen.py b/testing/mozbase/mozlog/mozlog/formatters/html/xmlgen.py
index ef8f85c6b402c..ddaa86caa3664 100644
--- a/testing/mozbase/mozlog/mozlog/formatters/html/xmlgen.py
+++ b/testing/mozbase/mozlog/mozlog/formatters/html/xmlgen.py
@@ -147,7 +147,7 @@ def __init__(self, uniobj):
 
 
 class SimpleUnicodeVisitor(object):
-    """ recursive visitor to write unicode. """
+    """recursive visitor to write unicode."""
 
     def __init__(self, write, indent=0, curindent=0, shortempty=True):
         self.write = write
@@ -159,7 +159,7 @@ def __init__(self, write, indent=0, curindent=0, shortempty=True):
         self.shortempty = shortempty  # short empty tags or not
 
     def visit(self, node):
-        """ dispatcher on node's class/bases name. """
+        """dispatcher on node's class/bases name."""
         cls = node.__class__
         try:
             visitmethod = self.cache[cls]
@@ -238,7 +238,7 @@ def repr_attribute(self, attrs, name):
             return ' %s="%s"' % (name, insert)
 
     def getstyle(self, tag):
-        """ return attribute list suitable for styling. """
+        """return attribute list suitable for styling."""
         try:
             styledict = tag.style.__dict__
         except AttributeError:
@@ -306,7 +306,7 @@ def _replacer(self, match):
         return self.escape[match.group(0)]
 
     def __call__(self, ustring):
-        """ xml-escape the given unicode string. """
+        """xml-escape the given unicode string."""
         ustring = unicode(ustring)
         return self.charef_rex.sub(self._replacer, ustring)
diff --git a/testing/mozbase/mozprocess/tests/test_kill.py b/testing/mozbase/mozprocess/tests/test_kill.py
index d829b47a17ab3..d2baf2cd62efb 100644
--- a/testing/mozbase/mozprocess/tests/test_kill.py
+++ b/testing/mozbase/mozprocess/tests/test_kill.py
@@ -17,7 +17,7 @@
 
 
 class ProcTestKill(proctest.ProcTest):
-    """ Class to test various process tree killing scenatios """
+    """Class to test various process tree killing scenatios"""
 
     def test_kill_before_run(self):
         """Process is not started, and kill() is called"""
diff --git a/testing/mozbase/mozprocess/tests/test_misc.py b/testing/mozbase/mozprocess/tests/test_misc.py
index ef678501be528..ead6fbdaf8bfa 100644
--- a/testing/mozbase/mozprocess/tests/test_misc.py
+++ b/testing/mozbase/mozprocess/tests/test_misc.py
@@ -15,7 +15,7 @@
 
 
 class ProcTestMisc(proctest.ProcTest):
-    """ Class to test misc operations """
+    """Class to test misc operations"""
 
     def test_process_timeout_no_kill(self):
         """Process is started, runs but we time out waiting on it
diff --git a/testing/mozbase/mozprocess/tests/test_output.py b/testing/mozbase/mozprocess/tests/test_output.py
index d93c1a254ba54..906bcdcaab10e 100644
--- a/testing/mozbase/mozprocess/tests/test_output.py
+++ b/testing/mozbase/mozprocess/tests/test_output.py
@@ -13,7 +13,7 @@
 
 
 class ProcTestOutput(proctest.ProcTest):
-    """ Class to test operations related to output handling """
+    """Class to test operations related to output handling"""
 
     def test_process_output_twice(self):
         """
diff --git a/testing/mozbase/mozprocess/tests/test_wait.py b/testing/mozbase/mozprocess/tests/test_wait.py
index 64026029a6453..edcca51b43dd9 100644
--- a/testing/mozbase/mozprocess/tests/test_wait.py
+++ b/testing/mozbase/mozprocess/tests/test_wait.py
@@ -15,7 +15,7 @@
 
 
 class ProcTestWait(proctest.ProcTest):
-    """ Class to test process waits and timeouts """
+    """Class to test process waits and timeouts"""
 
     def test_normal_finish(self):
         """Process is started, runs to completion while we wait for it"""
diff --git a/testing/mozbase/mozprofile/mozprofile/cli.py b/testing/mozbase/mozprofile/mozprofile/cli.py
index 44bb3361c1abf..9110c9640cf6b 100755
--- a/testing/mozbase/mozprofile/mozprofile/cli.py
+++ b/testing/mozbase/mozprofile/mozprofile/cli.py
@@ -161,7 +161,7 @@ def profile(self, restore=False):
 
 
 def cli(args=sys.argv[1:]):
-    """ Handles the command line arguments for ``mozprofile`` via ``sys.argv``"""
+    """Handles the command line arguments for ``mozprofile`` via ``sys.argv``"""
 
     # add a view method for this cli method only
     def add_options(parser):
diff --git a/testing/mozbase/mozprofile/tests/test_addons.py b/testing/mozbase/mozprofile/tests/test_addons.py
index bcff62f7245fc..a7dc688221b90 100644
--- a/testing/mozbase/mozprofile/tests/test_addons.py
+++ b/testing/mozbase/mozprofile/tests/test_addons.py
@@ -259,7 +259,7 @@ def test_install_invalid_addons(tmpdir, am):
 
 @pytest.mark.xfail(reason="feature not implemented as part of AddonManger")
 def test_install_error(am):
-    """ Check install raises an error with an invalid addon"""
+    """Check install raises an error with an invalid addon"""
     temp_addon = generate_addon("test-addon-invalid-version@mozilla.org")
     # This should raise an error here
     with pytest.raises(Exception):
diff --git a/testing/mozbase/mozrunner/mozrunner/application.py b/testing/mozbase/mozrunner/mozrunner/application.py
index 09974af11a1af..afdf8edecd69a 100644
--- a/testing/mozbase/mozrunner/mozrunner/application.py
+++ b/testing/mozbase/mozrunner/mozrunner/application.py
@@ -94,7 +94,7 @@ def which(self, binary):
 
     @abstractmethod
     def stop_application(self):
-        """ Run (device manager) command to stop application. """
+        """Run (device manager) command to stop application."""
         pass
 
diff --git a/testing/mozbase/moztest/moztest/results.py b/testing/mozbase/moztest/moztest/results.py
index 222001c30634d..228bacf77228a 100644
--- a/testing/mozbase/moztest/moztest/results.py
+++ b/testing/mozbase/moztest/moztest/results.py
@@ -13,7 +13,7 @@
 
 
 class TestContext(object):
-    """ Stores context data about the test """
+    """Stores context data about the test"""
 
     attrs = [
         "hostname",
@@ -79,7 +79,7 @@ def get(attr):
 
 
 class TestResult(object):
-    """ Stores test result data """
+    """Stores test result data"""
 
     FAIL_RESULTS = [
         "UNEXPECTED-PASS",
@@ -215,7 +215,7 @@ def finish(self, result, time_end=None, output=None, reason=None):
 
     @property
     def finished(self):
-        """ Boolean saying if the test is finished or not """
+        """Boolean saying if the test is finished or not"""
         return self.result is not None
 
     @property
@@ -230,7 +230,7 @@ def duration(self):
 
 
 class TestResultCollection(list):
-    """ Container class that stores test results """
+    """Container class that stores test results"""
 
     resultClass = TestResult
 
@@ -256,16 +256,16 @@ def subset(self, predicate):
 
     @property
     def contexts(self):
-        """ List of unique contexts for the test results contained """
+        """List of unique contexts for the test results contained"""
         cs = [tr.context for tr in self]
         return list(set(cs))
 
     def filter(self, predicate):
-        """ Returns a generator of TestResults that satisfy a given predicate """
+        """Returns a generator of TestResults that satisfy a given predicate"""
         return (tr for tr in self if predicate(tr))
 
     def tests_with_result(self, result):
-        """ Returns a generator of TestResults with the given result """
+        """Returns a generator of TestResults with the given result"""
         msg = "Result '%s' not in possible results: %s" % (
             result,
             ", ".join(self.resultClass.COMPUTED_RESULTS),
@@ -275,7 +275,7 @@ def tests_with_result(self, result):
 
     @property
     def tests(self):
-        """ Generator of all tests in the collection """
+        """Generator of all tests in the collection"""
         return (t for t in self)
 
     def add_result(
@@ -308,7 +308,7 @@ def num_failures(self):
         return fails
 
     def add_unittest_result(self, result, context=None):
-        """ Adds the python unittest result provided to the collection"""
+        """Adds the python unittest result provided to the collection"""
         if hasattr(result, "time_taken"):
             self.time_taken += result.time_taken
 
diff --git a/testing/mozharness/mozharness/base/log.py b/testing/mozharness/mozharness/base/log.py
index b2c41b1bf1a0d..8b3a927afffe1 100755
--- a/testing/mozharness/mozharness/base/log.py
+++ b/testing/mozharness/mozharness/base/log.py
@@ -538,7 +538,7 @@ def _clear_handlers(self):
         self.all_handlers = []
 
     def __del__(self):
-        """ BaseLogger class destructor; shutdown, flush and remove all handlers"""
+        """BaseLogger class destructor; shutdown, flush and remove all handlers"""
         logging.shutdown()
         self._clear_handlers()
 
@@ -657,7 +657,7 @@ def __init__(
         self.init_message()
 
     def new_logger(self):
-        """ calls the BaseLogger.new_logger method and adds a file handler to it."""
+        """calls the BaseLogger.new_logger method and adds a file handler to it."""
         BaseLogger.new_logger(self)
         self.log_path = os.path.join(self.abs_log_dir, "%s.log" % self.log_name)
 
@@ -781,5 +781,5 @@ def numeric_log_level(level):
 
 # __main__ {{{1
 if __name__ == "__main__":
-    """ Useless comparison, due to the `pass` keyword on its body"""
+    """Useless comparison, due to the `pass` keyword on its body"""
     pass
diff --git a/testing/mozharness/mozharness/base/script.py b/testing/mozharness/mozharness/base/script.py
index 4de2b2bc76b4f..14c032fa7ab12 100644
--- a/testing/mozharness/mozharness/base/script.py
+++ b/testing/mozharness/mozharness/base/script.py
@@ -801,7 +801,7 @@ def _determine_extraction_method_and_kwargs(url):
             self.exception(level=FATAL)
 
     def load_json_url(self, url, error_level=None, *args, **kwargs):
-        """ Returns a json object from a url (it retries). """
+        """Returns a json object from a url (it retries)."""
         contents = self._retry_download(
             url=url, error_level=error_level, *args, **kwargs
         )
diff --git a/testing/mozharness/mozharness/mozilla/testing/talos.py b/testing/mozharness/mozharness/mozilla/testing/talos.py
index d7fa96ff11b58..cfc8414edcc85 100755
--- a/testing/mozharness/mozharness/mozilla/testing/talos.py
+++ b/testing/mozharness/mozharness/mozilla/testing/talos.py
@@ -458,7 +458,7 @@ def query_webextensions_zip(self):
         return self.webextensions_zip
 
     def get_suite_from_test(self):
-        """ Retrieve the talos suite name from a given talos test name."""
+        """Retrieve the talos suite name from a given talos test name."""
        # running locally, single test name provided instead of suite; go through tests and
         # find suite name
         suite_name = None
@@ -484,7 +484,7 @@ def get_suite_from_test(self):
         return suite_name
 
     def validate_suite(self):
-        """ Ensure suite name is a valid talos suite. """
+        """Ensure suite name is a valid talos suite."""
         if self.query_talos_json_config() and self.suite is not None:
             if self.suite not in self.talos_json_config.get("suites"):
                 self.fatal(
diff --git a/testing/mozharness/mozharness/mozilla/testing/testbase.py b/testing/mozharness/mozharness/mozilla/testing/testbase.py
index 8dd3978de81b3..8a05f614c524b 100755
--- a/testing/mozharness/mozharness/mozilla/testing/testbase.py
+++ b/testing/mozharness/mozharness/mozilla/testing/testbase.py
@@ -607,7 +607,7 @@ def preflight_install(self):
             )
 
     def install_app(self, app=None, target_dir=None, installer_path=None):
-        """ Dependent on mozinstall """
+        """Dependent on mozinstall"""
         # install the application
         cmd = [self.query_python_path("mozinstall")]
         if app:
@@ -636,7 +636,7 @@ def install(self):
         self.binary_path = self.install_app(app=self.config.get("application"))
 
     def uninstall_app(self, install_dir=None):
-        """ Dependent on mozinstall """
+        """Dependent on mozinstall"""
        # uninstall the application
         cmd = self.query_exe(
             "mozuninstall",
diff --git a/testing/mozharness/mozharness/mozilla/testing/try_tools.py b/testing/mozharness/mozharness/mozilla/testing/try_tools.py
index 0a161fe7aa255..8989780ca6eba 100644
--- a/testing/mozharness/mozharness/mozilla/testing/try_tools.py
+++ b/testing/mozharness/mozharness/mozilla/testing/try_tools.py
@@ -85,7 +85,7 @@ def _extract_try_message(self):
         return msg
 
     def _extract_try_args(self, msg):
-        """ Returns a list of args from a try message, for parsing """
+        """Returns a list of args from a try message, for parsing"""
         if not msg:
             return None
         all_try_args = None
diff --git a/testing/mozharness/scripts/merge_day/gecko_migration.py b/testing/mozharness/scripts/merge_day/gecko_migration.py
index 2c92ccb63dba0..f4986ab7c74dd 100755
--- a/testing/mozharness/scripts/merge_day/gecko_migration.py
+++ b/testing/mozharness/scripts/merge_day/gecko_migration.py
@@ -436,7 +436,7 @@ def beta_to_release(self, *args, **kwargs):
         self.touch_clobber_file(dirs["abs_to_dir"])
 
     def release_to_esr(self, *args, **kwargs):
-        """ mozilla-release -> mozilla-esrNN behavior. """
+        """mozilla-release -> mozilla-esrNN behavior."""
         dirs = self.query_abs_dirs()
         self.apply_replacements()
         self.touch_clobber_file(dirs["abs_to_dir"])
@@ -456,7 +456,7 @@ def apply_replacements(self):
             self.replace(os.path.join(dirs["abs_to_dir"], f), from_, to)
 
     def pull_from_repo(self, from_dir, to_dir, revision=None, branch=None):
-        """ Pull from one repo to another. """
+        """Pull from one repo to another."""
         hg = self.query_exe("hg", return_type="list")
         cmd = hg + ["pull"]
         if revision:
diff --git a/testing/talos/talos/xtalos/xperf_analyzer.py b/testing/talos/talos/xtalos/xperf_analyzer.py
index bee03caa866b3..6d3370da44b27 100644
--- a/testing/talos/talos/xtalos/xperf_analyzer.py
+++ b/testing/talos/talos/xtalos/xperf_analyzer.py
@@ -652,7 +652,7 @@ def __str__(self):
 
 
 class SessionStoreWindowRestored(ClassicEvent):
-    """ The Firefox session store window restored event """
+    """The Firefox session store window restored event"""
 
     def __init__(self):
         super(SessionStoreWindowRestored, self).__init__(
diff --git a/testing/tools/websocketprocessbridge/websocketprocessbridge.py b/testing/tools/websocketprocessbridge/websocketprocessbridge.py
index b7b640520513f..70c4fa6afca34 100644
--- a/testing/tools/websocketprocessbridge/websocketprocessbridge.py
+++ b/testing/tools/websocketprocessbridge/websocketprocessbridge.py
@@ -98,7 +98,7 @@ def processGone(self):
 
 
 def check_parent():
-    """ Checks if parent process is still alive, and exits if not """
+    """Checks if parent process is still alive, and exits if not"""
     if not parent_process.is_running():
         print("websocket/process bridge exiting because parent process is gone")
         reactor.stop()
diff --git a/toolkit/components/telemetry/build_scripts/mozparsers/shared_telemetry_utils.py b/toolkit/components/telemetry/build_scripts/mozparsers/shared_telemetry_utils.py
index 9c592e1d2e3b0..226d9359378e4 100644
--- a/toolkit/components/telemetry/build_scripts/mozparsers/shared_telemetry_utils.py
+++ b/toolkit/components/telemetry/build_scripts/mozparsers/shared_telemetry_utils.py
@@ -174,7 +174,7 @@ def add_expiration_postfix(expiration):
 
 
 def load_yaml_file(filename):
-    """ Load a YAML file from disk, throw a ParserError on failure."""
+    """Load a YAML file from disk, throw a ParserError on failure."""
     try:
         with open(filename, "r") as f:
             return yaml.safe_load(f)
diff --git a/toolkit/crashreporter/generate_crash_reporter_sources.py b/toolkit/crashreporter/generate_crash_reporter_sources.py
index c0699bd9e3702..8f4532762333a 100644
--- a/toolkit/crashreporter/generate_crash_reporter_sources.py
+++ b/toolkit/crashreporter/generate_crash_reporter_sources.py
@@ -18,7 +18,7 @@
 
 
 def validate_annotations(annotations):
-    """ Ensure that the annotations have all the required fields """
+    """Ensure that the annotations have all the required fields"""
 
     for (name, data) in sorted(annotations.items()):
         if "description" not in data:
diff --git a/xpcom/idl-parser/xpidl/xpidl.py b/xpcom/idl-parser/xpidl/xpidl.py
index c447569515cbe..bf0eeaed9a909 100755
--- a/xpcom/idl-parser/xpidl/xpidl.py
+++ b/xpcom/idl-parser/xpidl/xpidl.py
@@ -849,7 +849,7 @@ def needsJSTypes(self):
         return False
 
     def countEntries(self):
-        """ Returns the number of entries in the vtable for this interface. """
+        """Returns the number of entries in the vtable for this interface."""
         total = sum(member.count() for member in self.members)
         if self.base is not None:
             realbase = self.idl.getName(TypeId(self.base), self.location)
@@ -1638,7 +1638,7 @@ def p_idlfile(self, p):
         p[0] = IDL(p[1])
 
     def p_productions_start(self, p):
-        """productions : """
+        """productions :"""
        p[0] = []
 
     def p_productions_cdata(self, p):
@@ -1678,7 +1678,7 @@ def p_native(self, p):
         )
 
     def p_afternativeid(self, p):
-        """afternativeid : """
+        """afternativeid :"""
         # this is a place marker: we switch the lexer into literal identifier
         # mode here, to slurp up everything until the closeparen
         self.lexer.begin("nativeid")
@@ -1765,7 +1765,7 @@ def p_ifacebase(self, p):
         p[0] = p[2]
 
     def p_members_start(self, p):
-        """members : """
+        """members :"""
         p[0] = []
 
     def p_members_continue(self, p):
@@ -1778,7 +1778,7 @@ def p_member_cdata(self, p):
         p[0] = CDATA(p[1], self.getLocation(p, 1))
 
     def p_member_const(self, p):
-        """member : CONST type IDENTIFIER '=' number ';' """
+        """member : CONST type IDENTIFIER '=' number ';'"""
         p[0] = ConstMember(
             type=p[2],
             name=p[3],
@@ -1854,7 +1854,7 @@ def p_member_cenum(self, p):
         )
 
     def p_variants_start(self, p):
-        """variants : """
+        """variants :"""
         p[0] = []
 
     def p_variants_single(self, p):