forked from googleapis/synthtool
/
common.py
383 lines (323 loc) · 16 KB
/
common.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import os
import re
import shutil
import fnmatch
from copy import deepcopy
from pathlib import Path
from typing import Dict, List, Optional

import jinja2

from synthtool import shell, _tracked_paths
from synthtool.gcp import partials
from synthtool.languages import node
from synthtool.log import logger
from synthtool.sources import git, templates

# Re-exported alias from synthtool.sources.templates (a Path or a str).
PathOrStr = templates.PathOrStr

# Clone URL of the canonical template repository used when no local
# template override is configured.
TEMPLATES_URL: str = git.make_repo_clone_url("googleapis/synthtool")

# Directory inside that repository that holds the language templates.
DEFAULT_TEMPLATES_PATH = "synthtool/gcp/templates"

# Optional local template root; when the SYNTHTOOL_TEMPLATES environment
# variable is set, templates are read from this directory instead of
# cloning TEMPLATES_URL.
LOCAL_TEMPLATES: Optional[str] = os.environ.get("SYNTHTOOL_TEMPLATES")
class CommonTemplates:
    """Renders the shared GCP repository templates for each supported language.

    Template sources are resolved, in order of precedence, from an explicit
    ``template_path`` argument, the ``SYNTHTOOL_TEMPLATES`` environment
    variable, or a fresh clone of the googleapis/synthtool repository.
    """

    def __init__(self, template_path: Optional[Path] = None):
        """Locate the template root and prepare the template loader.

        Args:
            template_path: Optional explicit path to a template root; when
                omitted, LOCAL_TEMPLATES or a clone of TEMPLATES_URL is used.
        """
        if template_path:
            self._template_root = template_path
        elif LOCAL_TEMPLATES:
            logger.debug(f"Using local templates at {LOCAL_TEMPLATES}")
            self._template_root = Path(LOCAL_TEMPLATES)
        else:
            # No override configured: clone the canonical template repo.
            templates_git = git.clone(TEMPLATES_URL)
            self._template_root = templates_git / DEFAULT_TEMPLATES_PATH
        self._templates = templates.Templates(self._template_root)
        # Template file patterns that rendering should skip; methods below
        # append to this list before delegating to _generic_library().
        self.excludes = []  # type: List[str]

    def _generic_library(self, directory: str, **kwargs) -> Path:
        """Render the template group under ``directory`` and return its output path.

        NOTE(review): the "repository" check below reads kwargs["metadata"]
        unconditionally — callers appear to always supply a "metadata" key
        (all public wrappers default it to {}); confirm before reusing.
        """
        # load common repo meta information (metadata that's not language specific).
        if "metadata" in kwargs:
            self._load_generic_metadata(kwargs["metadata"])
            # if no samples were found, don't attempt to render a
            # samples/README.md.
            if "samples" not in kwargs["metadata"] or not kwargs["metadata"]["samples"]:
                self.excludes.append("samples/README.md")

        t = templates.TemplateGroup(self._template_root / directory, self.excludes)

        if "repository" in kwargs["metadata"] and "repo" in kwargs["metadata"]:
            kwargs["metadata"]["repo"]["default_branch"] = _get_default_branch_name(
                kwargs["metadata"]["repository"]
            )

        # TODO: migrate to python.py once old sample gen is deprecated
        if directory == "python_samples":
            # Expose a Jinja global that captures a sample's --help output.
            t.env.globals["get_help"] = lambda filename: shell.run(
                ["python", filename, "--help"]
            ).stdout

        result = t.render(**kwargs)
        _tracked_paths.add(result)

        return result

    def py_samples(self, **kwargs) -> List[Path]:
        """
        Handles generation of README.md templates for Python samples
        - Determines whether generation is being done in a client library or in a samples
        folder automatically
        - Otherwise accepts manually set sample_project_dir through kwargs metadata
        - Delegates generation of additional sample documents alternate/overridden folders
        through py_samples_override()

        Returns:
            A list of paths to rendered output directories (one per override
            path, plus one for the default sample directory).
        """
        # kwargs["metadata"] is required to load values from .repo-metadata.json
        if "metadata" not in kwargs:
            kwargs["metadata"] = {}

        # load common repo meta information (metadata that's not language specific).
        self._load_generic_metadata(kwargs["metadata"])

        # temporary exclusion prior to old templates being migrated out
        self.excludes.extend(
            [
                "README.rst",
                "auth_api_key.tmpl.rst",
                "auth.tmpl.rst",
                "install_deps.tmpl.rst",
                "install_portaudio.tmpl.rst",
                "noxfile.py.j2",
            ]
        )

        # ensure samples will generate
        kwargs["metadata"]["samples"] = True

        # determine if in client lib and set custom root sample dir if specified, else None
        in_client_library = Path("samples").exists()
        sample_project_dir = kwargs["metadata"]["repo"].get("sample_project_dir")

        if sample_project_dir is None:  # Not found in metadata
            if in_client_library:
                sample_project_dir = "samples"
            else:
                sample_project_dir = "."
        elif not Path(sample_project_dir).exists():
            raise Exception(f"'{sample_project_dir}' does not exist")

        override_paths_to_samples: Dict[
            str, List[str]
        ] = {}  # Dict of format { override_path : sample(s) }
        # NOTE(review): assumes metadata["repo"]["samples"] is present —
        # deepcopy(None) would make the enumerate() below raise; confirm
        # callers always populate it.
        samples_dict = deepcopy(kwargs["metadata"]["repo"].get("samples"))
        default_samples_dict = []  # Dict which will generate in sample_project_dir

        # Iterate through samples to store override_paths_to_samples for all existing
        # override paths
        for sample_idx, sample in enumerate(samples_dict):
            override_path = samples_dict[sample_idx].get("override_path")

            if override_path is not None:
                # add absolute path to metadata so `python foo.py --help` succeeds
                if sample.get("file") is not None:
                    path = os.path.join(
                        sample_project_dir, override_path, sample.get("file")
                    )
                    sample["abs_path"] = Path(path).resolve()

                cur_override_sample = override_paths_to_samples.get(override_path)
                # Base case: No samples are yet planned to gen in this override dir
                if cur_override_sample is None:
                    override_paths_to_samples[override_path] = [sample]
                # Else: Sample docs will be generated in README merged with other
                # sample doc(s) already planned to generate in this dir
                else:
                    cur_override_sample.append(sample)
                    override_paths_to_samples[override_path] = cur_override_sample
            # If override path none, will be generated in the default
            # folder: sample_project_dir
            else:
                if sample.get("file") is not None:
                    path = os.path.join(sample_project_dir, sample.get("file"))
                    sample["abs_path"] = Path(path).resolve()
                default_samples_dict.append(sample)

        # List of paths to tempdirs which will be copied into sample folders
        result = []

        # deep copy is req. here to avoid kwargs being affected
        overridden_samples_kwargs = deepcopy(kwargs)
        for override_path in override_paths_to_samples:
            # Generate override sample docs
            result.append(
                self.py_samples_override(
                    root=sample_project_dir,
                    override_path=override_path,
                    override_samples=override_paths_to_samples[override_path],
                    **overridden_samples_kwargs,
                )
            )
        kwargs["metadata"]["repo"]["samples"] = default_samples_dict

        logger.debug(
            f"Generating templates for samples directory '{sample_project_dir}'"
        )
        kwargs["subdir"] = sample_project_dir
        # Generate default sample docs
        result.append(self._generic_library("python_samples", **kwargs))

        for path in result:
            # .add() records the root of the paths and needs to be applied to each
            _tracked_paths.add(path)

        return result

    def py_samples_override(
        self, root, override_path, override_samples, **overridden_samples_kwargs
    ) -> Path:
        """
        Handles additional generation of READMEs where "override_path"s
        are set in one or more samples' metadata

        Args:
            root: The default sample project directory ("." when at repo root).
            override_path: Directory (relative to ``root``) the docs render into.
            override_samples: The subset of sample metadata destined for that path.

        Returns:
            Path to the rendered output for this override directory.
        """
        overridden_samples_kwargs["metadata"]["repo"][
            "sample_project_dir"
        ] = override_path
        # Set samples metadata to ONLY samples intended to generate
        # under this directory (override_path)
        overridden_samples_kwargs["metadata"]["repo"]["samples"] = override_samples
        if root != ".":
            override_path = Path(root) / override_path

        logger.debug(f"Generating templates for override path '{override_path}'")

        overridden_samples_kwargs["subdir"] = override_path
        return self._generic_library("python_samples", **overridden_samples_kwargs)

    def python_notebooks(self, **kwargs) -> Path:
        """Render the ``python_notebooks`` template group."""
        # kwargs["metadata"] is required to load values from .repo-metadata.json
        if "metadata" not in kwargs:
            kwargs["metadata"] = {}
        return self._generic_library("python_notebooks", **kwargs)

    def py_library(self, **kwargs) -> Path:
        """Render the ``python_library`` template group.

        Fills in default Python/nox versions when the caller does not supply
        them, and optionally fans out per-version system-test presubmit
        configs when ``split_system_tests`` is truthy.
        """
        # kwargs["metadata"] is required to load values from .repo-metadata.json
        if "metadata" not in kwargs:
            kwargs["metadata"] = {}

        # rename variable to accommodate existing synth.py files
        if "system_test_dependencies" in kwargs:
            kwargs["system_test_local_dependencies"] = kwargs[
                "system_test_dependencies"
            ]
            logger.warning(
                "Template argument 'system_test_dependencies' is deprecated."
                "Use 'system_test_local_dependencies' or 'system_test_external_dependencies'"
                "instead."
            )

        # Set default Python versions for noxfile.py
        if "default_python_version" not in kwargs:
            kwargs["default_python_version"] = "3.8"
        if "unit_test_python_versions" not in kwargs:
            kwargs["unit_test_python_versions"] = ["3.6", "3.7", "3.8", "3.9"]
        if "system_test_python_versions" not in kwargs:
            kwargs["system_test_python_versions"] = ["3.8"]

        # If cov_level is not given, set it to None.
        if "cov_level" not in kwargs:
            kwargs["cov_level"] = None

        # Don't add samples templates if there are no samples
        if "samples" not in kwargs:
            self.excludes += ["samples/AUTHORING_GUIDE.md", "samples/CONTRIBUTING.md"]

        ret = self._generic_library("python_library", **kwargs)

        # If split_system_tests is set to True, we disable the system
        # test in the main presubmit build and create individual build
        # configs for each python versions.
        if kwargs.get("split_system_tests", False):
            template_root = self._template_root / "py_library_split_systests"
            # copy the main presubmit config
            shutil.copy2(
                template_root / ".kokoro/presubmit/presubmit.cfg",
                ret / ".kokoro/presubmit/presubmit.cfg",
            )
            env = jinja2.Environment(loader=jinja2.FileSystemLoader(str(template_root)))
            tmpl = env.get_template(".kokoro/presubmit/system.cfg")
            for v in kwargs["system_test_python_versions"]:
                nox_session = f"system-{v}"
                dest = ret / f".kokoro/presubmit/system-{v}.cfg"
                content = tmpl.render(nox_session=nox_session)
                with open(dest, "w") as f:
                    f.write(content)
        return ret

    def java_library(self, **kwargs) -> Path:
        """Render the ``java_library`` template group."""
        # kwargs["metadata"] is required to load values from .repo-metadata.json
        if "metadata" not in kwargs:
            kwargs["metadata"] = {}
        return self._generic_library("java_library", **kwargs)

    def node_library(self, **kwargs) -> Path:
        """Render the ``node_library`` template group.

        Loads Node-specific metadata (and an npm publish token) via the
        ``synthtool.languages.node`` helpers, and may generate a root-level
        ``src/index.ts`` when version info is supplied.
        """
        # TODO: once we've migrated all Node.js repos to either having
        # .repo-metadata.json, or excluding README.md, we can remove this.
        if not os.path.exists("./.repo-metadata.json"):
            self.excludes.append("README.md")
            if "samples/README.md" not in self.excludes:
                self.excludes.append("samples/README.md")

        kwargs["metadata"] = node.template_metadata()
        kwargs["publish_token"] = node.get_publish_token(kwargs["metadata"]["name"])

        # Non-empty iff some exclude pattern matches "src/index.ts".
        ignore_src_index = [
            "yes" for f in self.excludes if fnmatch.fnmatch("src/index.ts", f)
        ]
        # generate root-level `src/index.ts` to export multiple versions and its default clients
        if (
            "versions" in kwargs
            and "default_version" in kwargs
            and not ignore_src_index
        ):
            node.generate_index_ts(
                versions=kwargs["versions"], default_version=kwargs["default_version"]
            )
        return self._generic_library("node_library", **kwargs)

    def php_library(self, **kwargs) -> Path:
        """Render the ``php_library`` template group."""
        return self._generic_library("php_library", **kwargs)

    def ruby_library(self, **kwargs) -> Path:
        """Render the ``ruby_library`` template group."""
        # kwargs["metadata"] is required to load values from .repo-metadata.json
        if "metadata" not in kwargs:
            kwargs["metadata"] = {}
        return self._generic_library("ruby_library", **kwargs)

    def render(self, template_name: str, **kwargs) -> Path:
        """Render a single named template and track the resulting path."""
        template = self._templates.render(template_name, **kwargs)
        _tracked_paths.add(template)
        return template

    def _load_generic_metadata(self, metadata: Dict):
        """
        loads additional meta information from .repo-metadata.json.

        Mutates ``metadata`` in place: always refreshes the "partials" key,
        and fills in "repo" from the default metadata file when absent.
        """
        metadata["partials"] = partials.load_partials()

        # Loads repo metadata information from the default location if it
        # hasn't already been set. Some callers may have already loaded repo
        # metadata, so we don't need to do it again or overwrite it. Also, only
        # set the "repo" key.
        if "repo" not in metadata:
            metadata["repo"] = _load_repo_metadata()
def decamelize(value: str):
    """Turn a camelCase name into space-separated, capitalized words.

    ``fooBar`` becomes ``Foo Bar``; a run of capitals stays together, so
    ``ACLBatman`` becomes ``ACL Batman``. Falsy input yields "".
    """
    if not value:
        return ""
    # Capitalize only the very first character: apple -> Apple.
    capitalized = value[0].upper() + value[1:]
    # Split an acronym from the word that follows it: ACLBatman -> ACL Batman.
    acronym_split = re.sub(r"([A-Z]+)([A-Z])([a-z0-9])", r"\1 \2\3", capitalized)
    # Split at every remaining lower-to-upper boundary: FooBar -> Foo Bar.
    return re.sub(r"([a-z0-9])([A-Z])", r"\1 \2", acronym_split)
def _load_repo_metadata(metadata_file: str = "./.repo-metadata.json") -> Dict:
"""Parse a metadata JSON file into a Dict.
Currently, the defined fields are:
* `name` - The service's API name
* `name_pretty` - The service's API title. This will be used for generating titles on READMEs
* `product_documentation` - The product documentation on cloud.google.com
* `client_documentation` - The client library reference documentation
* `issue_tracker` - The public issue tracker for the product
* `release_level` - The release level of the client library. One of: alpha, beta, ga, deprecated
* `language` - The repo language. One of dotnet, go, java, nodejs, php, python, ruby
* `repo` - The GitHub repo in the format {owner}/{repo}
* `distribution_name` - The language-idiomatic package/distribution name
* `api_id` - The API ID associated with the service. Fully qualified identifier use to
enable a service in the cloud platform (e.g. monitoring.googleapis.com)
* `requires_billing` - Whether or not the API requires billing to be configured on the
customer's acocunt
Args:
metadata_file (str, optional): Path to the metadata json file
Returns:
A dictionary of metadata. This may not necessarily include all the defined fields above.
"""
if os.path.exists(metadata_file):
with open(metadata_file) as f:
return json.load(f)
return {}
def _get_default_branch_name(repository_name: str) -> str:
"""Read the default branch name from the environment.
First checks environment variable DEFAULT_BRANCH_PATH. If found, it
reads the contents of the file at DEFAULT_BRANCH_PATH and returns it.
Then checks environment varabile DEFAULT_BRANCH, and returns it if found.
"""
default_branch_path = os.getenv("DEFAULT_BRANCH_PATH")
if default_branch_path:
return Path(default_branch_path).read_text().strip()
# This default should be switched to "main" once we've migrated
# the majority of our repositories:
return os.getenv("DEFAULT_BRANCH", "master")