We have some common code used by several docs scripts. Migrate that
into the build-only shared code repository.
* Move lib/ansible/utils/_build_helpers.py to the directory for common
build code
* Migrate docs/bin/dump_config.py to a build-ansible subcommand
* Migrate dump_keywords to the build-ansible framework
* Make the script more maintainable by using functions and good
variable names
* Port to Python3 idioms
* Fix bug so that private attributes will be undocumented
* Move generate_man to a build-ansible subcommand
* Port plugin_formatter to a build-ansible subcommand
* Rework command_plugins so that docs scripts can target Python-3.4+ and
releng-only subcommands can use more recent versions of Python.
The architecture is now that command_plugins/* need to be importable
on Python-3.4. The init_parsers() method needs to run on Python-3.4.
But the main() method can utilize features of more recent Python as
long as it fits within those parameters.
* Update docs build requirements
Port the plugin_formatter to build-ansible framework
... | ... |
@@ -33,7 +33,7 @@ ASCII2MAN = @echo "ERROR: rst2man from docutils command is not installed but is |
33 | 33 |
endif |
34 | 34 |
|
35 | 35 |
PYTHON=python |
36 |
-GENERATE_CLI = $(PYTHON) docs/bin/generate_man.py |
|
36 |
+GENERATE_CLI = hacking/build-ansible.py generate-man |
|
37 | 37 |
|
38 | 38 |
SITELIB = $(shell $(PYTHON) -c "from distutils.sysconfig import get_python_lib; print get_python_lib()") |
39 | 39 |
|
40 | 40 |
deleted file mode 100755 |
... | ... |
@@ -1,74 +0,0 @@ |
1 |
-#!/usr/bin/env python |
|
2 |
- |
|
3 |
-import optparse |
|
4 |
-import os |
|
5 |
-import sys |
|
6 |
-import yaml |
|
7 |
- |
|
8 |
-from jinja2 import Environment, FileSystemLoader |
|
9 |
-from ansible.module_utils._text import to_bytes |
|
10 |
-from ansible.utils._build_helpers import update_file_if_different |
|
11 |
- |
|
12 |
-DEFAULT_TEMPLATE_FILE = 'config.rst.j2' |
|
13 |
- |
|
14 |
- |
|
15 |
-def generate_parser(): |
|
16 |
- p = optparse.OptionParser( |
|
17 |
- version='%prog 1.0', |
|
18 |
- usage='usage: %prog [options]', |
|
19 |
- description='Generate module documentation from metadata', |
|
20 |
- ) |
|
21 |
- p.add_option("-t", "--template-file", action="store", dest="template_file", default=DEFAULT_TEMPLATE_FILE, help="directory containing Jinja2 templates") |
|
22 |
- p.add_option("-o", "--output-dir", action="store", dest="output_dir", default='/tmp/', help="Output directory for rst files") |
|
23 |
- p.add_option("-d", "--docs-source", action="store", dest="docs", default=None, help="Source for attribute docs") |
|
24 |
- |
|
25 |
- (options, args) = p.parse_args() |
|
26 |
- |
|
27 |
- return p |
|
28 |
- |
|
29 |
- |
|
30 |
-def fix_description(config_options): |
|
31 |
- '''some descriptions are strings, some are lists. workaround it...''' |
|
32 |
- |
|
33 |
- for config_key in config_options: |
|
34 |
- description = config_options[config_key].get('description', []) |
|
35 |
- if isinstance(description, list): |
|
36 |
- desc_list = description |
|
37 |
- else: |
|
38 |
- desc_list = [description] |
|
39 |
- config_options[config_key]['description'] = desc_list |
|
40 |
- return config_options |
|
41 |
- |
|
42 |
- |
|
43 |
-def main(args): |
|
44 |
- |
|
45 |
- parser = generate_parser() |
|
46 |
- (options, args) = parser.parse_args() |
|
47 |
- |
|
48 |
- output_dir = os.path.abspath(options.output_dir) |
|
49 |
- template_file_full_path = os.path.abspath(options.template_file) |
|
50 |
- template_file = os.path.basename(template_file_full_path) |
|
51 |
- template_dir = os.path.dirname(os.path.abspath(template_file_full_path)) |
|
52 |
- |
|
53 |
- if options.docs: |
|
54 |
- with open(options.docs) as f: |
|
55 |
- docs = yaml.safe_load(f) |
|
56 |
- else: |
|
57 |
- docs = {} |
|
58 |
- |
|
59 |
- config_options = docs |
|
60 |
- config_options = fix_description(config_options) |
|
61 |
- |
|
62 |
- env = Environment(loader=FileSystemLoader(template_dir), trim_blocks=True,) |
|
63 |
- template = env.get_template(template_file) |
|
64 |
- output_name = os.path.join(output_dir, template_file.replace('.j2', '')) |
|
65 |
- temp_vars = {'config_options': config_options} |
|
66 |
- |
|
67 |
- data = to_bytes(template.render(temp_vars)) |
|
68 |
- update_file_if_different(output_name, data) |
|
69 |
- |
|
70 |
- return 0 |
|
71 |
- |
|
72 |
- |
|
73 |
-if __name__ == '__main__': |
|
74 |
- sys.exit(main(sys.argv[:])) |
75 | 1 |
deleted file mode 100755 |
... | ... |
@@ -1,84 +0,0 @@ |
1 |
-#!/usr/bin/env python |
|
2 |
- |
|
3 |
-import optparse |
|
4 |
-import re |
|
5 |
-from distutils.version import LooseVersion |
|
6 |
- |
|
7 |
-import jinja2 |
|
8 |
-import yaml |
|
9 |
-from jinja2 import Environment, FileSystemLoader |
|
10 |
- |
|
11 |
-from ansible.module_utils._text import to_bytes |
|
12 |
-from ansible.playbook import Play |
|
13 |
-from ansible.playbook.block import Block |
|
14 |
-from ansible.playbook.role import Role |
|
15 |
-from ansible.playbook.task import Task |
|
16 |
-from ansible.utils._build_helpers import update_file_if_different |
|
17 |
- |
|
18 |
-template_file = 'playbooks_keywords.rst.j2' |
|
19 |
-oblist = {} |
|
20 |
-clist = [] |
|
21 |
-class_list = [Play, Role, Block, Task] |
|
22 |
- |
|
23 |
-p = optparse.OptionParser( |
|
24 |
- version='%prog 1.0', |
|
25 |
- usage='usage: %prog [options]', |
|
26 |
- description='Generate playbook keyword documentation from code and descriptions', |
|
27 |
-) |
|
28 |
-p.add_option("-T", "--template-dir", action="store", dest="template_dir", default="../templates", help="directory containing Jinja2 templates") |
|
29 |
-p.add_option("-o", "--output-dir", action="store", dest="output_dir", default='/tmp/', help="Output directory for rst files") |
|
30 |
-p.add_option("-d", "--docs-source", action="store", dest="docs", default=None, help="Source for attribute docs") |
|
31 |
- |
|
32 |
-(options, args) = p.parse_args() |
|
33 |
- |
|
34 |
-for aclass in class_list: |
|
35 |
- aobj = aclass() |
|
36 |
- name = type(aobj).__name__ |
|
37 |
- |
|
38 |
- if options.docs: |
|
39 |
- with open(options.docs) as f: |
|
40 |
- docs = yaml.safe_load(f) |
|
41 |
- else: |
|
42 |
- docs = {} |
|
43 |
- |
|
44 |
- # build ordered list to loop over and dict with attributes |
|
45 |
- clist.append(name) |
|
46 |
- oblist[name] = dict((x, aobj.__dict__['_attributes'][x]) for x in aobj.__dict__['_attributes'] if 'private' not in x or not x.private) |
|
47 |
- |
|
48 |
- # pick up docs if they exist |
|
49 |
- for a in oblist[name]: |
|
50 |
- if a in docs: |
|
51 |
- oblist[name][a] = docs[a] |
|
52 |
- else: |
|
53 |
- # check if there is an alias, otherwise undocumented |
|
54 |
- alias = getattr(getattr(aobj, '_%s' % a), 'alias', None) |
|
55 |
- if alias and alias in docs: |
|
56 |
- oblist[name][alias] = docs[alias] |
|
57 |
- del oblist[name][a] |
|
58 |
- else: |
|
59 |
- oblist[name][a] = ' UNDOCUMENTED!! ' |
|
60 |
- |
|
61 |
- # loop is really with_ for users |
|
62 |
- if name == 'Task': |
|
63 |
- oblist[name]['with_<lookup_plugin>'] = 'The same as ``loop`` but magically adds the output of any lookup plugin to generate the item list.' |
|
64 |
- |
|
65 |
- # local_action is implicit with action |
|
66 |
- if 'action' in oblist[name]: |
|
67 |
- oblist[name]['local_action'] = 'Same as action but also implies ``delegate_to: localhost``' |
|
68 |
- |
|
69 |
- # remove unusable (used to be private?) |
|
70 |
- for nouse in ('loop_args', 'loop_with'): |
|
71 |
- if nouse in oblist[name]: |
|
72 |
- del oblist[name][nouse] |
|
73 |
- |
|
74 |
-env = Environment(loader=FileSystemLoader(options.template_dir), trim_blocks=True,) |
|
75 |
-template = env.get_template(template_file) |
|
76 |
-outputname = options.output_dir + template_file.replace('.j2', '') |
|
77 |
-tempvars = {'oblist': oblist, 'clist': clist} |
|
78 |
- |
|
79 |
-keyword_page = template.render(tempvars) |
|
80 |
-if LooseVersion(jinja2.__version__) < LooseVersion('2.10'): |
|
81 |
- # jinja2 < 2.10's indent filter indents blank lines. Cleanup |
|
82 |
- keyword_page = re.sub(' +\n', '\n', keyword_page) |
|
83 |
- |
|
84 |
-update_file_if_different(outputname, to_bytes(keyword_page)) |
85 | 1 |
deleted file mode 100755 |
... | ... |
@@ -1,290 +0,0 @@ |
1 |
-#!/usr/bin/env python |
|
2 |
- |
|
3 |
-import argparse |
|
4 |
-import os |
|
5 |
-import sys |
|
6 |
- |
|
7 |
-from jinja2 import Environment, FileSystemLoader |
|
8 |
- |
|
9 |
-from ansible.module_utils._text import to_bytes |
|
10 |
-from ansible.utils._build_helpers import update_file_if_different |
|
11 |
- |
|
12 |
- |
|
13 |
-def generate_parser(): |
|
14 |
- p = argparse.ArgumentParser( |
|
15 |
- description='Generate cli documentation from cli docstrings', |
|
16 |
- ) |
|
17 |
- |
|
18 |
- p.add_argument("-t", "--template-file", action="store", dest="template_file", default="../templates/man.j2", help="path to jinja2 template") |
|
19 |
- p.add_argument("-o", "--output-dir", action="store", dest="output_dir", default='/tmp/', help="Output directory for rst files") |
|
20 |
- p.add_argument("-f", "--output-format", action="store", dest="output_format", default='man', help="Output format for docs (the default 'man' or 'rst')") |
|
21 |
- p.add_argument('args', help='CLI module(s)', metavar='module', nargs='*') |
|
22 |
- return p |
|
23 |
- |
|
24 |
- |
|
25 |
-# from https://www.python.org/dev/peps/pep-0257/ |
|
26 |
-def trim_docstring(docstring): |
|
27 |
- if not docstring: |
|
28 |
- return '' |
|
29 |
- # Convert tabs to spaces (following the normal Python rules) |
|
30 |
- # and split into a list of lines: |
|
31 |
- lines = docstring.expandtabs().splitlines() |
|
32 |
- # Determine minimum indentation (first line doesn't count): |
|
33 |
- indent = sys.maxsize |
|
34 |
- for line in lines[1:]: |
|
35 |
- stripped = line.lstrip() |
|
36 |
- if stripped: |
|
37 |
- indent = min(indent, len(line) - len(stripped)) |
|
38 |
- # Remove indentation (first line is special): |
|
39 |
- trimmed = [lines[0].strip()] |
|
40 |
- if indent < sys.maxsize: |
|
41 |
- for line in lines[1:]: |
|
42 |
- trimmed.append(line[indent:].rstrip()) |
|
43 |
- # Strip off trailing and leading blank lines: |
|
44 |
- while trimmed and not trimmed[-1]: |
|
45 |
- trimmed.pop() |
|
46 |
- while trimmed and not trimmed[0]: |
|
47 |
- trimmed.pop(0) |
|
48 |
- # Return a single string: |
|
49 |
- return '\n'.join(trimmed) |
|
50 |
- |
|
51 |
- |
|
52 |
-def get_options(optlist): |
|
53 |
- ''' get actual options ''' |
|
54 |
- |
|
55 |
- opts = [] |
|
56 |
- for opt in optlist: |
|
57 |
- res = { |
|
58 |
- 'desc': opt.help, |
|
59 |
- 'options': opt.option_strings |
|
60 |
- } |
|
61 |
- if isinstance(opt, argparse._StoreAction): |
|
62 |
- res['arg'] = opt.dest.upper() |
|
63 |
- elif not res['options']: |
|
64 |
- continue |
|
65 |
- opts.append(res) |
|
66 |
- |
|
67 |
- return opts |
|
68 |
- |
|
69 |
- |
|
70 |
-def dedupe_groups(parser): |
|
71 |
- action_groups = [] |
|
72 |
- for action_group in parser._action_groups: |
|
73 |
- found = False |
|
74 |
- for a in action_groups: |
|
75 |
- if a._actions == action_group._actions: |
|
76 |
- found = True |
|
77 |
- break |
|
78 |
- if not found: |
|
79 |
- action_groups.append(action_group) |
|
80 |
- return action_groups |
|
81 |
- |
|
82 |
- |
|
83 |
-def get_option_groups(option_parser): |
|
84 |
- groups = [] |
|
85 |
- for action_group in dedupe_groups(option_parser)[1:]: |
|
86 |
- group_info = {} |
|
87 |
- group_info['desc'] = action_group.description |
|
88 |
- group_info['options'] = action_group._actions |
|
89 |
- group_info['group_obj'] = action_group |
|
90 |
- groups.append(group_info) |
|
91 |
- return groups |
|
92 |
- |
|
93 |
- |
|
94 |
-def opt_doc_list(parser): |
|
95 |
- ''' iterate over options lists ''' |
|
96 |
- |
|
97 |
- results = [] |
|
98 |
- for option_group in dedupe_groups(parser)[1:]: |
|
99 |
- results.extend(get_options(option_group._actions)) |
|
100 |
- |
|
101 |
- results.extend(get_options(parser._actions)) |
|
102 |
- |
|
103 |
- return results |
|
104 |
- |
|
105 |
- |
|
106 |
-# def opts_docs(cli, name): |
|
107 |
-def opts_docs(cli_class_name, cli_module_name): |
|
108 |
- ''' generate doc structure from options ''' |
|
109 |
- |
|
110 |
- cli_name = 'ansible-%s' % cli_module_name |
|
111 |
- if cli_module_name == 'adhoc': |
|
112 |
- cli_name = 'ansible' |
|
113 |
- |
|
114 |
- # WIth no action/subcommand |
|
115 |
- # shared opts set |
|
116 |
- # instantiate each cli and ask its options |
|
117 |
- cli_klass = getattr(__import__("ansible.cli.%s" % cli_module_name, |
|
118 |
- fromlist=[cli_class_name]), cli_class_name) |
|
119 |
- cli = cli_klass([cli_name]) |
|
120 |
- |
|
121 |
- # parse the common options |
|
122 |
- try: |
|
123 |
- cli.init_parser() |
|
124 |
- except Exception: |
|
125 |
- pass |
|
126 |
- |
|
127 |
- # base/common cli info |
|
128 |
- docs = { |
|
129 |
- 'cli': cli_module_name, |
|
130 |
- 'cli_name': cli_name, |
|
131 |
- 'usage': cli.parser.format_usage(), |
|
132 |
- 'short_desc': cli.parser.description, |
|
133 |
- 'long_desc': trim_docstring(cli.__doc__), |
|
134 |
- 'actions': {}, |
|
135 |
- 'content_depth': 2, |
|
136 |
- } |
|
137 |
- option_info = {'option_names': [], |
|
138 |
- 'options': [], |
|
139 |
- 'groups': []} |
|
140 |
- |
|
141 |
- for extras in ('ARGUMENTS'): |
|
142 |
- if hasattr(cli, extras): |
|
143 |
- docs[extras.lower()] = getattr(cli, extras) |
|
144 |
- |
|
145 |
- common_opts = opt_doc_list(cli.parser) |
|
146 |
- groups_info = get_option_groups(cli.parser) |
|
147 |
- shared_opt_names = [] |
|
148 |
- for opt in common_opts: |
|
149 |
- shared_opt_names.extend(opt.get('options', [])) |
|
150 |
- |
|
151 |
- option_info['options'] = common_opts |
|
152 |
- option_info['option_names'] = shared_opt_names |
|
153 |
- |
|
154 |
- option_info['groups'].extend(groups_info) |
|
155 |
- |
|
156 |
- docs.update(option_info) |
|
157 |
- |
|
158 |
- # now for each action/subcommand |
|
159 |
- # force populate parser with per action options |
|
160 |
- |
|
161 |
- def get_actions(parser, docs): |
|
162 |
- # use class attrs not the attrs on a instance (not that it matters here...) |
|
163 |
- try: |
|
164 |
- subparser = parser._subparsers._group_actions[0].choices |
|
165 |
- except AttributeError: |
|
166 |
- subparser = {} |
|
167 |
- |
|
168 |
- depth = 0 |
|
169 |
- |
|
170 |
- for action, parser in subparser.items(): |
|
171 |
- action_info = {'option_names': [], |
|
172 |
- 'options': [], |
|
173 |
- 'actions': {}} |
|
174 |
- # docs['actions'][action] = {} |
|
175 |
- # docs['actions'][action]['name'] = action |
|
176 |
- action_info['name'] = action |
|
177 |
- action_info['desc'] = trim_docstring(getattr(cli, 'execute_%s' % action).__doc__) |
|
178 |
- |
|
179 |
- # docs['actions'][action]['desc'] = getattr(cli, 'execute_%s' % action).__doc__.strip() |
|
180 |
- action_doc_list = opt_doc_list(parser) |
|
181 |
- |
|
182 |
- uncommon_options = [] |
|
183 |
- for action_doc in action_doc_list: |
|
184 |
- # uncommon_options = [] |
|
185 |
- |
|
186 |
- option_aliases = action_doc.get('options', []) |
|
187 |
- for option_alias in option_aliases: |
|
188 |
- |
|
189 |
- if option_alias in shared_opt_names: |
|
190 |
- continue |
|
191 |
- |
|
192 |
- # TODO: use set |
|
193 |
- if option_alias not in action_info['option_names']: |
|
194 |
- action_info['option_names'].append(option_alias) |
|
195 |
- |
|
196 |
- if action_doc in action_info['options']: |
|
197 |
- continue |
|
198 |
- |
|
199 |
- uncommon_options.append(action_doc) |
|
200 |
- |
|
201 |
- action_info['options'] = uncommon_options |
|
202 |
- |
|
203 |
- depth = 1 + get_actions(parser, action_info) |
|
204 |
- |
|
205 |
- docs['actions'][action] = action_info |
|
206 |
- |
|
207 |
- return depth |
|
208 |
- |
|
209 |
- action_depth = get_actions(cli.parser, docs) |
|
210 |
- docs['content_depth'] = action_depth + 1 |
|
211 |
- |
|
212 |
- docs['options'] = opt_doc_list(cli.parser) |
|
213 |
- return docs |
|
214 |
- |
|
215 |
- |
|
216 |
-if __name__ == '__main__': |
|
217 |
- |
|
218 |
- parser = generate_parser() |
|
219 |
- |
|
220 |
- options = parser.parse_args() |
|
221 |
- |
|
222 |
- template_file = options.template_file |
|
223 |
- template_path = os.path.expanduser(template_file) |
|
224 |
- template_dir = os.path.abspath(os.path.dirname(template_path)) |
|
225 |
- template_basename = os.path.basename(template_file) |
|
226 |
- |
|
227 |
- output_dir = os.path.abspath(options.output_dir) |
|
228 |
- output_format = options.output_format |
|
229 |
- |
|
230 |
- cli_modules = options.args |
|
231 |
- |
|
232 |
- # various cli parsing things checks sys.argv if the 'args' that are passed in are [] |
|
233 |
- # so just remove any args so the cli modules dont try to parse them resulting in warnings |
|
234 |
- sys.argv = [sys.argv[0]] |
|
235 |
- # need to be in right dir |
|
236 |
- os.chdir(os.path.dirname(__file__)) |
|
237 |
- |
|
238 |
- allvars = {} |
|
239 |
- output = {} |
|
240 |
- cli_list = [] |
|
241 |
- cli_bin_name_list = [] |
|
242 |
- |
|
243 |
- # for binary in os.listdir('../../lib/ansible/cli'): |
|
244 |
- for cli_module_name in cli_modules: |
|
245 |
- binary = os.path.basename(os.path.expanduser(cli_module_name)) |
|
246 |
- |
|
247 |
- if not binary.endswith('.py'): |
|
248 |
- continue |
|
249 |
- elif binary == '__init__.py': |
|
250 |
- continue |
|
251 |
- |
|
252 |
- cli_name = os.path.splitext(binary)[0] |
|
253 |
- |
|
254 |
- if cli_name == 'adhoc': |
|
255 |
- cli_class_name = 'AdHocCLI' |
|
256 |
- # myclass = 'AdHocCLI' |
|
257 |
- output[cli_name] = 'ansible.1.rst.in' |
|
258 |
- cli_bin_name = 'ansible' |
|
259 |
- else: |
|
260 |
- # myclass = "%sCLI" % libname.capitalize() |
|
261 |
- cli_class_name = "%sCLI" % cli_name.capitalize() |
|
262 |
- output[cli_name] = 'ansible-%s.1.rst.in' % cli_name |
|
263 |
- cli_bin_name = 'ansible-%s' % cli_name |
|
264 |
- |
|
265 |
- # FIXME: |
|
266 |
- allvars[cli_name] = opts_docs(cli_class_name, cli_name) |
|
267 |
- cli_bin_name_list.append(cli_bin_name) |
|
268 |
- |
|
269 |
- cli_list = allvars.keys() |
|
270 |
- |
|
271 |
- doc_name_formats = {'man': '%s.1.rst.in', |
|
272 |
- 'rst': '%s.rst'} |
|
273 |
- |
|
274 |
- for cli_name in cli_list: |
|
275 |
- |
|
276 |
- # template it! |
|
277 |
- env = Environment(loader=FileSystemLoader(template_dir)) |
|
278 |
- template = env.get_template(template_basename) |
|
279 |
- |
|
280 |
- # add rest to vars |
|
281 |
- tvars = allvars[cli_name] |
|
282 |
- tvars['cli_list'] = cli_list |
|
283 |
- tvars['cli_bin_name_list'] = cli_bin_name_list |
|
284 |
- tvars['cli'] = cli_name |
|
285 |
- if '-i' in tvars['options']: |
|
286 |
- print('uses inventory') |
|
287 |
- |
|
288 |
- manpage = template.render(tvars) |
|
289 |
- filename = os.path.join(output_dir, doc_name_formats[output_format] % tvars['cli_name']) |
|
290 |
- update_file_if_different(filename, to_bytes(manpage)) |
291 | 1 |
deleted file mode 100755 |
... | ... |
@@ -1,815 +0,0 @@ |
1 |
-#!/usr/bin/env python |
|
2 |
-# Copyright: (c) 2012, Jan-Piet Mens <jpmens () gmail.com> |
|
3 |
-# Copyright: (c) 2012-2014, Michael DeHaan <michael@ansible.com> and others |
|
4 |
-# Copyright: (c) 2017, Ansible Project |
|
5 |
- |
|
6 |
-# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) |
|
7 |
- |
|
8 |
-from __future__ import absolute_import, division, print_function |
|
9 |
-__metaclass__ = type |
|
10 |
- |
|
11 |
- |
|
12 |
-import datetime |
|
13 |
-import glob |
|
14 |
-import json |
|
15 |
-import optparse |
|
16 |
-import os |
|
17 |
-import re |
|
18 |
-import sys |
|
19 |
-import warnings |
|
20 |
-from collections import defaultdict |
|
21 |
-from copy import deepcopy |
|
22 |
-from distutils.version import LooseVersion |
|
23 |
-from functools import partial |
|
24 |
-from pprint import PrettyPrinter |
|
25 |
- |
|
26 |
-try: |
|
27 |
- from html import escape as html_escape |
|
28 |
-except ImportError: |
|
29 |
- # Python-3.2 or later |
|
30 |
- import cgi |
|
31 |
- |
|
32 |
- def html_escape(text, quote=True): |
|
33 |
- return cgi.escape(text, quote) |
|
34 |
- |
|
35 |
-import jinja2 |
|
36 |
-import yaml |
|
37 |
-from jinja2 import Environment, FileSystemLoader |
|
38 |
-from jinja2.runtime import Undefined |
|
39 |
- |
|
40 |
-from ansible.errors import AnsibleError |
|
41 |
-from ansible.module_utils._text import to_bytes, to_text |
|
42 |
-from ansible.module_utils.common.collections import is_sequence |
|
43 |
-from ansible.module_utils.parsing.convert_bool import boolean |
|
44 |
-from ansible.module_utils.six import iteritems, string_types |
|
45 |
-from ansible.plugins.loader import fragment_loader |
|
46 |
-from ansible.utils import plugin_docs |
|
47 |
-from ansible.utils.display import Display |
|
48 |
-from ansible.utils._build_helpers import update_file_if_different |
|
49 |
- |
|
50 |
- |
|
51 |
-##################################################################################### |
|
52 |
-# constants and paths |
|
53 |
- |
|
54 |
-# if a module is added in a version of Ansible older than this, don't print the version added information |
|
55 |
-# in the module documentation because everyone is assumed to be running something newer than this already. |
|
56 |
-TOO_OLD_TO_BE_NOTABLE = 2.3 |
|
57 |
- |
|
58 |
-# Get parent directory of the directory this script lives in |
|
59 |
-MODULEDIR = os.path.abspath(os.path.join( |
|
60 |
- os.path.dirname(os.path.realpath(__file__)), os.pardir, 'lib', 'ansible', 'modules' |
|
61 |
-)) |
|
62 |
- |
|
63 |
-# The name of the DOCUMENTATION template |
|
64 |
-EXAMPLE_YAML = os.path.abspath(os.path.join( |
|
65 |
- os.path.dirname(os.path.realpath(__file__)), os.pardir, 'examples', 'DOCUMENTATION.yml' |
|
66 |
-)) |
|
67 |
- |
|
68 |
-_ITALIC = re.compile(r"I\(([^)]+)\)") |
|
69 |
-_BOLD = re.compile(r"B\(([^)]+)\)") |
|
70 |
-_MODULE = re.compile(r"M\(([^)]+)\)") |
|
71 |
-_URL = re.compile(r"U\(([^)]+)\)") |
|
72 |
-_LINK = re.compile(r"L\(([^)]+),([^)]+)\)") |
|
73 |
-_CONST = re.compile(r"C\(([^)]+)\)") |
|
74 |
-_RULER = re.compile(r"HORIZONTALLINE") |
|
75 |
- |
|
76 |
-DEPRECATED = b" (D)" |
|
77 |
- |
|
78 |
-pp = PrettyPrinter() |
|
79 |
-display = Display() |
|
80 |
- |
|
81 |
- |
|
82 |
-# kludge_ns gives us a kludgey way to set variables inside of loops that need to be visible outside |
|
83 |
-# the loop. We can get rid of this when we no longer need to build docs with less than Jinja-2.10 |
|
84 |
-# http://jinja.pocoo.org/docs/2.10/templates/#assignments |
|
85 |
-# With Jinja-2.10 we can use jinja2's namespace feature, restoring the namespace template portion |
|
86 |
-# of: fa5c0282a4816c4dd48e80b983ffc1e14506a1f5 |
|
87 |
-NS_MAP = {} |
|
88 |
- |
|
89 |
- |
|
90 |
-def to_kludge_ns(key, value): |
|
91 |
- NS_MAP[key] = value |
|
92 |
- return "" |
|
93 |
- |
|
94 |
- |
|
95 |
-def from_kludge_ns(key): |
|
96 |
- return NS_MAP[key] |
|
97 |
- |
|
98 |
- |
|
99 |
-# The max filter was added in Jinja2-2.10. Until we can require that version, use this |
|
100 |
-def do_max(seq): |
|
101 |
- return max(seq) |
|
102 |
- |
|
103 |
- |
|
104 |
-def rst_ify(text): |
|
105 |
- ''' convert symbols like I(this is in italics) to valid restructured text ''' |
|
106 |
- |
|
107 |
- try: |
|
108 |
- t = _ITALIC.sub(r"*\1*", text) |
|
109 |
- t = _BOLD.sub(r"**\1**", t) |
|
110 |
- t = _MODULE.sub(r":ref:`\1 <\1_module>`", t) |
|
111 |
- t = _LINK.sub(r"`\1 <\2>`_", t) |
|
112 |
- t = _URL.sub(r"\1", t) |
|
113 |
- t = _CONST.sub(r"``\1``", t) |
|
114 |
- t = _RULER.sub(r"------------", t) |
|
115 |
- except Exception as e: |
|
116 |
- raise AnsibleError("Could not process (%s) : %s" % (text, e)) |
|
117 |
- |
|
118 |
- return t |
|
119 |
- |
|
120 |
- |
|
121 |
-def html_ify(text): |
|
122 |
- ''' convert symbols like I(this is in italics) to valid HTML ''' |
|
123 |
- |
|
124 |
- if not isinstance(text, string_types): |
|
125 |
- text = to_text(text) |
|
126 |
- |
|
127 |
- t = html_escape(text) |
|
128 |
- t = _ITALIC.sub(r"<em>\1</em>", t) |
|
129 |
- t = _BOLD.sub(r"<b>\1</b>", t) |
|
130 |
- t = _MODULE.sub(r"<span class='module'>\1</span>", t) |
|
131 |
- t = _URL.sub(r"<a href='\1'>\1</a>", t) |
|
132 |
- t = _LINK.sub(r"<a href='\2'>\1</a>", t) |
|
133 |
- t = _CONST.sub(r"<code>\1</code>", t) |
|
134 |
- t = _RULER.sub(r"<hr/>", t) |
|
135 |
- |
|
136 |
- return t.strip() |
|
137 |
- |
|
138 |
- |
|
139 |
-def rst_fmt(text, fmt): |
|
140 |
- ''' helper for Jinja2 to do format strings ''' |
|
141 |
- |
|
142 |
- return fmt % (text) |
|
143 |
- |
|
144 |
- |
|
145 |
-def rst_xline(width, char="="): |
|
146 |
- ''' return a restructured text line of a given length ''' |
|
147 |
- |
|
148 |
- return char * width |
|
149 |
- |
|
150 |
- |
|
151 |
-def documented_type(text): |
|
152 |
- ''' Convert any python type to a type for documentation ''' |
|
153 |
- |
|
154 |
- if isinstance(text, Undefined): |
|
155 |
- return '-' |
|
156 |
- if text == 'str': |
|
157 |
- return 'string' |
|
158 |
- if text == 'bool': |
|
159 |
- return 'boolean' |
|
160 |
- if text == 'int': |
|
161 |
- return 'integer' |
|
162 |
- if text == 'dict': |
|
163 |
- return 'dictionary' |
|
164 |
- return text |
|
165 |
- |
|
166 |
- |
|
167 |
-test_list = partial(is_sequence, include_strings=False) |
|
168 |
- |
|
169 |
- |
|
170 |
-def normalize_options(value): |
|
171 |
- """Normalize boolean option value.""" |
|
172 |
- |
|
173 |
- if value.get('type') == 'bool' and 'default' in value: |
|
174 |
- try: |
|
175 |
- value['default'] = boolean(value['default'], strict=True) |
|
176 |
- except TypeError: |
|
177 |
- pass |
|
178 |
- return value |
|
179 |
- |
|
180 |
- |
|
181 |
-def write_data(text, output_dir, outputname, module=None): |
|
182 |
- ''' dumps module output to a file or the screen, as requested ''' |
|
183 |
- |
|
184 |
- if output_dir is not None: |
|
185 |
- if module: |
|
186 |
- outputname = outputname % module |
|
187 |
- |
|
188 |
- if not os.path.exists(output_dir): |
|
189 |
- os.makedirs(output_dir) |
|
190 |
- fname = os.path.join(output_dir, outputname) |
|
191 |
- fname = fname.replace(".py", "") |
|
192 |
- |
|
193 |
- try: |
|
194 |
- updated = update_file_if_different(fname, to_bytes(text)) |
|
195 |
- except Exception as e: |
|
196 |
- display.display("while rendering %s, an error occured: %s" % (module, e)) |
|
197 |
- raise |
|
198 |
- if updated: |
|
199 |
- display.display("rendering: %s" % module) |
|
200 |
- else: |
|
201 |
- print(text) |
|
202 |
- |
|
203 |
- |
|
204 |
-IS_STDOUT_TTY = sys.stdout.isatty() |
|
205 |
- |
|
206 |
- |
|
207 |
-def show_progress(progress): |
|
208 |
- '''Show a little process indicator.''' |
|
209 |
- if IS_STDOUT_TTY: |
|
210 |
- sys.stdout.write('\r%s\r' % ("-/|\\"[progress % 4])) |
|
211 |
- sys.stdout.flush() |
|
212 |
- |
|
213 |
- |
|
214 |
-def get_plugin_info(module_dir, limit_to=None, verbose=False): |
|
215 |
- ''' |
|
216 |
- Returns information about plugins and the categories that they belong to |
|
217 |
- |
|
218 |
- :arg module_dir: file system path to the top of the plugin directory |
|
219 |
- :kwarg limit_to: If given, this is a list of plugin names to |
|
220 |
- generate information for. All other plugins will be ignored. |
|
221 |
- :returns: Tuple of two dicts containing module_info, categories, and |
|
222 |
- aliases and a set listing deprecated modules: |
|
223 |
- |
|
224 |
- :module_info: mapping of module names to information about them. The fields of the dict are: |
|
225 |
- |
|
226 |
- :path: filesystem path to the module |
|
227 |
- :deprecated: boolean. True means the module is deprecated otherwise not. |
|
228 |
- :aliases: set of aliases to this module name |
|
229 |
- :metadata: The modules metadata (as recorded in the module) |
|
230 |
- :doc: The documentation structure for the module |
|
231 |
- :seealso: The list of dictionaries with references to related subjects |
|
232 |
- :examples: The module's examples |
|
233 |
- :returndocs: The module's returndocs |
|
234 |
- |
|
235 |
- :categories: maps category names to a dict. The dict contains at |
|
236 |
- least one key, '_modules' which contains a list of module names in |
|
237 |
- that category. Any other keys in the dict are subcategories with |
|
238 |
- the same structure. |
|
239 |
- |
|
240 |
- ''' |
|
241 |
- |
|
242 |
- categories = dict() |
|
243 |
- module_info = defaultdict(dict) |
|
244 |
- |
|
245 |
- # * windows powershell modules have documentation stubs in python docstring |
|
246 |
- # format (they are not executed) so skip the ps1 format files |
|
247 |
- # * One glob level for every module level that we're going to traverse |
|
248 |
- files = ( |
|
249 |
- glob.glob("%s/*.py" % module_dir) + |
|
250 |
- glob.glob("%s/*/*.py" % module_dir) + |
|
251 |
- glob.glob("%s/*/*/*.py" % module_dir) + |
|
252 |
- glob.glob("%s/*/*/*/*.py" % module_dir) |
|
253 |
- ) |
|
254 |
- |
|
255 |
- module_index = 0 |
|
256 |
- for module_path in files: |
|
257 |
- # Do not list __init__.py files |
|
258 |
- if module_path.endswith('__init__.py'): |
|
259 |
- continue |
|
260 |
- |
|
261 |
- # Do not list blacklisted modules |
|
262 |
- module = os.path.splitext(os.path.basename(module_path))[0] |
|
263 |
- if module in plugin_docs.BLACKLIST['MODULE'] or module == 'base': |
|
264 |
- continue |
|
265 |
- |
|
266 |
- # If requested, limit module documentation building only to passed-in |
|
267 |
- # modules. |
|
268 |
- if limit_to is not None and module.lower() not in limit_to: |
|
269 |
- continue |
|
270 |
- |
|
271 |
- deprecated = False |
|
272 |
- if module.startswith("_"): |
|
273 |
- if os.path.islink(module_path): |
|
274 |
- # Handle aliases |
|
275 |
- source = os.path.splitext(os.path.basename(os.path.realpath(module_path)))[0] |
|
276 |
- module = module.replace("_", "", 1) |
|
277 |
- if source.startswith("_"): |
|
278 |
- source = source.replace("_", "", 1) |
|
279 |
- aliases = module_info[source].get('aliases', set()) |
|
280 |
- aliases.add(module) |
|
281 |
- aliases_deprecated = module_info[source].get('aliases_deprecated', set()) |
|
282 |
- aliases_deprecated.add(module) |
|
283 |
- # In case we just created this via get()'s fallback |
|
284 |
- module_info[source]['aliases'] = aliases |
|
285 |
- module_info[source]['aliases_deprecated'] = aliases_deprecated |
|
286 |
- continue |
|
287 |
- else: |
|
288 |
- # Handle deprecations |
|
289 |
- module = module.replace("_", "", 1) |
|
290 |
- deprecated = True |
|
291 |
- |
|
292 |
- # |
|
293 |
- # Regular module to process |
|
294 |
- # |
|
295 |
- |
|
296 |
- module_index += 1 |
|
297 |
- show_progress(module_index) |
|
298 |
- |
|
299 |
- # use ansible core library to parse out doc metadata YAML and plaintext examples |
|
300 |
- doc, examples, returndocs, metadata = plugin_docs.get_docstring(module_path, fragment_loader, verbose=verbose) |
|
301 |
- |
|
302 |
- if metadata and 'removed' in metadata.get('status', []): |
|
303 |
- continue |
|
304 |
- |
|
305 |
- category = categories |
|
306 |
- |
|
307 |
- # Start at the second directory because we don't want the "vendor" |
|
308 |
- mod_path_only = os.path.dirname(module_path[len(module_dir):]) |
|
309 |
- |
|
310 |
- # Find the subcategory for each module |
|
311 |
- relative_dir = mod_path_only.split('/')[1] |
|
312 |
- sub_category = mod_path_only[len(relative_dir) + 2:] |
|
313 |
- |
|
314 |
- primary_category = '' |
|
315 |
- module_categories = [] |
|
316 |
- # build up the categories that this module belongs to |
|
317 |
- for new_cat in mod_path_only.split('/')[1:]: |
|
318 |
- if new_cat not in category: |
|
319 |
- category[new_cat] = dict() |
|
320 |
- category[new_cat]['_modules'] = [] |
|
321 |
- module_categories.append(new_cat) |
|
322 |
- category = category[new_cat] |
|
323 |
- |
|
324 |
- category['_modules'].append(module) |
|
325 |
- |
|
326 |
- # the category we will use in links (so list_of_all_plugins can point to plugins/action_plugins/*' |
|
327 |
- if module_categories: |
|
328 |
- primary_category = module_categories[0] |
|
329 |
- |
|
330 |
- if not doc: |
|
331 |
- display.error("*** ERROR: DOCUMENTATION section missing for %s. ***" % module_path) |
|
332 |
- continue |
|
333 |
- |
|
334 |
- if 'options' in doc and doc['options'] is None: |
|
335 |
- display.error("*** ERROR: DOCUMENTATION.options must be a dictionary/hash when used. ***") |
|
336 |
- pos = getattr(doc, "ansible_pos", None) |
|
337 |
- if pos is not None: |
|
338 |
- display.error("Module position: %s, %d, %d" % doc.ansible_pos) |
|
339 |
- doc['options'] = dict() |
|
340 |
- |
|
341 |
- for key, opt in doc.get('options', {}).items(): |
|
342 |
- doc['options'][key] = normalize_options(opt) |
|
343 |
- |
|
344 |
- # save all the information |
|
345 |
- module_info[module] = {'path': module_path, |
|
346 |
- 'source': os.path.relpath(module_path, module_dir), |
|
347 |
- 'deprecated': deprecated, |
|
348 |
- 'aliases': module_info[module].get('aliases', set()), |
|
349 |
- 'aliases_deprecated': module_info[module].get('aliases_deprecated', set()), |
|
350 |
- 'metadata': metadata, |
|
351 |
- 'doc': doc, |
|
352 |
- 'examples': examples, |
|
353 |
- 'returndocs': returndocs, |
|
354 |
- 'categories': module_categories, |
|
355 |
- 'primary_category': primary_category, |
|
356 |
- 'sub_category': sub_category, |
|
357 |
- } |
|
358 |
- |
|
359 |
- # keep module tests out of becoming module docs |
|
360 |
- if 'test' in categories: |
|
361 |
- del categories['test'] |
|
362 |
- |
|
363 |
- return module_info, categories |
|
364 |
- |
|
365 |
- |
|
366 |
-def generate_parser(): |
|
367 |
- ''' generate an optparse parser ''' |
|
368 |
- |
|
369 |
- p = optparse.OptionParser( |
|
370 |
- version='%prog 1.0', |
|
371 |
- usage='usage: %prog [options] arg1 arg2', |
|
372 |
- description='Generate module documentation from metadata', |
|
373 |
- ) |
|
374 |
- |
|
375 |
- p.add_option("-A", "--ansible-version", action="store", dest="ansible_version", default="unknown", help="Ansible version number") |
|
376 |
- p.add_option("-M", "--module-dir", action="store", dest="module_dir", default=MODULEDIR, help="Ansible library path") |
|
377 |
- p.add_option("-P", "--plugin-type", action="store", dest="plugin_type", default='module', help="The type of plugin (module, lookup, etc)") |
|
378 |
- p.add_option("-T", "--template-dir", action="append", dest="template_dir", help="directory containing Jinja2 templates") |
|
379 |
- p.add_option("-t", "--type", action='store', dest='type', choices=['rst'], default='rst', help="Document type") |
|
380 |
- p.add_option("-o", "--output-dir", action="store", dest="output_dir", default=None, help="Output directory for module files") |
|
381 |
- p.add_option("-I", "--includes-file", action="store", dest="includes_file", default=None, help="Create a file containing list of processed modules") |
|
382 |
- p.add_option("-l", "--limit-to-modules", '--limit-to', action="store", dest="limit_to", default=None, |
|
383 |
- help="Limit building module documentation to comma-separated list of plugins. Specify non-existing plugin name for no plugins.") |
|
384 |
- p.add_option('-V', action='version', help='Show version number and exit') |
|
385 |
- p.add_option('-v', '--verbose', dest='verbosity', default=0, action="count", help="verbose mode (increase number of 'v's for more)") |
|
386 |
- return p |
|
387 |
- |
|
388 |
- |
|
389 |
-def jinja2_environment(template_dir, typ, plugin_type): |
|
390 |
- |
|
391 |
- env = Environment(loader=FileSystemLoader(template_dir), |
|
392 |
- variable_start_string="@{", |
|
393 |
- variable_end_string="}@", |
|
394 |
- trim_blocks=True) |
|
395 |
- env.globals['xline'] = rst_xline |
|
396 |
- |
|
397 |
- # Can be removed (and template switched to use namespace) when we no longer need to build |
|
398 |
- # with <Jinja-2.10 |
|
399 |
- env.globals['to_kludge_ns'] = to_kludge_ns |
|
400 |
- env.globals['from_kludge_ns'] = from_kludge_ns |
|
401 |
- if 'max' not in env.filters: |
|
402 |
- # Jinja < 2.10 |
|
403 |
- env.filters['max'] = do_max |
|
404 |
- |
|
405 |
- if 'tojson' not in env.filters: |
|
406 |
- # Jinja < 2.9 |
|
407 |
- env.filters['tojson'] = json.dumps |
|
408 |
- |
|
409 |
- templates = {} |
|
410 |
- if typ == 'rst': |
|
411 |
- env.filters['rst_ify'] = rst_ify |
|
412 |
- env.filters['html_ify'] = html_ify |
|
413 |
- env.filters['fmt'] = rst_fmt |
|
414 |
- env.filters['xline'] = rst_xline |
|
415 |
- env.filters['documented_type'] = documented_type |
|
416 |
- env.tests['list'] = test_list |
|
417 |
- templates['plugin'] = env.get_template('plugin.rst.j2') |
|
418 |
- templates['plugin_deprecation_stub'] = env.get_template('plugin_deprecation_stub.rst.j2') |
|
419 |
- |
|
420 |
- if plugin_type == 'module': |
|
421 |
- name = 'modules' |
|
422 |
- else: |
|
423 |
- name = 'plugins' |
|
424 |
- |
|
425 |
- templates['category_list'] = env.get_template('%s_by_category.rst.j2' % name) |
|
426 |
- templates['support_list'] = env.get_template('%s_by_support.rst.j2' % name) |
|
427 |
- templates['list_of_CATEGORY_modules'] = env.get_template('list_of_CATEGORY_%s.rst.j2' % name) |
|
428 |
- else: |
|
429 |
- raise Exception("Unsupported format type: %s" % typ) |
|
430 |
- |
|
431 |
- return templates |
|
432 |
- |
|
433 |
- |
|
434 |
-def too_old(added): |
|
435 |
- if not added: |
|
436 |
- return False |
|
437 |
- try: |
|
438 |
- added_tokens = str(added).split(".") |
|
439 |
- readded = added_tokens[0] + "." + added_tokens[1] |
|
440 |
- added_float = float(readded) |
|
441 |
- except ValueError as e: |
|
442 |
- warnings.warn("Could not parse %s: %s" % (added, str(e))) |
|
443 |
- return False |
|
444 |
- return added_float < TOO_OLD_TO_BE_NOTABLE |
|
445 |
- |
|
446 |
- |
|
447 |
-def process_plugins(module_map, templates, outputname, output_dir, ansible_version, plugin_type): |
|
448 |
- for module_index, module in enumerate(module_map): |
|
449 |
- |
|
450 |
- show_progress(module_index) |
|
451 |
- |
|
452 |
- fname = module_map[module]['path'] |
|
453 |
- display.vvvvv(pp.pformat(('process_plugins info: ', module_map[module]))) |
|
454 |
- |
|
455 |
- # crash if module is missing documentation and not explicitly hidden from docs index |
|
456 |
- if module_map[module]['doc'] is None: |
|
457 |
- display.error("%s MISSING DOCUMENTATION" % (fname,)) |
|
458 |
- _doc = {plugin_type: module, |
|
459 |
- 'version_added': '2.4', |
|
460 |
- 'filename': fname} |
|
461 |
- module_map[module]['doc'] = _doc |
|
462 |
- # continue |
|
463 |
- |
|
464 |
- # Going to reference this heavily so make a short name to reference it by |
|
465 |
- doc = module_map[module]['doc'] |
|
466 |
- display.vvvvv(pp.pformat(('process_plugins doc: ', doc))) |
|
467 |
- |
|
468 |
- # add some defaults for plugins that dont have most of the info |
|
469 |
- doc['module'] = doc.get('module', module) |
|
470 |
- doc['version_added'] = doc.get('version_added', 'historical') |
|
471 |
- |
|
472 |
- doc['plugin_type'] = plugin_type |
|
473 |
- |
|
474 |
- if module_map[module]['deprecated'] and 'deprecated' not in doc: |
|
475 |
- display.warning("%s PLUGIN MISSING DEPRECATION DOCUMENTATION: %s" % (fname, 'deprecated')) |
|
476 |
- |
|
477 |
- required_fields = ('short_description',) |
|
478 |
- for field in required_fields: |
|
479 |
- if field not in doc: |
|
480 |
- display.warning("%s PLUGIN MISSING field '%s'" % (fname, field)) |
|
481 |
- |
|
482 |
- not_nullable_fields = ('short_description',) |
|
483 |
- for field in not_nullable_fields: |
|
484 |
- if field in doc and doc[field] in (None, ''): |
|
485 |
- print("%s: WARNING: MODULE field '%s' DOCUMENTATION is null/empty value=%s" % (fname, field, doc[field])) |
|
486 |
- |
|
487 |
- if 'version_added' not in doc: |
|
488 |
- display.error("*** ERROR: missing version_added in: %s ***\n" % module) |
|
489 |
- |
|
490 |
- # |
|
491 |
- # The present template gets everything from doc so we spend most of this |
|
492 |
- # function moving data into doc for the template to reference |
|
493 |
- # |
|
494 |
- |
|
495 |
- if module_map[module]['aliases']: |
|
496 |
- doc['aliases'] = module_map[module]['aliases'] |
|
497 |
- |
|
498 |
- # don't show version added information if it's too old to be called out |
|
499 |
- added = 0 |
|
500 |
- if doc['version_added'] == 'historical': |
|
501 |
- del doc['version_added'] |
|
502 |
- else: |
|
503 |
- added = doc['version_added'] |
|
504 |
- |
|
505 |
- # Strip old version_added for the module |
|
506 |
- if too_old(added): |
|
507 |
- del doc['version_added'] |
|
508 |
- |
|
509 |
- option_names = [] |
|
510 |
- |
|
511 |
- if 'options' in doc and doc['options']: |
|
512 |
- for (k, v) in iteritems(doc['options']): |
|
513 |
- # Error out if there's no description |
|
514 |
- if 'description' not in doc['options'][k]: |
|
515 |
- raise AnsibleError("Missing required description for parameter '%s' in '%s' " % (k, module)) |
|
516 |
- |
|
517 |
- # Error out if required isn't a boolean (people have been putting |
|
518 |
- # information on when something is required in here. Those need |
|
519 |
- # to go in the description instead). |
|
520 |
- required_value = doc['options'][k].get('required', False) |
|
521 |
- if not isinstance(required_value, bool): |
|
522 |
- raise AnsibleError("Invalid required value '%s' for parameter '%s' in '%s' (must be truthy)" % (required_value, k, module)) |
|
523 |
- |
|
524 |
- # Strip old version_added information for options |
|
525 |
- if 'version_added' in doc['options'][k] and too_old(doc['options'][k]['version_added']): |
|
526 |
- del doc['options'][k]['version_added'] |
|
527 |
- |
|
528 |
- # Make sure description is a list of lines for later formatting |
|
529 |
- if not isinstance(doc['options'][k]['description'], list): |
|
530 |
- doc['options'][k]['description'] = [doc['options'][k]['description']] |
|
531 |
- |
|
532 |
- option_names.append(k) |
|
533 |
- |
|
534 |
- option_names.sort() |
|
535 |
- |
|
536 |
- doc['option_keys'] = option_names |
|
537 |
- doc['filename'] = fname |
|
538 |
- doc['source'] = module_map[module]['source'] |
|
539 |
- doc['docuri'] = doc['module'].replace('_', '-') |
|
540 |
- doc['now_date'] = datetime.date.today().strftime('%Y-%m-%d') |
|
541 |
- doc['ansible_version'] = ansible_version |
|
542 |
- |
|
543 |
- # check the 'deprecated' field in doc. We expect a dict potentially with 'why', 'version', and 'alternative' fields |
|
544 |
- # examples = module_map[module]['examples'] |
|
545 |
- # print('\n\n%s: type of examples: %s\n' % (module, type(examples))) |
|
546 |
- # if examples and not isinstance(examples, (str, unicode, list)): |
|
547 |
- # raise TypeError('module %s examples is wrong type (%s): %s' % (module, type(examples), examples)) |
|
548 |
- |
|
549 |
- # use 'examples' for 'plainexamples' if 'examples' is a string |
|
550 |
- if isinstance(module_map[module]['examples'], string_types): |
|
551 |
- doc['plainexamples'] = module_map[module]['examples'] # plain text |
|
552 |
- else: |
|
553 |
- doc['plainexamples'] = '' |
|
554 |
- |
|
555 |
- doc['metadata'] = module_map[module]['metadata'] |
|
556 |
- |
|
557 |
- display.vvvvv(pp.pformat(module_map[module])) |
|
558 |
- if module_map[module]['returndocs']: |
|
559 |
- try: |
|
560 |
- doc['returndocs'] = yaml.safe_load(module_map[module]['returndocs']) |
|
561 |
- except Exception as e: |
|
562 |
- print("%s:%s:yaml error:%s:returndocs=%s" % (fname, module, e, module_map[module]['returndocs'])) |
|
563 |
- doc['returndocs'] = None |
|
564 |
- else: |
|
565 |
- doc['returndocs'] = None |
|
566 |
- |
|
567 |
- doc['author'] = doc.get('author', ['UNKNOWN']) |
|
568 |
- if isinstance(doc['author'], string_types): |
|
569 |
- doc['author'] = [doc['author']] |
|
570 |
- |
|
571 |
- display.v('about to template %s' % module) |
|
572 |
- display.vvvvv(pp.pformat(doc)) |
|
573 |
- try: |
|
574 |
- text = templates['plugin'].render(doc) |
|
575 |
- except Exception as e: |
|
576 |
- display.warning(msg="Could not parse %s due to %s" % (module, e)) |
|
577 |
- continue |
|
578 |
- |
|
579 |
- if LooseVersion(jinja2.__version__) < LooseVersion('2.10'): |
|
580 |
- # jinja2 < 2.10's indent filter indents blank lines. Cleanup |
|
581 |
- text = re.sub(' +\n', '\n', text) |
|
582 |
- |
|
583 |
- write_data(text, output_dir, outputname, module) |
|
584 |
- |
|
585 |
- # Create deprecation stub pages for deprecated aliases |
|
586 |
- if module_map[module]['aliases']: |
|
587 |
- for alias in module_map[module]['aliases']: |
|
588 |
- if alias in module_map[module]['aliases_deprecated']: |
|
589 |
- doc['alias'] = alias |
|
590 |
- |
|
591 |
- display.v('about to template %s (deprecation alias %s)' % (module, alias)) |
|
592 |
- display.vvvvv(pp.pformat(doc)) |
|
593 |
- try: |
|
594 |
- text = templates['plugin_deprecation_stub'].render(doc) |
|
595 |
- except Exception as e: |
|
596 |
- display.warning(msg="Could not parse %s (deprecation alias %s) due to %s" % (module, alias, e)) |
|
597 |
- continue |
|
598 |
- |
|
599 |
- if LooseVersion(jinja2.__version__) < LooseVersion('2.10'): |
|
600 |
- # jinja2 < 2.10's indent filter indents blank lines. Cleanup |
|
601 |
- text = re.sub(' +\n', '\n', text) |
|
602 |
- |
|
603 |
- write_data(text, output_dir, outputname, alias) |
|
604 |
- |
|
605 |
- |
|
606 |
-def process_categories(plugin_info, categories, templates, output_dir, output_name, plugin_type): |
|
607 |
- # For some reason, this line is changing plugin_info: |
|
608 |
- # text = templates['list_of_CATEGORY_modules'].render(template_data) |
|
609 |
- # To avoid that, make a deepcopy of the data. |
|
610 |
- # We should track that down and fix it at some point in the future. |
|
611 |
- plugin_info = deepcopy(plugin_info) |
|
612 |
- for category in sorted(categories.keys()): |
|
613 |
- module_map = categories[category] |
|
614 |
- category_filename = output_name % category |
|
615 |
- |
|
616 |
- display.display("*** recording category %s in %s ***" % (category, category_filename)) |
|
617 |
- |
|
618 |
- # start a new category file |
|
619 |
- |
|
620 |
- category_name = category.replace("_", " ") |
|
621 |
- category_title = category_name.title() |
|
622 |
- |
|
623 |
- subcategories = dict((k, v) for k, v in module_map.items() if k != '_modules') |
|
624 |
- template_data = {'title': category_title, |
|
625 |
- 'category_name': category_name, |
|
626 |
- 'category': module_map, |
|
627 |
- 'subcategories': subcategories, |
|
628 |
- 'module_info': plugin_info, |
|
629 |
- 'plugin_type': plugin_type |
|
630 |
- } |
|
631 |
- |
|
632 |
- text = templates['list_of_CATEGORY_modules'].render(template_data) |
|
633 |
- write_data(text, output_dir, category_filename) |
|
634 |
- |
|
635 |
- |
|
636 |
-def process_support_levels(plugin_info, categories, templates, output_dir, plugin_type): |
|
637 |
- supported_by = {'Ansible Core Team': {'slug': 'core_supported', |
|
638 |
- 'modules': [], |
|
639 |
- 'output': 'core_maintained.rst', |
|
640 |
- 'blurb': "These are :doc:`modules maintained by the" |
|
641 |
- " Ansible Core Team<core_maintained>` and will always ship" |
|
642 |
- " with Ansible itself."}, |
|
643 |
- 'Ansible Network Team': {'slug': 'network_supported', |
|
644 |
- 'modules': [], |
|
645 |
- 'output': 'network_maintained.rst', |
|
646 |
- 'blurb': "These are :doc:`modules maintained by the" |
|
647 |
- " Ansible Network Team<network_maintained>` in" |
|
648 |
- " a relationship similar to how the Ansible Core Team" |
|
649 |
- " maintains the Core modules."}, |
|
650 |
- 'Ansible Partners': {'slug': 'certified_supported', |
|
651 |
- 'modules': [], |
|
652 |
- 'output': 'partner_maintained.rst', |
|
653 |
- 'blurb': """ |
|
654 |
-Some examples of :doc:`Certified Modules<partner_maintained>` are those submitted by other |
|
655 |
-companies. Maintainers of these types of modules must watch for any issues reported or pull requests |
|
656 |
-raised against the module. |
|
657 |
- |
|
658 |
-The Ansible Core Team will review all modules becoming certified. Core committers will review |
|
659 |
-proposed changes to existing Certified Modules once the community maintainers of the module have |
|
660 |
-approved the changes. Core committers will also ensure that any issues that arise due to Ansible |
|
661 |
-engine changes will be remediated. Also, it is strongly recommended (but not presently required) |
|
662 |
-for these types of modules to have unit tests. |
|
663 |
- |
|
664 |
-These modules are currently shipped with Ansible, but might be shipped separately in the future. |
|
665 |
-"""}, |
|
666 |
- 'Ansible Community': {'slug': 'community_supported', |
|
667 |
- 'modules': [], |
|
668 |
- 'output': 'community_maintained.rst', |
|
669 |
- 'blurb': """ |
|
670 |
-These are :doc:`modules maintained by the Ansible Community<community_maintained>`. They **are |
|
671 |
-not** supported by the Ansible Core Team or by companies/partners associated to the module. |
|
672 |
- |
|
673 |
-They are still fully usable, but the response rate to issues is purely up to the community. Best |
|
674 |
-effort support will be provided but is not covered under any support contracts. |
|
675 |
- |
|
676 |
-These modules are currently shipped with Ansible, but will most likely be shipped separately in the future. |
|
677 |
- """}, |
|
678 |
- } |
|
679 |
- |
|
680 |
- # only gen support pages for modules for now, need to split and namespace templates and generated docs |
|
681 |
- if plugin_type == 'plugins': |
|
682 |
- return |
|
683 |
- # Separate the modules by support_level |
|
684 |
- for module, info in plugin_info.items(): |
|
685 |
- if not info.get('metadata', None): |
|
686 |
- display.warning('no metadata for %s' % module) |
|
687 |
- continue |
|
688 |
- if info['metadata']['supported_by'] == 'core': |
|
689 |
- supported_by['Ansible Core Team']['modules'].append(module) |
|
690 |
- elif info['metadata']['supported_by'] == 'network': |
|
691 |
- supported_by['Ansible Network Team']['modules'].append(module) |
|
692 |
- elif info['metadata']['supported_by'] == 'certified': |
|
693 |
- supported_by['Ansible Partners']['modules'].append(module) |
|
694 |
- elif info['metadata']['supported_by'] == 'community': |
|
695 |
- supported_by['Ansible Community']['modules'].append(module) |
|
696 |
- else: |
|
697 |
- raise AnsibleError('Unknown supported_by value: %s' % info['metadata']['supported_by']) |
|
698 |
- |
|
699 |
- # Render the module lists based on category and subcategory |
|
700 |
- for maintainers, data in supported_by.items(): |
|
701 |
- subcategories = {} |
|
702 |
- subcategories[''] = {} |
|
703 |
- for module in data['modules']: |
|
704 |
- new_cat = plugin_info[module]['sub_category'] |
|
705 |
- category = plugin_info[module]['primary_category'] |
|
706 |
- if category not in subcategories: |
|
707 |
- subcategories[category] = {} |
|
708 |
- subcategories[category][''] = {} |
|
709 |
- subcategories[category]['']['_modules'] = [] |
|
710 |
- if new_cat not in subcategories[category]: |
|
711 |
- subcategories[category][new_cat] = {} |
|
712 |
- subcategories[category][new_cat]['_modules'] = [] |
|
713 |
- subcategories[category][new_cat]['_modules'].append(module) |
|
714 |
- |
|
715 |
- template_data = {'maintainers': maintainers, |
|
716 |
- 'subcategories': subcategories, |
|
717 |
- 'modules': data['modules'], |
|
718 |
- 'slug': data['slug'], |
|
719 |
- 'module_info': plugin_info, |
|
720 |
- 'plugin_type': plugin_type |
|
721 |
- } |
|
722 |
- text = templates['support_list'].render(template_data) |
|
723 |
- write_data(text, output_dir, data['output']) |
|
724 |
- |
|
725 |
- |
|
726 |
-def validate_options(options): |
|
727 |
- ''' validate option parser options ''' |
|
728 |
- |
|
729 |
- if not options.module_dir: |
|
730 |
- sys.exit("--module-dir is required", file=sys.stderr) |
|
731 |
- if not os.path.exists(options.module_dir): |
|
732 |
- sys.exit("--module-dir does not exist: %s" % options.module_dir, file=sys.stderr) |
|
733 |
- if not options.template_dir: |
|
734 |
- sys.exit("--template-dir must be specified") |
|
735 |
- |
|
736 |
- |
|
737 |
-def main(): |
|
738 |
- |
|
739 |
- # INIT |
|
740 |
- p = generate_parser() |
|
741 |
- (options, args) = p.parse_args() |
|
742 |
- if not options.template_dir: |
|
743 |
- options.template_dir = ["hacking/templates"] |
|
744 |
- validate_options(options) |
|
745 |
- display.verbosity = options.verbosity |
|
746 |
- plugin_type = options.plugin_type |
|
747 |
- |
|
748 |
- display.display("Evaluating %s files..." % plugin_type) |
|
749 |
- |
|
750 |
- # prep templating |
|
751 |
- templates = jinja2_environment(options.template_dir, options.type, plugin_type) |
|
752 |
- |
|
753 |
- # set file/directory structure |
|
754 |
- if plugin_type == 'module': |
|
755 |
- # trim trailing s off of plugin_type for plugin_type=='modules'. ie 'copy_module.rst' |
|
756 |
- outputname = '%s_' + '%s.rst' % plugin_type |
|
757 |
- output_dir = options.output_dir |
|
758 |
- else: |
|
759 |
- # for plugins, just use 'ssh.rst' vs 'ssh_module.rst' |
|
760 |
- outputname = '%s.rst' |
|
761 |
- output_dir = '%s/plugins/%s' % (options.output_dir, plugin_type) |
|
762 |
- |
|
763 |
- display.vv('output name: %s' % outputname) |
|
764 |
- display.vv('output dir: %s' % output_dir) |
|
765 |
- |
|
766 |
- # Convert passed-in limit_to to None or list of modules. |
|
767 |
- if options.limit_to is not None: |
|
768 |
- options.limit_to = [s.lower() for s in options.limit_to.split(",")] |
|
769 |
- |
|
770 |
- plugin_info, categories = get_plugin_info(options.module_dir, limit_to=options.limit_to, verbose=(options.verbosity > 0)) |
|
771 |
- |
|
772 |
- categories['all'] = {'_modules': plugin_info.keys()} |
|
773 |
- |
|
774 |
- if display.verbosity >= 3: |
|
775 |
- display.vvv(pp.pformat(categories)) |
|
776 |
- if display.verbosity >= 5: |
|
777 |
- display.vvvvv(pp.pformat(plugin_info)) |
|
778 |
- |
|
779 |
- # Transform the data |
|
780 |
- if options.type == 'rst': |
|
781 |
- display.v('Generating rst') |
|
782 |
- for key, record in plugin_info.items(): |
|
783 |
- display.vv(key) |
|
784 |
- if display.verbosity >= 5: |
|
785 |
- display.vvvvv(pp.pformat(('record', record))) |
|
786 |
- if record.get('doc', None): |
|
787 |
- short_desc = record['doc']['short_description'].rstrip('.') |
|
788 |
- if short_desc is None: |
|
789 |
- display.warning('short_description for %s is None' % key) |
|
790 |
- short_desc = '' |
|
791 |
- record['doc']['short_description'] = rst_ify(short_desc) |
|
792 |
- |
|
793 |
- if plugin_type == 'module': |
|
794 |
- display.v('Generating Categories') |
|
795 |
- # Write module master category list |
|
796 |
- category_list_text = templates['category_list'].render(categories=sorted(categories.keys())) |
|
797 |
- category_index_name = '%ss_by_category.rst' % plugin_type |
|
798 |
- write_data(category_list_text, output_dir, category_index_name) |
|
799 |
- |
|
800 |
- # Render all the individual plugin pages |
|
801 |
- display.v('Generating plugin pages') |
|
802 |
- process_plugins(plugin_info, templates, outputname, output_dir, options.ansible_version, plugin_type) |
|
803 |
- |
|
804 |
- # Render all the categories for modules |
|
805 |
- if plugin_type == 'module': |
|
806 |
- display.v('Generating Category lists') |
|
807 |
- category_list_name_template = 'list_of_%s_' + '%ss.rst' % plugin_type |
|
808 |
- process_categories(plugin_info, categories, templates, output_dir, category_list_name_template, plugin_type) |
|
809 |
- |
|
810 |
- # Render all the categories for modules |
|
811 |
- process_support_levels(plugin_info, categories, templates, output_dir, plugin_type) |
|
812 |
- |
|
813 |
- |
|
814 |
-if __name__ == '__main__': |
|
815 |
- main() |
... | ... |
@@ -1,10 +1,10 @@ |
1 | 1 |
OS := $(shell uname -s) |
2 | 2 |
SITELIB = $(shell python -c "from distutils.sysconfig import get_python_lib; print get_python_lib()"): |
3 |
-FORMATTER=../bin/plugin_formatter.py |
|
3 |
+PLUGIN_FORMATTER=../../hacking/build-ansible.py document-plugins |
|
4 | 4 |
TESTING_FORMATTER=../bin/testing_formatter.sh |
5 |
-DUMPER=../bin/dump_keywords.py |
|
6 |
-CONFIG_DUMPER=../bin/dump_config.py |
|
7 |
-GENERATE_CLI=../bin/generate_man.py |
|
5 |
+KEYWORD_DUMPER=../../hacking/build-ansible.py document-keywords |
|
6 |
+CONFIG_DUMPER=../../hacking/build-ansible.py document-config |
|
7 |
+GENERATE_CLI=../../hacking/build-ansible.py generate-man |
|
8 | 8 |
ifeq ($(shell echo $(OS) | egrep -ic 'Darwin|FreeBSD|OpenBSD|DragonFly'),1) |
9 | 9 |
CPUS ?= $(shell sysctl hw.ncpu|awk '{print $$2}') |
10 | 10 |
else |
... | ... |
@@ -79,24 +79,24 @@ clean: |
79 | 79 |
.PHONY: docs clean |
80 | 80 |
|
81 | 81 |
# TODO: make generate_man output dir cli option |
82 |
-cli: $(GENERATE_CLI) |
|
82 |
+cli: |
|
83 | 83 |
mkdir -p rst/cli |
84 | 84 |
PYTHONPATH=../../lib $(GENERATE_CLI) --template-file=../templates/cli_rst.j2 --output-dir=rst/cli/ --output-format rst ../../lib/ansible/cli/*.py |
85 | 85 |
|
86 |
-keywords: $(FORMATTER) ../templates/playbooks_keywords.rst.j2 |
|
87 |
- PYTHONPATH=../../lib $(DUMPER) --template-dir=../templates --output-dir=rst/reference_appendices/ -d ./keyword_desc.yml |
|
86 |
+keywords: ../templates/playbooks_keywords.rst.j2 |
|
87 |
+ PYTHONPATH=../../lib $(KEYWORD_DUMPER) --template-dir=../templates --output-dir=rst/reference_appendices/ -d ./keyword_desc.yml |
|
88 | 88 |
|
89 |
-config: |
|
89 |
+config: ../templates/config.rst.j2 |
|
90 | 90 |
PYTHONPATH=../../lib $(CONFIG_DUMPER) --template-file=../templates/config.rst.j2 --output-dir=rst/reference_appendices/ -d ../../lib/ansible/config/base.yml |
91 | 91 |
|
92 |
-modules: $(FORMATTER) ../templates/plugin.rst.j2 |
|
93 |
- PYTHONPATH=../../lib $(FORMATTER) -t rst --template-dir=../templates --module-dir=../../lib/ansible/modules -o rst/modules/ $(MODULE_ARGS) |
|
92 |
+modules: ../templates/plugin.rst.j2 |
|
93 |
+ PYTHONPATH=../../lib $(PLUGIN_FORMATTER) -t rst --template-dir=../templates --module-dir=../../lib/ansible/modules -o rst/modules/ $(MODULE_ARGS) |
|
94 | 94 |
|
95 |
-plugins: $(FORMATTER) ../templates/plugin.rst.j2 |
|
95 |
+plugins: ../templates/plugin.rst.j2 |
|
96 | 96 |
@echo "looping over doc plugins" |
97 | 97 |
for plugin in $(DOC_PLUGINS); \ |
98 | 98 |
do \ |
99 |
- PYTHONPATH=../../lib $(FORMATTER) -t rst --plugin-type $$plugin --template-dir=../templates --module-dir=../../lib/ansible/plugins/$$plugin -o rst $(PLUGIN_ARGS); \ |
|
99 |
+ PYTHONPATH=../../lib $(PLUGIN_FORMATTER) -t rst --plugin-type $$plugin --template-dir=../templates --module-dir=../../lib/ansible/plugins/$$plugin -o rst $(PLUGIN_ARGS); \ |
|
100 | 100 |
done |
101 | 101 |
|
102 | 102 |
testing: |
... | ... |
@@ -60,7 +60,8 @@ If you make multiple changes to the documentation, or add more than a line to it |
60 | 60 |
#. Test your changes for rST errors. |
61 | 61 |
#. Build the page, and preferably the entire documentation site, locally. |
62 | 62 |
|
63 |
-To work with documentation on your local machine, you need the following packages installed: |
|
63 |
+To work with documentation on your local machine, you need to have python-3.5 or greater and the |
|
64 |
+following packages installed: |
|
64 | 65 |
|
65 | 66 |
- gcc |
66 | 67 |
- jinja2 |
... | ... |
@@ -72,6 +73,7 @@ To work with documentation on your local machine, you need the following package |
72 | 72 |
- six |
73 | 73 |
- sphinx |
74 | 74 |
- sphinx-notfound-page |
75 |
+- straight.plugin |
|
75 | 76 |
|
76 | 77 |
.. note:: |
77 | 78 |
|
... | ... |
@@ -19,15 +19,15 @@ These are the keywords available on common playbook objects. Keywords are one of |
19 | 19 |
:local: |
20 | 20 |
:depth: 1 |
21 | 21 |
|
22 |
-{% for name in clist %} |
|
22 |
+{% for name in playbook_class_names %} |
|
23 | 23 |
|
24 | 24 |
{{ name }} |
25 | 25 |
{{ '-' * name|length }} |
26 | 26 |
.. glossary:: |
27 | 27 |
|
28 |
-{% for attribute in oblist[name]|sort %} |
|
28 |
+{% for attribute in pb_keywords[name]|sort %} |
|
29 | 29 |
{{ attribute }} |
30 |
- {{ oblist[name][attribute] |indent(8) }} |
|
30 |
+ {{ pb_keywords[name][attribute] |indent(8) }} |
|
31 | 31 |
|
32 | 32 |
{% endfor %} |
33 | 33 |
{% endfor %} |
... | ... |
@@ -21,18 +21,17 @@ except ImportError: |
21 | 21 |
argcomplete = None |
22 | 22 |
|
23 | 23 |
|
24 |
-def set_sys_path(this_script=__file__): |
|
25 |
- """Add path to the common librarydirectory to :attr:`sys.path`""" |
|
24 |
+def build_lib_path(this_script=__file__): |
|
25 |
+ """Return path to the common build library directory""" |
|
26 | 26 |
hacking_dir = os.path.dirname(this_script) |
27 | 27 |
libdir = os.path.abspath(os.path.join(hacking_dir, 'build_library')) |
28 | 28 |
|
29 |
- if libdir not in sys.path: |
|
30 |
- sys.path.insert(0, libdir) |
|
29 |
+ return libdir |
|
31 | 30 |
|
32 | 31 |
|
33 |
-set_sys_path() |
|
32 |
+sys.path.insert(0, build_lib_path()) |
|
34 | 33 |
|
35 |
-from build_ansible import commands |
|
34 |
+from build_ansible import commands, errors |
|
36 | 35 |
|
37 | 36 |
|
38 | 37 |
def create_arg_parser(program_name): |
... | ... |
@@ -63,13 +62,26 @@ def main(): |
63 | 63 |
argcomplete.autocomplete(arg_parser) |
64 | 64 |
|
65 | 65 |
args = arg_parser.parse_args(sys.argv[1:]) |
66 |
+ if args.command is None: |
|
67 |
+ print('Please specify a subcommand to run') |
|
68 |
+ sys.exit(1) |
|
66 | 69 |
|
67 | 70 |
for subcommand in subcommands: |
68 | 71 |
if subcommand.name == args.command: |
69 |
- sys.exit(subcommand.main(args)) |
|
70 |
- |
|
71 |
- print('Error: Select a subcommand') |
|
72 |
- arg_parser.print_usage() |
|
72 |
+ command = subcommand |
|
73 |
+ break |
|
74 |
+ else: |
|
75 |
+ # Note: We should never trigger this because argparse should shield us from it |
|
76 |
+ print('Error: {0} was not a recognized subcommand'.format(args.command)) |
|
77 |
+ sys.exit(1) |
|
78 |
+ |
|
79 |
+ try: |
|
80 |
+ retval = command.main(args) |
|
81 |
+ except errors.DependencyError as e: |
|
82 |
+ print(e) |
|
83 |
+ sys.exit(2) |
|
84 |
+ |
|
85 |
+ sys.exit(retval) |
|
73 | 86 |
|
74 | 87 |
|
75 | 88 |
if __name__ == '__main__': |
76 | 89 |
new file mode 100644 |
... | ... |
@@ -0,0 +1,41 @@ |
0 |
+# Copyright: (c) 2018, Ansible Project |
|
1 |
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) |
|
2 |
+ |
|
3 |
+from __future__ import (absolute_import, division, print_function) |
|
4 |
+__metaclass__ = type |
|
5 |
+ |
|
6 |
+""" |
|
7 |
+This file contains common code for building ansible. If you want to use code from here at runtime, |
|
8 |
+it needs to be moved out of this file and the implementation looked over to figure out whether API |
|
9 |
+should be changed before being made public. |
|
10 |
+""" |
|
11 |
+ |
|
12 |
+import os.path |
|
13 |
+ |
|
14 |
+ |
|
15 |
+def update_file_if_different(filename, b_data): |
|
16 |
+ ''' |
|
17 |
+ Replace file content only if content is different. |
|
18 |
+ |
|
19 |
+ This preserves timestamps in case the file content has not changed. It performs multiple |
|
20 |
+ operations on the file so it is not atomic and may be slower than simply writing to the file. |
|
21 |
+ |
|
22 |
+ :arg filename: The filename to write to |
|
23 |
+ :b_data: Byte string containing the data to write to the file |
|
24 |
+ ''' |
|
25 |
+ try: |
|
26 |
+ with open(filename, 'rb') as f: |
|
27 |
+ b_data_old = f.read() |
|
28 |
+ except IOError as e: |
|
29 |
+ if e.errno != 2: |
|
30 |
+ raise |
|
31 |
+ # File did not exist, set b_data_old to a sentinel value so that |
|
32 |
+ # b_data gets written to the filename |
|
33 |
+ b_data_old = None |
|
34 |
+ |
|
35 |
+ if b_data_old != b_data: |
|
36 |
+ with open(filename, 'wb') as f: |
|
37 |
+ f.write(b_data) |
|
38 |
+ return True |
|
39 |
+ |
|
40 |
+ return False |
0 | 41 |
new file mode 100644 |
... | ... |
@@ -0,0 +1,80 @@ |
0 |
+# coding: utf-8 |
|
1 |
+# Copyright: (c) 2019, Ansible Project |
|
2 |
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) |
|
3 |
+ |
|
4 |
+# Make coding more python3-ish |
|
5 |
+from __future__ import (absolute_import, division, print_function) |
|
6 |
+__metaclass__ = type |
|
7 |
+ |
|
8 |
+import os |
|
9 |
+import os.path |
|
10 |
+import pathlib |
|
11 |
+ |
|
12 |
+import yaml |
|
13 |
+from jinja2 import Environment, FileSystemLoader |
|
14 |
+from ansible.module_utils._text import to_bytes |
|
15 |
+ |
|
16 |
+# Pylint doesn't understand Python3 namespace modules. |
|
17 |
+from ..change_detection import update_file_if_different # pylint: disable=relative-beyond-top-level |
|
18 |
+from ..commands import Command # pylint: disable=relative-beyond-top-level |
|
19 |
+ |
|
20 |
+ |
|
21 |
+DEFAULT_TEMPLATE_FILE = 'config.rst.j2' |
|
22 |
+DEFAULT_TEMPLATE_DIR = pathlib.Path(__file__).parents[4] / 'docs/templates' |
|
23 |
+ |
|
24 |
+ |
|
25 |
def fix_description(config_options):
    '''Normalize every option's description into a list of strings.

    Some descriptions in the source data are plain strings, others are lists;
    downstream templates expect a list, so wrap bare strings.
    Mutates and returns the same mapping.
    '''
    for key, option in config_options.items():
        description = option.get('description', [])
        if not isinstance(description, list):
            description = [description]
        option['description'] = description
    return config_options
|
36 |
+ |
|
37 |
+ |
|
38 |
class DocumentConfig(Command):
    '''build-ansible subcommand that renders the config option docs to rst.'''
    name = 'document-config'

    @classmethod
    def init_parser(cls, add_parser):
        '''Register this subcommand's arguments on the shared argument parser.'''
        parser = add_parser(cls.name, description='Generate module documentation from metadata')
        parser.add_argument("-t", "--template-file", action="store", dest="template_file",
                            default=DEFAULT_TEMPLATE_FILE,
                            help="Jinja2 template to use for the config")
        parser.add_argument("-T", "--template-dir", action="store", dest="template_dir",
                            default=DEFAULT_TEMPLATE_DIR,
                            help="directory containing Jinja2 templates")
        parser.add_argument("-o", "--output-dir", action="store", dest="output_dir", default='/tmp/',
                            help="Output directory for rst files")
        parser.add_argument("-d", "--docs-source", action="store", dest="docs", default=None,
                            help="Source for attribute docs")

    @staticmethod
    def main(args):
        '''Render the config template with the YAML attribute docs; returns exit code 0.'''
        # Resolve the template location from the (possibly relative) dir + file args.
        template_path = os.path.abspath(os.path.join(args.template_dir, args.template_file))
        template_dir = os.path.dirname(template_path)
        template_file = os.path.basename(template_path)
        output_dir = os.path.abspath(args.output_dir)

        # The docs source is optional; fall back to an empty option set.
        if args.docs:
            with open(args.docs) as f:
                config_options = yaml.safe_load(f)
        else:
            config_options = {}
        config_options = fix_description(config_options)

        env = Environment(loader=FileSystemLoader(template_dir), trim_blocks=True)
        template = env.get_template(template_file)

        # Strip the .j2 suffix to form the rendered output filename.
        destination = os.path.join(output_dir, template_file.replace('.j2', ''))
        rendered = to_bytes(template.render({'config_options': config_options}))
        # Only rewrite the file when content changed, preserving timestamps.
        update_file_if_different(destination, rendered)

        return 0
0 | 80 |
new file mode 100644 |
... | ... |
@@ -0,0 +1,125 @@ |
0 |
+# coding: utf-8 |
|
1 |
+# Copyright: (c) 2019, Ansible Project |
|
2 |
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) |
|
3 |
+ |
|
4 |
+# Make coding more python3-ish |
|
5 |
+from __future__ import (absolute_import, division, print_function) |
|
6 |
+__metaclass__ = type |
|
7 |
+ |
|
8 |
+import importlib |
|
9 |
+import os.path |
|
10 |
+import pathlib |
|
11 |
+import re |
|
12 |
+from distutils.version import LooseVersion |
|
13 |
+ |
|
14 |
+import jinja2 |
|
15 |
+import yaml |
|
16 |
+from jinja2 import Environment, FileSystemLoader |
|
17 |
+ |
|
18 |
+from ansible.module_utils._text import to_bytes |
|
19 |
+ |
|
20 |
+# Pylint doesn't understand Python3 namespace modules. |
|
21 |
+from ..change_detection import update_file_if_different # pylint: disable=relative-beyond-top-level |
|
22 |
+from ..commands import Command # pylint: disable=relative-beyond-top-level |
|
23 |
+ |
|
24 |
+ |
|
25 |
+DEFAULT_TEMPLATE_DIR = str(pathlib.Path(__file__).resolve().parents[4] / 'docs/templates') |
|
26 |
+TEMPLATE_FILE = 'playbooks_keywords.rst.j2' |
|
27 |
+PLAYBOOK_CLASS_NAMES = ['Play', 'Role', 'Block', 'Task'] |
|
28 |
+ |
|
29 |
+ |
|
30 |
def load_definitions(keyword_definitions_file):
    '''Load the keyword description YAML file.

    :arg keyword_definitions_file: path to a YAML file mapping keyword names to
        their documentation strings
    :returns: dict of keyword definitions.  An empty or all-comment YAML file
        yields ``{}`` rather than ``None`` so callers can iterate the result
        safely (``yaml.safe_load`` returns ``None`` for empty documents).
    '''
    with open(keyword_definitions_file) as f:
        docs = yaml.safe_load(f)

    return docs or {}
|
36 |
+ |
|
37 |
+ |
|
38 |
def extract_keywords(keyword_definitions):
    '''Build a mapping of playbook class name -> {keyword: description}.

    Imports each playbook class (Play, Role, Block, Task), takes its public
    ``_valid_attrs`` as the keyword set, and attaches the description found in
    *keyword_definitions* (falling back to the attribute's alias, then to an
    UNDOCUMENTED marker).

    :arg keyword_definitions: dict mapping keyword names to doc strings
    :returns: dict keyed by class name from PLAYBOOK_CLASS_NAMES
    :raises ImportError: if a playbook class cannot be found on its module
    '''
    pb_keywords = {}
    for pb_class_name in PLAYBOOK_CLASS_NAMES:
        # Play lives directly in ansible.playbook; the others in submodules.
        if pb_class_name == 'Play':
            module_name = 'ansible.playbook'
        else:
            module_name = 'ansible.playbook.{0}'.format(pb_class_name.lower())
        module = importlib.import_module(module_name)
        playbook_class = getattr(module, pb_class_name, None)
        if playbook_class is None:
            raise ImportError("We weren't able to import the module {0}".format(module_name))

        # Maintain order of the actual class names for our output
        # Build up a mapping of playbook classes to the attributes that they hold
        pb_keywords[pb_class_name] = {k: v for (k, v) in playbook_class._valid_attrs.items()
                                      # Filter private attributes as they're not usable in playbooks
                                      if not v.private}

        # pick up definitions if they exist
        # (iterate over a tuple copy because entries may be deleted below)
        for keyword in tuple(pb_keywords[pb_class_name]):
            if keyword in keyword_definitions:
                pb_keywords[pb_class_name][keyword] = keyword_definitions[keyword]
            else:
                # check if there is an alias, otherwise undocumented
                alias = getattr(getattr(playbook_class, '_%s' % keyword), 'alias', None)
                if alias and alias in keyword_definitions:
                    # Document under the alias name instead of the attr name.
                    pb_keywords[pb_class_name][alias] = keyword_definitions[alias]
                    del pb_keywords[pb_class_name][keyword]
                else:
                    pb_keywords[pb_class_name][keyword] = ' UNDOCUMENTED!! '

        # loop is really with_ for users
        if pb_class_name == 'Task':
            pb_keywords[pb_class_name]['with_<lookup_plugin>'] = (
                'The same as ``loop`` but magically adds the output of any lookup plugin to'
                ' generate the item list.')

        # local_action is implicit with action
        if 'action' in pb_keywords[pb_class_name]:
            pb_keywords[pb_class_name]['local_action'] = ('Same as action but also implies'
                                                          ' ``delegate_to: localhost``')

    return pb_keywords
|
81 |
+ |
|
82 |
+ |
|
83 |
def generate_page(pb_keywords, template_dir):
    '''Render the playbook-keywords rst page from the Jinja2 template.

    :arg pb_keywords: mapping of playbook class name -> keyword docs
    :arg template_dir: directory containing TEMPLATE_FILE
    :returns: the rendered page as a string
    '''
    loader = FileSystemLoader(template_dir)
    env = Environment(loader=loader, trim_blocks=True)
    template = env.get_template(TEMPLATE_FILE)
    context = {
        'pb_keywords': pb_keywords,
        'playbook_class_names': PLAYBOOK_CLASS_NAMES,
    }

    rendered = template.render(context)
    if LooseVersion(jinja2.__version__) < LooseVersion('2.10'):
        # jinja2 < 2.10's indent filter indents blank lines; strip the
        # trailing spaces it leaves behind.
        rendered = re.sub(' +\n', '\n', rendered)

    return rendered
|
94 |
+ |
|
95 |
+ |
|
96 |
class DocumentKeywords(Command):
    '''build-ansible subcommand that builds the playbook keyword docs.'''
    name = 'document-keywords'

    @classmethod
    def init_parser(cls, add_parser):
        '''Register this subcommand's arguments on the shared argument parser.'''
        parser = add_parser(cls.name, description='Generate playbook keyword documentation from'
                            ' code and descriptions')
        parser.add_argument("-T", "--template-dir", action="store", dest="template_dir",
                            default=DEFAULT_TEMPLATE_DIR,
                            help="directory containing Jinja2 templates")
        parser.add_argument("-o", "--output-dir", action="store", dest="output_dir",
                            default='/tmp/', help="Output directory for rst files")
        parser.add_argument("-d", "--docs-source", action="store", dest="docs", default=None,
                            help="Source for attribute docs")

    @staticmethod
    def main(args):
        '''Generate the keywords page; returns 0 on success, 1 on bad usage.'''
        # The keyword definitions file is mandatory for this subcommand.
        if not args.docs:
            print('Definitions for keywords must be specified via `--docs-source FILENAME`')
            return 1

        definitions = load_definitions(args.docs)
        pb_keywords = extract_keywords(definitions)
        page = generate_page(pb_keywords, args.template_dir)

        # Strip the .j2 suffix to form the rendered output filename and only
        # touch the file when the content actually changed.
        output_file = os.path.join(args.output_dir, TEMPLATE_FILE.replace('.j2', ''))
        update_file_if_different(output_file, to_bytes(page))

        return 0
0 | 125 |
new file mode 100644 |
... | ... |
@@ -0,0 +1,303 @@ |
0 |
+# coding: utf-8 |
|
1 |
+# Copyright: (c) 2019, Ansible Project |
|
2 |
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) |
|
3 |
+ |
|
4 |
+# Make coding more python3-ish |
|
5 |
+from __future__ import (absolute_import, division, print_function) |
|
6 |
+__metaclass__ = type |
|
7 |
+ |
|
8 |
+ |
|
9 |
+import argparse |
|
10 |
+import os.path |
|
11 |
+import pathlib |
|
12 |
+import sys |
|
13 |
+ |
|
14 |
+from jinja2 import Environment, FileSystemLoader |
|
15 |
+ |
|
16 |
+from ansible.module_utils._text import to_bytes |
|
17 |
+ |
|
18 |
+# Pylint doesn't understand Python3 namespace modules. |
|
19 |
+from ..change_detection import update_file_if_different # pylint: disable=relative-beyond-top-level |
|
20 |
+from ..commands import Command # pylint: disable=relative-beyond-top-level |
|
21 |
+ |
|
22 |
+ |
|
23 |
+DEFAULT_TEMPLATE_FILE = pathlib.Path(__file__).parents[4] / 'docs/templates/man.j2' |
|
24 |
+ |
|
25 |
+ |
|
26 |
+# from https://www.python.org/dev/peps/pep-0257/ |
|
27 |
# from https://www.python.org/dev/peps/pep-0257/
def trim_docstring(docstring):
    '''Normalize a docstring's indentation using the PEP 257 algorithm.

    Tabs are expanded, the common leading indentation of all lines after the
    first is removed, and leading/trailing blank lines are dropped.
    Returns '' for a falsy input.
    '''
    if not docstring:
        return ''
    # Convert tabs to spaces and split into lines.
    lines = docstring.expandtabs().splitlines()

    # Smallest indentation across the continuation lines (first line excluded).
    margin = sys.maxsize
    for row in lines[1:]:
        content = row.lstrip()
        if content:
            margin = min(margin, len(row) - len(content))

    # First line is special: just strip it.  Dedent the rest by the margin.
    cleaned = [lines[0].strip()]
    if margin < sys.maxsize:
        cleaned.extend(row[margin:].rstrip() for row in lines[1:])

    # Drop blank lines at both ends.
    while cleaned and not cleaned[-1]:
        cleaned.pop()
    while cleaned and not cleaned[0]:
        cleaned.pop(0)

    return '\n'.join(cleaned)
|
51 |
+ |
|
52 |
+ |
|
53 |
def get_options(optlist):
    '''Describe a list of argparse actions as doc dicts.

    Each returned dict has 'desc' and 'options' keys; plain store actions
    additionally get an 'arg' key (the uppercased dest, used as the metavar
    in the docs).  Positional non-store actions are skipped.
    '''
    docs = []
    for action in optlist:
        entry = {
            'desc': action.help,
            'options': action.option_strings
        }
        if isinstance(action, argparse._StoreAction):
            entry['arg'] = action.dest.upper()
        elif not entry['options']:
            # No flags and not a store action: nothing to document.
            continue
        docs.append(entry)

    return docs
|
69 |
+ |
|
70 |
+ |
|
71 |
def dedupe_groups(parser):
    '''Return the parser's action groups with duplicates removed.

    Two groups are considered duplicates when they hold the same action list;
    the first occurrence wins and order is preserved.
    '''
    unique = []
    for group in parser._action_groups:
        if not any(existing._actions == group._actions for existing in unique):
            unique.append(group)
    return unique
|
82 |
+ |
|
83 |
+ |
|
84 |
def get_option_groups(option_parser):
    '''Describe every deduped action group past the first one.

    Each entry carries the group's description, its actions, and the group
    object itself (the template needs all three).
    '''
    return [
        {
            'desc': group.description,
            'options': group._actions,
            'group_obj': group,
        }
        for group in dedupe_groups(option_parser)[1:]
    ]
|
93 |
+ |
|
94 |
+ |
|
95 |
def opt_doc_list(parser):
    '''Flatten option documentation across all of a parser's option groups.

    Collects docs from every deduped group past the first, then appends the
    parser's own top-level actions.
    '''
    docs = []
    for group in dedupe_groups(parser)[1:]:
        docs.extend(get_options(group._actions))

    docs.extend(get_options(parser._actions))

    return docs
|
105 |
+ |
|
106 |
+ |
|
107 |
def opts_docs(cli_class_name, cli_module_name):
    '''Generate the documentation data structure for one ansible CLI.

    Imports the CLI class, instantiates it, initializes its parser and walks
    the parser (including subcommand parsers) to build the dict consumed by
    the man-page / rst templates.

    :arg cli_class_name: name of the CLI class, e.g. ``AdHocCLI``
    :arg cli_module_name: module under ansible.cli, e.g. ``adhoc``
    :returns: dict of template variables (usage, descriptions, options,
        per-action options and nesting depth)
    '''
    cli_name = 'ansible-%s' % cli_module_name
    if cli_module_name == 'adhoc':
        # The adhoc CLI is installed as plain `ansible`.
        cli_name = 'ansible'

    # instantiate each cli and ask its options
    cli_klass = getattr(__import__("ansible.cli.%s" % cli_module_name,
                                   fromlist=[cli_class_name]), cli_class_name)
    cli = cli_klass([cli_name])

    # parse the common options; best-effort, some CLIs raise during init
    try:
        cli.init_parser()
    except Exception:
        pass

    # base/common cli info
    docs = {
        'cli': cli_module_name,
        'cli_name': cli_name,
        'usage': cli.parser.format_usage(),
        'short_desc': cli.parser.description,
        'long_desc': trim_docstring(cli.__doc__),
        'actions': {},
        'content_depth': 2,
    }
    option_info = {'option_names': [],
                   'options': [],
                   'groups': []}

    # BUGFIX: this was `('ARGUMENTS')`, a bare string, so the loop iterated
    # over single characters and the ARGUMENTS attribute was never picked up.
    for extras in ('ARGUMENTS',):
        if hasattr(cli, extras):
            docs[extras.lower()] = getattr(cli, extras)

    common_opts = opt_doc_list(cli.parser)
    groups_info = get_option_groups(cli.parser)
    shared_opt_names = []
    for opt in common_opts:
        shared_opt_names.extend(opt.get('options', []))

    option_info['options'] = common_opts
    option_info['option_names'] = shared_opt_names

    option_info['groups'].extend(groups_info)

    docs.update(option_info)

    # now for each action/subcommand, force populate parser with per-action
    # options and record which options are NOT shared with the common set
    def get_actions(parser, docs):
        '''Recursively record subcommand info into docs; returns nesting depth.'''
        try:
            subparser = parser._subparsers._group_actions[0].choices
        except AttributeError:
            # parser has no subcommands
            subparser = {}

        depth = 0

        for action, parser in subparser.items():
            action_info = {'option_names': [],
                           'options': [],
                           'actions': {}}
            action_info['name'] = action
            # the execute_<action> method's docstring is the action description
            action_info['desc'] = trim_docstring(getattr(cli, 'execute_%s' % action).__doc__)

            action_doc_list = opt_doc_list(parser)

            # keep only options that are not in the shared/common set
            uncommon_options = []
            for action_doc in action_doc_list:
                option_aliases = action_doc.get('options', [])
                for option_alias in option_aliases:

                    if option_alias in shared_opt_names:
                        continue

                    # TODO: use set
                    if option_alias not in action_info['option_names']:
                        action_info['option_names'].append(option_alias)

                    if action_doc in action_info['options']:
                        continue

                    uncommon_options.append(action_doc)

                action_info['options'] = uncommon_options

            depth = 1 + get_actions(parser, action_info)

            docs['actions'][action] = action_info

        return depth

    action_depth = get_actions(cli.parser, docs)
    docs['content_depth'] = action_depth + 1

    docs['options'] = opt_doc_list(cli.parser)
    return docs
|
215 |
+ |
|
216 |
+ |
|
217 |
class GenerateMan(Command):
    '''build-ansible subcommand that renders man/rst pages for the ansible CLIs.'''
    name = 'generate-man'

    @classmethod
    def init_parser(cls, add_parser):
        '''Register this subcommand's arguments on the shared argument parser.'''
        parser = add_parser(name=cls.name,
                            description='Generate cli documentation from cli docstrings')

        parser.add_argument("-t", "--template-file", action="store", dest="template_file",
                            default=DEFAULT_TEMPLATE_FILE, help="path to jinja2 template")
        parser.add_argument("-o", "--output-dir", action="store", dest="output_dir",
                            default='/tmp/', help="Output directory for rst files")
        parser.add_argument("-f", "--output-format", action="store", dest="output_format",
                            default='man',
                            help="Output format for docs (the default 'man' or 'rst')")
        parser.add_argument('cli_modules', help='CLI module name(s)', metavar='MODULE_NAME', nargs='*')

    @staticmethod
    def main(args):
        '''Render one doc page per CLI module passed in args.cli_modules.

        Each module name is treated as a path to a ``<name>.py`` file under
        ansible's cli package; non-.py names and __init__.py are skipped.
        '''
        template_file = args.template_file
        template_path = os.path.expanduser(template_file)
        template_dir = os.path.abspath(os.path.dirname(template_path))
        template_basename = os.path.basename(template_file)

        output_dir = os.path.abspath(args.output_dir)
        output_format = args.output_format

        cli_modules = args.cli_modules

        # various cli parsing things checks sys.argv if the 'args' that are passed in are []
        # so just remove any args so the cli modules dont try to parse them resulting in warnings
        sys.argv = [sys.argv[0]]

        allvars = {}        # cli name -> template variables from opts_docs()
        output = {}         # cli name -> output filename
        cli_list = []
        cli_bin_name_list = []

        for cli_module_name in cli_modules:
            binary = os.path.basename(os.path.expanduser(cli_module_name))

            # only real cli module files are documented
            if not binary.endswith('.py'):
                continue
            elif binary == '__init__.py':
                continue

            cli_name = os.path.splitext(binary)[0]

            if cli_name == 'adhoc':
                # adhoc is installed as plain `ansible`, so it is special-cased
                cli_class_name = 'AdHocCLI'
                output[cli_name] = 'ansible.1.rst.in'
                cli_bin_name = 'ansible'
            else:
                # e.g. vault -> VaultCLI
                cli_class_name = "%sCLI" % cli_name.capitalize()
                output[cli_name] = 'ansible-%s.1.rst.in' % cli_name
                cli_bin_name = 'ansible-%s' % cli_name

            # FIXME:
            allvars[cli_name] = opts_docs(cli_class_name, cli_name)
            cli_bin_name_list.append(cli_bin_name)

        cli_list = allvars.keys()

        # maps --output-format to the output filename pattern
        doc_name_formats = {'man': '%s.1.rst.in',
                            'rst': '%s.rst'}

        for cli_name in cli_list:

            # template it!
            env = Environment(loader=FileSystemLoader(template_dir))
            template = env.get_template(template_basename)

            # add rest to vars
            tvars = allvars[cli_name]
            tvars['cli_list'] = cli_list
            tvars['cli_bin_name_list'] = cli_bin_name_list
            tvars['cli'] = cli_name
            if '-i' in tvars['options']:
                print('uses inventory')

            manpage = template.render(tvars)
            filename = os.path.join(output_dir, doc_name_formats[output_format] % tvars['cli_name'])
            # only rewrite the file when content changed, preserving timestamps
            update_file_if_different(filename, to_bytes(manpage))
0 | 303 |
new file mode 100644 |
... | ... |
@@ -0,0 +1,818 @@ |
0 |
+# Copyright: (c) 2012, Jan-Piet Mens <jpmens () gmail.com> |
|
1 |
+# Copyright: (c) 2012-2014, Michael DeHaan <michael@ansible.com> and others |
|
2 |
+# Copyright: (c) 2017, Ansible Project |
|
3 |
+ |
|
4 |
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) |
|
5 |
+ |
|
6 |
+from __future__ import absolute_import, division, print_function |
|
7 |
+__metaclass__ = type |
|
8 |
+ |
|
9 |
+ |
|
10 |
+import datetime |
|
11 |
+import glob |
|
12 |
+import json |
|
13 |
+import optparse |
|
14 |
+import os |
|
15 |
+import re |
|
16 |
+import sys |
|
17 |
+import warnings |
|
18 |
+from collections import defaultdict |
|
19 |
+from copy import deepcopy |
|
20 |
+from distutils.version import LooseVersion |
|
21 |
+from functools import partial |
|
22 |
+from pprint import PrettyPrinter |
|
23 |
+ |
|
24 |
+try: |
|
25 |
+ from html import escape as html_escape |
|
26 |
+except ImportError: |
|
27 |
+ # Python-3.2 or later |
|
28 |
+ import cgi |
|
29 |
+ |
|
30 |
+ def html_escape(text, quote=True): |
|
31 |
+ return cgi.escape(text, quote) |
|
32 |
+ |
|
33 |
+import jinja2 |
|
34 |
+import yaml |
|
35 |
+from jinja2 import Environment, FileSystemLoader |
|
36 |
+from jinja2.runtime import Undefined |
|
37 |
+ |
|
38 |
+from ansible.errors import AnsibleError |
|
39 |
+from ansible.module_utils._text import to_bytes, to_text |
|
40 |
+from ansible.module_utils.common.collections import is_sequence |
|
41 |
+from ansible.module_utils.parsing.convert_bool import boolean |
|
42 |
+from ansible.module_utils.six import iteritems, string_types |
|
43 |
+from ansible.plugins.loader import fragment_loader |
|
44 |
+from ansible.utils import plugin_docs |
|
45 |
+from ansible.utils.display import Display |
|
46 |
+ |
|
47 |
+# Pylint doesn't understand Python3 namespace modules. |
|
48 |
+from ..change_detection import update_file_if_different # pylint: disable=relative-beyond-top-level |
|
49 |
+from ..commands import Command # pylint: disable=relative-beyond-top-level |
|
50 |
+ |
|
51 |
+ |
|
52 |
+##################################################################################### |
|
53 |
+# constants and paths |
|
54 |
+ |
|
55 |
+# if a module is added in a version of Ansible older than this, don't print the version added information |
|
56 |
+# in the module documentation because everyone is assumed to be running something newer than this already. |
|
57 |
+TOO_OLD_TO_BE_NOTABLE = 2.3 |
|
58 |
+ |
|
59 |
+# Get parent directory of the directory this script lives in |
|
60 |
+MODULEDIR = os.path.abspath(os.path.join( |
|
61 |
+ os.path.dirname(os.path.realpath(__file__)), os.pardir, 'lib', 'ansible', 'modules' |
|
62 |
+)) |
|
63 |
+ |
|
64 |
+# The name of the DOCUMENTATION template |
|
65 |
+EXAMPLE_YAML = os.path.abspath(os.path.join( |
|
66 |
+ os.path.dirname(os.path.realpath(__file__)), os.pardir, 'examples', 'DOCUMENTATION.yml' |
|
67 |
+)) |
|
68 |
+ |
|
69 |
+_ITALIC = re.compile(r"I\(([^)]+)\)") |
|
70 |
+_BOLD = re.compile(r"B\(([^)]+)\)") |
|
71 |
+_MODULE = re.compile(r"M\(([^)]+)\)") |
|
72 |
+_URL = re.compile(r"U\(([^)]+)\)") |
|
73 |
+_LINK = re.compile(r"L\(([^)]+),([^)]+)\)") |
|
74 |
+_CONST = re.compile(r"C\(([^)]+)\)") |
|
75 |
+_RULER = re.compile(r"HORIZONTALLINE") |
|
76 |
+ |
|
77 |
+DEPRECATED = b" (D)" |
|
78 |
+ |
|
79 |
+pp = PrettyPrinter() |
|
80 |
+display = Display() |
|
81 |
+ |
|
82 |
+ |
|
83 |
+# kludge_ns gives us a kludgey way to set variables inside of loops that need to be visible outside |
|
84 |
+# the loop. We can get rid of this when we no longer need to build docs with less than Jinja-2.10 |
|
85 |
+# http://jinja.pocoo.org/docs/2.10/templates/#assignments |
|
86 |
+# With Jinja-2.10 we can use jinja2's namespace feature, restoring the namespace template portion |
|
87 |
+# of: fa5c0282a4816c4dd48e80b983ffc1e14506a1f5 |
|
88 |
+NS_MAP = {} |
|
89 |
+ |
|
90 |
+ |
|
91 |
def to_kludge_ns(key, value):
    '''Store *value* under *key* in the shared NS_MAP.

    Returns '' so templates can call it inline without emitting output.
    (Workaround for Jinja2 < 2.10 which lacks the namespace feature.)
    '''
    NS_MAP[key] = value
    return ""
|
94 |
+ |
|
95 |
+ |
|
96 |
def from_kludge_ns(key):
    '''Fetch a value previously stored by to_kludge_ns; raises KeyError if absent.'''
    return NS_MAP[key]
|
98 |
+ |
|
99 |
+ |
|
100 |
+# The max filter was added in Jinja2-2.10. Until we can require that version, use this |
|
101 |
# The max filter was added in Jinja2-2.10.  Until we can require that version, use this
def do_max(seq):
    '''Jinja2 filter: return the largest item of *seq*.'''
    return max(seq)
|
103 |
+ |
|
104 |
+ |
|
105 |
def rst_ify(text):
    ''' convert symbols like I(this is in italics) to valid restructured text

    Expands the ansible doc markup macros (I/B/M/U/L/C/HORIZONTALLINE) into
    their rst equivalents.  Any failure (e.g. non-string input) is re-raised
    as AnsibleError carrying the offending text.
    '''
    try:
        t = _ITALIC.sub(r"*\1*", text)
        t = _BOLD.sub(r"**\1**", t)
        t = _MODULE.sub(r":ref:`\1 <\1_module>`", t)
        # L() is expanded before U() so the url inside L() is not consumed first
        t = _LINK.sub(r"`\1 <\2>`_", t)
        t = _URL.sub(r"\1", t)
        t = _CONST.sub(r"``\1``", t)
        t = _RULER.sub(r"------------", t)
    except Exception as e:
        raise AnsibleError("Could not process (%s) : %s" % (text, e))

    return t
|
120 |
+ |
|
121 |
+ |
|
122 |
def html_ify(text):
    ''' convert symbols like I(this is in italics) to valid HTML

    Non-string input is coerced to text first; the result is HTML-escaped
    before the doc markup macros are expanded, then stripped.
    '''
    if not isinstance(text, string_types):
        text = to_text(text)

    # escape first so macro expansions below are not themselves escaped
    t = html_escape(text)
    t = _ITALIC.sub(r"<em>\1</em>", t)
    t = _BOLD.sub(r"<b>\1</b>", t)
    t = _MODULE.sub(r"<span class='module'>\1</span>", t)
    t = _URL.sub(r"<a href='\1'>\1</a>", t)
    t = _LINK.sub(r"<a href='\2'>\1</a>", t)
    t = _CONST.sub(r"<code>\1</code>", t)
    t = _RULER.sub(r"<hr/>", t)

    return t.strip()
|
138 |
+ |
|
139 |
+ |
|
140 |
def rst_fmt(text, fmt):
    '''Jinja2 helper: apply a %%-style format string *fmt* to *text*.'''
    return fmt % text
|
144 |
+ |
|
145 |
+ |
|
146 |
def rst_xline(width, char="="):
    '''Return a restructured text section line: *char* repeated *width* times.'''
    return width * char
|
150 |
+ |
|
151 |
+ |
|
152 |
def documented_type(text):
    '''Convert a python type name to the name used in documentation.

    Jinja2 Undefined values render as '-'; unknown type names pass through
    unchanged.
    '''
    if isinstance(text, Undefined):
        return '-'
    return {
        'str': 'string',
        'bool': 'boolean',
        'int': 'integer',
        'dict': 'dictionary',
    }.get(text, text)
|
166 |
+ |
|
167 |
+ |
|
168 |
+test_list = partial(is_sequence, include_strings=False) |
|
169 |
+ |
|
170 |
+ |
|
171 |
def normalize_options(value):
    """Normalize boolean option value.

    If the option is declared as type 'bool' and carries a default, coerce
    that default to a real boolean; non-coercible defaults are left as-is.
    """
    if value.get('type') != 'bool' or 'default' not in value:
        return value

    try:
        value['default'] = boolean(value['default'], strict=True)
    except TypeError:
        # default is not something boolean() understands; keep original
        pass
    return value
|
180 |
+ |
|
181 |
+ |
|
182 |
def write_data(text, output_dir, outputname, module=None):
    ''' dumps module output to a file or the screen, as requested

    :arg text: rendered document contents
    :arg output_dir: destination directory; when None, text is printed instead
    :arg outputname: output filename, may contain a %s placeholder for *module*
    :arg module: module name substituted into outputname and used in messages
    '''
    if output_dir is not None:
        if module:
            outputname = outputname % module

        if not os.path.exists(output_dir):
            os.makedirs(output_dir)
        fname = os.path.join(output_dir, outputname)
        # source filenames carry .py; the doc file must not
        fname = fname.replace(".py", "")

        try:
            # only rewrite the file when content changed, preserving timestamps
            updated = update_file_if_different(fname, to_bytes(text))
        except Exception as e:
            display.display("while rendering %s, an error occured: %s" % (module, e))
            raise
        if updated:
            display.display("rendering: %s" % module)
    else:
        print(text)
|
203 |
+ |
|
204 |
+ |
|
205 |
+IS_STDOUT_TTY = sys.stdout.isatty() |
|
206 |
+ |
|
207 |
+ |
|
208 |
def show_progress(progress):
    '''Show a little process indicator.

    Writes one spinner frame (-, /, |, \\) selected by *progress* modulo 4,
    only when stdout is a tty so redirected output stays clean.
    '''
    if IS_STDOUT_TTY:
        sys.stdout.write('\r%s\r' % ("-/|\\"[progress % 4]))
        sys.stdout.flush()
|
213 |
+ |
|
214 |
+ |
|
215 |
+def get_plugin_info(module_dir, limit_to=None, verbose=False): |
|
216 |
+ ''' |
|
217 |
+ Returns information about plugins and the categories that they belong to |
|
218 |
+ |
|
219 |
+ :arg module_dir: file system path to the top of the plugin directory |
|
220 |
+ :kwarg limit_to: If given, this is a list of plugin names to |
|
221 |
+ generate information for. All other plugins will be ignored. |
|
222 |
+ :returns: Tuple of two dicts containing module_info, categories, and |
|
223 |
+ aliases and a set listing deprecated modules: |
|
224 |
+ |
|
225 |
+ :module_info: mapping of module names to information about them. The fields of the dict are: |
|
226 |
+ |
|
227 |
+ :path: filesystem path to the module |
|
228 |
+ :deprecated: boolean. True means the module is deprecated otherwise not. |
|
229 |
+ :aliases: set of aliases to this module name |
|
230 |
+ :metadata: The modules metadata (as recorded in the module) |
|
231 |
+ :doc: The documentation structure for the module |
|
232 |
+ :seealso: The list of dictionaries with references to related subjects |
|
233 |
+ :examples: The module's examples |
|
234 |
+ :returndocs: The module's returndocs |
|
235 |
+ |
|
236 |
+ :categories: maps category names to a dict. The dict contains at |
|
237 |
+ least one key, '_modules' which contains a list of module names in |
|
238 |
+ that category. Any other keys in the dict are subcategories with |
|
239 |
+ the same structure. |
|
240 |
+ |
|
241 |
+ ''' |
|
242 |
+ |
|
243 |
+ categories = dict() |
|
244 |
+ module_info = defaultdict(dict) |
|
245 |
+ |
|
246 |
+ # * windows powershell modules have documentation stubs in python docstring |
|
247 |
+ # format (they are not executed) so skip the ps1 format files |
|
248 |
+ # * One glob level for every module level that we're going to traverse |
|
249 |
+ files = ( |
|
250 |
+ glob.glob("%s/*.py" % module_dir) + |
|
251 |
+ glob.glob("%s/*/*.py" % module_dir) + |
|
252 |
+ glob.glob("%s/*/*/*.py" % module_dir) + |
|
253 |
+ glob.glob("%s/*/*/*/*.py" % module_dir) |
|
254 |
+ ) |
|
255 |
+ |
|
256 |
+ module_index = 0 |
|
257 |
+ for module_path in files: |
|
258 |
+ # Do not list __init__.py files |
|
259 |
+ if module_path.endswith('__init__.py'): |
|
260 |
+ continue |
|
261 |
+ |
|
262 |
+ # Do not list blacklisted modules |
|
263 |
+ module = os.path.splitext(os.path.basename(module_path))[0] |
|
264 |
+ if module in plugin_docs.BLACKLIST['MODULE'] or module == 'base': |
|
265 |
+ continue |
|
266 |
+ |
|
267 |
+ # If requested, limit module documentation building only to passed-in |
|
268 |
+ # modules. |
|
269 |
+ if limit_to is not None and module.lower() not in limit_to: |
|
270 |
+ continue |
|
271 |
+ |
|
272 |
+ deprecated = False |
|
273 |
+ if module.startswith("_"): |
|
274 |
+ if os.path.islink(module_path): |
|
275 |
+ # Handle aliases |
|
276 |
+ source = os.path.splitext(os.path.basename(os.path.realpath(module_path)))[0] |
|
277 |
+ module = module.replace("_", "", 1) |
|
278 |
+ if source.startswith("_"): |
|
279 |
+ source = source.replace("_", "", 1) |
|
280 |
+ aliases = module_info[source].get('aliases', set()) |
|
281 |
+ aliases.add(module) |
|
282 |
+ aliases_deprecated = module_info[source].get('aliases_deprecated', set()) |
|
283 |
+ aliases_deprecated.add(module) |
|
284 |
+ # In case we just created this via get()'s fallback |
|
285 |
+ module_info[source]['aliases'] = aliases |
|
286 |
+ module_info[source]['aliases_deprecated'] = aliases_deprecated |
|
287 |
+ continue |
|
288 |
+ else: |
|
289 |
+ # Handle deprecations |
|
290 |
+ module = module.replace("_", "", 1) |
|
291 |
+ deprecated = True |
|
292 |
+ |
|
293 |
+ # |
|
294 |
+ # Regular module to process |
|
295 |
+ # |
|
296 |
+ |
|
297 |
+ module_index += 1 |
|
298 |
+ show_progress(module_index) |
|
299 |
+ |
|
300 |
+ # use ansible core library to parse out doc metadata YAML and plaintext examples |
|
301 |
+ doc, examples, returndocs, metadata = plugin_docs.get_docstring(module_path, fragment_loader, verbose=verbose) |
|
302 |
+ |
|
303 |
+ if metadata and 'removed' in metadata.get('status', []): |
|
304 |
+ continue |
|
305 |
+ |
|
306 |
+ category = categories |
|
307 |
+ |
|
308 |
+ # Start at the second directory because we don't want the "vendor" |
|
309 |
+ mod_path_only = os.path.dirname(module_path[len(module_dir):]) |
|
310 |
+ |
|
311 |
+ # Find the subcategory for each module |
|
312 |
+ relative_dir = mod_path_only.split('/')[1] |
|
313 |
+ sub_category = mod_path_only[len(relative_dir) + 2:] |
|
314 |
+ |
|
315 |
+ primary_category = '' |
|
316 |
+ module_categories = [] |
|
317 |
+ # build up the categories that this module belongs to |
|
318 |
+ for new_cat in mod_path_only.split('/')[1:]: |
|
319 |
+ if new_cat not in category: |
|
320 |
+ category[new_cat] = dict() |
|
321 |
+ category[new_cat]['_modules'] = [] |
|
322 |
+ module_categories.append(new_cat) |
|
323 |
+ category = category[new_cat] |
|
324 |
+ |
|
325 |
+ category['_modules'].append(module) |
|
326 |
+ |
|
327 |
+ # the category we will use in links (so list_of_all_plugins can point to plugins/action_plugins/*)
|
328 |
+ if module_categories: |
|
329 |
+ primary_category = module_categories[0] |
|
330 |
+ |
|
331 |
+ if not doc: |
|
332 |
+ display.error("*** ERROR: DOCUMENTATION section missing for %s. ***" % module_path) |
|
333 |
+ continue |
|
334 |
+ |
|
335 |
+ if 'options' in doc and doc['options'] is None: |
|
336 |
+ display.error("*** ERROR: DOCUMENTATION.options must be a dictionary/hash when used. ***") |
|
337 |
+ pos = getattr(doc, "ansible_pos", None) |
|
338 |
+ if pos is not None: |
|
339 |
+ display.error("Module position: %s, %d, %d" % doc.ansible_pos) |
|
340 |
+ doc['options'] = dict() |
|
341 |
+ |
|
342 |
+ for key, opt in doc.get('options', {}).items(): |
|
343 |
+ doc['options'][key] = normalize_options(opt) |
|
344 |
+ |
|
345 |
+ # save all the information |
|
346 |
+ module_info[module] = {'path': module_path, |
|
347 |
+ 'source': os.path.relpath(module_path, module_dir), |
|
348 |
+ 'deprecated': deprecated, |
|
349 |
+ 'aliases': module_info[module].get('aliases', set()), |
|
350 |
+ 'aliases_deprecated': module_info[module].get('aliases_deprecated', set()), |
|
351 |
+ 'metadata': metadata, |
|
352 |
+ 'doc': doc, |
|
353 |
+ 'examples': examples, |
|
354 |
+ 'returndocs': returndocs, |
|
355 |
+ 'categories': module_categories, |
|
356 |
+ 'primary_category': primary_category, |
|
357 |
+ 'sub_category': sub_category, |
|
358 |
+ } |
|
359 |
+ |
|
360 |
+ # keep module tests out of becoming module docs |
|
361 |
+ if 'test' in categories: |
|
362 |
+ del categories['test'] |
|
363 |
+ |
|
364 |
+ return module_info, categories |
|
365 |
+ |
|
366 |
+ |
|
367 |
+def jinja2_environment(template_dir, typ, plugin_type): |
|
368 |
+ |
|
369 |
+ env = Environment(loader=FileSystemLoader(template_dir), |
|
370 |
+ variable_start_string="@{", |
|
371 |
+ variable_end_string="}@", |
|
372 |
+ trim_blocks=True) |
|
373 |
+ env.globals['xline'] = rst_xline |
|
374 |
+ |
|
375 |
+ # Can be removed (and template switched to use namespace) when we no longer need to build |
|
376 |
+ # with <Jinja-2.10 |
|
377 |
+ env.globals['to_kludge_ns'] = to_kludge_ns |
|
378 |
+ env.globals['from_kludge_ns'] = from_kludge_ns |
|
379 |
+ if 'max' not in env.filters: |
|
380 |
+ # Jinja < 2.10 |
|
381 |
+ env.filters['max'] = do_max |
|
382 |
+ |
|
383 |
+ if 'tojson' not in env.filters: |
|
384 |
+ # Jinja < 2.9 |
|
385 |
+ env.filters['tojson'] = json.dumps |
|
386 |
+ |
|
387 |
+ templates = {} |
|
388 |
+ if typ == 'rst': |
|
389 |
+ env.filters['rst_ify'] = rst_ify |
|
390 |
+ env.filters['html_ify'] = html_ify |
|
391 |
+ env.filters['fmt'] = rst_fmt |
|
392 |
+ env.filters['xline'] = rst_xline |
|
393 |
+ env.filters['documented_type'] = documented_type |
|
394 |
+ env.tests['list'] = test_list |
|
395 |
+ templates['plugin'] = env.get_template('plugin.rst.j2') |
|
396 |
+ templates['plugin_deprecation_stub'] = env.get_template('plugin_deprecation_stub.rst.j2') |
|
397 |
+ |
|
398 |
+ if plugin_type == 'module': |
|
399 |
+ name = 'modules' |
|
400 |
+ else: |
|
401 |
+ name = 'plugins' |
|
402 |
+ |
|
403 |
+ templates['category_list'] = env.get_template('%s_by_category.rst.j2' % name) |
|
404 |
+ templates['support_list'] = env.get_template('%s_by_support.rst.j2' % name) |
|
405 |
+ templates['list_of_CATEGORY_modules'] = env.get_template('list_of_CATEGORY_%s.rst.j2' % name) |
|
406 |
+ else: |
|
407 |
+ raise Exception("Unsupported format type: %s" % typ) |
|
408 |
+ |
|
409 |
+ return templates |
|
410 |
+ |
|
411 |
+ |
|
412 |
+def too_old(added): |
|
413 |
+ if not added: |
|
414 |
+ return False |
|
415 |
+ try: |
|
416 |
+ added_tokens = str(added).split(".") |
|
417 |
+ readded = added_tokens[0] + "." + added_tokens[1] |
|
418 |
+ added_float = float(readded) |
|
419 |
+ except ValueError as e: |
|
420 |
+ warnings.warn("Could not parse %s: %s" % (added, str(e))) |
|
421 |
+ return False |
|
422 |
+ return added_float < TOO_OLD_TO_BE_NOTABLE |
|
423 |
+ |
|
424 |
+ |
|
425 |
+def process_plugins(module_map, templates, outputname, output_dir, ansible_version, plugin_type): |
|
426 |
+ for module_index, module in enumerate(module_map): |
|
427 |
+ |
|
428 |
+ show_progress(module_index) |
|
429 |
+ |
|
430 |
+ fname = module_map[module]['path'] |
|
431 |
+ display.vvvvv(pp.pformat(('process_plugins info: ', module_map[module]))) |
|
432 |
+ |
|
433 |
+ # crash if module is missing documentation and not explicitly hidden from docs index |
|
434 |
+ if module_map[module]['doc'] is None: |
|
435 |
+ display.error("%s MISSING DOCUMENTATION" % (fname,)) |
|
436 |
+ _doc = {plugin_type: module, |
|
437 |
+ 'version_added': '2.4', |
|
438 |
+ 'filename': fname} |
|
439 |
+ module_map[module]['doc'] = _doc |
|
440 |
+ # continue |
|
441 |
+ |
|
442 |
+ # Going to reference this heavily so make a short name to reference it by |
|
443 |
+ doc = module_map[module]['doc'] |
|
444 |
+ display.vvvvv(pp.pformat(('process_plugins doc: ', doc))) |
|
445 |
+ |
|
446 |
+ # add some defaults for plugins that dont have most of the info |
|
447 |
+ doc['module'] = doc.get('module', module) |
|
448 |
+ doc['version_added'] = doc.get('version_added', 'historical') |
|
449 |
+ |
|
450 |
+ doc['plugin_type'] = plugin_type |
|
451 |
+ |
|
452 |
+ if module_map[module]['deprecated'] and 'deprecated' not in doc: |
|
453 |
+ display.warning("%s PLUGIN MISSING DEPRECATION DOCUMENTATION: %s" % (fname, 'deprecated')) |
|
454 |
+ |
|
455 |
+ required_fields = ('short_description',) |
|
456 |
+ for field in required_fields: |
|
457 |
+ if field not in doc: |
|
458 |
+ display.warning("%s PLUGIN MISSING field '%s'" % (fname, field)) |
|
459 |
+ |
|
460 |
+ not_nullable_fields = ('short_description',) |
|
461 |
+ for field in not_nullable_fields: |
|
462 |
+ if field in doc and doc[field] in (None, ''): |
|
463 |
+ print("%s: WARNING: MODULE field '%s' DOCUMENTATION is null/empty value=%s" % (fname, field, doc[field])) |
|
464 |
+ |
|
465 |
+ if 'version_added' not in doc: |
|
466 |
+ display.error("*** ERROR: missing version_added in: %s ***\n" % module) |
|
467 |
+ |
|
468 |
+ # |
|
469 |
+ # The present template gets everything from doc so we spend most of this |
|
470 |
+ # function moving data into doc for the template to reference |
|
471 |
+ # |
|
472 |
+ |
|
473 |
+ if module_map[module]['aliases']: |
|
474 |
+ doc['aliases'] = module_map[module]['aliases'] |
|
475 |
+ |
|
476 |
+ # don't show version added information if it's too old to be called out |
|
477 |
+ added = 0 |
|
478 |
+ if doc['version_added'] == 'historical': |
|
479 |
+ del doc['version_added'] |
|
480 |
+ else: |
|
481 |
+ added = doc['version_added'] |
|
482 |
+ |
|
483 |
+ # Strip old version_added for the module |
|
484 |
+ if too_old(added): |
|
485 |
+ del doc['version_added'] |
|
486 |
+ |
|
487 |
+ option_names = [] |
|
488 |
+ |
|
489 |
+ if 'options' in doc and doc['options']: |
|
490 |
+ for (k, v) in iteritems(doc['options']): |
|
491 |
+ # Error out if there's no description |
|
492 |
+ if 'description' not in doc['options'][k]: |
|
493 |
+ raise AnsibleError("Missing required description for parameter '%s' in '%s' " % (k, module)) |
|
494 |
+ |
|
495 |
+ # Error out if required isn't a boolean (people have been putting |
|
496 |
+ # information on when something is required in here. Those need |
|
497 |
+ # to go in the description instead). |
|
498 |
+ required_value = doc['options'][k].get('required', False) |
|
499 |
+ if not isinstance(required_value, bool): |
|
500 |
+ raise AnsibleError("Invalid required value '%s' for parameter '%s' in '%s' (must be truthy)" % (required_value, k, module)) |
|
501 |
+ |
|
502 |
+ # Strip old version_added information for options |
|
503 |
+ if 'version_added' in doc['options'][k] and too_old(doc['options'][k]['version_added']): |
|
504 |
+ del doc['options'][k]['version_added'] |
|
505 |
+ |
|
506 |
+ # Make sure description is a list of lines for later formatting |
|
507 |
+ if not isinstance(doc['options'][k]['description'], list): |
|
508 |
+ doc['options'][k]['description'] = [doc['options'][k]['description']] |
|
509 |
+ |
|
510 |
+ option_names.append(k) |
|
511 |
+ |
|
512 |
+ option_names.sort() |
|
513 |
+ |
|
514 |
+ doc['option_keys'] = option_names |
|
515 |
+ doc['filename'] = fname |
|
516 |
+ doc['source'] = module_map[module]['source'] |
|
517 |
+ doc['docuri'] = doc['module'].replace('_', '-') |
|
518 |
+ doc['now_date'] = datetime.date.today().strftime('%Y-%m-%d') |
|
519 |
+ doc['ansible_version'] = ansible_version |
|
520 |
+ |
|
521 |
+ # check the 'deprecated' field in doc. We expect a dict potentially with 'why', 'version', and 'alternative' fields |
|
522 |
+ # examples = module_map[module]['examples'] |
|
523 |
+ # print('\n\n%s: type of examples: %s\n' % (module, type(examples))) |
|
524 |
+ # if examples and not isinstance(examples, (str, unicode, list)): |
|
525 |
+ # raise TypeError('module %s examples is wrong type (%s): %s' % (module, type(examples), examples)) |
|
526 |
+ |
|
527 |
+ # use 'examples' for 'plainexamples' if 'examples' is a string |
|
528 |
+ if isinstance(module_map[module]['examples'], string_types): |
|
529 |
+ doc['plainexamples'] = module_map[module]['examples'] # plain text |
|
530 |
+ else: |
|
531 |
+ doc['plainexamples'] = '' |
|
532 |
+ |
|
533 |
+ doc['metadata'] = module_map[module]['metadata'] |
|
534 |
+ |
|
535 |
+ display.vvvvv(pp.pformat(module_map[module])) |
|
536 |
+ if module_map[module]['returndocs']: |
|
537 |
+ try: |
|
538 |
+ doc['returndocs'] = yaml.safe_load(module_map[module]['returndocs']) |
|
539 |
+ except Exception as e: |
|
540 |
+ print("%s:%s:yaml error:%s:returndocs=%s" % (fname, module, e, module_map[module]['returndocs'])) |
|
541 |
+ doc['returndocs'] = None |
|
542 |
+ else: |
|
543 |
+ doc['returndocs'] = None |
|
544 |
+ |
|
545 |
+ doc['author'] = doc.get('author', ['UNKNOWN']) |
|
546 |
+ if isinstance(doc['author'], string_types): |
|
547 |
+ doc['author'] = [doc['author']] |
|
548 |
+ |
|
549 |
+ display.v('about to template %s' % module) |
|
550 |
+ display.vvvvv(pp.pformat(doc)) |
|
551 |
+ try: |
|
552 |
+ text = templates['plugin'].render(doc) |
|
553 |
+ except Exception as e: |
|
554 |
+ display.warning(msg="Could not parse %s due to %s" % (module, e)) |
|
555 |
+ continue |
|
556 |
+ |
|
557 |
+ if LooseVersion(jinja2.__version__) < LooseVersion('2.10'): |
|
558 |
+ # jinja2 < 2.10's indent filter indents blank lines. Cleanup |
|
559 |
+ text = re.sub(' +\n', '\n', text) |
|
560 |
+ |
|
561 |
+ write_data(text, output_dir, outputname, module) |
|
562 |
+ |
|
563 |
+ # Create deprecation stub pages for deprecated aliases |
|
564 |
+ if module_map[module]['aliases']: |
|
565 |
+ for alias in module_map[module]['aliases']: |
|
566 |
+ if alias in module_map[module]['aliases_deprecated']: |
|
567 |
+ doc['alias'] = alias |
|
568 |
+ |
|
569 |
+ display.v('about to template %s (deprecation alias %s)' % (module, alias)) |
|
570 |
+ display.vvvvv(pp.pformat(doc)) |
|
571 |
+ try: |
|
572 |
+ text = templates['plugin_deprecation_stub'].render(doc) |
|
573 |
+ except Exception as e: |
|
574 |
+ display.warning(msg="Could not parse %s (deprecation alias %s) due to %s" % (module, alias, e)) |
|
575 |
+ continue |
|
576 |
+ |
|
577 |
+ if LooseVersion(jinja2.__version__) < LooseVersion('2.10'): |
|
578 |
+ # jinja2 < 2.10's indent filter indents blank lines. Cleanup |
|
579 |
+ text = re.sub(' +\n', '\n', text) |
|
580 |
+ |
|
581 |
+ write_data(text, output_dir, outputname, alias) |
|
582 |
+ |
|
583 |
+ |
|
584 |
+def process_categories(plugin_info, categories, templates, output_dir, output_name, plugin_type): |
|
585 |
+ # For some reason, this line is changing plugin_info: |
|
586 |
+ # text = templates['list_of_CATEGORY_modules'].render(template_data) |
|
587 |
+ # To avoid that, make a deepcopy of the data. |
|
588 |
+ # We should track that down and fix it at some point in the future. |
|
589 |
+ plugin_info = deepcopy(plugin_info) |
|
590 |
+ for category in sorted(categories.keys()): |
|
591 |
+ module_map = categories[category] |
|
592 |
+ category_filename = output_name % category |
|
593 |
+ |
|
594 |
+ display.display("*** recording category %s in %s ***" % (category, category_filename)) |
|
595 |
+ |
|
596 |
+ # start a new category file |
|
597 |
+ |
|
598 |
+ category_name = category.replace("_", " ") |
|
599 |
+ category_title = category_name.title() |
|
600 |
+ |
|
601 |
+ subcategories = dict((k, v) for k, v in module_map.items() if k != '_modules') |
|
602 |
+ template_data = {'title': category_title, |
|
603 |
+ 'category_name': category_name, |
|
604 |
+ 'category': module_map, |
|
605 |
+ 'subcategories': subcategories, |
|
606 |
+ 'module_info': plugin_info, |
|
607 |
+ 'plugin_type': plugin_type |
|
608 |
+ } |
|
609 |
+ |
|
610 |
+ text = templates['list_of_CATEGORY_modules'].render(template_data) |
|
611 |
+ write_data(text, output_dir, category_filename) |
|
612 |
+ |
|
613 |
+ |
|
614 |
+def process_support_levels(plugin_info, categories, templates, output_dir, plugin_type): |
|
615 |
+ supported_by = {'Ansible Core Team': {'slug': 'core_supported', |
|
616 |
+ 'modules': [], |
|
617 |
+ 'output': 'core_maintained.rst', |
|
618 |
+ 'blurb': "These are :doc:`modules maintained by the" |
|
619 |
+ " Ansible Core Team<core_maintained>` and will always ship" |
|
620 |
+ " with Ansible itself."}, |
|
621 |
+ 'Ansible Network Team': {'slug': 'network_supported', |
|
622 |
+ 'modules': [], |
|
623 |
+ 'output': 'network_maintained.rst', |
|
624 |
+ 'blurb': "These are :doc:`modules maintained by the" |
|
625 |
+ " Ansible Network Team<network_maintained>` in" |
|
626 |
+ " a relationship similar to how the Ansible Core Team" |
|
627 |
+ " maintains the Core modules."}, |
|
628 |
+ 'Ansible Partners': {'slug': 'certified_supported', |
|
629 |
+ 'modules': [], |
|
630 |
+ 'output': 'partner_maintained.rst', |
|
631 |
+ 'blurb': """ |
|
632 |
+Some examples of :doc:`Certified Modules<partner_maintained>` are those submitted by other |
|
633 |
+companies. Maintainers of these types of modules must watch for any issues reported or pull requests |
|
634 |
+raised against the module. |
|
635 |
+ |
|
636 |
+The Ansible Core Team will review all modules becoming certified. Core committers will review |
|
637 |
+proposed changes to existing Certified Modules once the community maintainers of the module have |
|
638 |
+approved the changes. Core committers will also ensure that any issues that arise due to Ansible |
|
639 |
+engine changes will be remediated. Also, it is strongly recommended (but not presently required) |
|
640 |
+for these types of modules to have unit tests. |
|
641 |
+ |
|
642 |
+These modules are currently shipped with Ansible, but might be shipped separately in the future. |
|
643 |
+"""}, |
|
644 |
+ 'Ansible Community': {'slug': 'community_supported', |
|
645 |
+ 'modules': [], |
|
646 |
+ 'output': 'community_maintained.rst', |
|
647 |
+ 'blurb': """ |
|
648 |
+These are :doc:`modules maintained by the Ansible Community<community_maintained>`. They **are |
|
649 |
+not** supported by the Ansible Core Team or by companies/partners associated to the module. |
|
650 |
+ |
|
651 |
+They are still fully usable, but the response rate to issues is purely up to the community. Best |
|
652 |
+effort support will be provided but is not covered under any support contracts. |
|
653 |
+ |
|
654 |
+These modules are currently shipped with Ansible, but will most likely be shipped separately in the future. |
|
655 |
+ """}, |
|
656 |
+ } |
|
657 |
+ |
|
658 |
+ # only gen support pages for modules for now, need to split and namespace templates and generated docs |
|
659 |
+ if plugin_type == 'plugins': |
|
660 |
+ return |
|
661 |
+ # Separate the modules by support_level |
|
662 |
+ for module, info in plugin_info.items(): |
|
663 |
+ if not info.get('metadata', None): |
|
664 |
+ display.warning('no metadata for %s' % module) |
|
665 |
+ continue |
|
666 |
+ if info['metadata']['supported_by'] == 'core': |
|
667 |
+ supported_by['Ansible Core Team']['modules'].append(module) |
|
668 |
+ elif info['metadata']['supported_by'] == 'network': |
|
669 |
+ supported_by['Ansible Network Team']['modules'].append(module) |
|
670 |
+ elif info['metadata']['supported_by'] == 'certified': |
|
671 |
+ supported_by['Ansible Partners']['modules'].append(module) |
|
672 |
+ elif info['metadata']['supported_by'] == 'community': |
|
673 |
+ supported_by['Ansible Community']['modules'].append(module) |
|
674 |
+ else: |
|
675 |
+ raise AnsibleError('Unknown supported_by value: %s' % info['metadata']['supported_by']) |
|
676 |
+ |
|
677 |
+ # Render the module lists based on category and subcategory |
|
678 |
+ for maintainers, data in supported_by.items(): |
|
679 |
+ subcategories = {} |
|
680 |
+ subcategories[''] = {} |
|
681 |
+ for module in data['modules']: |
|
682 |
+ new_cat = plugin_info[module]['sub_category'] |
|
683 |
+ category = plugin_info[module]['primary_category'] |
|
684 |
+ if category not in subcategories: |
|
685 |
+ subcategories[category] = {} |
|
686 |
+ subcategories[category][''] = {} |
|
687 |
+ subcategories[category]['']['_modules'] = [] |
|
688 |
+ if new_cat not in subcategories[category]: |
|
689 |
+ subcategories[category][new_cat] = {} |
|
690 |
+ subcategories[category][new_cat]['_modules'] = [] |
|
691 |
+ subcategories[category][new_cat]['_modules'].append(module) |
|
692 |
+ |
|
693 |
+ template_data = {'maintainers': maintainers, |
|
694 |
+ 'subcategories': subcategories, |
|
695 |
+ 'modules': data['modules'], |
|
696 |
+ 'slug': data['slug'], |
|
697 |
+ 'module_info': plugin_info, |
|
698 |
+ 'plugin_type': plugin_type |
|
699 |
+ } |
|
700 |
+ text = templates['support_list'].render(template_data) |
|
701 |
+ write_data(text, output_dir, data['output']) |
|
702 |
+ |
|
703 |
+ |
|
704 |
+def validate_options(options): |
|
705 |
+ ''' validate option parser options ''' |
|
706 |
+ |
|
707 |
+ if not options.module_dir: |
|
708 |
+ sys.exit("--module-dir is required", file=sys.stderr) |
|
709 |
+ if not os.path.exists(options.module_dir): |
|
710 |
+ sys.exit("--module-dir does not exist: %s" % options.module_dir, file=sys.stderr) |
|
711 |
+ if not options.template_dir: |
|
712 |
+ sys.exit("--template-dir must be specified") |
|
713 |
+ |
|
714 |
+ |
|
715 |
+class DocumentPlugins(Command): |
|
716 |
+ name = 'document-plugins' |
|
717 |
+ |
|
718 |
+ @classmethod |
|
719 |
+ def init_parser(cls, add_parser): |
|
720 |
+ parser = add_parser(cls.name, description='Generate module documentation from metadata') |
|
721 |
+ |
|
722 |
+ parser.add_argument("-A", "--ansible-version", action="store", dest="ansible_version", |
|
723 |
+ default="unknown", help="Ansible version number") |
|
724 |
+ parser.add_argument("-M", "--module-dir", action="store", dest="module_dir", |
|
725 |
+ default=MODULEDIR, help="Ansible library path") |
|
726 |
+ parser.add_argument("-P", "--plugin-type", action="store", dest="plugin_type", |
|
727 |
+ default='module', help="The type of plugin (module, lookup, etc)") |
|
728 |
+ parser.add_argument("-T", "--template-dir", action="append", dest="template_dir", |
|
729 |
+ help="directory containing Jinja2 templates") |
|
730 |
+ parser.add_argument("-t", "--type", action='store', dest='type', choices=['rst'], |
|
731 |
+ default='rst', help="Document type") |
|
732 |
+ parser.add_argument("-o", "--output-dir", action="store", dest="output_dir", default=None, |
|
733 |
+ help="Output directory for module files") |
|
734 |
+ parser.add_argument("-I", "--includes-file", action="store", dest="includes_file", |
|
735 |
+ default=None, help="Create a file containing list of processed modules") |
|
736 |
+ parser.add_argument("-l", "--limit-to-modules", '--limit-to', action="store", |
|
737 |
+ dest="limit_to", default=None, help="Limit building module documentation" |
|
738 |
+ " to comma-separated list of plugins. Specify non-existing plugin name" |
|
739 |
+ " for no plugins.") |
|
740 |
+ parser.add_argument('-V', action='version', help='Show version number and exit') |
|
741 |
+ parser.add_argument('-v', '--verbose', dest='verbosity', default=0, action="count", |
|
742 |
+ help="verbose mode (increase number of 'v's for more)") |
|
743 |
+ |
|
744 |
+ @staticmethod |
|
745 |
+ def main(args): |
|
746 |
+ if not args.template_dir: |
|
747 |
+ args.template_dir = ["hacking/templates"] |
|
748 |
+ validate_options(args) |
|
749 |
+ display.verbosity = args.verbosity |
|
750 |
+ plugin_type = args.plugin_type |
|
751 |
+ |
|
752 |
+ display.display("Evaluating %s files..." % plugin_type) |
|
753 |
+ |
|
754 |
+ # prep templating |
|
755 |
+ templates = jinja2_environment(args.template_dir, args.type, plugin_type) |
|
756 |
+ |
|
757 |
+ # set file/directory structure |
|
758 |
+ if plugin_type == 'module': |
|
759 |
+ # trim trailing s off of plugin_type for plugin_type=='modules'. ie 'copy_module.rst' |
|
760 |
+ outputname = '%s_' + '%s.rst' % plugin_type |
|
761 |
+ output_dir = args.output_dir |
|
762 |
+ else: |
|
763 |
+ # for plugins, just use 'ssh.rst' vs 'ssh_module.rst' |
|
764 |
+ outputname = '%s.rst' |
|
765 |
+ output_dir = '%s/plugins/%s' % (args.output_dir, plugin_type) |
|
766 |
+ |
|
767 |
+ display.vv('output name: %s' % outputname) |
|
768 |
+ display.vv('output dir: %s' % output_dir) |
|
769 |
+ |
|
770 |
+ # Convert passed-in limit_to to None or list of modules. |
|
771 |
+ if args.limit_to is not None: |
|
772 |
+ args.limit_to = [s.lower() for s in args.limit_to.split(",")] |
|
773 |
+ |
|
774 |
+ plugin_info, categories = get_plugin_info(args.module_dir, limit_to=args.limit_to, verbose=(args.verbosity > 0)) |
|
775 |
+ |
|
776 |
+ categories['all'] = {'_modules': plugin_info.keys()} |
|
777 |
+ |
|
778 |
+ if display.verbosity >= 3: |
|
779 |
+ display.vvv(pp.pformat(categories)) |
|
780 |
+ if display.verbosity >= 5: |
|
781 |
+ display.vvvvv(pp.pformat(plugin_info)) |
|
782 |
+ |
|
783 |
+ # Transform the data |
|
784 |
+ if args.type == 'rst': |
|
785 |
+ display.v('Generating rst') |
|
786 |
+ for key, record in plugin_info.items(): |
|
787 |
+ display.vv(key) |
|
788 |
+ if display.verbosity >= 5: |
|
789 |
+ display.vvvvv(pp.pformat(('record', record))) |
|
790 |
+ if record.get('doc', None): |
|
791 |
+ short_desc = record['doc']['short_description'].rstrip('.') |
|
792 |
+ if short_desc is None: |
|
793 |
+ display.warning('short_description for %s is None' % key) |
|
794 |
+ short_desc = '' |
|
795 |
+ record['doc']['short_description'] = rst_ify(short_desc) |
|
796 |
+ |
|
797 |
+ if plugin_type == 'module': |
|
798 |
+ display.v('Generating Categories') |
|
799 |
+ # Write module master category list |
|
800 |
+ category_list_text = templates['category_list'].render(categories=sorted(categories.keys())) |
|
801 |
+ category_index_name = '%ss_by_category.rst' % plugin_type |
|
802 |
+ write_data(category_list_text, output_dir, category_index_name) |
|
803 |
+ |
|
804 |
+ # Render all the individual plugin pages |
|
805 |
+ display.v('Generating plugin pages') |
|
806 |
+ process_plugins(plugin_info, templates, outputname, output_dir, args.ansible_version, plugin_type) |
|
807 |
+ |
|
808 |
+ # Render all the categories for modules |
|
809 |
+ if plugin_type == 'module': |
|
810 |
+ display.v('Generating Category lists') |
|
811 |
+ category_list_name_template = 'list_of_%s_' + '%ss.rst' % plugin_type |
|
812 |
+ process_categories(plugin_info, categories, templates, output_dir, category_list_name_template, plugin_type) |
|
813 |
+ |
|
814 |
+ # Render all the categories for modules |
|
815 |
+ process_support_levels(plugin_info, categories, templates, output_dir, plugin_type) |
|
816 |
+ |
|
817 |
+ return 0 |
... | ... |
@@ -121,7 +121,7 @@ def generate_porting_guide(version): |
121 | 121 |
|
122 | 122 |
|
123 | 123 |
def write_guide(version, guide_content): |
124 |
- filename = f'porting_guide_{version}.rst' |
|
124 |
+ filename = 'porting_guide_{0}.rst'.format(version) |
|
125 | 125 |
with open(filename, 'w') as out_file: |
126 | 126 |
out_file.write(guide_content) |
127 | 127 |
|
... | ... |
@@ -7,148 +7,13 @@ from __future__ import (absolute_import, division, print_function) |
7 | 7 |
__metaclass__ = type |
8 | 8 |
|
9 | 9 |
|
10 |
-import argparse |
|
11 |
-import asyncio |
|
12 |
-import datetime |
|
13 |
-import hashlib |
|
14 |
-import os.path |
|
15 | 10 |
import sys |
16 | 11 |
from collections import UserString |
17 | 12 |
from distutils.version import LooseVersion |
18 | 13 |
|
19 |
-import aiohttp |
|
20 |
-from jinja2 import Environment, DictLoader |
|
21 |
- |
|
22 | 14 |
# Pylint doesn't understand Python3 namespace modules. |
23 | 15 |
from ..commands import Command # pylint: disable=relative-beyond-top-level |
24 |
- |
|
25 |
- |
|
26 |
-# pylint: disable= |
|
27 |
-VERSION_FRAGMENT = """ |
|
28 |
-{%- if versions | length > 1 %} |
|
29 |
- {% for version in versions %} |
|
30 |
- {% if loop.last %}and {{ version }}{% else %} |
|
31 |
- {% if versions | length == 2 %}{{ version }} {% else %}{{ version }}, {% endif -%} |
|
32 |
- {% endif -%} |
|
33 |
- {% endfor -%} |
|
34 |
-{%- else %}{{ versions[0] }}{% endif -%} |
|
35 |
-""" |
|
36 |
- |
|
37 |
-LONG_TEMPLATE = """ |
|
38 |
-{% set plural = False if versions | length == 1 else True %} |
|
39 |
-{% set latest_ver = (versions | sort(attribute='ver_obj'))[-1] %} |
|
40 |
- |
|
41 |
-To: ansible-devel@googlegroups.com, ansible-project@googlegroups.com, ansible-announce@googlegroups.com |
|
42 |
-Subject: New Ansible release{% if plural %}s{% endif %} {{ version_str }} |
|
43 |
- |
|
44 |
-{% filter wordwrap %} |
|
45 |
-Hi all- we're happy to announce that the general release of Ansible {{ version_str }}{% if plural %} are{%- else %} is{%- endif %} now available! |
|
46 |
-{% endfilter %} |
|
47 |
- |
|
48 |
- |
|
49 |
- |
|
50 |
-How do you get it? |
|
51 |
- |
|
52 |
-{% for version in versions %} |
|
53 |
-$ pip install ansible=={{ version }} --user |
|
54 |
-{% if not loop.last %} |
|
55 |
-or |
|
56 |
-{% endif %} |
|
57 |
-{% endfor %} |
|
58 |
- |
|
59 |
-The tar.gz of the release{% if plural %}s{% endif %} can be found here: |
|
60 |
- |
|
61 |
-{% for version in versions %} |
|
62 |
-* {{ version }} |
|
63 |
- https://releases.ansible.com/ansible/ansible-{{ version }}.tar.gz |
|
64 |
- SHA256: {{ hashes[version] }} |
|
65 |
-{% endfor %} |
|
66 |
- |
|
67 |
- |
|
68 |
-What's new in {{ version_str }} |
|
69 |
-{{ '-' * (14 + version_str | length) }} |
|
70 |
- |
|
71 |
-{% filter wordwrap %} |
|
72 |
-{% if plural %}These releases are{% else %}This release is a{% endif %} maintenance release{% if plural %}s{% endif %} containing numerous bugfixes. The full {% if plural %} changelogs are{% else %} changelog is{% endif %} at: |
|
73 |
-{% endfilter %} |
|
74 |
- |
|
75 |
- |
|
76 |
-{% for version in versions %} |
|
77 |
-* {{ version }} |
|
78 |
- https://github.com/ansible/ansible/blob/stable-{{ version.split('.')[:2] | join('.') }}/changelogs/CHANGELOG-v{{ version.split('.')[:2] | join('.') }}.rst |
|
79 |
-{% endfor %} |
|
80 |
- |
|
81 |
- |
|
82 |
-What's the schedule for future maintenance releases? |
|
83 |
- |
|
84 |
-{% filter wordwrap %} |
|
85 |
-Future maintenance releases will occur approximately every 3 weeks. So expect the next one around {{ next_release.strftime('%Y-%m-%d') }}. |
|
86 |
-{% endfilter %} |
|
87 |
- |
|
88 |
- |
|
89 |
- |
|
90 |
-Porting Help |
|
91 |
- |
|
92 |
-{% filter wordwrap %} |
|
93 |
-We've published a porting guide at |
|
94 |
-https://docs.ansible.com/ansible/devel/porting_guides/porting_guide_{{ latest_ver.split('.')[:2] | join('.') }}.html to help migrate your content to {{ latest_ver.split('.')[:2] | join('.') }}. |
|
95 |
-{% endfilter %} |
|
96 |
- |
|
97 |
- |
|
98 |
- |
|
99 |
-{% filter wordwrap %} |
|
100 |
-If you discover any errors or if any of your working playbooks break when you upgrade to {{ latest_ver }}, please use the following link to report the regression: |
|
101 |
-{% endfilter %} |
|
102 |
- |
|
103 |
- |
|
104 |
- https://github.com/ansible/ansible/issues/new/choose |
|
105 |
- |
|
106 |
-{% filter wordwrap %} |
|
107 |
-In your issue, be sure to mention the Ansible version that works and the one that doesn't. |
|
108 |
-{% endfilter %} |
|
109 |
- |
|
110 |
- |
|
111 |
-Thanks! |
|
112 |
- |
|
113 |
--{{ name }} |
|
114 |
- |
|
115 |
-""" # noqa for E501 (line length). |
|
116 |
-# jinja2 is horrid about getting rid of extra newlines so we have to have a single per paragraph for |
|
117 |
-# proper wrapping to occur |
|
118 |
- |
|
119 |
-SHORT_TEMPLATE = """ |
|
120 |
-{% set plural = False if versions | length == 1 else True %} |
|
121 |
-@ansible |
|
122 |
-{{ version_str }} |
|
123 |
-{% if plural %} |
|
124 |
- have |
|
125 |
-{% else %} |
|
126 |
- has |
|
127 |
-{% endif %} |
|
128 |
-been released! Get |
|
129 |
-{% if plural %} |
|
130 |
-them |
|
131 |
-{% else %} |
|
132 |
-it |
|
133 |
-{% endif %} |
|
134 |
-on PyPI: pip install ansible=={{ (versions|sort(attribute='ver_obj'))[-1] }}, |
|
135 |
-https://releases.ansible.com/ansible/, the Ansible PPA on Launchpad, or GitHub. Happy automating! |
|
136 |
-""" # noqa for E501 (line length). |
|
137 |
-# jinja2 is horrid about getting rid of extra newlines so we have to have a single per paragraph for |
|
138 |
-# proper wrapping to occur |
|
139 |
- |
|
140 |
-JINJA_ENV = Environment( |
|
141 |
- loader=DictLoader({'long': LONG_TEMPLATE, |
|
142 |
- 'short': SHORT_TEMPLATE, |
|
143 |
- 'version_string': VERSION_FRAGMENT, |
|
144 |
- }), |
|
145 |
- extensions=['jinja2.ext.i18n'], |
|
146 |
- trim_blocks=True, |
|
147 |
- lstrip_blocks=True, |
|
148 |
-) |
|
16 |
+from .. import errors # pylint: disable=relative-beyond-top-level |
|
149 | 17 |
|
150 | 18 |
|
151 | 19 |
class VersionStr(UserString): |
... | ... |
@@ -167,108 +32,6 @@ def transform_args(args): |
167 | 167 |
return args |
168 | 168 |
|
169 | 169 |
|
170 |
-async def calculate_hash_from_tarball(session, version): |
|
171 |
- tar_url = f'https://releases.ansible.com/ansible/ansible-{version}.tar.gz' |
|
172 |
- tar_task = asyncio.create_task(session.get(tar_url)) |
|
173 |
- tar_response = await tar_task |
|
174 |
- |
|
175 |
- tar_hash = hashlib.sha256() |
|
176 |
- while True: |
|
177 |
- chunk = await tar_response.content.read(1024) |
|
178 |
- if not chunk: |
|
179 |
- break |
|
180 |
- tar_hash.update(chunk) |
|
181 |
- |
|
182 |
- return tar_hash.hexdigest() |
|
183 |
- |
|
184 |
- |
|
185 |
-async def parse_hash_from_file(session, version): |
|
186 |
- filename = f'ansible-{version}.tar.gz' |
|
187 |
- hash_url = f'https://releases.ansible.com/ansible/{filename}.sha'
|
188 |
- hash_task = asyncio.create_task(session.get(hash_url)) |
|
189 |
- hash_response = await hash_task |
|
190 |
- |
|
191 |
- hash_content = await hash_response.read() |
|
192 |
- precreated_hash, precreated_filename = hash_content.split(None, 1) |
|
193 |
- if filename != precreated_filename.strip().decode('utf-8'): |
|
194 |
- raise ValueError(f'Hash file contains hash for a different file: {precreated_filename}') |
|
195 |
- |
|
196 |
- return precreated_hash.decode('utf-8') |
|
197 |
- |
|
198 |
- |
|
199 |
-async def get_hash(session, version): |
|
200 |
- calculated_hash = await calculate_hash_from_tarball(session, version) |
|
201 |
- precreated_hash = await parse_hash_from_file(session, version) |
|
202 |
- |
|
203 |
- if calculated_hash != precreated_hash: |
|
204 |
- raise ValueError(f'Hash in file ansible-{version}.tar.gz.sha {precreated_hash} does not' |
|
205 |
- f' match hash of tarball {calculated_hash}') |
|
206 |
- |
|
207 |
- return calculated_hash |
|
208 |
- |
|
209 |
- |
|
210 |
-async def get_hashes(versions): |
|
211 |
- hashes = {} |
|
212 |
- requestors = {} |
|
213 |
- async with aiohttp.ClientSession() as aio_session: |
|
214 |
- for version in versions: |
|
215 |
- requestors[version] = asyncio.create_task(get_hash(aio_session, version)) |
|
216 |
- |
|
217 |
- for version, request in requestors.items(): |
|
218 |
- await request |
|
219 |
- hashes[version] = request.result() |
|
220 |
- |
|
221 |
- return hashes |
|
222 |
- |
|
223 |
- |
|
224 |
-def next_release_date(weeks=3): |
|
225 |
- days_in_the_future = weeks * 7 |
|
226 |
- today = datetime.datetime.now() |
|
227 |
- numeric_today = today.weekday() |
|
228 |
- |
|
229 |
- # We release on Thursdays |
|
230 |
- if numeric_today == 3: |
|
231 |
- # 3 is Thursday |
|
232 |
- pass |
|
233 |
- elif numeric_today == 4: |
|
234 |
- # If this is Friday, we can adjust back to Thursday for the next release |
|
235 |
- today -= datetime.timedelta(days=1) |
|
236 |
- elif numeric_today < 3: |
|
237 |
- # Otherwise, slide forward to Thursday |
|
238 |
- today += datetime.timedelta(days=(3 - numeric_today)) |
|
239 |
- else: |
|
240 |
- # slightly different formula if it's past Thursday this week. We need to go forward to |
|
241 |
- # Thursday of next week |
|
242 |
- today += datetime.timedelta(days=(10 - numeric_today)) |
|
243 |
- |
|
244 |
- next_release = today + datetime.timedelta(days=days_in_the_future) |
|
245 |
- return next_release |
|
246 |
- |
|
247 |
- |
|
248 |
-def generate_long_message(versions, name): |
|
249 |
- hashes = asyncio.run(get_hashes(versions)) |
|
250 |
- |
|
251 |
- version_template = JINJA_ENV.get_template('version_string') |
|
252 |
- version_str = version_template.render(versions=versions).strip() |
|
253 |
- |
|
254 |
- next_release = next_release_date() |
|
255 |
- |
|
256 |
- template = JINJA_ENV.get_template('long') |
|
257 |
- message = template.render(versions=versions, version_str=version_str, |
|
258 |
- name=name, hashes=hashes, next_release=next_release) |
|
259 |
- return message |
|
260 |
- |
|
261 |
- |
|
262 |
-def generate_short_message(versions): |
|
263 |
- version_template = JINJA_ENV.get_template('version_string') |
|
264 |
- version_str = version_template.render(versions=versions).strip() |
|
265 |
- |
|
266 |
- template = JINJA_ENV.get_template('short') |
|
267 |
- message = template.render(versions=versions, version_str=version_str) |
|
268 |
- message = ' '.join(message.split()) + '\n' |
|
269 |
- return message |
|
270 |
- |
|
271 |
- |
|
272 | 170 |
def write_message(filename, message): |
273 | 171 |
if filename != '-': |
274 | 172 |
with open(filename, 'w') as out_file: |
... | ... |
@@ -294,12 +57,21 @@ class ReleaseAnnouncementCommand(Command): |
294 | 294 |
parser.add_argument("--twitter-out", type=str, default="-", |
295 | 295 |
help="Filename to place the twitter announcement into") |
296 | 296 |
|
297 |
- @staticmethod |
|
298 |
- def main(args): |
|
297 |
+ @classmethod |
|
298 |
+ def main(cls, args): |
|
299 |
+ if sys.version_info < (3, 6): |
|
300 |
+ raise errors.DependencyError('The {0} subcommand needs Python-3.6+' |
|
301 |
+ ' to run'.format(cls.name)) |
|
302 |
+ |
|
303 |
+ # Import here because these functions are invalid on Python-3.5 and the command plugins and |
|
304 |
+ # init_parser() method need to be compatible with Python-3.4+ for now. |
|
305 |
+ # Pylint doesn't understand Python3 namespace modules. |
|
306 |
+ from .. announce import create_short_message, create_long_message # pylint: disable=relative-beyond-top-level |
|
307 |
+ |
|
299 | 308 |
args = transform_args(args) |
300 | 309 |
|
301 |
- twitter_message = generate_short_message(args.versions) |
|
302 |
- email_message = generate_long_message(args.versions, args.name) |
|
310 |
+ twitter_message = create_short_message(args.versions) |
|
311 |
+ email_message = create_long_message(args.versions, args.name) |
|
303 | 312 |
|
304 | 313 |
write_message(args.twitter_out, twitter_message) |
305 | 314 |
write_message(args.email_out, email_message) |
306 | 315 |
new file mode 100644 |
... | ... |
@@ -0,0 +1,12 @@ |
0 |
+# coding: utf-8 |
|
1 |
+# Copyright: (c) 2019, Ansible Project |
|
2 |
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) |
|
3 |
+ |
|
4 |
+# Make coding more python3-ish |
|
5 |
+from __future__ import (absolute_import, division, print_function) |
|
6 |
+__metaclass__ = type |
|
7 |
+ |
|
8 |
+ |
|
9 |
+class DependencyError(Exception): |
|
10 |
+ """Used when a dependency is unmet""" |
|
11 |
+ pass |
... | ... |
@@ -52,8 +52,9 @@ class Attribute: |
52 | 52 |
:kwarg isa: The type of the attribute. Allowable values are a string |
53 | 53 |
representation of any yaml basic datatype, python class, or percent. |
54 | 54 |
(Enforced at post-validation time). |
55 |
- :kwarg private: Hides the attribute from being documented. |
|
56 |
- TODO: it should also should prevent it from being user settable |
|
55 |
+ :kwarg private: Not used at runtime. The docs playbook keyword dumper uses it to determine |
|
56 |
+ that a keyword should not be documented. mpdehaan had plans to remove attributes marked |
|
57 |
+ private from the ds so they would not have been available at all. |
|
57 | 58 |
:kwarg default: Default value if unspecified in the YAML document. |
58 | 59 |
:kwarg required: Whether or not the YAML document must contain this field. |
59 | 60 |
If the attribute is None when post-validated, an error will be raised. |
60 | 61 |
deleted file mode 100644 |
... | ... |
@@ -1,41 +0,0 @@ |
1 |
-# Copyright: (c) 2018, Ansible Project |
|
2 |
-# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) |
|
3 |
- |
|
4 |
-from __future__ import (absolute_import, division, print_function) |
|
5 |
-__metaclass__ = type |
|
6 |
- |
|
7 |
-""" |
|
8 |
-This file contains common code for building ansible. If you want to use code from here at runtime, |
|
9 |
-it needs to be moved out of this file and the implementation looked over to figure out whether API |
|
10 |
-should be changed before being made public. |
|
11 |
-""" |
|
12 |
- |
|
13 |
-import os.path |
|
14 |
- |
|
15 |
- |
|
16 |
-def update_file_if_different(filename, b_data): |
|
17 |
- ''' |
|
18 |
- Replace file content only if content is different. |
|
19 |
- |
|
20 |
- This preserves timestamps in case the file content has not changed. It performs multiple |
|
21 |
- operations on the file so it is not atomic and may be slower than simply writing to the file. |
|
22 |
- |
|
23 |
- :arg filename: The filename to write to |
|
24 |
- :b_data: Byte string containing the data to write to the file |
|
25 |
- ''' |
|
26 |
- try: |
|
27 |
- with open(filename, 'rb') as f: |
|
28 |
- b_data_old = f.read() |
|
29 |
- except IOError as e: |
|
30 |
- if e.errno != 2: |
|
31 |
- raise |
|
32 |
- # File did not exist, set b_data_old to a sentinel value so that |
|
33 |
- # b_data gets written to the filename |
|
34 |
- b_data_old = None |
|
35 |
- |
|
36 |
- if b_data_old != b_data: |
|
37 |
- with open(filename, 'wb') as f: |
|
38 |
- f.write(b_data) |
|
39 |
- return True |
|
40 |
- |
|
41 |
- return False |
... | ... |
@@ -1,3 +1,11 @@ |
1 | 1 |
# The following are only run by release engineers who can be asked to have newer Python3 on their systems |
2 | 2 |
hacking/build_library/build_ansible/command_plugins/porting_guide.py |
3 | 3 |
hacking/build_library/build_ansible/command_plugins/release_announcement.py |
4 |
+ |
|
5 |
+# The following are used to build docs. Since we explicitly say that the controller won't run on |
|
6 |
+# Python-2.6 (docs are built controller-side) and EPEL-6, the only LTS platform with Python-2.6, |
|
7 |
+# doesn't have a new enough sphinx to build docs, do not test these under Python-2.6 |
|
8 |
+hacking/build_library/build_ansible/command_plugins/dump_config.py |
|
9 |
+hacking/build_library/build_ansible/command_plugins/dump_keywords.py |
|
10 |
+hacking/build_library/build_ansible/command_plugins/generate_man.py |
|
11 |
+hacking/build_library/build_ansible/command_plugins/plugin_formatter.py |