* Remove .keep files from test/results/ dirs.
* Remove classification of test/results/ dir.
* Add results_relative to data context.
* Use variables in delegation paths.
* Standardize file writing and results paths.
* Fix issues reported by PyCharm.
* Clean up invocation of coverage command.
It now runs through the injector.
* Hack to allow intercept_command in cover.py.
* Simplify git ignore for test results.
* Use test result tmp dir instead of cache dir.
* Remove old .pytest_cache reference.
* Fix unit test docker delegation.
* Show HTML report link.
* Clean up more results references.
* Move import sanity test output to .tmp dir.
* Exclude test results dir from coverage.
* Fix import sanity test lib paths.
* Fix hard-coded import test paths.
* Fix most hard-coded integration test paths.
* Fix PyCharm warnings.
* Fix import placement.
* Fix integration test dir path.
* Fix Shippable scripts.
* Fix Shippable matrix check.
* Overhaul key pair management.
... | ... |
@@ -79,14 +79,7 @@ ansible.egg-info/ |
79 | 79 |
# Release directory |
80 | 80 |
packaging/release/ansible_release |
81 | 81 |
/.cache/ |
82 |
-/test/results/coverage/*=coverage.* |
|
83 |
-/test/results/coverage/coverage* |
|
84 |
-/test/results/reports/coverage*.xml |
|
85 |
-/test/results/reports/coverage*/ |
|
86 |
-/test/results/bot/*.json |
|
87 |
-/test/results/junit/*.xml |
|
88 |
-/test/results/logs/*.log |
|
89 |
-/test/results/data/*.json |
|
82 |
+/test/results/ |
|
90 | 83 |
/test/integration/cloud-config-aws.yml |
91 | 84 |
/test/integration/inventory.networking |
92 | 85 |
/test/integration/inventory.winrm |
... | ... |
@@ -16,6 +16,9 @@ def main(): |
16 | 16 |
import traceback |
17 | 17 |
import warnings |
18 | 18 |
|
19 |
+ import_dir = os.environ['SANITY_IMPORT_DIR'] |
|
20 |
+ minimal_dir = os.environ['SANITY_MINIMAL_DIR'] |
|
21 |
+ |
|
19 | 22 |
try: |
20 | 23 |
import importlib.util |
21 | 24 |
imp = None # pylint: disable=invalid-name |
... | ... |
@@ -266,9 +269,6 @@ def main(): |
266 | 266 |
filepath = os.path.relpath(warning.filename) |
267 | 267 |
lineno = warning.lineno |
268 | 268 |
|
269 |
- import_dir = 'test/runner/.tox/import/' |
|
270 |
- minimal_dir = 'test/runner/.tox/minimal-' |
|
271 |
- |
|
272 | 269 |
if filepath.startswith('../') or filepath.startswith(minimal_dir): |
273 | 270 |
# The warning occurred outside our source tree. |
274 | 271 |
# The best we can do is to report the file which was tested that triggered the warning. |
... | ... |
@@ -21,6 +21,7 @@ from .util import ( |
21 | 21 |
|
22 | 22 |
from .util_common import ( |
23 | 23 |
run_command, |
24 |
+ ResultType, |
|
24 | 25 |
) |
25 | 26 |
|
26 | 27 |
from .config import ( |
... | ... |
@@ -82,7 +83,7 @@ def ansible_environment(args, color=True, ansible_config=None): |
82 | 82 |
if args.debug: |
83 | 83 |
env.update(dict( |
84 | 84 |
ANSIBLE_DEBUG='true', |
85 |
- ANSIBLE_LOG_PATH=os.path.join(data_context().results, 'logs', 'debug.log'), |
|
85 |
+ ANSIBLE_LOG_PATH=os.path.join(ResultType.LOGS.name, 'debug.log'), |
|
86 | 86 |
)) |
87 | 87 |
|
88 | 88 |
if data_context().content.collection: |
... | ... |
@@ -276,7 +276,7 @@ class PathMapper: |
276 | 276 |
if ext == '.cs': |
277 | 277 |
return self.get_csharp_module_utils_usage(path) |
278 | 278 |
|
279 |
- if path.startswith('test/integration/targets/'): |
|
279 |
+ if is_subdir(path, data_context().content.integration_targets_path): |
|
280 | 280 |
return self.get_integration_target_usage(path) |
281 | 281 |
|
282 | 282 |
return [] |
... | ... |
@@ -338,7 +338,8 @@ class PathMapper: |
338 | 338 |
:rtype: list[str] |
339 | 339 |
""" |
340 | 340 |
target_name = path.split('/')[3] |
341 |
- dependents = [os.path.join('test/integration/targets/%s/' % target) for target in sorted(self.integration_dependencies.get(target_name, set()))] |
|
341 |
+ dependents = [os.path.join(data_context().content.integration_targets_path, target) + os.path.sep |
|
342 |
+ for target in sorted(self.integration_dependencies.get(target_name, set()))] |
|
342 | 343 |
|
343 | 344 |
return dependents |
344 | 345 |
|
... | ... |
@@ -620,22 +621,10 @@ class PathMapper: |
620 | 620 |
if path.startswith('test/ansible_test/'): |
621 | 621 |
return minimal # these tests are not invoked from ansible-test |
622 | 622 |
|
623 |
- if path.startswith('test/cache/'): |
|
624 |
- return minimal |
|
625 |
- |
|
626 |
- if path.startswith('test/results/'): |
|
627 |
- return minimal |
|
628 |
- |
|
629 | 623 |
if path.startswith('test/legacy/'): |
630 | 624 |
return minimal |
631 | 625 |
|
632 |
- if path.startswith('test/env/'): |
|
633 |
- return minimal |
|
634 |
- |
|
635 |
- if path.startswith('test/integration/roles/'): |
|
636 |
- return minimal |
|
637 |
- |
|
638 |
- if path.startswith('test/integration/targets/'): |
|
626 |
+ if is_subdir(path, data_context().content.integration_targets_path): |
|
639 | 627 |
if not os.path.exists(path): |
640 | 628 |
return minimal |
641 | 629 |
|
... | ... |
@@ -655,25 +644,8 @@ class PathMapper: |
655 | 655 |
FOCUSED_TARGET: True, |
656 | 656 |
} |
657 | 657 |
|
658 |
- if path.startswith('test/integration/'): |
|
659 |
- if dirname == 'test/integration': |
|
660 |
- if self.prefixes.get(name) == 'network' and ext == '.yaml': |
|
661 |
- return minimal # network integration test playbooks are not used by ansible-test |
|
662 |
- |
|
663 |
- if filename == 'network-all.yaml': |
|
664 |
- return minimal # network integration test playbook not used by ansible-test |
|
665 |
- |
|
666 |
- if filename == 'platform_agnostic.yaml': |
|
667 |
- return minimal # network integration test playbook not used by ansible-test |
|
668 |
- |
|
669 |
- if filename.startswith('inventory.') and filename.endswith('.template'): |
|
670 |
- return minimal # ansible-test does not use these inventory templates |
|
671 |
- |
|
672 |
- if filename == 'inventory': |
|
673 |
- return { |
|
674 |
- 'integration': self.integration_all_target, |
|
675 |
- } |
|
676 |
- |
|
658 |
+ if is_subdir(path, data_context().content.integration_path): |
|
659 |
+ if dirname == data_context().content.integration_path: |
|
677 | 660 |
for command in ( |
678 | 661 |
'integration', |
679 | 662 |
'windows-integration', |
... | ... |
@@ -888,7 +888,7 @@ def complete_network_testcase(prefix, parsed_args, **_): |
888 | 888 |
if len(parsed_args.include) != 1: |
889 | 889 |
return [] |
890 | 890 |
|
891 |
- test_dir = 'test/integration/targets/%s/tests' % parsed_args.include[0] |
|
891 |
+ test_dir = os.path.join(data_context().content.integration_targets_path, parsed_args.include[0], 'tests') |
|
892 | 892 |
connection_dirs = data_context().content.get_dirs(test_dir) |
893 | 893 |
|
894 | 894 |
for connection_dir in connection_dirs: |
... | ... |
@@ -5,7 +5,6 @@ __metaclass__ = type |
5 | 5 |
import abc |
6 | 6 |
import atexit |
7 | 7 |
import datetime |
8 |
-import json |
|
9 | 8 |
import time |
10 | 9 |
import os |
11 | 10 |
import platform |
... | ... |
@@ -23,10 +22,14 @@ from ..util import ( |
23 | 23 |
load_plugins, |
24 | 24 |
ABC, |
25 | 25 |
to_bytes, |
26 |
- make_dirs, |
|
27 | 26 |
ANSIBLE_TEST_CONFIG_ROOT, |
28 | 27 |
) |
29 | 28 |
|
29 |
+from ..util_common import ( |
|
30 |
+ write_json_test_results, |
|
31 |
+ ResultType, |
|
32 |
+) |
|
33 |
+ |
|
30 | 34 |
from ..target import ( |
31 | 35 |
TestTarget, |
32 | 36 |
) |
... | ... |
@@ -158,17 +161,14 @@ def cloud_init(args, targets): |
158 | 158 |
) |
159 | 159 |
|
160 | 160 |
if not args.explain and results: |
161 |
- results_path = os.path.join(data_context().results, 'data', '%s-%s.json' % ( |
|
162 |
- args.command, re.sub(r'[^0-9]', '-', str(datetime.datetime.utcnow().replace(microsecond=0))))) |
|
161 |
+ result_name = '%s-%s.json' % ( |
|
162 |
+ args.command, re.sub(r'[^0-9]', '-', str(datetime.datetime.utcnow().replace(microsecond=0)))) |
|
163 | 163 |
|
164 | 164 |
data = dict( |
165 | 165 |
clouds=results, |
166 | 166 |
) |
167 | 167 |
|
168 |
- make_dirs(os.path.dirname(results_path)) |
|
169 |
- |
|
170 |
- with open(results_path, 'w') as results_fd: |
|
171 |
- results_fd.write(json.dumps(data, sort_keys=True, indent=4)) |
|
168 |
+ write_json_test_results(ResultType.DATA, result_name, data) |
|
172 | 169 |
|
173 | 170 |
|
174 | 171 |
class CloudBase(ABC): |
... | ... |
@@ -280,8 +280,6 @@ class CloudBase(ABC): |
280 | 280 |
|
281 | 281 |
class CloudProvider(CloudBase): |
282 | 282 |
"""Base class for cloud provider plugins. Sets up cloud resources before delegation.""" |
283 |
- TEST_DIR = 'test/integration' |
|
284 |
- |
|
285 | 283 |
def __init__(self, args, config_extension='.ini'): |
286 | 284 |
""" |
287 | 285 |
:type args: IntegrationConfig |
... | ... |
@@ -291,7 +289,7 @@ class CloudProvider(CloudBase): |
291 | 291 |
|
292 | 292 |
self.remove_config = False |
293 | 293 |
self.config_static_name = 'cloud-config-%s%s' % (self.platform, config_extension) |
294 |
- self.config_static_path = os.path.join(self.TEST_DIR, self.config_static_name) |
|
294 |
+ self.config_static_path = os.path.join(data_context().content.integration_path, self.config_static_name) |
|
295 | 295 |
self.config_template_path = os.path.join(ANSIBLE_TEST_CONFIG_ROOT, '%s.template' % self.config_static_name) |
296 | 296 |
self.config_extension = config_extension |
297 | 297 |
|
... | ... |
@@ -352,8 +350,8 @@ class CloudProvider(CloudBase): |
352 | 352 |
""" |
353 | 353 |
prefix = '%s-' % os.path.splitext(os.path.basename(self.config_static_path))[0] |
354 | 354 |
|
355 |
- with tempfile.NamedTemporaryFile(dir=self.TEST_DIR, prefix=prefix, suffix=self.config_extension, delete=False) as config_fd: |
|
356 |
- filename = os.path.join(self.TEST_DIR, os.path.basename(config_fd.name)) |
|
355 |
+ with tempfile.NamedTemporaryFile(dir=data_context().content.integration_path, prefix=prefix, suffix=self.config_extension, delete=False) as config_fd: |
|
356 |
+ filename = os.path.join(data_context().content.integration_path, os.path.basename(config_fd.name)) |
|
357 | 357 |
|
358 | 358 |
self.config_path = filename |
359 | 359 |
self.remove_config = True |
... | ... |
@@ -3,7 +3,6 @@ from __future__ import (absolute_import, division, print_function) |
3 | 3 |
__metaclass__ = type |
4 | 4 |
|
5 | 5 |
import os |
6 |
-import time |
|
7 | 6 |
|
8 | 7 |
from . import ( |
9 | 8 |
CloudProvider, |
... | ... |
@@ -14,10 +13,8 @@ from . import ( |
14 | 14 |
from ..util import ( |
15 | 15 |
find_executable, |
16 | 16 |
display, |
17 |
- ApplicationError, |
|
18 | 17 |
is_shippable, |
19 | 18 |
ConfigParser, |
20 |
- SubprocessError, |
|
21 | 19 |
) |
22 | 20 |
|
23 | 21 |
from ..docker_util import ( |
... | ... |
@@ -32,10 +29,6 @@ from ..core_ci import ( |
32 | 32 |
AnsibleCoreCI, |
33 | 33 |
) |
34 | 34 |
|
35 |
-from ..http import ( |
|
36 |
- HttpClient, |
|
37 |
-) |
|
38 |
- |
|
39 | 35 |
|
40 | 36 |
class VcenterProvider(CloudProvider): |
41 | 37 |
"""VMware vcenter/esx plugin. Sets up cloud resources for tests.""" |
... | ... |
@@ -14,7 +14,6 @@ from .util import ( |
14 | 14 |
generate_pip_command, |
15 | 15 |
get_docker_completion, |
16 | 16 |
ApplicationError, |
17 |
- INTEGRATION_DIR_RELATIVE, |
|
18 | 17 |
) |
19 | 18 |
|
20 | 19 |
from .util_common import ( |
... | ... |
@@ -247,7 +246,7 @@ class IntegrationConfig(TestConfig): |
247 | 247 |
|
248 | 248 |
def get_ansible_config(self): # type: () -> str |
249 | 249 |
"""Return the path to the Ansible config for the given config.""" |
250 |
- ansible_config_relative_path = os.path.join(INTEGRATION_DIR_RELATIVE, '%s.cfg' % self.command) |
|
250 |
+ ansible_config_relative_path = os.path.join(data_context().content.integration_path, '%s.cfg' % self.command) |
|
251 | 251 |
ansible_config_path = os.path.join(data_context().content.root, ansible_config_relative_path) |
252 | 252 |
|
253 | 253 |
if not os.path.exists(ansible_config_path): |
... | ... |
@@ -327,6 +326,7 @@ class CoverageConfig(EnvironmentConfig): |
327 | 327 |
self.group_by = frozenset(args.group_by) if 'group_by' in args and args.group_by else set() # type: t.FrozenSet[str] |
328 | 328 |
self.all = args.all if 'all' in args else False # type: bool |
329 | 329 |
self.stub = args.stub if 'stub' in args else False # type: bool |
330 |
+ self.coverage = False # temporary work-around to support intercept_command in cover.py |
|
330 | 331 |
|
331 | 332 |
|
332 | 333 |
class CoverageReportConfig(CoverageConfig): |
... | ... |
@@ -28,6 +28,8 @@ from .util import ( |
28 | 28 |
|
29 | 29 |
from .util_common import ( |
30 | 30 |
run_command, |
31 |
+ write_json_file, |
|
32 |
+ ResultType, |
|
31 | 33 |
) |
32 | 34 |
|
33 | 35 |
from .config import ( |
... | ... |
@@ -492,10 +494,7 @@ class AnsibleCoreCI: |
492 | 492 |
|
493 | 493 |
config = self.save() |
494 | 494 |
|
495 |
- make_dirs(os.path.dirname(self.path)) |
|
496 |
- |
|
497 |
- with open(self.path, 'w') as instance_fd: |
|
498 |
- instance_fd.write(json.dumps(config, indent=4, sort_keys=True)) |
|
495 |
+ write_json_file(self.path, config, create_directories=True) |
|
499 | 496 |
|
500 | 497 |
def save(self): |
501 | 498 |
""" |
... | ... |
@@ -559,40 +558,30 @@ class SshKey: |
559 | 559 |
""" |
560 | 560 |
:type args: EnvironmentConfig |
561 | 561 |
""" |
562 |
- cache_dir = os.path.join(data_context().content.root, 'test/cache') |
|
563 |
- |
|
564 |
- self.key = os.path.join(cache_dir, self.KEY_NAME) |
|
565 |
- self.pub = os.path.join(cache_dir, self.PUB_NAME) |
|
566 |
- |
|
567 |
- key_dst = os.path.relpath(self.key, data_context().content.root) |
|
568 |
- pub_dst = os.path.relpath(self.pub, data_context().content.root) |
|
562 |
+ key_pair = self.get_key_pair() |
|
569 | 563 |
|
570 |
- if not os.path.isfile(self.key) or not os.path.isfile(self.pub): |
|
571 |
- base_dir = os.path.expanduser('~/.ansible/test/') |
|
564 |
+ if not key_pair: |
|
565 |
+ key_pair = self.generate_key_pair(args) |
|
572 | 566 |
|
573 |
- key = os.path.join(base_dir, self.KEY_NAME) |
|
574 |
- pub = os.path.join(base_dir, self.PUB_NAME) |
|
567 |
+ key, pub = key_pair |
|
568 |
+ key_dst, pub_dst = self.get_in_tree_key_pair_paths() |
|
575 | 569 |
|
576 |
- if not args.explain: |
|
577 |
- make_dirs(base_dir) |
|
578 |
- |
|
579 |
- if not os.path.isfile(key) or not os.path.isfile(pub): |
|
580 |
- run_command(args, ['ssh-keygen', '-m', 'PEM', '-q', '-t', 'rsa', '-N', '', '-f', key]) |
|
570 |
+ def ssh_key_callback(files): # type: (t.List[t.Tuple[str, str]]) -> None |
|
571 |
+ """ |
|
572 |
+ Add the SSH keys to the payload file list. |
|
573 |
+ They are either outside the source tree or in the results temp dir, which is ignored by default. |
|
574 |
+ """ |
|
575 |
+ if data_context().content.collection: |
|
576 |
+ working_path = data_context().content.collection.directory |
|
577 |
+ else: |
|
578 |
+ working_path = '' |
|
581 | 579 |
|
582 |
- self.key = key |
|
583 |
- self.pub = pub |
|
580 |
+ files.append((key, os.path.join(working_path, os.path.relpath(key_dst, data_context().content.root)))) |
|
581 |
+ files.append((pub, os.path.join(working_path, os.path.relpath(pub_dst, data_context().content.root)))) |
|
584 | 582 |
|
585 |
- def ssh_key_callback(files): # type: (t.List[t.Tuple[str, str]]) -> None |
|
586 |
- """Add the SSH keys to the payload file list.""" |
|
587 |
- if data_context().content.collection: |
|
588 |
- working_path = data_context().content.collection.directory |
|
589 |
- else: |
|
590 |
- working_path = '' |
|
583 |
+ data_context().register_payload_callback(ssh_key_callback) |
|
591 | 584 |
|
592 |
- files.append((key, os.path.join(working_path, key_dst))) |
|
593 |
- files.append((pub, os.path.join(working_path, pub_dst))) |
|
594 |
- |
|
595 |
- data_context().register_payload_callback(ssh_key_callback) |
|
585 |
+ self.key, self.pub = key, pub |
|
596 | 586 |
|
597 | 587 |
if args.explain: |
598 | 588 |
self.pub_contents = None |
... | ... |
@@ -600,6 +589,50 @@ class SshKey: |
600 | 600 |
with open(self.pub, 'r') as pub_fd: |
601 | 601 |
self.pub_contents = pub_fd.read().strip() |
602 | 602 |
|
603 |
+ def get_in_tree_key_pair_paths(self): # type: () -> t.Tuple[str, str] |
|
604 |
+ """Return the ansible-test SSH key pair paths from the content tree.""" |
|
605 |
+ temp_dir = ResultType.TMP.path |
|
606 |
+ |
|
607 |
+ key = os.path.join(temp_dir, self.KEY_NAME) |
|
608 |
+ pub = os.path.join(temp_dir, self.PUB_NAME) |
|
609 |
+ |
|
610 |
+ return key, pub |
|
611 |
+ |
|
612 |
+ def get_source_key_pair_paths(self): # type: () -> t.Tuple[str, str] |
|
613 |
+ """Return the ansible-test SSH key pair paths for the current user.""" |
|
614 |
+ base_dir = os.path.expanduser('~/.ansible/test/') |
|
615 |
+ |
|
616 |
+ key = os.path.join(base_dir, self.KEY_NAME) |
|
617 |
+ pub = os.path.join(base_dir, self.PUB_NAME) |
|
618 |
+ |
|
619 |
+ return key, pub |
|
620 |
+ |
|
621 |
+ def get_key_pair(self): # type: () -> t.Optional[t.Tuple[str, str]] |
|
622 |
+ """Return the ansible-test SSH key pair paths if present, otherwise return None.""" |
|
623 |
+ key, pub = self.get_in_tree_key_pair_paths() |
|
624 |
+ |
|
625 |
+ if os.path.isfile(key) and os.path.isfile(pub): |
|
626 |
+ return key, pub |
|
627 |
+ |
|
628 |
+ key, pub = self.get_source_key_pair_paths() |
|
629 |
+ |
|
630 |
+ if os.path.isfile(key) and os.path.isfile(pub): |
|
631 |
+ return key, pub |
|
632 |
+ |
|
633 |
+ return None |
|
634 |
+ |
|
635 |
+ def generate_key_pair(self, args): # type: (EnvironmentConfig) -> t.Tuple[str, str] |
|
636 |
+ """Generate an SSH key pair for use by all ansible-test invocations for the current user.""" |
|
637 |
+ key, pub = self.get_source_key_pair_paths() |
|
638 |
+ |
|
639 |
+ if not args.explain: |
|
640 |
+ make_dirs(os.path.dirname(key)) |
|
641 |
+ |
|
642 |
+ if not os.path.isfile(key) or not os.path.isfile(pub): |
|
643 |
+ run_command(args, ['ssh-keygen', '-m', 'PEM', '-q', '-t', 'rsa', '-N', '', '-f', key]) |
|
644 |
+ |
|
645 |
+ return key, pub |
|
646 |
+ |
|
603 | 647 |
|
604 | 648 |
class InstanceConnection: |
605 | 649 |
"""Container for remote instance status and connection details.""" |
... | ... |
@@ -18,6 +18,8 @@ from xml.dom import ( |
18 | 18 |
minidom, |
19 | 19 |
) |
20 | 20 |
|
21 |
+from . import types as t |
|
22 |
+ |
|
21 | 23 |
from .target import ( |
22 | 24 |
walk_module_targets, |
23 | 25 |
walk_compile_targets, |
... | ... |
@@ -34,7 +36,8 @@ from .util import ( |
34 | 34 |
) |
35 | 35 |
|
36 | 36 |
from .util_common import ( |
37 |
- run_command, |
|
37 |
+ intercept_command, |
|
38 |
+ ResultType, |
|
38 | 39 |
) |
39 | 40 |
|
40 | 41 |
from .config import ( |
... | ... |
@@ -57,6 +60,7 @@ from .data import ( |
57 | 57 |
|
58 | 58 |
COVERAGE_GROUPS = ('command', 'target', 'environment', 'version') |
59 | 59 |
COVERAGE_CONFIG_PATH = os.path.join(ANSIBLE_TEST_DATA_ROOT, 'coveragerc') |
60 |
+COVERAGE_OUTPUT_FILE_NAME = 'coverage' |
|
60 | 61 |
|
61 | 62 |
|
62 | 63 |
def command_coverage_combine(args): |
... | ... |
@@ -74,9 +78,9 @@ def _command_coverage_combine_python(args): |
74 | 74 |
""" |
75 | 75 |
coverage = initialize_coverage(args) |
76 | 76 |
|
77 |
- modules = dict((t.module, t.path) for t in list(walk_module_targets()) if t.path.endswith('.py')) |
|
77 |
+ modules = dict((target.module, target.path) for target in list(walk_module_targets()) if target.path.endswith('.py')) |
|
78 | 78 |
|
79 |
- coverage_dir = os.path.join(data_context().results, 'coverage') |
|
79 |
+ coverage_dir = ResultType.COVERAGE.path |
|
80 | 80 |
coverage_files = [os.path.join(coverage_dir, f) for f in os.listdir(coverage_dir) |
81 | 81 |
if '=coverage.' in f and '=python' in f] |
82 | 82 |
|
... | ... |
@@ -140,7 +144,7 @@ def _command_coverage_combine_python(args): |
140 | 140 |
invalid_path_count = 0 |
141 | 141 |
invalid_path_chars = 0 |
142 | 142 |
|
143 |
- coverage_file = os.path.join(data_context().results, 'coverage', 'coverage') |
|
143 |
+ coverage_file = os.path.join(ResultType.COVERAGE.path, COVERAGE_OUTPUT_FILE_NAME) |
|
144 | 144 |
|
145 | 145 |
for group in sorted(groups): |
146 | 146 |
arc_data = groups[group] |
... | ... |
@@ -322,9 +326,7 @@ def command_coverage_report(args): |
322 | 322 |
if args.omit: |
323 | 323 |
options.extend(['--omit', args.omit]) |
324 | 324 |
|
325 |
- env = common_environment() |
|
326 |
- env.update(dict(COVERAGE_FILE=output_file)) |
|
327 |
- run_command(args, env=env, cmd=['coverage', 'report', '--rcfile', COVERAGE_CONFIG_PATH] + options) |
|
325 |
+ run_coverage(args, output_file, 'report', options) |
|
328 | 326 |
|
329 | 327 |
|
330 | 328 |
def command_coverage_html(args): |
... | ... |
@@ -339,10 +341,10 @@ def command_coverage_html(args): |
339 | 339 |
display.info("Skipping output file %s in html generation" % output_file, verbosity=3) |
340 | 340 |
continue |
341 | 341 |
|
342 |
- dir_name = os.path.join(data_context().results, 'reports', os.path.basename(output_file)) |
|
343 |
- env = common_environment() |
|
344 |
- env.update(dict(COVERAGE_FILE=output_file)) |
|
345 |
- run_command(args, env=env, cmd=['coverage', 'html', '--rcfile', COVERAGE_CONFIG_PATH, '-i', '-d', dir_name]) |
|
342 |
+ dir_name = os.path.join(ResultType.REPORTS.path, os.path.basename(output_file)) |
|
343 |
+ run_coverage(args, output_file, 'html', ['-i', '-d', dir_name]) |
|
344 |
+ |
|
345 |
+ display.info('HTML report generated: file:///%s' % os.path.join(dir_name, 'index.html')) |
|
346 | 346 |
|
347 | 347 |
|
348 | 348 |
def command_coverage_xml(args): |
... | ... |
@@ -352,7 +354,7 @@ def command_coverage_xml(args): |
352 | 352 |
output_files = command_coverage_combine(args) |
353 | 353 |
|
354 | 354 |
for output_file in output_files: |
355 |
- xml_name = os.path.join(data_context().results, 'reports', '%s.xml' % os.path.basename(output_file)) |
|
355 |
+ xml_name = os.path.join(ResultType.REPORTS.path, '%s.xml' % os.path.basename(output_file)) |
|
356 | 356 |
if output_file.endswith('-powershell'): |
357 | 357 |
report = _generage_powershell_xml(output_file) |
358 | 358 |
|
... | ... |
@@ -363,9 +365,7 @@ def command_coverage_xml(args): |
363 | 363 |
with open(xml_name, 'w') as xml_fd: |
364 | 364 |
xml_fd.write(pretty) |
365 | 365 |
else: |
366 |
- env = common_environment() |
|
367 |
- env.update(dict(COVERAGE_FILE=output_file)) |
|
368 |
- run_command(args, env=env, cmd=['coverage', 'xml', '--rcfile', COVERAGE_CONFIG_PATH, '-i', '-o', xml_name]) |
|
366 |
+ run_coverage(args, output_file, 'xml', ['-i', '-o', xml_name]) |
|
369 | 367 |
|
370 | 368 |
|
371 | 369 |
def command_coverage_erase(args): |
... | ... |
@@ -374,7 +374,7 @@ def command_coverage_erase(args): |
374 | 374 |
""" |
375 | 375 |
initialize_coverage(args) |
376 | 376 |
|
377 |
- coverage_dir = os.path.join(data_context().results, 'coverage') |
|
377 |
+ coverage_dir = ResultType.COVERAGE.path |
|
378 | 378 |
|
379 | 379 |
for name in os.listdir(coverage_dir): |
380 | 380 |
if not name.startswith('coverage') and '=coverage.' not in name: |
... | ... |
@@ -440,13 +440,13 @@ def _command_coverage_combine_powershell(args): |
440 | 440 |
:type args: CoverageConfig |
441 | 441 |
:rtype: list[str] |
442 | 442 |
""" |
443 |
- coverage_dir = os.path.join(data_context().results, 'coverage') |
|
443 |
+ coverage_dir = ResultType.COVERAGE.path |
|
444 | 444 |
coverage_files = [os.path.join(coverage_dir, f) for f in os.listdir(coverage_dir) |
445 | 445 |
if '=coverage.' in f and '=powershell' in f] |
446 | 446 |
|
447 |
- def _default_stub_value(line_count): |
|
447 |
+ def _default_stub_value(lines): |
|
448 | 448 |
val = {} |
449 |
- for line in range(line_count): |
|
449 |
+ for line in range(lines): |
|
450 | 450 |
val[line] = 0 |
451 | 451 |
return val |
452 | 452 |
|
... | ... |
@@ -504,7 +504,7 @@ def _command_coverage_combine_powershell(args): |
504 | 504 |
invalid_path_count = 0 |
505 | 505 |
invalid_path_chars = 0 |
506 | 506 |
|
507 |
- coverage_file = os.path.join(data_context().results, 'coverage', 'coverage') |
|
507 |
+ coverage_file = os.path.join(ResultType.COVERAGE.path, COVERAGE_OUTPUT_FILE_NAME) |
|
508 | 508 |
|
509 | 509 |
for group in sorted(groups): |
510 | 510 |
coverage_data = groups[group] |
... | ... |
@@ -543,7 +543,7 @@ def _command_coverage_combine_powershell(args): |
543 | 543 |
|
544 | 544 |
def _generage_powershell_xml(coverage_file): |
545 | 545 |
""" |
546 |
- :type input_path: str |
|
546 |
+ :type coverage_file: str |
|
547 | 547 |
:rtype: Element |
548 | 548 |
""" |
549 | 549 |
with open(coverage_file, 'rb') as coverage_fd: |
... | ... |
@@ -669,7 +669,7 @@ def _add_cobertura_package(packages, package_name, package_data): |
669 | 669 |
|
670 | 670 |
def _generate_powershell_output_report(args, coverage_file): |
671 | 671 |
""" |
672 |
- :type args: CoverageConfig |
|
672 |
+ :type args: CoverageReportConfig |
|
673 | 673 |
:type coverage_file: str |
674 | 674 |
:rtype: str |
675 | 675 |
""" |
... | ... |
@@ -756,3 +756,13 @@ def _generate_powershell_output_report(args, coverage_file): |
756 | 756 |
|
757 | 757 |
report = '{0}\n{1}\n{2}\n{1}\n{3}'.format(header, line_break, "\n".join(lines), totals) |
758 | 758 |
return report |
759 |
+ |
|
760 |
+ |
|
761 |
+def run_coverage(args, output_file, command, cmd): # type: (CoverageConfig, str, str, t.List[str]) -> None |
|
762 |
+ """Run the coverage cli tool with the specified options.""" |
|
763 |
+ env = common_environment() |
|
764 |
+ env.update(dict(COVERAGE_FILE=output_file)) |
|
765 |
+ |
|
766 |
+ cmd = ['python', '-m', 'coverage', command, '--rcfile', COVERAGE_CONFIG_PATH] + cmd |
|
767 |
+ |
|
768 |
+ intercept_command(args, target_name='coverage', env=env, cmd=cmd, disable_coverage=True) |
... | ... |
@@ -17,6 +17,10 @@ from .util import ( |
17 | 17 |
remove_tree, |
18 | 18 |
) |
19 | 19 |
|
20 |
+from .util_common import ( |
|
21 |
+ write_text_file, |
|
22 |
+) |
|
23 |
+ |
|
20 | 24 |
from .data import ( |
21 | 25 |
data_context, |
22 | 26 |
) |
... | ... |
@@ -45,8 +49,7 @@ def coverage_setup(args): # type: (TestConfig) -> None |
45 | 45 |
else: |
46 | 46 |
args.coverage_config_base_path = tempfile.mkdtemp() |
47 | 47 |
|
48 |
- with open(os.path.join(args.coverage_config_base_path, COVERAGE_CONFIG_NAME), 'w') as coverage_config_path_fd: |
|
49 |
- coverage_config_path_fd.write(coverage_config) |
|
48 |
+ write_text_file(os.path.join(args.coverage_config_base_path, COVERAGE_CONFIG_NAME), coverage_config) |
|
50 | 49 |
|
51 | 50 |
|
52 | 51 |
def coverage_cleanup(args): # type: (TestConfig) -> None |
... | ... |
@@ -81,6 +84,7 @@ omit = |
81 | 81 |
*/pyshared/* |
82 | 82 |
*/pytest |
83 | 83 |
*/AnsiballZ_*.py |
84 |
+ */test/results/* |
|
84 | 85 |
''' |
85 | 86 |
|
86 | 87 |
return coverage_config |
... | ... |
@@ -110,7 +114,7 @@ include = |
110 | 110 |
%s/* |
111 | 111 |
|
112 | 112 |
omit = |
113 |
- */test/runner/.tox/* |
|
113 |
+ */test/results/* |
|
114 | 114 |
''' % data_context().content.root |
115 | 115 |
else: |
116 | 116 |
coverage_config += ''' |
... | ... |
@@ -72,7 +72,8 @@ class DataContext: |
72 | 72 |
content = self.__create_content_layout(layout_providers, source_providers, current_path, True) |
73 | 73 |
|
74 | 74 |
self.content = content # type: ContentLayout |
75 |
- self.results = os.path.join(self.content.root, 'test', 'results') |
|
75 |
+ self.results_relative = os.path.join('test', 'results') |
|
76 |
+ self.results = os.path.join(self.content.root, self.results_relative) |
|
76 | 77 |
|
77 | 78 |
def create_collection_layouts(self): # type: () -> t.List[ContentLayout] |
78 | 79 |
""" |
... | ... |
@@ -50,6 +50,7 @@ from .util import ( |
50 | 50 |
|
51 | 51 |
from .util_common import ( |
52 | 52 |
run_command, |
53 |
+ ResultType, |
|
53 | 54 |
) |
54 | 55 |
|
55 | 56 |
from .docker_util import ( |
... | ... |
@@ -241,6 +242,8 @@ def delegate_docker(args, exclude, require, integration_targets): |
241 | 241 |
else: |
242 | 242 |
content_root = install_root |
243 | 243 |
|
244 |
+ remote_results_root = os.path.join(content_root, data_context().results_relative) |
|
245 |
+ |
|
244 | 246 |
cmd = generate_command(args, python_interpreter, os.path.join(install_root, 'bin'), content_root, options, exclude, require) |
245 | 247 |
|
246 | 248 |
if isinstance(args, TestConfig): |
... | ... |
@@ -321,19 +324,12 @@ def delegate_docker(args, exclude, require, integration_targets): |
321 | 321 |
# also disconnect from the network once requirements have been installed |
322 | 322 |
if isinstance(args, UnitsConfig): |
323 | 323 |
writable_dirs = [ |
324 |
- os.path.join(install_root, '.pytest_cache'), |
|
324 |
+ os.path.join(content_root, ResultType.JUNIT.relative_path), |
|
325 |
+ os.path.join(content_root, ResultType.COVERAGE.relative_path), |
|
325 | 326 |
] |
326 | 327 |
|
327 |
- if content_root != install_root: |
|
328 |
- writable_dirs.append(os.path.join(content_root, 'test/results/junit')) |
|
329 |
- writable_dirs.append(os.path.join(content_root, 'test/results/coverage')) |
|
330 |
- |
|
331 | 328 |
docker_exec(args, test_id, ['mkdir', '-p'] + writable_dirs) |
332 | 329 |
docker_exec(args, test_id, ['chmod', '777'] + writable_dirs) |
333 |
- |
|
334 |
- if content_root == install_root: |
|
335 |
- docker_exec(args, test_id, ['find', os.path.join(content_root, 'test/results/'), '-type', 'd', '-exec', 'chmod', '777', '{}', '+']) |
|
336 |
- |
|
337 | 330 |
docker_exec(args, test_id, ['chmod', '755', '/root']) |
338 | 331 |
docker_exec(args, test_id, ['chmod', '644', os.path.join(content_root, args.metadata_path)]) |
339 | 332 |
|
... | ... |
@@ -353,10 +349,16 @@ def delegate_docker(args, exclude, require, integration_targets): |
353 | 353 |
try: |
354 | 354 |
docker_exec(args, test_id, cmd, options=cmd_options) |
355 | 355 |
finally: |
356 |
+ local_test_root = os.path.dirname(data_context().results) |
|
357 |
+ |
|
358 |
+ remote_test_root = os.path.dirname(remote_results_root) |
|
359 |
+ remote_results_name = os.path.basename(remote_results_root) |
|
360 |
+ remote_temp_file = os.path.join('/root', remote_results_name + '.tgz') |
|
361 |
+ |
|
356 | 362 |
with tempfile.NamedTemporaryFile(prefix='ansible-result-', suffix='.tgz') as local_result_fd: |
357 |
- docker_exec(args, test_id, ['tar', 'czf', '/root/results.tgz', '-C', os.path.join(content_root, 'test'), 'results']) |
|
358 |
- docker_get(args, test_id, '/root/results.tgz', local_result_fd.name) |
|
359 |
- run_command(args, ['tar', 'oxzf', local_result_fd.name, '-C', 'test']) |
|
363 |
+ docker_exec(args, test_id, ['tar', 'czf', remote_temp_file, '-C', remote_test_root, remote_results_name]) |
|
364 |
+ docker_get(args, test_id, remote_temp_file, local_result_fd.name) |
|
365 |
+ run_command(args, ['tar', 'oxzf', local_result_fd.name, '-C', local_test_root]) |
|
360 | 366 |
finally: |
361 | 367 |
if httptester_id: |
362 | 368 |
docker_rm(args, httptester_id) |
... | ... |
@@ -470,8 +472,14 @@ def delegate_remote(args, exclude, require, integration_targets): |
470 | 470 |
download = False |
471 | 471 |
|
472 | 472 |
if download and content_root: |
473 |
- manage.ssh('rm -rf /tmp/results && cp -a %s/test/results /tmp/results && chmod -R a+r /tmp/results' % content_root) |
|
474 |
- manage.download('/tmp/results', 'test') |
|
473 |
+ local_test_root = os.path.dirname(data_context().results) |
|
474 |
+ |
|
475 |
+ remote_results_root = os.path.join(content_root, data_context().results_relative) |
|
476 |
+ remote_results_name = os.path.basename(remote_results_root) |
|
477 |
+ remote_temp_path = os.path.join('/tmp', remote_results_name) |
|
478 |
+ |
|
479 |
+ manage.ssh('rm -rf {0} && cp -a {1} {0} && chmod -R a+r {0}'.format(remote_temp_path, remote_results_root)) |
|
480 |
+ manage.download(remote_temp_path, local_test_root) |
|
475 | 481 |
finally: |
476 | 482 |
if args.remote_terminate == 'always' or (args.remote_terminate == 'success' and success): |
477 | 483 |
core_ci.stop() |
... | ... |
@@ -26,6 +26,12 @@ from .util import ( |
26 | 26 |
get_available_python_versions, |
27 | 27 |
) |
28 | 28 |
|
29 |
+from .util_common import ( |
|
30 |
+ write_json_test_results, |
|
31 |
+ write_json_file, |
|
32 |
+ ResultType, |
|
33 |
+) |
|
34 |
+ |
|
29 | 35 |
from .git import ( |
30 | 36 |
Git, |
31 | 37 |
) |
... | ... |
@@ -47,10 +53,6 @@ from .test import ( |
47 | 47 |
TestTimeout, |
48 | 48 |
) |
49 | 49 |
|
50 |
-from .data import ( |
|
51 |
- data_context, |
|
52 |
-) |
|
53 |
- |
|
54 | 50 |
from .executor import ( |
55 | 51 |
SUPPORTED_PYTHON_VERSIONS, |
56 | 52 |
) |
... | ... |
@@ -122,8 +124,7 @@ def show_dump_env(args): |
122 | 122 |
show_dict(data, verbose) |
123 | 123 |
|
124 | 124 |
if args.dump and not args.explain: |
125 |
- with open(os.path.join(data_context().results, 'bot', 'data-environment.json'), 'w') as results_fd: |
|
126 |
- results_fd.write(json.dumps(data, sort_keys=True)) |
|
125 |
+ write_json_test_results(ResultType.BOT, 'data-environment.json', data) |
|
127 | 126 |
|
128 | 127 |
|
129 | 128 |
def set_timeout(args): |
... | ... |
@@ -151,8 +152,7 @@ def set_timeout(args): |
151 | 151 |
deadline=deadline, |
152 | 152 |
) |
153 | 153 |
|
154 |
- with open(TIMEOUT_PATH, 'w') as timeout_fd: |
|
155 |
- json.dump(data, timeout_fd, indent=4, sort_keys=True) |
|
154 |
+ write_json_file(TIMEOUT_PATH, data) |
|
156 | 155 |
elif os.path.exists(TIMEOUT_PATH): |
157 | 156 |
os.remove(TIMEOUT_PATH) |
158 | 157 |
|
... | ... |
@@ -56,7 +56,6 @@ from .util import ( |
56 | 56 |
find_python, |
57 | 57 |
get_docker_completion, |
58 | 58 |
get_remote_completion, |
59 |
- COVERAGE_OUTPUT_NAME, |
|
60 | 59 |
cmd_quote, |
61 | 60 |
ANSIBLE_LIB_ROOT, |
62 | 61 |
ANSIBLE_TEST_DATA_ROOT, |
... | ... |
@@ -71,6 +70,9 @@ from .util_common import ( |
71 | 71 |
intercept_command, |
72 | 72 |
named_temporary_file, |
73 | 73 |
run_command, |
74 |
+ write_text_file, |
|
75 |
+ write_json_test_results, |
|
76 |
+ ResultType, |
|
74 | 77 |
) |
75 | 78 |
|
76 | 79 |
from .docker_util import ( |
... | ... |
@@ -128,9 +130,7 @@ from .integration import ( |
128 | 128 |
integration_test_environment, |
129 | 129 |
integration_test_config_file, |
130 | 130 |
setup_common_temp_dir, |
131 |
- INTEGRATION_VARS_FILE_RELATIVE, |
|
132 | 131 |
get_inventory_relative_path, |
133 |
- INTEGRATION_DIR_RELATIVE, |
|
134 | 132 |
check_inventory, |
135 | 133 |
delegate_inventory, |
136 | 134 |
) |
... | ... |
@@ -198,8 +198,8 @@ def install_command_requirements(args, python_version=None): |
198 | 198 |
:type python_version: str | None |
199 | 199 |
""" |
200 | 200 |
if not args.explain: |
201 |
- make_dirs(os.path.join(data_context().results, 'coverage')) |
|
202 |
- make_dirs(os.path.join(data_context().results, 'data')) |
|
201 |
+ make_dirs(ResultType.COVERAGE.path) |
|
202 |
+ make_dirs(ResultType.DATA.path) |
|
203 | 203 |
|
204 | 204 |
if isinstance(args, ShellConfig): |
205 | 205 |
if args.raw: |
... | ... |
@@ -322,12 +322,9 @@ Author-email: info@ansible.com |
322 | 322 |
License: GPLv3+ |
323 | 323 |
''' % get_ansible_version() |
324 | 324 |
|
325 |
- os.mkdir(egg_info_path) |
|
326 |
- |
|
327 | 325 |
pkg_info_path = os.path.join(egg_info_path, 'PKG-INFO') |
328 | 326 |
|
329 |
- with open(pkg_info_path, 'w') as pkg_info_fd: |
|
330 |
- pkg_info_fd.write(pkg_info.lstrip()) |
|
327 |
+ write_text_file(pkg_info_path, pkg_info.lstrip(), create_directories=True) |
|
331 | 328 |
|
332 | 329 |
|
333 | 330 |
def generate_pip_install(pip, command, packages=None): |
... | ... |
@@ -394,7 +391,7 @@ def command_network_integration(args): |
394 | 394 |
template_path = os.path.join(ANSIBLE_TEST_CONFIG_ROOT, os.path.basename(inventory_relative_path)) + '.template' |
395 | 395 |
|
396 | 396 |
if args.inventory: |
397 |
- inventory_path = os.path.join(data_context().content.root, INTEGRATION_DIR_RELATIVE, args.inventory) |
|
397 |
+ inventory_path = os.path.join(data_context().content.root, data_context().content.integration_path, args.inventory) |
|
398 | 398 |
else: |
399 | 399 |
inventory_path = os.path.join(data_context().content.root, inventory_relative_path) |
400 | 400 |
|
... | ... |
@@ -445,8 +442,7 @@ def command_network_integration(args): |
445 | 445 |
display.info('>>> Inventory: %s\n%s' % (inventory_path, inventory.strip()), verbosity=3) |
446 | 446 |
|
447 | 447 |
if not args.explain: |
448 |
- with open(inventory_path, 'w') as inventory_fd: |
|
449 |
- inventory_fd.write(inventory) |
|
448 |
+ write_text_file(inventory_path, inventory) |
|
450 | 449 |
|
451 | 450 |
success = False |
452 | 451 |
|
... | ... |
@@ -576,7 +572,7 @@ def command_windows_integration(args): |
576 | 576 |
template_path = os.path.join(ANSIBLE_TEST_CONFIG_ROOT, os.path.basename(inventory_relative_path)) + '.template' |
577 | 577 |
|
578 | 578 |
if args.inventory: |
579 |
- inventory_path = os.path.join(data_context().content.root, INTEGRATION_DIR_RELATIVE, args.inventory) |
|
579 |
+ inventory_path = os.path.join(data_context().content.root, data_context().content.integration_path, args.inventory) |
|
580 | 580 |
else: |
581 | 581 |
inventory_path = os.path.join(data_context().content.root, inventory_relative_path) |
582 | 582 |
|
... | ... |
@@ -620,8 +616,7 @@ def command_windows_integration(args): |
620 | 620 |
display.info('>>> Inventory: %s\n%s' % (inventory_path, inventory.strip()), verbosity=3) |
621 | 621 |
|
622 | 622 |
if not args.explain: |
623 |
- with open(inventory_path, 'w') as inventory_fd: |
|
624 |
- inventory_fd.write(inventory) |
|
623 |
+ write_text_file(inventory_path, inventory) |
|
625 | 624 |
|
626 | 625 |
use_httptester = args.httptester and any('needs/httptester/' in target.aliases for target in internal_targets) |
627 | 626 |
# if running under Docker delegation, the httptester may have already been started |
... | ... |
@@ -681,9 +676,9 @@ def command_windows_integration(args): |
681 | 681 |
pre_target = forward_ssh_ports |
682 | 682 |
post_target = cleanup_ssh_ports |
683 | 683 |
|
684 |
- def run_playbook(playbook, playbook_vars): |
|
684 |
+ def run_playbook(playbook, run_playbook_vars): # type: (str, t.Dict[str, t.Any]) -> None |
|
685 | 685 |
playbook_path = os.path.join(ANSIBLE_TEST_DATA_ROOT, 'playbooks', playbook) |
686 |
- command = ['ansible-playbook', '-i', inventory_path, playbook_path, '-e', json.dumps(playbook_vars)] |
|
686 |
+ command = ['ansible-playbook', '-i', inventory_path, playbook_path, '-e', json.dumps(run_playbook_vars)] |
|
687 | 687 |
if args.verbosity: |
688 | 688 |
command.append('-%s' % ('v' * args.verbosity)) |
689 | 689 |
|
... | ... |
@@ -716,7 +711,7 @@ def command_windows_integration(args): |
716 | 716 |
|
717 | 717 |
for filename in os.listdir(local_temp_path): |
718 | 718 |
with open_zipfile(os.path.join(local_temp_path, filename)) as coverage_zip: |
719 |
- coverage_zip.extractall(os.path.join(data_context().results, 'coverage')) |
|
719 |
+ coverage_zip.extractall(ResultType.COVERAGE.path) |
|
720 | 720 |
|
721 | 721 |
if args.remote_terminate == 'always' or (args.remote_terminate == 'success' and success): |
722 | 722 |
for instance in instances: |
... | ... |
@@ -882,7 +877,7 @@ def command_integration_filter(args, # type: TIntegrationConfig |
882 | 882 |
|
883 | 883 |
cloud_init(args, internal_targets) |
884 | 884 |
|
885 |
- vars_file_src = os.path.join(data_context().content.root, INTEGRATION_VARS_FILE_RELATIVE) |
|
885 |
+ vars_file_src = os.path.join(data_context().content.root, data_context().content.integration_vars_path) |
|
886 | 886 |
|
887 | 887 |
if os.path.exists(vars_file_src): |
888 | 888 |
def integration_config_callback(files): # type: (t.List[t.Tuple[str, str]]) -> None |
... | ... |
@@ -895,7 +890,7 @@ def command_integration_filter(args, # type: TIntegrationConfig |
895 | 895 |
else: |
896 | 896 |
working_path = '' |
897 | 897 |
|
898 |
- files.append((vars_file_src, os.path.join(working_path, INTEGRATION_VARS_FILE_RELATIVE))) |
|
898 |
+ files.append((vars_file_src, os.path.join(working_path, data_context().content.integration_vars_path))) |
|
899 | 899 |
|
900 | 900 |
data_context().register_payload_callback(integration_config_callback) |
901 | 901 |
|
... | ... |
@@ -1086,23 +1081,22 @@ def command_integration_filtered(args, targets, all_targets, inventory_path, pre |
1086 | 1086 |
finally: |
1087 | 1087 |
if not args.explain: |
1088 | 1088 |
if args.coverage: |
1089 |
- coverage_temp_path = os.path.join(common_temp_path, COVERAGE_OUTPUT_NAME) |
|
1090 |
- coverage_save_path = os.path.join(data_context().results, 'coverage') |
|
1089 |
+ coverage_temp_path = os.path.join(common_temp_path, ResultType.COVERAGE.name) |
|
1090 |
+ coverage_save_path = ResultType.COVERAGE.path |
|
1091 | 1091 |
|
1092 | 1092 |
for filename in os.listdir(coverage_temp_path): |
1093 | 1093 |
shutil.copy(os.path.join(coverage_temp_path, filename), os.path.join(coverage_save_path, filename)) |
1094 | 1094 |
|
1095 | 1095 |
remove_tree(common_temp_path) |
1096 | 1096 |
|
1097 |
- results_path = os.path.join(data_context().results, 'data', '%s-%s.json' % ( |
|
1098 |
- args.command, re.sub(r'[^0-9]', '-', str(datetime.datetime.utcnow().replace(microsecond=0))))) |
|
1097 |
+ result_name = '%s-%s.json' % ( |
|
1098 |
+ args.command, re.sub(r'[^0-9]', '-', str(datetime.datetime.utcnow().replace(microsecond=0)))) |
|
1099 | 1099 |
|
1100 | 1100 |
data = dict( |
1101 | 1101 |
targets=results, |
1102 | 1102 |
) |
1103 | 1103 |
|
1104 |
- with open(results_path, 'w') as results_fd: |
|
1105 |
- results_fd.write(json.dumps(data, sort_keys=True, indent=4)) |
|
1104 |
+ write_json_test_results(ResultType.DATA, result_name, data) |
|
1106 | 1105 |
|
1107 | 1106 |
if failed: |
1108 | 1107 |
raise ApplicationError('The %d integration test(s) listed below (out of %d) failed. See error output above for details:\n%s' % ( |
... | ... |
@@ -1286,7 +1280,7 @@ def integration_environment(args, target, test_dir, inventory_path, ansible_conf |
1286 | 1286 |
callback_plugins = ['junit'] + (env_config.callback_plugins or [] if env_config else []) |
1287 | 1287 |
|
1288 | 1288 |
integration = dict( |
1289 |
- JUNIT_OUTPUT_DIR=os.path.join(data_context().results, 'junit'), |
|
1289 |
+ JUNIT_OUTPUT_DIR=ResultType.JUNIT.path, |
|
1290 | 1290 |
ANSIBLE_CALLBACK_WHITELIST=','.join(sorted(set(callback_plugins))), |
1291 | 1291 |
ANSIBLE_TEST_CI=args.metadata.ci_provider, |
1292 | 1292 |
ANSIBLE_TEST_COVERAGE='check' if args.coverage_check else ('yes' if args.coverage else ''), |
... | ... |
@@ -5,6 +5,8 @@ __metaclass__ = type |
5 | 5 |
import ast |
6 | 6 |
import os |
7 | 7 |
|
8 |
+from . import types as t |
|
9 |
+ |
|
8 | 10 |
from .util import ( |
9 | 11 |
display, |
10 | 12 |
ApplicationError, |
... | ... |
@@ -35,13 +37,8 @@ def get_python_module_utils_imports(compile_targets): |
35 | 35 |
for target in compile_targets: |
36 | 36 |
imports_by_target_path[target.path] = extract_python_module_utils_imports(target.path, module_utils) |
37 | 37 |
|
38 |
- def recurse_import(import_name, depth=0, seen=None): |
|
39 |
- """Recursively expand module_utils imports from module_utils files. |
|
40 |
- :type import_name: str |
|
41 |
- :type depth: int |
|
42 |
- :type seen: set[str] | None |
|
43 |
- :rtype set[str] |
|
44 |
- """ |
|
38 |
+ def recurse_import(import_name, depth=0, seen=None): # type: (str, int, t.Optional[t.Set[str]]) -> t.Set[str] |
|
39 |
+ """Recursively expand module_utils imports from module_utils files.""" |
|
45 | 40 |
display.info('module_utils import: %s%s' % (' ' * depth, import_name), verbosity=4) |
46 | 41 |
|
47 | 42 |
if seen is None: |
... | ... |
@@ -27,17 +27,16 @@ from ..util import ( |
27 | 27 |
display, |
28 | 28 |
make_dirs, |
29 | 29 |
COVERAGE_CONFIG_NAME, |
30 |
- COVERAGE_OUTPUT_NAME, |
|
31 | 30 |
MODE_DIRECTORY, |
32 | 31 |
MODE_DIRECTORY_WRITE, |
33 | 32 |
MODE_FILE, |
34 |
- INTEGRATION_DIR_RELATIVE, |
|
35 |
- INTEGRATION_VARS_FILE_RELATIVE, |
|
36 | 33 |
to_bytes, |
37 | 34 |
) |
38 | 35 |
|
39 | 36 |
from ..util_common import ( |
40 | 37 |
named_temporary_file, |
38 |
+ write_text_file, |
|
39 |
+ ResultType, |
|
41 | 40 |
) |
42 | 41 |
|
43 | 42 |
from ..coverage_util import ( |
... | ... |
@@ -73,12 +72,11 @@ def setup_common_temp_dir(args, path): |
73 | 73 |
|
74 | 74 |
coverage_config = generate_coverage_config(args) |
75 | 75 |
|
76 |
- with open(coverage_config_path, 'w') as coverage_config_fd: |
|
77 |
- coverage_config_fd.write(coverage_config) |
|
76 |
+ write_text_file(coverage_config_path, coverage_config) |
|
78 | 77 |
|
79 | 78 |
os.chmod(coverage_config_path, MODE_FILE) |
80 | 79 |
|
81 |
- coverage_output_path = os.path.join(path, COVERAGE_OUTPUT_NAME) |
|
80 |
+ coverage_output_path = os.path.join(path, ResultType.COVERAGE.name) |
|
82 | 81 |
|
83 | 82 |
os.mkdir(coverage_output_path) |
84 | 83 |
os.chmod(coverage_output_path, MODE_DIRECTORY_WRITE) |
... | ... |
@@ -153,7 +151,7 @@ def get_inventory_relative_path(args): # type: (IntegrationConfig) -> str |
153 | 153 |
NetworkIntegrationConfig: 'inventory.networking', |
154 | 154 |
} # type: t.Dict[t.Type[IntegrationConfig], str] |
155 | 155 |
|
156 |
- return os.path.join(INTEGRATION_DIR_RELATIVE, inventory_names[type(args)]) |
|
156 |
+ return os.path.join(data_context().content.integration_path, inventory_names[type(args)]) |
|
157 | 157 |
|
158 | 158 |
|
159 | 159 |
def delegate_inventory(args, inventory_path_src): # type: (IntegrationConfig, str) -> None |
... | ... |
@@ -202,10 +200,10 @@ def integration_test_environment(args, target, inventory_path_src): |
202 | 202 |
if args.no_temp_workdir or 'no/temp_workdir/' in target.aliases: |
203 | 203 |
display.warning('Disabling the temp work dir is a temporary debugging feature that may be removed in the future without notice.') |
204 | 204 |
|
205 |
- integration_dir = os.path.join(data_context().content.root, INTEGRATION_DIR_RELATIVE) |
|
205 |
+ integration_dir = os.path.join(data_context().content.root, data_context().content.integration_path) |
|
206 | 206 |
inventory_path = inventory_path_src |
207 | 207 |
ansible_config = ansible_config_src |
208 |
- vars_file = os.path.join(data_context().content.root, INTEGRATION_VARS_FILE_RELATIVE) |
|
208 |
+ vars_file = os.path.join(data_context().content.root, data_context().content.integration_vars_path) |
|
209 | 209 |
|
210 | 210 |
yield IntegrationEnvironment(integration_dir, inventory_path, ansible_config, vars_file) |
211 | 211 |
return |
... | ... |
@@ -237,11 +235,11 @@ def integration_test_environment(args, target, inventory_path_src): |
237 | 237 |
|
238 | 238 |
files_needed = get_files_needed(target_dependencies) |
239 | 239 |
|
240 |
- integration_dir = os.path.join(temp_dir, INTEGRATION_DIR_RELATIVE) |
|
240 |
+ integration_dir = os.path.join(temp_dir, data_context().content.integration_path) |
|
241 | 241 |
ansible_config = os.path.join(temp_dir, ansible_config_relative) |
242 | 242 |
|
243 |
- vars_file_src = os.path.join(data_context().content.root, INTEGRATION_VARS_FILE_RELATIVE) |
|
244 |
- vars_file = os.path.join(temp_dir, INTEGRATION_VARS_FILE_RELATIVE) |
|
243 |
+ vars_file_src = os.path.join(data_context().content.root, data_context().content.integration_vars_path) |
|
244 |
+ vars_file = os.path.join(temp_dir, data_context().content.integration_vars_path) |
|
245 | 245 |
|
246 | 246 |
file_copies = [ |
247 | 247 |
(ansible_config_src, ansible_config), |
... | ... |
@@ -253,8 +251,10 @@ def integration_test_environment(args, target, inventory_path_src): |
253 | 253 |
|
254 | 254 |
file_copies += [(path, os.path.join(temp_dir, path)) for path in files_needed] |
255 | 255 |
|
256 |
+ integration_targets_relative_path = data_context().content.integration_targets_path |
|
257 |
+ |
|
256 | 258 |
directory_copies = [ |
257 |
- (os.path.join(INTEGRATION_DIR_RELATIVE, 'targets', target.name), os.path.join(integration_dir, 'targets', target.name)) |
|
259 |
+ (os.path.join(integration_targets_relative_path, target.name), os.path.join(temp_dir, integration_targets_relative_path, target.name)) |
|
258 | 260 |
for target in target_dependencies |
259 | 261 |
] |
260 | 262 |
|
... | ... |
@@ -11,6 +11,10 @@ from .util import ( |
11 | 11 |
is_shippable, |
12 | 12 |
) |
13 | 13 |
|
14 |
+from .util_common import ( |
|
15 |
+ write_json_file, |
|
16 |
+) |
|
17 |
+ |
|
14 | 18 |
from .diff import ( |
15 | 19 |
parse_diff, |
16 | 20 |
FileDiff, |
... | ... |
@@ -72,8 +76,7 @@ class Metadata: |
72 | 72 |
|
73 | 73 |
display.info('>>> Metadata: %s\n%s' % (path, data), verbosity=3) |
74 | 74 |
|
75 |
- with open(path, 'w') as data_fd: |
|
76 |
- json.dump(data, data_fd, sort_keys=True, indent=4) |
|
75 |
+ write_json_file(path, data) |
|
77 | 76 |
|
78 | 77 |
@staticmethod |
79 | 78 |
def from_file(path): |
... | ... |
@@ -81,6 +81,7 @@ class ContentLayout(Layout): |
81 | 81 |
paths, # type: t.List[str] |
82 | 82 |
plugin_paths, # type: t.Dict[str, str] |
83 | 83 |
collection=None, # type: t.Optional[CollectionDetail] |
84 |
+ integration_path=None, # type: t.Optional[str] |
|
84 | 85 |
unit_path=None, # type: t.Optional[str] |
85 | 86 |
unit_module_path=None, # type: t.Optional[str] |
86 | 87 |
unit_module_utils_path=None, # type: t.Optional[str] |
... | ... |
@@ -89,6 +90,9 @@ class ContentLayout(Layout): |
89 | 89 |
|
90 | 90 |
self.plugin_paths = plugin_paths |
91 | 91 |
self.collection = collection |
92 |
+ self.integration_path = integration_path |
|
93 |
+ self.integration_targets_path = os.path.join(integration_path, 'targets') |
|
94 |
+ self.integration_vars_path = os.path.join(integration_path, 'integration_config.yml') |
|
92 | 95 |
self.unit_path = unit_path |
93 | 96 |
self.unit_module_path = unit_module_path |
94 | 97 |
self.unit_module_utils_path = unit_module_utils_path |
... | ... |
@@ -31,6 +31,7 @@ class AnsibleLayout(LayoutProvider): |
31 | 31 |
return ContentLayout(root, |
32 | 32 |
paths, |
33 | 33 |
plugin_paths=plugin_paths, |
34 |
+ integration_path='test/integration', |
|
34 | 35 |
unit_path='test/units', |
35 | 36 |
unit_module_path='test/units/modules', |
36 | 37 |
unit_module_utils_path='test/units/module_utils', |
... | ... |
@@ -44,6 +44,7 @@ class CollectionLayout(LayoutProvider): |
44 | 44 |
namespace=collection_namespace, |
45 | 45 |
root=collection_root, |
46 | 46 |
), |
47 |
+ integration_path='test/integration', |
|
47 | 48 |
unit_path='test/unit', |
48 | 49 |
unit_module_path='test/unit/plugins/modules', |
49 | 50 |
unit_module_utils_path='test/unit/plugins/module_utils', |
... | ... |
@@ -24,7 +24,6 @@ from ..util import ( |
24 | 24 |
display, |
25 | 25 |
find_python, |
26 | 26 |
parse_to_list_of_dict, |
27 |
- make_dirs, |
|
28 | 27 |
is_subdir, |
29 | 28 |
ANSIBLE_LIB_ROOT, |
30 | 29 |
) |
... | ... |
@@ -32,6 +31,8 @@ from ..util import ( |
32 | 32 |
from ..util_common import ( |
33 | 33 |
intercept_command, |
34 | 34 |
run_command, |
35 |
+ write_text_file, |
|
36 |
+ ResultType, |
|
35 | 37 |
) |
36 | 38 |
|
37 | 39 |
from ..ansible_util import ( |
... | ... |
@@ -75,8 +76,10 @@ class ImportTest(SanityMultipleVersion): |
75 | 75 |
|
76 | 76 |
env = ansible_environment(args, color=False) |
77 | 77 |
|
78 |
+ temp_root = os.path.join(ResultType.TMP.path, 'sanity', 'import') |
|
79 |
+ |
|
78 | 80 |
# create a clean virtual environment to minimize the available imports beyond the python standard library |
79 |
- virtual_environment_path = os.path.abspath('test/runner/.tox/minimal-py%s' % python_version.replace('.', '')) |
|
81 |
+ virtual_environment_path = os.path.join(temp_root, 'minimal-py%s' % python_version.replace('.', '')) |
|
80 | 82 |
virtual_environment_bin = os.path.join(virtual_environment_path, 'bin') |
81 | 83 |
|
82 | 84 |
remove_tree(virtual_environment_path) |
... | ... |
@@ -96,7 +99,7 @@ class ImportTest(SanityMultipleVersion): |
96 | 96 |
os.symlink(os.path.abspath(os.path.join(SANITY_ROOT, 'import', 'importer.py')), importer_path) |
97 | 97 |
|
98 | 98 |
# create a minimal python library |
99 |
- python_path = os.path.abspath('test/runner/.tox/import/lib') |
|
99 |
+ python_path = os.path.join(temp_root, 'lib') |
|
100 | 100 |
ansible_path = os.path.join(python_path, 'ansible') |
101 | 101 |
ansible_init = os.path.join(ansible_path, '__init__.py') |
102 | 102 |
ansible_link = os.path.join(ansible_path, 'module_utils') |
... | ... |
@@ -104,10 +107,7 @@ class ImportTest(SanityMultipleVersion): |
104 | 104 |
if not args.explain: |
105 | 105 |
remove_tree(ansible_path) |
106 | 106 |
|
107 |
- make_dirs(ansible_path) |
|
108 |
- |
|
109 |
- with open(ansible_init, 'w'): |
|
110 |
- pass |
|
107 |
+ write_text_file(ansible_init, '', create_directories=True) |
|
111 | 108 |
|
112 | 109 |
os.symlink(os.path.join(ANSIBLE_LIB_ROOT, 'module_utils'), ansible_link) |
113 | 110 |
|
... | ... |
@@ -116,21 +116,22 @@ class ImportTest(SanityMultipleVersion): |
116 | 116 |
# the __init__.py files are needed only for Python 2.x |
117 | 117 |
# the empty modules directory is required for the collection loader to generate the synthetic packages list |
118 | 118 |
|
119 |
- make_dirs(os.path.join(ansible_path, 'utils')) |
|
120 |
- with open(os.path.join(ansible_path, 'utils/__init__.py'), 'w'): |
|
121 |
- pass |
|
119 |
+ write_text_file(os.path.join(ansible_path, 'utils/__init__.py'), '', create_directories=True) |
|
122 | 120 |
|
123 | 121 |
os.symlink(os.path.join(ANSIBLE_LIB_ROOT, 'utils', 'collection_loader.py'), os.path.join(ansible_path, 'utils', 'collection_loader.py')) |
124 | 122 |
os.symlink(os.path.join(ANSIBLE_LIB_ROOT, 'utils', 'singleton.py'), os.path.join(ansible_path, 'utils', 'singleton.py')) |
125 | 123 |
|
126 |
- make_dirs(os.path.join(ansible_path, 'modules')) |
|
127 |
- with open(os.path.join(ansible_path, 'modules/__init__.py'), 'w'): |
|
128 |
- pass |
|
124 |
+ write_text_file(os.path.join(ansible_path, 'modules/__init__.py'), '', create_directories=True) |
|
129 | 125 |
|
130 | 126 |
# activate the virtual environment |
131 | 127 |
env['PATH'] = '%s:%s' % (virtual_environment_bin, env['PATH']) |
132 | 128 |
env['PYTHONPATH'] = python_path |
133 | 129 |
|
130 |
+ env.update( |
|
131 |
+ SANITY_IMPORT_DIR=os.path.relpath(temp_root, data_context().content.root) + os.path.sep, |
|
132 |
+ SANITY_MINIMAL_DIR=os.path.relpath(virtual_environment_path, data_context().content.root) + os.path.sep, |
|
133 |
+ ) |
|
134 |
+ |
|
134 | 135 |
# make sure coverage is available in the virtual environment if needed |
135 | 136 |
if args.coverage: |
136 | 137 |
run_command(args, generate_pip_install(['pip'], 'sanity.import', packages=['setuptools']), env=env) |
... | ... |
@@ -163,9 +164,11 @@ class ImportTest(SanityMultipleVersion): |
163 | 163 |
|
164 | 164 |
results = parse_to_list_of_dict(pattern, ex.stdout) |
165 | 165 |
|
166 |
+ relative_temp_root = os.path.relpath(temp_root, data_context().content.root) + os.path.sep |
|
167 |
+ |
|
166 | 168 |
results = [SanityMessage( |
167 | 169 |
message=r['message'], |
168 |
- path=r['path'], |
|
170 |
+ path=os.path.relpath(r['path'], relative_temp_root) if r['path'].startswith(relative_temp_root) else r['path'], |
|
169 | 171 |
line=int(r['line']), |
170 | 172 |
column=int(r['column']), |
171 | 173 |
) for r in results] |
... | ... |
@@ -2,7 +2,6 @@ |
2 | 2 |
from __future__ import (absolute_import, division, print_function) |
3 | 3 |
__metaclass__ = type |
4 | 4 |
|
5 |
-import json |
|
6 | 5 |
import textwrap |
7 | 6 |
import re |
8 | 7 |
import os |
... | ... |
@@ -37,8 +36,9 @@ from ..util import ( |
37 | 37 |
display, |
38 | 38 |
) |
39 | 39 |
|
40 |
-from ..data import ( |
|
41 |
- data_context, |
|
40 |
+from ..util_common import ( |
|
41 |
+ write_json_test_results, |
|
42 |
+ ResultType, |
|
42 | 43 |
) |
43 | 44 |
|
44 | 45 |
|
... | ... |
@@ -180,8 +180,7 @@ class IntegrationAliasesTest(SanityVersionNeutral): |
180 | 180 |
|
181 | 181 |
self.check_changes(args, results) |
182 | 182 |
|
183 |
- with open(os.path.join(data_context().results, 'bot', 'data-sanity-ci.json'), 'w') as results_fd: |
|
184 |
- json.dump(results, results_fd, sort_keys=True, indent=4) |
|
183 |
+ write_json_test_results(ResultType.BOT, 'data-sanity-ci.json', results) |
|
185 | 184 |
|
186 | 185 |
messages = [] |
187 | 186 |
|
... | ... |
@@ -228,7 +228,7 @@ def walk_integration_targets(): |
228 | 228 |
""" |
229 | 229 |
:rtype: collections.Iterable[IntegrationTarget] |
230 | 230 |
""" |
231 |
- path = 'test/integration/targets' |
|
231 |
+ path = data_context().content.integration_targets_path |
|
232 | 232 |
modules = frozenset(target.module for target in walk_module_targets()) |
233 | 233 |
paths = data_context().content.get_dirs(path) |
234 | 234 |
prefixes = load_integration_prefixes() |
... | ... |
@@ -241,7 +241,7 @@ def load_integration_prefixes(): |
241 | 241 |
""" |
242 | 242 |
:rtype: dict[str, str] |
243 | 243 |
""" |
244 |
- path = 'test/integration' |
|
244 |
+ path = data_context().content.integration_path |
|
245 | 245 |
file_paths = sorted(f for f in data_context().content.get_files(path) if os.path.splitext(os.path.basename(f))[0] == 'target-prefixes') |
246 | 246 |
prefixes = {} |
247 | 247 |
|
... | ... |
@@ -306,7 +306,7 @@ def analyze_integration_target_dependencies(integration_targets): |
306 | 306 |
:type integration_targets: list[IntegrationTarget] |
307 | 307 |
:rtype: dict[str,set[str]] |
308 | 308 |
""" |
309 |
- real_target_root = os.path.realpath('test/integration/targets') + '/' |
|
309 |
+ real_target_root = os.path.realpath(data_context().content.integration_targets_path) + '/' |
|
310 | 310 |
|
311 | 311 |
role_targets = [target for target in integration_targets if target.type == 'role'] |
312 | 312 |
hidden_role_target_names = set(target.name for target in role_targets if 'hidden/' in target.aliases) |
... | ... |
@@ -595,10 +595,12 @@ class IntegrationTarget(CompletionTarget): |
595 | 595 |
if self.type not in ('script', 'role'): |
596 | 596 |
groups.append('hidden') |
597 | 597 |
|
598 |
+ targets_relative_path = data_context().content.integration_targets_path |
|
599 |
+ |
|
598 | 600 |
# Collect file paths before group expansion to avoid including the directories. |
599 | 601 |
# Ignore references to test targets, as those must be defined using `needs/target/*` or other target references. |
600 | 602 |
self.needs_file = tuple(sorted(set('/'.join(g.split('/')[2:]) for g in groups if |
601 |
- g.startswith('needs/file/') and not g.startswith('needs/file/test/integration/targets/')))) |
|
603 |
+ g.startswith('needs/file/') and not g.startswith('needs/file/%s/' % targets_relative_path)))) |
|
602 | 604 |
|
603 | 605 |
for group in itertools.islice(groups, 0, len(groups)): |
604 | 606 |
if '/' in group: |
... | ... |
@@ -3,23 +3,22 @@ from __future__ import (absolute_import, division, print_function) |
3 | 3 |
__metaclass__ = type |
4 | 4 |
|
5 | 5 |
import datetime |
6 |
-import json |
|
7 | 6 |
import os |
8 | 7 |
|
9 | 8 |
from . import types as t |
10 | 9 |
|
11 | 10 |
from .util import ( |
12 | 11 |
display, |
13 |
- make_dirs, |
|
14 |
- to_bytes, |
|
15 | 12 |
) |
16 | 13 |
|
17 |
-from .config import ( |
|
18 |
- TestConfig, |
|
14 |
+from .util_common import ( |
|
15 |
+ write_text_test_results, |
|
16 |
+ write_json_test_results, |
|
17 |
+ ResultType, |
|
19 | 18 |
) |
20 | 19 |
|
21 |
-from .data import ( |
|
22 |
- data_context, |
|
20 |
+from .config import ( |
|
21 |
+ TestConfig, |
|
23 | 22 |
) |
24 | 23 |
|
25 | 24 |
|
... | ... |
@@ -118,23 +117,22 @@ class TestResult: |
118 | 118 |
:type args: TestConfig |
119 | 119 |
""" |
120 | 120 |
|
121 |
- def create_path(self, directory, extension): |
|
121 |
+ def create_result_name(self, extension): |
|
122 | 122 |
""" |
123 |
- :type directory: str |
|
124 | 123 |
:type extension: str |
125 | 124 |
:rtype: str |
126 | 125 |
""" |
127 |
- path = os.path.join(data_context().results, directory, 'ansible-test-%s' % self.command) |
|
126 |
+ name = 'ansible-test-%s' % self.command |
|
128 | 127 |
|
129 | 128 |
if self.test: |
130 |
- path += '-%s' % self.test |
|
129 |
+ name += '-%s' % self.test |
|
131 | 130 |
|
132 | 131 |
if self.python_version: |
133 |
- path += '-python-%s' % self.python_version |
|
132 |
+ name += '-python-%s' % self.python_version |
|
134 | 133 |
|
135 |
- path += extension |
|
134 |
+ name += extension |
|
136 | 135 |
|
137 |
- return path |
|
136 |
+ return name |
|
138 | 137 |
|
139 | 138 |
def save_junit(self, args, test_case, properties=None): |
140 | 139 |
""" |
... | ... |
@@ -143,8 +141,6 @@ class TestResult: |
143 | 143 |
:type properties: dict[str, str] | None |
144 | 144 |
:rtype: str | None |
145 | 145 |
""" |
146 |
- path = self.create_path('junit', '.xml') |
|
147 |
- |
|
148 | 146 |
test_suites = [ |
149 | 147 |
self.junit.TestSuite( |
150 | 148 |
name='ansible-test', |
... | ... |
@@ -159,8 +155,7 @@ class TestResult: |
159 | 159 |
if args.explain: |
160 | 160 |
return |
161 | 161 |
|
162 |
- with open(path, 'wb') as xml: |
|
163 |
- xml.write(to_bytes(report)) |
|
162 |
+ write_text_test_results(ResultType.JUNIT, self.create_result_name('.xml'), report) |
|
164 | 163 |
|
165 | 164 |
|
166 | 165 |
class TestTimeout(TestResult): |
... | ... |
@@ -207,10 +202,7 @@ One or more of the following situations may be responsible: |
207 | 207 |
</testsuites> |
208 | 208 |
''' % (timestamp, message, output) |
209 | 209 |
|
210 |
- path = self.create_path('junit', '.xml') |
|
211 |
- |
|
212 |
- with open(path, 'w') as junit_fd: |
|
213 |
- junit_fd.write(xml.lstrip()) |
|
210 |
+ write_text_test_results(ResultType.JUNIT, self.create_result_name('.xml'), xml.lstrip()) |
|
214 | 211 |
|
215 | 212 |
|
216 | 213 |
class TestSuccess(TestResult): |
... | ... |
@@ -335,16 +327,10 @@ class TestFailure(TestResult): |
335 | 335 |
], |
336 | 336 |
) |
337 | 337 |
|
338 |
- path = self.create_path('bot', '.json') |
|
339 |
- |
|
340 | 338 |
if args.explain: |
341 | 339 |
return |
342 | 340 |
|
343 |
- make_dirs(os.path.dirname(path)) |
|
344 |
- |
|
345 |
- with open(path, 'w') as bot_fd: |
|
346 |
- json.dump(bot_data, bot_fd, indent=4, sort_keys=True) |
|
347 |
- bot_fd.write('\n') |
|
341 |
+ write_json_test_results(ResultType.BOT, self.create_result_name('.json'), bot_data) |
|
348 | 342 |
|
349 | 343 |
def populate_confidence(self, metadata): |
350 | 344 |
""" |
... | ... |
@@ -15,6 +15,7 @@ from ..util import ( |
15 | 15 |
|
16 | 16 |
from ..util_common import ( |
17 | 17 |
intercept_command, |
18 |
+ ResultType, |
|
18 | 19 |
) |
19 | 20 |
|
20 | 21 |
from ..ansible_util import ( |
... | ... |
@@ -98,7 +99,7 @@ def command_units(args): |
98 | 98 |
'yes' if args.color else 'no', |
99 | 99 |
'-p', 'no:cacheprovider', |
100 | 100 |
'-c', os.path.join(ANSIBLE_TEST_DATA_ROOT, 'pytest.ini'), |
101 |
- '--junit-xml', os.path.join(data_context().results, 'junit', 'python%s-units.xml' % version), |
|
101 |
+ '--junit-xml', os.path.join(ResultType.JUNIT.path, 'python%s-units.xml' % version), |
|
102 | 102 |
] |
103 | 103 |
|
104 | 104 |
if not data_context().content.collection: |
... | ... |
@@ -62,7 +62,6 @@ except AttributeError: |
62 | 62 |
MAXFD = -1 |
63 | 63 |
|
64 | 64 |
COVERAGE_CONFIG_NAME = 'coveragerc' |
65 |
-COVERAGE_OUTPUT_NAME = 'coverage' |
|
66 | 65 |
|
67 | 66 |
ANSIBLE_TEST_ROOT = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) |
68 | 67 |
|
... | ... |
@@ -82,9 +81,6 @@ if not os.path.exists(ANSIBLE_LIB_ROOT): |
82 | 82 |
ANSIBLE_TEST_DATA_ROOT = os.path.join(ANSIBLE_TEST_ROOT, '_data') |
83 | 83 |
ANSIBLE_TEST_CONFIG_ROOT = os.path.join(ANSIBLE_TEST_ROOT, 'config') |
84 | 84 |
|
85 |
-INTEGRATION_DIR_RELATIVE = 'test/integration' |
|
86 |
-INTEGRATION_VARS_FILE_RELATIVE = os.path.join(INTEGRATION_DIR_RELATIVE, 'integration_config.yml') |
|
87 |
- |
|
88 | 85 |
# Modes are set to allow all users the same level of access. |
89 | 86 |
# This permits files to be used in tests that change users. |
90 | 87 |
# The only exception is write access to directories for the user creating them. |
... | ... |
@@ -801,8 +797,8 @@ def get_available_port(): |
801 | 801 |
|
802 | 802 |
def get_subclasses(class_type): # type: (t.Type[C]) -> t.Set[t.Type[C]] |
803 | 803 |
"""Returns the set of types that are concrete subclasses of the given type.""" |
804 |
- subclasses = set() |
|
805 |
- queue = [class_type] |
|
804 |
+ subclasses = set() # type: t.Set[t.Type[C]] |
|
805 |
+ queue = [class_type] # type: t.List[t.Type[C]] |
|
806 | 806 |
|
807 | 807 |
while queue: |
808 | 808 |
parent = queue.pop() |
... | ... |
@@ -4,15 +4,17 @@ __metaclass__ = type |
4 | 4 |
|
5 | 5 |
import atexit |
6 | 6 |
import contextlib |
7 |
+import json |
|
7 | 8 |
import os |
8 | 9 |
import shutil |
9 | 10 |
import tempfile |
10 | 11 |
import textwrap |
11 | 12 |
|
13 |
+from . import types as t |
|
14 |
+ |
|
12 | 15 |
from .util import ( |
13 | 16 |
common_environment, |
14 | 17 |
COVERAGE_CONFIG_NAME, |
15 |
- COVERAGE_OUTPUT_NAME, |
|
16 | 18 |
display, |
17 | 19 |
find_python, |
18 | 20 |
is_shippable, |
... | ... |
@@ -22,6 +24,7 @@ from .util import ( |
22 | 22 |
raw_command, |
23 | 23 |
to_bytes, |
24 | 24 |
ANSIBLE_TEST_DATA_ROOT, |
25 |
+ make_dirs, |
|
25 | 26 |
) |
26 | 27 |
|
27 | 28 |
from .data import ( |
... | ... |
@@ -29,6 +32,47 @@ from .data import ( |
29 | 29 |
) |
30 | 30 |
|
31 | 31 |
|
32 |
+class ResultType: |
|
33 |
+ """Test result type.""" |
|
34 |
+ BOT = None # type: ResultType |
|
35 |
+ COVERAGE = None # type: ResultType |
|
36 |
+ DATA = None # type: ResultType |
|
37 |
+ JUNIT = None # type: ResultType |
|
38 |
+ LOGS = None # type: ResultType |
|
39 |
+ REPORTS = None # type: ResultType |
|
40 |
+ TMP = None # type: ResultType |
|
41 |
+ |
|
42 |
+ @staticmethod |
|
43 |
+ def _populate(): |
|
44 |
+ ResultType.BOT = ResultType('bot') |
|
45 |
+ ResultType.COVERAGE = ResultType('coverage') |
|
46 |
+ ResultType.DATA = ResultType('data') |
|
47 |
+ ResultType.JUNIT = ResultType('junit') |
|
48 |
+ ResultType.LOGS = ResultType('logs') |
|
49 |
+ ResultType.REPORTS = ResultType('reports') |
|
50 |
+ ResultType.TMP = ResultType('.tmp') |
|
51 |
+ |
|
52 |
+ def __init__(self, name): # type: (str) -> None |
|
53 |
+ self.name = name |
|
54 |
+ |
|
55 |
+ @property |
|
56 |
+ def relative_path(self): # type: () -> str |
|
57 |
+ """The content relative path to the results.""" |
|
58 |
+ return os.path.join(data_context().results_relative, self.name) |
|
59 |
+ |
|
60 |
+ @property |
|
61 |
+ def path(self): # type: () -> str |
|
62 |
+ """The absolute path to the results.""" |
|
63 |
+ return os.path.join(data_context().results, self.name) |
|
64 |
+ |
|
65 |
+ def __str__(self): # type: () -> str |
|
66 |
+ return self.name |
|
67 |
+ |
|
68 |
+ |
|
69 |
+# noinspection PyProtectedMember |
|
70 |
+ResultType._populate() # pylint: disable=protected-access |
|
71 |
+ |
|
72 |
+ |
|
32 | 73 |
class CommonConfig: |
33 | 74 |
"""Configuration common to all commands.""" |
34 | 75 |
def __init__(self, args, command): |
... | ... |
@@ -75,6 +119,33 @@ def named_temporary_file(args, prefix, suffix, directory, content): |
75 | 75 |
yield tempfile_fd.name |
76 | 76 |
|
77 | 77 |
|
78 |
+def write_json_test_results(category, name, content): # type: (ResultType, str, t.Union[t.List[t.Any], t.Dict[str, t.Any]]) -> None |
|
79 |
+ """Write the given json content to the specified test results path, creating directories as needed.""" |
|
80 |
+ path = os.path.join(category.path, name) |
|
81 |
+ write_json_file(path, content, create_directories=True) |
|
82 |
+ |
|
83 |
+ |
|
84 |
+def write_text_test_results(category, name, content): # type: (ResultType, str, str) -> None |
|
85 |
+ """Write the given text content to the specified test results path, creating directories as needed.""" |
|
86 |
+ path = os.path.join(category.path, name) |
|
87 |
+ write_text_file(path, content, create_directories=True) |
|
88 |
+ |
|
89 |
+ |
|
90 |
+def write_json_file(path, content, create_directories=False): # type: (str, t.Union[t.List[t.Any], t.Dict[str, t.Any]], bool) -> None |
|
91 |
+ """Write the given json content to the specified path, optionally creating missing directories.""" |
|
92 |
+ text_content = json.dumps(content, sort_keys=True, indent=4, ensure_ascii=False) + '\n' |
|
93 |
+ write_text_file(path, text_content, create_directories=create_directories) |
|
94 |
+ |
|
95 |
+ |
|
96 |
+def write_text_file(path, content, create_directories=False): # type: (str, str, bool) -> None |
|
97 |
+ """Write the given text content to the specified path, optionally creating missing directories.""" |
|
98 |
+ if create_directories: |
|
99 |
+ make_dirs(os.path.dirname(path)) |
|
100 |
+ |
|
101 |
+ with open(to_bytes(path), 'wb') as file: |
|
102 |
+ file.write(to_bytes(content)) |
|
103 |
+ |
|
104 |
+ |
|
78 | 105 |
def get_python_path(args, interpreter): |
79 | 106 |
""" |
80 | 107 |
:type args: TestConfig |
... | ... |
@@ -126,8 +197,7 @@ def get_python_path(args, interpreter): |
126 | 126 |
execv(python, [python] + argv[1:]) |
127 | 127 |
''' % (interpreter, interpreter)).lstrip() |
128 | 128 |
|
129 |
- with open(injected_interpreter, 'w') as python_fd: |
|
130 |
- python_fd.write(code) |
|
129 |
+ write_text_file(injected_interpreter, code) |
|
131 | 130 |
|
132 | 131 |
os.chmod(injected_interpreter, MODE_FILE_EXECUTE) |
133 | 132 |
|
... | ... |
@@ -173,7 +243,7 @@ def get_coverage_environment(args, target_name, version, temp_path, module_cover |
173 | 173 |
raise Exception('No temp path and no coverage config base path. Check for missing coverage_context usage.') |
174 | 174 |
|
175 | 175 |
config_file = os.path.join(coverage_config_base_path, COVERAGE_CONFIG_NAME) |
176 |
- coverage_file = os.path.join(coverage_output_base_path, COVERAGE_OUTPUT_NAME, '%s=%s=%s=%s=coverage' % ( |
|
176 |
+ coverage_file = os.path.join(coverage_output_base_path, ResultType.COVERAGE.name, '%s=%s=%s=%s=coverage' % ( |
|
177 | 177 |
args.command, target_name, args.coverage_label or 'local-%s' % version, 'python-%s' % version)) |
178 | 178 |
|
179 | 179 |
if not args.explain and not os.path.exists(config_file): |
... | ... |
@@ -94,7 +94,13 @@ def fail(message, output): # type: (str, str) -> NoReturn |
94 | 94 |
</testsuites> |
95 | 95 |
''' % (timestamp, message, output) |
96 | 96 |
|
97 |
- with open('test/results/junit/check-matrix.xml', 'w') as junit_fd: |
|
97 |
+ path = 'shippable/testresults/check-matrix.xml' |
|
98 |
+ dir_path = os.path.dirname(path) |
|
99 |
+ |
|
100 |
+ if not os.path.exists(dir_path): |
|
101 |
+ os.makedirs(dir_path) |
|
102 |
+ |
|
103 |
+ with open(path, 'w') as junit_fd: |
|
98 | 104 |
junit_fd.write(xml.lstrip()) |
99 | 105 |
|
100 | 106 |
sys.stderr.write(message + '\n') |
... | ... |
@@ -73,55 +73,64 @@ find lib/ansible/modules -type d -empty -print -delete |
73 | 73 |
|
74 | 74 |
function cleanup |
75 | 75 |
{ |
76 |
- if find test/results/coverage/ -mindepth 1 -name '.*' -prune -o -print -quit | grep -q .; then |
|
77 |
- # for complete on-demand coverage generate a report for all files with no coverage on the "other" job so we only have one copy |
|
78 |
- if [ "${COVERAGE}" == "--coverage" ] && [ "${CHANGED}" == "" ] && [ "${test}" == "sanity/1" ]; then |
|
79 |
- stub="--stub" |
|
80 |
- else |
|
81 |
- stub="" |
|
76 |
+ if [ -d test/results/coverage/ ]; then |
|
77 |
+ if find test/results/coverage/ -mindepth 1 -name '.*' -prune -o -print -quit | grep -q .; then |
|
78 |
+ # for complete on-demand coverage generate a report for all files with no coverage on the "other" job so we only have one copy |
|
79 |
+ if [ "${COVERAGE}" == "--coverage" ] && [ "${CHANGED}" == "" ] && [ "${test}" == "sanity/1" ]; then |
|
80 |
+ stub="--stub" |
|
81 |
+ else |
|
82 |
+ stub="" |
|
83 |
+ fi |
|
84 |
+ |
|
85 |
+ # use python 3.7 for coverage to avoid running out of memory during coverage xml processing |
|
86 |
+ # only use it for coverage to avoid the additional overhead of setting up a virtual environment for a potential no-op job |
|
87 |
+ virtualenv --python /usr/bin/python3.7 ~/ansible-venv |
|
88 |
+ set +ux |
|
89 |
+ . ~/ansible-venv/bin/activate |
|
90 |
+ set -ux |
|
91 |
+ |
|
92 |
+ # shellcheck disable=SC2086 |
|
93 |
+ ansible-test coverage xml --color -v --requirements --group-by command --group-by version ${stub:+"$stub"} |
|
94 |
+ cp -a test/results/reports/coverage=*.xml shippable/codecoverage/ |
|
95 |
+ |
|
96 |
+ # upload coverage report to codecov.io only when using complete on-demand coverage |
|
97 |
+ if [ "${COVERAGE}" == "--coverage" ] && [ "${CHANGED}" == "" ]; then |
|
98 |
+ for file in test/results/reports/coverage=*.xml; do |
|
99 |
+ flags="${file##*/coverage=}" |
|
100 |
+ flags="${flags%-powershell.xml}" |
|
101 |
+ flags="${flags%.xml}" |
|
102 |
+ # remove numbered component from stub files when converting to tags |
|
103 |
+ flags="${flags//stub-[0-9]*/stub}" |
|
104 |
+ flags="${flags//=/,}" |
|
105 |
+ flags="${flags//[^a-zA-Z0-9_,]/_}" |
|
106 |
+ |
|
107 |
+ bash <(curl -s https://codecov.io/bash) \ |
|
108 |
+ -f "${file}" \ |
|
109 |
+ -F "${flags}" \ |
|
110 |
+ -n "${test}" \ |
|
111 |
+ -t 83cd8957-dc76-488c-9ada-210dcea51633 \ |
|
112 |
+ -X coveragepy \ |
|
113 |
+ -X gcov \ |
|
114 |
+ -X fix \ |
|
115 |
+ -X search \ |
|
116 |
+ -X xcode \ |
|
117 |
+ || echo "Failed to upload code coverage report to codecov.io: ${file}" |
|
118 |
+ done |
|
119 |
+ fi |
|
82 | 120 |
fi |
121 |
+ fi |
|
83 | 122 |
|
84 |
- # use python 3.7 for coverage to avoid running out of memory during coverage xml processing |
|
85 |
- # only use it for coverage to avoid the additional overhead of setting up a virtual environment for a potential no-op job |
|
86 |
- virtualenv --python /usr/bin/python3.7 ~/ansible-venv |
|
87 |
- set +ux |
|
88 |
- . ~/ansible-venv/bin/activate |
|
89 |
- set -ux |
|
90 |
- |
|
91 |
- # shellcheck disable=SC2086 |
|
92 |
- ansible-test coverage xml --color -v --requirements --group-by command --group-by version ${stub:+"$stub"} |
|
93 |
- cp -a test/results/reports/coverage=*.xml shippable/codecoverage/ |
|
94 |
- |
|
95 |
- # upload coverage report to codecov.io only when using complete on-demand coverage |
|
96 |
- if [ "${COVERAGE}" == "--coverage" ] && [ "${CHANGED}" == "" ]; then |
|
97 |
- for file in test/results/reports/coverage=*.xml; do |
|
98 |
- flags="${file##*/coverage=}" |
|
99 |
- flags="${flags%-powershell.xml}" |
|
100 |
- flags="${flags%.xml}" |
|
101 |
- # remove numbered component from stub files when converting to tags |
|
102 |
- flags="${flags//stub-[0-9]*/stub}" |
|
103 |
- flags="${flags//=/,}" |
|
104 |
- flags="${flags//[^a-zA-Z0-9_,]/_}" |
|
105 |
- |
|
106 |
- bash <(curl -s https://codecov.io/bash) \ |
|
107 |
- -f "${file}" \ |
|
108 |
- -F "${flags}" \ |
|
109 |
- -n "${test}" \ |
|
110 |
- -t 83cd8957-dc76-488c-9ada-210dcea51633 \ |
|
111 |
- -X coveragepy \ |
|
112 |
- -X gcov \ |
|
113 |
- -X fix \ |
|
114 |
- -X search \ |
|
115 |
- -X xcode \ |
|
116 |
- || echo "Failed to upload code coverage report to codecov.io: ${file}" |
|
117 |
- done |
|
118 |
- fi |
|
123 |
+ if [ -d test/results/junit/ ]; then |
|
124 |
+ cp -a test/results/junit/ shippable/testresults/ |
|
119 | 125 |
fi |
120 | 126 |
|
121 |
- rmdir shippable/testresults/ |
|
122 |
- cp -a test/results/junit/ shippable/testresults/ |
|
123 |
- cp -a test/results/data/ shippable/testresults/ |
|
124 |
- cp -aT test/results/bot/ shippable/testresults/ |
|
127 |
+ if [ -d test/results/data/ ]; then |
|
128 |
+ cp -a test/results/data/ shippable/testresults/ |
|
129 |
+ fi |
|
130 |
+ |
|
131 |
+ if [ -d test/results/bot/ ]; then |
|
132 |
+ cp -aT test/results/bot/ shippable/testresults/ |
|
133 |
+ fi |
|
125 | 134 |
} |
126 | 135 |
|
127 | 136 |
trap cleanup EXIT |