Browse code

Update ansible-test handling of delegation paths. (#69056)

Matt Clay authored on 2020/04/21 14:04:25
Showing 11 changed files
... ...
@@ -105,5 +105,3 @@ test/units/.coverage.*
105 105
 /SYMLINK_CACHE.json
106 106
 changelogs/.plugin-cache.yaml
107 107
 .ansible-test-timeout.json
108
-# ansible-test temporary metadata file for use with delegation
109
-/metadata-*.json
110 108
new file mode 100644
... ...
@@ -0,0 +1,2 @@
0
+minor_changes:
1
+    - ansible-test now places the ansible source and collections content in separate directories when using the ``--docker`` or ``--remote`` options.
... ...
@@ -197,12 +197,7 @@ class CloudBase(ABC):
197 197
         def config_callback(files):  # type: (t.List[t.Tuple[str, str]]) -> None
198 198
             """Add the config file to the payload file list."""
199 199
             if self._get_cloud_config(self._CONFIG_PATH, ''):
200
-                if data_context().content.collection:
201
-                    working_path = data_context().content.collection.directory
202
-                else:
203
-                    working_path = ''
204
-
205
-                pair = (self.config_path, os.path.join(working_path, os.path.relpath(self.config_path, data_context().content.root)))
200
+                pair = (self.config_path, os.path.relpath(self.config_path, data_context().content.root))
206 201
 
207 202
                 if pair not in files:
208 203
                     display.info('Including %s config: %s -> %s' % (self.platform, pair[0], pair[1]), verbosity=3)
... ...
@@ -209,13 +209,8 @@ class TestConfig(EnvironmentConfig):
209 209
             """Add the metadata file to the payload file list."""
210 210
             config = self
211 211
 
212
-            if data_context().content.collection:
213
-                working_path = data_context().content.collection.directory
214
-            else:
215
-                working_path = ''
216
-
217 212
             if self.metadata_path:
218
-                files.append((os.path.abspath(config.metadata_path), os.path.join(working_path, config.metadata_path)))
213
+                files.append((os.path.abspath(config.metadata_path), config.metadata_path))
219 214
 
220 215
         data_context().register_payload_callback(metadata_callback)
221 216
 
... ...
@@ -617,13 +617,8 @@ class SshKey:
617 617
             Add the SSH keys to the payload file list.
618 618
             They are either outside the source tree or in the cache dir which is ignored by default.
619 619
             """
620
-            if data_context().content.collection:
621
-                working_path = data_context().content.collection.directory
622
-            else:
623
-                working_path = ''
624
-
625
-            files.append((key, os.path.join(working_path, os.path.relpath(key_dst, data_context().content.root))))
626
-            files.append((pub, os.path.join(working_path, os.path.relpath(pub_dst, data_context().content.root))))
620
+            files.append((key, os.path.relpath(key_dst, data_context().content.root)))
621
+            files.append((pub, os.path.relpath(pub_dst, data_context().content.root)))
627 622
 
628 623
         data_context().register_payload_callback(ssh_key_callback)
629 624
 
... ...
@@ -117,8 +117,10 @@ def delegate(args, exclude, require, integration_targets):
117 117
     :rtype: bool
118 118
     """
119 119
     if isinstance(args, TestConfig):
120
-        with tempfile.NamedTemporaryFile(prefix='metadata-', suffix='.json', dir=data_context().content.root) as metadata_fd:
121
-            args.metadata_path = os.path.basename(metadata_fd.name)
120
+        make_dirs(ResultType.TMP.path)
121
+
122
+        with tempfile.NamedTemporaryFile(prefix='metadata-', suffix='.json', dir=ResultType.TMP.path) as metadata_fd:
123
+            args.metadata_path = os.path.join(ResultType.TMP.relative_path, os.path.basename(metadata_fd.name))
122 124
             args.metadata.to_file(args.metadata_path)
123 125
 
124 126
             try:
... ...
@@ -244,16 +246,17 @@ def delegate_docker(args, exclude, require, integration_targets):
244 244
 
245 245
     python_interpreter = get_python_interpreter(args, get_docker_completion(), args.docker_raw)
246 246
 
247
-    install_root = '/root/ansible'
247
+    pwd = '/root'
248
+    ansible_root = os.path.join(pwd, 'ansible')
248 249
 
249 250
     if data_context().content.collection:
250
-        content_root = os.path.join(install_root, data_context().content.collection.directory)
251
+        content_root = os.path.join(pwd, data_context().content.collection.directory)
251 252
     else:
252
-        content_root = install_root
253
+        content_root = ansible_root
253 254
 
254 255
     remote_results_root = os.path.join(content_root, data_context().content.results_path)
255 256
 
256
-    cmd = generate_command(args, python_interpreter, os.path.join(install_root, 'bin'), content_root, options, exclude, require)
257
+    cmd = generate_command(args, python_interpreter, os.path.join(ansible_root, 'bin'), content_root, options, exclude, require)
257 258
 
258 259
     if isinstance(args, TestConfig):
259 260
         if args.coverage and not args.coverage_label:
... ...
@@ -321,9 +324,8 @@ def delegate_docker(args, exclude, require, integration_targets):
321 321
             # write temporary files to /root since /tmp isn't ready immediately on container start
322 322
             docker_put(args, test_id, os.path.join(ANSIBLE_TEST_DATA_ROOT, 'setup', 'docker.sh'), '/root/docker.sh')
323 323
             docker_exec(args, test_id, ['/bin/bash', '/root/docker.sh'])
324
-            docker_put(args, test_id, local_source_fd.name, '/root/ansible.tgz')
325
-            docker_exec(args, test_id, ['mkdir', '/root/ansible'])
326
-            docker_exec(args, test_id, ['tar', 'oxzf', '/root/ansible.tgz', '-C', '/root/ansible'])
324
+            docker_put(args, test_id, local_source_fd.name, '/root/test.tgz')
325
+            docker_exec(args, test_id, ['tar', 'oxzf', '/root/test.tgz', '-C', '/root'])
327 326
 
328 327
             # docker images are only expected to have a single python version available
329 328
             if isinstance(args, UnitsConfig) and not args.python:
... ...
@@ -440,14 +442,14 @@ def delegate_remote(args, exclude, require, integration_targets):
440 440
 
441 441
             python_interpreter = get_python_interpreter(args, get_remote_completion(), args.remote)
442 442
 
443
-            install_root = os.path.join(pwd, 'ansible')
443
+            ansible_root = os.path.join(pwd, 'ansible')
444 444
 
445 445
             if data_context().content.collection:
446
-                content_root = os.path.join(install_root, data_context().content.collection.directory)
446
+                content_root = os.path.join(pwd, data_context().content.collection.directory)
447 447
             else:
448
-                content_root = install_root
448
+                content_root = ansible_root
449 449
 
450
-            cmd = generate_command(args, python_interpreter, os.path.join(install_root, 'bin'), content_root, options, exclude, require)
450
+            cmd = generate_command(args, python_interpreter, os.path.join(ansible_root, 'bin'), content_root, options, exclude, require)
451 451
 
452 452
             if httptester_id:
453 453
                 cmd += ['--inject-httptester']
... ...
@@ -972,12 +972,7 @@ def command_integration_filter(args,  # type: TIntegrationConfig
972 972
             Add the integration config vars file to the payload file list.
973 973
             This will preserve the file during delegation even if the file is ignored by source control.
974 974
             """
975
-            if data_context().content.collection:
976
-                working_path = data_context().content.collection.directory
977
-            else:
978
-                working_path = ''
979
-
980
-            files.append((vars_file_src, os.path.join(working_path, data_context().content.integration_vars_path)))
975
+            files.append((vars_file_src, data_context().content.integration_vars_path))
981 976
 
982 977
         data_context().register_payload_callback(integration_config_callback)
983 978
 
... ...
@@ -170,12 +170,7 @@ def delegate_inventory(args, inventory_path_src):  # type: (IntegrationConfig, s
170 170
         Add the inventory file to the payload file list.
171 171
         This will preserve the file during delegation even if it is ignored or is outside the content and install roots.
172 172
         """
173
-        if data_context().content.collection:
174
-            working_path = data_context().content.collection.directory
175
-        else:
176
-            working_path = ''
177
-
178
-        inventory_path = os.path.join(working_path, get_inventory_relative_path(args))
173
+        inventory_path = get_inventory_relative_path(args)
179 174
         inventory_tuple = inventory_path_src, inventory_path
180 175
 
181 176
         if os.path.isfile(inventory_path_src) and inventory_tuple not in files:
... ...
@@ -277,7 +277,7 @@ class ManagePosixCI:
277 277
             # being different and -z not being recognized. This pattern works
278 278
             # with both versions of tar.
279 279
             self.ssh(
280
-                'rm -rf ~/ansible && mkdir ~/ansible && cd ~/ansible && gunzip --stdout %s | tar oxf - && rm %s' %
280
+                'rm -rf ~/ansible ~/ansible_collections && cd ~/ && gunzip --stdout %s | tar oxf - && rm %s' %
281 281
                 (remote_source_path, remote_source_path)
282 282
             )
283 283
 
... ...
@@ -75,7 +75,7 @@ def create_payload(args, dst_path):  # type: (CommonConfig, str) -> None
75 75
         files = [f for f in files if
76 76
                  is_subdir(f[1], 'bin/') or
77 77
                  is_subdir(f[1], 'lib/ansible/') or
78
-                 (is_subdir(f[1], 'test/lib/ansible_test/') and not is_subdir(f[1], 'test/lib/ansible_test/tests/'))]
78
+                 is_subdir(f[1], 'test/lib/ansible_test/')]
79 79
 
80 80
         if not isinstance(args, (ShellConfig, IntegrationConfig)):
81 81
             # exclude built-in ansible modules when they are not needed
... ...
@@ -83,12 +83,35 @@ def create_payload(args, dst_path):  # type: (CommonConfig, str) -> None
83 83
 
84 84
         collection_layouts = data_context().create_collection_layouts()
85 85
 
86
+        content_files = []
87
+        extra_files = []
88
+
86 89
         for layout in collection_layouts:
87
-            # include files from each collection in the same collection root as the content being tested
88
-            files.extend((os.path.join(layout.root, path), os.path.join(layout.collection.directory, path)) for path in layout.all_files())
90
+            if layout == data_context().content:
91
+                # include files from the current collection (layout.collection.directory will be added later)
92
+                content_files.extend((os.path.join(layout.root, path), path) for path in data_context().content.all_files())
93
+            else:
94
+                # include files from each collection in the same collection root as the content being tested
95
+                extra_files.extend((os.path.join(layout.root, path), os.path.join(layout.collection.directory, path)) for path in layout.all_files())
96
+    else:
97
+        # when testing ansible itself the ansible source is the content
98
+        content_files = files
99
+        # there are no extra files when testing ansible itself
100
+        extra_files = []
89 101
 
90 102
     for callback in data_context().payload_callbacks:
91
-        callback(files)
103
+        # execute callbacks only on the content paths
104
+        # this is done before placing them in the appropriate subdirectory (see below)
105
+        callback(content_files)
106
+
107
+    # place ansible source files under the 'ansible' directory on the delegated host
108
+    files = [(src, os.path.join('ansible', dst)) for src, dst in files]
109
+
110
+    if data_context().content.collection:
111
+        # place collection files under the 'ansible_collections/{namespace}/{collection}' directory on the delegated host
112
+        files.extend((src, os.path.join(data_context().content.collection.directory, dst)) for src, dst in content_files)
113
+        # extra files already have the correct destination path
114
+        files.extend(extra_files)
92 115
 
93 116
     # maintain predictable file order
94 117
     files = sorted(set(files))
... ...
@@ -27,7 +27,6 @@ def assemble_files_to_ship(complete_file_list):
27 27
         'hacking/shippable/*',
28 28
         'hacking/tests/*',
29 29
         'hacking/ticket_stubs/*',
30
-        'metadata-*.json',  # ansible-test with --docker produces this tmp file.
31 30
         'test/sanity/code-smell/botmeta.*',
32 31
         'test/utils/*',
33 32
         'test/utils/*/*',