Browse code

Replace the inhouse collection dependency resolver with `resolvelib`

PR #72591

This change:

* Adds an artifacts manager that abstracts away extracting the
metadata from artifacts, downloading and caching them in a
temporary location.

* Adds `resolvelib` to direct ansible-core dependencies[0].

* Implements a `resolvelib`-based dependency resolver for
`collection` subcommands that replaces the legacy
in-house code.

This is a dependency resolution library that pip 20.3+ uses
by default. It's now integrated for use in the collection
dependency resolution in ansible-galaxy CLI.

* Refactors the `ansible-galaxy collection` CLI.
In particular, it:

- reimplements most of the `download`, `install`, `list` and
`verify` subcommands from scratch;

- reuses helper bits previously moved out into external modules;

- replaces the old in-house resolver with a more clear
implementation based on the resolvelib library[0][1][2].

* Adds a multi Galaxy API proxy layer that abstracts accessing the
version and dependencies via API or local artifacts manager.

* Makes `GalaxyAPI` instances sortable.

* Adds string representation methods to `GalaxyAPI`.

* Adds dev representation to `GalaxyAPI`.

* Removes unnecessary integration and unit tests.

* Aligns the tests with the new expectations.

* Adds more tests, integration ones in particular.

[0]: https://pypi.org/p/resolvelib
[1]: https://github.com/sarugaku/resolvelib
[2]: https://pradyunsg.me/blog/2020/03/27/pip-resolver-testing

Co-Authored-By: Jordan Borean <jborean93@gmail.com>
Co-Authored-By: Matt Clay <matt@mystile.com>
Co-Authored-By: Sam Doran <sdoran@redhat.com>
Co-Authored-By: Sloane Hertel <shertel@redhat.com>
Co-Authored-By: Sviatoslav Sydorenko <webknjaz@redhat.com>

Signed-Off-By: Sviatoslav Sydorenko <webknjaz@redhat.com>

Sviatoslav Sydorenko authored on 2021/01/28 06:23:22
Showing 35 changed files
1 1
new file mode 100644
... ...
@@ -0,0 +1,27 @@
0
+---
1
+breaking_changes:
2
+- >-
3
+  Replaced the in-tree dependency resolver with an external implementation
4
+  that pip >= 20.3 now uses by default — ``resolvelib``.
5
+  (https://github.com/ansible/ansible/issues/71784)
6
+- >-
7
+  Made SCM collections be reinstalled regardless of ``--force`` being
8
+  present.
9
+major_changes:
10
+- |
11
+  Declared ``resolvelib >= 0.5.3, < 0.6.0`` a direct dependency of
12
+  ansible-core. Refs:
13
+  - https://github.com/sarugaku/resolvelib
14
+  - https://pypi.org/p/resolvelib
15
+  - https://pradyunsg.me/blog/2020/03/27/pip-resolver-testing
16
+- >-
17
+  It became possible to install Ansible Collections from local folders and
18
+  namespace folders, similar to an SCM structure with multiple collections.
19
+minor_changes:
20
+- >-
21
+  Refactored ``ansible-galaxy collection [download|install|list|verify]``
22
+  CLI subcommands with the public interface kept intact.
23
+- >-
24
+  The new dependency resolver prefers ``MANIFEST.json`` over ``galaxy.yml``
25
+  if it exists in the target directory.
26
+...
... ...
@@ -1,5 +1,5 @@
1 1
 # Copyright: (c) 2013, James Cammarata <jcammarata@ansible.com>
2
-# Copyright: (c) 2018, Ansible Project
2
+# Copyright: (c) 2018-2021, Ansible Project
3 3
 # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
4 4
 
5 5
 from __future__ import (absolute_import, division, print_function)
... ...
@@ -24,7 +24,6 @@ from ansible.galaxy import Galaxy, get_collections_galaxy_meta_info
24 24
 from ansible.galaxy.api import GalaxyAPI
25 25
 from ansible.galaxy.collection import (
26 26
     build_collection,
27
-    CollectionRequirement,
28 27
     download_collections,
29 28
     find_existing_collections,
30 29
     install_collections,
... ...
@@ -33,6 +32,10 @@ from ansible.galaxy.collection import (
33 33
     validate_collection_path,
34 34
     verify_collections
35 35
 )
36
+from ansible.galaxy.collection.concrete_artifact_manager import (
37
+    ConcreteArtifactsManager,
38
+)
39
+from ansible.galaxy.dependency_resolution.dataclasses import Requirement
36 40
 
37 41
 from ansible.galaxy.role import GalaxyRole
38 42
 from ansible.galaxy.token import BasicAuthToken, GalaxyToken, KeycloakToken, NoTokenSentinel
... ...
@@ -52,6 +55,26 @@ display = Display()
52 52
 urlparse = six.moves.urllib.parse.urlparse
53 53
 
54 54
 
55
+def with_collection_artifacts_manager(wrapped_method):
56
+    """Inject an artifacts manager if not passed explicitly.
57
+
58
+    This decorator constructs a ConcreteArtifactsManager and maintains
59
+    the related temporary directory auto-cleanup around the target
60
+    method invocation.
61
+    """
62
+    def method_wrapper(*args, **kwargs):
63
+        if 'artifacts_manager' in kwargs:
64
+            return wrapped_method(*args, **kwargs)
65
+
66
+        with ConcreteArtifactsManager.under_tmpdir(
67
+                C.DEFAULT_LOCAL_TMP,
68
+                validate_certs=not context.CLIARGS['ignore_certs'],
69
+        ) as concrete_artifact_cm:
70
+            kwargs['artifacts_manager'] = concrete_artifact_cm
71
+            return wrapped_method(*args, **kwargs)
72
+    return method_wrapper
73
+
74
+
55 75
 def _display_header(path, h1, h2, w1=10, w2=7):
56 76
     display.display('\n# {0}\n{1:{cwidth}} {2:{vwidth}}\n{3} {4}\n'.format(
57 77
         path,
... ...
@@ -76,20 +99,19 @@ def _display_role(gr):
76 76
 
77 77
 def _display_collection(collection, cwidth=10, vwidth=7, min_cwidth=10, min_vwidth=7):
78 78
     display.display('{fqcn:{cwidth}} {version:{vwidth}}'.format(
79
-        fqcn=to_text(collection),
80
-        version=collection.latest_version,
79
+        fqcn=to_text(collection.fqcn),
80
+        version=collection.ver,
81 81
         cwidth=max(cwidth, min_cwidth),  # Make sure the width isn't smaller than the header
82 82
         vwidth=max(vwidth, min_vwidth)
83 83
     ))
84 84
 
85 85
 
86 86
 def _get_collection_widths(collections):
87
-    if is_iterable(collections):
88
-        fqcn_set = set(to_text(c) for c in collections)
89
-        version_set = set(to_text(c.latest_version) for c in collections)
90
-    else:
91
-        fqcn_set = set([to_text(collections)])
92
-        version_set = set([collections.latest_version])
87
+    if not is_iterable(collections):
88
+        collections = (collections, )
89
+
90
+    fqcn_set = {to_text(c.fqcn) for c in collections}
91
+    version_set = {to_text(c.ver) for c in collections}
93 92
 
94 93
     fqcn_length = len(max(fqcn_set, key=len))
95 94
     version_length = len(max(version_set, key=len))
... ...
@@ -447,7 +469,7 @@ class GalaxyCLI(CLI):
447 447
 
448 448
         # Need to filter out empty strings or non truthy values as an empty server list env var is equal to [''].
449 449
         server_list = [s for s in C.GALAXY_SERVER_LIST or [] if s]
450
-        for server_key in server_list:
450
+        for server_priority, server_key in enumerate(server_list, start=1):
451 451
             # Config definitions are looked up dynamically based on the C.GALAXY_SERVER_LIST entry. We look up the
452 452
             # section [galaxy_server.<server>] for the values url, username, password, and token.
453 453
             config_dict = dict((k, server_config_def(server_key, k, req)) for k, req in server_def)
... ...
@@ -486,7 +508,11 @@ class GalaxyCLI(CLI):
486 486
                         server_options['token'] = GalaxyToken(token=token_val)
487 487
 
488 488
             server_options.update(galaxy_options)
489
-            config_servers.append(GalaxyAPI(self.galaxy, server_key, **server_options))
489
+            config_servers.append(GalaxyAPI(
490
+                self.galaxy, server_key,
491
+                priority=server_priority,
492
+                **server_options
493
+            ))
490 494
 
491 495
         cmd_server = context.CLIARGS['api_server']
492 496
         cmd_token = GalaxyToken(token=context.CLIARGS['api_key'])
... ...
@@ -497,15 +523,21 @@ class GalaxyCLI(CLI):
497 497
             if config_server:
498 498
                 self.api_servers.append(config_server)
499 499
             else:
500
-                self.api_servers.append(GalaxyAPI(self.galaxy, 'cmd_arg', cmd_server, token=cmd_token,
501
-                                                  **galaxy_options))
500
+                self.api_servers.append(GalaxyAPI(
501
+                    self.galaxy, 'cmd_arg', cmd_server, token=cmd_token,
502
+                    priority=len(config_servers) + 1,
503
+                    **galaxy_options
504
+                ))
502 505
         else:
503 506
             self.api_servers = config_servers
504 507
 
505 508
         # Default to C.GALAXY_SERVER if no servers were defined
506 509
         if len(self.api_servers) == 0:
507
-            self.api_servers.append(GalaxyAPI(self.galaxy, 'default', C.GALAXY_SERVER, token=cmd_token,
508
-                                              **galaxy_options))
510
+            self.api_servers.append(GalaxyAPI(
511
+                self.galaxy, 'default', C.GALAXY_SERVER, token=cmd_token,
512
+                priority=0,
513
+                **galaxy_options
514
+            ))
509 515
 
510 516
         context.CLIARGS['func']()
511 517
 
... ...
@@ -530,7 +562,7 @@ class GalaxyCLI(CLI):
530 530
     def _get_default_collection_path(self):
531 531
         return C.COLLECTIONS_PATHS[0]
532 532
 
533
-    def _parse_requirements_file(self, requirements_file, allow_old_format=True):
533
+    def _parse_requirements_file(self, requirements_file, allow_old_format=True, artifacts_manager=None):
534 534
         """
535 535
         Parses an Ansible requirement.yml file and returns all the roles and/or collections defined in it. There are 2
536 536
         requirements file format:
... ...
@@ -556,6 +588,7 @@ class GalaxyCLI(CLI):
556 556
 
557 557
         :param requirements_file: The path to the requirements file.
558 558
         :param allow_old_format: Will fail if a v1 requirements file is found and this is set to False.
559
+        :param artifacts_manager: Artifacts manager.
559 560
         :return: a dict containing roles and collections to found in the requirements file.
560 561
         """
561 562
         requirements = {
... ...
@@ -619,33 +652,48 @@ class GalaxyCLI(CLI):
619 619
             for role_req in file_requirements.get('roles') or []:
620 620
                 requirements['roles'] += parse_role_req(role_req)
621 621
 
622
-            for collection_req in file_requirements.get('collections') or []:
623
-                if isinstance(collection_req, dict):
624
-                    req_name = collection_req.get('name', None)
625
-                    if req_name is None:
626
-                        raise AnsibleError("Collections requirement entry should contain the key name.")
627
-
628
-                    req_type = collection_req.get('type')
629
-                    if req_type not in ('file', 'galaxy', 'git', 'url', None):
630
-                        raise AnsibleError("The collection requirement entry key 'type' must be one of file, galaxy, git, or url.")
631
-
632
-                    req_version = collection_req.get('version', '*')
633
-                    req_source = collection_req.get('source', None)
634
-                    if req_source:
635
-                        # Try and match up the requirement source with our list of Galaxy API servers defined in the
636
-                        # config, otherwise create a server with that URL without any auth.
637
-                        req_source = next(iter([a for a in self.api_servers if req_source in [a.name, a.api_server]]),
638
-                                          GalaxyAPI(self.galaxy,
639
-                                                    "explicit_requirement_%s" % req_name,
640
-                                                    req_source,
641
-                                                    validate_certs=not context.CLIARGS['ignore_certs']))
642
-
643
-                    requirements['collections'].append((req_name, req_version, req_source, req_type))
644
-                else:
645
-                    requirements['collections'].append((collection_req, '*', None, None))
622
+            requirements['collections'] = [
623
+                Requirement.from_requirement_dict(
624
+                    self._init_coll_req_dict(collection_req),
625
+                    artifacts_manager,
626
+                )
627
+                for collection_req in file_requirements.get('collections') or []
628
+            ]
646 629
 
647 630
         return requirements
648 631
 
632
+    def _init_coll_req_dict(self, coll_req):
633
+        if not isinstance(coll_req, dict):
634
+            # Assume it's a string:
635
+            return {'name': coll_req}
636
+
637
+        if (
638
+                'name' not in coll_req or
639
+                not coll_req.get('source') or
640
+                coll_req.get('type', 'galaxy') != 'galaxy'
641
+        ):
642
+            return coll_req
643
+
644
+        # Try and match up the requirement source with our list of Galaxy API
645
+        # servers defined in the config, otherwise create a server with that
646
+        # URL without any auth.
647
+        coll_req['source'] = next(
648
+            iter(
649
+                srvr for srvr in self.api_servers
650
+                if coll_req['source'] in {srvr.name, srvr.api_server}
651
+            ),
652
+            GalaxyAPI(
653
+                self.galaxy,
654
+                'explicit_requirement_{name!s}'.format(
655
+                    name=coll_req['name'],
656
+                ),
657
+                coll_req['source'],
658
+                validate_certs=not context.CLIARGS['ignore_certs'],
659
+            ),
660
+        )
661
+
662
+        return coll_req
663
+
649 664
     @staticmethod
650 665
     def exit_without_ignore(rc=1):
651 666
         """
... ...
@@ -733,26 +781,29 @@ class GalaxyCLI(CLI):
733 733
 
734 734
         return meta_value
735 735
 
736
-    def _require_one_of_collections_requirements(self, collections, requirements_file):
736
+    def _require_one_of_collections_requirements(
737
+            self, collections, requirements_file,
738
+            artifacts_manager=None,
739
+    ):
737 740
         if collections and requirements_file:
738 741
             raise AnsibleError("The positional collection_name arg and --requirements-file are mutually exclusive.")
739 742
         elif not collections and not requirements_file:
740 743
             raise AnsibleError("You must specify a collection name or a requirements file.")
741 744
         elif requirements_file:
742 745
             requirements_file = GalaxyCLI._resolve_path(requirements_file)
743
-            requirements = self._parse_requirements_file(requirements_file, allow_old_format=False)
746
+            requirements = self._parse_requirements_file(
747
+                requirements_file,
748
+                allow_old_format=False,
749
+                artifacts_manager=artifacts_manager,
750
+            )
744 751
         else:
745
-            requirements = {'collections': [], 'roles': []}
746
-            for collection_input in collections:
747
-                requirement = None
748
-                if os.path.isfile(to_bytes(collection_input, errors='surrogate_or_strict')) or \
749
-                        urlparse(collection_input).scheme.lower() in ['http', 'https'] or \
750
-                        collection_input.startswith(('git+', 'git@')):
751
-                    # Arg is a file path or URL to a collection
752
-                    name = collection_input
753
-                else:
754
-                    name, dummy, requirement = collection_input.partition(':')
755
-                requirements['collections'].append((name, requirement or '*', None, None))
752
+            requirements = {
753
+                'collections': [
754
+                    Requirement.from_string(coll_input, artifacts_manager)
755
+                    for coll_input in collections
756
+                ],
757
+                'roles': [],
758
+            }
756 759
         return requirements
757 760
 
758 761
     ############################
... ...
@@ -792,27 +843,37 @@ class GalaxyCLI(CLI):
792 792
 
793 793
         for collection_path in context.CLIARGS['args']:
794 794
             collection_path = GalaxyCLI._resolve_path(collection_path)
795
-            build_collection(collection_path, output_path, force)
795
+            build_collection(
796
+                to_text(collection_path, errors='surrogate_or_strict'),
797
+                to_text(output_path, errors='surrogate_or_strict'),
798
+                force,
799
+            )
796 800
 
797
-    def execute_download(self):
801
+    @with_collection_artifacts_manager
802
+    def execute_download(self, artifacts_manager=None):
798 803
         collections = context.CLIARGS['args']
799 804
         no_deps = context.CLIARGS['no_deps']
800 805
         download_path = context.CLIARGS['download_path']
801
-        ignore_certs = context.CLIARGS['ignore_certs']
802 806
 
803 807
         requirements_file = context.CLIARGS['requirements']
804 808
         if requirements_file:
805 809
             requirements_file = GalaxyCLI._resolve_path(requirements_file)
806 810
 
807
-        requirements = self._require_one_of_collections_requirements(collections, requirements_file)['collections']
811
+        requirements = self._require_one_of_collections_requirements(
812
+            collections, requirements_file,
813
+            artifacts_manager=artifacts_manager,
814
+        )['collections']
808 815
 
809 816
         download_path = GalaxyCLI._resolve_path(download_path)
810 817
         b_download_path = to_bytes(download_path, errors='surrogate_or_strict')
811 818
         if not os.path.exists(b_download_path):
812 819
             os.makedirs(b_download_path)
813 820
 
814
-        download_collections(requirements, download_path, self.api_servers, (not ignore_certs), no_deps,
815
-                             context.CLIARGS['allow_pre_release'])
821
+        download_collections(
822
+            requirements, download_path, self.api_servers, no_deps,
823
+            context.CLIARGS['allow_pre_release'],
824
+            artifacts_manager=artifacts_manager,
825
+        )
816 826
 
817 827
         return 0
818 828
 
... ...
@@ -1002,29 +1063,38 @@ class GalaxyCLI(CLI):
1002 1002
 
1003 1003
         self.pager(data)
1004 1004
 
1005
-    def execute_verify(self):
1005
+    @with_collection_artifacts_manager
1006
+    def execute_verify(self, artifacts_manager=None):
1006 1007
 
1007 1008
         collections = context.CLIARGS['args']
1008 1009
         search_paths = context.CLIARGS['collections_path']
1009
-        ignore_certs = context.CLIARGS['ignore_certs']
1010 1010
         ignore_errors = context.CLIARGS['ignore_errors']
1011 1011
         requirements_file = context.CLIARGS['requirements']
1012 1012
 
1013
-        requirements = self._require_one_of_collections_requirements(collections, requirements_file)['collections']
1013
+        requirements = self._require_one_of_collections_requirements(
1014
+            collections, requirements_file,
1015
+            artifacts_manager=artifacts_manager,
1016
+        )['collections']
1014 1017
 
1015 1018
         resolved_paths = [validate_collection_path(GalaxyCLI._resolve_path(path)) for path in search_paths]
1016 1019
 
1017
-        verify_collections(requirements, resolved_paths, self.api_servers, (not ignore_certs), ignore_errors,
1018
-                           allow_pre_release=True)
1020
+        verify_collections(
1021
+            requirements, resolved_paths,
1022
+            self.api_servers, ignore_errors,
1023
+            artifacts_manager=artifacts_manager,
1024
+        )
1019 1025
 
1020 1026
         return 0
1021 1027
 
1022
-    def execute_install(self):
1028
+    @with_collection_artifacts_manager
1029
+    def execute_install(self, artifacts_manager=None):
1023 1030
         """
1024 1031
         Install one or more roles(``ansible-galaxy role install``), or one or more collections(``ansible-galaxy collection install``).
1025 1032
         You can pass in a list (roles or collections) or use the file
1026 1033
         option listed below (these are mutually exclusive). If you pass in a list, it
1027 1034
         can be a name (which will be downloaded via the galaxy API and github), or it can be a local tar archive file.
1035
+
1036
+        :param artifacts_manager: Artifacts manager.
1028 1037
         """
1029 1038
         install_items = context.CLIARGS['args']
1030 1039
         requirements_file = context.CLIARGS['requirements']
... ...
@@ -1042,7 +1112,10 @@ class GalaxyCLI(CLI):
1042 1042
         role_requirements = []
1043 1043
         if context.CLIARGS['type'] == 'collection':
1044 1044
             collection_path = GalaxyCLI._resolve_path(context.CLIARGS['collections_path'])
1045
-            requirements = self._require_one_of_collections_requirements(install_items, requirements_file)
1045
+            requirements = self._require_one_of_collections_requirements(
1046
+                install_items, requirements_file,
1047
+                artifacts_manager=artifacts_manager,
1048
+            )
1046 1049
 
1047 1050
             collection_requirements = requirements['collections']
1048 1051
             if requirements['roles']:
... ...
@@ -1055,7 +1128,10 @@ class GalaxyCLI(CLI):
1055 1055
                 if not (requirements_file.endswith('.yaml') or requirements_file.endswith('.yml')):
1056 1056
                     raise AnsibleError("Invalid role requirements file, it must end with a .yml or .yaml extension")
1057 1057
 
1058
-                requirements = self._parse_requirements_file(requirements_file)
1058
+                requirements = self._parse_requirements_file(
1059
+                    requirements_file,
1060
+                    artifacts_manager=artifacts_manager,
1061
+                )
1059 1062
                 role_requirements = requirements['roles']
1060 1063
 
1061 1064
                 # We can only install collections and roles at the same time if the type wasn't specified and the -p
... ...
@@ -1090,11 +1166,15 @@ class GalaxyCLI(CLI):
1090 1090
             display.display("Starting galaxy collection install process")
1091 1091
             # Collections can technically be installed even when ansible-galaxy is in role mode so we need to pass in
1092 1092
             # the install path as context.CLIARGS['collections_path'] won't be set (default is calculated above).
1093
-            self._execute_install_collection(collection_requirements, collection_path)
1093
+            self._execute_install_collection(
1094
+                collection_requirements, collection_path,
1095
+                artifacts_manager=artifacts_manager,
1096
+            )
1094 1097
 
1095
-    def _execute_install_collection(self, requirements, path):
1098
+    def _execute_install_collection(
1099
+            self, requirements, path, artifacts_manager,
1100
+    ):
1096 1101
         force = context.CLIARGS['force']
1097
-        ignore_certs = context.CLIARGS['ignore_certs']
1098 1102
         ignore_errors = context.CLIARGS['ignore_errors']
1099 1103
         no_deps = context.CLIARGS['no_deps']
1100 1104
         force_with_deps = context.CLIARGS['force_with_deps']
... ...
@@ -1111,8 +1191,12 @@ class GalaxyCLI(CLI):
1111 1111
         if not os.path.exists(b_output_path):
1112 1112
             os.makedirs(b_output_path)
1113 1113
 
1114
-        install_collections(requirements, output_path, self.api_servers, (not ignore_certs), ignore_errors,
1115
-                            no_deps, force, force_with_deps, allow_pre_release=allow_pre_release)
1114
+        install_collections(
1115
+            requirements, output_path, self.api_servers, ignore_errors,
1116
+            no_deps, force, force_with_deps,
1117
+            allow_pre_release=allow_pre_release,
1118
+            artifacts_manager=artifacts_manager,
1119
+        )
1116 1120
 
1117 1121
         return 0
1118 1122
 
... ...
@@ -1283,9 +1367,12 @@ class GalaxyCLI(CLI):
1283 1283
 
1284 1284
         return 0
1285 1285
 
1286
-    def execute_list_collection(self):
1286
+    @with_collection_artifacts_manager
1287
+    def execute_list_collection(self, artifacts_manager=None):
1287 1288
         """
1288 1289
         List all collections installed on the local system
1290
+
1291
+        :param artifacts_manager: Artifacts manager.
1289 1292
         """
1290 1293
 
1291 1294
         collections_search_paths = set(context.CLIARGS['collections_path'])
... ...
@@ -1328,8 +1415,16 @@ class GalaxyCLI(CLI):
1328 1328
                     continue
1329 1329
 
1330 1330
                 collection_found = True
1331
-                collection = CollectionRequirement.from_path(b_collection_path, False, fallback_metadata=True)
1332
-                fqcn_width, version_width = _get_collection_widths(collection)
1331
+
1332
+                try:
1333
+                    collection = Requirement.from_dir_path_as_unknown(
1334
+                        b_collection_path,
1335
+                        artifacts_manager,
1336
+                    )
1337
+                except ValueError as val_err:
1338
+                    six.raise_from(AnsibleError(val_err), val_err)
1339
+
1340
+                fqcn_width, version_width = _get_collection_widths([collection])
1333 1341
 
1334 1342
                 _display_header(collection_path, 'Collection', 'Version', fqcn_width, version_width)
1335 1343
                 _display_collection(collection, fqcn_width, version_width)
... ...
@@ -1339,7 +1434,9 @@ class GalaxyCLI(CLI):
1339 1339
                 collection_path = validate_collection_path(path)
1340 1340
                 if os.path.isdir(collection_path):
1341 1341
                     display.vvv("Searching {0} for collections".format(collection_path))
1342
-                    collections = find_existing_collections(collection_path, fallback_metadata=True)
1342
+                    collections = list(find_existing_collections(
1343
+                        collection_path, artifacts_manager,
1344
+                    ))
1343 1345
                 else:
1344 1346
                     # There was no 'ansible_collections/' directory in the path, so there
1345 1347
                     # or no collections here.
... ...
@@ -1355,8 +1452,7 @@ class GalaxyCLI(CLI):
1355 1355
                 _display_header(collection_path, 'Collection', 'Version', fqcn_width, version_width)
1356 1356
 
1357 1357
                 # Sort collections by the namespace and name
1358
-                collections.sort(key=to_text)
1359
-                for collection in collections:
1358
+                for collection in sorted(collections, key=to_text):
1360 1359
                     _display_collection(collection, fqcn_width, version_width)
1361 1360
 
1362 1361
         # Do not warn if the specific collection was found in any of the search paths
... ...
@@ -7,6 +7,7 @@ __metaclass__ = type
7 7
 
8 8
 import collections
9 9
 import datetime
10
+import functools
10 11
 import hashlib
11 12
 import json
12 13
 import os
... ...
@@ -233,11 +234,17 @@ class CollectionVersionMetadata:
233 233
         self.dependencies = dependencies
234 234
 
235 235
 
236
+@functools.total_ordering
236 237
 class GalaxyAPI:
237 238
     """ This class is meant to be used as a API client for an Ansible Galaxy server """
238 239
 
239
-    def __init__(self, galaxy, name, url, username=None, password=None, token=None, validate_certs=True,
240
-                 available_api_versions=None, clear_response_cache=False, no_cache=True):
240
+    def __init__(
241
+            self, galaxy, name, url,
242
+            username=None, password=None, token=None, validate_certs=True,
243
+            available_api_versions=None,
244
+            clear_response_cache=False, no_cache=True,
245
+            priority=float('inf'),
246
+    ):
241 247
         self.galaxy = galaxy
242 248
         self.name = name
243 249
         self.username = username
... ...
@@ -246,6 +253,7 @@ class GalaxyAPI:
246 246
         self.api_server = url
247 247
         self.validate_certs = validate_certs
248 248
         self._available_api_versions = available_api_versions or {}
249
+        self._priority = priority
249 250
 
250 251
         b_cache_dir = to_bytes(C.config.get_config_value('GALAXY_CACHE_DIR'), errors='surrogate_or_strict')
251 252
         makedirs_safe(b_cache_dir, mode=0o700)
... ...
@@ -263,6 +271,38 @@ class GalaxyAPI:
263 263
 
264 264
         display.debug('Validate TLS certificates for %s: %s' % (self.api_server, self.validate_certs))
265 265
 
266
+    def __str__(self):
267
+        # type: (GalaxyAPI) -> str
268
+        """Render GalaxyAPI as a native string representation."""
269
+        return to_native(self.name)
270
+
271
+    def __unicode__(self):
272
+        # type: (GalaxyAPI) -> unicode
273
+        """Render GalaxyAPI as a unicode/text string representation."""
274
+        return to_text(self.name)
275
+
276
+    def __repr__(self):
277
+        # type: (GalaxyAPI) -> str
278
+        """Render GalaxyAPI as an inspectable string representation."""
279
+        return (
280
+            '<{instance!s} "{name!s}" @ {url!s} with priority {priority!s}>'.
281
+            format(
282
+                instance=self, name=self.name,
283
+                priority=self._priority, url=self.api_server,
284
+            )
285
+        )
286
+
287
+    def __lt__(self, other_galaxy_api):
288
+        # type: (GalaxyAPI, GalaxyAPI) -> Union[bool, 'NotImplemented']
289
+        """Return whether the instance priority is higher than other."""
290
+        if not isinstance(other_galaxy_api, self.__class__):
291
+            return NotImplemented
292
+
293
+        return (
294
+            self._priority > other_galaxy_api._priority or
295
+            self.name < self.name
296
+        )
297
+
266 298
     @property
267 299
     @g_connect(['v1', 'v2', 'v3'])
268 300
     def available_api_versions(self):
... ...
@@ -1,5 +1,5 @@
1 1
 # -*- coding: utf-8 -*-
2
-# Copyright: (c) 2019, Ansible Project
2
+# Copyright: (c) 2019-2021, Ansible Project
3 3
 # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
4 4
 """Installed collections management package."""
5 5
 
... ...
@@ -9,7 +9,6 @@ __metaclass__ = type
9 9
 import errno
10 10
 import fnmatch
11 11
 import json
12
-import operator
13 12
 import os
14 13
 import shutil
15 14
 import stat
... ...
@@ -25,30 +24,102 @@ from contextlib import contextmanager
25 25
 from distutils.version import LooseVersion
26 26
 from hashlib import sha256
27 27
 from io import BytesIO
28
+from itertools import chain
29
+from resolvelib.resolvers import InconsistentCandidate
28 30
 from yaml.error import YAMLError
29 31
 
32
+# NOTE: Adding type ignores is a hack for mypy to shut up wrt bug #1153
30 33
 try:
31
-    import queue
34
+    import queue  # type: ignore[import]
35
+except ImportError:  # Python 2
36
+    import Queue as queue  # type: ignore[import,no-redef]
37
+
38
+try:
39
+    # NOTE: It's in Python 3 stdlib and can be installed on Python 2
40
+    # NOTE: via `pip install typing`. Unnecessary in runtime.
41
+    # NOTE: `TYPE_CHECKING` is True during mypy-typecheck-time.
42
+    from typing import TYPE_CHECKING
32 43
 except ImportError:
33
-    import Queue as queue  # Python 2
44
+    TYPE_CHECKING = False
45
+
46
+if TYPE_CHECKING:
47
+    from typing import Dict, Iterable, List, Optional, Text, Union
48
+    if sys.version_info[:2] >= (3, 8):
49
+        from typing import Literal
50
+    else:  # Python 2 + Python 3.4-3.7
51
+        from typing_extensions import Literal
52
+
53
+    from ansible.galaxy.api import GalaxyAPI
54
+    from ansible.galaxy.collection.concrete_artifact_manager import (
55
+        ConcreteArtifactsManager,
56
+    )
57
+
58
+    ManifestKeysType = Literal[
59
+        'collection_info', 'file_manifest_file', 'format',
60
+    ]
61
+    FileMetaKeysType = Literal[
62
+        'name',
63
+        'ftype',
64
+        'chksum_type',
65
+        'chksum_sha256',
66
+        'format',
67
+    ]
68
+    CollectionInfoKeysType = Literal[
69
+        # collection meta:
70
+        'namespace', 'name', 'version',
71
+        'authors', 'readme',
72
+        'tags', 'description',
73
+        'license', 'license_file',
74
+        'dependencies',
75
+        'repository', 'documentation',
76
+        'homepage', 'issues',
77
+
78
+        # files meta:
79
+        FileMetaKeysType,
80
+    ]
81
+    ManifestValueType = Dict[
82
+        CollectionInfoKeysType,
83
+        Optional[
84
+            Union[
85
+                int, str,  # scalars, like name/ns, schema version
86
+                List[str],  # lists of scalars, like tags
87
+                Dict[str, str],  # deps map
88
+            ],
89
+        ],
90
+    ]
91
+    CollectionManifestType = Dict[ManifestKeysType, ManifestValueType]
92
+    FileManifestEntryType = Dict[FileMetaKeysType, Optional[Union[str, int]]]
93
+    FilesManifestType = Dict[
94
+        Literal['files', 'format'],
95
+        Union[List[FileManifestEntryType], int],
96
+    ]
34 97
 
35 98
 import ansible.constants as C
36 99
 from ansible.errors import AnsibleError
37 100
 from ansible.galaxy import get_collections_galaxy_meta_info
38
-from ansible.galaxy.api import CollectionVersionMetadata, GalaxyError
39
-from ansible.galaxy.user_agent import user_agent
40
-from ansible.module_utils import six
101
+from ansible.galaxy.collection.concrete_artifact_manager import (
102
+    _consume_file,
103
+    _download_file,
104
+    _get_meta_from_src_dir,
105
+    _tarfile_extract,
106
+)
107
+from ansible.galaxy.collection.galaxy_api_proxy import MultiGalaxyAPIProxy
108
+from ansible.galaxy.dependency_resolution import (
109
+    build_collection_dependency_resolver,
110
+)
111
+from ansible.galaxy.dependency_resolution.dataclasses import (
112
+    Candidate, Requirement,
113
+)
114
+from ansible.galaxy.dependency_resolution.errors import (
115
+    CollectionDependencyResolutionImpossible,
116
+)
117
+from ansible.galaxy.dependency_resolution.versioning import meets_requirements
118
+from ansible.module_utils.six import raise_from
41 119
 from ansible.module_utils._text import to_bytes, to_native, to_text
42 120
 from ansible.utils.collection_loader import AnsibleCollectionRef
43 121
 from ansible.utils.display import Display
44
-from ansible.utils.galaxy import scm_archive_collection
45 122
 from ansible.utils.hashing import secure_hash, secure_hash_s
46 123
 from ansible.utils.version import SemanticVersion
47
-from ansible.module_utils.urls import open_url
48
-
49
-urlparse = six.moves.urllib.parse.urlparse
50
-urldefrag = six.moves.urllib.parse.urldefrag
51
-urllib_error = six.moves.urllib.error
52 124
 
53 125
 
54 126
 display = Display()
... ...
@@ -58,532 +129,145 @@ MANIFEST_FORMAT = 1
58 58
 ModifiedContent = namedtuple('ModifiedContent', ['filename', 'expected', 'installed'])
59 59
 
60 60
 
61
-class CollectionRequirement:
62
-
63
-    _FILE_MAPPING = [(b'MANIFEST.json', 'manifest_file'), (b'FILES.json', 'files_file')]
64
-
65
-    def __init__(self, namespace, name, b_path, api, versions, requirement, force, parent=None, metadata=None,
66
-                 files=None, skip=False, allow_pre_releases=False):
67
-        """Represents a collection requirement, the versions that are available to be installed as well as any
68
-        dependencies the collection has.
69
-
70
-        :param namespace: The collection namespace.
71
-        :param name: The collection name.
72
-        :param b_path: Byte str of the path to the collection tarball if it has already been downloaded.
73
-        :param api: The GalaxyAPI to use if the collection is from Galaxy.
74
-        :param versions: A list of versions of the collection that are available.
75
-        :param requirement: The version requirement string used to verify the list of versions fit the requirements.
76
-        :param force: Whether the force flag applied to the collection.
77
-        :param parent: The name of the parent the collection is a dependency of.
78
-        :param metadata: The galaxy.api.CollectionVersionMetadata that has already been retrieved from the Galaxy
79
-            server.
80
-        :param files: The files that exist inside the collection. This is based on the FILES.json file inside the
81
-            collection artifact.
82
-        :param skip: Whether to skip installing the collection. Should be set if the collection is already installed
83
-            and force is not set.
84
-        :param allow_pre_releases: Whether to skip pre-release versions of collections.
85
-        """
86
-        self.namespace = namespace
87
-        self.name = name
88
-        self.b_path = b_path
89
-        self.api = api
90
-        self._versions = set(versions)
91
-        self.force = force
92
-        self.skip = skip
93
-        self.required_by = []
94
-        self.allow_pre_releases = allow_pre_releases
95
-
96
-        self._metadata = metadata
97
-        self._files = files
98
-
99
-        self.add_requirement(parent, requirement)
100
-
101
-    def __str__(self):
102
-        return to_native("%s.%s" % (self.namespace, self.name))
103
-
104
-    def __unicode__(self):
105
-        return u"%s.%s" % (self.namespace, self.name)
106
-
107
-    @property
108
-    def metadata(self):
109
-        self._get_metadata()
110
-        return self._metadata
111
-
112
-    @property
113
-    def versions(self):
114
-        if self.allow_pre_releases:
115
-            return self._versions
116
-        return set(v for v in self._versions if v == '*' or not SemanticVersion(v).is_prerelease)
117
-
118
-    @versions.setter
119
-    def versions(self, value):
120
-        self._versions = set(value)
121
-
122
-    @property
123
-    def pre_releases(self):
124
-        return set(v for v in self._versions if SemanticVersion(v).is_prerelease)
125
-
126
-    @property
127
-    def latest_version(self):
128
-        try:
129
-            return max([v for v in self.versions if v != '*'], key=SemanticVersion)
130
-        except ValueError:  # ValueError: max() arg is an empty sequence
131
-            return '*'
132
-
133
-    @property
134
-    def dependencies(self):
135
-        if not self._metadata:
136
-            if len(self.versions) > 1:
137
-                return {}
138
-            self._get_metadata()
139
-
140
-        dependencies = self._metadata.dependencies
141
-
142
-        if dependencies is None:
143
-            return {}
144
-
145
-        return dependencies
146
-
147
-    @staticmethod
148
-    def artifact_info(b_path):
149
-        """Load the manifest data from the MANIFEST.json and FILES.json. If the files exist, return a dict containing the keys 'files_file' and 'manifest_file'.
150
-        :param b_path: The directory of a collection.
151
-        """
152
-        info = {}
153
-        for b_file_name, property_name in CollectionRequirement._FILE_MAPPING:
154
-            b_file_path = os.path.join(b_path, b_file_name)
155
-            if not os.path.exists(b_file_path):
156
-                continue
157
-            with open(b_file_path, 'rb') as file_obj:
158
-                try:
159
-                    info[property_name] = json.loads(to_text(file_obj.read(), errors='surrogate_or_strict'))
160
-                except ValueError:
161
-                    raise AnsibleError("Collection file at '%s' does not contain a valid json string." % to_native(b_file_path))
162
-        return info
163
-
164
-    @staticmethod
165
-    def galaxy_metadata(b_path):
166
-        """Generate the manifest data from the galaxy.yml file.
167
-        If the galaxy.yml exists, return a dictionary containing the keys 'files_file' and 'manifest_file'.
168
-
169
-        :param b_path: The directory of a collection.
170
-        """
171
-        b_galaxy_path = get_galaxy_metadata_path(b_path)
172
-        info = {}
173
-        if os.path.exists(b_galaxy_path):
174
-            collection_meta = _get_galaxy_yml(b_galaxy_path)
175
-            info['files_file'] = _build_files_manifest(b_path, collection_meta['namespace'], collection_meta['name'], collection_meta['build_ignore'])
176
-            info['manifest_file'] = _build_manifest(**collection_meta)
177
-        return info
178
-
179
-    @staticmethod
180
-    def collection_info(b_path, fallback_metadata=False):
181
-        info = CollectionRequirement.artifact_info(b_path)
182
-        if info or not fallback_metadata:
183
-            return info
184
-        return CollectionRequirement.galaxy_metadata(b_path)
185
-
186
-    def add_requirement(self, parent, requirement):
187
-        self.required_by.append((parent, requirement))
188
-        new_versions = set(v for v in self.versions if self._meets_requirements(v, requirement, parent))
189
-        if len(new_versions) == 0:
190
-            if self.skip:
191
-                force_flag = '--force-with-deps' if parent else '--force'
192
-                version = self.latest_version if self.latest_version != '*' else 'unknown'
193
-                msg = "Cannot meet requirement %s:%s as it is already installed at version '%s'. Use %s to overwrite" \
194
-                      % (to_text(self), requirement, version, force_flag)
195
-                raise AnsibleError(msg)
196
-            elif parent is None:
197
-                msg = "Cannot meet requirement %s for dependency %s" % (requirement, to_text(self))
198
-            else:
199
-                msg = "Cannot meet dependency requirement '%s:%s' for collection %s" \
200
-                      % (to_text(self), requirement, parent)
61
+def verify_local_collection(
62
+        local_collection, remote_collection,
63
+        artifacts_manager,
64
+):  # type: (Candidate, Candidate, ConcreteArtifactsManager) -> None
65
+    """Verify integrity of the locally installed collection.
201 66
 
202
-            collection_source = to_text(self.b_path, nonstring='passthru') or self.api.api_server
203
-            req_by = "\n".join(
204
-                "\t%s - '%s:%s'" % (to_text(p) if p else 'base', to_text(self), r)
205
-                for p, r in self.required_by
206
-            )
67
+    :param local_collection: Collection being checked.
68
+    :param remote_collection: Correct collection.
69
+    :param artifacts_manager: Artifacts manager.
70
+    """
71
+    b_temp_tar_path = (  # NOTE: AnsibleError is raised on URLError
72
+        artifacts_manager.get_artifact_path
73
+        if remote_collection.is_concrete_artifact
74
+        else artifacts_manager.get_galaxy_artifact_path
75
+    )(remote_collection)
76
+
77
+    b_collection_path = to_bytes(
78
+        local_collection.src, errors='surrogate_or_strict',
79
+    )
207 80
 
208
-            versions = ", ".join(sorted(self.versions, key=SemanticVersion))
209
-            if not self.versions and self.pre_releases:
210
-                pre_release_msg = (
211
-                    '\nThis collection only contains pre-releases. Utilize `--pre` to install pre-releases, or '
212
-                    'explicitly provide the pre-release version.'
213
-                )
214
-            else:
215
-                pre_release_msg = ''
81
+    display.vvv("Verifying '{coll!s}'.".format(coll=local_collection))
82
+    display.vvv(
83
+        u"Installed collection found at '{path!s}'".
84
+        format(path=to_text(local_collection.src)),
85
+    )
86
+    display.vvv(
87
+        u"Remote collection cached as '{path!s}'".
88
+        format(path=to_text(b_temp_tar_path)),
89
+    )
216 90
 
217
-            raise AnsibleError(
218
-                "%s from source '%s'. Available versions before last requirement added: %s\nRequirements from:\n%s%s"
219
-                % (msg, collection_source, versions, req_by, pre_release_msg)
91
+    # Compare installed version versus requirement version
92
+    if local_collection.ver != remote_collection.ver:
93
+        err = (
94
+            "{local_fqcn!s} has the version '{local_ver!s}' but "
95
+            "is being compared to '{remote_ver!s}'".format(
96
+                local_fqcn=local_collection.fqcn,
97
+                local_ver=local_collection.ver,
98
+                remote_ver=remote_collection.ver,
220 99
             )
100
+        )
101
+        display.display(err)
102
+        return
221 103
 
222
-        self.versions = new_versions
223
-
224
-    def download(self, b_path):
225
-        download_url = self._metadata.download_url
226
-        artifact_hash = self._metadata.artifact_sha256
227
-        headers = {}
228
-        self.api._add_auth_token(headers, download_url, required=False)
229
-
230
-        b_collection_path = _download_file(download_url, b_path, artifact_hash, self.api.validate_certs,
231
-                                           headers=headers)
232
-
233
-        return to_text(b_collection_path, errors='surrogate_or_strict')
234
-
235
-    def install(self, path, b_temp_path):
236
-        if self.skip:
237
-            display.display("Skipping '%s' as it is already installed" % to_text(self))
238
-            return
239
-
240
-        # Install if it is not
241
-        collection_path = os.path.join(path, self.namespace, self.name)
242
-        b_collection_path = to_bytes(collection_path, errors='surrogate_or_strict')
243
-        display.display("Installing '%s:%s' to '%s'" % (to_text(self), self.latest_version, collection_path))
244
-
245
-        if self.b_path is None:
246
-            self.b_path = self.download(b_temp_path)
247
-
248
-        if os.path.exists(b_collection_path):
249
-            shutil.rmtree(b_collection_path)
250
-
251
-        if os.path.isfile(self.b_path):
252
-            self.install_artifact(b_collection_path, b_temp_path)
253
-        else:
254
-            self.install_scm(b_collection_path)
255
-
256
-        display.display("%s (%s) was installed successfully" % (to_text(self), self.latest_version))
257
-
258
-    def install_artifact(self, b_collection_path, b_temp_path):
259
-
260
-        try:
261
-            with tarfile.open(self.b_path, mode='r') as collection_tar:
262
-                files_member_obj = collection_tar.getmember('FILES.json')
263
-                with _tarfile_extract(collection_tar, files_member_obj) as (dummy, files_obj):
264
-                    files = json.loads(to_text(files_obj.read(), errors='surrogate_or_strict'))
265
-
266
-                _extract_tar_file(collection_tar, 'MANIFEST.json', b_collection_path, b_temp_path)
267
-                _extract_tar_file(collection_tar, 'FILES.json', b_collection_path, b_temp_path)
268
-
269
-                for file_info in files['files']:
270
-                    file_name = file_info['name']
271
-                    if file_name == '.':
272
-                        continue
273
-
274
-                    if file_info['ftype'] == 'file':
275
-                        _extract_tar_file(collection_tar, file_name, b_collection_path, b_temp_path,
276
-                                          expected_hash=file_info['chksum_sha256'])
277
-
278
-                    else:
279
-                        _extract_tar_dir(collection_tar, file_name, b_collection_path)
280
-
281
-        except Exception:
282
-            # Ensure we don't leave the dir behind in case of a failure.
283
-            shutil.rmtree(b_collection_path)
284
-
285
-            b_namespace_path = os.path.dirname(b_collection_path)
286
-            if not os.listdir(b_namespace_path):
287
-                os.rmdir(b_namespace_path)
288
-
289
-            raise
290
-
291
-    def install_scm(self, b_collection_output_path):
292
-        """Install the collection from source control into given dir.
293
-
294
-        Generates the Ansible collection artifact data from a galaxy.yml and installs the artifact to a directory.
295
-        This should follow the same pattern as build_collection, but instead of creating an artifact, install it.
296
-        :param b_collection_output_path: The installation directory for the collection artifact.
297
-        :raises AnsibleError: If no collection metadata found.
298
-        """
299
-        b_collection_path = self.b_path
300
-
301
-        b_galaxy_path = get_galaxy_metadata_path(b_collection_path)
302
-        if not os.path.exists(b_galaxy_path):
303
-            raise AnsibleError("The collection galaxy.yml path '%s' does not exist." % to_native(b_galaxy_path))
304
-
305
-        info = CollectionRequirement.galaxy_metadata(b_collection_path)
306
-
307
-        collection_manifest = info['manifest_file']
308
-        collection_meta = collection_manifest['collection_info']
309
-        file_manifest = info['files_file']
310
-
311
-        _build_collection_dir(b_collection_path, b_collection_output_path, collection_manifest, file_manifest)
312
-
313
-        collection_name = "%s.%s" % (collection_manifest['collection_info']['namespace'],
314
-                                     collection_manifest['collection_info']['name'])
315
-        display.display('Created collection for %s at %s' % (collection_name, to_text(b_collection_output_path)))
316
-
317
-    def set_latest_version(self):
318
-        self.versions = set([self.latest_version])
319
-        self._get_metadata()
320
-
321
-    def verify(self, remote_collection, path, b_temp_tar_path):
322
-        if not self.skip:
323
-            display.display("'%s' has not been installed, nothing to verify" % (to_text(self)))
324
-            return
325
-
326
-        collection_path = os.path.join(path, self.namespace, self.name)
327
-        b_collection_path = to_bytes(collection_path, errors='surrogate_or_strict')
328
-
329
-        display.vvv("Verifying '%s:%s'." % (to_text(self), self.latest_version))
330
-        display.vvv("Installed collection found at '%s'" % collection_path)
331
-        display.vvv("Remote collection found at '%s'" % remote_collection.metadata.download_url)
332
-
333
-        # Compare installed version versus requirement version
334
-        if self.latest_version != remote_collection.latest_version:
335
-            err = "%s has the version '%s' but is being compared to '%s'" % (to_text(self), self.latest_version, remote_collection.latest_version)
336
-            display.display(err)
337
-            return
338
-
339
-        modified_content = []
340
-
341
-        # Verify the manifest hash matches before verifying the file manifest
342
-        expected_hash = _get_tar_file_hash(b_temp_tar_path, 'MANIFEST.json')
343
-        self._verify_file_hash(b_collection_path, 'MANIFEST.json', expected_hash, modified_content)
344
-        manifest = _get_json_from_tar_file(b_temp_tar_path, 'MANIFEST.json')
345
-
346
-        # Use the manifest to verify the file manifest checksum
347
-        file_manifest_data = manifest['file_manifest_file']
348
-        file_manifest_filename = file_manifest_data['name']
349
-        expected_hash = file_manifest_data['chksum_%s' % file_manifest_data['chksum_type']]
350
-
351
-        # Verify the file manifest before using it to verify individual files
352
-        self._verify_file_hash(b_collection_path, file_manifest_filename, expected_hash, modified_content)
353
-        file_manifest = _get_json_from_tar_file(b_temp_tar_path, file_manifest_filename)
354
-
355
-        # Use the file manifest to verify individual file checksums
356
-        for manifest_data in file_manifest['files']:
357
-            if manifest_data['ftype'] == 'file':
358
-                expected_hash = manifest_data['chksum_%s' % manifest_data['chksum_type']]
359
-                self._verify_file_hash(b_collection_path, manifest_data['name'], expected_hash, modified_content)
360
-
361
-        if modified_content:
362
-            display.display("Collection %s contains modified content in the following files:" % to_text(self))
363
-            display.display(to_text(self))
364
-            display.vvv(to_text(self.b_path))
365
-            for content_change in modified_content:
366
-                display.display('    %s' % content_change.filename)
367
-                display.vvv("    Expected: %s\n    Found: %s" % (content_change.expected, content_change.installed))
368
-        else:
369
-            display.vvv("Successfully verified that checksums for '%s:%s' match the remote collection" % (to_text(self), self.latest_version))
370
-
371
-    def _verify_file_hash(self, b_path, filename, expected_hash, error_queue):
372
-        b_file_path = to_bytes(os.path.join(to_text(b_path), filename), errors='surrogate_or_strict')
373
-
374
-        if not os.path.isfile(b_file_path):
375
-            actual_hash = None
376
-        else:
377
-            with open(b_file_path, mode='rb') as file_object:
378
-                actual_hash = _consume_file(file_object)
379
-
380
-        if expected_hash != actual_hash:
381
-            error_queue.append(ModifiedContent(filename=filename, expected=expected_hash, installed=actual_hash))
382
-
383
-    def _get_metadata(self):
384
-        if self._metadata:
385
-            return
386
-        self._metadata = self.api.get_collection_version_metadata(self.namespace, self.name, self.latest_version)
387
-
388
-    def _meets_requirements(self, version, requirements, parent):
389
-        """
390
-        Supports version identifiers can be '==', '!=', '>', '>=', '<', '<=', '*'. Each requirement is delimited by ','
391
-        """
392
-        op_map = {
393
-            '!=': operator.ne,
394
-            '==': operator.eq,
395
-            '=': operator.eq,
396
-            '>=': operator.ge,
397
-            '>': operator.gt,
398
-            '<=': operator.le,
399
-            '<': operator.lt,
400
-        }
401
-
402
-        for req in list(requirements.split(',')):
403
-            op_pos = 2 if len(req) > 1 and req[1] == '=' else 1
404
-            op = op_map.get(req[:op_pos])
405
-
406
-            requirement = req[op_pos:]
407
-            if not op:
408
-                requirement = req
409
-                op = operator.eq
410
-
411
-            # In the case we are checking a new requirement on a base requirement (parent != None) we can't accept
412
-            # version as '*' (unknown version) unless the requirement is also '*'.
413
-            if parent and version == '*' and requirement != '*':
414
-                display.warning("Failed to validate the collection requirement '%s:%s' for %s when the existing "
415
-                                "install does not have a version set, the collection may not work."
416
-                                % (to_text(self), req, parent))
417
-                continue
418
-            elif requirement == '*' or version == '*':
419
-                continue
420
-
421
-            if not op(SemanticVersion(version), SemanticVersion.from_loose_version(LooseVersion(requirement))):
422
-                break
423
-        else:
424
-            return True
425
-
426
-        # The loop was broken early, it does not meet all the requirements
427
-        return False
428
-
429
-    @staticmethod
430
-    def from_tar(b_path, force, parent=None):
431
-        if not tarfile.is_tarfile(b_path):
432
-            raise AnsibleError("Collection artifact at '%s' is not a valid tar file." % to_native(b_path))
433
-
434
-        info = {}
435
-        with tarfile.open(b_path, mode='r') as collection_tar:
436
-            for b_member_name, property_name in CollectionRequirement._FILE_MAPPING:
437
-                n_member_name = to_native(b_member_name)
438
-                try:
439
-                    member = collection_tar.getmember(n_member_name)
440
-                except KeyError:
441
-                    raise AnsibleError("Collection at '%s' does not contain the required file %s."
442
-                                       % (to_native(b_path), n_member_name))
443
-
444
-                with _tarfile_extract(collection_tar, member) as (dummy, member_obj):
445
-                    try:
446
-                        info[property_name] = json.loads(to_text(member_obj.read(), errors='surrogate_or_strict'))
447
-                    except ValueError:
448
-                        raise AnsibleError("Collection tar file member %s does not contain a valid json string."
449
-                                           % n_member_name)
450
-
451
-        meta = info['manifest_file']['collection_info']
452
-        files = info['files_file']['files']
453
-
454
-        namespace = meta['namespace']
455
-        name = meta['name']
456
-        version = meta['version']
457
-        meta = CollectionVersionMetadata(namespace, name, version, None, None, meta['dependencies'])
458
-
459
-        if SemanticVersion(version).is_prerelease:
460
-            allow_pre_release = True
461
-        else:
462
-            allow_pre_release = False
463
-
464
-        return CollectionRequirement(namespace, name, b_path, None, [version], version, force, parent=parent,
465
-                                     metadata=meta, files=files, allow_pre_releases=allow_pre_release)
466
-
467
-    @staticmethod
468
-    def from_path(b_path, force, parent=None, fallback_metadata=False, skip=True):
469
-        info = CollectionRequirement.collection_info(b_path, fallback_metadata)
470
-
471
-        allow_pre_release = False
472
-        if 'manifest_file' in info:
473
-            manifest = info['manifest_file']['collection_info']
474
-            namespace = manifest['namespace']
475
-            name = manifest['name']
476
-            version = to_text(manifest['version'], errors='surrogate_or_strict')
477
-
478
-            try:
479
-                _v = SemanticVersion()
480
-                _v.parse(version)
481
-                if _v.is_prerelease:
482
-                    allow_pre_release = True
483
-            except ValueError:
484
-                display.warning("Collection at '%s' does not have a valid version set, falling back to '*'. Found "
485
-                                "version: '%s'" % (to_text(b_path), version))
486
-                version = '*'
487
-
488
-            dependencies = manifest['dependencies']
489
-        else:
490
-            if fallback_metadata:
491
-                warning = "Collection at '%s' does not have a galaxy.yml or a MANIFEST.json file, cannot detect version."
492
-            else:
493
-                warning = "Collection at '%s' does not have a MANIFEST.json file, cannot detect version."
494
-            display.warning(warning % to_text(b_path))
495
-            parent_dir, name = os.path.split(to_text(b_path, errors='surrogate_or_strict'))
496
-            namespace = os.path.split(parent_dir)[1]
497
-
498
-            version = '*'
499
-            dependencies = {}
500
-
501
-        meta = CollectionVersionMetadata(namespace, name, version, None, None, dependencies)
502
-
503
-        files = info.get('files_file', {}).get('files', {})
504
-
505
-        return CollectionRequirement(namespace, name, b_path, None, [version], version, force, parent=parent,
506
-                                     metadata=meta, files=files, skip=skip, allow_pre_releases=allow_pre_release)
507
-
508
-    @staticmethod
509
-    def from_name(collection, apis, requirement, force, parent=None, allow_pre_release=False):
510
-        namespace, name = collection.split('.', 1)
511
-        galaxy_meta = None
512
-
513
-        for api in apis:
514
-            if not (requirement == '*' or requirement.startswith('<') or requirement.startswith('>') or
515
-                    requirement.startswith('!=')):
516
-                # Exact requirement
517
-                allow_pre_release = True
518
-
519
-                if requirement.startswith('='):
520
-                    requirement = requirement.lstrip('=')
521
-
522
-                try:
523
-                    resp = api.get_collection_version_metadata(namespace, name, requirement)
524
-                except GalaxyError as err:
525
-                    if err.http_code != 404:
526
-                        raise
527
-                    versions = []
528
-                else:
529
-                    galaxy_meta = resp
530
-                    versions = [resp.version]
531
-            else:
532
-                versions = api.get_collection_versions(namespace, name)
533
-
534
-            if not versions:
535
-                display.vvv("Collection '%s' is not available from server %s %s" % (collection, api.name,
536
-                                                                                    api.api_server))
537
-                continue
538
-
539
-            display.vvv("Collection '%s' obtained from server %s %s" % (collection, api.name, api.api_server))
540
-            break
541
-        else:
542
-            raise AnsibleError("Failed to find collection %s:%s" % (collection, requirement))
543
-
544
-        req = CollectionRequirement(namespace, name, None, api, versions, requirement, force, parent=parent,
545
-                                    metadata=galaxy_meta, allow_pre_releases=allow_pre_release)
546
-        return req
104
+    modified_content = []  # type: List[ModifiedContent]
105
+
106
+    # Verify the manifest hash matches before verifying the file manifest
107
+    expected_hash = _get_tar_file_hash(b_temp_tar_path, 'MANIFEST.json')
108
+    _verify_file_hash(b_collection_path, 'MANIFEST.json', expected_hash, modified_content)
109
+    manifest = _get_json_from_tar_file(b_temp_tar_path, 'MANIFEST.json')
110
+
111
+    # Use the manifest to verify the file manifest checksum
112
+    file_manifest_data = manifest['file_manifest_file']
113
+    file_manifest_filename = file_manifest_data['name']
114
+    expected_hash = file_manifest_data['chksum_%s' % file_manifest_data['chksum_type']]
115
+
116
+    # Verify the file manifest before using it to verify individual files
117
+    _verify_file_hash(b_collection_path, file_manifest_filename, expected_hash, modified_content)
118
+    file_manifest = _get_json_from_tar_file(b_temp_tar_path, file_manifest_filename)
119
+
120
+    # Use the file manifest to verify individual file checksums
121
+    for manifest_data in file_manifest['files']:
122
+        if manifest_data['ftype'] == 'file':
123
+            expected_hash = manifest_data['chksum_%s' % manifest_data['chksum_type']]
124
+            _verify_file_hash(b_collection_path, manifest_data['name'], expected_hash, modified_content)
125
+
126
+    if modified_content:
127
+        display.display(
128
+            'Collection {fqcn!s} contains modified content '
129
+            'in the following files:'.
130
+            format(fqcn=to_text(local_collection.fqcn)),
131
+        )
132
+        display.display(to_text(local_collection.fqcn))
133
+        display.vvv(to_text(local_collection.src))
134
+        for content_change in modified_content:
135
+            display.display('    %s' % content_change.filename)
136
+            display.vvv("    Expected: %s\n    Found: %s" % (content_change.expected, content_change.installed))
137
+        # FIXME: Why doesn't this raise a failed return code?
138
+    else:
139
+        display.vvv(
140
+            "Successfully verified that checksums for '{coll!s}' "
141
+            'match the remote collection'.
142
+            format(coll=local_collection),
143
+        )
547 144
 
548 145
 
549
-def build_collection(collection_path, output_path, force):
146
+def build_collection(u_collection_path, u_output_path, force):
147
+    # type: (Text, Text, bool) -> Text
550 148
     """Creates the Ansible collection artifact in a .tar.gz file.
551 149
 
552
-    :param collection_path: The path to the collection to build. This should be the directory that contains the
150
+    :param u_collection_path: The path to the collection to build. This should be the directory that contains the
553 151
         galaxy.yml file.
554
-    :param output_path: The path to create the collection build artifact. This should be a directory.
152
+    :param u_output_path: The path to create the collection build artifact. This should be a directory.
555 153
     :param force: Whether to overwrite an existing collection build artifact or fail.
556 154
     :return: The path to the collection build artifact.
557 155
     """
558
-    b_collection_path = to_bytes(collection_path, errors='surrogate_or_strict')
559
-    b_galaxy_path = get_galaxy_metadata_path(b_collection_path)
560
-    if not os.path.exists(b_galaxy_path):
561
-        raise AnsibleError("The collection galaxy.yml path '%s' does not exist." % to_native(b_galaxy_path))
562
-
563
-    info = CollectionRequirement.galaxy_metadata(b_collection_path)
564
-
565
-    collection_manifest = info['manifest_file']
566
-    collection_meta = collection_manifest['collection_info']
567
-    file_manifest = info['files_file']
156
+    b_collection_path = to_bytes(u_collection_path, errors='surrogate_or_strict')
157
+    try:
158
+        collection_meta = _get_meta_from_src_dir(b_collection_path)
159
+    except LookupError as lookup_err:
160
+        raise_from(AnsibleError(to_native(lookup_err)), lookup_err)
161
+
162
+    collection_manifest = _build_manifest(**collection_meta)
163
+    file_manifest = _build_files_manifest(
164
+        b_collection_path,
165
+        collection_meta['namespace'],  # type: ignore[arg-type]
166
+        collection_meta['name'],  # type: ignore[arg-type]
167
+        collection_meta['build_ignore'],  # type: ignore[arg-type]
168
+    )
568 169
 
569
-    collection_output = os.path.join(output_path, "%s-%s-%s.tar.gz" % (collection_meta['namespace'],
570
-                                                                       collection_meta['name'],
571
-                                                                       collection_meta['version']))
170
+    artifact_tarball_file_name = '{ns!s}-{name!s}-{ver!s}.tar.gz'.format(
171
+        name=collection_meta['name'],
172
+        ns=collection_meta['namespace'],
173
+        ver=collection_meta['version'],
174
+    )
175
+    b_collection_output = os.path.join(
176
+        to_bytes(u_output_path),
177
+        to_bytes(artifact_tarball_file_name, errors='surrogate_or_strict'),
178
+    )
572 179
 
573
-    b_collection_output = to_bytes(collection_output, errors='surrogate_or_strict')
574 180
     if os.path.exists(b_collection_output):
575 181
         if os.path.isdir(b_collection_output):
576 182
             raise AnsibleError("The output collection artifact '%s' already exists, "
577
-                               "but is a directory - aborting" % to_native(collection_output))
183
+                               "but is a directory - aborting" % to_native(b_collection_output))
578 184
         elif not force:
579 185
             raise AnsibleError("The file '%s' already exists. You can use --force to re-create "
580
-                               "the collection artifact." % to_native(collection_output))
186
+                               "the collection artifact." % to_native(b_collection_output))
581 187
 
582
-    _build_collection_tar(b_collection_path, b_collection_output, collection_manifest, file_manifest)
188
+    collection_output = _build_collection_tar(b_collection_path, b_collection_output, collection_manifest, file_manifest)
583 189
     return collection_output
584 190
 
585 191
 
586
-def download_collections(collections, output_path, apis, validate_certs, no_deps, allow_pre_release):
192
+def download_collections(
193
+        collections,  # type: Iterable[Requirement]
194
+        output_path,  # type: str
195
+        apis,  # type: Iterable[GalaxyAPI]
196
+        no_deps,  # type: bool
197
+        allow_pre_release,  # type: bool
198
+        artifacts_manager,  # type: ConcreteArtifactsManager
199
+):  # type: (...) -> None
587 200
     """Download Ansible collections as their tarball from a Galaxy server to the path specified and creates a requirements
588 201
     file of the downloaded requirements to be used for an install.
589 202
 
... ...
@@ -594,41 +278,88 @@ def download_collections(collections, output_path, apis, validate_certs, no_deps
594 594
     :param no_deps: Ignore any collection dependencies and only download the base requirements.
595 595
     :param allow_pre_release: Do not ignore pre-release versions when selecting the latest.
596 596
     """
597
-    with _tempdir() as b_temp_path:
598
-        with _display_progress("Process download dependency map"):
599
-            dep_map = _build_dependency_map(collections, [], b_temp_path, apis, validate_certs, True, True, no_deps,
600
-                                            allow_pre_release=allow_pre_release)
597
+    with _display_progress("Process download dependency map"):
598
+        dep_map = _resolve_depenency_map(
599
+            set(collections),
600
+            galaxy_apis=apis,
601
+            preferred_candidates=None,
602
+            concrete_artifacts_manager=artifacts_manager,
603
+            no_deps=no_deps,
604
+            allow_pre_release=allow_pre_release,
605
+        )
606
+
607
+    b_output_path = to_bytes(output_path, errors='surrogate_or_strict')
608
+
609
+    requirements = []
610
+    with _display_progress(
611
+            "Starting collection download process to '{path!s}'".
612
+            format(path=output_path),
613
+    ):
614
+        for fqcn, concrete_coll_pin in dep_map.copy().items():  # FIXME: move into the provider
615
+            if concrete_coll_pin.is_virtual:
616
+                display.v(
617
+                    'Virtual collection {coll!s} is not downloadable'.
618
+                    format(coll=to_text(concrete_coll_pin)),
619
+                )
620
+                continue
601 621
 
602
-        requirements = []
603
-        with _display_progress(
604
-                "Starting collection download process to '{path!s}'".
605
-                format(path=output_path),
606
-        ):
607
-            for name, requirement in dep_map.items():
608
-                collection_filename = "%s-%s-%s.tar.gz" % (requirement.namespace, requirement.name,
609
-                                                           requirement.latest_version)
610
-                dest_path = os.path.join(output_path, collection_filename)
611
-                requirements.append({'name': collection_filename, 'version': requirement.latest_version})
612
-
613
-                display.display("Downloading collection '%s' to '%s'" % (name, dest_path))
614
-
615
-                if requirement.api is None and requirement.b_path and os.path.isfile(requirement.b_path):
616
-                    shutil.copy(requirement.b_path, to_bytes(dest_path, errors='surrogate_or_strict'))
617
-                elif requirement.api is None and requirement.b_path:
618
-                    temp_path = to_text(b_temp_path, errors='surrogate_or_string')
619
-                    temp_download_path = build_collection(requirement.b_path, temp_path, True)
620
-                    shutil.move(to_bytes(temp_download_path, errors='surrogate_or_strict'),
621
-                                to_bytes(dest_path, errors='surrogate_or_strict'))
622
-                else:
623
-                    b_temp_download_path = requirement.download(b_temp_path)
624
-                    shutil.move(b_temp_download_path, to_bytes(dest_path, errors='surrogate_or_strict'))
622
+            display.display(
623
+                u"Downloading collection '{coll!s}' to '{path!s}'".
624
+                format(coll=to_text(concrete_coll_pin), path=to_text(b_output_path)),
625
+            )
626
+
627
+            b_src_path = (
628
+                artifacts_manager.get_artifact_path
629
+                if concrete_coll_pin.is_concrete_artifact
630
+                else artifacts_manager.get_galaxy_artifact_path
631
+            )(concrete_coll_pin)
625 632
 
626
-                display.display("%s (%s) was downloaded successfully" % (name, requirement.latest_version))
633
+            b_dest_path = os.path.join(
634
+                b_output_path,
635
+                os.path.basename(b_src_path),
636
+            )
637
+
638
+            if concrete_coll_pin.is_dir:
639
+                b_dest_path = to_bytes(
640
+                    build_collection(
641
+                        to_text(b_src_path, errors='surrogate_or_strict'),
642
+                        to_text(output_path, errors='surrogate_or_strict'),
643
+                        force=True,
644
+                    ),
645
+                    errors='surrogate_or_strict',
646
+                )
647
+            else:
648
+                shutil.copy(to_native(b_src_path), to_native(b_dest_path))
627 649
 
628
-            requirements_path = os.path.join(output_path, 'requirements.yml')
629
-            display.display("Writing requirements.yml file of downloaded collections to '%s'" % requirements_path)
630
-            with open(to_bytes(requirements_path, errors='surrogate_or_strict'), mode='wb') as req_fd:
631
-                req_fd.write(to_bytes(yaml.safe_dump({'collections': requirements}), errors='surrogate_or_strict'))
650
+            display.display(
651
+                "Collection '{coll!s}' was downloaded successfully".
652
+                format(coll=concrete_coll_pin),
653
+            )
654
+            requirements.append({
655
+                # FIXME: Consider using a more specific upgraded format
656
+                # FIXME: having FQCN in the name field, with src field
657
+                # FIXME: pointing to the file path, and explicitly set
658
+                # FIXME: type. If version and name are set, it'd
659
+                # FIXME: perform validation against the actual metadata
660
+                # FIXME: in the artifact src points at.
661
+                'name': to_native(os.path.basename(b_dest_path)),
662
+                'version': concrete_coll_pin.ver,
663
+            })
664
+
665
+        requirements_path = os.path.join(output_path, 'requirements.yml')
666
+        b_requirements_path = to_bytes(
667
+            requirements_path, errors='surrogate_or_strict',
668
+        )
669
+        display.display(
670
+            u'Writing requirements.yml file of downloaded collections '
671
+            "to '{path!s}'".format(path=to_text(requirements_path)),
672
+        )
673
+        yaml_bytes = to_bytes(
674
+            yaml.safe_dump({'collections': requirements}),
675
+            errors='surrogate_or_strict',
676
+        )
677
+        with open(b_requirements_path, mode='wb') as req_fd:
678
+            req_fd.write(yaml_bytes)
632 679
 
633 680
 
634 681
 def publish_collection(collection_path, api, wait, timeout):
... ...
@@ -668,11 +399,20 @@ def publish_collection(collection_path, api, wait, timeout):
668 668
                         % (api.name, api.api_server, import_uri))
669 669
 
670 670
 
671
-def install_collections(collections, output_path, apis, validate_certs, ignore_errors, no_deps, force, force_deps,
672
-                        allow_pre_release=False):
671
+def install_collections(
672
+        collections,  # type: Iterable[Requirement]
673
+        output_path,  # type: str
674
+        apis,  # type: Iterable[GalaxyAPI]
675
+        ignore_errors,  # type: bool
676
+        no_deps,  # type: bool
677
+        force,  # type: bool
678
+        force_deps,  # type: bool
679
+        allow_pre_release,  # type: bool
680
+        artifacts_manager,  # type: ConcreteArtifactsManager
681
+):  # type: (...) -> None
673 682
     """Install Ansible collections to the path specified.
674 683
 
675
-    :param collections: The collections to install, should be a list of tuples with (name, requirement, Galaxy server).
684
+    :param collections: The collections to install.
676 685
     :param output_path: The path to install the collections to.
677 686
     :param apis: A list of GalaxyAPIs to query when searching for a collection.
678 687
     :param validate_certs: Whether to validate the certificates if downloading a tarball.
... ...
@@ -681,27 +421,132 @@ def install_collections(collections, output_path, apis, validate_certs, ignore_e
681 681
     :param force: Re-install a collection if it has already been installed.
682 682
     :param force_deps: Re-install a collection as well as its dependencies if they have already been installed.
683 683
     """
684
-    existing_collections = find_existing_collections(output_path, fallback_metadata=True)
684
+    existing_collections = {
685
+        Requirement(coll.fqcn, coll.ver, coll.src, coll.type)
686
+        for coll in find_existing_collections(output_path, artifacts_manager)
687
+    }
685 688
 
686
-    with _tempdir() as b_temp_path:
687
-        with _display_progress("Process install dependency map"):
688
-            dependency_map = _build_dependency_map(collections, existing_collections, b_temp_path, apis,
689
-                                                   validate_certs, force, force_deps, no_deps,
690
-                                                   allow_pre_release=allow_pre_release)
689
+    unsatisfied_requirements = set(
690
+        chain.from_iterable(
691
+            (
692
+                Requirement.from_dir_path(sub_coll, artifacts_manager)
693
+                for sub_coll in (
694
+                    artifacts_manager.
695
+                    get_direct_collection_dependencies(install_req).
696
+                    keys()
697
+                )
698
+            )
699
+            if install_req.is_subdirs else (install_req, )
700
+            for install_req in collections
701
+        ),
702
+    )
703
+    requested_requirements_names = {req.fqcn for req in unsatisfied_requirements}
704
+
705
+    # NOTE: Don't attempt to reevaluate already installed deps
706
+    # NOTE: unless `--force` or `--force-with-deps` is passed
707
+    unsatisfied_requirements -= set() if force or force_deps else {
708
+        req
709
+        for req in unsatisfied_requirements
710
+        for exs in existing_collections
711
+        if req.fqcn == exs.fqcn and meets_requirements(exs.ver, req.ver)
712
+    }
691 713
 
692
-        with _display_progress("Starting collection install process"):
693
-            for collection in dependency_map.values():
694
-                try:
695
-                    collection.install(output_path, b_temp_path)
696
-                except AnsibleError as err:
697
-                    if ignore_errors:
698
-                        display.warning("Failed to install collection %s but skipping due to --ignore-errors being set. "
699
-                                        "Error: %s" % (to_text(collection), to_text(err)))
700
-                    else:
701
-                        raise
714
+    if not unsatisfied_requirements:
715
+        display.display(
716
+            'Nothing to do. All requested collections are already '
717
+            'installed. If you want to reinstall them, '
718
+            'consider using `--force`.'
719
+        )
720
+        return
721
+
722
+    # FIXME: This probably needs to be improved to
723
+    # FIXME: properly match differing src/type.
724
+    existing_non_requested_collections = {
725
+        coll for coll in existing_collections
726
+        if coll.fqcn not in requested_requirements_names
727
+    }
728
+
729
+    preferred_requirements = (
730
+        [] if force_deps
731
+        else existing_non_requested_collections if force
732
+        else existing_collections
733
+    )
734
+    preferred_collections = {
735
+        Candidate(coll.fqcn, coll.ver, coll.src, coll.type)
736
+        for coll in preferred_requirements
737
+    }
738
+    with _display_progress("Process install dependency map"):
739
+        try:
740
+            dependency_map = _resolve_depenency_map(
741
+                collections,
742
+                galaxy_apis=apis,
743
+                preferred_candidates=preferred_collections,
744
+                concrete_artifacts_manager=artifacts_manager,
745
+                no_deps=no_deps,
746
+                allow_pre_release=allow_pre_release,
747
+            )
748
+        except InconsistentCandidate as inconsistent_candidate_exc:
749
+            # FIXME: Processing this error is hacky and should be removed along
750
+            # FIXME: with implementing the automatic replacement for installed
751
+            # FIXME: collections.
752
+            if not all(
753
+                    inconsistent_candidate_exc.candidate.fqcn == r.fqcn
754
+                    for r in inconsistent_candidate_exc.criterion.iter_requirement()
755
+            ):
756
+                raise
757
+
758
+            req_info = inconsistent_candidate_exc.criterion.information[0]
759
+            force_flag = (
760
+                '--force' if req_info.parent is None
761
+                else '--force-with-deps'
762
+            )
763
+            raise_from(
764
+                AnsibleError(
765
+                    'Cannot meet requirement {collection!s} as it is already '
766
+                    "installed at version '{installed_ver!s}'. "
767
+                    'Use {force_flag!s} to overwrite'.format(
768
+                        collection=req_info.requirement,
769
+                        force_flag=force_flag,
770
+                        installed_ver=inconsistent_candidate_exc.candidate.ver,
771
+                    )
772
+                ),
773
+                inconsistent_candidate_exc,
774
+            )
775
+
776
+    with _display_progress("Starting collection install process"):
777
+        for fqcn, concrete_coll_pin in dependency_map.items():
778
+            if concrete_coll_pin.is_virtual:
779
+                display.vvvv(
780
+                    "Skipping '{coll!s}' as it is virtual".
781
+                    format(coll=to_text(concrete_coll_pin)),
782
+                )
783
+                continue
784
+
785
+            if concrete_coll_pin in preferred_collections:
786
+                display.display(
787
+                    "Skipping '{coll!s}' as it is already installed".
788
+                    format(coll=to_text(concrete_coll_pin)),
789
+                )
790
+                continue
791
+
792
+            try:
793
+                install(concrete_coll_pin, output_path, artifacts_manager)
794
+            except AnsibleError as err:
795
+                if ignore_errors:
796
+                    display.warning(
797
+                        'Failed to install collection {coll!s} but skipping '
798
+                        'due to --ignore-errors being set. Error: {error!s}'.
799
+                        format(
800
+                            coll=to_text(concrete_coll_pin),
801
+                            error=to_text(err),
802
+                        )
803
+                    )
804
+                else:
805
+                    raise
702 806
 
703 807
 
704
-def validate_collection_name(name):
808
+# NOTE: imported in ansible.cli.galaxy
809
+def validate_collection_name(name):  # type: (str) -> str
705 810
     """Validates the collection name as an input from the user or a requirements file fit the requirements.
706 811
 
707 812
     :param name: The input name with optional range specifier split by ':'.
... ...
@@ -717,7 +562,8 @@ def validate_collection_name(name):
717 717
                        "characters from [a-zA-Z0-9_] only." % name)
718 718
 
719 719
 
720
-def validate_collection_path(collection_path):
720
+# NOTE: imported in ansible.cli.galaxy
721
+def validate_collection_path(collection_path):  # type: (str) -> str
721 722
     """Ensure a given path ends with 'ansible_collections'
722 723
 
723 724
     :param collection_path: The path that should end in 'ansible_collections'
... ...
@@ -730,73 +576,106 @@ def validate_collection_path(collection_path):
730 730
     return collection_path
731 731
 
732 732
 
733
-def verify_collections(collections, search_paths, apis, validate_certs, ignore_errors, allow_pre_release=False):
733
+def verify_collections(
734
+        collections,  # type: Iterable[Requirement]
735
+        search_paths,  # type: Iterable[str]
736
+        apis,  # type: Iterable[GalaxyAPI]
737
+        ignore_errors,  # type: bool
738
+        artifacts_manager,  # type: ConcreteArtifactsManager
739
+):  # type: (...) -> None
740
+    r"""Verify the integrity of locally installed collections.
741
+
742
+    :param collections: The collections to check.
743
+    :param search_paths: Locations for the local collection lookup.
744
+    :param apis: A list of GalaxyAPIs to query when searching for a collection.
745
+    :param ignore_errors: Whether to ignore any errors when verifying the collection.
746
+    :param artifacts_manager: Artifacts manager.
747
+    """
748
+    api_proxy = MultiGalaxyAPIProxy(apis, artifacts_manager)
734 749
 
735 750
     with _display_progress():
736
-        with _tempdir() as b_temp_path:
737
-            for collection in collections:
751
+        for collection in collections:
752
+            try:
753
+                if collection.is_concrete_artifact:
754
+                    raise AnsibleError(
755
+                        message="'{coll_type!s}' type is not supported. "
756
+                        'The format namespace.name is expected.'.
757
+                        format(coll_type=collection.type)
758
+                    )
759
+
760
+                # NOTE: Verify local collection exists before
761
+                # NOTE: downloading its source artifact from
762
+                # NOTE: a galaxy server.
763
+                for search_path in search_paths:
764
+                    b_search_path = to_bytes(
765
+                        os.path.join(
766
+                            search_path,
767
+                            collection.namespace, collection.name,
768
+                        ),
769
+                        errors='surrogate_or_strict',
770
+                    )
771
+                    if not os.path.isdir(b_search_path):
772
+                        continue
773
+
774
+                    local_collection = Candidate.from_dir_path(
775
+                        b_search_path, artifacts_manager,
776
+                    )
777
+                    break
778
+                else:
779
+                    raise AnsibleError(message='Collection %s is not installed in any of the collection paths.' % collection.fqcn)
780
+
781
+                remote_collection = Candidate(
782
+                    collection.fqcn,
783
+                    collection.ver if collection.ver != '*'
784
+                    else local_collection.ver,
785
+                    None, 'galaxy',
786
+                )
787
+
788
+                # Download collection on a galaxy server for comparison
738 789
                 try:
790
+                    # NOTE: Trigger the lookup. If found, it'll cache
791
+                    # NOTE: download URL and token in artifact manager.
792
+                    api_proxy.get_collection_version_metadata(
793
+                        remote_collection,
794
+                    )
795
+                except AnsibleError as e:  # FIXME: does this actually emit any errors?
796
+                    # FIXME: extract the actual message and adjust this:
797
+                    expected_error_msg = (
798
+                        'Failed to find collection {coll.fqcn!s}:{coll.ver!s}'.
799
+                        format(coll=collection)
800
+                    )
801
+                    if e.message == expected_error_msg:
802
+                        raise AnsibleError(
803
+                            'Failed to find remote collection '
804
+                            "'{coll!s}' on any of the galaxy servers".
805
+                            format(coll=collection)
806
+                        )
807
+                    raise
808
+
809
+                verify_local_collection(
810
+                    local_collection, remote_collection,
811
+                    artifacts_manager,
812
+                )
739 813
 
740
-                    local_collection = None
741
-                    b_collection = to_bytes(collection[0], errors='surrogate_or_strict')
742
-
743
-                    if os.path.isfile(b_collection) or urlparse(collection[0]).scheme.lower() in ['http', 'https'] or len(collection[0].split('.')) != 2:
744
-                        raise AnsibleError(message="'%s' is not a valid collection name. The format namespace.name is expected." % collection[0])
745
-
746
-                    collection_name = collection[0]
747
-                    namespace, name = collection_name.split('.')
748
-                    collection_version = collection[1]
749
-
750
-                    # Verify local collection exists before downloading it from a galaxy server
751
-                    for search_path in search_paths:
752
-                        b_search_path = to_bytes(os.path.join(search_path, namespace, name), errors='surrogate_or_strict')
753
-                        if os.path.isdir(b_search_path):
754
-                            if not os.path.isfile(os.path.join(to_text(b_search_path, errors='surrogate_or_strict'), 'MANIFEST.json')):
755
-                                raise AnsibleError(
756
-                                    message="Collection %s does not appear to have a MANIFEST.json. " % collection_name +
757
-                                            "A MANIFEST.json is expected if the collection has been built and installed via ansible-galaxy."
758
-                                )
759
-                            local_collection = CollectionRequirement.from_path(b_search_path, False)
760
-                            break
761
-                    if local_collection is None:
762
-                        raise AnsibleError(message='Collection %s is not installed in any of the collection paths.' % collection_name)
763
-
764
-                    # Download collection on a galaxy server for comparison
765
-                    try:
766
-                        remote_collection = CollectionRequirement.from_name(collection_name, apis, collection_version, False, parent=None,
767
-                                                                            allow_pre_release=allow_pre_release)
768
-                    except AnsibleError as e:
769
-                        if e.message == 'Failed to find collection %s:%s' % (collection[0], collection[1]):
770
-                            raise AnsibleError('Failed to find remote collection %s:%s on any of the galaxy servers' % (collection[0], collection[1]))
771
-                        raise
772
-
773
-                    download_url = remote_collection.metadata.download_url
774
-                    headers = {}
775
-                    remote_collection.api._add_auth_token(headers, download_url, required=False)
776
-                    b_temp_tar_path = _download_file(download_url, b_temp_path, None, validate_certs, headers=headers)
777
-
778
-                    local_collection.verify(remote_collection, search_path, b_temp_tar_path)
779
-
780
-                except AnsibleError as err:
781
-                    if ignore_errors:
782
-                        display.warning("Failed to verify collection %s but skipping due to --ignore-errors being set. "
783
-                                        "Error: %s" % (collection[0], to_text(err)))
784
-                    else:
785
-                        raise
814
+            except AnsibleError as err:
815
+                if ignore_errors:
816
+                    display.warning(
817
+                        "Failed to verify collection '{coll!s}' but skipping "
818
+                        'due to --ignore-errors being set. '
819
+                        'Error: {err!s}'.
820
+                        format(coll=collection, err=to_text(err)),
821
+                    )
822
+                else:
823
+                    raise
786 824
 
787 825
 
788 826
 @contextmanager
789 827
 def _tempdir():
790 828
     b_temp_path = tempfile.mkdtemp(dir=to_bytes(C.DEFAULT_LOCAL_TMP, errors='surrogate_or_strict'))
791
-    yield b_temp_path
792
-    shutil.rmtree(b_temp_path)
793
-
794
-
795
-@contextmanager
796
-def _tarfile_extract(tar, member):
797
-    tar_obj = tar.extractfile(member)
798
-    yield member, tar_obj
799
-    tar_obj.close()
829
+    try:
830
+        yield b_temp_path
831
+    finally:
832
+        shutil.rmtree(b_temp_path)
800 833
 
801 834
 
802 835
 @contextmanager
... ...
@@ -867,70 +746,21 @@ def _display_progress(msg=None):
867 867
         display = old_display
868 868
 
869 869
 
870
-def _get_galaxy_yml(b_galaxy_yml_path):
871
-    meta_info = get_collections_galaxy_meta_info()
872
-
873
-    mandatory_keys = set()
874
-    string_keys = set()
875
-    list_keys = set()
876
-    dict_keys = set()
877
-
878
-    for info in meta_info:
879
-        if info.get('required', False):
880
-            mandatory_keys.add(info['key'])
881
-
882
-        key_list_type = {
883
-            'str': string_keys,
884
-            'list': list_keys,
885
-            'dict': dict_keys,
886
-        }[info.get('type', 'str')]
887
-        key_list_type.add(info['key'])
888
-
889
-    all_keys = frozenset(list(mandatory_keys) + list(string_keys) + list(list_keys) + list(dict_keys))
890
-
891
-    try:
892
-        with open(b_galaxy_yml_path, 'rb') as g_yaml:
893
-            galaxy_yml = yaml.safe_load(g_yaml)
894
-    except YAMLError as err:
895
-        raise AnsibleError("Failed to parse the galaxy.yml at '%s' with the following error:\n%s"
896
-                           % (to_native(b_galaxy_yml_path), to_native(err)))
897
-
898
-    set_keys = set(galaxy_yml.keys())
899
-    missing_keys = mandatory_keys.difference(set_keys)
900
-    if missing_keys:
901
-        raise AnsibleError("The collection galaxy.yml at '%s' is missing the following mandatory keys: %s"
902
-                           % (to_native(b_galaxy_yml_path), ", ".join(sorted(missing_keys))))
903
-
904
-    extra_keys = set_keys.difference(all_keys)
905
-    if len(extra_keys) > 0:
906
-        display.warning("Found unknown keys in collection galaxy.yml at '%s': %s"
907
-                        % (to_text(b_galaxy_yml_path), ", ".join(extra_keys)))
908
-
909
-    # Add the defaults if they have not been set
910
-    for optional_string in string_keys:
911
-        if optional_string not in galaxy_yml:
912
-            galaxy_yml[optional_string] = None
913
-
914
-    for optional_list in list_keys:
915
-        list_val = galaxy_yml.get(optional_list, None)
870
+def _verify_file_hash(b_path, filename, expected_hash, error_queue):
871
+    b_file_path = to_bytes(os.path.join(to_text(b_path), filename), errors='surrogate_or_strict')
916 872
 
917
-        if list_val is None:
918
-            galaxy_yml[optional_list] = []
919
-        elif not isinstance(list_val, list):
920
-            galaxy_yml[optional_list] = [list_val]
921
-
922
-    for optional_dict in dict_keys:
923
-        if optional_dict not in galaxy_yml:
924
-            galaxy_yml[optional_dict] = {}
925
-
926
-    # license is a builtin var in Python, to avoid confusion we just rename it to license_ids
927
-    galaxy_yml['license_ids'] = galaxy_yml['license']
928
-    del galaxy_yml['license']
873
+    if not os.path.isfile(b_file_path):
874
+        actual_hash = None
875
+    else:
876
+        with open(b_file_path, mode='rb') as file_object:
877
+            actual_hash = _consume_file(file_object)
929 878
 
930
-    return galaxy_yml
879
+    if expected_hash != actual_hash:
880
+        error_queue.append(ModifiedContent(filename=filename, expected=expected_hash, installed=actual_hash))
931 881
 
932 882
 
933 883
 def _build_files_manifest(b_collection_path, namespace, name, ignore_patterns):
884
+    # type: (bytes, str, str, List[str]) -> FilesManifestType
934 885
     # We always ignore .pyc and .retry files as well as some well known version control directories. The ignore
935 886
     # patterns can be extended by the build_ignore key in galaxy.yml
936 887
     b_ignore_patterns = [
... ...
@@ -963,7 +793,7 @@ def _build_files_manifest(b_collection_path, namespace, name, ignore_patterns):
963 963
             },
964 964
         ],
965 965
         'format': MANIFEST_FORMAT,
966
-    }
966
+    }  # type: FilesManifestType
967 967
 
968 968
     def _walk(b_path, b_top_level_dir):
969 969
         for b_item in os.listdir(b_path):
... ...
@@ -1014,9 +844,9 @@ def _build_files_manifest(b_collection_path, namespace, name, ignore_patterns):
1014 1014
     return manifest
1015 1015
 
1016 1016
 
1017
-def _build_manifest(namespace, name, version, authors, readme, tags, description, license_ids, license_file,
1017
+# FIXME: accept a dict produced from `galaxy.yml` instead of separate args
1018
+def _build_manifest(namespace, name, version, authors, readme, tags, description, license_file,
1018 1019
                     dependencies, repository, documentation, homepage, issues, **kwargs):
1019
-
1020 1020
     manifest = {
1021 1021
         'collection_info': {
1022 1022
             'namespace': namespace,
... ...
@@ -1026,8 +856,8 @@ def _build_manifest(namespace, name, version, authors, readme, tags, description
1026 1026
             'readme': readme,
1027 1027
             'tags': tags,
1028 1028
             'description': description,
1029
-            'license': license_ids,
1030
-            'license_file': license_file if license_file else None,  # Handle galaxy.yml having an empty string (None)
1029
+            'license': kwargs['license'],
1030
+            'license_file': license_file or None,  # Handle galaxy.yml having an empty string (None)
1031 1031
             'dependencies': dependencies,
1032 1032
             'repository': repository,
1033 1033
             'documentation': documentation,
... ...
@@ -1047,7 +877,12 @@ def _build_manifest(namespace, name, version, authors, readme, tags, description
1047 1047
     return manifest
1048 1048
 
1049 1049
 
1050
-def _build_collection_tar(b_collection_path, b_tar_path, collection_manifest, file_manifest):
1050
+def _build_collection_tar(
1051
+        b_collection_path,  # type: bytes
1052
+        b_tar_path,  # type: bytes
1053
+        collection_manifest,  # type: CollectionManifestType
1054
+        file_manifest,  # type: FilesManifestType
1055
+):  # type: (...) -> Text
1051 1056
     """Build a tar.gz collection artifact from the manifest data."""
1052 1057
     files_manifest_json = to_bytes(json.dumps(file_manifest, indent=True), errors='surrogate_or_strict')
1053 1058
     collection_manifest['file_manifest_file']['chksum_sha256'] = secure_hash_s(files_manifest_json, hash_func=sha256)
... ...
@@ -1062,11 +897,11 @@ def _build_collection_tar(b_collection_path, b_tar_path, collection_manifest, fi
1062 1062
                 b_io = BytesIO(b)
1063 1063
                 tar_info = tarfile.TarInfo(name)
1064 1064
                 tar_info.size = len(b)
1065
-                tar_info.mtime = time.time()
1065
+                tar_info.mtime = int(time.time())
1066 1066
                 tar_info.mode = 0o0644
1067 1067
                 tar_file.addfile(tarinfo=tar_info, fileobj=b_io)
1068 1068
 
1069
-            for file_info in file_manifest['files']:
1069
+            for file_info in file_manifest['files']:  # type: ignore[union-attr]
1070 1070
                 if file_info['name'] == '.':
1071 1071
                     continue
1072 1072
 
... ...
@@ -1097,12 +932,19 @@ def _build_collection_tar(b_collection_path, b_tar_path, collection_manifest, fi
1097 1097
                         continue
1098 1098
 
1099 1099
                 # Dealing with a normal file, just add it by name.
1100
-                tar_file.add(os.path.realpath(b_src_path), arcname=filename, recursive=False, filter=reset_stat)
1100
+                tar_file.add(
1101
+                    to_native(os.path.realpath(b_src_path)),
1102
+                    arcname=filename,
1103
+                    recursive=False,
1104
+                    filter=reset_stat,
1105
+                )
1101 1106
 
1102
-        shutil.copy(b_tar_filepath, b_tar_path)
1107
+        shutil.copy(to_native(b_tar_filepath), to_native(b_tar_path))
1103 1108
         collection_name = "%s.%s" % (collection_manifest['collection_info']['namespace'],
1104 1109
                                      collection_manifest['collection_info']['name'])
1105
-        display.display('Created collection for %s at %s' % (collection_name, to_text(b_tar_path)))
1110
+        tar_path = to_text(b_tar_path)
1111
+        display.display(u'Created collection for %s at %s' % (collection_name, tar_path))
1112
+        return tar_path
1106 1113
 
1107 1114
 
1108 1115
 def _build_collection_dir(b_collection_path, b_collection_output, collection_manifest, file_manifest):
... ...
@@ -1146,263 +988,159 @@ def _build_collection_dir(b_collection_path, b_collection_output, collection_man
1146 1146
             shutil.copyfile(src_file, dest_file)
1147 1147
 
1148 1148
         os.chmod(dest_file, mode)
1149
+    collection_output = to_text(b_collection_output)
1150
+    return collection_output
1149 1151
 
1150 1152
 
1151
-def find_existing_collections(path, fallback_metadata=False):
1152
-    collections = []
1153
def find_existing_collections(path, artifacts_manager):
    """Locate all collections under a given path.

    Walks the two-level ``<namespace>/<collection>`` directory layout and
    yields one ``Candidate`` per collection directory found.

    :param path: Collection dirs layout search path.
    :param artifacts_manager: Artifacts manager.
    """
    b_path = to_bytes(path, errors='surrogate_or_strict')

    # FIXME: consider using `glob.glob()` to simplify looping
    for b_namespace in os.listdir(b_path):
        b_namespace_path = os.path.join(b_path, b_namespace)
        # Stray files at the namespace level are not collections; skip them.
        if os.path.isfile(b_namespace_path):
            continue

        # FIXME: consider feeding b_namespace_path to Candidate.from_dir_path to get subdirs automatically
        for b_collection in os.listdir(b_namespace_path):
            b_collection_path = os.path.join(b_namespace_path, b_collection)
            if not os.path.isdir(b_collection_path):
                continue

            # Wrap metadata/parsing failures in AnsibleError, chaining the
            # original ValueError for context.
            try:
                candidate = Candidate.from_dir_path_as_unknown(
                    b_collection_path,
                    artifacts_manager,
                )
            except ValueError as val_err:
                raise_from(AnsibleError(val_err), val_err)

            display.vvv(
                u"Found installed collection {coll!s} at '{path!s}'".
                format(coll=to_text(candidate), path=to_text(candidate.src))
            )
            yield candidate
1188
def install(collection, path, artifacts_manager):  # FIXME: mv to dataclasses?
    # type: (Candidate, str, ConcreteArtifactsManager) -> None
    """Install a collection under a given path.

    :param collection: Collection to be installed.
    :param path: Collection dirs layout path.
    :param artifacts_manager: Artifacts manager.
    """
    # Concrete artifacts (local dirs/tarballs/SCM checkouts) already have an
    # on-disk path; everything else must be fetched via the Galaxy cache.
    if collection.is_concrete_artifact:
        resolve_artifact_path = artifacts_manager.get_artifact_path
    else:
        resolve_artifact_path = artifacts_manager.get_galaxy_artifact_path
    b_artifact_path = resolve_artifact_path(collection)

    collection_path = os.path.join(path, collection.namespace, collection.name)
    b_collection_path = to_bytes(collection_path, errors='surrogate_or_strict')
    display.display(
        u"Installing '{coll!s}' to '{path!s}'".
        format(coll=to_text(collection), path=collection_path),
    )

    # Remove any pre-existing install of the same collection first.
    if os.path.exists(b_collection_path):
        shutil.rmtree(b_collection_path)

    if collection.is_dir:
        install_src(collection, b_artifact_path, b_collection_path, artifacts_manager)
    else:
        # NOTE(review): reaches into a private attribute of the artifacts
        # manager for its working dir — presumably intentional; confirm.
        install_artifact(b_artifact_path, b_collection_path, artifacts_manager._b_working_directory)

    display.display(
        '{coll!s} was installed successfully'.
        format(coll=to_text(collection)),
    )
1339 1221
 
1340
-def get_collection_info_from_req(dep_map, collection):
1341
-    collection_name = to_text(collection)
1342
-    if collection_name in dep_map:
1343
-        collection_info = dep_map[collection_name]
1344
-        collection_info.add_requirement(None, collection.latest_version)
1345
-    else:
1346
-        collection_info = collection
1347
-    return collection_info
1222
def install_artifact(b_coll_targz_path, b_collection_path, b_temp_path):
    """Install a collection from tarball under a given path.

    Reads ``FILES.json`` from the tarball to learn which members to extract
    (and the expected SHA-256 of each regular file), then extracts the two
    manifest files and every listed member into ``b_collection_path``.

    :param b_coll_targz_path: Collection tarball to be installed.
    :param b_collection_path: Collection dirs layout path.
    :param b_temp_path: Temporary dir path.
    :raises Exception: Re-raises whatever extraction error occurred, after
        cleaning up any partially-installed collection directory.
    """
    try:
        with tarfile.open(b_coll_targz_path, mode='r') as collection_tar:
            files_member_obj = collection_tar.getmember('FILES.json')
            with _tarfile_extract(collection_tar, files_member_obj) as (dummy, files_obj):
                files = json.loads(to_text(files_obj.read(), errors='surrogate_or_strict'))

            _extract_tar_file(collection_tar, 'MANIFEST.json', b_collection_path, b_temp_path)
            _extract_tar_file(collection_tar, 'FILES.json', b_collection_path, b_temp_path)

            for file_info in files['files']:
                file_name = file_info['name']
                if file_name == '.':
                    continue

                if file_info['ftype'] == 'file':
                    _extract_tar_file(collection_tar, file_name, b_collection_path, b_temp_path,
                                      expected_hash=file_info['chksum_sha256'])

                else:
                    _extract_tar_dir(collection_tar, file_name, b_collection_path)

    except Exception:
        # Ensure we don't leave the dir behind in case of a failure.
        # Guard the cleanup: if the failure happened before the collection
        # dir was created (e.g. a malformed tarball missing FILES.json),
        # an unconditional rmtree()/listdir() would raise FileNotFoundError
        # here and mask the original error being re-raised below.
        if os.path.isdir(b_collection_path):
            shutil.rmtree(b_collection_path)

        b_namespace_path = os.path.dirname(b_collection_path)
        if os.path.isdir(b_namespace_path) and not os.listdir(b_namespace_path):
            os.rmdir(b_namespace_path)

        raise
1382 1259
 
1383
-    return name, version, path, fragment
1384 1260
 
1261
def install_src(
        collection,
        b_collection_path, b_collection_output_path,
        artifacts_manager,
):
    r"""Install the collection from source control into given dir.

    Generates the Ansible collection artifact data from a galaxy.yml and
    installs the artifact to a directory.
    This should follow the same pattern as build_collection, but instead
    of creating an artifact, install it.

    :param collection: Collection to be installed.
    :param b_collection_path: Collection dirs layout path.
    :param b_collection_output_path: The installation directory for the \
                                     collection artifact.
    :param artifacts_manager: Artifacts manager.

    :raises AnsibleError: If no collection metadata found.
    """
    collection_meta = artifacts_manager.get_direct_collection_meta(collection)

    # An already-installed collection has no build_ignore key, so default
    # it to an empty list before building.  # installed collection, not src
    # FIXME: optimize this? use a different process? copy instead of build?
    collection_meta.setdefault('build_ignore', [])

    collection_manifest = _build_manifest(**collection_meta)
    file_manifest = _build_files_manifest(
        b_collection_path,
        collection_meta['namespace'], collection_meta['name'],
        collection_meta['build_ignore'],
    )

    collection_output_path = _build_collection_dir(
        b_collection_path, b_collection_output_path,
        collection_manifest, file_manifest,
    )

    display.display(
        'Created collection for {coll!s} at {path!s}'.
        format(coll=collection, path=collection_output_path)
    )
1406 1302
 
1407 1303
 
1408 1304
 def _extract_tar_dir(tar, dirname, b_dest):
... ...
@@ -1536,25 +1274,47 @@ def _is_child_path(path, parent_path, link_name=None):
1536 1536
     return b_path == b_parent_path or b_path.startswith(b_parent_path + to_bytes(os.path.sep))
1537 1537
 
1538 1538
 
1539
-def _consume_file(read_from, write_to=None):
1540
-    bufsize = 65536
1541
-    sha256_digest = sha256()
1542
-    data = read_from.read(bufsize)
1543
-    while data:
1544
-        if write_to is not None:
1545
-            write_to.write(data)
1546
-            write_to.flush()
1547
-        sha256_digest.update(data)
1548
-        data = read_from.read(bufsize)
1549
-
1550
-    return sha256_digest.hexdigest()
1551
-
1552
-
1553
-def get_galaxy_metadata_path(b_path):
1554
-    b_default_path = os.path.join(b_path, b'galaxy.yml')
1555
-    candidate_names = [b'galaxy.yml', b'galaxy.yaml']
1556
-    for b_name in candidate_names:
1557
-        b_path = os.path.join(b_path, b_name)
1558
-        if os.path.exists(b_path):
1559
-            return b_path
1560
-    return b_default_path
1539
# NOTE(review): "depenency" typo in the name is kept — it is the name
# callers elsewhere in the package use.
def _resolve_depenency_map(
        requested_requirements,  # type: Iterable[Requirement]
        galaxy_apis,  # type: Iterable[GalaxyAPI]
        concrete_artifacts_manager,  # type: ConcreteArtifactsManager
        preferred_candidates,  # type: Optional[Iterable[Candidate]]
        no_deps,  # type: bool
        allow_pre_release,  # type: bool
):  # type: (...) -> Dict[str, Candidate]
    """Return the resolved dependency map."""
    collection_dep_resolver = build_collection_dependency_resolver(
        galaxy_apis=galaxy_apis,
        concrete_artifacts_manager=concrete_artifacts_manager,
        preferred_candidates=preferred_candidates,
        with_deps=not no_deps,
        with_pre_releases=allow_pre_release,
    )
    try:
        return collection_dep_resolver.resolve(
            requested_requirements,
            max_rounds=2000000,  # NOTE: same constant pip uses
        ).mapping
    except CollectionDependencyResolutionImpossible as dep_exc:
        # Turn the resolver's conflict causes into a readable bullet list,
        # labelling each cause with where the requirement came from.
        error_msg_lines = [
            'Failed to resolve the requested '
            'dependencies map. Could not satisfy the following '
            'requirements:',
        ]
        for req_inf in dep_exc.causes:
            if req_inf.parent is None:
                dep_origin = 'direct request'
            else:
                dep_origin = 'dependency of {parent!s}'.format(parent=req_inf.parent)
            error_msg_lines.append(
                '* {req.fqcn!s}:{req.ver!s} ({dep_origin!s})'.format(
                    req=req_inf.requirement,
                    dep_origin=dep_origin,
                )
            )
        raise raise_from(  # NOTE: Leading "raise" is a hack for mypy bug #9717
            AnsibleError('\n'.join(error_msg_lines)),
            dep_exc,
        )
1561 1583
new file mode 100644
... ...
@@ -0,0 +1,646 @@
0
+# -*- coding: utf-8 -*-
1
+# Copyright: (c) 2020-2021, Ansible Project
2
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
3
+"""Concrete collection candidate management helper module."""
4
+
5
+from __future__ import (absolute_import, division, print_function)
6
+__metaclass__ = type
7
+
8
+import json
9
+import os
10
+import tarfile
11
+import subprocess
12
+from contextlib import contextmanager
13
+from hashlib import sha256
14
+from shutil import rmtree
15
+from tempfile import mkdtemp
16
+
17
+try:
18
+    from typing import TYPE_CHECKING
19
+except ImportError:
20
+    TYPE_CHECKING = False
21
+
22
+if TYPE_CHECKING:
23
+    from typing import (
24
+        Any,  # FIXME: !!!111
25
+        BinaryIO, Dict, IO,
26
+        Iterator, List, Optional,
27
+        Set, Tuple, Type, Union,
28
+    )
29
+
30
+    from ansible.galaxy.dependency_resolution.dataclasses import (
31
+        Candidate, Requirement,
32
+    )
33
+    from ansible.galaxy.token import GalaxyToken
34
+
35
+from ansible.errors import AnsibleError
36
+from ansible.galaxy import get_collections_galaxy_meta_info
37
+from ansible.galaxy.dependency_resolution.dataclasses import _GALAXY_YAML
38
+from ansible.galaxy.user_agent import user_agent
39
+from ansible.module_utils._text import to_bytes, to_native, to_text
40
+from ansible.module_utils.six.moves.urllib.error import URLError
41
+from ansible.module_utils.six.moves.urllib.parse import urldefrag
42
+from ansible.module_utils.six import raise_from
43
+from ansible.module_utils.urls import open_url
44
+from ansible.utils.display import Display
45
+
46
+import yaml
47
+
48
+
49
+display = Display()
50
+
51
+
52
class ConcreteArtifactsManager:
    """Manager for on-disk collection artifacts.

    It is responsible for:
        * downloading remote collections from Galaxy-compatible servers and
          direct links to tarballs or SCM repositories
        * keeping track of local ones
        * keeping track of Galaxy API tokens for downloads from Galaxy'ish
          as well as the artifact hashes
        * caching all of above
        * retrieving the metadata out of the downloaded artifacts
    """

    def __init__(self, b_working_directory, validate_certs=True):
        # type: (bytes, bool) -> None
        """Initialize ConcreteArtifactsManager caches and constraints."""
        self._validate_certs = validate_certs  # type: bool
        # Maps a concrete artifact source to its on-disk location:
        self._artifact_cache = {}  # type: Dict[bytes, bytes]
        # Maps a Galaxy-hosted collection to its downloaded tarball path:
        self._galaxy_artifact_cache = {}  # type: Dict[Union[Candidate, Requirement], bytes]
        # Maps an artifact source to the parsed collection metadata:
        self._artifact_meta_cache = {}  # type: Dict[bytes, Dict[str, Optional[Union[str, List[str], Dict[str, str]]]]]
        # Maps a Galaxy-hosted collection to (download URL, SHA256, token):
        self._galaxy_collection_cache = {}  # type: Dict[Union[Candidate, Requirement], Tuple[str, str, GalaxyToken]]
        self._b_working_directory = b_working_directory  # type: bytes

    def get_galaxy_artifact_path(self, collection):
        # type: (Union[Candidate, Requirement]) -> bytes
        """Given a Galaxy-stored collection, return a cached path.

        If it's not yet on disk, this method downloads the artifact first.

        :raises RuntimeError: if ``save_collection_source()`` was never
            called for this collection, so there is nowhere to fetch from.
        :raises AnsibleError: if the download fails.
        """
        try:
            return self._galaxy_artifact_cache[collection]
        except KeyError:
            pass

        try:
            url, sha256_hash, token = self._galaxy_collection_cache[collection]
        except KeyError as key_err:
            raise_from(
                RuntimeError(
                    'There is no known source for {coll!s}'.
                    format(coll=collection),
                ),
                key_err,
            )

        display.vvvv(
            "Fetching a collection tarball for '{collection!s}' from "
            'Ansible Galaxy'.format(collection=collection),
        )

        try:
            b_artifact_path = _download_file(
                url,
                self._b_working_directory,
                expected_hash=sha256_hash,
                validate_certs=self._validate_certs,
                token=token,
            )  # type: bytes
        except URLError as err:
            raise_from(
                AnsibleError(
                    'Failed to download collection tar '
                    "from '{coll_src!s}': {download_err!s}".
                    format(
                        coll_src=to_native(collection.src),
                        download_err=to_native(err),
                    ),
                ),
                err,
            )
        else:
            display.vvv(
                "Collection '{coll!s}' obtained from "
                'server {server!s} {url!s}'.format(
                    coll=collection, server=collection.src or 'Galaxy',
                    url=collection.src.api_server if collection.src is not None
                    else '',
                )
            )

        self._galaxy_artifact_cache[collection] = b_artifact_path
        return b_artifact_path

    def get_artifact_path(self, collection):
        # type: (Union[Candidate, Requirement]) -> bytes
        """Given a concrete collection pointer, return a cached path.

        If it's not yet on disk, this method downloads the artifact first.
        """
        try:
            return self._artifact_cache[collection.src]
        except KeyError:
            pass

        # NOTE: SCM needs to be special-cased as it may contain either
        # NOTE: one collection in its root, or a number of top-level
        # NOTE: collection directories instead.
        # NOTE: The idea is to store the SCM collection as unpacked
        # NOTE: directory structure under the temporary location and use
        # NOTE: a "virtual" collection that has pinned requirements on
        # NOTE: the directories under that SCM checkout that correspond
        # NOTE: to collections.
        # NOTE: This brings us to the idea that we need two separate
        # NOTE: virtual Requirement/Candidate types --
        # NOTE: (single) dir + (multidir) subdirs
        if collection.is_url:
            display.vvvv(
                "Collection requirement '{collection!s}' is a URL "
                'to a tar artifact'.format(collection=collection.fqcn),
            )
            try:
                b_artifact_path = _download_file(
                    collection.src,
                    self._b_working_directory,
                    expected_hash=None,  # NOTE: URLs don't support checksums
                    validate_certs=self._validate_certs,
                )
            except URLError as err:
                raise_from(
                    AnsibleError(
                        'Failed to download collection tar '
                        "from '{coll_src!s}': {download_err!s}".
                        format(
                            coll_src=to_native(collection.src),
                            download_err=to_native(err),
                        ),
                    ),
                    err,
                )
        elif collection.is_scm:
            b_artifact_path = _extract_collection_from_git(
                collection.src,
                collection.ver,
                self._b_working_directory,
            )
        elif collection.is_file or collection.is_dir or collection.is_subdirs:
            # Already on disk — no fetching needed.
            b_artifact_path = to_bytes(collection.src)
        else:
            # NOTE: This may happen `if collection.is_online_index_pointer`
            raise RuntimeError(
                'The artifact is of an unexpected type {art_type!s}'.
                format(art_type=collection.type)
            )

        self._artifact_cache[collection.src] = b_artifact_path
        return b_artifact_path

    def _get_direct_collection_namespace(self, collection):
        # type: (Candidate) -> Optional[str]
        return self.get_direct_collection_meta(collection)['namespace']  # type: ignore[return-value]

    def _get_direct_collection_name(self, collection):
        # type: (Candidate) -> Optional[str]
        return self.get_direct_collection_meta(collection)['name']  # type: ignore[return-value]

    def get_direct_collection_fqcn(self, collection):
        # type: (Candidate) -> Optional[str]
        """Extract FQCN from the given on-disk collection artifact.

        If the collection is virtual, ``None`` is returned instead
        of a string.
        """
        if collection.is_virtual:
            # NOTE: should it be something like "<virtual>"?
            return None

        return '.'.join((  # type: ignore[type-var]
            self._get_direct_collection_namespace(collection),  # type: ignore[arg-type]
            self._get_direct_collection_name(collection),
        ))

    def get_direct_collection_version(self, collection):
        # type: (Union[Candidate, Requirement]) -> str
        """Extract version from the given on-disk collection artifact."""
        return self.get_direct_collection_meta(collection)['version']  # type: ignore[return-value]

    def get_direct_collection_dependencies(self, collection):
        # type: (Union[Candidate, Requirement]) -> Dict[str, str]
        """Extract deps from the given on-disk collection artifact."""
        return self.get_direct_collection_meta(collection)['dependencies']  # type: ignore[return-value]

    def get_direct_collection_meta(self, collection):
        # type: (Union[Candidate, Requirement]) -> Dict[str, Optional[Union[str, Dict[str, str], List[str]]]]
        """Extract meta from the given on-disk collection artifact."""
        try:  # FIXME: use unique collection identifier as a cache key?
            return self._artifact_meta_cache[collection.src]
        except KeyError:
            b_artifact_path = self.get_artifact_path(collection)

        if collection.is_url or collection.is_file:
            collection_meta = _get_meta_from_tar(b_artifact_path)
        elif collection.is_dir:  # should we just build a coll instead?
            # FIXME: what if there's subdirs?
            try:
                collection_meta = _get_meta_from_dir(b_artifact_path)
            except LookupError as lookup_err:
                raise_from(
                    AnsibleError(
                        'Failed to find the collection dir deps: {err!s}'.
                        format(err=to_native(lookup_err)),
                    ),
                    lookup_err,
                )
        elif collection.is_scm:
            # Virtual meta: the SCM checkout itself becomes a wildcard
            # dependency so the contained collection(s) get resolved.
            collection_meta = {
                'name': None,
                'namespace': None,
                'dependencies': {to_native(b_artifact_path): '*'},
                'version': '*',
            }
        elif collection.is_subdirs:
            collection_meta = {
                'name': None,
                'namespace': None,
                # NOTE: Dropping b_artifact_path since it's based on src anyway
                'dependencies': dict.fromkeys(
                    map(to_native, collection.namespace_collection_paths),
                    '*',
                ),
                'version': '*',
            }
        else:
            raise RuntimeError

        self._artifact_meta_cache[collection.src] = collection_meta
        return collection_meta

    def save_collection_source(self, collection, url, sha256_hash, token):
        # type: (Candidate, str, str, GalaxyToken) -> None
        """Store collection URL, SHA256 hash and Galaxy API token.

        This is a hook that is supposed to be called before attempting to
        download Galaxy-based collections with ``get_galaxy_artifact_path()``.
        """
        self._galaxy_collection_cache[collection] = url, sha256_hash, token

    @classmethod
    @contextmanager
    def under_tmpdir(
            cls,  # type: Type[ConcreteArtifactsManager]
            temp_dir_base,  # type: str
            validate_certs=True,  # type: bool
    ):  # type: (...) -> Iterator[ConcreteArtifactsManager]
        """Custom ConcreteArtifactsManager constructor with temp dir.

        This method returns a context manager that allocates and cleans
        up a temporary directory for caching the collection artifacts
        during the dependency resolution process.
        """
        # NOTE: Can't use `with tempfile.TemporaryDirectory:`
        # NOTE: because it's not in Python 2 stdlib.
        temp_path = mkdtemp(
            dir=to_bytes(temp_dir_base, errors='surrogate_or_strict'),
        )
        b_temp_path = to_bytes(temp_path, errors='surrogate_or_strict')
        try:
            yield cls(b_temp_path, validate_certs)
        finally:
            rmtree(b_temp_path)
311
+
312
+
313
def parse_scm(collection, version):
    """Split an SCM pointer into its (name, version, path, subdir) parts."""
    # An explicit ",<version>" suffix embedded in the pointer wins over
    # the requested version; otherwise a wildcard/empty version means HEAD.
    if ',' in collection:
        collection, _sep, version = collection.partition(',')
    elif version == '*' or not version:
        version = 'HEAD'

    # Strip the optional "git+" scheme prefix.
    path = collection[len('git+'):] if collection.startswith('git+') else collection

    # The URL fragment (if any) names a subdirectory within the repo.
    path, fragment = urldefrag(path)
    fragment = fragment.strip(os.path.sep)

    if path.endswith(os.path.sep + '.git'):
        # Local checkout pointing at its `.git` dir: use the parent name.
        name = path.split(os.path.sep)[-2]
    elif '://' not in path and '@' not in path:
        # Plain local path: the whole path acts as the name.
        name = path
    else:
        # Remote URL: last path component, minus a `.git` suffix.
        tail = path.split('/')[-1]
        name = tail[:-len('.git')] if tail.endswith('.git') else tail

    return name, version, path, fragment
338
+
339
+
340
def _extract_collection_from_git(repo_url, coll_ver, b_path):
    # type: (str, str, bytes) -> bytes
    """Clone a Git repo under ``b_path`` and check out the requested ref.

    :returns: the path to the collection root within the checkout — the
        checkout directory itself, or the subdirectory named by the URL
        fragment if one was given.
    :raises AnsibleError: if cloning or checking out the revision fails.
    """
    name, version, git_url, fragment = parse_scm(repo_url, coll_ver)
    # Unique per-clone directory so parallel/repeated clones don't clash.
    b_checkout_path = mkdtemp(
        dir=b_path,
        prefix=to_bytes(name, errors='surrogate_or_strict'),
    )  # type: bytes
    git_clone_cmd = 'git', 'clone', git_url, to_text(b_checkout_path)
    # FIXME: '--depth', '1', '--branch', version
    try:
        subprocess.check_call(git_clone_cmd)
    except subprocess.CalledProcessError as proc_err:
        raise_from(
            AnsibleError(  # should probably be LookupError
                'Failed to clone a Git repository from `{repo_url!s}`.'.
                format(repo_url=to_native(git_url)),
            ),
            proc_err,
        )

    # A separate `git checkout` (rather than `--branch` on clone)
    # also accepts arbitrary commit-ish refs, not just branch/tag names.
    git_switch_cmd = 'git', 'checkout', to_text(version)
    try:
        subprocess.check_call(git_switch_cmd, cwd=b_checkout_path)
    except subprocess.CalledProcessError as proc_err:
        raise_from(
            AnsibleError(  # should probably be LookupError
                'Failed to switch a cloned Git repo `{repo_url!s}` '
                'to the requested revision `{commitish!s}`.'.
                format(
                    commitish=to_native(version),
                    repo_url=to_native(git_url),
                ),
            ),
            proc_err,
        )

    return (
        os.path.join(b_checkout_path, to_bytes(fragment))
        if fragment else b_checkout_path
    )
379
+
380
+
381
+# FIXME: use random subdirs while preserving the file names
382
def _download_file(url, b_path, expected_hash, validate_certs, token=None):
    # type: (str, bytes, Optional[str], bool, GalaxyToken) -> bytes
    # ^ NOTE: used in download and verify_collections ^
    """Download ``url`` into a unique subdirectory of ``b_path``.

    Streams the response to disk while computing its SHA256 digest, and
    verifies the digest against ``expected_hash`` when one is given.

    :returns: the on-disk path of the downloaded file.
    :raises AnsibleError: when the downloaded content's hash does not
        match ``expected_hash``.
    """
    b_tarball_name = to_bytes(
        url.rsplit('/', 1)[1], errors='surrogate_or_strict',
    )
    # assumes the URL's last component ends in `.tar.gz`; the subdir
    # prefix below is derived by chopping that suffix — TODO confirm
    # behavior for URLs that don't follow this shape
    b_file_name = b_tarball_name[:-len('.tar.gz')]

    b_tarball_dir = mkdtemp(
        dir=b_path,
        prefix=b'-'.join((b_file_name, b'')),
    )  # type: bytes

    b_file_path = os.path.join(b_tarball_dir, b_tarball_name)

    display.display("Downloading %s to %s" % (url, to_text(b_tarball_dir)))
    # NOTE: Galaxy redirects downloads to S3 which rejects the request
    # NOTE: if an Authorization header is attached so don't redirect it
    resp = open_url(
        to_native(url, errors='surrogate_or_strict'),
        validate_certs=validate_certs,
        headers=None if token is None else token.headers(),
        unredirected_headers=['Authorization'], http_agent=user_agent(),
    )

    # Single pass: write to disk and hash simultaneously.
    with open(b_file_path, 'wb') as download_file:  # type: BinaryIO
        actual_hash = _consume_file(resp, write_to=download_file)

    if expected_hash:
        display.vvvv(
            'Validating downloaded file hash {actual_hash!s} with '
            'expected hash {expected_hash!s}'.
            format(actual_hash=actual_hash, expected_hash=expected_hash)
        )
        if expected_hash != actual_hash:
            raise AnsibleError('Mismatch artifact hash with downloaded file')

    return b_file_path
419
+    return b_file_path
420
+
421
+
422
def _consume_file(read_from, write_to=None):
    # type: (BinaryIO, BinaryIO) -> str
    """Drain *read_from* in chunks, optionally copying into *write_to*.

    :returns: the SHA256 hex digest of everything that was read.
    """
    chunk_size = 65536
    digest = sha256()
    # iter() with a sentinel keeps reading fixed-size chunks until EOF
    # (an empty bytes object) without needing a priming read.
    for chunk in iter(lambda: read_from.read(chunk_size), b''):
        if write_to is not None:
            write_to.write(chunk)
            write_to.flush()
        digest.update(chunk)

    return digest.hexdigest()
435
+
436
+
437
def _normalize_galaxy_yml_manifest(
        galaxy_yml,  # type: Dict[str, Optional[Union[str, List[str], Dict[str, str]]]]
        b_galaxy_yml_path,  # type: bytes
):
    # type: (...) -> Dict[str, Optional[Union[str, List[str], Dict[str, str]]]]
    """Validate a parsed ``galaxy.yml`` against the schema and fill defaults.

    Mutates and returns the passed-in mapping: missing optional keys get
    type-appropriate defaults, bare scalars in list-typed keys get
    wrapped, and a missing/null version becomes the ``'*'`` wildcard.

    :raises AnsibleError: when mandatory schema keys are missing.
    """
    galaxy_yml_schema = (
        get_collections_galaxy_meta_info()
    )  # type: List[Dict[str, Any]]  # FIXME: <--
    # FIXME: 👆maybe precise type: List[Dict[str, Union[bool, str, List[str]]]]

    mandatory_keys = set()
    string_keys = set()  # type: Set[str]
    list_keys = set()  # type: Set[str]
    dict_keys = set()  # type: Set[str]

    # Bucket every schema key by its declared type (default: str).
    for info in galaxy_yml_schema:
        if info.get('required', False):
            mandatory_keys.add(info['key'])

        key_list_type = {
            'str': string_keys,
            'list': list_keys,
            'dict': dict_keys,
        }[info.get('type', 'str')]
        key_list_type.add(info['key'])

    all_keys = frozenset(list(mandatory_keys) + list(string_keys) + list(list_keys) + list(dict_keys))

    set_keys = set(galaxy_yml.keys())
    missing_keys = mandatory_keys.difference(set_keys)
    if missing_keys:
        raise AnsibleError("The collection galaxy.yml at '%s' is missing the following mandatory keys: %s"
                           % (to_native(b_galaxy_yml_path), ", ".join(sorted(missing_keys))))

    # Unknown keys are tolerated (forward compatibility) but flagged.
    extra_keys = set_keys.difference(all_keys)
    if len(extra_keys) > 0:
        display.warning("Found unknown keys in collection galaxy.yml at '%s': %s"
                        % (to_text(b_galaxy_yml_path), ", ".join(extra_keys)))

    # Add the defaults if they have not been set
    for optional_string in string_keys:
        if optional_string not in galaxy_yml:
            galaxy_yml[optional_string] = None

    for optional_list in list_keys:
        list_val = galaxy_yml.get(optional_list, None)

        if list_val is None:
            galaxy_yml[optional_list] = []
        elif not isinstance(list_val, list):
            # Promote a bare scalar to a single-item list.
            galaxy_yml[optional_list] = [list_val]  # type: ignore[list-item]

    for optional_dict in dict_keys:
        if optional_dict not in galaxy_yml:
            galaxy_yml[optional_dict] = {}

    # NOTE: `version: null` is only allowed for `galaxy.yml`
    # NOTE: and not `MANIFEST.json`. The use-case for it is collections
    # NOTE: that generate the version from Git before building a
    # NOTE: distributable tarball artifact.
    if not galaxy_yml.get('version'):
        galaxy_yml['version'] = '*'

    return galaxy_yml
501
+
502
+
503
def _get_meta_from_dir(
        b_path,  # type: bytes
):  # type: (...) -> Dict[str, Optional[Union[str, List[str], Dict[str, str]]]]
    """Load collection metadata from an on-disk directory.

    Prefer the installed-collection layout (``MANIFEST.json``); fall
    back to the source layout (``galaxy.yml``) when no manifest exists.
    """
    try:
        meta = _get_meta_from_installed_dir(b_path)
    except LookupError:
        meta = _get_meta_from_src_dir(b_path)
    return meta
510
+
511
+
512
def _get_meta_from_src_dir(
        b_path,  # type: bytes
):  # type: (...) -> Dict[str, Optional[Union[str, List[str], Dict[str, str]]]]
    """Read collection metadata from a source checkout's ``galaxy.yml``.

    :raises LookupError: if the file is absent — lets callers fall back
        to other directory layouts.
    :raises AnsibleError: if the file cannot be parsed as YAML.
    """
    galaxy_yml = os.path.join(b_path, _GALAXY_YAML)
    if not os.path.isfile(galaxy_yml):
        raise LookupError(
            "The collection galaxy.yml path '{path!s}' does not exist.".
            format(path=to_native(galaxy_yml))
        )

    with open(galaxy_yml, 'rb') as manifest_file_obj:
        try:
            manifest = yaml.safe_load(manifest_file_obj)
        except yaml.error.YAMLError as yaml_err:
            raise_from(
                AnsibleError(
                    "Failed to parse the galaxy.yml at '{path!s}' with "
                    'the following error:\n{err_txt!s}'.
                    format(
                        path=to_native(galaxy_yml),
                        err_txt=to_native(yaml_err),
                    ),
                ),
                yaml_err,
            )

    return _normalize_galaxy_yml_manifest(manifest, galaxy_yml)
539
+
540
+
541
def _get_meta_from_installed_dir(
        b_path,  # type: bytes
):  # type: (...) -> Dict[str, Optional[Union[str, List[str], Dict[str, str]]]]
    """Read collection metadata from an installed collection directory.

    Loads ``MANIFEST.json`` from ``b_path`` and returns its
    ``collection_info`` mapping.

    :raises LookupError: if ``MANIFEST.json`` is missing — lets callers
        fall back to the source-dir (``galaxy.yml``) layout.
    :raises AnsibleError: if the manifest is not valid JSON or carries
        no usable version value.
    """
    n_manifest_json = 'MANIFEST.json'
    b_manifest_json = to_bytes(n_manifest_json)
    b_manifest_json_path = os.path.join(b_path, b_manifest_json)

    try:
        with open(b_manifest_json_path, 'rb') as manifest_fd:
            b_manifest_txt = manifest_fd.read()
    except (IOError, OSError):
        raise LookupError(
            "The collection {manifest!s} path '{path!s}' does not exist.".
            format(
                manifest=n_manifest_json,
                path=to_native(b_manifest_json_path),
            )
        )

    manifest_txt = to_text(b_manifest_txt, errors='surrogate_or_strict')

    try:
        manifest = json.loads(manifest_txt)
    except ValueError:
        # This code path reads a file from a directory, so the error
        # message references the file path, not a tar member.
        raise AnsibleError(
            "Collection metadata file at '{path!s}' does not "
            'contain a valid json string.'.
            format(path=to_native(b_manifest_json_path)),
        )
    else:
        collection_info = manifest['collection_info']

    version = collection_info.get('version')
    if not version:
        raise AnsibleError(
            u'Collection metadata file at `{meta_file!s}` is expected '
            u'to have a valid SemVer version value but got {version!s}'.
            format(
                meta_file=to_text(b_manifest_json_path),
                version=to_text(repr(version)),
            ),
        )

    return collection_info
585
+
586
+
587
def _get_meta_from_tar(
        b_path,  # type: bytes
):  # type: (...) -> Dict[str, Optional[Union[str, List[str], Dict[str, str]]]]
    """Read ``collection_info`` metadata out of a collection tarball.

    :raises AnsibleError: if ``b_path`` is not a tar file, lacks a
        ``MANIFEST.json`` member, or the manifest is not valid JSON.
    """
    if not tarfile.is_tarfile(b_path):
        raise AnsibleError(
            "Collection artifact at '{path!s}' is not a valid tar file.".
            format(path=to_native(b_path)),
        )

    n_manifest_json = 'MANIFEST.json'

    with tarfile.open(b_path, mode='r') as collection_tar:  # type: tarfile.TarFile
        try:
            member = collection_tar.getmember(n_manifest_json)
        except KeyError:
            raise AnsibleError(
                "Collection at '{path!s}' does not contain the "
                'required file {manifest_file!s}.'.
                format(
                    path=to_native(b_path),
                    manifest_file=n_manifest_json,
                ),
            )

        with _tarfile_extract(collection_tar, member) as (_member, member_obj):
            # extractfile() yields None for non-regular members (e.g.
            # directories); treat that as a missing manifest.
            if member_obj is None:
                raise AnsibleError(
                    'Collection tar file does not contain '
                    'member {member!s}'.format(member=n_manifest_json),
                )

            text_content = to_text(
                member_obj.read(),
                errors='surrogate_or_strict',
            )

            try:
                manifest = json.loads(text_content)
            except ValueError:
                raise AnsibleError(
                    'Collection tar file member {member!s} does not '
                    'contain a valid json string.'.
                    format(member=n_manifest_json),
                )
            return manifest['collection_info']
632
+
633
+
634
@contextmanager
def _tarfile_extract(
        tar,  # type: tarfile.TarFile
        member,  # type: tarfile.TarInfo
):
    # type: (...) -> Iterator[Tuple[tarfile.TarInfo, Optional[IO[bytes]]]]
    """Yield ``(member, file_object)`` for a tar member, closing it after.

    The file object is ``None`` when the member is not a regular file,
    per ``TarFile.extractfile()`` semantics.
    """
    tar_obj = tar.extractfile(member)
    try:
        yield member, tar_obj
    finally:
        if tar_obj is not None:
            tar_obj.close()
0 646
new file mode 100644
... ...
@@ -0,0 +1,107 @@
0
+# -*- coding: utf-8 -*-
1
+# Copyright: (c) 2020-2021, Ansible Project
2
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
3
+"""A facade for interfacing with multiple Galaxy instances."""
4
+
5
+from __future__ import (absolute_import, division, print_function)
6
+__metaclass__ = type
7
+
8
+import os
9
+
10
+try:
11
+    from typing import TYPE_CHECKING
12
+except ImportError:
13
+    TYPE_CHECKING = False
14
+
15
+if TYPE_CHECKING:
16
+    from typing import Dict, Iterable, Tuple
17
+    from ansible.galaxy.api import CollectionVersionMetadata
18
+    from ansible.galaxy.collection.concrete_artifact_manager import (
19
+        ConcreteArtifactsManager,
20
+    )
21
+    from ansible.galaxy.dependency_resolution.dataclasses import (
22
+        Candidate, Requirement,
23
+    )
24
+
25
+from ansible.galaxy.api import GalaxyAPI, GalaxyError
26
+
27
+
28
class MultiGalaxyAPIProxy:
    """A proxy that abstracts talking to multiple Galaxy instances."""

    def __init__(self, apis, concrete_artifacts_manager):
        # type: (Iterable[GalaxyAPI], ConcreteArtifactsManager) -> None
        """Initialize the target APIs list."""
        self._apis = apis
        self._concrete_art_mgr = concrete_artifacts_manager

    def get_collection_versions(self, requirement):
        # type: (Requirement) -> Iterable[Tuple[str, GalaxyAPI]]
        """Get a set of unique versions for FQCN on Galaxy servers."""
        if requirement.is_concrete_artifact:
            # A concrete artifact (tarball/dir/SCM) has exactly one
            # version -- the one recorded in its own metadata.
            return {
                (
                    self._concrete_art_mgr.
                    get_direct_collection_version(requirement),
                    requirement.src,
                ),
            }

        # A requirement pinned to a single server is only looked up
        # there; otherwise every configured API is consulted.
        api_lookup_order = (
            (requirement.src, )
            if isinstance(requirement.src, GalaxyAPI)
            else self._apis
        )
        return set(
            (version, api)
            for api in api_lookup_order
            for version in api.get_collection_versions(
                requirement.namespace, requirement.name,
            )
        )

    def get_collection_version_metadata(self, collection_candidate):
        # type: (Candidate) -> CollectionVersionMetadata
        """Retrieve collection metadata of a given candidate.

        Tries the candidate's pinned server first (if any), otherwise
        every configured API in order; re-raises the last API error if
        all of them fail.
        """
        api_lookup_order = (
            (collection_candidate.src, )
            if isinstance(collection_candidate.src, GalaxyAPI)
            else self._apis
        )
        # Pre-bind so the final `raise` below can never hit an
        # unbound local when `api_lookup_order` turns out empty.
        last_err = None
        for api in api_lookup_order:
            try:
                version_metadata = api.get_collection_version_metadata(
                    collection_candidate.namespace,
                    collection_candidate.name,
                    collection_candidate.ver,
                )
            except GalaxyError as api_err:
                last_err = api_err
            else:
                # Remember where this exact artifact can be fetched from
                # so the artifacts manager can download it later.
                self._concrete_art_mgr.save_collection_source(
                    collection_candidate,
                    version_metadata.download_url,
                    version_metadata.artifact_sha256,
                    api.token,
                )
                return version_metadata

        if last_err is None:
            # No API was even tried, so there is no error to re-raise.
            raise RuntimeError(
                'There were no Galaxy APIs to query for {coll!s}'.
                format(coll=collection_candidate),
            )
        raise last_err

    def get_collection_dependencies(self, collection_candidate):
        # type: (Candidate) -> Dict[str, str]
        # FIXME: return Requirement instances instead?
        """Retrieve collection dependencies of a given candidate."""
        if collection_candidate.is_concrete_artifact:
            return (
                self.
                _concrete_art_mgr.
                get_direct_collection_dependencies
            )(collection_candidate)

        return (
            self.
            get_collection_version_metadata(collection_candidate).
            dependencies
        )
... ...
@@ -1,7 +1,49 @@
1 1
 # -*- coding: utf-8 -*-
2
-# Copyright: (c) 2020, Ansible Project
2
+# Copyright: (c) 2020-2021, Ansible Project
3 3
 # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
4 4
 """Dependency resolution machinery."""
5 5
 
6 6
 from __future__ import (absolute_import, division, print_function)
7 7
 __metaclass__ = type
8
+
9
+try:
10
+    from typing import TYPE_CHECKING
11
+except ImportError:
12
+    TYPE_CHECKING = False
13
+
14
+if TYPE_CHECKING:
15
+    from typing import Iterable
16
+    from ansible.galaxy.api import GalaxyAPI
17
+    from ansible.galaxy.collection.concrete_artifact_manager import (
18
+        ConcreteArtifactsManager,
19
+    )
20
+    from ansible.galaxy.dependency_resolution.dataclasses import Candidate
21
+
22
+from ansible.galaxy.collection.galaxy_api_proxy import MultiGalaxyAPIProxy
23
+from ansible.galaxy.dependency_resolution.providers import CollectionDependencyProvider
24
+from ansible.galaxy.dependency_resolution.reporters import CollectionDependencyReporter
25
+from ansible.galaxy.dependency_resolution.resolvers import CollectionDependencyResolver
26
+
27
+
28
def build_collection_dependency_resolver(
        galaxy_apis,  # type: Iterable[GalaxyAPI]
        concrete_artifacts_manager,  # type: ConcreteArtifactsManager
        preferred_candidates=None,  # type: Iterable[Candidate]
        with_deps=True,  # type: bool
        with_pre_releases=False,  # type: bool
):  # type: (...) -> CollectionDependencyResolver
    """Return a collection dependency resolver.

    The returned instance will have a ``resolve()`` method for
    further consumption.
    """
    # Hide all the configured Galaxy servers behind one facade so the
    # provider can treat them uniformly.
    api_proxy = MultiGalaxyAPIProxy(galaxy_apis, concrete_artifacts_manager)

    dependency_provider = CollectionDependencyProvider(
        apis=api_proxy,
        concrete_artifacts_manager=concrete_artifacts_manager,
        preferred_candidates=preferred_candidates,
        with_deps=with_deps,
        with_pre_releases=with_pre_releases,
    )
    dependency_reporter = CollectionDependencyReporter()

    return CollectionDependencyResolver(
        dependency_provider,
        dependency_reporter,
    )
8 50
new file mode 100644
... ...
@@ -0,0 +1,435 @@
0
+# -*- coding: utf-8 -*-
1
+# Copyright: (c) 2020-2021, Ansible Project
2
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
3
+"""Dependency structs."""
4
+# FIXME: add caching all over the place
5
+
6
+from __future__ import (absolute_import, division, print_function)
7
+__metaclass__ = type
8
+
9
+import json
10
+import os
11
+from collections import namedtuple
12
+from glob import iglob
13
+from keyword import iskeyword  # used in _is_fqcn
14
+
15
+try:
16
+    from typing import TYPE_CHECKING
17
+except ImportError:
18
+    TYPE_CHECKING = False
19
+
20
+if TYPE_CHECKING:
21
+    from typing import Tuple, Type, TypeVar
22
+    from ansible.galaxy.collection.concrete_artifact_manager import (
23
+        ConcreteArtifactsManager,
24
+    )
25
+    Collection = TypeVar(
26
+        'Collection',
27
+        'Candidate', 'Requirement',
28
+        '_ComputedReqKindsMixin',
29
+    )
30
+
31
+import yaml
32
+
33
+from ansible.errors import AnsibleError
34
+from ansible.galaxy.api import GalaxyAPI
35
+from ansible.module_utils._text import to_bytes, to_native, to_text
36
+from ansible.module_utils.six.moves.urllib.parse import urlparse
37
+from ansible.module_utils.six import raise_from
38
+from ansible.utils.display import Display
39
+
40
+
41
# NOTE: Portable predicate telling whether a string is a valid Python
# NOTE: identifier; used by `_is_fqcn` to validate FQCN components.
try:  # NOTE: py3/py2 compat
    # FIXME: put somewhere into compat
    # py2 mypy can't deal with try/excepts
    _is_py_id = str.isidentifier  # type: ignore[attr-defined]
except AttributeError:  # Python 2
    # FIXME: port this to AnsibleCollectionRef.is_valid_collection_name
    from re import match as _match_pattern
    from tokenize import Name as _VALID_IDENTIFIER_REGEX
    # Anchor the identifier regex so the whole string must match.
    _valid_identifier_string_regex = ''.join((_VALID_IDENTIFIER_REGEX, r'\Z'))

    def _is_py_id(tested_str):
        # Ref: https://stackoverflow.com/a/55802320/595220
        return bool(_match_pattern(_valid_identifier_string_regex, tested_str))
54
+
55
+
56
_ALLOW_CONCRETE_POINTER_IN_SOURCE = False  # NOTE: This is a feature flag
_GALAXY_YAML = b'galaxy.yml'  # marker file of a source collection dir
_MANIFEST_JSON = b'MANIFEST.json'  # marker file of an installed collection dir


display = Display()
62
+
63
+
64
def _is_collection_src_dir(dir_path):
    """Return True if ``dir_path`` contains a ``galaxy.yml`` source tree."""
    b_galaxy_yml_path = os.path.join(
        to_bytes(dir_path, errors='surrogate_or_strict'),
        _GALAXY_YAML,
    )
    return os.path.isfile(b_galaxy_yml_path)
67
+
68
+
69
def _is_installed_collection_dir(dir_path):
    """Return True if ``dir_path`` holds an installed collection."""
    b_manifest_path = os.path.join(
        to_bytes(dir_path, errors='surrogate_or_strict'),
        _MANIFEST_JSON,
    )
    return os.path.isfile(b_manifest_path)
72
+
73
+
74
def _is_collection_dir(dir_path):
    """Return True for installed as well as source collection dirs."""
    # NOTE: short-circuits on the first matching marker file.
    if _is_installed_collection_dir(dir_path):
        return True
    return _is_collection_src_dir(dir_path)
79
+
80
+
81
def _find_collections_in_subdirs(dir_path):
    """Return an iterator over collection dirs right beneath ``dir_path``."""
    glob_pattern = os.path.join(
        to_bytes(dir_path, errors='surrogate_or_strict'),
        # NOTE: the namespace is supposed to be the top-level dir itself
        # NOTE: per spec, hence a single `*` matching only the
        # NOTE: collection-name level below it.
        b'*',
        _GALAXY_YAML,
    )
    return (
        os.path.dirname(b_galaxy_yml_path)
        for b_galaxy_yml_path in iglob(glob_pattern)
    )
93
+
94
+
95
def _is_collection_namespace_dir(tested_str):
    """Return True if the dir holds at least one collection source dir."""
    for _b_collection_dir in _find_collections_in_subdirs(tested_str):
        return True
    return False
97
+
98
+
99
def _is_file_path(tested_str):
    """Return True if the string points at an existing regular file."""
    b_tested_path = to_bytes(tested_str, errors='surrogate_or_strict')
    return os.path.isfile(b_tested_path)
101
+
102
+
103
+def _is_http_url(tested_str):
104
+    return urlparse(tested_str).scheme.lower() in {'http', 'https'}
105
+
106
+
107
+def _is_git_url(tested_str):
108
+    return tested_str.startswith(('git+', 'git@'))
109
+
110
+
111
def _is_concrete_artifact_pointer(tested_str):
    """Return True if the string points at a concrete (non-index) artifact."""
    # NOTE: Maintain the checks to be sorted from light to heavy:
    concrete_pointer_checks = (
        _is_git_url,
        _is_http_url,
        _is_file_path,
        _is_collection_dir,
        _is_collection_namespace_dir,
    )
    return any(check(tested_str) for check in concrete_pointer_checks)
123
+
124
+
125
def _is_fqcn(tested_str):
    """Return True if the string looks like ``<namespace>.<name>``."""
    # FIXME: port this to AnsibleCollectionRef.is_valid_collection_name
    fqcn_parts = tested_str.split('.')
    if len(fqcn_parts) != 2:
        return False

    # FIXME: keywords and identifiers differ between Python versions
    for ns_or_name in fqcn_parts:
        if iskeyword(ns_or_name) or not _is_py_id(ns_or_name):
            return False

    return True
135
+
136
+
137
class _ComputedReqKindsMixin:
    # Shared behavior for the ``Requirement`` and ``Candidate``
    # namedtuples below; expects the host tuple to expose the
    # ``fqcn``, ``ver``, ``src`` and ``type`` fields.

    @classmethod
    def from_dir_path_as_unknown(  # type: ignore[misc]
            cls,  # type: Type[Collection]
            dir_path,  # type: bytes
            art_mgr,  # type: ConcreteArtifactsManager
    ):  # type: (...)  -> Collection
        """Make collection from an unspecified dir type.

        This alternative constructor attempts to grab metadata from the
        given path if it's a directory. If there's no metadata, it
        falls back to guessing the FQCN based on the directory path and
        sets the version to "*".

        It raises a ValueError immediately if the input is not an
        existing directory path.
        """
        if not os.path.isdir(dir_path):
            raise ValueError(
                "The collection directory '{path!s}' doesn't exist".
                format(path=to_native(dir_path)),
            )

        try:
            return cls.from_dir_path(dir_path, art_mgr)
        except ValueError:
            # No usable metadata files found — guess from the path instead.
            return cls.from_dir_path_implicit(dir_path)

    @classmethod
    def from_dir_path(cls, dir_path, art_mgr):
        """Make collection from a directory with metadata."""
        b_dir_path = to_bytes(dir_path, errors='surrogate_or_strict')
        if not _is_collection_dir(b_dir_path):
            display.warning(
                u"Collection at '{path!s}' does not have a {manifest_json!s} "
                u'file, nor has it {galaxy_yml!s}: cannot detect version.'.
                format(
                    galaxy_yml=to_text(_GALAXY_YAML),
                    manifest_json=to_text(_MANIFEST_JSON),
                    path=to_text(dir_path, errors='surrogate_or_strict'),
                ),
            )
            raise ValueError(
                '`dir_path` argument must be an installed or a source'
                ' collection directory.',
            )

        # Let the artifacts manager extract the FQCN and version out of
        # the on-disk metadata (MANIFEST.json or galaxy.yml).
        tmp_inst_req = cls(None, None, dir_path, 'dir')
        req_name = art_mgr.get_direct_collection_fqcn(tmp_inst_req)
        req_version = art_mgr.get_direct_collection_version(tmp_inst_req)

        return cls(req_name, req_version, dir_path, 'dir')

    @classmethod
    def from_dir_path_implicit(  # type: ignore[misc]
            cls,  # type: Type[Collection]
            dir_path,  # type: bytes
    ):  # type: (...)  -> Collection
        """Construct a collection instance based on an arbitrary dir.

        This alternative constructor infers the FQCN based on the parent
        and current directory names. It also sets the version to "*"
        regardless of whether any of known metadata files are present.
        """
        # There is no metadata, but it isn't required for a functional collection. Determine the namespace.name from the path.
        u_dir_path = to_text(dir_path, errors='surrogate_or_strict')
        path_list = u_dir_path.split(os.path.sep)
        # The last two path components are assumed to be
        # `<namespace>/<name>`.
        req_name = '.'.join(path_list[-2:])
        return cls(req_name, '*', dir_path, 'dir')  # type: ignore[call-arg]

    @classmethod
    def from_string(cls, collection_input, artifacts_manager):
        """Construct a collection object out of a CLI string argument."""
        req = {}
        if _is_concrete_artifact_pointer(collection_input):
            # Arg is a file path or URL to a collection
            req['name'] = collection_input
        else:
            # Arg is `<fqcn>[:<version specifier>]`
            req['name'], _sep, req['version'] = collection_input.partition(':')
            if not req['version']:
                del req['version']

        return cls.from_requirement_dict(req, artifacts_manager)

    @classmethod
    def from_requirement_dict(cls, collection_req, art_mgr):
        """Construct a collection object out of a requirements-file dict."""
        req_name = collection_req.get('name', None)
        req_version = collection_req.get('version', '*')
        req_type = collection_req.get('type')
        # TODO: decide how to deprecate the old src API behavior
        req_source = collection_req.get('source', None)

        # When no explicit type is given, classify the request based on
        # what the `name`/`source` values look like.
        if req_type is None:
            if (  # FIXME: decide on the future behavior:
                    _ALLOW_CONCRETE_POINTER_IN_SOURCE
                    and req_source is not None
                    and _is_concrete_artifact_pointer(req_source)
            ):
                src_path = req_source
            elif req_name is not None and _is_fqcn(req_name):
                req_type = 'galaxy'
            elif (
                    req_name is not None
                    and _is_concrete_artifact_pointer(req_name)
            ):
                src_path, req_name = req_name, None
            else:
                dir_tip_tmpl = (  # NOTE: leading LFs are for concat
                    '\n\nTip: Make sure you are pointing to the right '
                    'subdirectory — `{src!s}` looks like a directory '
                    'but it is neither a collection, nor a namespace '
                    'dir.'
                )

                if req_source is not None and os.path.isdir(req_source):
                    tip = dir_tip_tmpl.format(src=req_source)
                elif req_name is not None and os.path.isdir(req_name):
                    tip = dir_tip_tmpl.format(src=req_name)
                elif req_name:
                    tip = '\n\nCould not find {0}.'.format(req_name)
                else:
                    tip = ''

                raise AnsibleError(  # NOTE: I'd prefer a ValueError instead
                    'Neither the collection requirement entry key '
                    "'name', nor 'source' point to a concrete "
                    "resolvable collection artifact. Also 'name' is "
                    'not an FQCN. A valid collection name must be in '
                    'the format <namespace>.<collection>. Please make '
                    'sure that the namespace and the collection name '
                    ' contain characters from [a-zA-Z0-9_] only.'
                    '{extra_tip!s}'.format(extra_tip=tip),
                )

        # NOTE: `src_path` is only bound when the branch above didn't
        # NOTE: assign `req_type`; refine the concrete-artifact type here.
        if req_type is None:
            if _is_git_url(src_path):
                req_type = 'git'
                req_source = src_path
            elif _is_http_url(src_path):
                req_type = 'url'
                req_source = src_path
            elif _is_file_path(src_path):
                req_type = 'file'
                req_source = src_path
            elif _is_collection_dir(src_path):
                req_type = 'dir'
                req_source = src_path
            elif _is_collection_namespace_dir(src_path):
                req_name = None  # No name for a virtual req or "namespace."?
                req_type = 'subdirs'
                req_source = src_path
            else:
                raise AnsibleError(  # NOTE: this is never supposed to be hit
                    'Failed to automatically detect the collection '
                    'requirement type.',
                )

        if req_type not in {'file', 'galaxy', 'git', 'url', 'dir', 'subdirs'}:
            raise AnsibleError(
                "The collection requirement entry key 'type' must be "
                'one of file, galaxy, git, dir, subdirs, or url.'
            )

        if req_name is None and req_type == 'galaxy':
            raise AnsibleError(
                'Collections requirement entry should contain '
                "the key 'name' if it's requested from a Galaxy-like "
                'index server.',
            )

        # For concrete artifacts given only via `name`, treat the name
        # as the source pointer instead.
        if req_type != 'galaxy' and req_source is None:
            req_source, req_name = req_name, None

        if (
                req_type == 'galaxy' and
                isinstance(req_source, GalaxyAPI) and
                not _is_http_url(req_source.api_server)
        ):
            raise AnsibleError(
                "Collections requirement 'source' entry should contain "
                'a valid Galaxy API URL but it does not: {not_url!s} '
                'is not an HTTP URL.'.
                format(not_url=req_source.api_server),
            )

        tmp_inst_req = cls(req_name, req_version, req_source, req_type)

        # For concrete artifacts, fill in the still-unknown FQCN and/or
        # version by inspecting the artifact metadata itself.
        if req_type not in {'galaxy', 'subdirs'} and req_name is None:
            req_name = art_mgr.get_direct_collection_fqcn(tmp_inst_req)  # TODO: fix the cache key in artifacts manager?

        if req_type not in {'galaxy', 'subdirs'} and req_version == '*':
            req_version = art_mgr.get_direct_collection_version(tmp_inst_req)

        return cls(
            req_name, req_version,
            req_source, req_type,
        )

    def __repr__(self):
        return (
            '<{self!s} of type {coll_type!r} from {src!s}>'.
            format(self=self, coll_type=self.type, src=self.src or 'Galaxy')
        )

    def __str__(self):
        return to_native(self.__unicode__())

    def __unicode__(self):
        # Virtual requirements (Git repos/namespace dirs) have no FQCN.
        if self.fqcn is None:
            return (
                u'"virtual collection Git repo"' if self.is_scm
                else u'"virtual collection namespace"'
            )

        return (
            u'{fqcn!s}:{ver!s}'.
            format(fqcn=to_text(self.fqcn), ver=to_text(self.ver))
        )

    def _get_separate_ns_n_name(self):  # FIXME: use LRU cache
        # Split the FQCN into its `(namespace, name)` components.
        return self.fqcn.split('.')

    @property
    def namespace(self):
        """Namespace part of the FQCN; raises for virtual collections."""
        if self.is_virtual:
            raise TypeError('Virtual collections do not have a namespace')

        return self._get_separate_ns_n_name()[0]

    @property
    def name(self):
        """Name part of the FQCN; raises for virtual collections."""
        if self.is_virtual:
            raise TypeError('Virtual collections do not have a name')

        return self._get_separate_ns_n_name()[-1]

    @property
    def canonical_package_id(self):
        """Identifier used by the resolver to group reqs/candidates."""
        if not self.is_virtual:
            return to_native(self.fqcn)

        return (
            '<virtual namespace from {src!s} of type {src_type!s}>'.
            format(src=to_native(self.src), src_type=to_native(self.type))
        )

    @property
    def is_virtual(self):
        """Whether this is an ephemeral, non-installable placeholder."""
        return self.is_scm or self.is_subdirs

    @property
    def is_file(self):
        return self.type == 'file'

    @property
    def is_dir(self):
        return self.type == 'dir'

    @property
    def namespace_collection_paths(self):
        """Paths of collection source dirs under a namespace dir."""
        return [
            to_native(path)
            for path in _find_collections_in_subdirs(self.src)
        ]

    @property
    def is_subdirs(self):
        return self.type == 'subdirs'

    @property
    def is_url(self):
        return self.type == 'url'

    @property
    def is_scm(self):
        return self.type == 'git'

    @property
    def is_concrete_artifact(self):
        """Whether the collection is pointed at directly, not via Galaxy."""
        return self.type in {'git', 'url', 'file', 'dir', 'subdirs'}

    @property
    def is_online_index_pointer(self):
        """Whether the collection must be looked up on a Galaxy server."""
        return not self.is_concrete_artifact
421
+
422
+
423
class Requirement(
        _ComputedReqKindsMixin,
        namedtuple('Requirement', ('fqcn', 'ver', 'src', 'type')),
):
    """An abstract requirement request.

    Fields: ``fqcn`` (may be ``None`` for virtual requirements),
    ``ver`` (a version specifier string, ``'*'`` meaning any),
    ``src`` (path/URL/Galaxy API source) and ``type`` (one of
    ``file``/``galaxy``/``git``/``url``/``dir``/``subdirs``).
    """
428
+
429
+
430
class Candidate(
        _ComputedReqKindsMixin,
        namedtuple('Candidate', ('fqcn', 'ver', 'src', 'type'))
):
    """A concrete collection candidate with its version resolved.

    Same field layout as ``Requirement`` but ``ver`` is a concrete,
    pinned version rather than a specifier.
    """
0 435
new file mode 100644
... ...
@@ -0,0 +1,11 @@
0
+# -*- coding: utf-8 -*-
1
+# Copyright: (c) 2020-2021, Ansible Project
2
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
3
+"""Dependency resolution exceptions."""
4
+
5
+from __future__ import (absolute_import, division, print_function)
6
+__metaclass__ = type
7
+
8
+from resolvelib.resolvers import (
9
+    ResolutionImpossible as CollectionDependencyResolutionImpossible,
10
+)
0 11
new file mode 100644
... ...
@@ -0,0 +1,273 @@
0
+# -*- coding: utf-8 -*-
1
+# Copyright: (c) 2020-2021, Ansible Project
2
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
3
+"""Requirement provider interfaces."""
4
+
5
+from __future__ import (absolute_import, division, print_function)
6
+__metaclass__ = type
7
+
8
+import functools
9
+
10
+try:
11
+    from typing import TYPE_CHECKING
12
+except ImportError:
13
+    TYPE_CHECKING = False
14
+
15
+if TYPE_CHECKING:
16
+    from typing import Iterable, List, NamedTuple, Optional, Union
17
+    from ansible.galaxy.collection.concrete_artifact_manager import (
18
+        ConcreteArtifactsManager,
19
+    )
20
+    from ansible.galaxy.collection.galaxy_api_proxy import MultiGalaxyAPIProxy
21
+
22
+from ansible.galaxy.dependency_resolution.dataclasses import (
23
+    Candidate,
24
+    Requirement,
25
+)
26
+from ansible.galaxy.dependency_resolution.versioning import (
27
+    is_pre_release,
28
+    meets_requirements,
29
+)
30
+from ansible.utils.version import SemanticVersion
31
+
32
+from resolvelib import AbstractProvider
33
+
34
+
35
class CollectionDependencyProvider(AbstractProvider):
    """Delegate providing a requirement interface for the resolver."""

    def __init__(
            self,  # type: CollectionDependencyProvider
            apis,  # type: MultiGalaxyAPIProxy
            concrete_artifacts_manager=None,  # type: ConcreteArtifactsManager
            preferred_candidates=None,  # type: Iterable[Candidate]
            with_deps=True,  # type: bool
            with_pre_releases=False,  # type: bool
    ):  # type: (...) -> None
        r"""Initialize helper attributes.

        :param apis: An instance of the multiple Galaxy APIs wrapper.

        :param concrete_artifacts_manager: An instance of the caching \
                                           concrete artifacts manager.

        :param preferred_candidates: Candidates (e.g. already-installed \
                                     collections) to prefer over other \
                                     matching versions.

        :param with_deps: A flag specifying whether the resolver \
                          should attempt to pull-in the deps of the \
                          requested requirements. On by default.

        :param with_pre_releases: A flag specifying whether the \
                                  resolver should allow pre-releases. \
                                  Off by default.
        """
        self._api_proxy = apis
        # Pre-bind the artifacts manager so dependency dicts can later
        # be turned into ``Requirement`` instances with a single call.
        self._make_req_from_dict = functools.partial(
            Requirement.from_requirement_dict,
            art_mgr=concrete_artifacts_manager,
        )
        self._preferred_candidates = set(preferred_candidates or ())
        self._with_deps = with_deps
        self._with_pre_releases = with_pre_releases

    def identify(self, requirement_or_candidate):
        # type: (Union[Candidate, Requirement]) -> str
        """Given requirement or candidate, return an identifier for it.

        This is used to identify a requirement or candidate, e.g.
        whether two requirements should have their specifier parts
        (version ranges or pins) merged, whether two candidates would
        conflict with each other (because they have same name but
        different versions).
        """
        return requirement_or_candidate.canonical_package_id

    def get_preference(
            self,  # type: CollectionDependencyProvider
            resolution,  # type: Optional[Candidate]
            candidates,  # type: List[Candidate]
            information,  # type: List[NamedTuple]
    ):  # type: (...) -> Union[float, int]
        """Return sort key function return value for given requirement.

        This result should be based on preference that is defined as
        "I think this requirement should be resolved first".
        The lower the return value is, the more preferred this
        group of arguments is.

        :param resolution: Currently pinned candidate, or ``None``.

        :param candidates: A list of possible candidates.

        :param information: A list of requirement information.

        Each ``information`` instance is a named tuple with two entries:

          * ``requirement`` specifies a requirement contributing to
            the current candidate list

          * ``parent`` specifies the candidate that provides
            (depends on) the requirement, or `None`
            to indicate a root requirement.

        The preference could depend on various issues, including
        (not necessarily in this order):

          * Is this package pinned in the current resolution result?

          * How relaxed is the requirement? Stricter ones should
            probably be worked on first? (I don't know, actually.)

          * How many possibilities are there to satisfy this
            requirement? Those with few left should likely be worked on
            first, I guess?

          * Are there any known conflicts for this requirement?
            We should probably work on those with the most
            known conflicts.

        A sortable value should be returned (this will be used as the
        `key` parameter of the built-in sorting function). The smaller
        the value is, the more preferred this requirement is (i.e. the
        sorting function is called with ``reverse=False``).
        """
        if any(
                candidate in self._preferred_candidates
                for candidate in candidates
        ):
            # NOTE: Prefer pre-installed candidates over newer versions
            # NOTE: available from Galaxy or other sources.
            return float('-inf')
        # Fewer remaining candidates means the requirement is more
        # constrained, hence resolved earlier.
        return len(candidates)

    def find_matches(self, requirements):
        # type: (List[Requirement]) -> List[Candidate]
        r"""Find all possible candidates satisfying given requirements.

        This tries to get candidates based on the requirements' types.

        For concrete requirements (SCM, dir, namespace dir, local or
        remote archives), the one-and-only match is returned

        For a "named" requirement, Galaxy-compatible APIs are consulted
        to find concrete candidates for this requirement. If there's a
        pre-installed candidate, it's prepended in front of others.

        :param requirements: A collection of requirements which all of \
                             the returned candidates must match. \
                             All requirements are guaranteed to have \
                             the same identifier. \
                             The collection is never empty.

        :returns: An iterable that orders candidates by preference, \
                  e.g. the most preferred candidate comes first.
        """
        # FIXME: The first requirement may be a Git repo followed by
        # FIXME: its cloned tmp dir. Using only the first one creates
        # FIXME: loops that prevent any further dependency exploration.
        # FIXME: We need to figure out how to prevent this.
        first_req = requirements[0]
        fqcn = first_req.fqcn
        # The fqcn is guaranteed to be the same
        coll_versions = self._api_proxy.get_collection_versions(first_req)
        if first_req.is_concrete_artifact:
            # FIXME: do we assume that all the following artifacts are also concrete?
            # FIXME: does using fqcn==None cause us problems here?

            return [
                Candidate(fqcn, version, _none_src_server, first_req.type)
                for version, _none_src_server in coll_versions
            ]

        preinstalled_candidates = {
            candidate for candidate in self._preferred_candidates
            if candidate.fqcn == fqcn
        }

        # Remaining candidates are sorted newest-first by version, then
        # by the source server.
        return list(preinstalled_candidates) + sorted(
            {
                candidate for candidate in (
                    Candidate(fqcn, version, src_server, 'galaxy')
                    for version, src_server in coll_versions
                )
                if all(self.is_satisfied_by(requirement, candidate) for requirement in requirements)
                # FIXME
                # if all(self.is_satisfied_by(requirement, candidate) and (
                #     requirement.src is None or  # if this is true for some candidates but not all it will break key param - Nonetype can't be compared to str
                #     requirement.src == candidate.src
                # ))
            },
            key=lambda candidate: (
                SemanticVersion(candidate.ver), candidate.src,
            ),
            reverse=True,  # prefer newer versions over older ones
        )

    def is_satisfied_by(self, requirement, candidate):
        # type: (Requirement, Candidate) -> bool
        r"""Whether the given requirement is satisfiable by a candidate.

        :param requirement: A requirement that produced the `candidate`.

        :param candidate: A pinned candidate supposedly matching the \
                          `requirement` specifier. It is guaranteed to \
                          have been generated from the `requirement`.

        :returns: Indication whether the `candidate` is a viable \
                  solution to the `requirement`.
        """
        # NOTE: Only allow pre-release candidates if we want pre-releases or
        # the req ver was an exact match with the pre-release version.
        allow_pre_release = self._with_pre_releases or not (
            requirement.ver == '*' or
            requirement.ver.startswith('<') or
            requirement.ver.startswith('>') or
            requirement.ver.startswith('!=')
        )
        if is_pre_release(candidate.ver) and not allow_pre_release:
            return False

        # NOTE: This is a set of Pipenv-inspired optimizations. Ref:
        # https://github.com/sarugaku/passa/blob/2ac00f1/src/passa/models/providers.py#L58-L74
        if (
                requirement.is_virtual or
                candidate.is_virtual or
                requirement.ver == '*'
        ):
            return True

        return meets_requirements(
            version=candidate.ver,
            requirements=requirement.ver,
        )

    def get_dependencies(self, candidate):
        # type: (Candidate) -> List[Candidate]
        r"""Get direct dependencies of a candidate.

        :returns: A collection of requirements that `candidate` \
                  specifies as its dependencies.
        """
        # FIXME: If there's several galaxy servers set, there may be a
        # FIXME: situation when the metadata of the same collection
        # FIXME: differs. So how do we resolve this case? Priority?
        # FIXME: Taking into account a pinned hash? Exploding on
        # FIXME: any differences?
        # NOTE: The underlying implementation currently uses first found
        req_map = self._api_proxy.get_collection_dependencies(candidate)

        # NOTE: This guard expression MUST perform an early exit only
        # NOTE: after the `get_collection_dependencies()` call because
        # NOTE: internally it populates the artifact URL of the candidate,
        # NOTE: its SHA hash and the Galaxy API token. These are still
        # NOTE: necessary with `--no-deps` because even with the disabled
        # NOTE: dependency resolution the outer layer will still need to
        # NOTE: know how to download and validate the artifact.
        #
        # NOTE: Virtual candidates should always return dependencies
        # NOTE: because they are ephemeral and non-installable.
        if not self._with_deps and not candidate.is_virtual:
            return []

        return [
            self._make_req_from_dict({'name': dep_name, 'version': dep_req})
            for dep_name, dep_req in req_map.items()
        ]
0 273
new file mode 100644
... ...
@@ -0,0 +1,17 @@
0
+# -*- coding: utf-8 -*-
1
+# Copyright: (c) 2020-2021, Ansible Project
2
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
3
+"""Requirement reporter implementations."""
4
+
5
+from __future__ import (absolute_import, division, print_function)
6
+__metaclass__ = type
7
+
8
+from resolvelib import BaseReporter
9
+
10
+
11
+class CollectionDependencyReporter(BaseReporter):
12
+    """A dependency reporter for Ansible Collections.
13
+
14
+    This is a proxy class allowing us to abstract away importing resolvelib
15
+    outside of the `ansible.galaxy.dependency_resolution` Python package.
16
+    """
0 17
new file mode 100644
... ...
@@ -0,0 +1,17 @@
0
+# -*- coding: utf-8 -*-
1
+# Copyright: (c) 2020-2021, Ansible Project
2
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
3
+"""Requirement resolver implementations."""
4
+
5
+from __future__ import (absolute_import, division, print_function)
6
+__metaclass__ = type
7
+
8
+from resolvelib import Resolver
9
+
10
+
11
+class CollectionDependencyResolver(Resolver):
12
+    """A dependency resolver for Ansible Collections.
13
+
14
+    This is a proxy class allowing us to abstract away importing resolvelib
15
+    outside of the `ansible.galaxy.dependency_resolution` Python package.
16
+    """
... ...
@@ -15,7 +15,10 @@ from ansible.utils.version import SemanticVersion
15 15
 def is_pre_release(version):
16 16
     # type: (str) -> bool
17 17
     """Figure out if a given version is a pre-release."""
18
-    return SemanticVersion(version).is_prerelease
18
+    try:
19
+        return SemanticVersion(version).is_prerelease
20
+    except ValueError:
21
+        return False
19 22
 
20 23
 
21 24
 def meets_requirements(version, requirements):
... ...
@@ -7,3 +7,7 @@ jinja2
7 7
 PyYAML
8 8
 cryptography
9 9
 packaging
10
+# NOTE: resolvelib 0.x version bumps should be considered major/breaking
11
+# NOTE: and we should update the upper cap with care, at least until 1.0
12
+# NOTE: Ref: https://github.com/sarugaku/resolvelib/issues/69
13
+resolvelib >= 0.5.3, < 0.6.0  # dependency resolver used by ansible-galaxy
... ...
@@ -24,8 +24,8 @@
24 24
 
25 25
 - assert:
26 26
     that:
27
-      - '"Downloading collection ''amazon.aws'' to" in download_collection.stdout'
28
-      - '"Downloading collection ''awx.awx'' to" in download_collection.stdout'
27
+      - '"Downloading collection ''amazon.aws:1.0.0'' to" in download_collection.stdout'
28
+      - '"Downloading collection ''awx.awx:0.0.1-devel'' to" in download_collection.stdout'
29 29
       - download_collection_amazon_actual.stat.exists
30 30
       - download_collection_awx_actual.stat.exists
31 31
 
... ...
@@ -2,22 +2,23 @@
2 2
   command: 'ansible-galaxy collection install git+file://{{ galaxy_dir }}/development/ansible_test/.git#/collection_1/'
3 3
   register: installed
4 4
 
5
-- assert:
5
+- name: SCM collections don't have a concrete artifact version so the collection should always be reinstalled
6
+  assert:
6 7
     that:
7
-      - "'Skipping' in installed.stdout"
8
-      - "'Created' not in installed.stdout"
8
+      - "'Created collection for ansible_test.collection_1' in installed.stdout"
9
+      - "'Created collection for ansible_test.collection_2' in installed.stdout"
9 10
 
10
-- name: Only reinstall the collection
11
+- name: The collection should also be reinstalled when --force flag is used
11 12
   command: 'ansible-galaxy collection install git+file://{{ galaxy_dir }}/development/ansible_test/.git#/collection_1/ --force'
12 13
   register: installed
13 14
 
14 15
 - assert:
15 16
     that:
16 17
       - "'Created collection for ansible_test.collection_1' in installed.stdout"
17
-      - "'Created collection for ansible_test.collection_2' not in installed.stdout"
18
-      - "'Skipping' in installed.stdout"
18
+      # The dependency is also an SCM collection, so it should also be reinstalled
19
+      - "'Created collection for ansible_test.collection_2' in installed.stdout"
19 20
 
20
-- name: Reinstall the collection and dependency
21
+- name: The collection should also be reinstalled when --force-with-deps is used
21 22
   command: 'ansible-galaxy collection install git+file://{{ galaxy_dir }}/development/ansible_test/.git#/collection_1/ --force-with-deps'
22 23
   register: installed
23 24
 
... ...
@@ -25,7 +26,6 @@
25 25
     that:
26 26
       - "'Created collection for ansible_test.collection_1' in installed.stdout"
27 27
       - "'Created collection for ansible_test.collection_2' in installed.stdout"
28
-      - "'Skipping' not in installed.stdout"
29 28
 
30 29
 - include_tasks: ./empty_installed_collections.yml
31 30
   when: cleanup
... ...
@@ -25,7 +25,13 @@
25 25
 - assert:
26 26
     that:
27 27
       - result.failed
28
-      - '"ERROR! Collections requirement entry should contain the key name." in result.stderr'
28
+      - >-
29
+        "ERROR! Neither the collection requirement entry key 'name',
30
+        nor 'source' point to a concrete resolvable collection artifact.
31
+        Also 'name' is not an FQCN. A valid collection name must be in
32
+        the format <namespace>.<collection>. Please make sure that the
33
+        namespace and the collection name  contain characters from
34
+        [a-zA-Z0-9_] only." in result.stderr
29 35
 
30 36
 - name: test source is not a git repo even if name is provided
31 37
   command: 'ansible-galaxy collection install -r source_and_name.yml'
... ...
@@ -37,7 +43,10 @@
37 37
 - assert:
38 38
     that:
39 39
       - result.failed
40
-      - '"ERROR! Unknown error when attempting to call Galaxy" in result.stderr'
40
+      - >-
41
+        result.stderr is search("ERROR! Collections requirement 'source'
42
+        entry should contain a valid Galaxy API URL but it does not:
43
+        git\+file:///.*/amazon.aws/.git is not an HTTP URL.")
41 44
 
42 45
 - name: test source is not a git repo even if name and type is provided
43 46
   command: 'ansible-galaxy collection install -r source_and_name_and_type.yml'
... ...
@@ -49,7 +58,12 @@
49 49
 - assert:
50 50
     that:
51 51
       - result.failed
52
-      - 'result.stderr is search("ERROR! - command /.*/git clone ansible.nope ansible.nope failed")'
52
+      - >-
53
+        result.stderr is search("ERROR! Failed to clone a Git repository
54
+        from `file:///.*/.git`.")
55
+      - >-
56
+        result.stderr is search("fatal: '/.*/amazon.aws/.git' does not
57
+        appear to be a git repository")
53 58
 
54 59
 - name: test using name as a git repo without git+ prefix
55 60
   command: 'ansible-galaxy collection install -r name_without_type.yml'
... ...
@@ -4,16 +4,35 @@
4 4
 
5 5
 - assert:
6 6
     that:
7
-      - command.stdout_lines | length == 9
8
-      - command.stdout_lines[0] == "Starting galaxy collection install process"
9
-      - command.stdout_lines[1] == "Process install dependency map"
10
-      - command.stdout_lines[2] == "Starting collection install process"
11
-      - "'namespace_1.collection_1' in command.stdout_lines[3]"
12
-      - "'namespace_1.collection_1' in command.stdout_lines[4]"
13
-      - "'namespace_1.collection_1' in command.stdout_lines[5]"
14
-      - "'namespace_2.collection_2' in command.stdout_lines[6]"
15
-      - "'namespace_2.collection_2' in command.stdout_lines[7]"
16
-      - "'namespace_2.collection_2' in command.stdout_lines[8]"
7
+      - command.stdout_lines | length == 12
8
+      - >-
9
+        'Starting galaxy collection install process'
10
+        in command.stdout_lines
11
+      - >-
12
+        'Starting collection install process'
13
+        in command.stdout_lines
14
+      - >-
15
+        "Installing 'namespace_1.collection_1:1.0.0' to
16
+        '{{ galaxy_dir }}/ansible_collections/namespace_1/collection_1'"
17
+        in command.stdout_lines
18
+      - >-
19
+        'Created collection for namespace_1.collection_1:1.0.0 at
20
+        {{ galaxy_dir }}/ansible_collections/namespace_1/collection_1'
21
+        in command.stdout_lines
22
+      - >-
23
+        'namespace_1.collection_1:1.0.0 was installed successfully'
24
+        in command.stdout_lines
25
+      - >-
26
+        "Installing 'namespace_2.collection_2:1.0.0' to
27
+        '{{ galaxy_dir }}/ansible_collections/namespace_2/collection_2'"
28
+        in command.stdout_lines
29
+      - >-
30
+        'Created collection for namespace_2.collection_2:1.0.0 at
31
+        {{ galaxy_dir }}/ansible_collections/namespace_2/collection_2'
32
+        in command.stdout_lines
33
+      - >-
34
+        'namespace_2.collection_2:1.0.0 was installed successfully'
35
+        in command.stdout_lines
17 36
 
18 37
 - name: list installed collections
19 38
   command: 'ansible-galaxy collection list'
... ...
@@ -30,16 +49,35 @@
30 30
 
31 31
 - assert:
32 32
     that:
33
-      - command.stdout_lines | length == 9
34
-      - command.stdout_lines[0] == "Starting galaxy collection install process"
35
-      - command.stdout_lines[1] == "Process install dependency map"
36
-      - command.stdout_lines[2] == "Starting collection install process"
37
-      - "'namespace_1.collection_1' in command.stdout_lines[3]"
38
-      - "'namespace_1.collection_1' in command.stdout_lines[4]"
39
-      - "'namespace_1.collection_1' in command.stdout_lines[5]"
40
-      - "'namespace_2.collection_2' in command.stdout_lines[6]"
41
-      - "'namespace_2.collection_2' in command.stdout_lines[7]"
42
-      - "'namespace_2.collection_2' in command.stdout_lines[8]"
33
+      - command.stdout_lines | length == 12
34
+      - >-
35
+        'Starting galaxy collection install process'
36
+        in command.stdout_lines
37
+      - >-
38
+        'Starting collection install process'
39
+        in command.stdout_lines
40
+      - >-
41
+        "Installing 'namespace_1.collection_1:1.0.0' to
42
+        '{{ galaxy_dir }}/ansible_collections/namespace_1/collection_1'"
43
+        in command.stdout_lines
44
+      - >-
45
+        'Created collection for namespace_1.collection_1:1.0.0 at
46
+        {{ galaxy_dir }}/ansible_collections/namespace_1/collection_1'
47
+        in command.stdout_lines
48
+      - >-
49
+        'namespace_1.collection_1:1.0.0 was installed successfully'
50
+        in command.stdout_lines
51
+      - >-
52
+        "Installing 'namespace_2.collection_2:1.0.0' to
53
+        '{{ galaxy_dir }}/ansible_collections/namespace_2/collection_2'"
54
+        in command.stdout_lines
55
+      - >-
56
+        'Created collection for namespace_2.collection_2:1.0.0 at
57
+        {{ galaxy_dir }}/ansible_collections/namespace_2/collection_2'
58
+        in command.stdout_lines
59
+      - >-
60
+        'namespace_2.collection_2:1.0.0 was installed successfully'
61
+        in command.stdout_lines
43 62
 
44 63
 - name: list installed collections
45 64
   command: 'ansible-galaxy collection list'
... ...
@@ -16,12 +16,18 @@
16 16
     file_type: file
17 17
   register: download_collection_actual
18 18
 
19
-- name: assert download collection with multiple dependencies
19
+- name: assert download collection with multiple dependencies --no-deps
20 20
   assert:
21 21
     that:
22
-    - '"Downloading collection ''parent_dep.parent_collection'' to" in download_collection.stdout'
23
-    - 'not "Downloading collection ''child_dep.child_collection'' to" in download_collection.stdout'
24
-    - 'not "Downloading collection ''child_dep.child_dep2'' to" in download_collection.stdout'
22
+    - >-
23
+      "Downloading collection 'parent_dep.parent_collection:1.0.0' to '/tmp/"
24
+      in download_collection.stdout
25
+    - >-
26
+      "Downloading collection 'child_dep.child_collection"
27
+      not in download_collection.stdout
28
+    - >-
29
+      "Downloading collection 'child_dep.child_dep2"
30
+      not in download_collection.stdout
25 31
     - download_collection_actual.examined == 2
26 32
     - download_collection_actual.matched == 2
27 33
     - (download_collection_actual.files[0].path | basename) in ['requirements.yml', 'parent_dep-parent_collection-1.0.0.tar.gz']
... ...
@@ -42,9 +48,9 @@
42 42
 - name: assert download collection with multiple dependencies
43 43
   assert:
44 44
     that:
45
-    - '"Downloading collection ''parent_dep.parent_collection'' to" in download_collection.stdout'
46
-    - '"Downloading collection ''child_dep.child_collection'' to" in download_collection.stdout'
47
-    - '"Downloading collection ''child_dep.child_dep2'' to" in download_collection.stdout'
45
+    - '"Downloading collection ''parent_dep.parent_collection:1.0.0'' to" in download_collection.stdout'
46
+    - '"Downloading collection ''child_dep.child_collection:0.9.9'' to" in download_collection.stdout'
47
+    - '"Downloading collection ''child_dep.child_dep2:1.2.2'' to" in download_collection.stdout'
48 48
     - download_collection_actual.examined == 4
49 49
     - download_collection_actual.matched == 4
50 50
     - (download_collection_actual.files[0].path | basename) in ['requirements.yml', 'child_dep-child_dep2-1.2.2.tar.gz', 'child_dep-child_collection-0.9.9.tar.gz', 'parent_dep-parent_collection-1.0.0.tar.gz']
... ...
@@ -104,7 +110,7 @@
104 104
 - name: assert download collection with multiple dependencies
105 105
   assert:
106 106
     that:
107
-    - '"Downloading collection ''namespace1.name1'' to" in download_req_custom_path.stdout'
107
+    - '"Downloading collection ''namespace1.name1:1.1.0-beta.1'' to" in download_req_custom_path.stdout'
108 108
     - download_req_custom_path_actual.examined == 2
109 109
     - download_req_custom_path_actual.matched == 2
110 110
     - (download_req_custom_path_actual.files[0].path | basename) in ['requirements.yml', 'namespace1-name1-1.1.0-beta.1.tar.gz']
... ...
@@ -161,5 +167,5 @@
161 161
 
162 162
     - assert:
163 163
         that:
164
-        - '"Downloading collection ''ansible_test.my_collection'' to" in download_collection.stdout'
164
+        - '"Downloading collection ''ansible_test.my_collection:1.0.0'' to" in download_collection.stdout'
165 165
         - download_collection_actual.stat.exists
... ...
@@ -40,7 +40,7 @@
40 40
 - name: assert install existing without --force - {{ test_name }}
41 41
   assert:
42 42
     that:
43
-    - '"Skipping ''namespace1.name1'' as it is already installed" in install_existing_no_force.stdout'
43
+    - '"Nothing to do. All requested collections are already installed" in install_existing_no_force.stdout'
44 44
 
45 45
 - name: install existing with --force - {{ test_name }}
46 46
   command: ansible-galaxy collection install namespace1.name1 -s '{{ test_name }}' --force {{ galaxy_verbosity }}
... ...
@@ -129,7 +129,9 @@
129 129
 - name: expect failure with dep resolution failure
130 130
   command:  ansible-galaxy collection install fail_namespace.fail_collection -s {{ test_name }} {{ galaxy_verbosity }}
131 131
   register: fail_dep_mismatch
132
-  failed_when: '"Cannot meet dependency requirement ''fail_dep2.name:<0.0.5'' for collection fail_namespace.fail_collection" not in fail_dep_mismatch.stderr'
132
+  failed_when:
133
+  - '"Could not satisfy the following requirements" not in fail_dep_mismatch.stderr'
134
+  - '" fail_dep2.name:<0.0.5 (dependency of fail_namespace.fail_collection:2.1.2)" not in fail_dep_mismatch.stderr'
133 135
 
134 136
 - name: Find artifact url for namespace3.name
135 137
   uri:
136 138
new file mode 100644
... ...
@@ -0,0 +1,55 @@
0
+- name: initialize collection structure
1
+  command: ansible-galaxy collection init {{ item }} --init-path "{{ galaxy_dir }}/dev/ansible_collections" {{ galaxy_verbosity }}
2
+  loop:
3
+    - 'dev.collection1'
4
+    - 'dev.collection2'
5
+    - 'dev.collection3'
6
+
7
+- name: replace the default version of the collections
8
+  lineinfile:
9
+    path: "{{ galaxy_dir }}/dev/ansible_collections/dev/{{ item.name }}/galaxy.yml"
10
+    line: "{{ item.version }}"
11
+    regexp: "version: .*"
12
+  loop:
13
+    - name: "collection1"
14
+      version: "version: null"
15
+    - name: "collection2"
16
+      version: "version: placeholder"
17
+    - name: "collection3"
18
+      version: "version: ''"
19
+
20
+- name: list collections in development without semver versions
21
+  command: ansible-galaxy collection list {{ galaxy_verbosity }}
22
+  register: list_result
23
+  environment:
24
+    ANSIBLE_COLLECTIONS_PATH: "{{ galaxy_dir }}/dev:{{ galaxy_dir }}/prod"
25
+
26
+- assert:
27
+    that:
28
+      - "'dev.collection1 *' in list_result.stdout"
29
+      # Note the version displayed is the 'placeholder' string rather than "*" since it is not falsey
30
+      - "'dev.collection2 placeholder' in list_result.stdout"
31
+      - "'dev.collection3 *' in list_result.stdout"
32
+
33
+- name: install an artifact to the second collections path
34
+  command: ansible-galaxy collection install namespace1.name1 -s galaxy_ng {{ galaxy_verbosity }} -p "{{ galaxy_dir }}/prod"
35
+  environment:
36
+    ANSIBLE_CONFIG: '{{ galaxy_dir }}/ansible.cfg'
37
+
38
+- name: replace the artifact version
39
+  lineinfile:
40
+    path: "{{ galaxy_dir }}/prod/ansible_collections/namespace1/name1/MANIFEST.json"
41
+    line: '  "version": null,'
42
+    regexp: '  "version": .*'
43
+
44
+- name: test listing collections in all paths
45
+  command: ansible-galaxy collection list {{ galaxy_verbosity }}
46
+  register: list_result
47
+  ignore_errors: True
48
+  environment:
49
+    ANSIBLE_COLLECTIONS_PATH: "{{ galaxy_dir }}/dev:{{ galaxy_dir }}/prod"
50
+
51
+- assert:
52
+    that:
53
+      - list_result is failed
54
+      - "'is expected to have a valid SemVer version value but got None' in list_result.stderr"
... ...
@@ -144,34 +144,24 @@
144 144
 - name: assert result of install collection with dep on another server
145 145
   assert:
146 146
     that:
147
-    - '"''secondary.name'' obtained from server secondary" in install_cross_dep.stdout'
147
+    - >-
148
+      "'secondary.name:1.0.0' obtained from server secondary"
149
+      in install_cross_dep.stdout
148 150
     # pulp_v2 is highest in the list so it will find it there first
149
-    - '"''parent_dep.parent_collection'' obtained from server pulp_v2" in install_cross_dep.stdout'
150
-    - '"''child_dep.child_collection'' obtained from server pulp_v2" in install_cross_dep.stdout'
151
-    - '"''child_dep.child_dep2'' obtained from server pulp_v2" in install_cross_dep.stdout'
151
+    - >-
152
+      "'parent_dep.parent_collection:1.0.0' obtained from server pulp_v2"
153
+      in install_cross_dep.stdout
154
+    - >-
155
+      "'child_dep.child_collection:0.9.9' obtained from server pulp_v2"
156
+      in install_cross_dep.stdout
157
+    - >-
158
+      "'child_dep.child_dep2:1.2.2' obtained from server pulp_v2"
159
+      in install_cross_dep.stdout
152 160
     - (install_cross_dep_actual.results[0].content | b64decode | from_json).collection_info.version == '1.0.0'
153 161
     - (install_cross_dep_actual.results[1].content | b64decode | from_json).collection_info.version == '1.0.0'
154 162
     - (install_cross_dep_actual.results[2].content | b64decode | from_json).collection_info.version == '0.9.9'
155 163
     - (install_cross_dep_actual.results[3].content | b64decode | from_json).collection_info.version == '1.2.2'
156 164
 
157
-# fake.fake does not exist but we check the output to ensure it checked all 3
158
-# servers defined in the config. We hardcode to -vvv as that's what level the
159
-# message is shown
160
-- name: test install fallback on server list
161
-  command: ansible-galaxy collection install fake.fake -vvv
162
-  ignore_errors: yes
163
-  environment:
164
-    ANSIBLE_CONFIG: '{{ galaxy_dir }}/ansible.cfg'
165
-  register: missing_fallback
166
-
167
-- name: assert test install fallback on server list
168
-  assert:
169
-    that:
170
-    - missing_fallback.rc == 1
171
-    - '"Collection ''fake.fake'' is not available from server pulp_v2" in missing_fallback.stdout'
172
-    - '"Collection ''fake.fake'' is not available from server pulp_v3" in missing_fallback.stdout'
173
-    - '"Collection ''fake.fake'' is not available from server galaxy_ng" in missing_fallback.stdout'
174
-
175 165
 - name: run ansible-galaxy collection download tests
176 166
   include_tasks: download.yml
177 167
   args:
... ...
@@ -189,3 +179,6 @@
189 189
     test_name: 'galaxy_ng'
190 190
     test_server: '{{ galaxy_ng_server }}'
191 191
     vX: "v3/"
192
+
193
+- name: run ansible-galaxy collection list tests
194
+  include_tasks: list.yml
... ...
@@ -21,7 +21,8 @@
21 21
 - assert:
22 22
     that:
23 23
       - verify.failed
24
-      - "'The format namespace.name is expected' in verify.stderr"
24
+      - >-
25
+        "ERROR! 'file' type is not supported. The format namespace.name is expected." in verify.stderr
25 26
 
26 27
 - name: install the collection from the server
27 28
   command: ansible-galaxy collection install ansible_test.verify:1.0.0
... ...
@@ -39,6 +40,11 @@
39 39
       - verify is success
40 40
       - "'Collection ansible_test.verify contains modified content' not in verify.stdout"
41 41
 
42
+- name: verify the installed collection against the server, with unspecified version in CLI
43
+  command: ansible-galaxy collection verify ansible_test.verify
44
+  environment:
45
+    ANSIBLE_COLLECTIONS_PATH: '{{ galaxy_dir }}'
46
+
42 47
 - name: verify a collection that doesn't appear to be installed
43 48
   command: ansible-galaxy collection verify ansible_test.verify:1.0.0
44 49
   register: verify
... ...
@@ -1,3 +1,4 @@
1
+resolvelib >= 0.5.3, < 0.6.0  # keep in sync with `requirements.txt`
1 2
 coverage >= 4.5.1, < 5.0.0 ; python_version <  '3.7' # coverage 4.4 required for "disable_warnings" support but 4.5.1 needed for bug fixes, coverage 5.0+ incompatible
2 3
 coverage >= 4.5.2, < 5.0.0 ; python_version == '3.7' # coverage 4.5.2 fixes bugs in support for python 3.7, coverage 5.0+ incompatible
3 4
 coverage >= 4.5.4, < 5.0.0 ; python_version >  '3.7' # coverage had a bug in < 4.5.4 that would cause unit tests to hang in Python 3.8, coverage 5.0+ incompatible
... ...
@@ -4,3 +4,4 @@ junit-xml
4 4
 ordereddict ; python_version < '2.7'
5 5
 packaging
6 6
 pyyaml
7
+resolvelib
... ...
@@ -5,3 +5,4 @@ pytest
5 5
 pytest-mock
6 6
 pytest-xdist
7 7
 pyyaml
8
+resolvelib
... ...
@@ -1,5 +1,6 @@
1 1
 jinja2
2 2
 pyyaml
3
+resolvelib
3 4
 sphinx
4 5
 sphinx-notfound-page
5 6
 straight.plugin
... ...
@@ -2,6 +2,7 @@ docutils
2 2
 jinja2
3 3
 packaging
4 4
 pyyaml  # ansible-core requirement
5
+resolvelib  # ansible-core requirement
5 6
 rstcheck
6 7
 setuptools > 39.2
7 8
 straight.plugin
... ...
@@ -41,7 +41,11 @@ lib/ansible/executor/powershell/async_watchdog.ps1 pslint:PSCustomUseLiteralPath
41 41
 lib/ansible/executor/powershell/async_wrapper.ps1 pslint:PSCustomUseLiteralPath
42 42
 lib/ansible/executor/powershell/exec_wrapper.ps1 pslint:PSCustomUseLiteralPath
43 43
 lib/ansible/executor/task_queue_manager.py pylint:blacklisted-name
44
+lib/ansible/cli/galaxy.py compile-2.6!skip  # 'ansible-galaxy collection' requires 2.7+
44 45
 lib/ansible/galaxy/collection/__init__.py compile-2.6!skip # 'ansible-galaxy collection' requires 2.7+
46
+lib/ansible/galaxy/collection/galaxy_api_proxy.py compile-2.6!skip  # 'ansible-galaxy collection' requires 2.7+
47
+lib/ansible/galaxy/dependency_resolution/dataclasses.py compile-2.6!skip  # 'ansible-galaxy collection' requires 2.7+
48
+lib/ansible/galaxy/dependency_resolution/providers.py compile-2.6!skip  # 'ansible-galaxy collection' requires 2.7+
45 49
 lib/ansible/module_utils/compat/_selectors2.py future-import-boilerplate # ignore bundled
46 50
 lib/ansible/module_utils/compat/_selectors2.py metaclass-boilerplate # ignore bundled
47 51
 lib/ansible/module_utils/compat/_selectors2.py pylint:blacklisted-name
... ...
@@ -8,14 +8,13 @@ __metaclass__ = type
8 8
 import pytest
9 9
 
10 10
 from ansible.cli.galaxy import _display_collection
11
+from ansible.galaxy.dependency_resolution.dataclasses import Requirement
11 12
 
12 13
 
13 14
 @pytest.fixture
14
-def collection_object(mocker):
15
+def collection_object():
15 16
     def _cobj(fqcn='sandwiches.ham'):
16
-        cobj = mocker.MagicMock(latest_version='1.5.0')
17
-        cobj.__str__.return_value = fqcn
18
-        return cobj
17
+        return Requirement(fqcn, '1.5.0', None, 'galaxy')
19 18
     return _cobj
20 19
 
21 20
 
... ...
@@ -1,3 +1,4 @@
1
+# -*- coding: utf-8 -*-
1 2
 # Copyright (c) 2020 Ansible Project
2 3
 # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
3 4
 
... ...
@@ -9,7 +10,8 @@ import pytest
9 9
 from ansible import context
10 10
 from ansible.cli.galaxy import GalaxyCLI
11 11
 from ansible.errors import AnsibleError, AnsibleOptionsError
12
-from ansible.galaxy.collection import CollectionRequirement
12
+from ansible.galaxy import collection
13
+from ansible.galaxy.dependency_resolution.dataclasses import Requirement
13 14
 from ansible.module_utils._text import to_native
14 15
 
15 16
 
... ...
@@ -48,47 +50,39 @@ def mock_collection_objects(mocker):
48 48
     mocker.patch('ansible.cli.galaxy.validate_collection_path',
49 49
                  side_effect=['/root/.ansible/collections/ansible_collections', '/usr/share/ansible/collections/ansible_collections'])
50 50
 
51
-    collection_args = (
51
+    collection_args_1 = (
52 52
         (
53
-            'sandwiches',
54
-            'pbj',
55
-            b'/usr/share/ansible/collections/ansible_collections/sandwiches/pbj',
56
-            mocker.Mock(),
57
-            ['1.0.0', '1.5.0'],
58
-            '1.0.0',
59
-            False,
53
+            'sandwiches.pbj',
54
+            '1.5.0',
55
+            None,
56
+            'dir',
60 57
         ),
61 58
         (
62
-            'sandwiches',
63
-            'pbj',
64
-            b'/root/.ansible/collections/ansible_collections/sandwiches/pbj',
65
-            mocker.Mock(),
66
-            ['1.0.0', '1.5.0'],
67
-            '1.5.0',
68
-            False,
59
+            'sandwiches.reuben',
60
+            '2.5.0',
61
+            None,
62
+            'dir',
69 63
         ),
64
+    )
65
+
66
+    collection_args_2 = (
70 67
         (
71
-            'sandwiches',
72
-            'ham',
73
-            b'/usr/share/ansible/collections/ansible_collections/sandwiches/ham',
74
-            mocker.Mock(),
75
-            ['1.0.0'],
68
+            'sandwiches.pbj',
76 69
             '1.0.0',
77
-            False,
70
+            None,
71
+            'dir',
78 72
         ),
79 73
         (
80
-            'sandwiches',
81
-            'reuben',
82
-            b'/root/.ansible/collections/ansible_collections/sandwiches/reuben',
83
-            mocker.Mock(),
84
-            ['1.0.0', '2.5.0'],
85
-            '2.5.0',
86
-            False,
74
+            'sandwiches.ham',
75
+            '1.0.0',
76
+            None,
77
+            'dir',
87 78
         ),
88 79
     )
89 80
 
90
-    collections_path_1 = [CollectionRequirement(*cargs) for cargs in collection_args if to_native(cargs[2]).startswith('/root')]
91
-    collections_path_2 = [CollectionRequirement(*cargs) for cargs in collection_args if to_native(cargs[2]).startswith('/usr/share')]
81
+    collections_path_1 = [Requirement(*cargs) for cargs in collection_args_1]
82
+    collections_path_2 = [Requirement(*cargs) for cargs in collection_args_2]
83
+
92 84
     mocker.patch('ansible.cli.galaxy.find_existing_collections', side_effect=[collections_path_1, collections_path_2])
93 85
 
94 86
 
... ...
@@ -98,44 +92,35 @@ def mock_from_path(mocker):
98 98
         collection_args = {
99 99
             'sandwiches.pbj': (
100 100
                 (
101
-                    'sandwiches',
102
-                    'pbj',
103
-                    b'/root/.ansible/collections/ansible_collections/sandwiches/pbj',
104
-                    mocker.Mock(),
105
-                    ['1.0.0', '1.5.0'],
101
+                    'sandwiches.pbj',
106 102
                     '1.5.0',
107
-                    False,
103
+                    None,
104
+                    'dir',
108 105
                 ),
109 106
                 (
110
-                    'sandwiches',
111
-                    'pbj',
112
-                    b'/usr/share/ansible/collections/ansible_collections/sandwiches/pbj',
113
-                    mocker.Mock(),
114
-                    ['1.0.0', '1.5.0'],
107
+                    'sandwiches.pbj',
115 108
                     '1.0.0',
116
-                    False,
109
+                    None,
110
+                    'dir',
117 111
                 ),
118 112
             ),
119 113
             'sandwiches.ham': (
120 114
                 (
121
-                    'sandwiches',
122
-                    'ham',
123
-                    b'/usr/share/ansible/collections/ansible_collections/sandwiches/ham',
124
-                    mocker.Mock(),
125
-                    ['1.0.0'],
115
+                    'sandwiches.ham',
126 116
                     '1.0.0',
127
-                    False,
117
+                    None,
118
+                    'dir',
128 119
                 ),
129 120
             ),
130 121
         }
131 122
 
132
-        from_path_objects = [CollectionRequirement(*args) for args in collection_args[collection_name]]
133
-        mocker.patch('ansible.galaxy.collection.CollectionRequirement.from_path', side_effect=from_path_objects)
123
+        from_path_objects = [Requirement(*args) for args in collection_args[collection_name]]
124
+        mocker.patch('ansible.cli.galaxy.Requirement.from_dir_path_as_unknown', side_effect=from_path_objects)
134 125
 
135 126
     return _from_path
136 127
 
137 128
 
138
-def test_execute_list_collection_all(mocker, capsys, mock_collection_objects):
129
+def test_execute_list_collection_all(mocker, capsys, mock_collection_objects, tmp_path_factory):
139 130
     """Test listing all collections from multiple paths"""
140 131
 
141 132
     cliargs()
... ...
@@ -143,7 +128,9 @@ def test_execute_list_collection_all(mocker, capsys, mock_collection_objects):
143 143
     mocker.patch('os.path.exists', return_value=True)
144 144
     mocker.patch('os.path.isdir', return_value=True)
145 145
     gc = GalaxyCLI(['ansible-galaxy', 'collection', 'list'])
146
-    gc.execute_list_collection()
146
+    tmp_path = tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections')
147
+    concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(tmp_path, validate_certs=False)
148
+    gc.execute_list_collection(artifacts_manager=concrete_artifact_cm)
147 149
 
148 150
     out, err = capsys.readouterr()
149 151
     out_lines = out.splitlines()
... ...
@@ -163,7 +150,7 @@ def test_execute_list_collection_all(mocker, capsys, mock_collection_objects):
163 163
     assert out_lines[11] == 'sandwiches.pbj 1.0.0  '
164 164
 
165 165
 
166
-def test_execute_list_collection_specific(mocker, capsys, mock_collection_objects, mock_from_path):
166
+def test_execute_list_collection_specific(mocker, capsys, mock_collection_objects, mock_from_path, tmp_path_factory):
167 167
     """Test listing a specific collection"""
168 168
 
169 169
     collection_name = 'sandwiches.ham'
... ...
@@ -176,7 +163,9 @@ def test_execute_list_collection_specific(mocker, capsys, mock_collection_object
176 176
     mocker.patch('ansible.cli.galaxy._get_collection_widths', return_value=(14, 5))
177 177
 
178 178
     gc = GalaxyCLI(['ansible-galaxy', 'collection', 'list', collection_name])
179
-    gc.execute_list_collection()
179
+    tmp_path = tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections')
180
+    concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(tmp_path, validate_certs=False)
181
+    gc.execute_list_collection(artifacts_manager=concrete_artifact_cm)
180 182
 
181 183
     out, err = capsys.readouterr()
182 184
     out_lines = out.splitlines()
... ...
@@ -189,7 +178,7 @@ def test_execute_list_collection_specific(mocker, capsys, mock_collection_object
189 189
     assert out_lines[4] == 'sandwiches.ham 1.0.0  '
190 190
 
191 191
 
192
-def test_execute_list_collection_specific_duplicate(mocker, capsys, mock_collection_objects, mock_from_path):
192
+def test_execute_list_collection_specific_duplicate(mocker, capsys, mock_collection_objects, mock_from_path, tmp_path_factory):
193 193
     """Test listing a specific collection that exists at multiple paths"""
194 194
 
195 195
     collection_name = 'sandwiches.pbj'
... ...
@@ -201,7 +190,9 @@ def test_execute_list_collection_specific_duplicate(mocker, capsys, mock_collect
201 201
     mocker.patch('ansible.galaxy.collection.validate_collection_name', collection_name)
202 202
 
203 203
     gc = GalaxyCLI(['ansible-galaxy', 'collection', 'list', collection_name])
204
-    gc.execute_list_collection()
204
+    tmp_path = tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections')
205
+    concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(tmp_path, validate_certs=False)
206
+    gc.execute_list_collection(artifacts_manager=concrete_artifact_cm)
205 207
 
206 208
     out, err = capsys.readouterr()
207 209
     out_lines = out.splitlines()
... ...
@@ -219,7 +210,7 @@ def test_execute_list_collection_specific_duplicate(mocker, capsys, mock_collect
219 219
     assert out_lines[9] == 'sandwiches.pbj 1.0.0  '
220 220
 
221 221
 
222
-def test_execute_list_collection_specific_invalid_fqcn(mocker):
222
+def test_execute_list_collection_specific_invalid_fqcn(mocker, tmp_path_factory):
223 223
     """Test an invalid fully qualified collection name (FQCN)"""
224 224
 
225 225
     collection_name = 'no.good.name'
... ...
@@ -229,11 +220,13 @@ def test_execute_list_collection_specific_invalid_fqcn(mocker):
229 229
     mocker.patch('os.path.isdir', return_value=True)
230 230
 
231 231
     gc = GalaxyCLI(['ansible-galaxy', 'collection', 'list', collection_name])
232
+    tmp_path = tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections')
233
+    concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(tmp_path, validate_certs=False)
232 234
     with pytest.raises(AnsibleError, match='Invalid collection name'):
233
-        gc.execute_list_collection()
235
+        gc.execute_list_collection(artifacts_manager=concrete_artifact_cm)
234 236
 
235 237
 
236
-def test_execute_list_collection_no_valid_paths(mocker, capsys):
238
+def test_execute_list_collection_no_valid_paths(mocker, capsys, tmp_path_factory):
237 239
     """Test listing collections when no valid paths are given"""
238 240
 
239 241
     cliargs()
... ...
@@ -244,8 +237,11 @@ def test_execute_list_collection_no_valid_paths(mocker, capsys):
244 244
     mocker.patch('ansible.cli.galaxy.display.columns', 79)
245 245
     gc = GalaxyCLI(['ansible-galaxy', 'collection', 'list'])
246 246
 
247
+    tmp_path = tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections')
248
+    concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(tmp_path, validate_certs=False)
249
+
247 250
     with pytest.raises(AnsibleOptionsError, match=r'None of the provided paths were usable.'):
248
-        gc.execute_list_collection()
251
+        gc.execute_list_collection(artifacts_manager=concrete_artifact_cm)
249 252
 
250 253
     out, err = capsys.readouterr()
251 254
 
... ...
@@ -253,7 +249,7 @@ def test_execute_list_collection_no_valid_paths(mocker, capsys):
253 253
     assert 'exists, but it\nis not a directory.' in err
254 254
 
255 255
 
256
-def test_execute_list_collection_one_invalid_path(mocker, capsys, mock_collection_objects):
256
+def test_execute_list_collection_one_invalid_path(mocker, capsys, mock_collection_objects, tmp_path_factory):
257 257
     """Test listing all collections when one invalid path is given"""
258 258
 
259 259
     cliargs()
... ...
@@ -263,7 +259,9 @@ def test_execute_list_collection_one_invalid_path(mocker, capsys, mock_collectio
263 263
     mocker.patch('ansible.utils.color.ANSIBLE_COLOR', False)
264 264
 
265 265
     gc = GalaxyCLI(['ansible-galaxy', 'collection', 'list', '-p', 'nope'])
266
-    gc.execute_list_collection()
266
+    tmp_path = tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections')
267
+    concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(tmp_path, validate_certs=False)
268
+    gc.execute_list_collection(artifacts_manager=concrete_artifact_cm)
267 269
 
268 270
     out, err = capsys.readouterr()
269 271
     out_lines = out.splitlines()
... ...
@@ -8,18 +8,16 @@ __metaclass__ = type
8 8
 import pytest
9 9
 
10 10
 from ansible.cli.galaxy import _get_collection_widths
11
+from ansible.galaxy.dependency_resolution.dataclasses import Requirement
11 12
 
12 13
 
13 14
 @pytest.fixture
14
-def collection_objects(mocker):
15
-    collection_ham = mocker.MagicMock(latest_version='1.5.0')
16
-    collection_ham.__str__.return_value = 'sandwiches.ham'
15
+def collection_objects():
16
+    collection_ham = Requirement('sandwiches.ham', '1.5.0', None, 'galaxy')
17 17
 
18
-    collection_pbj = mocker.MagicMock(latest_version='2.5')
19
-    collection_pbj.__str__.return_value = 'sandwiches.pbj'
18
+    collection_pbj = Requirement('sandwiches.pbj', '2.5', None, 'galaxy')
20 19
 
21
-    collection_reuben = mocker.MagicMock(latest_version='4')
22
-    collection_reuben.__str__.return_value = 'sandwiches.reuben'
20
+    collection_reuben = Requirement('sandwiches.reuben', '4', None, 'galaxy')
23 21
 
24 22
     return [collection_ham, collection_pbj, collection_reuben]
25 23
 
... ...
@@ -29,8 +27,7 @@ def test_get_collection_widths(collection_objects):
29 29
 
30 30
 
31 31
 def test_get_collection_widths_single_collection(mocker):
32
-    mocked_collection = mocker.MagicMock(latest_version='3.0.0')
33
-    mocked_collection.__str__.return_value = 'sandwiches.club'
32
+    mocked_collection = Requirement('sandwiches.club', '3.0.0', None, 'galaxy')
34 33
     # Make this look like it is not iterable
35 34
     mocker.patch('ansible.cli.galaxy.is_iterable', return_value=False)
36 35
 
... ...
@@ -21,6 +21,7 @@ from __future__ import (absolute_import, division, print_function)
21 21
 __metaclass__ = type
22 22
 
23 23
 import ansible
24
+from io import BytesIO
24 25
 import json
25 26
 import os
26 27
 import pytest
... ...
@@ -33,6 +34,7 @@ import yaml
33 33
 import ansible.constants as C
34 34
 from ansible import context
35 35
 from ansible.cli.galaxy import GalaxyCLI
36
+from ansible.galaxy import collection
36 37
 from ansible.galaxy.api import GalaxyAPI
37 38
 from ansible.errors import AnsibleError
38 39
 from ansible.module_utils._text import to_bytes, to_native, to_text
... ...
@@ -630,7 +632,12 @@ def test_invalid_collection_name_init(name):
630 630
 ])
631 631
 def test_invalid_collection_name_install(name, expected, tmp_path_factory):
632 632
     install_path = to_text(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections'))
633
-    expected = "Invalid collection name '%s', name must be in the format <namespace>.<collection>" % expected
633
+
634
+    # FIXME: we should add the collection name in the error message
635
+    # Used to be: expected = "Invalid collection name '%s', name must be in the format <namespace>.<collection>" % expected
636
+    expected = "Neither the collection requirement entry key 'name', nor 'source' point to a concrete resolvable collection artifact. "
637
+    expected += r"Also 'name' is not an FQCN\. A valid collection name must be in the format <namespace>\.<collection>\. "
638
+    expected += r"Please make sure that the namespace and the collection name  contain characters from \[a\-zA\-Z0\-9_\] only\."
634 639
 
635 640
     gc = GalaxyCLI(args=['ansible-galaxy', 'collection', 'install', name, '-p', os.path.join(install_path, 'install')])
636 641
     with pytest.raises(AnsibleError, match=expected):
... ...
@@ -758,17 +765,17 @@ def test_collection_install_with_names(collection_install):
758 758
         in mock_warning.call_args[0][0]
759 759
 
760 760
     assert mock_install.call_count == 1
761
-    assert mock_install.call_args[0][0] == [('namespace.collection', '*', None, None),
762
-                                            ('namespace2.collection', '1.0.1', None, None)]
761
+    requirements = [('%s.%s' % (r.namespace, r.name), r.ver, r.src, r.type,) for r in mock_install.call_args[0][0]]
762
+    assert requirements == [('namespace.collection', '*', None, 'galaxy'),
763
+                            ('namespace2.collection', '1.0.1', None, 'galaxy')]
763 764
     assert mock_install.call_args[0][1] == collection_path
764 765
     assert len(mock_install.call_args[0][2]) == 1
765 766
     assert mock_install.call_args[0][2][0].api_server == 'https://galaxy.ansible.com'
766 767
     assert mock_install.call_args[0][2][0].validate_certs is True
767
-    assert mock_install.call_args[0][3] is True
768
-    assert mock_install.call_args[0][4] is False
769
-    assert mock_install.call_args[0][5] is False
770
-    assert mock_install.call_args[0][6] is False
771
-    assert mock_install.call_args[0][7] is False
768
+    assert mock_install.call_args[0][3] is False  # ignore_errors
769
+    assert mock_install.call_args[0][4] is False  # no_deps
770
+    assert mock_install.call_args[0][5] is False  # force
771
+    assert mock_install.call_args[0][6] is False  # force_deps
772 772
 
773 773
 
774 774
 def test_collection_install_with_requirements_file(collection_install):
... ...
@@ -795,17 +802,16 @@ collections:
795 795
         in mock_warning.call_args[0][0]
796 796
 
797 797
     assert mock_install.call_count == 1
798
-    assert mock_install.call_args[0][0] == [('namespace.coll', '*', None, None),
799
-                                            ('namespace2.coll', '>2.0.1', None, None)]
798
+    requirements = [('%s.%s' % (r.namespace, r.name), r.ver, r.src, r.type,) for r in mock_install.call_args[0][0]]
799
+    assert requirements == [('namespace.coll', '*', None, 'galaxy'),
800
+                            ('namespace2.coll', '>2.0.1', None, 'galaxy')]
800 801
     assert mock_install.call_args[0][1] == collection_path
801
-    assert len(mock_install.call_args[0][2]) == 1
802 802
     assert mock_install.call_args[0][2][0].api_server == 'https://galaxy.ansible.com'
803 803
     assert mock_install.call_args[0][2][0].validate_certs is True
804
-    assert mock_install.call_args[0][3] is True
805
-    assert mock_install.call_args[0][4] is False
806
-    assert mock_install.call_args[0][5] is False
807
-    assert mock_install.call_args[0][6] is False
808
-    assert mock_install.call_args[0][7] is False
804
+    assert mock_install.call_args[0][3] is False  # ignore_errors
805
+    assert mock_install.call_args[0][4] is False  # no_deps
806
+    assert mock_install.call_args[0][5] is False  # force
807
+    assert mock_install.call_args[0][6] is False  # force_deps
809 808
 
810 809
 
811 810
 def test_collection_install_with_relative_path(collection_install, monkeypatch):
... ...
@@ -829,11 +835,10 @@ def test_collection_install_with_relative_path(collection_install, monkeypatch):
829 829
     assert len(mock_install.call_args[0][2]) == 1
830 830
     assert mock_install.call_args[0][2][0].api_server == 'https://galaxy.ansible.com'
831 831
     assert mock_install.call_args[0][2][0].validate_certs is True
832
-    assert mock_install.call_args[0][3] is True
833
-    assert mock_install.call_args[0][4] is False
834
-    assert mock_install.call_args[0][5] is False
835
-    assert mock_install.call_args[0][6] is False
836
-    assert mock_install.call_args[0][7] is False
832
+    assert mock_install.call_args[0][3] is False  # ignore_errors
833
+    assert mock_install.call_args[0][4] is False  # no_deps
834
+    assert mock_install.call_args[0][5] is False  # force
835
+    assert mock_install.call_args[0][6] is False  # force_deps
837 836
 
838 837
     assert mock_req.call_count == 1
839 838
     assert mock_req.call_args[0][0] == os.path.abspath(requirements_file)
... ...
@@ -860,11 +865,10 @@ def test_collection_install_with_unexpanded_path(collection_install, monkeypatch
860 860
     assert len(mock_install.call_args[0][2]) == 1
861 861
     assert mock_install.call_args[0][2][0].api_server == 'https://galaxy.ansible.com'
862 862
     assert mock_install.call_args[0][2][0].validate_certs is True
863
-    assert mock_install.call_args[0][3] is True
864
-    assert mock_install.call_args[0][4] is False
865
-    assert mock_install.call_args[0][5] is False
866
-    assert mock_install.call_args[0][6] is False
867
-    assert mock_install.call_args[0][7] is False
863
+    assert mock_install.call_args[0][3] is False  # ignore_errors
864
+    assert mock_install.call_args[0][4] is False  # no_deps
865
+    assert mock_install.call_args[0][5] is False  # force
866
+    assert mock_install.call_args[0][6] is False  # force_deps
868 867
 
869 868
     assert mock_req.call_count == 1
870 869
     assert mock_req.call_args[0][0] == os.path.expanduser(os.path.expandvars(requirements_file))
... ...
@@ -882,22 +886,28 @@ def test_collection_install_in_collection_dir(collection_install, monkeypatch):
882 882
     assert mock_warning.call_count == 0
883 883
 
884 884
     assert mock_install.call_count == 1
885
-    assert mock_install.call_args[0][0] == [('namespace.collection', '*', None, None),
886
-                                            ('namespace2.collection', '1.0.1', None, None)]
885
+    requirements = [('%s.%s' % (r.namespace, r.name), r.ver, r.src, r.type,) for r in mock_install.call_args[0][0]]
886
+    assert requirements == [('namespace.collection', '*', None, 'galaxy'),
887
+                            ('namespace2.collection', '1.0.1', None, 'galaxy')]
887 888
     assert mock_install.call_args[0][1] == os.path.join(collections_path, 'ansible_collections')
888 889
     assert len(mock_install.call_args[0][2]) == 1
889 890
     assert mock_install.call_args[0][2][0].api_server == 'https://galaxy.ansible.com'
890 891
     assert mock_install.call_args[0][2][0].validate_certs is True
891
-    assert mock_install.call_args[0][3] is True
892
-    assert mock_install.call_args[0][4] is False
893
-    assert mock_install.call_args[0][5] is False
894
-    assert mock_install.call_args[0][6] is False
895
-    assert mock_install.call_args[0][7] is False
892
+    assert mock_install.call_args[0][3] is False  # ignore_errors
893
+    assert mock_install.call_args[0][4] is False  # no_deps
894
+    assert mock_install.call_args[0][5] is False  # force
895
+    assert mock_install.call_args[0][6] is False  # force_deps
896 896
 
897 897
 
898
-def test_collection_install_with_url(collection_install):
898
+def test_collection_install_with_url(monkeypatch, collection_install):
899 899
     mock_install, dummy, output_dir = collection_install
900 900
 
901
+    mock_open = MagicMock(return_value=BytesIO())
902
+    monkeypatch.setattr(collection.concrete_artifact_manager, 'open_url', mock_open)
903
+
904
+    mock_metadata = MagicMock(return_value={'namespace': 'foo', 'name': 'bar', 'version': 'v1.0.0'})
905
+    monkeypatch.setattr(collection.concrete_artifact_manager, '_get_meta_from_tar', mock_metadata)
906
+
901 907
     galaxy_args = ['ansible-galaxy', 'collection', 'install', 'https://foo/bar/foo-bar-v1.0.0.tar.gz',
902 908
                    '--collections-path', output_dir]
903 909
     GalaxyCLI(args=galaxy_args).run()
... ...
@@ -906,16 +916,16 @@ def test_collection_install_with_url(collection_install):
906 906
     assert os.path.isdir(collection_path)
907 907
 
908 908
     assert mock_install.call_count == 1
909
-    assert mock_install.call_args[0][0] == [('https://foo/bar/foo-bar-v1.0.0.tar.gz', '*', None, None)]
909
+    requirements = [('%s.%s' % (r.namespace, r.name), r.ver, r.src, r.type,) for r in mock_install.call_args[0][0]]
910
+    assert requirements == [('foo.bar', 'v1.0.0', 'https://foo/bar/foo-bar-v1.0.0.tar.gz', 'url')]
910 911
     assert mock_install.call_args[0][1] == collection_path
911 912
     assert len(mock_install.call_args[0][2]) == 1
912 913
     assert mock_install.call_args[0][2][0].api_server == 'https://galaxy.ansible.com'
913 914
     assert mock_install.call_args[0][2][0].validate_certs is True
914
-    assert mock_install.call_args[0][3] is True
915
-    assert mock_install.call_args[0][4] is False
916
-    assert mock_install.call_args[0][5] is False
917
-    assert mock_install.call_args[0][6] is False
918
-    assert mock_install.call_args[0][7] is False
915
+    assert mock_install.call_args[0][3] is False  # ignore_errors
916
+    assert mock_install.call_args[0][4] is False  # no_deps
917
+    assert mock_install.call_args[0][5] is False  # force
918
+    assert mock_install.call_args[0][6] is False  # force_deps
919 919
 
920 920
 
921 921
 def test_collection_install_name_and_requirements_fail(collection_install):
... ...
@@ -951,17 +961,17 @@ def test_collection_install_path_with_ansible_collections(collection_install):
951 951
         % collection_path in mock_warning.call_args[0][0]
952 952
 
953 953
     assert mock_install.call_count == 1
954
-    assert mock_install.call_args[0][0] == [('namespace.collection', '*', None, None),
955
-                                            ('namespace2.collection', '1.0.1', None, None)]
954
+    requirements = [('%s.%s' % (r.namespace, r.name), r.ver, r.src, r.type,) for r in mock_install.call_args[0][0]]
955
+    assert requirements == [('namespace.collection', '*', None, 'galaxy'),
956
+                            ('namespace2.collection', '1.0.1', None, 'galaxy')]
956 957
     assert mock_install.call_args[0][1] == collection_path
957 958
     assert len(mock_install.call_args[0][2]) == 1
958 959
     assert mock_install.call_args[0][2][0].api_server == 'https://galaxy.ansible.com'
959 960
     assert mock_install.call_args[0][2][0].validate_certs is True
960
-    assert mock_install.call_args[0][3] is True
961
-    assert mock_install.call_args[0][4] is False
962
-    assert mock_install.call_args[0][5] is False
963
-    assert mock_install.call_args[0][6] is False
964
-    assert mock_install.call_args[0][7] is False
961
+    assert mock_install.call_args[0][3] is False  # ignore_errors
962
+    assert mock_install.call_args[0][4] is False  # no_deps
963
+    assert mock_install.call_args[0][5] is False  # force
964
+    assert mock_install.call_args[0][6] is False  # force_deps
965 965
 
966 966
 
967 967
 def test_collection_install_ignore_certs(collection_install):
... ...
@@ -981,7 +991,8 @@ def test_collection_install_force(collection_install):
981 981
                    '--force']
982 982
     GalaxyCLI(args=galaxy_args).run()
983 983
 
984
-    assert mock_install.call_args[0][6] is True
984
+    # mock_install args: collections, output_path, apis, ignore_errors, no_deps, force, force_deps
985
+    assert mock_install.call_args[0][5] is True
985 986
 
986 987
 
987 988
 def test_collection_install_force_deps(collection_install):
... ...
@@ -991,7 +1002,8 @@ def test_collection_install_force_deps(collection_install):
991 991
                    '--force-with-deps']
992 992
     GalaxyCLI(args=galaxy_args).run()
993 993
 
994
-    assert mock_install.call_args[0][7] is True
994
+    # mock_install args: collections, output_path, apis, ignore_errors, no_deps, force, force_deps
995
+    assert mock_install.call_args[0][6] is True
995 996
 
996 997
 
997 998
 def test_collection_install_no_deps(collection_install):
... ...
@@ -1001,7 +1013,8 @@ def test_collection_install_no_deps(collection_install):
1001 1001
                    '--no-deps']
1002 1002
     GalaxyCLI(args=galaxy_args).run()
1003 1003
 
1004
-    assert mock_install.call_args[0][5] is True
1004
+    # mock_install args: collections, output_path, apis, ignore_errors, no_deps, force, force_deps
1005
+    assert mock_install.call_args[0][4] is True
1005 1006
 
1006 1007
 
1007 1008
 def test_collection_install_ignore(collection_install):
... ...
@@ -1011,7 +1024,8 @@ def test_collection_install_ignore(collection_install):
1011 1011
                    '--ignore-errors']
1012 1012
     GalaxyCLI(args=galaxy_args).run()
1013 1013
 
1014
-    assert mock_install.call_args[0][4] is True
1014
+    # mock_install args: collections, output_path, apis, ignore_errors, no_deps, force, force_deps
1015
+    assert mock_install.call_args[0][3] is True
1015 1016
 
1016 1017
 
1017 1018
 def test_collection_install_custom_server(collection_install):
... ...
@@ -1080,7 +1094,13 @@ collections:
1080 1080
 - version: 1.0.0
1081 1081
 '''], indirect=True)
1082 1082
 def test_parse_requirements_without_mandatory_name_key(requirements_cli, requirements_file):
1083
-    expected = "Collections requirement entry should contain the key name."
1083
+    # Used to be "Collections requirement entry should contain the key name."
1084
+    # Should we check that either source or name is provided before using the dep resolver?
1085
+
1086
+    expected = "Neither the collection requirement entry key 'name', nor 'source' point to a concrete resolvable collection artifact. "
1087
+    expected += r"Also 'name' is not an FQCN\. A valid collection name must be in the format <namespace>\.<collection>\. "
1088
+    expected += r"Please make sure that the namespace and the collection name  contain characters from \[a\-zA\-Z0\-9_\] only\."
1089
+
1084 1090
     with pytest.raises(AnsibleError, match=expected):
1085 1091
         requirements_cli._parse_requirements_file(requirements_file)
1086 1092
 
... ...
@@ -1097,9 +1117,10 @@ collections:
1097 1097
 def test_parse_requirements(requirements_cli, requirements_file):
1098 1098
     expected = {
1099 1099
         'roles': [],
1100
-        'collections': [('namespace.collection1', '*', None, None), ('namespace.collection2', '*', None, None)]
1100
+        'collections': [('namespace.collection1', '*', None, 'galaxy'), ('namespace.collection2', '*', None, 'galaxy')]
1101 1101
     }
1102 1102
     actual = requirements_cli._parse_requirements_file(requirements_file)
1103
+    actual['collections'] = [('%s.%s' % (r.namespace, r.name), r.ver, r.src, r.type,) for r in actual.get('collections', [])]
1103 1104
 
1104 1105
     assert actual == expected
1105 1106
 
... ...
@@ -1112,19 +1133,15 @@ collections:
1112 1112
 - namespace.collection2'''], indirect=True)
1113 1113
 def test_parse_requirements_with_extra_info(requirements_cli, requirements_file):
1114 1114
     actual = requirements_cli._parse_requirements_file(requirements_file)
1115
+    actual['collections'] = [('%s.%s' % (r.namespace, r.name), r.ver, r.src, r.type,) for r in actual.get('collections', [])]
1115 1116
 
1116 1117
     assert len(actual['roles']) == 0
1117 1118
     assert len(actual['collections']) == 2
1118 1119
     assert actual['collections'][0][0] == 'namespace.collection1'
1119 1120
     assert actual['collections'][0][1] == '>=1.0.0,<=2.0.0'
1120 1121
     assert actual['collections'][0][2].api_server == 'https://galaxy-dev.ansible.com'
1121
-    assert actual['collections'][0][2].name == 'explicit_requirement_namespace.collection1'
1122
-    assert actual['collections'][0][2].token is None
1123
-    assert actual['collections'][0][2].username is None
1124
-    assert actual['collections'][0][2].password is None
1125
-    assert actual['collections'][0][2].validate_certs is True
1126 1122
 
1127
-    assert actual['collections'][1] == ('namespace.collection2', '*', None, None)
1123
+    assert actual['collections'][1] == ('namespace.collection2', '*', None, 'galaxy')
1128 1124
 
1129 1125
 
1130 1126
 @pytest.mark.parametrize('requirements_file', ['''
... ...
@@ -1139,6 +1156,7 @@ collections:
1139 1139
 '''], indirect=True)
1140 1140
 def test_parse_requirements_with_roles_and_collections(requirements_cli, requirements_file):
1141 1141
     actual = requirements_cli._parse_requirements_file(requirements_file)
1142
+    actual['collections'] = [('%s.%s' % (r.namespace, r.name), r.ver, r.src, r.type,) for r in actual.get('collections', [])]
1142 1143
 
1143 1144
     assert len(actual['roles']) == 3
1144 1145
     assert actual['roles'][0].name == 'username.role_name'
... ...
@@ -1147,7 +1165,7 @@ def test_parse_requirements_with_roles_and_collections(requirements_cli, require
1147 1147
     assert actual['roles'][2].src == 'ssh://github.com/user/repo'
1148 1148
 
1149 1149
     assert len(actual['collections']) == 1
1150
-    assert actual['collections'][0] == ('namespace.collection2', '*', None, None)
1150
+    assert actual['collections'][0] == ('namespace.collection2', '*', None, 'galaxy')
1151 1151
 
1152 1152
 
1153 1153
 @pytest.mark.parametrize('requirements_file', ['''
... ...
@@ -1163,18 +1181,19 @@ def test_parse_requirements_with_collection_source(requirements_cli, requirement
1163 1163
     requirements_cli.api_servers.append(galaxy_api)
1164 1164
 
1165 1165
     actual = requirements_cli._parse_requirements_file(requirements_file)
1166
+    actual['collections'] = [('%s.%s' % (r.namespace, r.name), r.ver, r.src, r.type,) for r in actual.get('collections', [])]
1166 1167
 
1167 1168
     assert actual['roles'] == []
1168 1169
     assert len(actual['collections']) == 3
1169
-    assert actual['collections'][0] == ('namespace.collection', '*', None, None)
1170
+    assert actual['collections'][0] == ('namespace.collection', '*', None, 'galaxy')
1170 1171
 
1171 1172
     assert actual['collections'][1][0] == 'namespace2.collection2'
1172 1173
     assert actual['collections'][1][1] == '*'
1173 1174
     assert actual['collections'][1][2].api_server == 'https://galaxy-dev.ansible.com/'
1174
-    assert actual['collections'][1][2].name == 'explicit_requirement_namespace2.collection2'
1175
-    assert actual['collections'][1][2].token is None
1176 1175
 
1177
-    assert actual['collections'][2] == ('namespace3.collection3', '*', galaxy_api, None)
1176
+    assert actual['collections'][2][0] == 'namespace3.collection3'
1177
+    assert actual['collections'][2][1] == '*'
1178
+    assert actual['collections'][2][2].api_server == 'https://config-server'
1178 1179
 
1179 1180
 
1180 1181
 @pytest.mark.parametrize('requirements_file', ['''
... ...
@@ -1230,7 +1249,8 @@ def test_install_implicit_role_with_collections(requirements_file, monkeypatch):
1230 1230
     cli.run()
1231 1231
 
1232 1232
     assert mock_collection_install.call_count == 1
1233
-    assert mock_collection_install.call_args[0][0] == [('namespace.name', '*', None, None)]
1233
+    requirements = [('%s.%s' % (r.namespace, r.name), r.ver, r.src, r.type,) for r in mock_collection_install.call_args[0][0]]
1234
+    assert requirements == [('namespace.name', '*', None, 'galaxy')]
1234 1235
     assert mock_collection_install.call_args[0][1] == cli._get_default_collection_path()
1235 1236
 
1236 1237
     assert mock_role_install.call_count == 1
... ...
@@ -1328,8 +1348,8 @@ def test_install_collection_with_roles(requirements_file, monkeypatch):
1328 1328
     cli.run()
1329 1329
 
1330 1330
     assert mock_collection_install.call_count == 1
1331
-    assert mock_collection_install.call_args[0][0] == [('namespace.name', '*', None, None)]
1332
-    assert mock_collection_install.call_args[0][1] == cli._get_default_collection_path()
1331
+    requirements = [('%s.%s' % (r.namespace, r.name), r.ver, r.src, r.type,) for r in mock_collection_install.call_args[0][0]]
1332
+    assert requirements == [('namespace.name', '*', None, 'galaxy')]
1333 1333
 
1334 1334
     assert mock_role_install.call_count == 0
1335 1335
 
... ...
@@ -56,7 +56,7 @@ def collection_input(tmp_path_factory):
56 56
 def collection_artifact(monkeypatch, tmp_path_factory):
57 57
     ''' Creates a temp collection artifact and mocked open_url instance for publishing tests '''
58 58
     mock_open = MagicMock()
59
-    monkeypatch.setattr(collection, 'open_url', mock_open)
59
+    monkeypatch.setattr(collection.concrete_artifact_manager, 'open_url', mock_open)
60 60
 
61 61
     mock_uuid = MagicMock()
62 62
     mock_uuid.return_value.hex = 'uuid'
... ...
@@ -76,13 +76,13 @@ def collection_artifact(monkeypatch, tmp_path_factory):
76 76
 
77 77
 
78 78
 @pytest.fixture()
79
-def galaxy_yml(request, tmp_path_factory):
79
+def galaxy_yml_dir(request, tmp_path_factory):
80 80
     b_test_dir = to_bytes(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections'))
81 81
     b_galaxy_yml = os.path.join(b_test_dir, b'galaxy.yml')
82 82
     with open(b_galaxy_yml, 'wb') as galaxy_obj:
83 83
         galaxy_obj.write(to_bytes(request.param))
84 84
 
85
-    yield b_galaxy_yml
85
+    yield b_test_dir
86 86
 
87 87
 
88 88
 @pytest.fixture()
... ...
@@ -198,31 +198,12 @@ def manifest(manifest_info):
198 198
             yield fake_file, sha256(b_data).hexdigest()
199 199
 
200 200
 
201
-@pytest.fixture()
202
-def mock_collection(galaxy_server):
203
-    def create_mock_collection(namespace='ansible_namespace', name='collection', version='0.1.0', local=True, local_installed=True):
204
-        b_path = None
205
-        force = False
206
-
207
-        if local:
208
-            mock_collection = collection.CollectionRequirement(namespace, name, b_path, galaxy_server, [version], version, force, skip=local_installed)
209
-        else:
210
-            download_url = 'https://galaxy.ansible.com/download/{0}-{1}-{2}.tar.gz'.format(namespace, name, version)
211
-            digest = '19415a6a6df831df61cffde4a09d1d89ac8d8ca5c0586e85bea0b106d6dff29a'
212
-            dependencies = {}
213
-            metadata = api.CollectionVersionMetadata(namespace, name, version, download_url, digest, dependencies)
214
-            mock_collection = collection.CollectionRequirement(namespace, name, b_path, galaxy_server, [version], version, force, metadata=metadata)
215
-
216
-        return mock_collection
217
-    return create_mock_collection
218
-
219
-
220 201
 def test_build_collection_no_galaxy_yaml():
221 202
     fake_path = u'/fake/ÅÑŚÌβŁÈ/path'
222 203
     expected = to_native("The collection galaxy.yml path '%s/galaxy.yml' does not exist." % fake_path)
223 204
 
224 205
     with pytest.raises(AnsibleError, match=expected):
225
-        collection.build_collection(fake_path, 'output', False)
206
+        collection.build_collection(fake_path, u'output', False)
226 207
 
227 208
 
228 209
 def test_build_existing_output_file(collection_input):
... ...
@@ -234,7 +215,7 @@ def test_build_existing_output_file(collection_input):
234 234
     expected = "The output collection artifact '%s' already exists, but is a directory - aborting" \
235 235
                % to_native(existing_output_dir)
236 236
     with pytest.raises(AnsibleError, match=expected):
237
-        collection.build_collection(input_dir, output_dir, False)
237
+        collection.build_collection(to_text(input_dir, errors='surrogate_or_strict'), to_text(output_dir, errors='surrogate_or_strict'), False)
238 238
 
239 239
 
240 240
 def test_build_existing_output_without_force(collection_input):
... ...
@@ -248,7 +229,7 @@ def test_build_existing_output_without_force(collection_input):
248 248
     expected = "The file '%s' already exists. You can use --force to re-create the collection artifact." \
249 249
                % to_native(existing_output)
250 250
     with pytest.raises(AnsibleError, match=expected):
251
-        collection.build_collection(input_dir, output_dir, False)
251
+        collection.build_collection(to_text(input_dir, errors='surrogate_or_strict'), to_text(output_dir, errors='surrogate_or_strict'), False)
252 252
 
253 253
 
254 254
 def test_build_existing_output_with_force(collection_input):
... ...
@@ -259,55 +240,57 @@ def test_build_existing_output_with_force(collection_input):
259 259
         out_file.write("random garbage")
260 260
         out_file.flush()
261 261
 
262
-    collection.build_collection(input_dir, output_dir, True)
262
+    collection.build_collection(to_text(input_dir, errors='surrogate_or_strict'), to_text(output_dir, errors='surrogate_or_strict'), True)
263 263
 
264 264
     # Verify the file was replaced with an actual tar file
265 265
     assert tarfile.is_tarfile(existing_output)
266 266
 
267 267
 
268
-@pytest.mark.parametrize('galaxy_yml', [b'namespace: value: broken'], indirect=True)
269
-def test_invalid_yaml_galaxy_file(galaxy_yml):
270
-    expected = to_native(b"Failed to parse the galaxy.yml at '%s' with the following error:" % galaxy_yml)
268
+@pytest.mark.parametrize('galaxy_yml_dir', [b'namespace: value: broken'], indirect=True)
269
+def test_invalid_yaml_galaxy_file(galaxy_yml_dir):
270
+    galaxy_file = os.path.join(galaxy_yml_dir, b'galaxy.yml')
271
+    expected = to_native(b"Failed to parse the galaxy.yml at '%s' with the following error:" % galaxy_file)
271 272
 
272 273
     with pytest.raises(AnsibleError, match=expected):
273
-        collection._get_galaxy_yml(galaxy_yml)
274
+        collection.concrete_artifact_manager._get_meta_from_src_dir(galaxy_yml_dir)
274 275
 
275 276
 
276
-@pytest.mark.parametrize('galaxy_yml', [b'namespace: test_namespace'], indirect=True)
277
-def test_missing_required_galaxy_key(galaxy_yml):
277
+@pytest.mark.parametrize('galaxy_yml_dir', [b'namespace: test_namespace'], indirect=True)
278
+def test_missing_required_galaxy_key(galaxy_yml_dir):
279
+    galaxy_file = os.path.join(galaxy_yml_dir, b'galaxy.yml')
278 280
     expected = "The collection galaxy.yml at '%s' is missing the following mandatory keys: authors, name, " \
279
-               "readme, version" % to_native(galaxy_yml)
281
+               "readme, version" % to_native(galaxy_file)
280 282
 
281 283
     with pytest.raises(AnsibleError, match=expected):
282
-        collection._get_galaxy_yml(galaxy_yml)
284
+        collection.concrete_artifact_manager._get_meta_from_src_dir(galaxy_yml_dir)
283 285
 
284 286
 
285
-@pytest.mark.parametrize('galaxy_yml', [b"""
287
+@pytest.mark.parametrize('galaxy_yml_dir', [b"""
286 288
 namespace: namespace
287 289
 name: collection
288 290
 authors: Jordan
289 291
 version: 0.1.0
290 292
 readme: README.md
291 293
 invalid: value"""], indirect=True)
292
-def test_warning_extra_keys(galaxy_yml, monkeypatch):
294
+def test_warning_extra_keys(galaxy_yml_dir, monkeypatch):
293 295
     display_mock = MagicMock()
294 296
     monkeypatch.setattr(Display, 'warning', display_mock)
295 297
 
296
-    collection._get_galaxy_yml(galaxy_yml)
298
+    collection.concrete_artifact_manager._get_meta_from_src_dir(galaxy_yml_dir)
297 299
 
298 300
     assert display_mock.call_count == 1
299
-    assert display_mock.call_args[0][0] == "Found unknown keys in collection galaxy.yml at '%s': invalid"\
300
-        % to_text(galaxy_yml)
301
+    assert display_mock.call_args[0][0] == "Found unknown keys in collection galaxy.yml at '%s/galaxy.yml': invalid"\
302
+        % to_text(galaxy_yml_dir)
301 303
 
302 304
 
303
-@pytest.mark.parametrize('galaxy_yml', [b"""
305
+@pytest.mark.parametrize('galaxy_yml_dir', [b"""
304 306
 namespace: namespace
305 307
 name: collection
306 308
 authors: Jordan
307 309
 version: 0.1.0
308 310
 readme: README.md"""], indirect=True)
309
-def test_defaults_galaxy_yml(galaxy_yml):
310
-    actual = collection._get_galaxy_yml(galaxy_yml)
311
+def test_defaults_galaxy_yml(galaxy_yml_dir):
312
+    actual = collection.concrete_artifact_manager._get_meta_from_src_dir(galaxy_yml_dir)
311 313
 
312 314
     assert actual['namespace'] == 'namespace'
313 315
     assert actual['name'] == 'collection'
... ...
@@ -321,10 +304,10 @@ def test_defaults_galaxy_yml(galaxy_yml):
321 321
     assert actual['issues'] is None
322 322
     assert actual['tags'] == []
323 323
     assert actual['dependencies'] == {}
324
-    assert actual['license_ids'] == []
324
+    assert actual['license'] == []
325 325
 
326 326
 
327
-@pytest.mark.parametrize('galaxy_yml', [(b"""
327
+@pytest.mark.parametrize('galaxy_yml_dir', [(b"""
328 328
 namespace: namespace
329 329
 name: collection
330 330
 authors: Jordan
... ...
@@ -338,9 +321,9 @@ version: 0.1.0
338 338
 readme: README.md
339 339
 license:
340 340
 - MIT""")], indirect=True)
341
-def test_galaxy_yml_list_value(galaxy_yml):
342
-    actual = collection._get_galaxy_yml(galaxy_yml)
343
-    assert actual['license_ids'] == ['MIT']
341
+def test_galaxy_yml_list_value(galaxy_yml_dir):
342
+    actual = collection.concrete_artifact_manager._get_meta_from_src_dir(galaxy_yml_dir)
343
+    assert actual['license'] == ['MIT']
344 344
 
345 345
 
346 346
 def test_build_ignore_files_and_folders(collection_input, monkeypatch):
... ...
@@ -529,7 +512,7 @@ def test_build_with_symlink_inside_collection(collection_input):
529 529
     os.symlink(roles_target, roles_link)
530 530
     os.symlink(os.path.join(input_dir, 'README.md'), file_link)
531 531
 
532
-    collection.build_collection(input_dir, output_dir, False)
532
+    collection.build_collection(to_text(input_dir, errors='surrogate_or_strict'), to_text(output_dir, errors='surrogate_or_strict'), False)
533 533
 
534 534
     output_artifact = os.path.join(output_dir, 'ansible_namespace-collection-0.1.0.tar.gz')
535 535
     assert tarfile.is_tarfile(output_artifact)
... ...
@@ -603,6 +586,7 @@ def test_publish_with_wait(galaxy_server, collection_artifact, monkeypatch):
603 603
 
604 604
 def test_find_existing_collections(tmp_path_factory, monkeypatch):
605 605
     test_dir = to_text(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections'))
606
+    concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(test_dir, validate_certs=False)
606 607
     collection1 = os.path.join(test_dir, 'namespace1', 'collection1')
607 608
     collection2 = os.path.join(test_dir, 'namespace2', 'collection2')
608 609
     fake_collection1 = os.path.join(test_dir, 'namespace3', 'collection3')
... ...
@@ -631,32 +615,24 @@ def test_find_existing_collections(tmp_path_factory, monkeypatch):
631 631
     mock_warning = MagicMock()
632 632
     monkeypatch.setattr(Display, 'warning', mock_warning)
633 633
 
634
-    actual = collection.find_existing_collections(test_dir)
634
+    actual = list(collection.find_existing_collections(test_dir, artifacts_manager=concrete_artifact_cm))
635 635
 
636 636
     assert len(actual) == 2
637 637
     for actual_collection in actual:
638
-        assert actual_collection.skip is True
639
-
640
-        if str(actual_collection) == 'namespace1.collection1':
638
+        if '%s.%s' % (actual_collection.namespace, actual_collection.name) == 'namespace1.collection1':
641 639
             assert actual_collection.namespace == 'namespace1'
642 640
             assert actual_collection.name == 'collection1'
643
-            assert actual_collection.b_path == to_bytes(collection1)
644
-            assert actual_collection.api is None
645
-            assert actual_collection.versions == set(['1.2.3'])
646
-            assert actual_collection.latest_version == '1.2.3'
647
-            assert actual_collection.dependencies == {}
641
+            assert actual_collection.ver == '1.2.3'
642
+            assert to_text(actual_collection.src) == collection1
648 643
         else:
649 644
             assert actual_collection.namespace == 'namespace2'
650 645
             assert actual_collection.name == 'collection2'
651
-            assert actual_collection.b_path == to_bytes(collection2)
652
-            assert actual_collection.api is None
653
-            assert actual_collection.versions == set(['*'])
654
-            assert actual_collection.latest_version == '*'
655
-            assert actual_collection.dependencies == {}
646
+            assert actual_collection.ver == '*'
647
+            assert to_text(actual_collection.src) == collection2
656 648
 
657 649
     assert mock_warning.call_count == 1
658
-    assert mock_warning.mock_calls[0][1][0] == "Collection at '%s' does not have a MANIFEST.json file, cannot " \
659
-                                               "detect version." % to_text(collection2)
650
+    assert mock_warning.mock_calls[0][1][0] == "Collection at '%s' does not have a MANIFEST.json file, nor has it galaxy.yml: " \
651
+                                               "cannot detect version." % to_text(collection2)
660 652
 
661 653
 
662 654
 def test_download_file(tmp_path_factory, monkeypatch):
... ...
@@ -668,9 +644,9 @@ def test_download_file(tmp_path_factory, monkeypatch):
668 668
 
669 669
     mock_open = MagicMock()
670 670
     mock_open.return_value = BytesIO(data)
671
-    monkeypatch.setattr(collection, 'open_url', mock_open)
671
+    monkeypatch.setattr(collection.concrete_artifact_manager, 'open_url', mock_open)
672 672
 
673
-    expected = os.path.join(temp_dir, b'file')
673
+    expected = temp_dir
674 674
     actual = collection._download_file('http://google.com/file', temp_dir, sha256_hash.hexdigest(), True)
675 675
 
676 676
     assert actual.startswith(expected)
... ...
@@ -689,7 +665,7 @@ def test_download_file_hash_mismatch(tmp_path_factory, monkeypatch):
689 689
 
690 690
     mock_open = MagicMock()
691 691
     mock_open.return_value = BytesIO(data)
692
-    monkeypatch.setattr(collection, 'open_url', mock_open)
692
+    monkeypatch.setattr(collection.concrete_artifact_manager, 'open_url', mock_open)
693 693
 
694 694
     expected = "Mismatch artifact hash with downloaded file"
695 695
     with pytest.raises(AnsibleError, match=expected):
... ...
@@ -772,7 +748,8 @@ def test_require_one_of_collections_requirements_with_collections():
772 772
 
773 773
     requirements = cli._require_one_of_collections_requirements(collections, '')['collections']
774 774
 
775
-    assert requirements == [('namespace1.collection1', '*', None, None), ('namespace2.collection1', '1.0.0', None, None)]
775
+    req_tuples = [('%s.%s' % (req.namespace, req.name), req.ver, req.src, req.type,) for req in requirements]
776
+    assert req_tuples == [('namespace1.collection1', '*', None, 'galaxy'), ('namespace2.collection1', '1.0.0', None, 'galaxy')]
776 777
 
777 778
 
778 779
 @patch('ansible.cli.galaxy.GalaxyCLI._parse_requirements_file')
... ...
@@ -821,13 +798,13 @@ def test_execute_verify_with_defaults(mock_verify_collections):
821 821
 
822 822
     assert mock_verify_collections.call_count == 1
823 823
 
824
-    requirements, search_paths, galaxy_apis, validate, ignore_errors = mock_verify_collections.call_args[0]
824
+    print("Call args {0}".format(mock_verify_collections.call_args[0]))
825
+    requirements, search_paths, galaxy_apis, ignore_errors = mock_verify_collections.call_args[0]
825 826
 
826
-    assert requirements == [('namespace.collection', '1.0.4', None, None)]
827
+    assert [('%s.%s' % (r.namespace, r.name), r.ver, r.src, r.type) for r in requirements] == [('namespace.collection', '1.0.4', None, 'galaxy')]
827 828
     for install_path in search_paths:
828 829
         assert install_path.endswith('ansible_collections')
829 830
     assert galaxy_apis[0].api_server == 'https://galaxy.ansible.com'
830
-    assert validate is True
831 831
     assert ignore_errors is False
832 832
 
833 833
 
... ...
@@ -840,13 +817,12 @@ def test_execute_verify(mock_verify_collections):
840 840
 
841 841
     assert mock_verify_collections.call_count == 1
842 842
 
843
-    requirements, search_paths, galaxy_apis, validate, ignore_errors = mock_verify_collections.call_args[0]
843
+    requirements, search_paths, galaxy_apis, ignore_errors = mock_verify_collections.call_args[0]
844 844
 
845
-    assert requirements == [('namespace.collection', '1.0.4', None, None)]
845
+    assert [('%s.%s' % (r.namespace, r.name), r.ver, r.src, r.type) for r in requirements] == [('namespace.collection', '1.0.4', None, 'galaxy')]
846 846
     for install_path in search_paths:
847 847
         assert install_path.endswith('ansible_collections')
848 848
     assert galaxy_apis[0].api_server == 'http://galaxy-dev.com'
849
-    assert validate is False
850 849
     assert ignore_errors is True
851 850
 
852 851
 
... ...
@@ -863,8 +839,7 @@ def test_verify_file_hash_deleted_file(manifest_info):
863 863
 
864 864
     with patch.object(builtins, 'open', mock_open(read_data=data)) as m:
865 865
         with patch.object(collection.os.path, 'isfile', MagicMock(return_value=False)) as mock_isfile:
866
-            collection_req = collection.CollectionRequirement(namespace, name, './', server, [version], version, False)
867
-            collection_req._verify_file_hash(b'path/', 'file', digest, error_queue)
866
+            collection._verify_file_hash(b'path/', 'file', digest, error_queue)
868 867
 
869 868
             assert mock_isfile.called_once
870 869
 
... ...
@@ -887,8 +862,7 @@ def test_verify_file_hash_matching_hash(manifest_info):
887 887
 
888 888
     with patch.object(builtins, 'open', mock_open(read_data=data)) as m:
889 889
         with patch.object(collection.os.path, 'isfile', MagicMock(return_value=True)) as mock_isfile:
890
-            collection_req = collection.CollectionRequirement(namespace, name, './', server, [version], version, False)
891
-            collection_req._verify_file_hash(b'path/', 'file', digest, error_queue)
890
+            collection._verify_file_hash(b'path/', 'file', digest, error_queue)
892 891
 
893 892
             assert mock_isfile.called_once
894 893
 
... ...
@@ -910,8 +884,7 @@ def test_verify_file_hash_mismatching_hash(manifest_info):
910 910
 
911 911
     with patch.object(builtins, 'open', mock_open(read_data=data)) as m:
912 912
         with patch.object(collection.os.path, 'isfile', MagicMock(return_value=True)) as mock_isfile:
913
-            collection_req = collection.CollectionRequirement(namespace, name, './', server, [version], version, False)
914
-            collection_req._verify_file_hash(b'path/', 'file', different_digest, error_queue)
913
+            collection._verify_file_hash(b'path/', 'file', different_digest, error_queue)
915 914
 
916 915
             assert mock_isfile.called_once
917 916
 
... ...
@@ -972,355 +945,3 @@ def test_get_json_from_tar_file(tmp_tarfile):
972 972
     data = collection._get_json_from_tar_file(tfile.name, 'MANIFEST.json')
973 973
 
974 974
     assert isinstance(data, dict)
975
-
976
-
977
-def test_verify_collection_not_installed(mock_collection):
978
-
979
-    local_collection = mock_collection(local_installed=False)
980
-    remote_collection = mock_collection(local=False)
981
-
982
-    with patch.object(collection.display, 'display') as mocked_display:
983
-        local_collection.verify(remote_collection, './', './')
984
-
985
-        assert mocked_display.called
986
-        assert mocked_display.call_args[0][0] == "'%s.%s' has not been installed, nothing to verify" % (local_collection.namespace, local_collection.name)
987
-
988
-
989
-def test_verify_successful_debug_info(monkeypatch, mock_collection):
990
-    local_collection = mock_collection()
991
-    remote_collection = mock_collection(local=False)
992
-
993
-    monkeypatch.setattr(collection, '_get_tar_file_hash', MagicMock())
994
-    monkeypatch.setattr(collection.CollectionRequirement, '_verify_file_hash', MagicMock())
995
-    monkeypatch.setattr(collection, '_get_json_from_tar_file', MagicMock())
996
-
997
-    with patch.object(collection.display, 'vvv') as mock_display:
998
-        local_collection.verify(remote_collection, './', './')
999
-
1000
-        namespace = local_collection.namespace
1001
-        name = local_collection.name
1002
-        version = local_collection.latest_version
1003
-
1004
-        assert mock_display.call_count == 4
1005
-        assert mock_display.call_args_list[0][0][0] == "Verifying '%s.%s:%s'." % (namespace, name, version)
1006
-        assert mock_display.call_args_list[1][0][0] == "Installed collection found at './%s/%s'" % (namespace, name)
1007
-        located = "Remote collection found at 'https://galaxy.ansible.com/download/%s-%s-%s.tar.gz'" % (namespace, name, version)
1008
-        assert mock_display.call_args_list[2][0][0] == located
1009
-        verified = "Successfully verified that checksums for '%s.%s:%s' match the remote collection" % (namespace, name, version)
1010
-        assert mock_display.call_args_list[3][0][0] == verified
1011
-
1012
-
1013
-def test_verify_different_versions(mock_collection):
1014
-
1015
-    local_collection = mock_collection(version='0.1.0')
1016
-    remote_collection = mock_collection(local=False, version='3.0.0')
1017
-
1018
-    with patch.object(collection.display, 'display') as mock_display:
1019
-        local_collection.verify(remote_collection, './', './')
1020
-
1021
-        namespace = local_collection.namespace
1022
-        name = local_collection.name
1023
-        installed_version = local_collection.latest_version
1024
-        compared_version = remote_collection.latest_version
1025
-
1026
-        msg = "%s.%s has the version '%s' but is being compared to '%s'" % (namespace, name, installed_version, compared_version)
1027
-
1028
-        assert mock_display.call_count == 1
1029
-        assert mock_display.call_args[0][0] == msg
1030
-
1031
-
1032
-@patch.object(builtins, 'open', mock_open())
1033
-def test_verify_modified_manifest(monkeypatch, mock_collection, manifest_info):
1034
-    local_collection = mock_collection()
1035
-    remote_collection = mock_collection(local=False)
1036
-
1037
-    monkeypatch.setattr(collection, '_get_tar_file_hash', MagicMock(side_effect=['manifest_checksum']))
1038
-    monkeypatch.setattr(collection, '_consume_file', MagicMock(side_effect=['manifest_checksum_modified', 'files_manifest_checksum']))
1039
-    monkeypatch.setattr(collection, '_get_json_from_tar_file', MagicMock(side_effect=[manifest_info, {'files': []}]))
1040
-    monkeypatch.setattr(collection.os.path, 'isfile', MagicMock(return_value=True))
1041
-
1042
-    with patch.object(collection.display, 'display') as mock_display:
1043
-        with patch.object(collection.display, 'vvv') as mock_debug:
1044
-            local_collection.verify(remote_collection, './', './')
1045
-
1046
-            namespace = local_collection.namespace
1047
-            name = local_collection.name
1048
-
1049
-            assert mock_display.call_count == 3
1050
-            assert mock_display.call_args_list[0][0][0] == 'Collection %s.%s contains modified content in the following files:' % (namespace, name)
1051
-            assert mock_display.call_args_list[1][0][0] == '%s.%s' % (namespace, name)
1052
-            assert mock_display.call_args_list[2][0][0] == '    MANIFEST.json'
1053
-
1054
-            # The -vvv output should show details (the checksums do not match)
1055
-            assert mock_debug.call_count == 5
1056
-            assert mock_debug.call_args_list[-1][0][0] == '    Expected: manifest_checksum\n    Found: manifest_checksum_modified'
1057
-
1058
-
1059
-@patch.object(builtins, 'open', mock_open())
1060
-def test_verify_modified_files_manifest(monkeypatch, mock_collection, manifest_info):
1061
-    local_collection = mock_collection()
1062
-    remote_collection = mock_collection(local=False)
1063
-
1064
-    monkeypatch.setattr(collection, '_get_tar_file_hash', MagicMock(side_effect=['manifest_checksum']))
1065
-    monkeypatch.setattr(collection, '_consume_file', MagicMock(side_effect=['manifest_checksum', 'files_manifest_checksum_modified']))
1066
-    monkeypatch.setattr(collection, '_get_json_from_tar_file', MagicMock(side_effect=[manifest_info, {'files': []}]))
1067
-    monkeypatch.setattr(collection.os.path, 'isfile', MagicMock(return_value=True))
1068
-
1069
-    with patch.object(collection.display, 'display') as mock_display:
1070
-        with patch.object(collection.display, 'vvv') as mock_debug:
1071
-            local_collection.verify(remote_collection, './', './')
1072
-
1073
-            namespace = local_collection.namespace
1074
-            name = local_collection.name
1075
-
1076
-            assert mock_display.call_count == 3
1077
-            assert mock_display.call_args_list[0][0][0] == 'Collection %s.%s contains modified content in the following files:' % (namespace, name)
1078
-            assert mock_display.call_args_list[1][0][0] == '%s.%s' % (namespace, name)
1079
-            assert mock_display.call_args_list[2][0][0] == '    FILES.json'
1080
-
1081
-            # The -vvv output should show details (the checksums do not match)
1082
-            assert mock_debug.call_count == 5
1083
-            assert mock_debug.call_args_list[-1][0][0] == '    Expected: files_manifest_checksum\n    Found: files_manifest_checksum_modified'
1084
-
1085
-
1086
-@patch.object(builtins, 'open', mock_open())
1087
-def test_verify_modified_files(monkeypatch, mock_collection, manifest_info, files_manifest_info):
1088
-
1089
-    local_collection = mock_collection()
1090
-    remote_collection = mock_collection(local=False)
1091
-
1092
-    monkeypatch.setattr(collection, '_get_tar_file_hash', MagicMock(side_effect=['manifest_checksum']))
1093
-    fakehashes = ['manifest_checksum', 'files_manifest_checksum', 'individual_file_checksum_modified']
1094
-    monkeypatch.setattr(collection, '_consume_file', MagicMock(side_effect=fakehashes))
1095
-    monkeypatch.setattr(collection, '_get_json_from_tar_file', MagicMock(side_effect=[manifest_info, files_manifest_info]))
1096
-    monkeypatch.setattr(collection.os.path, 'isfile', MagicMock(return_value=True))
1097
-
1098
-    with patch.object(collection.display, 'display') as mock_display:
1099
-        with patch.object(collection.display, 'vvv') as mock_debug:
1100
-            local_collection.verify(remote_collection, './', './')
1101
-
1102
-            namespace = local_collection.namespace
1103
-            name = local_collection.name
1104
-
1105
-            assert mock_display.call_count == 3
1106
-            assert mock_display.call_args_list[0][0][0] == 'Collection %s.%s contains modified content in the following files:' % (namespace, name)
1107
-            assert mock_display.call_args_list[1][0][0] == '%s.%s' % (namespace, name)
1108
-            assert mock_display.call_args_list[2][0][0] == '    README.md'
1109
-
1110
-            # The -vvv output should show details (the checksums do not match)
1111
-            assert mock_debug.call_count == 5
1112
-            assert mock_debug.call_args_list[-1][0][0] == '    Expected: individual_file_checksum\n    Found: individual_file_checksum_modified'
1113
-
1114
-
1115
-@patch.object(builtins, 'open', mock_open())
1116
-def test_verify_identical(monkeypatch, mock_collection, manifest_info, files_manifest_info):
1117
-
1118
-    local_collection = mock_collection()
1119
-    remote_collection = mock_collection(local=False)
1120
-
1121
-    monkeypatch.setattr(collection, '_get_tar_file_hash', MagicMock(side_effect=['manifest_checksum']))
1122
-    monkeypatch.setattr(collection, '_consume_file', MagicMock(side_effect=['manifest_checksum', 'files_manifest_checksum', 'individual_file_checksum']))
1123
-    monkeypatch.setattr(collection, '_get_json_from_tar_file', MagicMock(side_effect=[manifest_info, files_manifest_info]))
1124
-    monkeypatch.setattr(collection.os.path, 'isfile', MagicMock(return_value=True))
1125
-
1126
-    with patch.object(collection.display, 'display') as mock_display:
1127
-        with patch.object(collection.display, 'vvv') as mock_debug:
1128
-            local_collection.verify(remote_collection, './', './')
1129
-
1130
-            # Successful verification is quiet
1131
-            assert mock_display.call_count == 0
1132
-
1133
-            # The -vvv output should show the checksums not matching
1134
-            namespace = local_collection.namespace
1135
-            name = local_collection.name
1136
-            version = local_collection.latest_version
1137
-            success_msg = "Successfully verified that checksums for '%s.%s:%s' match the remote collection" % (namespace, name, version)
1138
-
1139
-            assert mock_debug.call_count == 4
1140
-            assert mock_debug.call_args_list[-1][0][0] == success_msg
1141
-
1142
-
1143
-@patch.object(os.path, 'isdir', return_value=True)
1144
-def test_verify_collections_no_version(mock_isdir, mock_collection, monkeypatch):
1145
-    namespace = 'ansible_namespace'
1146
-    name = 'collection'
1147
-    version = '*'  # Occurs if MANIFEST.json does not exist
1148
-
1149
-    local_collection = mock_collection(namespace=namespace, name=name, version=version)
1150
-    monkeypatch.setattr(collection.CollectionRequirement, 'from_path', MagicMock(return_value=local_collection))
1151
-
1152
-    collections = [('%s.%s' % (namespace, name), version, None)]
1153
-
1154
-    with pytest.raises(AnsibleError) as err:
1155
-        collection.verify_collections(collections, './', local_collection.api, False, False)
1156
-
1157
-    err_msg = 'Collection %s.%s does not appear to have a MANIFEST.json. ' % (namespace, name)
1158
-    err_msg += 'A MANIFEST.json is expected if the collection has been built and installed via ansible-galaxy.'
1159
-    assert err.value.message == err_msg
1160
-
1161
-
1162
-@patch.object(collection.CollectionRequirement, 'verify')
1163
-def test_verify_collections_not_installed(mock_verify, mock_collection, monkeypatch):
1164
-    namespace = 'ansible_namespace'
1165
-    name = 'collection'
1166
-    version = '1.0.0'
1167
-
1168
-    local_collection = mock_collection(local_installed=False)
1169
-
1170
-    found_remote = MagicMock(return_value=mock_collection(local=False))
1171
-    monkeypatch.setattr(collection.CollectionRequirement, 'from_name', found_remote)
1172
-
1173
-    collections = [('%s.%s' % (namespace, name), version, None, None)]
1174
-    search_path = './'
1175
-    validate_certs = False
1176
-    ignore_errors = False
1177
-    apis = [local_collection.api]
1178
-
1179
-    with patch.object(collection, '_download_file') as mock_download_file:
1180
-        with pytest.raises(AnsibleError) as err:
1181
-            collection.verify_collections(collections, search_path, apis, validate_certs, ignore_errors)
1182
-
1183
-    assert err.value.message == "Collection %s.%s is not installed in any of the collection paths." % (namespace, name)
1184
-
1185
-
1186
-@patch.object(collection.CollectionRequirement, 'verify')
1187
-def test_verify_collections_not_installed_ignore_errors(mock_verify, mock_collection, monkeypatch):
1188
-    namespace = 'ansible_namespace'
1189
-    name = 'collection'
1190
-    version = '1.0.0'
1191
-
1192
-    local_collection = mock_collection(local_installed=False)
1193
-
1194
-    found_remote = MagicMock(return_value=mock_collection(local=False))
1195
-    monkeypatch.setattr(collection.CollectionRequirement, 'from_name', found_remote)
1196
-
1197
-    collections = [('%s.%s' % (namespace, name), version, None)]
1198
-    search_path = './'
1199
-    validate_certs = False
1200
-    ignore_errors = True
1201
-    apis = [local_collection.api]
1202
-
1203
-    with patch.object(collection, '_download_file') as mock_download_file:
1204
-        with patch.object(Display, 'warning') as mock_warning:
1205
-            collection.verify_collections(collections, search_path, apis, validate_certs, ignore_errors)
1206
-
1207
-            skip_message = "Failed to verify collection %s.%s but skipping due to --ignore-errors being set." % (namespace, name)
1208
-            original_err = "Error: Collection %s.%s is not installed in any of the collection paths." % (namespace, name)
1209
-
1210
-            assert mock_warning.called
1211
-            assert mock_warning.call_args[0][0] == skip_message + " " + original_err
1212
-
1213
-
1214
-@patch.object(os.path, 'isdir', return_value=True)
1215
-@patch.object(collection.CollectionRequirement, 'verify')
1216
-def test_verify_collections_no_remote(mock_verify, mock_isdir, mock_collection, monkeypatch):
1217
-    namespace = 'ansible_namespace'
1218
-    name = 'collection'
1219
-    version = '1.0.0'
1220
-
1221
-    monkeypatch.setattr(os.path, 'isfile', MagicMock(side_effect=[False, True]))
1222
-    monkeypatch.setattr(collection.CollectionRequirement, 'from_path', MagicMock(return_value=mock_collection()))
1223
-
1224
-    collections = [('%s.%s' % (namespace, name), version, None)]
1225
-    search_path = './'
1226
-    validate_certs = False
1227
-    ignore_errors = False
1228
-    apis = []
1229
-
1230
-    with pytest.raises(AnsibleError) as err:
1231
-        collection.verify_collections(collections, search_path, apis, validate_certs, ignore_errors)
1232
-
1233
-    assert err.value.message == "Failed to find remote collection %s.%s:%s on any of the galaxy servers" % (namespace, name, version)
1234
-
1235
-
1236
-@patch.object(os.path, 'isdir', return_value=True)
1237
-@patch.object(collection.CollectionRequirement, 'verify')
1238
-def test_verify_collections_no_remote_ignore_errors(mock_verify, mock_isdir, mock_collection, monkeypatch):
1239
-    namespace = 'ansible_namespace'
1240
-    name = 'collection'
1241
-    version = '1.0.0'
1242
-
1243
-    monkeypatch.setattr(os.path, 'isfile', MagicMock(side_effect=[False, True]))
1244
-    monkeypatch.setattr(collection.CollectionRequirement, 'from_path', MagicMock(return_value=mock_collection()))
1245
-
1246
-    collections = [('%s.%s' % (namespace, name), version, None)]
1247
-    search_path = './'
1248
-    validate_certs = False
1249
-    ignore_errors = True
1250
-    apis = []
1251
-
1252
-    with patch.object(Display, 'warning') as mock_warning:
1253
-        collection.verify_collections(collections, search_path, apis, validate_certs, ignore_errors)
1254
-
1255
-        skip_message = "Failed to verify collection %s.%s but skipping due to --ignore-errors being set." % (namespace, name)
1256
-        original_err = "Error: Failed to find remote collection %s.%s:%s on any of the galaxy servers" % (namespace, name, version)
1257
-
1258
-        assert mock_warning.called
1259
-        assert mock_warning.call_args[0][0] == skip_message + " " + original_err
1260
-
1261
-
1262
-def test_verify_collections_tarfile(monkeypatch):
1263
-
1264
-    monkeypatch.setattr(os.path, 'isfile', MagicMock(return_value=True))
1265
-
1266
-    invalid_format = 'ansible_namespace-collection-0.1.0.tar.gz'
1267
-    collections = [(invalid_format, '*', None)]
1268
-
1269
-    with pytest.raises(AnsibleError) as err:
1270
-        collection.verify_collections(collections, './', [], False, False)
1271
-
1272
-    msg = "'%s' is not a valid collection name. The format namespace.name is expected." % invalid_format
1273
-    assert err.value.message == msg
1274
-
1275
-
1276
-def test_verify_collections_path(monkeypatch):
1277
-
1278
-    monkeypatch.setattr(os.path, 'isfile', MagicMock(return_value=False))
1279
-
1280
-    invalid_format = 'collections/collection_namespace/collection_name'
1281
-    collections = [(invalid_format, '*', None)]
1282
-
1283
-    with pytest.raises(AnsibleError) as err:
1284
-        collection.verify_collections(collections, './', [], False, False)
1285
-
1286
-    msg = "'%s' is not a valid collection name. The format namespace.name is expected." % invalid_format
1287
-    assert err.value.message == msg
1288
-
1289
-
1290
-def test_verify_collections_url(monkeypatch):
1291
-
1292
-    monkeypatch.setattr(os.path, 'isfile', MagicMock(return_value=False))
1293
-
1294
-    invalid_format = 'https://galaxy.ansible.com/download/ansible_namespace-collection-0.1.0.tar.gz'
1295
-    collections = [(invalid_format, '*', None)]
1296
-
1297
-    with pytest.raises(AnsibleError) as err:
1298
-        collection.verify_collections(collections, './', [], False, False)
1299
-
1300
-    msg = "'%s' is not a valid collection name. The format namespace.name is expected." % invalid_format
1301
-    assert err.value.message == msg
1302
-
1303
-
1304
-@patch.object(os.path, 'isdir', return_value=True)
1305
-@patch.object(collection.CollectionRequirement, 'verify')
1306
-def test_verify_collections_name(mock_verify, mock_isdir, mock_collection, monkeypatch):
1307
-    local_collection = mock_collection()
1308
-    monkeypatch.setattr(collection.CollectionRequirement, 'from_path', MagicMock(return_value=local_collection))
1309
-
1310
-    monkeypatch.setattr(os.path, 'isfile', MagicMock(side_effect=[False, True, False]))
1311
-
1312
-    located_remote_from_name = MagicMock(return_value=mock_collection(local=False))
1313
-    monkeypatch.setattr(collection.CollectionRequirement, 'from_name', located_remote_from_name)
1314
-
1315
-    with patch.object(collection, '_download_file') as mock_download_file:
1316
-
1317
-        collections = [('%s.%s' % (local_collection.namespace, local_collection.name), '%s' % local_collection.latest_version, None)]
1318
-        search_path = './'
1319
-        validate_certs = False
1320
-        ignore_errors = False
1321
-        apis = [local_collection.api]
1322
-
1323
-        collection.verify_collections(collections, search_path, apis, validate_certs, ignore_errors)
1324
-
1325
-        assert mock_download_file.call_count == 1
1326
-        assert located_remote_from_name.call_count == 1
... ...
@@ -24,12 +24,24 @@ import ansible.module_utils.six.moves.urllib.error as urllib_error
24 24
 from ansible import context
25 25
 from ansible.cli.galaxy import GalaxyCLI
26 26
 from ansible.errors import AnsibleError
27
-from ansible.galaxy import collection, api
27
+from ansible.galaxy import collection, api, dependency_resolution
28
+from ansible.galaxy.dependency_resolution.dataclasses import Candidate, Requirement
28 29
 from ansible.module_utils._text import to_bytes, to_native, to_text
29 30
 from ansible.utils import context_objects as co
30 31
 from ansible.utils.display import Display
31 32
 
32 33
 
34
class RequirementCandidates():
    """Records the candidate list produced by a wrapped callable.

    Used by tests to spy on ``CollectionDependencyProvider.find_matches``
    and inspect which candidates the resolver considered.
    """

    def __init__(self):
        # Result of the most recent call to the wrapped callable.
        self.candidates = []

    def func_wrapper(self, func):
        """Return ``func`` wrapped so that every call stores its result."""
        def run(*args, **kwargs):
            result = func(*args, **kwargs)
            self.candidates = result
            return result
        return run
43
+
44
+
33 45
 def call_galaxy_cli(args):
34 46
     orig = co.GlobalCLIArgs._Singleton__instance
35 47
     co.GlobalCLIArgs._Singleton__instance = None
... ...
@@ -160,16 +172,14 @@ def galaxy_server():
160 160
 
161 161
 
162 162
 def test_build_requirement_from_path(collection_artifact):
163
-    actual = collection.CollectionRequirement.from_path(collection_artifact[0], True)
163
+    tmp_path = os.path.join(os.path.split(collection_artifact[1])[0], b'temp')
164
+    concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(tmp_path, validate_certs=False)
165
+    actual = Requirement.from_dir_path_as_unknown(collection_artifact[0], concrete_artifact_cm)
164 166
 
165 167
     assert actual.namespace == u'ansible_namespace'
166 168
     assert actual.name == u'collection'
167
-    assert actual.b_path == collection_artifact[0]
168
-    assert actual.api is None
169
-    assert actual.skip is True
170
-    assert actual.versions == set([u'*'])
171
-    assert actual.latest_version == u'*'
172
-    assert actual.dependencies == {}
169
+    assert actual.src == collection_artifact[0]
170
+    assert actual.ver == u'0.1.0'
173 171
 
174 172
 
175 173
 @pytest.mark.parametrize('version', ['1.1.1', '1.1.0', '1.0.0'])
... ...
@@ -188,17 +198,15 @@ def test_build_requirement_from_path_with_manifest(version, collection_artifact)
188 188
     with open(manifest_path, 'wb') as manifest_obj:
189 189
         manifest_obj.write(to_bytes(manifest_value))
190 190
 
191
-    actual = collection.CollectionRequirement.from_path(collection_artifact[0], True)
191
+    tmp_path = os.path.join(os.path.split(collection_artifact[1])[0], b'temp')
192
+    concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(tmp_path, validate_certs=False)
193
+    actual = Requirement.from_dir_path_as_unknown(collection_artifact[0], concrete_artifact_cm)
192 194
 
193 195
     # While the folder name suggests a different collection, we treat MANIFEST.json as the source of truth.
194 196
     assert actual.namespace == u'namespace'
195 197
     assert actual.name == u'name'
196
-    assert actual.b_path == collection_artifact[0]
197
-    assert actual.api is None
198
-    assert actual.skip is True
199
-    assert actual.versions == set([to_text(version)])
200
-    assert actual.latest_version == to_text(version)
201
-    assert actual.dependencies == {'ansible_namespace.collection': '*'}
198
+    assert actual.src == collection_artifact[0]
199
+    assert actual.ver == to_text(version)
202 200
 
203 201
 
204 202
 def test_build_requirement_from_path_invalid_manifest(collection_artifact):
... ...
@@ -206,12 +214,19 @@ def test_build_requirement_from_path_invalid_manifest(collection_artifact):
206 206
     with open(manifest_path, 'wb') as manifest_obj:
207 207
         manifest_obj.write(b"not json")
208 208
 
209
-    expected = "Collection file at '%s' does not contain a valid json string." % to_native(manifest_path)
209
+    tmp_path = os.path.join(os.path.split(collection_artifact[1])[0], b'temp')
210
+    concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(tmp_path, validate_certs=False)
211
+
212
+    expected = "Collection tar file member MANIFEST.json does not contain a valid json string."
210 213
     with pytest.raises(AnsibleError, match=expected):
211
-        collection.CollectionRequirement.from_path(collection_artifact[0], True)
214
+        Requirement.from_dir_path_as_unknown(collection_artifact[0], concrete_artifact_cm)
212 215
 
213 216
 
214
-def test_build_requirement_from_path_no_version(collection_artifact, monkeypatch):
217
+def test_build_artifact_from_path_no_version(collection_artifact, monkeypatch):
218
+    mock_display = MagicMock()
219
+    monkeypatch.setattr(Display, 'display', mock_display)
220
+
221
+    # a collection artifact should always contain a valid version
215 222
     manifest_path = os.path.join(collection_artifact[0], b'MANIFEST.json')
216 223
     manifest_value = json.dumps({
217 224
         'collection_info': {
... ...
@@ -224,40 +239,56 @@ def test_build_requirement_from_path_no_version(collection_artifact, monkeypatch
224 224
     with open(manifest_path, 'wb') as manifest_obj:
225 225
         manifest_obj.write(to_bytes(manifest_value))
226 226
 
227
+    tmp_path = os.path.join(os.path.split(collection_artifact[1])[0], b'temp')
228
+    concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(tmp_path, validate_certs=False)
229
+
230
+    expected = (
231
+        '^Collection metadata file at `.*` is expected to have a valid SemVer '
232
+        'version value but got {empty_unicode_string!r}$'.
233
+        format(empty_unicode_string=u'')
234
+    )
235
+    with pytest.raises(AnsibleError, match=expected):
236
+        Requirement.from_dir_path_as_unknown(collection_artifact[0], concrete_artifact_cm)
237
+
238
+
239
def test_build_requirement_from_path_no_version(collection_artifact, monkeypatch):
    """A dev-tree collection whose galaxy.yml has an empty version resolves to the wildcard '*'."""
    mock_display = MagicMock()
    monkeypatch.setattr(Display, 'display', mock_display)

    # version may be falsey/arbitrary strings for collections in development
    b_collection_dir, b_artifact_tarball = collection_artifact
    b_galaxy_yml = os.path.join(b_collection_dir, b'galaxy.yml')
    galaxy_metadata = {
        'authors': ['Ansible'],
        'readme': 'README.md',
        'namespace': 'namespace',
        'name': 'name',
        'version': '',
        'dependencies': {},
    }
    with open(b_galaxy_yml, 'wb') as galaxy_obj:
        galaxy_obj.write(to_bytes(yaml.safe_dump(galaxy_metadata)))

    b_scratch_dir = os.path.join(os.path.split(b_artifact_tarball)[0], b'temp')
    artifacts_mgr = collection.concrete_artifact_manager.ConcreteArtifactsManager(b_scratch_dir, validate_certs=False)
    requirement = Requirement.from_dir_path_as_unknown(b_collection_dir, artifacts_mgr)

    # While the folder name suggests a different collection, the written galaxy.yml is the source of truth.
    assert requirement.namespace == u'namespace'
    assert requirement.name == u'name'
    assert requirement.src == b_collection_dir
    assert requirement.ver == u'*'
248 265
 
249 266
 
250 267
 def test_build_requirement_from_tar(collection_artifact):
251
-    actual = collection.CollectionRequirement.from_tar(collection_artifact[1], True, True)
268
+    tmp_path = os.path.join(os.path.split(collection_artifact[1])[0], b'temp')
269
+    concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(tmp_path, validate_certs=False)
270
+
271
+    actual = Requirement.from_requirement_dict({'name': to_text(collection_artifact[1])}, concrete_artifact_cm)
252 272
 
253 273
     assert actual.namespace == u'ansible_namespace'
254 274
     assert actual.name == u'collection'
255
-    assert actual.b_path == collection_artifact[1]
256
-    assert actual.api is None
257
-    assert actual.skip is False
258
-    assert actual.versions == set([u'0.1.0'])
259
-    assert actual.latest_version == u'0.1.0'
260
-    assert actual.dependencies == {}
275
+    assert actual.src == to_text(collection_artifact[1])
276
+    assert actual.ver == u'0.1.0'
261 277
 
262 278
 
263 279
 def test_build_requirement_from_tar_fail_not_tar(tmp_path_factory):
... ...
@@ -266,9 +297,11 @@ def test_build_requirement_from_tar_fail_not_tar(tmp_path_factory):
266 266
     with open(test_file, 'wb') as test_obj:
267 267
         test_obj.write(b"\x00\x01\x02\x03")
268 268
 
269
+    concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(test_dir, validate_certs=False)
270
+
269 271
     expected = "Collection artifact at '%s' is not a valid tar file." % to_native(test_file)
270 272
     with pytest.raises(AnsibleError, match=expected):
271
-        collection.CollectionRequirement.from_tar(test_file, True, True)
273
+        Requirement.from_requirement_dict({'name': to_text(test_file)}, concrete_artifact_cm)
272 274
 
273 275
 
274 276
 def test_build_requirement_from_tar_no_manifest(tmp_path_factory):
... ...
@@ -289,9 +322,11 @@ def test_build_requirement_from_tar_no_manifest(tmp_path_factory):
289 289
         tar_info.mode = 0o0644
290 290
         tfile.addfile(tarinfo=tar_info, fileobj=b_io)
291 291
 
292
+    concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(test_dir, validate_certs=False)
293
+
292 294
     expected = "Collection at '%s' does not contain the required file MANIFEST.json." % to_native(tar_path)
293 295
     with pytest.raises(AnsibleError, match=expected):
294
-        collection.CollectionRequirement.from_tar(tar_path, True, True)
296
+        Requirement.from_requirement_dict({'name': to_text(tar_path)}, concrete_artifact_cm)
295 297
 
296 298
 
297 299
 def test_build_requirement_from_tar_no_files(tmp_path_factory):
... ...
@@ -311,9 +346,9 @@ def test_build_requirement_from_tar_no_files(tmp_path_factory):
311 311
         tar_info.mode = 0o0644
312 312
         tfile.addfile(tarinfo=tar_info, fileobj=b_io)
313 313
 
314
-    expected = "Collection at '%s' does not contain the required file FILES.json." % to_native(tar_path)
315
-    with pytest.raises(AnsibleError, match=expected):
316
-        collection.CollectionRequirement.from_tar(tar_path, True, True)
314
+    concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(test_dir, validate_certs=False)
315
+    with pytest.raises(KeyError, match='namespace'):
316
+        Requirement.from_requirement_dict({'name': to_text(tar_path)}, concrete_artifact_cm)
317 317
 
318 318
 
319 319
 def test_build_requirement_from_tar_invalid_manifest(tmp_path_factory):
... ...
@@ -329,95 +364,128 @@ def test_build_requirement_from_tar_invalid_manifest(tmp_path_factory):
329 329
         tar_info.mode = 0o0644
330 330
         tfile.addfile(tarinfo=tar_info, fileobj=b_io)
331 331
 
332
+    concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(test_dir, validate_certs=False)
333
+
332 334
     expected = "Collection tar file member MANIFEST.json does not contain a valid json string."
333 335
     with pytest.raises(AnsibleError, match=expected):
334
-        collection.CollectionRequirement.from_tar(tar_path, True, True)
336
+        Requirement.from_requirement_dict({'name': to_text(tar_path)}, concrete_artifact_cm)
335 337
 
336 338
 
337
-def test_build_requirement_from_name(galaxy_server, monkeypatch):
339
def test_build_requirement_from_name(galaxy_server, monkeypatch, tmp_path_factory):
    """Resolving a bare collection name selects the newest version the Galaxy server offers."""
    mock_get_versions = MagicMock()
    mock_get_versions.return_value = ['2.1.9', '2.1.10']
    monkeypatch.setattr(galaxy_server, 'get_collection_versions', mock_get_versions)

    # NOTE(review): the MagicMock itself is installed as the method; the keyword
    # attributes configure the mock object, not its return value — confirm intent.
    mock_version_metadata = MagicMock(
        namespace='namespace', name='collection',
        version='2.1.10', artifact_sha256='', dependencies={}
    )
    monkeypatch.setattr(api.GalaxyAPI, 'get_collection_version_metadata', mock_version_metadata)

    b_test_dir = to_bytes(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections Input'))
    artifacts_mgr = collection.concrete_artifact_manager.ConcreteArtifactsManager(b_test_dir, validate_certs=False)

    requested = ['namespace.collection']
    cli = GalaxyCLI(args=['ansible-galaxy', 'collection', 'install', requested[0]])
    requirements = cli._require_one_of_collections_requirements(
        requested, None, artifacts_manager=artifacts_mgr
    )['collections']
    # NB: '_resolve_depenency_map' (sic) is the helper's actual name in the module under test.
    resolved = collection._resolve_depenency_map(requirements, [galaxy_server], artifacts_mgr, None, True, False)['namespace.collection']

    assert resolved.namespace == u'namespace'
    assert resolved.name == u'collection'
    assert resolved.ver == u'2.1.10'
    assert resolved.src == galaxy_server

    assert mock_get_versions.call_count == 1
    assert mock_get_versions.mock_calls[0][1] == ('namespace', 'collection')
355 369
 
356 370
 
357
-def test_build_requirement_from_name_with_prerelease(galaxy_server, monkeypatch):
371
def test_build_requirement_from_name_with_prerelease(galaxy_server, monkeypatch, tmp_path_factory):
    """Pre-release versions are not selected when the requirement is the implicit wildcard '*'."""
    mock_get_versions = MagicMock()
    mock_get_versions.return_value = ['1.0.1', '2.0.1-beta.1', '2.0.1']
    monkeypatch.setattr(galaxy_server, 'get_collection_versions', mock_get_versions)

    mock_get_info = MagicMock()
    mock_get_info.return_value = api.CollectionVersionMetadata('namespace', 'collection', '2.0.1', None, None, {})
    monkeypatch.setattr(galaxy_server, 'get_collection_version_metadata', mock_get_info)

    b_test_dir = to_bytes(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections Input'))
    artifacts_mgr = collection.concrete_artifact_manager.ConcreteArtifactsManager(b_test_dir, validate_certs=False)

    cli = GalaxyCLI(args=['ansible-galaxy', 'collection', 'install', 'namespace.collection'])
    requirements = cli._require_one_of_collections_requirements(
        ['namespace.collection'], None, artifacts_manager=artifacts_mgr
    )['collections']
    resolved = collection._resolve_depenency_map(requirements, [galaxy_server], artifacts_mgr, None, True, False)['namespace.collection']

    # '2.0.1-beta.1' is offered by the server but must lose to the stable '2.0.1'.
    assert resolved.namespace == u'namespace'
    assert resolved.name == u'collection'
    assert resolved.src == galaxy_server
    assert resolved.ver == u'2.0.1'

    assert mock_get_versions.call_count == 1
    assert mock_get_versions.mock_calls[0][1] == ('namespace', 'collection')
375 396
 
376 397
 
377
-def test_build_requirment_from_name_with_prerelease_explicit(galaxy_server, monkeypatch):
398
def test_build_requirment_from_name_with_prerelease_explicit(galaxy_server, monkeypatch, tmp_path_factory):
    """An explicitly pinned pre-release version is honoured by the resolver."""
    mock_get_versions = MagicMock()
    mock_get_versions.return_value = ['1.0.1', '2.0.1-beta.1', '2.0.1']
    monkeypatch.setattr(galaxy_server, 'get_collection_versions', mock_get_versions)

    mock_get_info = MagicMock()
    mock_get_info.return_value = api.CollectionVersionMetadata(
        'namespace', 'collection', '2.0.1-beta.1', None, None, {})
    monkeypatch.setattr(galaxy_server, 'get_collection_version_metadata', mock_get_info)

    b_test_dir = to_bytes(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections Input'))
    artifacts_mgr = collection.concrete_artifact_manager.ConcreteArtifactsManager(b_test_dir, validate_certs=False)

    cli = GalaxyCLI(args=['ansible-galaxy', 'collection', 'install', 'namespace.collection:2.0.1-beta.1'])
    requirements = cli._require_one_of_collections_requirements(
        ['namespace.collection:2.0.1-beta.1'], None, artifacts_manager=artifacts_mgr
    )['collections']
    resolved = collection._resolve_depenency_map(requirements, [galaxy_server], artifacts_mgr, None, True, False)['namespace.collection']

    assert resolved.namespace == u'namespace'
    assert resolved.name == u'collection'
    assert resolved.src == galaxy_server
    assert resolved.ver == u'2.0.1-beta.1'

    assert mock_get_info.call_count == 1
    assert mock_get_info.mock_calls[0][1] == ('namespace', 'collection', '2.0.1-beta.1')
397 424
 
398 425
 
399
-def test_build_requirement_from_name_second_server(galaxy_server, monkeypatch):
426
+def test_build_requirement_from_name_second_server(galaxy_server, monkeypatch, tmp_path_factory):
400 427
     mock_get_versions = MagicMock()
401 428
     mock_get_versions.return_value = ['1.0.1', '1.0.2', '1.0.3']
402 429
     monkeypatch.setattr(galaxy_server, 'get_collection_versions', mock_get_versions)
403 430
 
431
+    mock_get_info = MagicMock()
432
+    mock_get_info.return_value = api.CollectionVersionMetadata('namespace', 'collection', '1.0.3', None, None, {})
433
+    monkeypatch.setattr(galaxy_server, 'get_collection_version_metadata', mock_get_info)
434
+
404 435
     broken_server = copy.copy(galaxy_server)
405 436
     broken_server.api_server = 'https://broken.com/'
406 437
     mock_version_list = MagicMock()
407 438
     mock_version_list.return_value = []
408 439
     monkeypatch.setattr(broken_server, 'get_collection_versions', mock_version_list)
409 440
 
410
-    actual = collection.CollectionRequirement.from_name('namespace.collection', [broken_server, galaxy_server],
411
-                                                        '>1.0.1', False, True)
441
+    test_dir = to_bytes(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections Input'))
442
+    concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(test_dir, validate_certs=False)
443
+
444
+    cli = GalaxyCLI(args=['ansible-galaxy', 'collection', 'install', 'namespace.collection:>1.0.1'])
445
+    requirements = cli._require_one_of_collections_requirements(
446
+        ['namespace.collection:>1.0.1'], None, artifacts_manager=concrete_artifact_cm
447
+    )['collections']
448
+    actual = collection._resolve_depenency_map(requirements, [broken_server, galaxy_server], concrete_artifact_cm, None, True, False)['namespace.collection']
412 449
 
413 450
     assert actual.namespace == u'namespace'
414 451
     assert actual.name == u'collection'
415
-    assert actual.b_path is None
416
-    # assert actual.api == galaxy_server
417
-    assert actual.skip is False
418
-    assert actual.versions == set([u'1.0.2', u'1.0.3'])
419
-    assert actual.latest_version == u'1.0.3'
420
-    assert actual.dependencies == {}
452
+    assert actual.src == galaxy_server
453
+    assert actual.ver == u'1.0.3'
421 454
 
422 455
     assert mock_version_list.call_count == 1
423 456
     assert mock_version_list.mock_calls[0][1] == ('namespace', 'collection')
... ...
@@ -426,53 +494,91 @@ def test_build_requirement_from_name_second_server(galaxy_server, monkeypatch):
426 426
     assert mock_get_versions.mock_calls[0][1] == ('namespace', 'collection')
427 427
 
428 428
 
429
-def test_build_requirement_from_name_missing(galaxy_server, monkeypatch):
429
def test_build_requirement_from_name_missing(galaxy_server, monkeypatch, tmp_path_factory):
    """Resolution fails with a descriptive error when no server offers any version at all."""
    mock_open = MagicMock()
    mock_open.return_value = []
    monkeypatch.setattr(galaxy_server, 'get_collection_versions', mock_open)

    b_test_dir = to_bytes(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections Input'))
    artifacts_mgr = collection.concrete_artifact_manager.ConcreteArtifactsManager(b_test_dir, validate_certs=False)

    cli = GalaxyCLI(args=['ansible-galaxy', 'collection', 'install', 'namespace.collection:>1.0.1'])
    requirements = cli._require_one_of_collections_requirements(
        ['namespace.collection'], None, artifacts_manager=artifacts_mgr
    )['collections']

    expected = "Failed to resolve the requested dependencies map. Could not satisfy the following requirements:\n* namespace.collection:* (direct request)"
    with pytest.raises(AnsibleError, match=re.escape(expected)):
        collection._resolve_depenency_map(requirements, [galaxy_server, galaxy_server], artifacts_mgr, None, False, True)
446
+
447
+
448
def test_build_requirement_from_name_401_unauthorized(galaxy_server, monkeypatch, tmp_path_factory):
    """An HTTP 401 from the Galaxy API propagates as a GalaxyError instead of being swallowed."""
    mock_open = MagicMock()
    mock_open.side_effect = api.GalaxyError(
        urllib_error.HTTPError('https://galaxy.server.com', 401, 'msg', {}, StringIO()), "error")
    monkeypatch.setattr(galaxy_server, 'get_collection_versions', mock_open)

    b_test_dir = to_bytes(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections Input'))
    artifacts_mgr = collection.concrete_artifact_manager.ConcreteArtifactsManager(b_test_dir, validate_certs=False)

    cli = GalaxyCLI(args=['ansible-galaxy', 'collection', 'install', 'namespace.collection:>1.0.1'])
    requirements = cli._require_one_of_collections_requirements(
        ['namespace.collection'], None, artifacts_manager=artifacts_mgr
    )['collections']

    expected = "error (HTTP Code: 401, Message: msg)"
    with pytest.raises(api.GalaxyError, match=re.escape(expected)):
        collection._resolve_depenency_map(requirements, [galaxy_server, galaxy_server], artifacts_mgr, None, False, False)
451 466
 
452 467
 
453
-def test_build_requirement_from_name_single_version(galaxy_server, monkeypatch):
468
def test_build_requirement_from_name_single_version(galaxy_server, monkeypatch, tmp_path_factory):
    """An exact '==' pin makes the dependency provider consider exactly one candidate."""
    b_test_dir = to_bytes(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections Input'))
    artifacts_mgr = collection.concrete_artifact_manager.ConcreteArtifactsManager(b_test_dir, validate_certs=False)
    multi_api_proxy = collection.galaxy_api_proxy.MultiGalaxyAPIProxy([galaxy_server], artifacts_mgr)
    dep_provider = dependency_resolution.providers.CollectionDependencyProvider(apis=multi_api_proxy, concrete_artifacts_manager=artifacts_mgr)

    # Spy on find_matches so the candidate lists considered during resolution are recorded.
    matches = RequirementCandidates()
    mock_find_matches = MagicMock(side_effect=matches.func_wrapper(dep_provider.find_matches), autospec=True)
    monkeypatch.setattr(dependency_resolution.providers.CollectionDependencyProvider, 'find_matches', mock_find_matches)

    mock_get_versions = MagicMock()
    mock_get_versions.return_value = ['2.0.0']
    monkeypatch.setattr(galaxy_server, 'get_collection_versions', mock_get_versions)

    mock_get_info = MagicMock()
    mock_get_info.return_value = api.CollectionVersionMetadata(
        'namespace', 'collection', '2.0.0', None, None, {})
    monkeypatch.setattr(galaxy_server, 'get_collection_version_metadata', mock_get_info)

    cli = GalaxyCLI(args=['ansible-galaxy', 'collection', 'install', 'namespace.collection:==2.0.0'])
    requirements = cli._require_one_of_collections_requirements(
        ['namespace.collection:==2.0.0'], None, artifacts_manager=artifacts_mgr
    )['collections']

    resolved = collection._resolve_depenency_map(requirements, [galaxy_server], artifacts_mgr, None, False, True)['namespace.collection']

    assert resolved.namespace == u'namespace'
    assert resolved.name == u'collection'
    assert resolved.src == galaxy_server
    assert resolved.ver == u'2.0.0'
    assert [c.ver for c in matches.candidates] == [u'2.0.0']

    assert mock_get_info.call_count == 1
    assert mock_get_info.mock_calls[0][1] == ('namespace', 'collection', '2.0.0')
473 502
 
474 503
 
475
-def test_build_requirement_from_name_multiple_versions_one_match(galaxy_server, monkeypatch):
504
+def test_build_requirement_from_name_multiple_versions_one_match(galaxy_server, monkeypatch, tmp_path_factory):
505
+    test_dir = to_bytes(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections Input'))
506
+    concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(test_dir, validate_certs=False)
507
+    multi_api_proxy = collection.galaxy_api_proxy.MultiGalaxyAPIProxy([galaxy_server], concrete_artifact_cm)
508
+    dep_provider = dependency_resolution.providers.CollectionDependencyProvider(apis=multi_api_proxy, concrete_artifacts_manager=concrete_artifact_cm)
509
+
510
+    matches = RequirementCandidates()
511
+    mock_find_matches = MagicMock(side_effect=matches.func_wrapper(dep_provider.find_matches), autospec=True)
512
+    monkeypatch.setattr(dependency_resolution.providers.CollectionDependencyProvider, 'find_matches', mock_find_matches)
513
+
476 514
     mock_get_versions = MagicMock()
477 515
     mock_get_versions.return_value = ['2.0.0', '2.0.1', '2.0.2']
478 516
     monkeypatch.setattr(galaxy_server, 'get_collection_versions', mock_get_versions)
... ...
@@ -482,17 +588,18 @@ def test_build_requirement_from_name_multiple_versions_one_match(galaxy_server,
482 482
                                                                {})
483 483
     monkeypatch.setattr(galaxy_server, 'get_collection_version_metadata', mock_get_info)
484 484
 
485
-    actual = collection.CollectionRequirement.from_name('namespace.collection', [galaxy_server], '>=2.0.1,<2.0.2',
486
-                                                        True, True)
485
+    cli = GalaxyCLI(args=['ansible-galaxy', 'collection', 'install', 'namespace.collection:>=2.0.1,<2.0.2'])
486
+    requirements = cli._require_one_of_collections_requirements(
487
+        ['namespace.collection:>=2.0.1,<2.0.2'], None, artifacts_manager=concrete_artifact_cm
488
+    )['collections']
489
+
490
+    actual = collection._resolve_depenency_map(requirements, [galaxy_server], concrete_artifact_cm, None, False, True)['namespace.collection']
487 491
 
488 492
     assert actual.namespace == u'namespace'
489 493
     assert actual.name == u'collection'
490
-    assert actual.b_path is None
491
-    assert actual.api == galaxy_server
492
-    assert actual.skip is False
493
-    assert actual.versions == set([u'2.0.1'])
494
-    assert actual.latest_version == u'2.0.1'
495
-    assert actual.dependencies == {}
494
+    assert actual.src == galaxy_server
495
+    assert actual.ver == u'2.0.1'
496
+    assert [c.ver for c in matches.candidates] == [u'2.0.1']
496 497
 
497 498
     assert mock_get_versions.call_count == 1
498 499
     assert mock_get_versions.mock_calls[0][1] == ('namespace', 'collection')
... ...
@@ -501,122 +608,118 @@ def test_build_requirement_from_name_multiple_versions_one_match(galaxy_server,
501 501
     assert mock_get_info.mock_calls[0][1] == ('namespace', 'collection', '2.0.1')
502 502
 
503 503
 
504
-def test_build_requirement_from_name_multiple_version_results(galaxy_server, monkeypatch):
504
+def test_build_requirement_from_name_multiple_version_results(galaxy_server, monkeypatch, tmp_path_factory):
505
+    test_dir = to_bytes(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections Input'))
506
+    concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(test_dir, validate_certs=False)
507
+    multi_api_proxy = collection.galaxy_api_proxy.MultiGalaxyAPIProxy([galaxy_server], concrete_artifact_cm)
508
+    dep_provider = dependency_resolution.providers.CollectionDependencyProvider(apis=multi_api_proxy, concrete_artifacts_manager=concrete_artifact_cm)
509
+
510
+    matches = RequirementCandidates()
511
+    mock_find_matches = MagicMock(side_effect=matches.func_wrapper(dep_provider.find_matches), autospec=True)
512
+    monkeypatch.setattr(dependency_resolution.providers.CollectionDependencyProvider, 'find_matches', mock_find_matches)
513
+
514
+    mock_get_info = MagicMock()
515
+    mock_get_info.return_value = api.CollectionVersionMetadata('namespace', 'collection', '2.0.5', None, None, {})
516
+    monkeypatch.setattr(galaxy_server, 'get_collection_version_metadata', mock_get_info)
517
+
505 518
     mock_get_versions = MagicMock()
519
+    mock_get_versions.return_value = ['1.0.1', '1.0.2', '1.0.3']
520
+    monkeypatch.setattr(galaxy_server, 'get_collection_versions', mock_get_versions)
521
+
506 522
     mock_get_versions.return_value = ['2.0.0', '2.0.1', '2.0.2', '2.0.3', '2.0.4', '2.0.5']
507 523
     monkeypatch.setattr(galaxy_server, 'get_collection_versions', mock_get_versions)
508 524
 
509
-    actual = collection.CollectionRequirement.from_name('namespace.collection', [galaxy_server], '!=2.0.2',
510
-                                                        True, True)
525
+    cli = GalaxyCLI(args=['ansible-galaxy', 'collection', 'install', 'namespace.collection:!=2.0.2'])
526
+    requirements = cli._require_one_of_collections_requirements(
527
+        ['namespace.collection:!=2.0.2'], None, artifacts_manager=concrete_artifact_cm
528
+    )['collections']
529
+
530
+    actual = collection._resolve_depenency_map(requirements, [galaxy_server], concrete_artifact_cm, None, False, True)['namespace.collection']
511 531
 
512 532
     assert actual.namespace == u'namespace'
513 533
     assert actual.name == u'collection'
514
-    assert actual.b_path is None
515
-    assert actual.api == galaxy_server
516
-    assert actual.skip is False
517
-    assert actual.versions == set([u'2.0.0', u'2.0.1', u'2.0.3', u'2.0.4', u'2.0.5'])
518
-    assert actual.latest_version == u'2.0.5'
519
-    assert actual.dependencies == {}
534
+    assert actual.src == galaxy_server
535
+    assert actual.ver == u'2.0.5'
536
+    # should be ordered latest to earliest
537
+    assert [c.ver for c in matches.candidates] == [u'2.0.5', u'2.0.4', u'2.0.3', u'2.0.1', u'2.0.0']
520 538
 
521 539
     assert mock_get_versions.call_count == 1
522 540
     assert mock_get_versions.mock_calls[0][1] == ('namespace', 'collection')
523 541
 
524 542
 
525
-@pytest.mark.parametrize('versions, requirement, expected_filter, expected_latest', [
526
-    [['1.0.0', '1.0.1'], '*', ['1.0.0', '1.0.1'], '1.0.1'],
527
-    [['1.0.0', '1.0.5', '1.1.0'], '>1.0.0,<1.1.0', ['1.0.5'], '1.0.5'],
528
-    [['1.0.0', '1.0.5', '1.1.0'], '>1.0.0,<=1.0.5', ['1.0.5'], '1.0.5'],
529
-    [['1.0.0', '1.0.5', '1.1.0'], '>=1.1.0', ['1.1.0'], '1.1.0'],
530
-    [['1.0.0', '1.0.5', '1.1.0'], '!=1.1.0', ['1.0.0', '1.0.5'], '1.0.5'],
531
-    [['1.0.0', '1.0.5', '1.1.0'], '==1.0.5', ['1.0.5'], '1.0.5'],
532
-    [['1.0.0', '1.0.5', '1.1.0'], '1.0.5', ['1.0.5'], '1.0.5'],
533
-    [['1.0.0', '2.0.0', '3.0.0'], '>=2', ['2.0.0', '3.0.0'], '3.0.0'],
534
-])
535
-def test_add_collection_requirements(versions, requirement, expected_filter, expected_latest):
536
-    req = collection.CollectionRequirement('namespace', 'name', None, 'https://galaxy.com', versions, requirement,
537
-                                           False)
538
-    assert req.versions == set(expected_filter)
539
-    assert req.latest_version == expected_latest
540
-
541
-
542
-def test_add_collection_requirement_to_unknown_installed_version(monkeypatch):
543
-    mock_display = MagicMock()
544
-    monkeypatch.setattr(Display, 'display', mock_display)
545
-
546
-    req = collection.CollectionRequirement('namespace', 'name', None, 'https://galaxy.com', ['*'], '*', False,
547
-                                           skip=True)
543
+def test_candidate_with_conflict(monkeypatch, tmp_path_factory, galaxy_server):
548 544
 
549
-    req.add_requirement('parent.collection', '1.0.0')
550
-    assert req.latest_version == '*'
545
+    test_dir = to_bytes(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections Input'))
546
+    concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(test_dir, validate_certs=False)
551 547
 
552
-    assert mock_display.call_count == 1
548
+    mock_get_info = MagicMock()
549
+    mock_get_info.return_value = api.CollectionVersionMetadata('namespace', 'collection', '2.0.5', None, None, {})
550
+    monkeypatch.setattr(galaxy_server, 'get_collection_version_metadata', mock_get_info)
553 551
 
554
-    actual_warn = ' '.join(mock_display.mock_calls[0][1][0].split('\n'))
555
-    assert "Failed to validate the collection requirement 'namespace.name:1.0.0' for parent.collection" in actual_warn
552
+    mock_get_versions = MagicMock()
553
+    mock_get_versions.return_value = ['2.0.5']
554
+    monkeypatch.setattr(galaxy_server, 'get_collection_versions', mock_get_versions)
556 555
 
556
+    cli = GalaxyCLI(args=['ansible-galaxy', 'collection', 'install', 'namespace.collection:!=2.0.5'])
557
+    requirements = cli._require_one_of_collections_requirements(
558
+        ['namespace.collection:!=2.0.5'], None, artifacts_manager=concrete_artifact_cm
559
+    )['collections']
557 560
 
558
-def test_add_collection_wildcard_requirement_to_unknown_installed_version():
559
-    req = collection.CollectionRequirement('namespace', 'name', None, 'https://galaxy.com', ['*'], '*', False,
560
-                                           skip=True)
561
-    req.add_requirement(str(req), '*')
561
+    expected = "Failed to resolve the requested dependencies map. Could not satisfy the following requirements:\n"
562
+    expected += "* namespace.collection:!=2.0.5 (direct request)"
563
+    with pytest.raises(AnsibleError, match=re.escape(expected)):
564
+        collection._resolve_depenency_map(requirements, [galaxy_server], concrete_artifact_cm, None, False, True)
562 565
 
563
-    assert req.versions == set('*')
564
-    assert req.latest_version == '*'
565 566
 
567
+def test_dep_candidate_with_conflict(monkeypatch, tmp_path_factory, galaxy_server):
568
+    test_dir = to_bytes(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections Input'))
569
+    concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(test_dir, validate_certs=False)
566 570
 
567
-def test_add_collection_requirement_with_conflict(galaxy_server):
568
-    expected = "Cannot meet requirement ==1.0.2 for dependency namespace.name from source '%s'. Available versions " \
569
-               "before last requirement added: 1.0.0, 1.0.1\n" \
570
-               "Requirements from:\n" \
571
-               "\tbase - 'namespace.name:==1.0.2'" % galaxy_server.api_server
572
-    with pytest.raises(AnsibleError, match=expected):
573
-        collection.CollectionRequirement('namespace', 'name', None, galaxy_server, ['1.0.0', '1.0.1'], '==1.0.2',
574
-                                         False)
571
+    mock_get_info_return = [
572
+        api.CollectionVersionMetadata('parent', 'collection', '2.0.5', None, None, {'namespace.collection': '!=1.0.0'}),
573
+        api.CollectionVersionMetadata('namespace', 'collection', '1.0.0', None, None, {}),
574
+    ]
575
+    mock_get_info = MagicMock(side_effect=mock_get_info_return)
576
+    monkeypatch.setattr(galaxy_server, 'get_collection_version_metadata', mock_get_info)
575 577
 
578
+    mock_get_versions = MagicMock(side_effect=[['2.0.5'], ['1.0.0']])
579
+    monkeypatch.setattr(galaxy_server, 'get_collection_versions', mock_get_versions)
576 580
 
577
-def test_add_requirement_to_existing_collection_with_conflict(galaxy_server):
578
-    req = collection.CollectionRequirement('namespace', 'name', None, galaxy_server, ['1.0.0', '1.0.1'], '*', False)
581
+    cli = GalaxyCLI(args=['ansible-galaxy', 'collection', 'install', 'parent.collection:2.0.5'])
582
+    requirements = cli._require_one_of_collections_requirements(
583
+        ['parent.collection:2.0.5'], None, artifacts_manager=concrete_artifact_cm
584
+    )['collections']
579 585
 
580
-    expected = "Cannot meet dependency requirement 'namespace.name:1.0.2' for collection namespace.collection2 from " \
581
-               "source '%s'. Available versions before last requirement added: 1.0.0, 1.0.1\n" \
582
-               "Requirements from:\n" \
583
-               "\tbase - 'namespace.name:*'\n" \
584
-               "\tnamespace.collection2 - 'namespace.name:1.0.2'" % galaxy_server.api_server
586
+    expected = "Failed to resolve the requested dependencies map. Could not satisfy the following requirements:\n"
587
+    expected += "* namespace.collection:!=1.0.0 (dependency of parent.collection:2.0.5)"
585 588
     with pytest.raises(AnsibleError, match=re.escape(expected)):
586
-        req.add_requirement('namespace.collection2', '1.0.2')
589
+        collection._resolve_depenency_map(requirements, [galaxy_server], concrete_artifact_cm, None, False, True)
587 590
 
588 591
 
589
-def test_add_requirement_to_installed_collection_with_conflict():
590
-    source = 'https://galaxy.ansible.com'
591
-    req = collection.CollectionRequirement('namespace', 'name', None, source, ['1.0.0', '1.0.1'], '*', False,
592
-                                           skip=True)
592
+def test_install_installed_collection(monkeypatch, tmp_path_factory, galaxy_server):
593 593
 
594
-    expected = "Cannot meet requirement namespace.name:1.0.2 as it is already installed at version '1.0.1'. " \
595
-               "Use --force to overwrite"
596
-    with pytest.raises(AnsibleError, match=re.escape(expected)):
597
-        req.add_requirement(None, '1.0.2')
594
+    mock_installed_collections = MagicMock(return_value=[Candidate('namespace.collection', '1.2.3', None, 'dir')])
598 595
 
596
+    monkeypatch.setattr(collection, 'find_existing_collections', mock_installed_collections)
599 597
 
600
-def test_add_requirement_to_installed_collection_with_conflict_as_dep():
601
-    source = 'https://galaxy.ansible.com'
602
-    req = collection.CollectionRequirement('namespace', 'name', None, source, ['1.0.0', '1.0.1'], '*', False,
603
-                                           skip=True)
598
+    test_dir = to_text(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections'))
599
+    concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(test_dir, validate_certs=False)
604 600
 
605
-    expected = "Cannot meet requirement namespace.name:1.0.2 as it is already installed at version '1.0.1'. " \
606
-               "Use --force-with-deps to overwrite"
607
-    with pytest.raises(AnsibleError, match=re.escape(expected)):
608
-        req.add_requirement('namespace.collection2', '1.0.2')
609
-
610
-
611
-def test_install_skipped_collection(monkeypatch):
612 601
     mock_display = MagicMock()
613 602
     monkeypatch.setattr(Display, 'display', mock_display)
614 603
 
615
-    req = collection.CollectionRequirement('namespace', 'name', None, 'source', ['1.0.0'], '*', False, skip=True)
616
-    req.install(None, None)
604
+    mock_get_info = MagicMock()
605
+    mock_get_info.return_value = api.CollectionVersionMetadata('namespace', 'collection', '1.2.3', None, None, {})
606
+    monkeypatch.setattr(galaxy_server, 'get_collection_version_metadata', mock_get_info)
607
+
608
+    mock_get_versions = MagicMock(return_value=['1.2.3', '1.3.0'])
609
+    monkeypatch.setattr(galaxy_server, 'get_collection_versions', mock_get_versions)
610
+
611
+    cli = GalaxyCLI(args=['ansible-galaxy', 'collection', 'install', 'namespace.collection'])
612
+    cli.run()
617 613
 
618
-    assert mock_display.call_count == 1
619
-    assert mock_display.mock_calls[0][1][0] == "Skipping 'namespace.name' as it is already installed"
614
+    expected = "Nothing to do. All requested collections are already installed. If you want to reinstall them, consider using `--force`."
615
+    assert mock_display.mock_calls[1][1][0] == expected
620 616
 
621 617
 
622 618
 def test_install_collection(collection_artifact, monkeypatch):
... ...
@@ -624,15 +727,17 @@ def test_install_collection(collection_artifact, monkeypatch):
624 624
     monkeypatch.setattr(Display, 'display', mock_display)
625 625
 
626 626
     collection_tar = collection_artifact[1]
627
-    output_path = os.path.join(os.path.split(collection_tar)[0], b'output')
628
-    collection_path = os.path.join(output_path, b'ansible_namespace', b'collection')
629
-    os.makedirs(os.path.join(collection_path, b'delete_me'))  # Create a folder to verify the install cleans out the dir
630 627
 
631 628
     temp_path = os.path.join(os.path.split(collection_tar)[0], b'temp')
632 629
     os.makedirs(temp_path)
630
+    concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(temp_path, validate_certs=False)
633 631
 
634
-    req = collection.CollectionRequirement.from_tar(collection_tar, True, True)
635
-    req.install(to_text(output_path), temp_path)
632
+    output_path = os.path.join(os.path.split(collection_tar)[0])
633
+    collection_path = os.path.join(output_path, b'ansible_namespace', b'collection')
634
+    os.makedirs(os.path.join(collection_path, b'delete_me'))  # Create a folder to verify the install cleans out the dir
635
+
636
+    candidate = Candidate('ansible_namespace.collection', '0.1.0', to_text(collection_tar), 'file')
637
+    collection.install(candidate, to_text(output_path), concrete_artifact_cm)
636 638
 
637 639
     # Ensure the temp directory is empty, nothing is left behind
638 640
     assert os.listdir(temp_path) == []
... ...
@@ -649,33 +754,29 @@ def test_install_collection(collection_artifact, monkeypatch):
649 649
     assert mock_display.call_count == 2
650 650
     assert mock_display.mock_calls[0][1][0] == "Installing 'ansible_namespace.collection:0.1.0' to '%s'" \
651 651
         % to_text(collection_path)
652
-    assert mock_display.mock_calls[1][1][0] == "ansible_namespace.collection (0.1.0) was installed successfully"
652
+    assert mock_display.mock_calls[1][1][0] == "ansible_namespace.collection:0.1.0 was installed successfully"
653 653
 
654 654
 
655 655
 def test_install_collection_with_download(galaxy_server, collection_artifact, monkeypatch):
656
-    collection_tar = collection_artifact[1]
657
-    output_path = os.path.join(os.path.split(collection_tar)[0], b'output')
658
-    collection_path = os.path.join(output_path, b'ansible_namespace', b'collection')
656
+    collection_path, collection_tar = collection_artifact
657
+    shutil.rmtree(collection_path)
658
+
659
+    collections_dir = ('%s' % os.path.sep).join(to_text(collection_path).split('%s' % os.path.sep)[:-2])
660
+
661
+    temp_path = os.path.join(os.path.split(collection_tar)[0], b'temp')
662
+    os.makedirs(temp_path)
659 663
 
660 664
     mock_display = MagicMock()
661 665
     monkeypatch.setattr(Display, 'display', mock_display)
662 666
 
667
+    concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(temp_path, validate_certs=False)
668
+
663 669
     mock_download = MagicMock()
664 670
     mock_download.return_value = collection_tar
665
-    monkeypatch.setattr(collection, '_download_file', mock_download)
666
-
667
-    monkeypatch.setattr(galaxy_server, '_available_api_versions', {'v2': 'v2/'})
668
-    temp_path = os.path.join(os.path.split(collection_tar)[0], b'temp')
669
-    os.makedirs(temp_path)
671
+    monkeypatch.setattr(concrete_artifact_cm, 'get_galaxy_artifact_path', mock_download)
670 672
 
671
-    meta = api.CollectionVersionMetadata('ansible_namespace', 'collection', '0.1.0', 'https://downloadme.com',
672
-                                         'myhash', {})
673
-    req = collection.CollectionRequirement('ansible_namespace', 'collection', None, galaxy_server,
674
-                                           ['0.1.0'], '*', False, metadata=meta)
675
-    req.install(to_text(output_path), temp_path)
676
-
677
-    # Ensure the temp directory is empty, nothing is left behind
678
-    assert os.listdir(temp_path) == []
673
+    req = Requirement('ansible_namespace.collection', '0.1.0', 'https://downloadme.com', 'galaxy')
674
+    collection.install(req, to_text(collections_dir), concrete_artifact_cm)
679 675
 
680 676
     actual_files = os.listdir(collection_path)
681 677
     actual_files.sort()
... ...
@@ -685,13 +786,11 @@ def test_install_collection_with_download(galaxy_server, collection_artifact, mo
685 685
     assert mock_display.call_count == 2
686 686
     assert mock_display.mock_calls[0][1][0] == "Installing 'ansible_namespace.collection:0.1.0' to '%s'" \
687 687
         % to_text(collection_path)
688
-    assert mock_display.mock_calls[1][1][0] == "ansible_namespace.collection (0.1.0) was installed successfully"
688
+    assert mock_display.mock_calls[1][1][0] == "ansible_namespace.collection:0.1.0 was installed successfully"
689 689
 
690 690
     assert mock_download.call_count == 1
691
-    assert mock_download.mock_calls[0][1][0] == 'https://downloadme.com'
692
-    assert mock_download.mock_calls[0][1][1] == temp_path
693
-    assert mock_download.mock_calls[0][1][2] == 'myhash'
694
-    assert mock_download.mock_calls[0][1][3] is True
691
+    assert mock_download.mock_calls[0][1][0].src == 'https://downloadme.com'
692
+    assert mock_download.mock_calls[0][1][0].type == 'galaxy'
695 693
 
696 694
 
697 695
 def test_install_collections_from_tar(collection_artifact, monkeypatch):
... ...
@@ -702,8 +801,10 @@ def test_install_collections_from_tar(collection_artifact, monkeypatch):
702 702
     mock_display = MagicMock()
703 703
     monkeypatch.setattr(Display, 'display', mock_display)
704 704
 
705
-    collection.install_collections([(to_text(collection_tar), '*', None, None)], to_text(temp_path),
706
-                                   [u'https://galaxy.ansible.com'], True, False, False, False, False)
705
+    concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(temp_path, validate_certs=False)
706
+
707
+    requirements = [Requirement('ansible_namespace.collection', '0.1.0', to_text(collection_tar), 'file')]
708
+    collection.install_collections(requirements, to_text(temp_path), [], False, False, False, False, False, concrete_artifact_cm)
707 709
 
708 710
     assert os.path.isdir(collection_path)
709 711
 
... ...
@@ -734,9 +835,12 @@ def test_install_collections_existing_without_force(collection_artifact, monkeyp
734 734
     mock_display = MagicMock()
735 735
     monkeypatch.setattr(Display, 'display', mock_display)
736 736
 
737
-    # If we don't delete collection_path it will think the original build skeleton is installed so we expect a skip
738
-    collection.install_collections([(to_text(collection_tar), '*', None, None)], to_text(temp_path),
739
-                                   [u'https://galaxy.ansible.com'], True, False, False, False, False)
737
+    concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(temp_path, validate_certs=False)
738
+
739
+    assert os.path.isdir(collection_path)
740
+
741
+    requirements = [Requirement('ansible_namespace.collection', '0.1.0', to_text(collection_tar), 'file')]
742
+    collection.install_collections(requirements, to_text(temp_path), [], False, False, False, False, False, concrete_artifact_cm)
740 743
 
741 744
     assert os.path.isdir(collection_path)
742 745
 
... ...
@@ -746,11 +850,9 @@ def test_install_collections_existing_without_force(collection_artifact, monkeyp
746 746
 
747 747
     # Filter out the progress cursor display calls.
748 748
     display_msgs = [m[1][0] for m in mock_display.mock_calls if 'newline' not in m[2] and len(m[1]) == 1]
749
-    assert len(display_msgs) == 3
749
+    assert len(display_msgs) == 1
750 750
 
751
-    assert display_msgs[0] == "Process install dependency map"
752
-    assert display_msgs[1] == "Starting collection install process"
753
-    assert display_msgs[2] == "Skipping 'ansible_namespace.collection' as it is already installed"
751
+    assert display_msgs[0] == 'Nothing to do. All requested collections are already installed. If you want to reinstall them, consider using `--force`.'
754 752
 
755 753
     for msg in display_msgs:
756 754
         assert 'WARNING' not in msg
... ...
@@ -768,8 +870,9 @@ def test_install_missing_metadata_warning(collection_artifact, monkeypatch):
768 768
         if os.path.isfile(b_path):
769 769
             os.unlink(b_path)
770 770
 
771
-    collection.install_collections([(to_text(collection_tar), '*', None, None)], to_text(temp_path),
772
-                                   [u'https://galaxy.ansible.com'], True, False, False, False, False)
771
+    concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(temp_path, validate_certs=False)
772
+    requirements = [Requirement('ansible_namespace.collection', '0.1.0', to_text(collection_tar), 'file')]
773
+    collection.install_collections(requirements, to_text(temp_path), [], False, False, False, False, False, concrete_artifact_cm)
773 774
 
774 775
     display_msgs = [m[1][0] for m in mock_display.mock_calls if 'newline' not in m[2] and len(m[1]) == 1]
775 776
 
... ...
@@ -788,8 +891,9 @@ def test_install_collection_with_circular_dependency(collection_artifact, monkey
788 788
     mock_display = MagicMock()
789 789
     monkeypatch.setattr(Display, 'display', mock_display)
790 790
 
791
-    collection.install_collections([(to_text(collection_tar), '*', None, None)], to_text(temp_path),
792
-                                   [u'https://galaxy.ansible.com'], True, False, False, False, False)
791
+    concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(temp_path, validate_certs=False)
792
+    requirements = [Requirement('ansible_namespace.collection', '0.1.0', to_text(collection_tar), 'file')]
793
+    collection.install_collections(requirements, to_text(temp_path), [], False, False, False, False, False, concrete_artifact_cm)
793 794
 
794 795
     assert os.path.isdir(collection_path)
795 796
 
... ...
@@ -811,4 +915,4 @@ def test_install_collection_with_circular_dependency(collection_artifact, monkey
811 811
     assert display_msgs[0] == "Process install dependency map"
812 812
     assert display_msgs[1] == "Starting collection install process"
813 813
     assert display_msgs[2] == "Installing 'ansible_namespace.collection:0.1.0' to '%s'" % to_text(collection_path)
814
-    assert display_msgs[3] == "ansible_namespace.collection (0.1.0) was installed successfully"
814
+    assert display_msgs[3] == "ansible_namespace.collection:0.1.0 was installed successfully"