1 | 1 |
new file mode 100644 |
... | ... |
@@ -0,0 +1,59 @@ |
0 |
+# (c) 2014, Michael DeHaan <michael.dehaan@gmail.com> |
|
1 |
+# |
|
2 |
+# This file is part of Ansible |
|
3 |
+# |
|
4 |
+# Ansible is free software: you can redistribute it and/or modify |
|
5 |
+# it under the terms of the GNU General Public License as published by |
|
6 |
+# the Free Software Foundation, either version 3 of the License, or |
|
7 |
+# (at your option) any later version. |
|
8 |
+# |
|
9 |
+# Ansible is distributed in the hope that it will be useful, |
|
10 |
+# but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 |
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 |
+# GNU General Public License for more details. |
|
13 |
+# |
|
14 |
+# You should have received a copy of the GNU General Public License |
|
15 |
+# along with Ansible. If not, see <http://www.gnu.org/licenses/>. |
|
16 |
+ |
|
17 |
+from collections import MutableMapping |
|
18 |
+ |
|
19 |
+from ansible import utils |
|
20 |
+from ansible import constants as C |
|
21 |
+from ansible import errors |
|
22 |
+ |
|
23 |
+ |
|
24 |
class FactCache(MutableMapping):
    """Dict-like front end over the configured fact cache plugin.

    Presents the standard MutableMapping interface (indexing, iteration,
    len, containment) and forwards every operation to the backend plugin
    selected by C.CACHE_PLUGIN via the cache plugin loader.
    """

    def __init__(self, *args, **kwargs):
        # Resolve the configured cache plugin by name; every mapping
        # operation below delegates to this backend.
        self._plugin = utils.plugins.cache_loader.get(C.CACHE_PLUGIN)
        if self._plugin is None:
            # NOTE(review): a missing plugin leaves this object unusable
            # (every operation fails with AttributeError). Kept as-is to
            # preserve existing behavior; consider raising an error here.
            return

    def __getitem__(self, key):
        # Check membership first so a missing key surfaces as KeyError
        # rather than whatever the plugin returns for unknown keys.
        if key not in self:
            raise KeyError
        return self._plugin.get(key)

    def __setitem__(self, key, value):
        self._plugin.set(key, value)

    def __delitem__(self, key):
        self._plugin.delete(key)

    def __contains__(self, key):
        return self._plugin.contains(key)

    def __iter__(self):
        return iter(self._plugin.keys())

    def __len__(self):
        return len(self._plugin.keys())

    def copy(self):
        """
        Return a primitive copy of the keys and values from the cache.
        """
        # dict(self) drives the mapping protocol (__iter__/__getitem__),
        # avoiding the Python 2-only iteritems() helper the previous
        # implementation relied on.
        return dict(self)

    def keys(self):
        return self._plugin.keys()
0 | 59 |
new file mode 100644 |
... | ... |
@@ -0,0 +1,15 @@ |
0 |
class BaseCacheModule(object):
    """Abstract interface that every fact cache plugin must implement.

    Subclasses provide the actual storage backend; each method below
    raises NotImplementedError until overridden.
    """

    def _not_implemented(self, method_name):
        # Instances carry no __name__ attribute, so the previous
        # ``self.__name__`` raised AttributeError instead of the intended
        # NotImplementedError; name the missing method explicitly.
        raise NotImplementedError(
            "Subclasses of {} must implement the '{}' method".format(
                self.__class__.__name__, method_name))

    def get(self, key):
        self._not_implemented('get')

    def set(self, key, value):
        self._not_implemented('set')

    def keys(self):
        self._not_implemented('keys')

    def contains(self, key):
        self._not_implemented('contains')

    def delete(self, key):
        self._not_implemented('delete')
0 | 15 |
new file mode 100644 |
... | ... |
@@ -0,0 +1,65 @@ |
0 |
+# (c) 2014, Michael DeHaan <michael.dehaan@gmail.com> |
|
1 |
+# |
|
2 |
+# This file is part of Ansible |
|
3 |
+# |
|
4 |
+# Ansible is free software: you can redistribute it and/or modify |
|
5 |
+# it under the terms of the GNU General Public License as published by |
|
6 |
+# the Free Software Foundation, either version 3 of the License, or |
|
7 |
+# (at your option) any later version. |
|
8 |
+# |
|
9 |
+# Ansible is distributed in the hope that it will be useful, |
|
10 |
+# but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 |
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 |
+# GNU General Public License for more details. |
|
13 |
+# |
|
14 |
+# You should have received a copy of the GNU General Public License |
|
15 |
+# along with Ansible. If not, see <http://www.gnu.org/licenses/>. |
|
16 |
+from __future__ import absolute_import |
|
17 |
+ |
|
18 |
+import collections |
|
19 |
+import json |
|
20 |
+import os |
|
21 |
+import shutil |
|
22 |
+import sys |
|
23 |
+import tempfile |
|
24 |
+from datetime import datetime |
|
25 |
+ |
|
26 |
+from ansible import constants as C |
|
27 |
+from ansible.cache.memory import CacheModule as MemoryCacheModule |
|
28 |
+ |
|
29 |
+ |
|
30 |
class CacheModule(MemoryCacheModule):
    """Memory-backed cache that persists facts to a JSON file on disk.

    On startup the JSON file is loaded back into the in-memory cache if it
    is readable and younger than the configured timeout; every set/delete
    rewrites the file so it stays current.
    """

    def __init__(self):
        super(CacheModule, self).__init__()
        self._timeout = int(C.CACHE_PLUGIN_TIMEOUT)
        # NOTE(review): a fixed, predictable path in /tmp is shared between
        # users and open to pre-creation attacks; kept for compatibility,
        # but a configurable or per-user location would be safer.
        self._filename = '/tmp/ansible_facts.json'

        if os.access(self._filename, os.R_OK):
            mtime = datetime.fromtimestamp(os.path.getmtime(self._filename))
            # a timeout of 0 means the cached file never expires
            if self._timeout == 0 or (datetime.now() - mtime).total_seconds() < self._timeout:
                with open(self._filename, 'rb') as f:
                    # we could make assumptions about the MemoryCacheModule here if we wanted
                    # to be more efficient, but performance isn't the priority with this module
                    data = json.load(f)
                    if isinstance(data, collections.Mapping):
                        for k, v in data.items():
                            super(CacheModule, self).set(k, v)

    def set(self, *args, **kwargs):
        super(CacheModule, self).set(*args, **kwargs)
        self.flush()

    def delete(self, *args, **kwargs):
        super(CacheModule, self).delete(*args, **kwargs)
        self.flush()

    def flush(self):
        """Serialize the in-memory cache to disk.

        The JSON is written to a temporary file in the same directory and
        then renamed over the target, so readers never observe a partially
        written file (the previous TemporaryFile + copyfileobj approach
        could leave a truncated cache file if interrupted mid-copy).
        """
        directory = os.path.dirname(self._filename) or '.'
        fd, temp_path = tempfile.mkstemp(dir=directory)
        try:
            with os.fdopen(fd, 'w+b') as temp:
                json.dump(self._cache, temp, separators=(',', ':'))
            # os.rename within the same filesystem is atomic on POSIX
            os.rename(temp_path, self._filename)
        except Exception:
            # don't leave the temporary file behind on failure
            os.unlink(temp_path)
            raise
0 | 65 |
new file mode 100644 |
... | ... |
@@ -0,0 +1,112 @@ |
0 |
+# (c) 2014, Michael DeHaan <michael.dehaan@gmail.com> |
|
1 |
+# |
|
2 |
+# This file is part of Ansible |
|
3 |
+# |
|
4 |
+# Ansible is free software: you can redistribute it and/or modify |
|
5 |
+# it under the terms of the GNU General Public License as published by |
|
6 |
+# the Free Software Foundation, either version 3 of the License, or |
|
7 |
+# (at your option) any later version. |
|
8 |
+# |
|
9 |
+# Ansible is distributed in the hope that it will be useful, |
|
10 |
+# but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 |
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 |
+# GNU General Public License for more details. |
|
13 |
+# |
|
14 |
+# You should have received a copy of the GNU General Public License |
|
15 |
+# along with Ansible. If not, see <http://www.gnu.org/licenses/>. |
|
16 |
+import collections |
|
17 |
+import sys |
|
18 |
+import time |
|
19 |
+ |
|
20 |
+from ansible import constants as C |
|
21 |
+from ansible.cache.base import BaseCacheModule |
|
22 |
+ |
|
23 |
+try: |
|
24 |
+ import memcache |
|
25 |
+except ImportError: |
|
26 |
+ print 'python-memcached is required for the memcached fact cache' |
|
27 |
+ sys.exit(1) |
|
28 |
+ |
|
29 |
+ |
|
30 |
class CacheModuleKeys(collections.MutableSet):
    """
    A set subclass that keeps track of insertion time and persists
    the set in memcached.
    """
    PREFIX = 'ansible_cache_keys'

    def __init__(self, cache, *args, **kwargs):
        # cache is the memcached client used to persist the key index;
        # the index itself is a dict of key -> insertion timestamp.
        self._cache = cache
        self._keyset = dict(*args, **kwargs)

    def __contains__(self, key):
        return key in self._keyset

    def __iter__(self):
        return iter(self._keyset)

    def __len__(self):
        return len(self._keyset)

    def add(self, key):
        self._keyset[key] = time.time()
        self._cache.set(self.PREFIX, self._keyset)

    def discard(self, key):
        # MutableSet.discard must be a no-op for missing members; the
        # previous 'del' raised KeyError for unknown keys.
        self._keyset.pop(key, None)
        self._cache.set(self.PREFIX, self._keyset)

    def remove_by_timerange(self, s_min, s_max):
        """Drop every key whose timestamp falls strictly inside (s_min, s_max)."""
        # Iterate over a snapshot: deleting from the dict while iterating
        # its live keys() breaks on Python 3 and is fragile in general.
        for k in list(self._keyset):
            if s_min < self._keyset[k] < s_max:
                del self._keyset[k]
        self._cache.set(self.PREFIX, self._keyset)
|
64 |
+ |
|
65 |
+ |
|
66 |
class CacheModule(BaseCacheModule):
    """Fact cache backed by memcached.

    Values live in memcached under a configurable prefix; a persisted
    CacheModuleKeys index records insertion times so stale keys can be
    expired by age.
    """

    def __init__(self, *args, **kwargs):
        # "host:port,host:port" from configuration, defaulting to a
        # single local memcached instance.
        if C.CACHE_PLUGIN_CONNECTION:
            servers = C.CACHE_PLUGIN_CONNECTION.split(',')
        else:
            servers = ['127.0.0.1:11211']

        self._timeout = C.CACHE_PLUGIN_TIMEOUT
        self._prefix = C.CACHE_PLUGIN_PREFIX
        self._cache = memcache.Client(servers, debug=0)
        # restore the persisted key index, if any
        self._keys = CacheModuleKeys(self._cache, self._cache.get(CacheModuleKeys.PREFIX) or [])

    def _make_key(self, key):
        # every cache entry shares the configured prefix
        return "{}{}".format(self._prefix, key)

    def _expire_keys(self):
        # a timeout of zero means entries never age out
        if self._timeout > 0:
            self._keys.remove_by_timerange(0, time.time() - self._timeout)

    def get(self, key):
        value = self._cache.get(self._make_key(key))
        # guard against the key not being removed from the zset;
        # this could happen in cases where the timeout value is changed
        # between invocations
        if value is None:
            self.delete(key)
            raise KeyError
        return value

    def set(self, key, value):
        # memcached enforces the expiry itself via the 'time' argument;
        # the key index tracks it independently for keys()/contains()
        self._cache.set(self._make_key(key), value, time=self._timeout)
        self._keys.add(key)

    def keys(self):
        self._expire_keys()
        return list(iter(self._keys))

    def contains(self, key):
        self._expire_keys()
        return key in self._keys

    def delete(self, key):
        self._cache.delete(self._make_key(key))
        self._keys.discard(key)
0 | 112 |
new file mode 100644 |
... | ... |
@@ -0,0 +1,37 @@ |
0 |
+# (c) 2014, Michael DeHaan <michael.dehaan@gmail.com> |
|
1 |
+# |
|
2 |
+# This file is part of Ansible |
|
3 |
+# |
|
4 |
+# Ansible is free software: you can redistribute it and/or modify |
|
5 |
+# it under the terms of the GNU General Public License as published by |
|
6 |
+# the Free Software Foundation, either version 3 of the License, or |
|
7 |
+# (at your option) any later version. |
|
8 |
+# |
|
9 |
+# Ansible is distributed in the hope that it will be useful, |
|
10 |
+# but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 |
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 |
+# GNU General Public License for more details. |
|
13 |
+# |
|
14 |
+# You should have received a copy of the GNU General Public License |
|
15 |
+# along with Ansible. If not, see <http://www.gnu.org/licenses/>. |
|
16 |
+ |
|
17 |
+ |
|
18 |
class CacheModule(object):
    """Default, in-memory fact cache.

    Facts only live for the duration of the current process; nothing is
    persisted anywhere.
    """

    def __init__(self, *args, **kwargs):
        # key -> fact dict, local to this process
        self._cache = {}

    def get(self, key):
        # mirrors dict.get: returns None for unknown keys
        return self._cache.get(key)

    def set(self, key, value):
        self._cache[key] = value

    def keys(self):
        return self._cache.keys()

    def contains(self, key):
        return key in self._cache

    def delete(self, key):
        # raises KeyError for unknown keys, like 'del' on a dict
        del self._cache[key]
0 | 37 |
new file mode 100644 |
... | ... |
@@ -0,0 +1,108 @@ |
0 |
+# (c) 2014, Michael DeHaan <michael.dehaan@gmail.com> |
|
1 |
+# |
|
2 |
+# This file is part of Ansible |
|
3 |
+# |
|
4 |
+# Ansible is free software: you can redistribute it and/or modify |
|
5 |
+# it under the terms of the GNU General Public License as published by |
|
6 |
+# the Free Software Foundation, either version 3 of the License, or |
|
7 |
+# (at your option) any later version. |
|
8 |
+# |
|
9 |
+# Ansible is distributed in the hope that it will be useful, |
|
10 |
+# but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 |
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 |
+# GNU General Public License for more details. |
|
13 |
+# |
|
14 |
+# You should have received a copy of the GNU General Public License |
|
15 |
+# along with Ansible. If not, see <http://www.gnu.org/licenses/>. |
|
16 |
+from __future__ import absolute_import |
|
17 |
+ |
|
18 |
+import collections |
|
19 |
+import pickle |
|
20 |
+import sys |
|
21 |
+import time |
|
22 |
+ |
|
23 |
+from ansible import constants as C |
|
24 |
+from ansible.cache.base import BaseCacheModule |
|
25 |
+ |
|
26 |
+try: |
|
27 |
+ from redis import StrictRedis |
|
28 |
+except ImportError: |
|
29 |
+ print "The 'redis' Python module is required for the redis fact cache" |
|
30 |
+ sys.exit(1) |
|
31 |
+ |
|
32 |
+ |
|
33 |
class PickledRedis(StrictRedis):
    """
    A subclass of StrictRedis that pickles values on the way in and
    unpickles them on the way out, so arbitrary Python objects can be
    stored.

    NOTE(review): pickle executes code on load -- only safe as long as the
    redis instance holds trusted data.
    """
    def get(self, name):
        pickled_value = super(PickledRedis, self).get(name)
        # a missing key comes back as None and must not reach pickle.loads
        return None if pickled_value is None else pickle.loads(pickled_value)

    def set(self, name, value, *args, **kwargs):
        return super(PickledRedis, self).set(name, pickle.dumps(value), *args, **kwargs)

    def setex(self, name, time, value):
        return super(PickledRedis, self).setex(name, time, pickle.dumps(value))
|
49 |
+ |
|
50 |
+ |
|
51 |
class CacheModule(BaseCacheModule):
    """
    A caching module backed by redis.

    Keys are maintained in a zset with their score being the timestamp
    when they are inserted. This allows for the usage of 'zremrangebyscore'
    to expire keys. This mechanism is used over a pattern-matched 'scan'
    for performance.
    """
    def __init__(self, *args, **kwargs):
        # CACHE_PLUGIN_CONNECTION is "host:port:db"; with no configuration
        # StrictRedis falls back to its own localhost defaults.
        if C.CACHE_PLUGIN_CONNECTION:
            connection = C.CACHE_PLUGIN_CONNECTION.split(':')
        else:
            connection = []

        self._timeout = C.CACHE_PLUGIN_TIMEOUT
        self._prefix = C.CACHE_PLUGIN_PREFIX
        self._cache = PickledRedis(*connection)
        self._keys_set = 'ansible_cache_keys'

    def _make_key(self, key):
        # every cache entry shares the configured prefix
        return "{}{}".format(self._prefix, key)

    def get(self, key):
        value = self._cache.get(self._make_key(key))
        # guard against the key not being removed from the zset;
        # this could happen in cases where the timeout value is changed
        # between invocations
        if value is None:
            self.delete(key)
            raise KeyError
        return value

    def set(self, key, value):
        if self._timeout > 0:  # a timeout of 0 is handled as meaning 'never expire'
            self._cache.setex(self._make_key(key), self._timeout, value)
        else:
            self._cache.set(self._make_key(key), value)

        # record insertion time in the key index zset
        self._cache.zadd(self._keys_set, time.time(), key)

    def _expire_keys(self):
        # prune index entries older than the configured timeout
        if self._timeout > 0:
            expiry_age = time.time() - self._timeout
            self._cache.zremrangebyscore(self._keys_set, 0, expiry_age)

    def keys(self):
        self._expire_keys()
        return self._cache.zrange(self._keys_set, 0, -1)

    def contains(self, key):
        self._expire_keys()
        # zrank returns None for a missing member; 'None >= 0' was only
        # accidentally falsy on Python 2 and is a TypeError on Python 3,
        # so test for presence explicitly.
        return self._cache.zrank(self._keys_set, key) is not None

    def delete(self, key):
        self._cache.delete(self._make_key(key))
        self._cache.zrem(self._keys_set, key)
... | ... |
@@ -140,6 +140,7 @@ DEFAULT_ASK_SU_PASS = get_config(p, DEFAULTS, 'ask_su_pass', 'ANSIBLE_ASK_ |
140 | 140 |
DEFAULT_GATHERING = get_config(p, DEFAULTS, 'gathering', 'ANSIBLE_GATHERING', 'implicit').lower() |
141 | 141 |
|
142 | 142 |
DEFAULT_ACTION_PLUGIN_PATH = get_config(p, DEFAULTS, 'action_plugins', 'ANSIBLE_ACTION_PLUGINS', '/usr/share/ansible_plugins/action_plugins') |
143 |
+DEFAULT_CACHE_PLUGIN_PATH = get_config(p, DEFAULTS, 'cache_plugins', 'ANSIBLE_CACHE_PLUGINS', '/usr/share/ansible_plugins/cache_plugins') |
|
143 | 144 |
DEFAULT_CALLBACK_PLUGIN_PATH = get_config(p, DEFAULTS, 'callback_plugins', 'ANSIBLE_CALLBACK_PLUGINS', '/usr/share/ansible_plugins/callback_plugins') |
144 | 145 |
DEFAULT_CONNECTION_PLUGIN_PATH = get_config(p, DEFAULTS, 'connection_plugins', 'ANSIBLE_CONNECTION_PLUGINS', '/usr/share/ansible_plugins/connection_plugins') |
145 | 146 |
DEFAULT_LOOKUP_PLUGIN_PATH = get_config(p, DEFAULTS, 'lookup_plugins', 'ANSIBLE_LOOKUP_PLUGINS', '/usr/share/ansible_plugins/lookup_plugins') |
... | ... |
@@ -147,6 +148,11 @@ DEFAULT_VARS_PLUGIN_PATH = get_config(p, DEFAULTS, 'vars_plugins', ' |
147 | 147 |
DEFAULT_FILTER_PLUGIN_PATH = get_config(p, DEFAULTS, 'filter_plugins', 'ANSIBLE_FILTER_PLUGINS', '/usr/share/ansible_plugins/filter_plugins') |
148 | 148 |
DEFAULT_LOG_PATH = shell_expand_path(get_config(p, DEFAULTS, 'log_path', 'ANSIBLE_LOG_PATH', '')) |
149 | 149 |
|
150 |
+CACHE_PLUGIN = get_config(p, DEFAULTS, 'cache_plugin', 'ANSIBLE_CACHE_PLUGIN', 'memory') |
|
151 |
+CACHE_PLUGIN_CONNECTION = get_config(p, DEFAULTS, 'cache_plugin_connection', 'ANSIBLE_CACHE_PLUGIN_CONNECTION', None) |
|
152 |
+CACHE_PLUGIN_PREFIX = get_config(p, DEFAULTS, 'cache_plugin_prefix', 'ANSIBLE_CACHE_PLUGIN_PREFIX', 'ansible_facts') |
|
153 |
+CACHE_PLUGIN_TIMEOUT = get_config(p, DEFAULTS, 'cache_plugin_timeout', 'ANSIBLE_CACHE_PLUGIN_TIMEOUT', (60 * 60 * 24), integer=True) |
|
154 |
+ |
|
150 | 155 |
ANSIBLE_FORCE_COLOR = get_config(p, DEFAULTS, 'force_color', 'ANSIBLE_FORCE_COLOR', None, boolean=True) |
151 | 156 |
ANSIBLE_NOCOLOR = get_config(p, DEFAULTS, 'nocolor', 'ANSIBLE_NOCOLOR', None, boolean=True) |
152 | 157 |
ANSIBLE_NOCOWS = get_config(p, DEFAULTS, 'nocows', 'ANSIBLE_NOCOWS', None, boolean=True) |
... | ... |
@@ -22,6 +22,7 @@ from ansible.utils.template import template |
22 | 22 |
from ansible import utils |
23 | 23 |
from ansible import errors |
24 | 24 |
import ansible.callbacks |
25 |
+import ansible.cache |
|
25 | 26 |
import os |
26 | 27 |
import shlex |
27 | 28 |
import collections |
... | ... |
@@ -32,9 +33,10 @@ import pipes |
32 | 32 |
# the setup cache stores all variables about a host |
33 | 33 |
# gathered during the setup step, while the vars cache |
34 | 34 |
# holds all other variables about a host |
35 |
-SETUP_CACHE = collections.defaultdict(dict) |
|
35 |
+SETUP_CACHE = ansible.cache.FactCache() |
|
36 | 36 |
VARS_CACHE = collections.defaultdict(dict) |
37 | 37 |
|
38 |
+ |
|
38 | 39 |
class PlayBook(object): |
39 | 40 |
''' |
40 | 41 |
runs an ansible playbook, given as a datastructure or YAML filename. |
... | ... |
@@ -98,7 +100,7 @@ class PlayBook(object): |
98 | 98 |
inventory: can be specified instead of host_list to use a pre-existing inventory object |
99 | 99 |
check: don't change anything, just try to detect some potential changes |
100 | 100 |
any_errors_fatal: terminate the entire execution immediately when one of the hosts has failed |
101 |
- force_handlers: continue to notify and run handlers even if a task fails |
|
101 |
+ force_handlers: continue to notify and run handlers even if a task fails |
|
102 | 102 |
""" |
103 | 103 |
|
104 | 104 |
self.SETUP_CACHE = SETUP_CACHE |
... | ... |
@@ -187,7 +189,7 @@ class PlayBook(object): |
187 | 187 |
|
188 | 188 |
def _get_playbook_vars(self, play_ds, existing_vars): |
189 | 189 |
''' |
190 |
- Gets the vars specified with the play and blends them |
|
190 |
+ Gets the vars specified with the play and blends them |
|
191 | 191 |
with any existing vars that have already been read in |
192 | 192 |
''' |
193 | 193 |
new_vars = existing_vars.copy() |
... | ... |
@@ -470,6 +472,13 @@ class PlayBook(object): |
470 | 470 |
contacted = results.get('contacted', {}) |
471 | 471 |
self.stats.compute(results, ignore_errors=task.ignore_errors) |
472 | 472 |
|
473 |
+ def _register_play_vars(host, result): |
|
474 |
+ # when 'register' is used, persist the result in the vars cache |
|
475 |
+ # rather than the setup cache - vars should be transient between playbook executions |
|
476 |
+ if 'stdout' in result and 'stdout_lines' not in result: |
|
477 |
+ result['stdout_lines'] = result['stdout'].splitlines() |
|
478 |
+ utils.update_hash(self.VARS_CACHE, host, {task.register: result}) |
|
479 |
+ |
|
473 | 480 |
# add facts to the global setup cache |
474 | 481 |
for host, result in contacted.iteritems(): |
475 | 482 |
if 'results' in result: |
... | ... |
@@ -478,22 +487,19 @@ class PlayBook(object): |
478 | 478 |
for res in result['results']: |
479 | 479 |
if type(res) == dict: |
480 | 480 |
facts = res.get('ansible_facts', {}) |
481 |
- self.SETUP_CACHE[host].update(facts) |
|
481 |
+ utils.update_hash(self.SETUP_CACHE, host, facts) |
|
482 | 482 |
else: |
483 |
+ # when facts are returned, persist them in the setup cache |
|
483 | 484 |
facts = result.get('ansible_facts', {}) |
484 |
- self.SETUP_CACHE[host].update(facts) |
|
485 |
+ utils.update_hash(self.SETUP_CACHE, host, facts) |
|
485 | 486 |
if task.register: |
486 |
- if 'stdout' in result and 'stdout_lines' not in result: |
|
487 |
- result['stdout_lines'] = result['stdout'].splitlines() |
|
488 |
- self.SETUP_CACHE[host][task.register] = result |
|
487 |
+ _register_play_vars(host, result) |
|
489 | 488 |
|
490 | 489 |
# also have to register some failed, but ignored, tasks |
491 | 490 |
if task.ignore_errors and task.register: |
492 | 491 |
failed = results.get('failed', {}) |
493 | 492 |
for host, result in failed.iteritems(): |
494 |
- if 'stdout' in result and 'stdout_lines' not in result: |
|
495 |
- result['stdout_lines'] = result['stdout'].splitlines() |
|
496 |
- self.SETUP_CACHE[host][task.register] = result |
|
493 |
+ _register_play_vars(host, result) |
|
497 | 494 |
|
498 | 495 |
# flag which notify handlers need to be run |
499 | 496 |
if len(task.notify) > 0: |
... | ... |
@@ -585,8 +591,8 @@ class PlayBook(object): |
585 | 585 |
# let runner template out future commands |
586 | 586 |
setup_ok = setup_results.get('contacted', {}) |
587 | 587 |
for (host, result) in setup_ok.iteritems(): |
588 |
- self.SETUP_CACHE[host].update({'module_setup': True}) |
|
589 |
- self.SETUP_CACHE[host].update(result.get('ansible_facts', {})) |
|
588 |
+ utils.update_hash(self.SETUP_CACHE, host, {'module_setup': True}) |
|
589 |
+ utils.update_hash(self.SETUP_CACHE, host, result.get('ansible_facts', {})) |
|
590 | 590 |
return setup_results |
591 | 591 |
|
592 | 592 |
# ***************************************************** |
... | ... |
@@ -620,7 +626,7 @@ class PlayBook(object): |
620 | 620 |
|
621 | 621 |
def _run_play(self, play): |
622 | 622 |
''' run a list of tasks for a given pattern, in order ''' |
623 |
- |
|
623 |
+ |
|
624 | 624 |
self.callbacks.on_play_start(play.name) |
625 | 625 |
# Get the hosts for this play |
626 | 626 |
play._play_hosts = self.inventory.list_hosts(play.hosts) |
... | ... |
@@ -29,6 +29,7 @@ import os |
29 | 29 |
import sys |
30 | 30 |
import uuid |
31 | 31 |
|
32 |
+ |
|
32 | 33 |
class Play(object): |
33 | 34 |
|
34 | 35 |
__slots__ = [ |
... | ... |
@@ -85,7 +86,7 @@ class Play(object): |
85 | 85 |
# now we load the roles into the datastructure |
86 | 86 |
self.included_roles = [] |
87 | 87 |
ds = self._load_roles(self.roles, ds) |
88 |
- |
|
88 |
+ |
|
89 | 89 |
# and finally re-process the vars files as they may have |
90 | 90 |
# been updated by the included roles |
91 | 91 |
self.vars_files = ds.get('vars_files', []) |
... | ... |
@@ -153,6 +154,7 @@ class Play(object): |
153 | 153 |
self._tasks = self._load_tasks(self._ds.get('tasks', []), load_vars) |
154 | 154 |
self._handlers = self._load_tasks(self._ds.get('handlers', []), load_vars) |
155 | 155 |
|
156 |
+ |
|
156 | 157 |
# apply any missing tags to role tasks |
157 | 158 |
self._late_merge_role_tags() |
158 | 159 |
|
... | ... |
@@ -167,7 +169,7 @@ class Play(object): |
167 | 167 |
def _get_role_path(self, role): |
168 | 168 |
""" |
169 | 169 |
Returns the path on disk to the directory containing |
170 |
- the role directories like tasks, templates, etc. Also |
|
170 |
+ the role directories like tasks, templates, etc. Also |
|
171 | 171 |
returns any variables that were included with the role |
172 | 172 |
""" |
173 | 173 |
orig_path = template(self.basedir,role,self.vars) |
... | ... |
@@ -242,7 +244,7 @@ class Play(object): |
242 | 242 |
allow_dupes = utils.boolean(meta_data.get('allow_duplicates','')) |
243 | 243 |
|
244 | 244 |
# if any tags were specified as role/dep variables, merge |
245 |
- # them into the current dep_vars so they're passed on to any |
|
245 |
+ # them into the current dep_vars so they're passed on to any |
|
246 | 246 |
# further dependencies too, and so we only have one place |
247 | 247 |
# (dep_vars) to look for tags going forward |
248 | 248 |
def __merge_tags(var_obj): |
... | ... |
@@ -318,7 +320,7 @@ class Play(object): |
318 | 318 |
dep_stack.append([dep,dep_path,dep_vars,dep_defaults_data]) |
319 | 319 |
|
320 | 320 |
# only add the current role when we're at the top level, |
321 |
- # otherwise we'll end up in a recursive loop |
|
321 |
+ # otherwise we'll end up in a recursive loop |
|
322 | 322 |
if level == 0: |
323 | 323 |
self.included_roles.append(role) |
324 | 324 |
dep_stack.append([role,role_path,role_vars,defaults_data]) |
... | ... |
@@ -505,7 +507,7 @@ class Play(object): |
505 | 505 |
if not isinstance(x, dict): |
506 | 506 |
raise errors.AnsibleError("expecting dict; got: %s, error in %s" % (x, original_file)) |
507 | 507 |
|
508 |
- # evaluate sudo vars for current and child tasks |
|
508 |
+ # evaluate sudo vars for current and child tasks |
|
509 | 509 |
included_sudo_vars = {} |
510 | 510 |
for k in ["sudo", "sudo_user"]: |
511 | 511 |
if k in x: |
... | ... |
@@ -554,7 +556,7 @@ class Play(object): |
554 | 554 |
else: |
555 | 555 |
default_vars = utils.combine_vars(self.default_vars, default_vars) |
556 | 556 |
|
557 |
- # append the vars defined with the include (from above) |
|
557 |
+ # append the vars defined with the include (from above) |
|
558 | 558 |
# as well as the old-style 'vars' element. The old-style |
559 | 559 |
# vars are given higher precedence here (just in case) |
560 | 560 |
task_vars = utils.combine_vars(task_vars, include_vars) |
... | ... |
@@ -610,8 +612,8 @@ class Play(object): |
610 | 610 |
|
611 | 611 |
def _is_valid_tag(self, tag_list): |
612 | 612 |
""" |
613 |
- Check to see if the list of tags passed in is in the list of tags |
|
614 |
- we only want (playbook.only_tags), or if it is not in the list of |
|
613 |
+ Check to see if the list of tags passed in is in the list of tags |
|
614 |
+ we only want (playbook.only_tags), or if it is not in the list of |
|
615 | 615 |
tags we don't want (playbook.skip_tags). |
616 | 616 |
""" |
617 | 617 |
matched_skip_tags = set(tag_list) & set(self.playbook.skip_tags) |
... | ... |
@@ -774,7 +776,7 @@ class Play(object): |
774 | 774 |
inject.update(self.vars) |
775 | 775 |
filename4 = template(self.basedir, filename3, inject) |
776 | 776 |
filename4 = utils.path_dwim(self.basedir, filename4) |
777 |
- else: |
|
777 |
+ else: |
|
778 | 778 |
filename4 = utils.path_dwim(self.basedir, filename3) |
779 | 779 |
return filename2, filename3, filename4 |
780 | 780 |
|
... | ... |
@@ -823,7 +825,7 @@ class Play(object): |
823 | 823 |
inject.update(self.playbook.SETUP_CACHE.get(host, {})) |
824 | 824 |
inject.update(self.playbook.VARS_CACHE.get(host, {})) |
825 | 825 |
else: |
826 |
- inject = None |
|
826 |
+ inject = None |
|
827 | 827 |
|
828 | 828 |
for filename in self.vars_files: |
829 | 829 |
if type(filename) == list: |
... | ... |
@@ -854,4 +856,4 @@ class Play(object): |
854 | 854 |
|
855 | 855 |
# finally, update the VARS_CACHE for the host, if it is set |
856 | 856 |
if host is not None: |
857 |
- self.playbook.VARS_CACHE[host].update(self.playbook.extra_vars) |
|
857 |
+ self.playbook.VARS_CACHE.setdefault(host, {}).update(self.playbook.extra_vars) |
... | ... |
@@ -108,14 +108,14 @@ class PluginLoader(object): |
108 | 108 |
if fullpath not in ret: |
109 | 109 |
ret.append(fullpath) |
110 | 110 |
|
111 |
- # look in any configured plugin paths, allow one level deep for subcategories |
|
111 |
+ # look in any configured plugin paths, allow one level deep for subcategories |
|
112 | 112 |
configured_paths = self.config.split(os.pathsep) |
113 | 113 |
for path in configured_paths: |
114 | 114 |
path = os.path.realpath(os.path.expanduser(path)) |
115 | 115 |
contents = glob.glob("%s/*" % path) |
116 | 116 |
for c in contents: |
117 | 117 |
if os.path.isdir(c) and c not in ret: |
118 |
- ret.append(c) |
|
118 |
+ ret.append(c) |
|
119 | 119 |
if path not in ret: |
120 | 120 |
ret.append(path) |
121 | 121 |
|
... | ... |
@@ -181,7 +181,7 @@ class PluginLoader(object): |
181 | 181 |
return getattr(self._module_cache[path], self.class_name)(*args, **kwargs) |
182 | 182 |
|
183 | 183 |
def all(self, *args, **kwargs): |
184 |
- ''' instantiates all plugins with the same arguments ''' |
|
184 |
+ ''' instantiates all plugins with the same arguments ''' |
|
185 | 185 |
|
186 | 186 |
for i in self._get_paths(): |
187 | 187 |
matches = glob.glob(os.path.join(i, "*.py")) |
... | ... |
@@ -195,24 +195,31 @@ class PluginLoader(object): |
195 | 195 |
yield getattr(self._module_cache[path], self.class_name)(*args, **kwargs) |
196 | 196 |
|
197 | 197 |
action_loader = PluginLoader( |
198 |
- 'ActionModule', |
|
198 |
+ 'ActionModule', |
|
199 | 199 |
'ansible.runner.action_plugins', |
200 | 200 |
C.DEFAULT_ACTION_PLUGIN_PATH, |
201 | 201 |
'action_plugins' |
202 | 202 |
) |
203 | 203 |
|
204 |
+cache_loader = PluginLoader( |
|
205 |
+ 'CacheModule', |
|
206 |
+ 'ansible.cache', |
|
207 |
+ C.DEFAULT_CACHE_PLUGIN_PATH, |
|
208 |
+ 'cache_plugins' |
|
209 |
+) |
|
210 |
+ |
|
204 | 211 |
callback_loader = PluginLoader( |
205 |
- 'CallbackModule', |
|
206 |
- 'ansible.callback_plugins', |
|
207 |
- C.DEFAULT_CALLBACK_PLUGIN_PATH, |
|
212 |
+ 'CallbackModule', |
|
213 |
+ 'ansible.callback_plugins', |
|
214 |
+ C.DEFAULT_CALLBACK_PLUGIN_PATH, |
|
208 | 215 |
'callback_plugins' |
209 | 216 |
) |
210 | 217 |
|
211 | 218 |
connection_loader = PluginLoader( |
212 |
- 'Connection', |
|
213 |
- 'ansible.runner.connection_plugins', |
|
214 |
- C.DEFAULT_CONNECTION_PLUGIN_PATH, |
|
215 |
- 'connection_plugins', |
|
219 |
+ 'Connection', |
|
220 |
+ 'ansible.runner.connection_plugins', |
|
221 |
+ C.DEFAULT_CONNECTION_PLUGIN_PATH, |
|
222 |
+ 'connection_plugins', |
|
216 | 223 |
aliases={'paramiko': 'paramiko_ssh'} |
217 | 224 |
) |
218 | 225 |
|
... | ... |
@@ -224,30 +231,30 @@ shell_loader = PluginLoader( |
224 | 224 |
) |
225 | 225 |
|
226 | 226 |
module_finder = PluginLoader( |
227 |
- '', |
|
228 |
- '', |
|
229 |
- C.DEFAULT_MODULE_PATH, |
|
227 |
+ '', |
|
228 |
+ '', |
|
229 |
+ C.DEFAULT_MODULE_PATH, |
|
230 | 230 |
'library' |
231 | 231 |
) |
232 | 232 |
|
233 | 233 |
lookup_loader = PluginLoader( |
234 |
- 'LookupModule', |
|
235 |
- 'ansible.runner.lookup_plugins', |
|
236 |
- C.DEFAULT_LOOKUP_PLUGIN_PATH, |
|
234 |
+ 'LookupModule', |
|
235 |
+ 'ansible.runner.lookup_plugins', |
|
236 |
+ C.DEFAULT_LOOKUP_PLUGIN_PATH, |
|
237 | 237 |
'lookup_plugins' |
238 | 238 |
) |
239 | 239 |
|
240 | 240 |
vars_loader = PluginLoader( |
241 |
- 'VarsModule', |
|
242 |
- 'ansible.inventory.vars_plugins', |
|
243 |
- C.DEFAULT_VARS_PLUGIN_PATH, |
|
241 |
+ 'VarsModule', |
|
242 |
+ 'ansible.inventory.vars_plugins', |
|
243 |
+ C.DEFAULT_VARS_PLUGIN_PATH, |
|
244 | 244 |
'vars_plugins' |
245 | 245 |
) |
246 | 246 |
|
247 | 247 |
filter_loader = PluginLoader( |
248 |
- 'FilterModule', |
|
249 |
- 'ansible.runner.filter_plugins', |
|
250 |
- C.DEFAULT_FILTER_PLUGIN_PATH, |
|
248 |
+ 'FilterModule', |
|
249 |
+ 'ansible.runner.filter_plugins', |
|
250 |
+ C.DEFAULT_FILTER_PLUGIN_PATH, |
|
251 | 251 |
'filter_plugins' |
252 | 252 |
) |
253 | 253 |
|