Merge lp:~cjwatson/charms/trusty/turnipcake/build-label into lp:~canonical-launchpad-branches/charms/trusty/turnipcake/devel
- Trusty Tahr (14.04)
- build-label
- Merge into devel
Status: | Merged |
---|---|
Merged at revision: | 22 |
Proposed branch: | lp:~cjwatson/charms/trusty/turnipcake/build-label |
Merge into: | lp:~canonical-launchpad-branches/charms/trusty/turnipcake/devel |
Diff against target: |
449 lines (+220/-82) 6 files modified
.bzrignore (+1/-2) Makefile.common (+24/-21) config.yaml (+28/-0) deploy-requirements.txt (+0/-1) hooks/actions.py (+165/-55) hooks/services.py (+2/-3) |
To merge this branch: | bzr merge lp:~cjwatson/charms/trusty/turnipcake/build-label |
Related bugs: |
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
Colin Watson (community) | Approve | ||
Review via email: mp+275469@code.launchpad.net |
Commit message
Allow updating the code payload separately from the charm using a build label.
Description of the change
Allow updating the code payload separately from the charm using a build label. This parallels https://code.launchpad.net/~cjwatson/charms/trusty/turnip/build-label/+merge/275468
The approach used here is based heavily on the software-
The virtualenv moves inside the payload directory so that each payload gets its own. /srv/turnipcake
There are no tests directly here, but it'll at least get integration testing by way of the corresponding changes to the Mojo spec.
- 23. By Colin Watson
-
Fix over-indentation.
Preview Diff
1 | === modified file '.bzrignore' | |||
2 | --- .bzrignore 2015-02-11 01:09:24 +0000 | |||
3 | +++ .bzrignore 2015-11-02 11:16:30 +0000 | |||
4 | @@ -1,3 +1,2 @@ | |||
5 | 1 | lib/charmhelpers | 1 | lib/charmhelpers |
8 | 2 | files/*.tar.gz | 2 | files/* |
7 | 3 | files/*.tgz | ||
9 | 4 | 3 | ||
10 | === modified file 'Makefile.common' | |||
11 | --- Makefile.common 2015-03-26 02:18:32 +0000 | |||
12 | +++ Makefile.common 2015-11-02 11:16:30 +0000 | |||
13 | @@ -7,13 +7,13 @@ | |||
14 | 7 | TEST_PREFIX := PYTHONPATH=$(HOOKS_DIR) | 7 | TEST_PREFIX := PYTHONPATH=$(HOOKS_DIR) |
15 | 8 | TEST_DIR := $(PWD)/tests | 8 | TEST_DIR := $(PWD)/tests |
16 | 9 | SOURCE_DIR ?= $(shell dirname $(PWD))/.source/$(APP_NAME) | 9 | SOURCE_DIR ?= $(shell dirname $(PWD))/.source/$(APP_NAME) |
18 | 10 | PIP_CACHE := $(PWD)/files/pip-cache | 10 | FILES_DIR := $(PWD)/files |
19 | 11 | 11 | ||
25 | 12 | ifeq ($(PIP_SOURCE_DIR),) | 12 | BUILD_LABEL = $(shell bzr log -rlast: --show-ids $(SOURCE_DIR) | sed -n 's/^revision-id: //p') |
26 | 13 | PIP_CACHE_ARGS := | 13 | TARBALL = $(APP_NAME).tar.gz |
27 | 14 | else | 14 | ASSET = $(FILES_DIR)/$(BUILD_LABEL)/$(TARBALL) |
28 | 15 | PIP_CACHE_ARGS := --no-index --find-links=file://$(PIP_SOURCE_DIR) | 15 | UNIT = $(APP_NAME)/0 |
29 | 16 | endif | 16 | CHARM_UNIT_PATH := /var/lib/juju/agents/unit-$(APP_NAME)-0/charm |
30 | 17 | 17 | ||
31 | 18 | all: setup lint test | 18 | all: setup lint test |
32 | 19 | 19 | ||
33 | @@ -22,9 +22,22 @@ | |||
34 | 22 | @juju upgrade-charm --repository=../.. $(APP_NAME) | 22 | @juju upgrade-charm --repository=../.. $(APP_NAME) |
35 | 23 | 23 | ||
36 | 24 | 24 | ||
38 | 25 | deploy: tarball pip-cache | 25 | deploy: payload |
39 | 26 | @echo "Deploying $(APP_NAME)..." | 26 | @echo "Deploying $(APP_NAME)..." |
40 | 27 | @juju deploy --repository=../.. local:trusty/$(APP_NAME) | 27 | @juju deploy --repository=../.. local:trusty/$(APP_NAME) |
41 | 28 | @$(MAKE) rollout SKIP_BUILD=true | ||
42 | 29 | |||
43 | 30 | |||
44 | 31 | # deploy a new revision/branch | ||
45 | 32 | rollout: _PATH=$(CHARM_UNIT_PATH)/files/$(BUILD_LABEL) | ||
46 | 33 | rollout: | ||
47 | 34 | ifneq ($(SKIP_BUILD),true) | ||
48 | 35 | $(MAKE) payload | ||
49 | 36 | endif | ||
50 | 37 | # manually copy our asset to be in the right place, rather than upgrade-charm | ||
51 | 38 | juju scp $(ASSET) $(UNIT):$(TARBALL) | ||
52 | 39 | juju ssh $(UNIT) 'sudo mkdir -p $(_PATH) && sudo mv $(TARBALL) $(_PATH)/' | ||
53 | 40 | juju set $(APP_NAME) build_label=$(BUILD_LABEL) | ||
54 | 28 | 41 | ||
55 | 29 | 42 | ||
56 | 30 | ifeq ($(NO_FETCH_CODE),) | 43 | ifeq ($(NO_FETCH_CODE),) |
57 | @@ -41,23 +54,14 @@ | |||
58 | 41 | endif | 54 | endif |
59 | 42 | 55 | ||
60 | 43 | 56 | ||
61 | 44 | pip-cache: fetch-code | ||
62 | 45 | @echo "Updating python dependency cache..." | ||
63 | 46 | @mkdir -p $(PIP_CACHE) | ||
64 | 47 | @pip install $(PIP_CACHE_ARGS) --no-use-wheel --download $(PIP_CACHE) \ | ||
65 | 48 | -r $(SOURCE_DIR)/requirements.txt \ | ||
66 | 49 | -r deploy-requirements.txt | ||
67 | 50 | |||
68 | 51 | |||
69 | 52 | check-rev: | 57 | check-rev: |
70 | 53 | ifndef REV | 58 | ifndef REV |
71 | 54 | $(error Revision number required to fetch source: e.g. $ REV=10 make deploy) | 59 | $(error Revision number required to fetch source: e.g. $ REV=10 make deploy) |
72 | 55 | endif | 60 | endif |
73 | 56 | 61 | ||
78 | 57 | tarball: fetch-code | 62 | payload: fetch-code |
79 | 58 | @echo "Creating tarball for deploy..." | 63 | @echo "Building asset for $(BUILD_LABEL)..." |
80 | 59 | @mkdir -p files/ | 64 | @$(MAKE) -C $(SOURCE_DIR) build-tarball TARBALL_BUILDS_DIR=$(FILES_DIR) |
77 | 60 | @tar czf files/$(APP_NAME).tar.gz -C $(SOURCE_DIR) . | ||
81 | 61 | 65 | ||
82 | 62 | 66 | ||
83 | 63 | # The following targets are for charm maintenance. | 67 | # The following targets are for charm maintenance. |
84 | @@ -67,7 +71,6 @@ | |||
85 | 67 | @find . -depth -name '__pycache__' -exec rm -rf '{}' \; | 71 | @find . -depth -name '__pycache__' -exec rm -rf '{}' \; |
86 | 68 | @rm -f .coverage | 72 | @rm -f .coverage |
87 | 69 | @rm -rf $(SOURCE_DIR) | 73 | @rm -rf $(SOURCE_DIR) |
88 | 70 | @rm -rf $(PIP_CACHE) | ||
89 | 71 | @rm -rf .venv | 74 | @rm -rf .venv |
90 | 72 | 75 | ||
91 | 73 | 76 | ||
92 | @@ -100,4 +103,4 @@ | |||
93 | 100 | @echo "Starting tests..." | 103 | @echo "Starting tests..." |
94 | 101 | @$(TEST_PREFIX) .venv/bin/coverage run -m unittest discover -s unit_tests | 104 | @$(TEST_PREFIX) .venv/bin/coverage run -m unittest discover -s unit_tests |
95 | 102 | 105 | ||
97 | 103 | .PHONY: clean lint setup tarball test upgrade | 106 | .PHONY: clean lint setup payload test upgrade |
98 | 104 | 107 | ||
99 | === modified file 'config.yaml' | |||
100 | --- config.yaml 2015-03-26 02:18:32 +0000 | |||
101 | +++ config.yaml 2015-11-02 11:16:30 +0000 | |||
102 | @@ -3,6 +3,10 @@ | |||
103 | 3 | type: string | 3 | type: string |
104 | 4 | default: 'turnipcake' | 4 | default: 'turnipcake' |
105 | 5 | description: Name of this application. | 5 | description: Name of this application. |
106 | 6 | build_label: | ||
107 | 7 | type: string | ||
108 | 8 | default: "" | ||
109 | 9 | description: Build label to run. | ||
110 | 6 | nagios_context: | 10 | nagios_context: |
111 | 7 | default: "juju" | 11 | default: "juju" |
112 | 8 | type: string | 12 | type: string |
113 | @@ -37,3 +41,27 @@ | |||
114 | 37 | type: string | 41 | type: string |
115 | 38 | default: turnipcake | 42 | default: turnipcake |
116 | 39 | description: The service will run under this group. | 43 | description: The service will run under this group. |
117 | 44 | swift_username: | ||
118 | 45 | type: string | ||
119 | 46 | default: "" | ||
120 | 47 | description: Username to use when accessing Swift. | ||
121 | 48 | swift_password: | ||
122 | 49 | type: string | ||
123 | 50 | default: "" | ||
124 | 51 | description: Password to use when accessing Swift. | ||
125 | 52 | swift_auth_url: | ||
126 | 53 | type: string | ||
127 | 54 | default: "" | ||
128 | 55 | description: URL for authenticating against Keystone. | ||
129 | 56 | swift_region_name: | ||
130 | 57 | type: string | ||
131 | 58 | default: "" | ||
132 | 59 | description: Swift region. | ||
133 | 60 | swift_tenant_name: | ||
134 | 61 | type: string | ||
135 | 62 | default: "" | ||
136 | 63 | description: Entity that owns resources. | ||
137 | 64 | swift_container_name: | ||
138 | 65 | type: string | ||
139 | 66 | default: "" | ||
140 | 67 | description: Container to put objects in. | ||
141 | 40 | 68 | ||
142 | === removed file 'deploy-requirements.txt' | |||
143 | --- deploy-requirements.txt 2015-03-27 01:39:30 +0000 | |||
144 | +++ deploy-requirements.txt 1970-01-01 00:00:00 +0000 | |||
145 | @@ -1,1 +0,0 @@ | |||
146 | 1 | gunicorn==19.3.0 | ||
147 | 2 | 0 | ||
148 | === modified file 'hooks/actions.py' | |||
149 | --- hooks/actions.py 2015-03-27 07:41:51 +0000 | |||
150 | +++ hooks/actions.py 2015-11-02 11:16:30 +0000 | |||
151 | @@ -1,6 +1,8 @@ | |||
152 | 1 | import errno | ||
153 | 1 | import grp | 2 | import grp |
154 | 2 | import os | 3 | import os |
155 | 3 | import pwd | 4 | import pwd |
156 | 5 | import shutil | ||
157 | 4 | import subprocess | 6 | import subprocess |
158 | 5 | 7 | ||
159 | 6 | from charmhelpers import fetch | 8 | from charmhelpers import fetch |
160 | @@ -16,14 +18,22 @@ | |||
161 | 16 | 18 | ||
162 | 17 | # Globals | 19 | # Globals |
163 | 18 | CHARM_FILES_DIR = os.path.join(hookenv.charm_dir(), 'files') | 20 | CHARM_FILES_DIR = os.path.join(hookenv.charm_dir(), 'files') |
165 | 19 | REQUIRED_PACKAGES = ['python-virtualenv', 'python-dev', 'make'] | 21 | REQUIRED_PACKAGES = [ |
166 | 22 | 'python-virtualenv', | ||
167 | 23 | 'python-dev', | ||
168 | 24 | 'make', | ||
169 | 25 | 'python-swiftclient', | ||
170 | 26 | ] | ||
171 | 20 | BASE_DIR = config['base_dir'] | 27 | BASE_DIR = config['base_dir'] |
172 | 28 | PAYLOADS_DIR = os.path.join(BASE_DIR, 'payloads') | ||
173 | 21 | CODE_DIR = os.path.join(BASE_DIR, 'code') | 29 | CODE_DIR = os.path.join(BASE_DIR, 'code') |
175 | 22 | VENV_DIR = os.path.join(BASE_DIR, 'venv') | 30 | VENV_DIR = os.path.join(CODE_DIR, 'env') |
176 | 31 | OLD_VENV_DIR = os.path.join(BASE_DIR, 'venv') | ||
177 | 23 | LOGS_DIR = os.path.join(BASE_DIR, 'logs') | 32 | LOGS_DIR = os.path.join(BASE_DIR, 'logs') |
178 | 24 | DATA_DIR = os.path.join(BASE_DIR, 'data') | 33 | DATA_DIR = os.path.join(BASE_DIR, 'data') |
179 | 25 | # XXX: Should really move this outside the code dir. | 34 | # XXX: Should really move this outside the code dir. |
180 | 26 | DB_DIR = os.path.join(BASE_DIR, 'code', 'db') | 35 | DB_DIR = os.path.join(BASE_DIR, 'code', 'db') |
181 | 36 | CODE_TARBALL = 'turnipcake.tar.gz' | ||
182 | 27 | 37 | ||
183 | 28 | CODE_USER = config['code_user'] | 38 | CODE_USER = config['code_user'] |
184 | 29 | CODE_GROUP = config['code_group'] | 39 | CODE_GROUP = config['code_group'] |
185 | @@ -43,7 +53,7 @@ | |||
186 | 43 | def make_srv_location(): | 53 | def make_srv_location(): |
187 | 44 | hookenv.log('Creating directories...') | 54 | hookenv.log('Creating directories...') |
188 | 45 | 55 | ||
190 | 46 | for dir in (BASE_DIR, CODE_DIR): | 56 | for dir in (BASE_DIR, PAYLOADS_DIR): |
191 | 47 | host.mkdir(dir, owner=CODE_USER, group=CODE_GROUP, perms=0o755) | 57 | host.mkdir(dir, owner=CODE_USER, group=CODE_GROUP, perms=0o755) |
192 | 48 | for dir in (LOGS_DIR, DATA_DIR): | 58 | for dir in (LOGS_DIR, DATA_DIR): |
193 | 49 | host.mkdir(dir, owner=USER, group=GROUP, perms=0o755) | 59 | host.mkdir(dir, owner=USER, group=GROUP, perms=0o755) |
194 | @@ -63,28 +73,159 @@ | |||
195 | 63 | host.add_user_to_group(USER, GROUP) | 73 | host.add_user_to_group(USER, GROUP) |
196 | 64 | 74 | ||
197 | 65 | 75 | ||
201 | 66 | def unpack_source(service_name): | 76 | def get_swift_creds(config): |
202 | 67 | hookenv.log('Deploying source...') | 77 | return { |
203 | 68 | 78 | 'user': config['swift_username'], | |
204 | 79 | 'project': config['swift_tenant_name'], | ||
205 | 80 | 'password': config['swift_password'], | ||
206 | 81 | 'authurl': config['swift_auth_url'], | ||
207 | 82 | 'region': config['swift_region_name'], | ||
208 | 83 | } | ||
209 | 84 | |||
210 | 85 | |||
211 | 86 | def swift_base_cmd(**swift_creds): | ||
212 | 87 | return [ | ||
213 | 88 | 'swift', | ||
214 | 89 | '--os-username=' + swift_creds['user'], | ||
215 | 90 | '--os-tenant-name=' + swift_creds['project'], | ||
216 | 91 | '--os-password=' + swift_creds['password'], | ||
217 | 92 | '--os-auth-url=' + swift_creds['authurl'], | ||
218 | 93 | '--os-region-name=' + swift_creds['region'], | ||
219 | 94 | ] | ||
220 | 95 | |||
221 | 96 | |||
222 | 97 | def swift_get_etag(name, container=None, **swift_creds): | ||
223 | 98 | cmd = swift_base_cmd(**swift_creds) + ['stat', container, name] | ||
224 | 99 | file_stat = subprocess.check_output(cmd).splitlines() | ||
225 | 100 | for line in file_stat: | ||
226 | 101 | words = line.split() | ||
227 | 102 | if words[0] == 'ETag:': | ||
228 | 103 | return words[1] | ||
229 | 104 | |||
230 | 105 | |||
231 | 106 | def swift_fetch(source, target, container=None, **swift_creds): | ||
232 | 107 | cmd = swift_base_cmd(**swift_creds) + [ | ||
233 | 108 | 'download', '--output=' + target, container, source] | ||
234 | 109 | subprocess.check_call(cmd) | ||
235 | 110 | |||
236 | 111 | |||
237 | 112 | def unlink_force(path): | ||
238 | 113 | """Unlink path, without worrying about whether it exists.""" | ||
239 | 114 | try: | ||
240 | 115 | os.unlink(path) | ||
241 | 116 | except OSError as e: | ||
242 | 117 | if e.errno != errno.ENOENT: | ||
243 | 118 | raise | ||
244 | 119 | |||
245 | 120 | |||
246 | 121 | def symlink_force(source, link_name): | ||
247 | 122 | """Create symlink link_name -> source, even if link_name exists.""" | ||
248 | 123 | unlink_force(link_name) | ||
249 | 124 | os.symlink(source, link_name) | ||
250 | 125 | |||
251 | 126 | |||
252 | 127 | def install_python_packages(target_dir): | ||
253 | 128 | hookenv.log('Installing Python dependencies...') | ||
254 | 129 | subprocess.check_call( | ||
255 | 130 | ['sudo', '-u', CODE_USER, 'make', '-C', target_dir, 'build', | ||
256 | 131 | 'PIP_SOURCE_DIR=%s' % os.path.join(target_dir, 'pip-cache')]) | ||
257 | 132 | |||
258 | 133 | |||
259 | 134 | def prune_payloads(keep): | ||
260 | 135 | for entry in os.listdir(PAYLOADS_DIR): | ||
261 | 136 | if entry in keep: | ||
262 | 137 | continue | ||
263 | 138 | entry_path = os.path.join(PAYLOADS_DIR, entry) | ||
264 | 139 | if os.path.isdir(entry_path): | ||
265 | 140 | hookenv.log('Purging old build in %s...' % entry_path) | ||
266 | 141 | shutil.rmtree(entry_path) | ||
267 | 142 | |||
268 | 143 | |||
269 | 144 | def migrate_db(): | ||
270 | 145 | hookenv.log('Migrating database...') | ||
271 | 146 | path = '%s:%s' % (os.path.join(VENV_DIR, 'bin'), os.environ['PATH']) | ||
272 | 147 | |||
273 | 148 | with host.chdir(CODE_DIR): | ||
274 | 149 | subprocess.check_call([ | ||
275 | 150 | 'sudo', '-u', USER, 'PATH=%s' % path, 'make', 'migrate']) | ||
276 | 151 | |||
277 | 152 | |||
278 | 153 | def deploy_code(service_name): | ||
279 | 69 | make_srv_location() | 154 | make_srv_location() |
280 | 70 | 155 | ||
281 | 156 | current_build_label = None | ||
282 | 157 | if os.path.islink(CODE_DIR): | ||
283 | 158 | current_build_label = os.path.basename(os.path.realpath(CODE_DIR)) | ||
284 | 159 | elif os.path.isdir(os.path.join(CODE_DIR, '.bzr')): | ||
285 | 160 | log_output = subprocess.check_output( | ||
286 | 161 | ['bzr', 'log', '-rlast:', '--show-ids', CODE_DIR]) | ||
287 | 162 | for line in log_output.splitlines(): | ||
288 | 163 | if line.startswith('revision-id: '): | ||
289 | 164 | current_build_label = line[len('revision-id: '):] | ||
290 | 165 | desired_build_label = config['build_label'] | ||
291 | 166 | if not desired_build_label: | ||
292 | 167 | if current_build_label is not None: | ||
293 | 168 | hookenv.log( | ||
294 | 169 | 'No desired build label, but build %s is already deployed' % | ||
295 | 170 | current_build_label) | ||
296 | 171 | # We're probably upgrading from a charm that used old-style code | ||
297 | 172 | # assets, so make sure we at least have a virtualenv available | ||
298 | 173 | # from the current preferred location. | ||
299 | 174 | if not os.path.isdir(VENV_DIR) and os.path.isdir(OLD_VENV_DIR): | ||
300 | 175 | os.symlink(OLD_VENV_DIR, VENV_DIR) | ||
301 | 176 | return | ||
302 | 177 | else: | ||
303 | 178 | raise AssertionError('Build label unset, so cannot deploy code') | ||
304 | 179 | if current_build_label == desired_build_label: | ||
305 | 180 | hookenv.log('Build %s already deployed' % desired_build_label) | ||
306 | 181 | return | ||
307 | 182 | hookenv.log('Deploying build %s...' % desired_build_label) | ||
308 | 183 | |||
309 | 71 | # Copy source archive | 184 | # Copy source archive |
326 | 72 | archive_path = os.path.join(BASE_DIR, 'turnipcake.tar.gz') | 185 | archive_path = os.path.join(PAYLOADS_DIR, desired_build_label + '.tar.gz') |
327 | 73 | 186 | object_name = os.path.join(desired_build_label, CODE_TARBALL) | |
328 | 74 | with open(os.path.join(CHARM_FILES_DIR, 'turnipcake.tar.gz')) as file: | 187 | |
329 | 75 | host.write_file(archive_path, file.read(), perms=0o644) | 188 | try: |
330 | 76 | 189 | if config['swift_container_name']: | |
331 | 77 | # Unpack source | 190 | swift_creds = get_swift_creds(config) |
332 | 78 | archive.extract_tarfile(archive_path, CODE_DIR) | 191 | swift_container = config['swift_container_name'] |
333 | 79 | os.chown( | 192 | swift_fetch( |
334 | 80 | CODE_DIR, | 193 | os.path.join('turnipcake-builds', object_name), archive_path, |
335 | 81 | pwd.getpwnam(CODE_USER).pw_uid, grp.getgrnam(CODE_GROUP).gr_gid) | 194 | container=swift_container, **swift_creds) |
336 | 82 | host.lchownr(CODE_DIR, CODE_USER, CODE_GROUP) | 195 | else: |
337 | 83 | 196 | with open(os.path.join(CHARM_FILES_DIR, object_name)) as file: | |
338 | 84 | # Ensure the DB is writable by the app user. It really shouldn't | 197 | host.write_file(archive_path, file.read(), perms=0o644) |
339 | 85 | # live in the code tree. | 198 | |
340 | 86 | os.chown(DB_DIR, pwd.getpwnam(USER).pw_uid, grp.getgrnam(GROUP).gr_gid) | 199 | # Unpack source |
341 | 87 | host.lchownr(DB_DIR, USER, GROUP) | 200 | target_dir = os.path.join(PAYLOADS_DIR, desired_build_label) |
342 | 201 | if os.path.isdir(target_dir): | ||
343 | 202 | shutil.rmtree(target_dir) | ||
344 | 203 | archive.extract_tarfile(archive_path, target_dir) | ||
345 | 204 | os.chown( | ||
346 | 205 | target_dir, | ||
347 | 206 | pwd.getpwnam(CODE_USER).pw_uid, grp.getgrnam(CODE_GROUP).gr_gid) | ||
348 | 207 | host.lchownr(target_dir, CODE_USER, CODE_GROUP) | ||
349 | 208 | |||
350 | 209 | # Ensure the DB is writable by the app user. It really shouldn't | ||
351 | 210 | # live in the code tree. | ||
352 | 211 | os.chown(DB_DIR, pwd.getpwnam(USER).pw_uid, grp.getgrnam(GROUP).gr_gid) | ||
353 | 212 | host.lchownr(DB_DIR, USER, GROUP) | ||
354 | 213 | |||
355 | 214 | install_python_packages(target_dir) | ||
356 | 215 | |||
357 | 216 | if not os.path.islink(CODE_DIR) and os.path.isdir(CODE_DIR): | ||
358 | 217 | old_payload_dir = os.path.join(PAYLOADS_DIR, current_build_label) | ||
359 | 218 | if os.path.exists(old_payload_dir): | ||
360 | 219 | shutil.rmtree(CODE_DIR) | ||
361 | 220 | else: | ||
362 | 221 | os.rename(CODE_DIR, old_payload_dir) | ||
363 | 222 | symlink_force( | ||
364 | 223 | os.path.relpath(target_dir, os.path.dirname(CODE_DIR)), CODE_DIR) | ||
365 | 224 | prune_payloads([desired_build_label, current_build_label]) | ||
366 | 225 | finally: | ||
367 | 226 | unlink_force(archive_path) | ||
368 | 227 | |||
369 | 228 | migrate_db() | ||
370 | 88 | 229 | ||
371 | 89 | 230 | ||
372 | 90 | def install_packages(service_name): | 231 | def install_packages(service_name): |
373 | @@ -93,42 +234,11 @@ | |||
374 | 93 | fetch.apt_install(REQUIRED_PACKAGES, fatal=True) | 234 | fetch.apt_install(REQUIRED_PACKAGES, fatal=True) |
375 | 94 | 235 | ||
376 | 95 | 236 | ||
377 | 96 | def install_python_packages(service_name): | ||
378 | 97 | hookenv.log('Installing Python dependencies...') | ||
379 | 98 | pip_cache = os.path.join(CHARM_FILES_DIR, 'pip-cache') | ||
380 | 99 | code_reqs = os.path.join(CODE_DIR, 'requirements.txt') | ||
381 | 100 | deploy_reqs = os.path.join(hookenv.charm_dir(), 'deploy-requirements.txt') | ||
382 | 101 | |||
383 | 102 | pip_bin = os.path.join(VENV_DIR, 'bin', 'pip') | ||
384 | 103 | |||
385 | 104 | subprocess.call([ | ||
386 | 105 | 'sudo', '-u', CODE_USER, 'virtualenv', '--system-site-packages', | ||
387 | 106 | VENV_DIR]) | ||
388 | 107 | subprocess.check_call([ | ||
389 | 108 | 'sudo', '-u', CODE_USER, pip_bin, 'install', '--no-index', | ||
390 | 109 | '--find-links={}'.format(pip_cache), '-r', code_reqs, | ||
391 | 110 | '-r', deploy_reqs]) | ||
392 | 111 | subprocess.check_call([ | ||
393 | 112 | 'sudo', '-u', CODE_USER, pip_bin, 'install', '--no-deps', | ||
394 | 113 | '-e', CODE_DIR]) | ||
395 | 114 | |||
396 | 115 | |||
397 | 116 | def migrate_db(service_name): | ||
398 | 117 | hookenv.log('Migrating database...') | ||
399 | 118 | path = '%s:%s' % (os.path.join(VENV_DIR, 'bin'), os.environ['PATH']) | ||
400 | 119 | |||
401 | 120 | with host.chdir(CODE_DIR): | ||
402 | 121 | subprocess.check_call([ | ||
403 | 122 | 'sudo', '-u', USER, 'PATH=%s' % path, 'make', 'migrate']) | ||
404 | 123 | |||
405 | 124 | |||
406 | 125 | def publish_wsgi_relations(self): | 237 | def publish_wsgi_relations(self): |
407 | 126 | # Publish the wsgi-file relation so the gunicorn subordinate can | 238 | # Publish the wsgi-file relation so the gunicorn subordinate can |
408 | 127 | # serve us. Other WSGI containers could be made to work, as the most | 239 | # serve us. Other WSGI containers could be made to work, as the most |
409 | 128 | # gunicorn-specific thing is the --paste hack. | 240 | # gunicorn-specific thing is the --paste hack. |
410 | 129 | config = hookenv.config() | 241 | config = hookenv.config() |
411 | 130 | code_dir = os.path.join(config['base_dir'], 'code') | ||
412 | 131 | venv_bin = os.path.join(config['base_dir'], 'venv', 'bin') | ||
413 | 132 | # XXX We only support a single related turnip-api unit so far. | 242 | # XXX We only support a single related turnip-api unit so far. |
414 | 133 | turnip_api_rid = sorted(hookenv.relation_ids('turnip-api'))[0] | 243 | turnip_api_rid = sorted(hookenv.relation_ids('turnip-api'))[0] |
415 | 134 | turnip_api_unit = sorted(hookenv.related_units(turnip_api_rid))[0] | 244 | turnip_api_unit = sorted(hookenv.related_units(turnip_api_rid))[0] |
416 | @@ -136,7 +246,7 @@ | |||
417 | 136 | rid=turnip_api_rid, unit=turnip_api_unit) | 246 | rid=turnip_api_rid, unit=turnip_api_unit) |
418 | 137 | 247 | ||
419 | 138 | env = { | 248 | env = { |
421 | 139 | 'PATH': '%s:$PATH' % venv_bin, | 249 | 'PATH': '%s:$PATH' % os.path.join(VENV_DIR, 'bin'), |
422 | 140 | 'TURNIP_ENDPOINT': 'http://%s:%s' % ( | 250 | 'TURNIP_ENDPOINT': 'http://%s:%s' % ( |
423 | 141 | turnip_api_data['turnip_api_host'], | 251 | turnip_api_data['turnip_api_host'], |
424 | 142 | turnip_api_data['turnip_api_port']), | 252 | turnip_api_data['turnip_api_port']), |
425 | @@ -144,7 +254,7 @@ | |||
426 | 144 | for relid in hookenv.relation_ids('wsgi-file'): | 254 | for relid in hookenv.relation_ids('wsgi-file'): |
427 | 145 | hookenv.relation_set( | 255 | hookenv.relation_set( |
428 | 146 | relid, | 256 | relid, |
430 | 147 | working_dir=code_dir, | 257 | working_dir=CODE_DIR, |
431 | 148 | wsgi_wsgi_file='--paste turnipcake.ini', # XXX: Gross. | 258 | wsgi_wsgi_file='--paste turnipcake.ini', # XXX: Gross. |
432 | 149 | wsgi_user=config['user'], | 259 | wsgi_user=config['user'], |
433 | 150 | wsgi_group=config['group'], | 260 | wsgi_group=config['group'], |
434 | 151 | 261 | ||
435 | === modified file 'hooks/services.py' | |||
436 | --- hooks/services.py 2015-03-27 02:43:39 +0000 | |||
437 | +++ hooks/services.py 2015-11-02 11:16:30 +0000 | |||
438 | @@ -12,9 +12,8 @@ | |||
439 | 12 | actions.execd_preinstall('turnipcake') | 12 | actions.execd_preinstall('turnipcake') |
440 | 13 | actions.install_packages('turnipcake') | 13 | actions.install_packages('turnipcake') |
441 | 14 | actions.create_users('turnipcake') | 14 | actions.create_users('turnipcake') |
445 | 15 | actions.unpack_source('turnipcake') | 15 | if hookenv.hook_name() in ('install', 'upgrade-charm', 'config-changed'): |
446 | 16 | actions.install_python_packages('turnipcake') | 16 | actions.deploy_code('turnipcake') |
444 | 17 | actions.migrate_db('turnipcake') | ||
447 | 18 | 17 | ||
448 | 19 | config = hookenv.config() | 18 | config = hookenv.config() |
449 | 20 | manager = ServiceManager([ | 19 | manager = ServiceManager([ |
Approving as a parallel of https://code.launchpad.net/~cjwatson/charms/trusty/turnip/build-label/+merge/275468.