Merge lp:~cjwatson/launchpad/archive-file-history into lp:launchpad
Status: Superseded
Proposed branch: lp:~cjwatson/launchpad/archive-file-history
Merge into: lp:launchpad
Diff against target: 1088 lines (+300/-228), 15 files modified:

- cronscripts/librarian-gc.py (+3/-2)
- lib/lp/archivepublisher/publishing.py (+43/-22)
- lib/lp/archivepublisher/tests/test_publisher.py (+94/-61)
- lib/lp/blueprints/doc/sprint-agenda.txt (+2/-1)
- lib/lp/code/model/tests/test_codeimportjob.py (+4/-2)
- lib/lp/registry/doc/announcement.txt (+3/-1)
- lib/lp/services/database/sqlbase.py (+3/-5)
- lib/lp/services/database/tests/test_bulk.py (+3/-1)
- lib/lp/services/librarianserver/librariangc.py (+6/-7)
- lib/lp/services/librarianserver/tests/test_gc.py (+6/-5)
- lib/lp/services/xref/tests/test_model.py (+6/-7)
- lib/lp/soyuz/doc/publishing.txt (+2/-1)
- lib/lp/soyuz/interfaces/archivefile.py (+12/-9)
- lib/lp/soyuz/model/archivefile.py (+37/-47)
- lib/lp/soyuz/tests/test_archivefile.py (+76/-57)

To merge this branch: bzr merge lp:~cjwatson/launchpad/archive-file-history
Related bugs: none
| Reviewer | Review Type | Date Requested | Status |
|---|---|---|---|
| Launchpad code reviewers | | | Pending |

Review via email: mp+343751@code.launchpad.net
This proposal has been superseded by a proposal from 2018-04-21.
Commit message
Turn ArchiveFile into a history table, adding date_created and date_superseded columns. Adjust the publisher to match.
Description of the change
The main complexity here is in the changed publisher logic, especially for reprieving (that is, the situation where file contents that were scheduled for deletion become live again, particularly common for empty files). We previously did this by simply clearing scheduled_deletion_date on the existing row; since ArchiveFile is now a history table, we instead reprieve content by creating a new ArchiveFile row referring to it, leaving the superseded row's dates intact.
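To make that concrete, here is a minimal sketch of reprieving against a history table (hypothetical code, not from this branch; `FileRow` and `reprieve` are illustrative stand-ins for ArchiveFile and the publisher's reprieve step):

```python
from dataclasses import dataclass
from datetime import datetime
from typing import List, Optional


@dataclass
class FileRow:
    """Simplified stand-in for an ArchiveFile history row."""
    path: str
    sha256: str
    date_created: datetime
    date_superseded: Optional[datetime] = None
    scheduled_deletion_date: Optional[datetime] = None


def reprieve(history: List[FileRow], condemned: List[FileRow],
             now: datetime) -> None:
    """Reprieve condemned contents by appending fresh live rows.

    The old publisher cleared scheduled_deletion_date in place; with a
    history table the superseded row is left untouched and a new live
    row is created for the same path and content instead.
    """
    for old in condemned:
        history.append(
            FileRow(path=old.path, sha256=old.sha256, date_created=now))
```

Each row then records when its content became live (date_created) and when it stopped being live (date_superseded), so the full sequence of contents published at a path can be reconstructed.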
I think the publisher tests are somewhat clearer now, since they explicitly test creation dates, making the chain of events more obvious.
We'll need to backfill the new columns. I'll probably make a separate branch with a garbo job for this: my plan is to set date_created to some arbitrary value (probably just the epoch, so that it's clear that it's arbitrary), and to set date_superseded to scheduled_deletion_date minus the usual stay of execution for rows that have a scheduled_deletion_date.
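That derivation just inverts how the publisher sets scheduled_deletion_date (date_superseded plus the stay of execution). A sketch of the planned job, with the same caveats as above (`FileRow` and `backfill` are hypothetical, and the one-day value is assumed to match BY_HASH_STAY_OF_EXECUTION):

```python
from dataclasses import dataclass
from datetime import datetime, timedelta
from typing import Optional

import pytz

# Assumed to match lp.archivepublisher.publishing's stay of execution.
BY_HASH_STAY_OF_EXECUTION = timedelta(days=1)
EPOCH = datetime(1970, 1, 1, tzinfo=pytz.UTC)


@dataclass
class FileRow:
    """Hypothetical pre-backfill ArchiveFile row."""
    date_created: Optional[datetime] = None
    date_superseded: Optional[datetime] = None
    scheduled_deletion_date: Optional[datetime] = None


def backfill(rows):
    """One pass of the planned (not yet written) backfill job."""
    for row in rows:
        if row.date_created is None:
            # Deliberately arbitrary, so backfilled values are obvious.
            row.date_created = EPOCH
        if (row.date_superseded is None
                and row.scheduled_deletion_date is not None):
            # scheduled_deletion_date was set to "superseded + stay",
            # so recover date_superseded by subtracting the stay.
            row.date_superseded = (
                row.scheduled_deletion_date - BY_HASH_STAY_OF_EXECUTION)
```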
Unmerged revisions
- 18624. By Colin Watson

  Turn ArchiveFile into a history table, adding date_created and date_superseded columns. Adjust the publisher to match.
Preview Diff
1 | === modified file 'cronscripts/librarian-gc.py' |
2 | --- cronscripts/librarian-gc.py 2013-09-10 10:48:31 +0000 |
3 | +++ cronscripts/librarian-gc.py 2018-04-21 11:23:22 +0000 |
4 | @@ -66,11 +66,12 @@ |
5 | # XXX wgrant 2011-09-18 bug=853066: Using Storm's raw connection |
6 | # here is wrong. We should either create our own or use |
7 | # Store.execute or cursor() and the transaction module. |
8 | - conn = IStore(LibraryFileAlias)._connection._raw_connection |
9 | + store = IStore(LibraryFileAlias) |
10 | + conn = store._connection._raw_connection |
11 | |
12 | # Refuse to run if we have significant clock skew between the |
13 | # librarian and the database. |
14 | - librariangc.confirm_no_clock_skew(conn) |
15 | + librariangc.confirm_no_clock_skew(store) |
16 | |
17 | # Note that each of these next steps will issue commit commands |
18 | # as appropriate to make this script transaction friendly |
19 | |
20 | === modified file 'lib/lp/archivepublisher/publishing.py' |
21 | --- lib/lp/archivepublisher/publishing.py 2018-03-27 23:26:12 +0000 |
22 | +++ lib/lp/archivepublisher/publishing.py 2018-04-21 11:23:22 +0000 |
23 | @@ -1063,8 +1063,7 @@ |
24 | assert path.startswith("dists/") |
25 | return path[len("dists/"):] |
26 | |
27 | - # Gather information on entries in the current Release file, and |
28 | - # make sure nothing there is condemned. |
29 | + # Gather information on entries in the current Release file. |
30 | current_files = {} |
31 | for current_entry in ( |
32 | release_data["SHA256"] + extra_data.get("SHA256", [])): |
33 | @@ -1073,33 +1072,54 @@ |
34 | real_path = os.path.join(suite_dir, real_name) |
35 | current_files[path] = ( |
36 | int(current_entry["size"]), current_entry["sha256"], real_path) |
37 | + |
38 | + # Gather information on entries currently in the database. Ensure |
39 | + # that we know about all the relevant by-hash directory trees before |
40 | + # doing any removals so that we can prune them properly later, and |
41 | + # work out which condemned files should be reprieved due to the |
42 | + # paths in question having their previous content again. |
43 | + reprieved_files = defaultdict(list) |
44 | uncondemned_files = set() |
45 | for db_file in archive_file_set.getByArchive( |
46 | - self.archive, container=container, only_condemned=True, |
47 | - eager_load=True): |
48 | - stripped_path = strip_dists(db_file.path) |
49 | - if stripped_path in current_files: |
50 | - current_sha256 = current_files[stripped_path][1] |
51 | - if db_file.library_file.content.sha256 == current_sha256: |
52 | - uncondemned_files.add(db_file) |
53 | - if uncondemned_files: |
54 | - for container, path, sha256 in archive_file_set.unscheduleDeletion( |
55 | - uncondemned_files): |
56 | + self.archive, container=container, eager_load=True): |
57 | + by_hashes.registerChild(os.path.dirname(strip_dists(db_file.path))) |
58 | + file_key = (db_file.path, db_file.library_file.content.sha256) |
59 | + if db_file.scheduled_deletion_date is None: |
60 | + uncondemned_files.add(file_key) |
61 | + else: |
62 | + stripped_path = strip_dists(db_file.path) |
63 | + if stripped_path in current_files: |
64 | + current_sha256 = current_files[stripped_path][1] |
65 | + if db_file.library_file.content.sha256 == current_sha256: |
66 | + reprieved_files[file_key].append(db_file) |
67 | + |
68 | + # We may already have uncondemned entries with the same path and |
69 | + # content as condemned entries that we were about to reprieve; if |
70 | + # so, there's no need to reprieve them. |
71 | + for file_key in uncondemned_files: |
72 | + reprieved_files.pop(file_key, None) |
73 | + |
74 | + # Make sure nothing in the current Release file is condemned. |
75 | + if reprieved_files: |
76 | + reprieved_files_flat = set( |
77 | + chain.from_iterable(reprieved_files.values())) |
78 | + archive_file_set.unscheduleDeletion(reprieved_files_flat) |
79 | + for db_file in reprieved_files_flat: |
80 | self.log.debug( |
81 | "by-hash: Unscheduled %s for %s in %s for deletion" % ( |
82 | - sha256, path, container)) |
83 | + db_file.library_file.content.sha256, db_file.path, |
84 | + db_file.container)) |
85 | |
86 | # Remove any condemned files from the database whose stay of |
87 | # execution has elapsed. We ensure that we know about all the |
88 | # relevant by-hash directory trees before doing any removals so that |
89 | # we can prune them properly later. |
90 | - for db_file in archive_file_set.getByArchive( |
91 | - self.archive, container=container): |
92 | - by_hashes.registerChild(os.path.dirname(strip_dists(db_file.path))) |
93 | for container, path, sha256 in archive_file_set.reap( |
94 | self.archive, container=container): |
95 | - self.log.debug( |
96 | - "by-hash: Deleted %s for %s in %s" % (sha256, path, container)) |
97 | + if (path, sha256) not in uncondemned_files: |
98 | + self.log.debug( |
99 | + "by-hash: Deleted %s for %s in %s" % |
100 | + (sha256, path, container)) |
101 | |
102 | # Ensure that all files recorded in the database are in by-hash. |
103 | db_files = archive_file_set.getByArchive( |
104 | @@ -1120,12 +1140,13 @@ |
105 | if db_file.library_file.content.sha256 != current_sha256: |
106 | condemned_files.add(db_file) |
107 | if condemned_files: |
108 | - for container, path, sha256 in archive_file_set.scheduleDeletion( |
109 | - condemned_files, |
110 | - timedelta(days=BY_HASH_STAY_OF_EXECUTION)): |
111 | + archive_file_set.scheduleDeletion( |
112 | + condemned_files, timedelta(days=BY_HASH_STAY_OF_EXECUTION)) |
113 | + for db_file in condemned_files: |
114 | self.log.debug( |
115 | "by-hash: Scheduled %s for %s in %s for deletion" % ( |
116 | - sha256, path, container)) |
117 | + db_file.library_file.content.sha256, db_file.path, |
118 | + db_file.container)) |
119 | |
120 | # Ensure that all the current index files are in by-hash and have |
121 | # corresponding database entries. |
122 | |
123 | === modified file 'lib/lp/archivepublisher/tests/test_publisher.py' |
124 | --- lib/lp/archivepublisher/tests/test_publisher.py 2018-04-05 11:32:50 +0000 |
125 | +++ lib/lp/archivepublisher/tests/test_publisher.py 2018-04-21 11:23:22 +0000 |
126 | @@ -21,8 +21,6 @@ |
127 | from functools import partial |
128 | import gzip |
129 | import hashlib |
130 | -from itertools import product |
131 | -from operator import attrgetter |
132 | import os |
133 | import shutil |
134 | import stat |
135 | @@ -51,7 +49,6 @@ |
136 | LessThan, |
137 | Matcher, |
138 | MatchesDict, |
139 | - MatchesListwise, |
140 | MatchesSetwise, |
141 | MatchesStructure, |
142 | Not, |
143 | @@ -2581,12 +2578,12 @@ |
144 | publisher.D_writeReleaseFiles(False) |
145 | |
146 | @classmethod |
147 | - def _makeScheduledDeletionDateMatcher(cls, condemned_at): |
148 | - if condemned_at is None: |
149 | + def _makeScheduledDeletionDateMatcher(cls, superseded_at): |
150 | + if superseded_at is None: |
151 | return Is(None) |
152 | else: |
153 | return Equals( |
154 | - condemned_at + timedelta(days=BY_HASH_STAY_OF_EXECUTION)) |
155 | + superseded_at + timedelta(days=BY_HASH_STAY_OF_EXECUTION)) |
156 | |
157 | def assertHasSuiteFiles(self, patterns, *properties): |
158 | def is_interesting(path): |
159 | @@ -2600,11 +2597,13 @@ |
160 | self.ubuntutest.main_archive) |
161 | if is_interesting(archive_file.path)] |
162 | matchers = [] |
163 | - for path, condemned_at in properties: |
164 | + for path, created_at, superseded_at in properties: |
165 | matchers.append(MatchesStructure( |
166 | path=Equals('dists/breezy-autotest/%s' % path), |
167 | + date_created=Equals(created_at), |
168 | + date_superseded=Equals(superseded_at), |
169 | scheduled_deletion_date=self._makeScheduledDeletionDateMatcher( |
170 | - condemned_at))) |
171 | + superseded_at))) |
172 | self.assertThat(files, MatchesSetwise(*matchers)) |
173 | |
174 | def test_disabled(self): |
175 | @@ -2754,7 +2753,8 @@ |
176 | flush_database_caches() |
177 | self.assertHasSuiteFiles( |
178 | ('Contents-*', 'Release'), |
179 | - ('Contents-i386', None), ('Release', None)) |
180 | + ('Contents-i386', self.times[0], None), |
181 | + ('Release', self.times[0], None)) |
182 | releases = [get_release_contents()] |
183 | self.assertThat( |
184 | suite_path('by-hash'), |
185 | @@ -2768,8 +2768,10 @@ |
186 | flush_database_caches() |
187 | self.assertHasSuiteFiles( |
188 | ('Contents-*', 'Release'), |
189 | - ('Contents-i386', None), ('Contents-hppa', None), |
190 | - ('Release', self.times[1]), ('Release', None)) |
191 | + ('Contents-i386', self.times[0], None), |
192 | + ('Contents-hppa', self.times[1], None), |
193 | + ('Release', self.times[0], self.times[1]), |
194 | + ('Release', self.times[1], None)) |
195 | releases.append(get_release_contents()) |
196 | self.assertThat( |
197 | suite_path('by-hash'), |
198 | @@ -2782,9 +2784,11 @@ |
199 | flush_database_caches() |
200 | self.assertHasSuiteFiles( |
201 | ('Contents-*', 'Release'), |
202 | - ('Contents-i386', self.times[2]), ('Contents-hppa', None), |
203 | - ('Release', self.times[1]), ('Release', self.times[2]), |
204 | - ('Release', None)) |
205 | + ('Contents-i386', self.times[0], self.times[2]), |
206 | + ('Contents-hppa', self.times[1], None), |
207 | + ('Release', self.times[0], self.times[1]), |
208 | + ('Release', self.times[1], self.times[2]), |
209 | + ('Release', self.times[2], None)) |
210 | releases.append(get_release_contents()) |
211 | self.assertThat( |
212 | suite_path('by-hash'), |
213 | @@ -2796,9 +2800,12 @@ |
214 | flush_database_caches() |
215 | self.assertHasSuiteFiles( |
216 | ('Contents-*', 'Release'), |
217 | - ('Contents-i386', self.times[2]), ('Contents-hppa', None), |
218 | - ('Release', self.times[1]), ('Release', self.times[2]), |
219 | - ('Release', self.times[3]), ('Release', None)) |
220 | + ('Contents-i386', self.times[0], self.times[2]), |
221 | + ('Contents-hppa', self.times[1], None), |
222 | + ('Release', self.times[0], self.times[1]), |
223 | + ('Release', self.times[1], self.times[2]), |
224 | + ('Release', self.times[2], self.times[3]), |
225 | + ('Release', self.times[3], None)) |
226 | releases.append(get_release_contents()) |
227 | self.assertThat( |
228 | suite_path('by-hash'), |
229 | @@ -2817,9 +2824,10 @@ |
230 | flush_database_caches() |
231 | self.assertHasSuiteFiles( |
232 | ('Contents-*', 'Release'), |
233 | - ('Contents-hppa', self.times[4]), |
234 | - ('Release', self.times[3]), ('Release', self.times[4]), |
235 | - ('Release', None)) |
236 | + ('Contents-hppa', self.times[1], self.times[4]), |
237 | + ('Release', self.times[2], self.times[3]), |
238 | + ('Release', self.times[3], self.times[4]), |
239 | + ('Release', self.times[4], None)) |
240 | releases.append(get_release_contents()) |
241 | self.assertThat( |
242 | suite_path('by-hash'), |
243 | @@ -2836,7 +2844,8 @@ |
244 | flush_database_caches() |
245 | self.assertHasSuiteFiles( |
246 | ('Contents-*', 'Release'), |
247 | - ('Release', self.times[5]), ('Release', None)) |
248 | + ('Release', self.times[4], self.times[5]), |
249 | + ('Release', self.times[5], None)) |
250 | releases.append(get_release_contents()) |
251 | self.assertThat(suite_path('by-hash'), ByHashHasContents(releases[4:])) |
252 | |
253 | @@ -2863,9 +2872,13 @@ |
254 | for name in ('Release', 'Sources'): |
255 | with open(suite_path('main', 'source', name), 'rb') as f: |
256 | main_contents.add(f.read()) |
257 | + self.assertHasSuiteFiles( |
258 | + ('main/source/Sources',), |
259 | + ('main/source/Sources', self.times[0], None)) |
260 | |
261 | # Add a source package so that Sources is non-empty. |
262 | pub_source = self.getPubSource(filecontent='Source: foo\n') |
263 | + self.advanceTime(delta=timedelta(hours=1)) |
264 | self.runSteps(publisher, step_a=True, step_c=True, step_d=True) |
265 | transaction.commit() |
266 | with open(suite_path('main', 'source', 'Sources'), 'rb') as f: |
267 | @@ -2874,28 +2887,42 @@ |
268 | self.assertThat( |
269 | suite_path('main', 'source', 'by-hash'), |
270 | ByHashHasContents(main_contents)) |
271 | - |
272 | - # Make the empty Sources file ready to prune. |
273 | - self.advanceTime( |
274 | - delta=timedelta(days=BY_HASH_STAY_OF_EXECUTION, hours=1)) |
275 | + self.assertHasSuiteFiles( |
276 | + ('main/source/Sources',), |
277 | + ('main/source/Sources', self.times[0], self.times[1]), |
278 | + ('main/source/Sources', self.times[1], None)) |
279 | |
280 | # Delete the source package so that Sources is empty again. The |
281 | - # empty file is reprieved and the non-empty one is condemned. |
282 | + # empty file is reprieved (by creating a new ArchiveFile referring |
283 | + # to it) and the non-empty one is condemned. |
284 | pub_source.requestDeletion(self.ubuntutest.owner) |
285 | - self.runSteps(publisher, step_a=True, step_c=True, step_d=True) |
286 | - transaction.commit() |
287 | - self.assertThat( |
288 | - suite_path('main', 'source', 'by-hash'), |
289 | - ByHashHasContents(main_contents)) |
290 | - archive_files = getUtility(IArchiveFileSet).getByArchive( |
291 | - self.ubuntutest.main_archive, |
292 | - path='dists/breezy-autotest/main/source/Sources') |
293 | - self.assertThat( |
294 | - sorted(archive_files, key=attrgetter('id')), |
295 | - MatchesListwise([ |
296 | - MatchesStructure(scheduled_deletion_date=Is(None)), |
297 | - MatchesStructure(scheduled_deletion_date=Not(Is(None))), |
298 | - ])) |
299 | + self.advanceTime(delta=timedelta(hours=1)) |
300 | + self.runSteps(publisher, step_a=True, step_c=True, step_d=True) |
301 | + transaction.commit() |
302 | + self.assertThat( |
303 | + suite_path('main', 'source', 'by-hash'), |
304 | + ByHashHasContents(main_contents)) |
305 | + self.assertHasSuiteFiles( |
306 | + ('main/source/Sources',), |
307 | + ('main/source/Sources', self.times[0], self.times[1]), |
308 | + ('main/source/Sources', self.times[1], self.times[2]), |
309 | + ('main/source/Sources', self.times[2], None)) |
310 | + |
311 | + # Make the first empty Sources file ready to prune. This doesn't |
312 | + # change the set of files on disk, because there's still a newer |
313 | + # reference to the empty file. |
314 | + self.advanceTime( |
315 | + absolute=self.times[1] + timedelta( |
316 | + days=BY_HASH_STAY_OF_EXECUTION, minutes=30)) |
317 | + self.runSteps(publisher, step_a=True, step_c=True, step_d=True) |
318 | + transaction.commit() |
319 | + self.assertThat( |
320 | + suite_path('main', 'source', 'by-hash'), |
321 | + ByHashHasContents(main_contents)) |
322 | + self.assertHasSuiteFiles( |
323 | + ('main/source/Sources',), |
324 | + ('main/source/Sources', self.times[1], self.times[2]), |
325 | + ('main/source/Sources', self.times[2], None)) |
326 | |
327 | def setUpPruneableSuite(self): |
328 | self.setUpMockTime() |
329 | @@ -2924,14 +2951,18 @@ |
330 | # We have two condemned sets of index files and one uncondemned set. |
331 | # main/source/Release contains a small enough amount of information |
332 | # that it doesn't change. |
333 | - expected_suite_files = ( |
334 | - list(product( |
335 | - ('main/source/Sources.gz', 'main/source/Sources.bz2', |
336 | - 'Release'), |
337 | - (self.times[1], self.times[2], None))) + |
338 | - [('main/source/Release', None)]) |
339 | self.assertHasSuiteFiles( |
340 | - ('main/source/*', 'Release'), *expected_suite_files) |
341 | + ('main/source/*', 'Release'), |
342 | + ('main/source/Sources.gz', self.times[0], self.times[1]), |
343 | + ('main/source/Sources.gz', self.times[1], self.times[2]), |
344 | + ('main/source/Sources.gz', self.times[2], None), |
345 | + ('main/source/Sources.bz2', self.times[0], self.times[1]), |
346 | + ('main/source/Sources.bz2', self.times[1], self.times[2]), |
347 | + ('main/source/Sources.bz2', self.times[2], None), |
348 | + ('main/source/Release', self.times[0], None), |
349 | + ('Release', self.times[0], self.times[1]), |
350 | + ('Release', self.times[1], self.times[2]), |
351 | + ('Release', self.times[2], None)) |
352 | self.assertThat(suite_path('by-hash'), ByHashHasContents(top_contents)) |
353 | self.assertThat( |
354 | suite_path('main', 'source', 'by-hash'), |
355 | @@ -2964,14 +2995,15 @@ |
356 | self.assertEqual(set(), publisher.dirty_pockets) |
357 | # The condemned index files are removed, and no new Release file is |
358 | # generated. |
359 | - expected_suite_files = ( |
360 | - list(product( |
361 | - ('main/source/Sources.gz', 'main/source/Sources.bz2'), |
362 | - (self.times[2], None))) + |
363 | - [('main/source/Release', None), |
364 | - ('Release', self.times[2]), ('Release', None)]) |
365 | self.assertHasSuiteFiles( |
366 | - ('main/source/*', 'Release'), *expected_suite_files) |
367 | + ('main/source/*', 'Release'), |
368 | + ('main/source/Sources.gz', self.times[1], self.times[2]), |
369 | + ('main/source/Sources.gz', self.times[2], None), |
370 | + ('main/source/Sources.bz2', self.times[1], self.times[2]), |
371 | + ('main/source/Sources.bz2', self.times[2], None), |
372 | + ('main/source/Release', self.times[0], None), |
373 | + ('Release', self.times[1], self.times[2]), |
374 | + ('Release', self.times[2], None)) |
375 | self.assertThat(suite_path('by-hash'), ByHashHasContents(top_contents)) |
376 | self.assertThat( |
377 | suite_path('main', 'source', 'by-hash'), |
378 | @@ -2998,14 +3030,15 @@ |
379 | self.assertEqual(release_mtime, os.stat(release_path).st_mtime) |
380 | # The condemned index files are removed, and no new Release file is |
381 | # generated. |
382 | - expected_suite_files = ( |
383 | - list(product( |
384 | - ('main/source/Sources.gz', 'main/source/Sources.bz2'), |
385 | - (self.times[2], None))) + |
386 | - [('main/source/Release', None), |
387 | - ('Release', self.times[2]), ('Release', None)]) |
388 | self.assertHasSuiteFiles( |
389 | - ('main/source/*', 'Release'), *expected_suite_files) |
390 | + ('main/source/*', 'Release'), |
391 | + ('main/source/Sources.gz', self.times[1], self.times[2]), |
392 | + ('main/source/Sources.gz', self.times[2], None), |
393 | + ('main/source/Sources.bz2', self.times[1], self.times[2]), |
394 | + ('main/source/Sources.bz2', self.times[2], None), |
395 | + ('main/source/Release', self.times[0], None), |
396 | + ('Release', self.times[1], self.times[2]), |
397 | + ('Release', self.times[2], None)) |
398 | self.assertThat(suite_path('by-hash'), ByHashHasContents(top_contents)) |
399 | self.assertThat( |
400 | suite_path('main', 'source', 'by-hash'), |
401 | |
402 | === modified file 'lib/lp/blueprints/doc/sprint-agenda.txt' |
403 | --- lib/lp/blueprints/doc/sprint-agenda.txt 2011-12-30 06:14:56 +0000 |
404 | +++ lib/lp/blueprints/doc/sprint-agenda.txt 2018-04-21 11:23:22 +0000 |
405 | @@ -42,8 +42,9 @@ |
406 | It is possible to revise your choice, declining the spec. This should update |
407 | the date_decided. |
408 | |
409 | + >>> from storm.store import Store |
410 | >>> from lp.services.database.sqlbase import get_transaction_timestamp |
411 | - >>> transaction_timestamp = get_transaction_timestamp() |
412 | + >>> transaction_timestamp = get_transaction_timestamp(Store.of(sl)) |
413 | |
414 | # Nobody is allowed to write directly to SprintSpecification.date_decided, |
415 | # so we need to remove its security proxy here. |
416 | |
417 | === modified file 'lib/lp/code/model/tests/test_codeimportjob.py' |
418 | --- lib/lp/code/model/tests/test_codeimportjob.py 2018-03-15 20:44:04 +0000 |
419 | +++ lib/lp/code/model/tests/test_codeimportjob.py 2018-04-21 11:23:22 +0000 |
420 | @@ -55,6 +55,8 @@ |
421 | from lp.code.tests.helpers import GitHostingFixture |
422 | from lp.services.config import config |
423 | from lp.services.database.constants import UTC_NOW |
424 | +from lp.services.database.interfaces import IStore |
425 | +from lp.services.database.sqlbase import get_transaction_timestamp |
426 | from lp.services.librarian.interfaces import ILibraryFileAliasSet |
427 | from lp.services.librarian.interfaces.client import ILibrarianClient |
428 | from lp.services.macaroons.interfaces import IMacaroonIssuer |
429 | @@ -561,10 +563,10 @@ |
430 | # This causes problems for the "UTC_NOW - interval / 2" |
431 | # expression below. |
432 | interval = code_import.effective_update_interval |
433 | - from lp.services.database.sqlbase import get_transaction_timestamp |
434 | + store = IStore(CodeImportResult) |
435 | recent_result = CodeImportResult( |
436 | code_import=code_import, machine=machine, status=FAILURE, |
437 | - date_job_started=get_transaction_timestamp() - interval / 2) |
438 | + date_job_started=get_transaction_timestamp(store) - interval / 2) |
439 | # When we create the job, its date_due should be set to the date_due |
440 | # of the job that was deleted when the CodeImport review status |
441 | # changed from REVIEWED. That is the date_job_started of the most |
442 | |
443 | === modified file 'lib/lp/registry/doc/announcement.txt' |
444 | --- lib/lp/registry/doc/announcement.txt 2015-01-29 12:33:01 +0000 |
445 | +++ lib/lp/registry/doc/announcement.txt 2018-04-21 11:23:22 +0000 |
446 | @@ -212,8 +212,10 @@ |
447 | Announcements can be retracted at any time. Retracting an announcement |
448 | updates the date_last_modified and sets the announcement.active flag to False |
449 | |
450 | + >>> from storm.store import Store |
451 | >>> from lp.services.database.sqlbase import get_transaction_timestamp |
452 | - >>> transaction_timestamp = get_transaction_timestamp() |
453 | + >>> transaction_timestamp = get_transaction_timestamp( |
454 | + ... Store.of(apache_asia)) |
455 | |
456 | >>> print apache_asia.date_last_modified |
457 | None |
458 | |
459 | === modified file 'lib/lp/services/database/sqlbase.py' |
460 | --- lib/lp/services/database/sqlbase.py 2015-07-08 16:05:11 +0000 |
461 | +++ lib/lp/services/database/sqlbase.py 2018-04-21 11:23:22 +0000 |
462 | @@ -263,11 +263,9 @@ |
463 | _get_sqlobject_store().invalidate() |
464 | |
465 | |
466 | -def get_transaction_timestamp(): |
467 | - """Get the timestamp for the current transaction on the MAIN DEFAULT |
468 | - store. DEPRECATED - if needed it should become a method on the store. |
469 | - """ |
470 | - timestamp = _get_sqlobject_store().execute( |
471 | +def get_transaction_timestamp(store): |
472 | + """Get the timestamp for the current transaction on `store`.""" |
473 | + timestamp = store.execute( |
474 | "SELECT CURRENT_TIMESTAMP AT TIME ZONE 'UTC'").get_one()[0] |
475 | return timestamp.replace(tzinfo=pytz.timezone('UTC')) |
476 | |
477 | |
478 | === modified file 'lib/lp/services/database/tests/test_bulk.py' |
479 | --- lib/lp/services/database/tests/test_bulk.py 2018-01-02 10:54:31 +0000 |
480 | +++ lib/lp/services/database/tests/test_bulk.py 2018-04-21 11:23:22 +0000 |
481 | @@ -343,4 +343,6 @@ |
482 | [(bug, person, person, |
483 | SQL("CURRENT_TIMESTAMP AT TIME ZONE 'UTC'"), |
484 | BugNotificationLevel.LIFECYCLE)], get_objects=True) |
485 | - self.assertEqual(get_transaction_timestamp(), sub.date_created) |
486 | + self.assertEqual( |
487 | + get_transaction_timestamp(IStore(BugSubscription)), |
488 | + sub.date_created) |
489 | |
490 | === modified file 'lib/lp/services/librarianserver/librariangc.py' |
491 | --- lib/lp/services/librarianserver/librariangc.py 2018-01-03 17:17:12 +0000 |
492 | +++ lib/lp/services/librarianserver/librariangc.py 2018-04-21 11:23:22 +0000 |
493 | @@ -17,6 +17,7 @@ |
494 | from time import time |
495 | |
496 | import iso8601 |
497 | +import pytz |
498 | from swiftclient import client as swiftclient |
499 | from zope.interface import implementer |
500 | |
501 | @@ -26,6 +27,7 @@ |
502 | listReferences, |
503 | quoteIdentifier, |
504 | ) |
505 | +from lp.services.database.sqlbase import get_transaction_timestamp |
506 | from lp.services.features import getFeatureFlag |
507 | from lp.services.librarianserver import swift |
508 | from lp.services.librarianserver.storage import ( |
509 | @@ -67,7 +69,7 @@ |
510 | |
511 | def _utcnow(): |
512 | # Wrapper that is replaced in the test suite. |
513 | - return datetime.utcnow() |
514 | + return datetime.now(pytz.UTC) |
515 | |
516 | |
517 | def open_stream(content_id): |
518 | @@ -105,16 +107,14 @@ |
519 | return hasher.hexdigest(), length |
520 | |
521 | |
522 | -def confirm_no_clock_skew(con): |
523 | +def confirm_no_clock_skew(store): |
524 | """Raise an exception if there is significant clock skew between the |
525 | database and this machine. |
526 | |
527 | It is theoretically possible to lose data if there is more than several |
528 | hours of skew. |
529 | """ |
530 | - cur = con.cursor() |
531 | - cur.execute("SELECT CURRENT_TIMESTAMP AT TIME ZONE 'UTC'") |
532 | - db_now = cur.fetchone()[0] |
533 | + db_now = get_transaction_timestamp(store) |
534 | local_now = _utcnow() |
535 | five_minutes = timedelta(minutes=5) |
536 | |
537 | @@ -813,8 +813,7 @@ |
538 | and next_wanted_content_id == content_id) |
539 | |
540 | if not file_wanted: |
541 | - mod_time = iso8601.parse_date( |
542 | - obj['last_modified']).replace(tzinfo=None) |
543 | + mod_time = iso8601.parse_date(obj['last_modified']) |
544 | if mod_time > _utcnow() - timedelta(days=1): |
545 | log.debug3( |
546 | "File %d not removed - created too recently", content_id) |
547 | |
548 | === modified file 'lib/lp/services/librarianserver/tests/test_gc.py' |
549 | --- lib/lp/services/librarianserver/tests/test_gc.py 2018-02-13 17:46:48 +0000 |
550 | +++ lib/lp/services/librarianserver/tests/test_gc.py 2018-04-21 11:23:22 +0000 |
551 | @@ -23,6 +23,7 @@ |
552 | import sys |
553 | import tempfile |
554 | |
555 | +import pytz |
556 | from sqlobject import SQLObjectNotFound |
557 | from storm.store import Store |
558 | from swiftclient import client as swiftclient |
559 | @@ -88,8 +89,8 @@ |
560 | # Make sure that every file the database knows about exists on disk. |
561 | # We manually remove them for tests that need to cope with missing |
562 | # library items. |
563 | - store = IMasterStore(LibraryFileContent) |
564 | - for content in store.find(LibraryFileContent): |
565 | + self.store = IMasterStore(LibraryFileContent) |
566 | + for content in self.store.find(LibraryFileContent): |
567 | path = librariangc.get_file_path(content.id) |
568 | if not os.path.exists(path): |
569 | if not os.path.exists(os.path.dirname(path)): |
570 | @@ -451,7 +452,7 @@ |
571 | return org_time() + 24 * 60 * 60 + 1 |
572 | |
573 | def tomorrow_utcnow(): |
574 | - return datetime.utcnow() + timedelta(days=1, seconds=1) |
575 | + return datetime.now(pytz.UTC) + timedelta(days=1, seconds=1) |
576 | |
577 | try: |
578 | librariangc.time = tomorrow_time |
579 | @@ -584,13 +585,13 @@ |
580 | |
581 | def test_confirm_no_clock_skew(self): |
582 | # There should not be any clock skew when running the test suite. |
583 | - librariangc.confirm_no_clock_skew(self.con) |
584 | + librariangc.confirm_no_clock_skew(self.store) |
585 | |
586 | # To test this function raises an exception when it should, |
587 | # fool the garbage collector into thinking it is tomorrow. |
588 | with self.librariangc_thinking_it_is_tomorrow(): |
589 | self.assertRaises( |
590 | - Exception, librariangc.confirm_no_clock_skew, (self.con,) |
591 | + Exception, librariangc.confirm_no_clock_skew, (self.store,) |
592 | ) |
593 | |
594 | |
595 | |
596 | === modified file 'lib/lp/services/xref/tests/test_model.py' |
597 | --- lib/lp/services/xref/tests/test_model.py 2015-11-26 17:11:09 +0000 |
598 | +++ lib/lp/services/xref/tests/test_model.py 2018-04-21 11:23:22 +0000 |
599 | @@ -16,7 +16,10 @@ |
600 | from zope.component import getUtility |
601 | |
602 | from lp.services.database.interfaces import IStore |
603 | -from lp.services.database.sqlbase import flush_database_caches |
604 | +from lp.services.database.sqlbase import ( |
605 | + flush_database_caches, |
606 | + get_transaction_timestamp, |
607 | + ) |
608 | from lp.services.xref.interfaces import IXRefSet |
609 | from lp.services.xref.model import XRef |
610 | from lp.testing import ( |
611 | @@ -35,9 +38,7 @@ |
612 | # date_created defaults to now, but can be overridden. |
613 | old = datetime.datetime.strptime('2005-01-01', '%Y-%m-%d').replace( |
614 | tzinfo=pytz.UTC) |
615 | - now = IStore(XRef).execute( |
616 | - "SELECT CURRENT_TIMESTAMP AT TIME ZONE 'UTC'" |
617 | - ).get_one()[0].replace(tzinfo=pytz.UTC) |
618 | + now = get_transaction_timestamp(IStore(XRef)) |
619 | getUtility(IXRefSet).create({ |
620 | ('a', '1'): {('b', 'foo'): {}}, |
621 | ('a', '2'): {('b', 'bar'): {'date_created': old}}}) |
622 | @@ -68,9 +69,7 @@ |
623 | |
624 | def test_findFrom(self): |
625 | creator = self.factory.makePerson() |
626 | - now = IStore(XRef).execute( |
627 | - "SELECT CURRENT_TIMESTAMP AT TIME ZONE 'UTC'" |
628 | - ).get_one()[0].replace(tzinfo=pytz.UTC) |
629 | + now = get_transaction_timestamp(IStore(XRef)) |
630 | getUtility(IXRefSet).create({ |
631 | ('a', 'bar'): { |
632 | ('b', 'foo'): {'creator': creator, 'metadata': {'test': 1}}}, |
633 | |
634 | === modified file 'lib/lp/soyuz/doc/publishing.txt' |
635 | --- lib/lp/soyuz/doc/publishing.txt 2017-06-02 21:46:50 +0000 |
636 | +++ lib/lp/soyuz/doc/publishing.txt 2018-04-21 11:23:22 +0000 |
637 | @@ -347,8 +347,9 @@ |
638 | |
639 | Inspecting the modified record shows it's ready for domination: |
640 | |
641 | + >>> from storm.store import Store |
642 | >>> from lp.services.database.sqlbase import get_transaction_timestamp |
643 | - >>> transaction_timestamp = get_transaction_timestamp() |
644 | + >>> transaction_timestamp = get_transaction_timestamp(Store.of(spph)) |
645 | |
646 | >>> modified_spph = spph |
647 | >>> modified_spph.status |
648 | |
649 | === modified file 'lib/lp/soyuz/interfaces/archivefile.py' |
650 | --- lib/lp/soyuz/interfaces/archivefile.py 2016-04-04 10:06:33 +0000 |
651 | +++ lib/lp/soyuz/interfaces/archivefile.py 2018-04-21 11:23:22 +0000 |
652 | @@ -1,4 +1,4 @@ |
653 | -# Copyright 2016 Canonical Ltd. This software is licensed under the |
654 | +# Copyright 2016-2018 Canonical Ltd. This software is licensed under the |
655 | # GNU Affero General Public License version 3 (see the file LICENSE). |
656 | |
657 | """Interface for a file in an archive.""" |
658 | @@ -49,6 +49,16 @@ |
659 | title=_("The index file in the librarian."), |
660 | schema=ILibraryFileAlias, required=True, readonly=True) |
661 | |
662 | + date_created = Datetime( |
663 | + title=_("The date when this file was created."), |
664 | + # XXX cjwatson 2018-04-17: Should be required=True, but we need to |
665 | + # backfill existing rows first. |
666 | + required=False, readonly=False) |
667 | + |
668 | + date_superseded = Datetime( |
669 | + title=_("The date when this file was scheduled for future deletion."), |
670 | + required=False, readonly=False) |
671 | + |
672 | scheduled_deletion_date = Datetime( |
673 | title=_("The date when this file should stop being published."), |
674 | required=False, readonly=False) |
675 | @@ -79,15 +89,12 @@ |
676 | :param content_type: The MIME type of the file. |
677 | """ |
678 | |
679 | - def getByArchive(archive, container=None, path=None, only_condemned=False, |
680 | - eager_load=False): |
681 | + def getByArchive(archive, container=None, path=None, eager_load=False): |
682 | """Get files in an archive. |
683 | |
684 | :param archive: Return files in this `IArchive`. |
685 | :param container: Return only files with this container. |
686 | :param path: Return only files with this path. |
687 | - :param only_condemned: If True, return only files with a |
688 | - scheduled_deletion_date set. |
689 | :param eager_load: If True, preload related `LibraryFileAlias` and |
690 | `LibraryFileContent` rows. |
691 | :return: An iterable of matched files. |
692 | @@ -99,8 +106,6 @@ |
693 | :param archive_files: The `IArchiveFile`s to schedule for deletion. |
694 | :param stay_of_execution: A `timedelta`; schedule files for deletion |
695 | this amount of time in the future. |
696 | - :return: An iterable of (container, path, sha256) for files that |
697 | - were scheduled for deletion. |
698 | """ |
699 | |
700 | def unscheduleDeletion(archive_files): |
701 | @@ -110,8 +115,6 @@ |
702 | identical to a version that was previously condemned. |
703 | |
704 | :param archive_files: The `IArchiveFile`s to unschedule for deletion. |
705 | - :return: An iterable of (container, path, sha256) for files that |
706 | - were unscheduled for deletion. |
707 | """ |
708 | |
709 | def getContainersToReap(archive, container_prefix=None): |
710 | |
711 | === modified file 'lib/lp/soyuz/model/archivefile.py' |
712 | --- lib/lp/soyuz/model/archivefile.py 2018-01-26 10:11:33 +0000 |
713 | +++ lib/lp/soyuz/model/archivefile.py 2018-04-21 11:23:22 +0000 |
714 | @@ -14,7 +14,6 @@ |
715 | import os.path |
716 | |
717 | import pytz |
718 | -from storm.databases.postgres import Returning |
719 | from storm.locals import ( |
720 | And, |
721 | DateTime, |
722 | @@ -26,7 +25,10 @@ |
723 | from zope.component import getUtility |
724 | from zope.interface import implementer |
725 | |
726 | -from lp.services.database.bulk import load_related |
727 | +from lp.services.database.bulk import ( |
728 | + create, |
729 | + load_related, |
730 | + ) |
731 | from lp.services.database.constants import UTC_NOW |
732 | from lp.services.database.decoratedresultset import DecoratedResultSet |
733 | from lp.services.database.interfaces import ( |
734 | @@ -34,7 +36,6 @@ |
735 | IStore, |
736 | ) |
737 | from lp.services.database.sqlbase import convert_storm_clause_to_string |
738 | -from lp.services.database.stormexpr import BulkUpdate |
739 | from lp.services.librarian.interfaces import ILibraryFileAliasSet |
740 | from lp.services.librarian.model import ( |
741 | LibraryFileAlias, |
742 | @@ -46,6 +47,15 @@ |
743 | ) |
744 | |
745 | |
746 | +def _now(): |
747 | + """Get the current transaction timestamp. |
748 | + |
749 | + Tests can override this with a Storm expression or a `datetime` to |
750 | + simulate time changes. |
751 | + """ |
752 | + return UTC_NOW |
753 | + |
754 | + |
755 | @implementer(IArchiveFile) |
756 | class ArchiveFile(Storm): |
757 | """See `IArchiveFile`.""" |
758 | @@ -64,6 +74,14 @@ |
759 | library_file_id = Int(name='library_file', allow_none=False) |
760 | library_file = Reference(library_file_id, 'LibraryFileAlias.id') |
761 | |
762 | + date_created = DateTime( |
763 | + # XXX cjwatson 2018-04-17: Should be allow_none=False, but we need |
764 | + # to backfill existing rows first. |
765 | + name='date_created', tzinfo=pytz.UTC, allow_none=True) |
766 | + |
767 | + date_superseded = DateTime( |
768 | + name='date_superseded', tzinfo=pytz.UTC, allow_none=True) |
769 | + |
770 | scheduled_deletion_date = DateTime( |
771 | name='scheduled_deletion_date', tzinfo=pytz.UTC, allow_none=True) |
772 | |
773 | @@ -74,18 +92,11 @@ |
774 | self.container = container |
775 | self.path = path |
776 | self.library_file = library_file |
777 | + self.date_created = _now() |
778 | + self.date_superseded = None |
779 | self.scheduled_deletion_date = None |
780 | |
781 | |
782 | -def _now(): |
783 | - """Get the current transaction timestamp. |
784 | - |
785 | - Tests can override this with a Storm expression or a `datetime` to |
786 | - simulate time changes. |
787 | - """ |
788 | - return UTC_NOW |
789 | - |
790 | - |
791 | @implementer(IArchiveFileSet) |
792 | class ArchiveFileSet: |
793 | """See `IArchiveFileSet`.""" |
794 | @@ -106,8 +117,7 @@ |
795 | return cls.new(archive, container, path, library_file) |
796 | |
797 | @staticmethod |
798 | - def getByArchive(archive, container=None, path=None, only_condemned=False, |
799 | - eager_load=False): |
800 | + def getByArchive(archive, container=None, path=None, eager_load=False): |
801 | """See `IArchiveFileSet`.""" |
802 | clauses = [ArchiveFile.archive == archive] |
803 | # XXX cjwatson 2016-03-15: We'll need some more sophisticated way to |
804 | @@ -116,8 +126,6 @@ |
805 | clauses.append(ArchiveFile.container == container) |
806 | if path is not None: |
807 | clauses.append(ArchiveFile.path == path) |
808 | - if only_condemned: |
809 | - clauses.append(ArchiveFile.scheduled_deletion_date != None) |
810 | archive_files = IStore(ArchiveFile).find(ArchiveFile, *clauses) |
811 | |
812 | def eager_load(rows): |
813 | @@ -132,41 +140,23 @@ |
814 | @staticmethod |
815 | def scheduleDeletion(archive_files, stay_of_execution): |
816 | """See `IArchiveFileSet`.""" |
817 | - clauses = [ |
818 | - ArchiveFile.id.is_in( |
819 | - set(archive_file.id for archive_file in archive_files)), |
820 | - ArchiveFile.library_file == LibraryFileAlias.id, |
821 | - LibraryFileAlias.content == LibraryFileContent.id, |
822 | - ] |
823 | - new_date = _now() + stay_of_execution |
824 | - return_columns = [ |
825 | - ArchiveFile.container, ArchiveFile.path, LibraryFileContent.sha256] |
826 | - return list(IMasterStore(ArchiveFile).execute(Returning( |
827 | - BulkUpdate( |
828 | - {ArchiveFile.scheduled_deletion_date: new_date}, |
829 | - table=ArchiveFile, |
830 | - values=[LibraryFileAlias, LibraryFileContent], |
831 | - where=And(*clauses)), |
832 | - columns=return_columns))) |
833 | + rows = IMasterStore(ArchiveFile).find( |
834 | + ArchiveFile, ArchiveFile.id.is_in( |
835 | + set(archive_file.id for archive_file in archive_files))) |
836 | + rows.set( |
837 | + date_superseded=_now(), |
838 | + scheduled_deletion_date=_now() + stay_of_execution) |
839 | |
840 | @staticmethod |
841 | def unscheduleDeletion(archive_files): |
842 | """See `IArchiveFileSet`.""" |
843 | - clauses = [ |
844 | - ArchiveFile.id.is_in( |
845 | - set(archive_file.id for archive_file in archive_files)), |
846 | - ArchiveFile.library_file == LibraryFileAlias.id, |
847 | - LibraryFileAlias.content == LibraryFileContent.id, |
848 | - ] |
849 | - return_columns = [ |
850 | - ArchiveFile.container, ArchiveFile.path, LibraryFileContent.sha256] |
851 | - return list(IMasterStore(ArchiveFile).execute(Returning( |
852 | - BulkUpdate( |
853 | - {ArchiveFile.scheduled_deletion_date: None}, |
854 | - table=ArchiveFile, |
855 | - values=[LibraryFileAlias, LibraryFileContent], |
856 | - where=And(*clauses)), |
857 | - columns=return_columns))) |
858 | + create( |
859 | + (ArchiveFile.archive, ArchiveFile.container, ArchiveFile.path, |
860 | + ArchiveFile.library_file, ArchiveFile.date_created, |
861 | + ArchiveFile.date_superseded, ArchiveFile.scheduled_deletion_date), |
862 | + [(archive_file.archive, archive_file.container, archive_file.path, |
863 | + archive_file.library_file, _now(), None, None) |
864 | + for archive_file in archive_files]) |
865 | |
866 | @staticmethod |
867 | def getContainersToReap(archive, container_prefix=None): |
868 | |
869 | === modified file 'lib/lp/soyuz/tests/test_archivefile.py' |
870 | --- lib/lp/soyuz/tests/test_archivefile.py 2016-04-04 10:06:33 +0000 |
871 | +++ lib/lp/soyuz/tests/test_archivefile.py 2018-04-21 11:23:22 +0000 |
872 | @@ -1,4 +1,4 @@ |
873 | -# Copyright 2016 Canonical Ltd. This software is licensed under the |
874 | +# Copyright 2016-2018 Canonical Ltd. This software is licensed under the |
875 | # GNU Affero General Public License version 3 (see the file LICENSE). |
876 | |
877 | """ArchiveFile tests.""" |
878 | @@ -7,25 +7,39 @@ |
879 | |
880 | __metaclass__ = type |
881 | |
882 | -from datetime import ( |
883 | - datetime, |
884 | - timedelta, |
885 | - ) |
886 | +from datetime import timedelta |
887 | import os |
888 | |
889 | -import pytz |
890 | -from testtools.matchers import LessThan |
891 | +from storm.store import Store |
892 | +from testtools.matchers import ( |
893 | + AfterPreprocessing, |
894 | + Equals, |
895 | + Is, |
896 | + MatchesSetwise, |
897 | + MatchesStructure, |
898 | + ) |
899 | import transaction |
900 | from zope.component import getUtility |
901 | from zope.security.proxy import removeSecurityProxy |
902 | |
903 | -from lp.services.database.sqlbase import flush_database_caches |
904 | +from lp.services.database.sqlbase import ( |
905 | + flush_database_caches, |
906 | + get_transaction_timestamp, |
907 | + ) |
908 | from lp.services.osutils import open_for_writing |
909 | from lp.soyuz.interfaces.archivefile import IArchiveFileSet |
910 | from lp.testing import TestCaseWithFactory |
911 | from lp.testing.layers import LaunchpadZopelessLayer |
912 | |
913 | |
914 | +def read_library_file(library_file): |
915 | + library_file.open() |
916 | + try: |
917 | + return library_file.read() |
918 | + finally: |
919 | + library_file.close() |
920 | + |
921 | + |
922 | class TestArchiveFile(TestCaseWithFactory): |
923 | |
924 | layer = LaunchpadZopelessLayer |
925 | @@ -35,11 +49,15 @@ |
926 | library_file = self.factory.makeLibraryFileAlias() |
927 | archive_file = getUtility(IArchiveFileSet).new( |
928 | archive, "foo", "dists/foo", library_file) |
929 | - self.assertEqual(archive, archive_file.archive) |
930 | - self.assertEqual("foo", archive_file.container) |
931 | - self.assertEqual("dists/foo", archive_file.path) |
932 | - self.assertEqual(library_file, archive_file.library_file) |
933 | - self.assertIsNone(archive_file.scheduled_deletion_date) |
934 | + self.assertThat(archive_file, MatchesStructure( |
935 | + archive=Equals(archive), |
936 | + container=Equals("foo"), |
937 | + path=Equals("dists/foo"), |
938 | + library_file=Equals(library_file), |
939 | + date_created=Equals( |
940 | + get_transaction_timestamp(Store.of(archive_file))), |
941 | + date_superseded=Is(None), |
942 | + scheduled_deletion_date=Is(None))) |
943 | |
944 | def test_newFromFile(self): |
945 | root = self.makeTemporaryDirectory() |
946 | @@ -49,24 +67,24 @@ |
947 | with open(os.path.join(root, "dists/foo"), "rb") as f: |
948 | archive_file = getUtility(IArchiveFileSet).newFromFile( |
949 | archive, "foo", "dists/foo", f, 4, "text/plain") |
950 | + now = get_transaction_timestamp(Store.of(archive_file)) |
951 | transaction.commit() |
952 | - self.assertEqual(archive, archive_file.archive) |
953 | - self.assertEqual("foo", archive_file.container) |
954 | - self.assertEqual("dists/foo", archive_file.path) |
955 | - archive_file.library_file.open() |
956 | - try: |
957 | - self.assertEqual("abc\n", archive_file.library_file.read()) |
958 | - finally: |
959 | - archive_file.library_file.close() |
960 | - self.assertIsNone(archive_file.scheduled_deletion_date) |
961 | + self.assertThat(archive_file, MatchesStructure( |
962 | + archive=Equals(archive), |
963 | + container=Equals("foo"), |
964 | + path=Equals("dists/foo"), |
965 | + library_file=AfterPreprocessing( |
966 | + read_library_file, Equals("abc\n")), |
967 | + date_created=Equals(now), |
968 | + date_superseded=Is(None), |
969 | + scheduled_deletion_date=Is(None))) |
970 | |
971 | def test_getByArchive(self): |
972 | archives = [self.factory.makeArchive(), self.factory.makeArchive()] |
973 | archive_files = [] |
974 | - now = datetime.now(pytz.UTC) |
975 | for archive in archives: |
976 | archive_files.append(self.factory.makeArchiveFile( |
977 | - archive=archive, scheduled_deletion_date=now)) |
978 | + archive=archive)) |
979 | archive_files.append(self.factory.makeArchiveFile( |
980 | archive=archive, container="foo")) |
981 | archive_file_set = getUtility(IArchiveFileSet) |
982 | @@ -84,9 +102,6 @@ |
983 | self.assertContentEqual( |
984 | [], archive_file_set.getByArchive(archives[0], path="other")) |
985 | self.assertContentEqual( |
986 | - [archive_files[0]], |
987 | - archive_file_set.getByArchive(archives[0], only_condemned=True)) |
988 | - self.assertContentEqual( |
989 | archive_files[2:], archive_file_set.getByArchive(archives[1])) |
990 | self.assertContentEqual( |
991 | [archive_files[3]], |
992 | @@ -99,46 +114,50 @@ |
993 | archives[1], path=archive_files[3].path)) |
994 | self.assertContentEqual( |
995 | [], archive_file_set.getByArchive(archives[1], path="other")) |
996 | - self.assertContentEqual( |
997 | - [archive_files[2]], |
998 | - archive_file_set.getByArchive(archives[1], only_condemned=True)) |
999 | |
1000 | def test_scheduleDeletion(self): |
1001 | archive_files = [self.factory.makeArchiveFile() for _ in range(3)] |
1002 | - expected_rows = [ |
1003 | - (archive_file.container, archive_file.path, |
1004 | - archive_file.library_file.content.sha256) |
1005 | - for archive_file in archive_files[:2]] |
1006 | - rows = getUtility(IArchiveFileSet).scheduleDeletion( |
1007 | + getUtility(IArchiveFileSet).scheduleDeletion( |
1008 | archive_files[:2], timedelta(days=1)) |
1009 | - self.assertContentEqual(expected_rows, rows) |
1010 | flush_database_caches() |
1011 | - tomorrow = datetime.now(pytz.UTC) + timedelta(days=1) |
1012 | - # Allow a bit of timing slack for slow tests. |
1013 | - self.assertThat( |
1014 | - tomorrow - archive_files[0].scheduled_deletion_date, |
1015 | - LessThan(timedelta(minutes=5))) |
1016 | - self.assertThat( |
1017 | - tomorrow - archive_files[1].scheduled_deletion_date, |
1018 | - LessThan(timedelta(minutes=5))) |
1019 | + tomorrow = ( |
1020 | + get_transaction_timestamp(Store.of(archive_files[0])) + |
1021 | + timedelta(days=1)) |
1022 | + self.assertEqual(tomorrow, archive_files[0].scheduled_deletion_date) |
1023 | + self.assertEqual(tomorrow, archive_files[1].scheduled_deletion_date) |
1024 | self.assertIsNone(archive_files[2].scheduled_deletion_date) |
1025 | |
1026 | def test_unscheduleDeletion(self): |
1027 | archive_files = [self.factory.makeArchiveFile() for _ in range(3)] |
1028 | - now = datetime.now(pytz.UTC) |
1029 | + now = get_transaction_timestamp(Store.of(archive_files[0])) |
1030 | for archive_file in archive_files: |
1031 | removeSecurityProxy(archive_file).scheduled_deletion_date = now |
1032 | - expected_rows = [ |
1033 | - (archive_file.container, archive_file.path, |
1034 | - archive_file.library_file.content.sha256) |
1035 | - for archive_file in archive_files[:2]] |
1036 | - rows = getUtility(IArchiveFileSet).unscheduleDeletion( |
1037 | - archive_files[:2]) |
1038 | - self.assertContentEqual(expected_rows, rows) |
1039 | + archive_file_set = getUtility(IArchiveFileSet) |
1040 | + archive_file_set.unscheduleDeletion(archive_files[:2]) |
1041 | flush_database_caches() |
1042 | - self.assertIsNone(archive_files[0].scheduled_deletion_date) |
1043 | - self.assertIsNone(archive_files[1].scheduled_deletion_date) |
1044 | - self.assertIsNotNone(archive_files[2].scheduled_deletion_date) |
1045 | + self.assertThat( |
1046 | + archive_file_set.getByArchive( |
1047 | + archive_files[0].archive, |
1048 | + container=archive_files[0].container, |
1049 | + path=archive_files[0].path), |
1050 | + MatchesSetwise( |
1051 | + MatchesStructure(scheduled_deletion_date=Equals(now)), |
1052 | + MatchesStructure(scheduled_deletion_date=Is(None)))) |
1053 | + self.assertThat( |
1054 | + archive_file_set.getByArchive( |
1055 | + archive_files[1].archive, |
1056 | + container=archive_files[1].container, |
1057 | + path=archive_files[1].path), |
1058 | + MatchesSetwise( |
1059 | + MatchesStructure(scheduled_deletion_date=Equals(now)), |
1060 | + MatchesStructure(scheduled_deletion_date=Is(None)))) |
1061 | + self.assertThat( |
1062 | + archive_file_set.getByArchive( |
1063 | + archive_files[2].archive, |
1064 | + container=archive_files[2].container, |
1065 | + path=archive_files[2].path), |
1066 | + MatchesSetwise( |
1067 | + MatchesStructure(scheduled_deletion_date=Equals(now)))) |
1068 | |
1069 | def test_getContainersToReap(self): |
1070 | archive = self.factory.makeArchive() |
1071 | @@ -150,7 +169,7 @@ |
1072 | other_archive = self.factory.makeArchive() |
1073 | archive_files.append(self.factory.makeArchiveFile( |
1074 | archive=other_archive, container="baz")) |
1075 | - now = datetime.now(pytz.UTC) |
1076 | + now = get_transaction_timestamp(Store.of(archive_files[0])) |
1077 | removeSecurityProxy(archive_files[0]).scheduled_deletion_date = ( |
1078 | now - timedelta(days=1)) |
1079 | removeSecurityProxy(archive_files[1]).scheduled_deletion_date = ( |
1080 | @@ -183,7 +202,7 @@ |
1081 | other_archive = self.factory.makeArchive() |
1082 | archive_files.append( |
1083 | self.factory.makeArchiveFile(archive=other_archive)) |
1084 | - now = datetime.now(pytz.UTC) |
1085 | + now = get_transaction_timestamp(Store.of(archive_files[0])) |
1086 | removeSecurityProxy(archive_files[0]).scheduled_deletion_date = ( |
1087 | now - timedelta(days=1)) |
1088 | removeSecurityProxy(archive_files[1]).scheduled_deletion_date = ( |