Merge lp:~cjwatson/launchpad/archive-file-history into lp:launchpad

Proposed by Colin Watson
Status: Rejected
Rejected by: Colin Watson
Proposed branch: lp:~cjwatson/launchpad/archive-file-history
Merge into: lp:launchpad
Prerequisite: lp:~cjwatson/launchpad/get-transaction-timestamp-per-store
Diff against target: 771 lines (+248/-178)
5 files modified
lib/lp/archivepublisher/publishing.py (+43/-22)
lib/lp/archivepublisher/tests/test_publisher.py (+94/-61)
lib/lp/soyuz/interfaces/archivefile.py (+12/-9)
lib/lp/soyuz/model/archivefile.py (+37/-47)
lib/lp/soyuz/tests/test_archivefile.py (+62/-39)
To merge this branch: bzr merge lp:~cjwatson/launchpad/archive-file-history
Reviewer: Launchpad code reviewers
Status: Pending
Review via email: mp+343752@code.launchpad.net

This proposal supersedes a proposal from 2018-04-21.

Commit message

Turn ArchiveFile into a history table, adding date_created and date_superseded columns. Adjust the publisher to match.

Description of the change

The main complexity here is in the changed publisher logic, especially for reprieving (that is, the situation where file contents that were scheduled for deletion become live again, particularly common for empty files). We previously did this by simply clearing scheduled_deletion_date on the old ArchiveFile row, but that strategy no longer works when we're trying to maintain history: instead, we need to create new rows in such cases. As a result of the logic changes here, we no longer need the only_condemned=True option in ArchiveFileSet.getByArchive.
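
In rough outline, the reprieve now goes through ArchiveFileSet.unscheduleDeletion, which (simplified from the preview diff below) inserts fresh rows rather than mutating the condemned ones:

    def unscheduleDeletion(archive_files):
        # Leave the condemned rows in place as history; instead, insert
        # fresh uncondemned rows with the same archive/container/path and
        # library file, created now.
        create(
            (ArchiveFile.archive, ArchiveFile.container, ArchiveFile.path,
             ArchiveFile.library_file, ArchiveFile.date_created,
             ArchiveFile.date_superseded, ArchiveFile.scheduled_deletion_date),
            [(archive_file.archive, archive_file.container, archive_file.path,
              archive_file.library_file, _now(), None, None)
             for archive_file in archive_files])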

I think the publisher tests are somewhat clearer now: they explicitly check creation dates, which makes the chain of events more obvious.
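
For example, each expected suite file is now described by a (path, created_at, superseded_at) tuple, as in this excerpt from the updated tests:

    self.assertHasSuiteFiles(
        ('Contents-*', 'Release'),
        ('Contents-i386', self.times[0], None),
        ('Release', self.times[0], None))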

ArchiveFile.date_superseded is arguably redundant with ArchiveFile.scheduled_deletion_date, but I think keeping both is a good idea for clarity, and in case we ever change the stay of execution in future.
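
The two are tied together today by scheduleDeletion, which sets both from the same transaction timestamp (excerpt from the diff below), so scheduled_deletion_date is currently always date_superseded plus the stay of execution:

    rows.set(
        date_superseded=_now(),
        scheduled_deletion_date=_now() + stay_of_execution)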

We'll need to backfill the new columns. I'll probably make a separate branch with a garbo job for this: my plan is to set date_created to some arbitrary value (probably just the epoch, so that it's clear that it's arbitrary), and to set date_superseded to scheduled_deletion_date minus the stay of execution for rows that have a scheduled_deletion_date.
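
Roughly, such a garbo job might do something like this. This is a hypothetical sketch only, not part of this branch: batching and job plumbing are elided, and it assumes the existing ArchiveFile model class, IMasterStore, and the publisher's BY_HASH_STAY_OF_EXECUTION constant.

    from datetime import datetime, timedelta
    import pytz

    # Hypothetical backfill sketch, not part of this branch.
    EPOCH = datetime(1970, 1, 1, tzinfo=pytz.UTC)
    stay_of_execution = timedelta(days=BY_HASH_STAY_OF_EXECUTION)
    store = IMasterStore(ArchiveFile)
    # Arbitrary (and obviously arbitrary) creation date for existing rows.
    for archive_file in store.find(
            ArchiveFile, ArchiveFile.date_created == None):
        archive_file.date_created = EPOCH
    # Recover date_superseded from the existing condemnation date.
    for archive_file in store.find(
            ArchiveFile,
            ArchiveFile.date_superseded == None,
            ArchiveFile.scheduled_deletion_date != None):
        archive_file.date_superseded = (
            archive_file.scheduled_deletion_date - stay_of_execution)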

Unmerged revisions

18624. By Colin Watson

Turn ArchiveFile into a history table, adding date_created and date_superseded columns. Adjust the publisher to match.

Preview Diff

1=== modified file 'lib/lp/archivepublisher/publishing.py'
2--- lib/lp/archivepublisher/publishing.py 2018-03-27 23:26:12 +0000
3+++ lib/lp/archivepublisher/publishing.py 2018-04-21 11:24:08 +0000
4@@ -1063,8 +1063,7 @@
5 assert path.startswith("dists/")
6 return path[len("dists/"):]
7
8- # Gather information on entries in the current Release file, and
9- # make sure nothing there is condemned.
10+ # Gather information on entries in the current Release file.
11 current_files = {}
12 for current_entry in (
13 release_data["SHA256"] + extra_data.get("SHA256", [])):
14@@ -1073,33 +1072,54 @@
15 real_path = os.path.join(suite_dir, real_name)
16 current_files[path] = (
17 int(current_entry["size"]), current_entry["sha256"], real_path)
18+
19+ # Gather information on entries currently in the database. Ensure
20+ # that we know about all the relevant by-hash directory trees before
21+ # doing any removals so that we can prune them properly later, and
22+ # work out which condemned files should be reprieved due to the
23+ # paths in question having their previous content again.
24+ reprieved_files = defaultdict(list)
25 uncondemned_files = set()
26 for db_file in archive_file_set.getByArchive(
27- self.archive, container=container, only_condemned=True,
28- eager_load=True):
29- stripped_path = strip_dists(db_file.path)
30- if stripped_path in current_files:
31- current_sha256 = current_files[stripped_path][1]
32- if db_file.library_file.content.sha256 == current_sha256:
33- uncondemned_files.add(db_file)
34- if uncondemned_files:
35- for container, path, sha256 in archive_file_set.unscheduleDeletion(
36- uncondemned_files):
37+ self.archive, container=container, eager_load=True):
38+ by_hashes.registerChild(os.path.dirname(strip_dists(db_file.path)))
39+ file_key = (db_file.path, db_file.library_file.content.sha256)
40+ if db_file.scheduled_deletion_date is None:
41+ uncondemned_files.add(file_key)
42+ else:
43+ stripped_path = strip_dists(db_file.path)
44+ if stripped_path in current_files:
45+ current_sha256 = current_files[stripped_path][1]
46+ if db_file.library_file.content.sha256 == current_sha256:
47+ reprieved_files[file_key].append(db_file)
48+
49+ # We may already have uncondemned entries with the same path and
50+ # content as condemned entries that we were about to reprieve; if
51+ # so, there's no need to reprieve them.
52+ for file_key in uncondemned_files:
53+ reprieved_files.pop(file_key, None)
54+
55+ # Make sure nothing in the current Release file is condemned.
56+ if reprieved_files:
57+ reprieved_files_flat = set(
58+ chain.from_iterable(reprieved_files.values()))
59+ archive_file_set.unscheduleDeletion(reprieved_files_flat)
60+ for db_file in reprieved_files_flat:
61 self.log.debug(
62 "by-hash: Unscheduled %s for %s in %s for deletion" % (
63- sha256, path, container))
64+ db_file.library_file.content.sha256, db_file.path,
65+ db_file.container))
66
67 # Remove any condemned files from the database whose stay of
68 # execution has elapsed. We ensure that we know about all the
69 # relevant by-hash directory trees before doing any removals so that
70 # we can prune them properly later.
71- for db_file in archive_file_set.getByArchive(
72- self.archive, container=container):
73- by_hashes.registerChild(os.path.dirname(strip_dists(db_file.path)))
74 for container, path, sha256 in archive_file_set.reap(
75 self.archive, container=container):
76- self.log.debug(
77- "by-hash: Deleted %s for %s in %s" % (sha256, path, container))
78+ if (path, sha256) not in uncondemned_files:
79+ self.log.debug(
80+ "by-hash: Deleted %s for %s in %s" %
81+ (sha256, path, container))
82
83 # Ensure that all files recorded in the database are in by-hash.
84 db_files = archive_file_set.getByArchive(
85@@ -1120,12 +1140,13 @@
86 if db_file.library_file.content.sha256 != current_sha256:
87 condemned_files.add(db_file)
88 if condemned_files:
89- for container, path, sha256 in archive_file_set.scheduleDeletion(
90- condemned_files,
91- timedelta(days=BY_HASH_STAY_OF_EXECUTION)):
92+ archive_file_set.scheduleDeletion(
93+ condemned_files, timedelta(days=BY_HASH_STAY_OF_EXECUTION))
94+ for db_file in condemned_files:
95 self.log.debug(
96 "by-hash: Scheduled %s for %s in %s for deletion" % (
97- sha256, path, container))
98+ db_file.library_file.content.sha256, db_file.path,
99+ db_file.container))
100
101 # Ensure that all the current index files are in by-hash and have
102 # corresponding database entries.
103
104=== modified file 'lib/lp/archivepublisher/tests/test_publisher.py'
105--- lib/lp/archivepublisher/tests/test_publisher.py 2018-04-05 11:32:50 +0000
106+++ lib/lp/archivepublisher/tests/test_publisher.py 2018-04-21 11:24:08 +0000
107@@ -21,8 +21,6 @@
108 from functools import partial
109 import gzip
110 import hashlib
111-from itertools import product
112-from operator import attrgetter
113 import os
114 import shutil
115 import stat
116@@ -51,7 +49,6 @@
117 LessThan,
118 Matcher,
119 MatchesDict,
120- MatchesListwise,
121 MatchesSetwise,
122 MatchesStructure,
123 Not,
124@@ -2581,12 +2578,12 @@
125 publisher.D_writeReleaseFiles(False)
126
127 @classmethod
128- def _makeScheduledDeletionDateMatcher(cls, condemned_at):
129- if condemned_at is None:
130+ def _makeScheduledDeletionDateMatcher(cls, superseded_at):
131+ if superseded_at is None:
132 return Is(None)
133 else:
134 return Equals(
135- condemned_at + timedelta(days=BY_HASH_STAY_OF_EXECUTION))
136+ superseded_at + timedelta(days=BY_HASH_STAY_OF_EXECUTION))
137
138 def assertHasSuiteFiles(self, patterns, *properties):
139 def is_interesting(path):
140@@ -2600,11 +2597,13 @@
141 self.ubuntutest.main_archive)
142 if is_interesting(archive_file.path)]
143 matchers = []
144- for path, condemned_at in properties:
145+ for path, created_at, superseded_at in properties:
146 matchers.append(MatchesStructure(
147 path=Equals('dists/breezy-autotest/%s' % path),
148+ date_created=Equals(created_at),
149+ date_superseded=Equals(superseded_at),
150 scheduled_deletion_date=self._makeScheduledDeletionDateMatcher(
151- condemned_at)))
152+ superseded_at)))
153 self.assertThat(files, MatchesSetwise(*matchers))
154
155 def test_disabled(self):
156@@ -2754,7 +2753,8 @@
157 flush_database_caches()
158 self.assertHasSuiteFiles(
159 ('Contents-*', 'Release'),
160- ('Contents-i386', None), ('Release', None))
161+ ('Contents-i386', self.times[0], None),
162+ ('Release', self.times[0], None))
163 releases = [get_release_contents()]
164 self.assertThat(
165 suite_path('by-hash'),
166@@ -2768,8 +2768,10 @@
167 flush_database_caches()
168 self.assertHasSuiteFiles(
169 ('Contents-*', 'Release'),
170- ('Contents-i386', None), ('Contents-hppa', None),
171- ('Release', self.times[1]), ('Release', None))
172+ ('Contents-i386', self.times[0], None),
173+ ('Contents-hppa', self.times[1], None),
174+ ('Release', self.times[0], self.times[1]),
175+ ('Release', self.times[1], None))
176 releases.append(get_release_contents())
177 self.assertThat(
178 suite_path('by-hash'),
179@@ -2782,9 +2784,11 @@
180 flush_database_caches()
181 self.assertHasSuiteFiles(
182 ('Contents-*', 'Release'),
183- ('Contents-i386', self.times[2]), ('Contents-hppa', None),
184- ('Release', self.times[1]), ('Release', self.times[2]),
185- ('Release', None))
186+ ('Contents-i386', self.times[0], self.times[2]),
187+ ('Contents-hppa', self.times[1], None),
188+ ('Release', self.times[0], self.times[1]),
189+ ('Release', self.times[1], self.times[2]),
190+ ('Release', self.times[2], None))
191 releases.append(get_release_contents())
192 self.assertThat(
193 suite_path('by-hash'),
194@@ -2796,9 +2800,12 @@
195 flush_database_caches()
196 self.assertHasSuiteFiles(
197 ('Contents-*', 'Release'),
198- ('Contents-i386', self.times[2]), ('Contents-hppa', None),
199- ('Release', self.times[1]), ('Release', self.times[2]),
200- ('Release', self.times[3]), ('Release', None))
201+ ('Contents-i386', self.times[0], self.times[2]),
202+ ('Contents-hppa', self.times[1], None),
203+ ('Release', self.times[0], self.times[1]),
204+ ('Release', self.times[1], self.times[2]),
205+ ('Release', self.times[2], self.times[3]),
206+ ('Release', self.times[3], None))
207 releases.append(get_release_contents())
208 self.assertThat(
209 suite_path('by-hash'),
210@@ -2817,9 +2824,10 @@
211 flush_database_caches()
212 self.assertHasSuiteFiles(
213 ('Contents-*', 'Release'),
214- ('Contents-hppa', self.times[4]),
215- ('Release', self.times[3]), ('Release', self.times[4]),
216- ('Release', None))
217+ ('Contents-hppa', self.times[1], self.times[4]),
218+ ('Release', self.times[2], self.times[3]),
219+ ('Release', self.times[3], self.times[4]),
220+ ('Release', self.times[4], None))
221 releases.append(get_release_contents())
222 self.assertThat(
223 suite_path('by-hash'),
224@@ -2836,7 +2844,8 @@
225 flush_database_caches()
226 self.assertHasSuiteFiles(
227 ('Contents-*', 'Release'),
228- ('Release', self.times[5]), ('Release', None))
229+ ('Release', self.times[4], self.times[5]),
230+ ('Release', self.times[5], None))
231 releases.append(get_release_contents())
232 self.assertThat(suite_path('by-hash'), ByHashHasContents(releases[4:]))
233
234@@ -2863,9 +2872,13 @@
235 for name in ('Release', 'Sources'):
236 with open(suite_path('main', 'source', name), 'rb') as f:
237 main_contents.add(f.read())
238+ self.assertHasSuiteFiles(
239+ ('main/source/Sources',),
240+ ('main/source/Sources', self.times[0], None))
241
242 # Add a source package so that Sources is non-empty.
243 pub_source = self.getPubSource(filecontent='Source: foo\n')
244+ self.advanceTime(delta=timedelta(hours=1))
245 self.runSteps(publisher, step_a=True, step_c=True, step_d=True)
246 transaction.commit()
247 with open(suite_path('main', 'source', 'Sources'), 'rb') as f:
248@@ -2874,28 +2887,42 @@
249 self.assertThat(
250 suite_path('main', 'source', 'by-hash'),
251 ByHashHasContents(main_contents))
252-
253- # Make the empty Sources file ready to prune.
254- self.advanceTime(
255- delta=timedelta(days=BY_HASH_STAY_OF_EXECUTION, hours=1))
256+ self.assertHasSuiteFiles(
257+ ('main/source/Sources',),
258+ ('main/source/Sources', self.times[0], self.times[1]),
259+ ('main/source/Sources', self.times[1], None))
260
261 # Delete the source package so that Sources is empty again. The
262- # empty file is reprieved and the non-empty one is condemned.
263+ # empty file is reprieved (by creating a new ArchiveFile referring
264+ # to it) and the non-empty one is condemned.
265 pub_source.requestDeletion(self.ubuntutest.owner)
266- self.runSteps(publisher, step_a=True, step_c=True, step_d=True)
267- transaction.commit()
268- self.assertThat(
269- suite_path('main', 'source', 'by-hash'),
270- ByHashHasContents(main_contents))
271- archive_files = getUtility(IArchiveFileSet).getByArchive(
272- self.ubuntutest.main_archive,
273- path='dists/breezy-autotest/main/source/Sources')
274- self.assertThat(
275- sorted(archive_files, key=attrgetter('id')),
276- MatchesListwise([
277- MatchesStructure(scheduled_deletion_date=Is(None)),
278- MatchesStructure(scheduled_deletion_date=Not(Is(None))),
279- ]))
280+ self.advanceTime(delta=timedelta(hours=1))
281+ self.runSteps(publisher, step_a=True, step_c=True, step_d=True)
282+ transaction.commit()
283+ self.assertThat(
284+ suite_path('main', 'source', 'by-hash'),
285+ ByHashHasContents(main_contents))
286+ self.assertHasSuiteFiles(
287+ ('main/source/Sources',),
288+ ('main/source/Sources', self.times[0], self.times[1]),
289+ ('main/source/Sources', self.times[1], self.times[2]),
290+ ('main/source/Sources', self.times[2], None))
291+
292+ # Make the first empty Sources file ready to prune. This doesn't
293+ # change the set of files on disk, because there's still a newer
294+ # reference to the empty file.
295+ self.advanceTime(
296+ absolute=self.times[1] + timedelta(
297+ days=BY_HASH_STAY_OF_EXECUTION, minutes=30))
298+ self.runSteps(publisher, step_a=True, step_c=True, step_d=True)
299+ transaction.commit()
300+ self.assertThat(
301+ suite_path('main', 'source', 'by-hash'),
302+ ByHashHasContents(main_contents))
303+ self.assertHasSuiteFiles(
304+ ('main/source/Sources',),
305+ ('main/source/Sources', self.times[1], self.times[2]),
306+ ('main/source/Sources', self.times[2], None))
307
308 def setUpPruneableSuite(self):
309 self.setUpMockTime()
310@@ -2924,14 +2951,18 @@
311 # We have two condemned sets of index files and one uncondemned set.
312 # main/source/Release contains a small enough amount of information
313 # that it doesn't change.
314- expected_suite_files = (
315- list(product(
316- ('main/source/Sources.gz', 'main/source/Sources.bz2',
317- 'Release'),
318- (self.times[1], self.times[2], None))) +
319- [('main/source/Release', None)])
320 self.assertHasSuiteFiles(
321- ('main/source/*', 'Release'), *expected_suite_files)
322+ ('main/source/*', 'Release'),
323+ ('main/source/Sources.gz', self.times[0], self.times[1]),
324+ ('main/source/Sources.gz', self.times[1], self.times[2]),
325+ ('main/source/Sources.gz', self.times[2], None),
326+ ('main/source/Sources.bz2', self.times[0], self.times[1]),
327+ ('main/source/Sources.bz2', self.times[1], self.times[2]),
328+ ('main/source/Sources.bz2', self.times[2], None),
329+ ('main/source/Release', self.times[0], None),
330+ ('Release', self.times[0], self.times[1]),
331+ ('Release', self.times[1], self.times[2]),
332+ ('Release', self.times[2], None))
333 self.assertThat(suite_path('by-hash'), ByHashHasContents(top_contents))
334 self.assertThat(
335 suite_path('main', 'source', 'by-hash'),
336@@ -2964,14 +2995,15 @@
337 self.assertEqual(set(), publisher.dirty_pockets)
338 # The condemned index files are removed, and no new Release file is
339 # generated.
340- expected_suite_files = (
341- list(product(
342- ('main/source/Sources.gz', 'main/source/Sources.bz2'),
343- (self.times[2], None))) +
344- [('main/source/Release', None),
345- ('Release', self.times[2]), ('Release', None)])
346 self.assertHasSuiteFiles(
347- ('main/source/*', 'Release'), *expected_suite_files)
348+ ('main/source/*', 'Release'),
349+ ('main/source/Sources.gz', self.times[1], self.times[2]),
350+ ('main/source/Sources.gz', self.times[2], None),
351+ ('main/source/Sources.bz2', self.times[1], self.times[2]),
352+ ('main/source/Sources.bz2', self.times[2], None),
353+ ('main/source/Release', self.times[0], None),
354+ ('Release', self.times[1], self.times[2]),
355+ ('Release', self.times[2], None))
356 self.assertThat(suite_path('by-hash'), ByHashHasContents(top_contents))
357 self.assertThat(
358 suite_path('main', 'source', 'by-hash'),
359@@ -2998,14 +3030,15 @@
360 self.assertEqual(release_mtime, os.stat(release_path).st_mtime)
361 # The condemned index files are removed, and no new Release file is
362 # generated.
363- expected_suite_files = (
364- list(product(
365- ('main/source/Sources.gz', 'main/source/Sources.bz2'),
366- (self.times[2], None))) +
367- [('main/source/Release', None),
368- ('Release', self.times[2]), ('Release', None)])
369 self.assertHasSuiteFiles(
370- ('main/source/*', 'Release'), *expected_suite_files)
371+ ('main/source/*', 'Release'),
372+ ('main/source/Sources.gz', self.times[1], self.times[2]),
373+ ('main/source/Sources.gz', self.times[2], None),
374+ ('main/source/Sources.bz2', self.times[1], self.times[2]),
375+ ('main/source/Sources.bz2', self.times[2], None),
376+ ('main/source/Release', self.times[0], None),
377+ ('Release', self.times[1], self.times[2]),
378+ ('Release', self.times[2], None))
379 self.assertThat(suite_path('by-hash'), ByHashHasContents(top_contents))
380 self.assertThat(
381 suite_path('main', 'source', 'by-hash'),
382
383=== modified file 'lib/lp/soyuz/interfaces/archivefile.py'
384--- lib/lp/soyuz/interfaces/archivefile.py 2016-04-04 10:06:33 +0000
385+++ lib/lp/soyuz/interfaces/archivefile.py 2018-04-21 11:24:08 +0000
386@@ -1,4 +1,4 @@
387-# Copyright 2016 Canonical Ltd. This software is licensed under the
388+# Copyright 2016-2018 Canonical Ltd. This software is licensed under the
389 # GNU Affero General Public License version 3 (see the file LICENSE).
390
391 """Interface for a file in an archive."""
392@@ -49,6 +49,16 @@
393 title=_("The index file in the librarian."),
394 schema=ILibraryFileAlias, required=True, readonly=True)
395
396+ date_created = Datetime(
397+ title=_("The date when this file was created."),
398+ # XXX cjwatson 2018-04-17: Should be required=True, but we need to
399+ # backfill existing rows first.
400+ required=False, readonly=False)
401+
402+ date_superseded = Datetime(
403+ title=_("The date when this file was scheduled for future deletion."),
404+ required=False, readonly=False)
405+
406 scheduled_deletion_date = Datetime(
407 title=_("The date when this file should stop being published."),
408 required=False, readonly=False)
409@@ -79,15 +89,12 @@
410 :param content_type: The MIME type of the file.
411 """
412
413- def getByArchive(archive, container=None, path=None, only_condemned=False,
414- eager_load=False):
415+ def getByArchive(archive, container=None, path=None, eager_load=False):
416 """Get files in an archive.
417
418 :param archive: Return files in this `IArchive`.
419 :param container: Return only files with this container.
420 :param path: Return only files with this path.
421- :param only_condemned: If True, return only files with a
422- scheduled_deletion_date set.
423 :param eager_load: If True, preload related `LibraryFileAlias` and
424 `LibraryFileContent` rows.
425 :return: An iterable of matched files.
426@@ -99,8 +106,6 @@
427 :param archive_files: The `IArchiveFile`s to schedule for deletion.
428 :param stay_of_execution: A `timedelta`; schedule files for deletion
429 this amount of time in the future.
430- :return: An iterable of (container, path, sha256) for files that
431- were scheduled for deletion.
432 """
433
434 def unscheduleDeletion(archive_files):
435@@ -110,8 +115,6 @@
436 identical to a version that was previously condemned.
437
438 :param archive_files: The `IArchiveFile`s to unschedule for deletion.
439- :return: An iterable of (container, path, sha256) for files that
440- were unscheduled for deletion.
441 """
442
443 def getContainersToReap(archive, container_prefix=None):
444
445=== modified file 'lib/lp/soyuz/model/archivefile.py'
446--- lib/lp/soyuz/model/archivefile.py 2018-01-26 10:11:33 +0000
447+++ lib/lp/soyuz/model/archivefile.py 2018-04-21 11:24:08 +0000
448@@ -14,7 +14,6 @@
449 import os.path
450
451 import pytz
452-from storm.databases.postgres import Returning
453 from storm.locals import (
454 And,
455 DateTime,
456@@ -26,7 +25,10 @@
457 from zope.component import getUtility
458 from zope.interface import implementer
459
460-from lp.services.database.bulk import load_related
461+from lp.services.database.bulk import (
462+ create,
463+ load_related,
464+ )
465 from lp.services.database.constants import UTC_NOW
466 from lp.services.database.decoratedresultset import DecoratedResultSet
467 from lp.services.database.interfaces import (
468@@ -34,7 +36,6 @@
469 IStore,
470 )
471 from lp.services.database.sqlbase import convert_storm_clause_to_string
472-from lp.services.database.stormexpr import BulkUpdate
473 from lp.services.librarian.interfaces import ILibraryFileAliasSet
474 from lp.services.librarian.model import (
475 LibraryFileAlias,
476@@ -46,6 +47,15 @@
477 )
478
479
480+def _now():
481+ """Get the current transaction timestamp.
482+
483+ Tests can override this with a Storm expression or a `datetime` to
484+ simulate time changes.
485+ """
486+ return UTC_NOW
487+
488+
489 @implementer(IArchiveFile)
490 class ArchiveFile(Storm):
491 """See `IArchiveFile`."""
492@@ -64,6 +74,14 @@
493 library_file_id = Int(name='library_file', allow_none=False)
494 library_file = Reference(library_file_id, 'LibraryFileAlias.id')
495
496+ date_created = DateTime(
497+ # XXX cjwatson 2018-04-17: Should be allow_none=False, but we need
498+ # to backfill existing rows first.
499+ name='date_created', tzinfo=pytz.UTC, allow_none=True)
500+
501+ date_superseded = DateTime(
502+ name='date_superseded', tzinfo=pytz.UTC, allow_none=True)
503+
504 scheduled_deletion_date = DateTime(
505 name='scheduled_deletion_date', tzinfo=pytz.UTC, allow_none=True)
506
507@@ -74,18 +92,11 @@
508 self.container = container
509 self.path = path
510 self.library_file = library_file
511+ self.date_created = _now()
512+ self.date_superseded = None
513 self.scheduled_deletion_date = None
514
515
516-def _now():
517- """Get the current transaction timestamp.
518-
519- Tests can override this with a Storm expression or a `datetime` to
520- simulate time changes.
521- """
522- return UTC_NOW
523-
524-
525 @implementer(IArchiveFileSet)
526 class ArchiveFileSet:
527 """See `IArchiveFileSet`."""
528@@ -106,8 +117,7 @@
529 return cls.new(archive, container, path, library_file)
530
531 @staticmethod
532- def getByArchive(archive, container=None, path=None, only_condemned=False,
533- eager_load=False):
534+ def getByArchive(archive, container=None, path=None, eager_load=False):
535 """See `IArchiveFileSet`."""
536 clauses = [ArchiveFile.archive == archive]
537 # XXX cjwatson 2016-03-15: We'll need some more sophisticated way to
538@@ -116,8 +126,6 @@
539 clauses.append(ArchiveFile.container == container)
540 if path is not None:
541 clauses.append(ArchiveFile.path == path)
542- if only_condemned:
543- clauses.append(ArchiveFile.scheduled_deletion_date != None)
544 archive_files = IStore(ArchiveFile).find(ArchiveFile, *clauses)
545
546 def eager_load(rows):
547@@ -132,41 +140,23 @@
548 @staticmethod
549 def scheduleDeletion(archive_files, stay_of_execution):
550 """See `IArchiveFileSet`."""
551- clauses = [
552- ArchiveFile.id.is_in(
553- set(archive_file.id for archive_file in archive_files)),
554- ArchiveFile.library_file == LibraryFileAlias.id,
555- LibraryFileAlias.content == LibraryFileContent.id,
556- ]
557- new_date = _now() + stay_of_execution
558- return_columns = [
559- ArchiveFile.container, ArchiveFile.path, LibraryFileContent.sha256]
560- return list(IMasterStore(ArchiveFile).execute(Returning(
561- BulkUpdate(
562- {ArchiveFile.scheduled_deletion_date: new_date},
563- table=ArchiveFile,
564- values=[LibraryFileAlias, LibraryFileContent],
565- where=And(*clauses)),
566- columns=return_columns)))
567+ rows = IMasterStore(ArchiveFile).find(
568+ ArchiveFile, ArchiveFile.id.is_in(
569+ set(archive_file.id for archive_file in archive_files)))
570+ rows.set(
571+ date_superseded=_now(),
572+ scheduled_deletion_date=_now() + stay_of_execution)
573
574 @staticmethod
575 def unscheduleDeletion(archive_files):
576 """See `IArchiveFileSet`."""
577- clauses = [
578- ArchiveFile.id.is_in(
579- set(archive_file.id for archive_file in archive_files)),
580- ArchiveFile.library_file == LibraryFileAlias.id,
581- LibraryFileAlias.content == LibraryFileContent.id,
582- ]
583- return_columns = [
584- ArchiveFile.container, ArchiveFile.path, LibraryFileContent.sha256]
585- return list(IMasterStore(ArchiveFile).execute(Returning(
586- BulkUpdate(
587- {ArchiveFile.scheduled_deletion_date: None},
588- table=ArchiveFile,
589- values=[LibraryFileAlias, LibraryFileContent],
590- where=And(*clauses)),
591- columns=return_columns)))
592+ create(
593+ (ArchiveFile.archive, ArchiveFile.container, ArchiveFile.path,
594+ ArchiveFile.library_file, ArchiveFile.date_created,
595+ ArchiveFile.date_superseded, ArchiveFile.scheduled_deletion_date),
596+ [(archive_file.archive, archive_file.container, archive_file.path,
597+ archive_file.library_file, _now(), None, None)
598+ for archive_file in archive_files])
599
600 @staticmethod
601 def getContainersToReap(archive, container_prefix=None):
602
603=== modified file 'lib/lp/soyuz/tests/test_archivefile.py'
604--- lib/lp/soyuz/tests/test_archivefile.py 2018-04-21 11:24:07 +0000
605+++ lib/lp/soyuz/tests/test_archivefile.py 2018-04-21 11:24:08 +0000
606@@ -1,4 +1,4 @@
607-# Copyright 2016 Canonical Ltd. This software is licensed under the
608+# Copyright 2016-2018 Canonical Ltd. This software is licensed under the
609 # GNU Affero General Public License version 3 (see the file LICENSE).
610
611 """ArchiveFile tests."""
612@@ -11,6 +11,13 @@
613 import os
614
615 from storm.store import Store
616+from testtools.matchers import (
617+ AfterPreprocessing,
618+ Equals,
619+ Is,
620+ MatchesSetwise,
621+ MatchesStructure,
622+ )
623 import transaction
624 from zope.component import getUtility
625 from zope.security.proxy import removeSecurityProxy
626@@ -25,6 +32,14 @@
627 from lp.testing.layers import LaunchpadZopelessLayer
628
629
630+def read_library_file(library_file):
631+ library_file.open()
632+ try:
633+ return library_file.read()
634+ finally:
635+ library_file.close()
636+
637+
638 class TestArchiveFile(TestCaseWithFactory):
639
640 layer = LaunchpadZopelessLayer
641@@ -34,11 +49,15 @@
642 library_file = self.factory.makeLibraryFileAlias()
643 archive_file = getUtility(IArchiveFileSet).new(
644 archive, "foo", "dists/foo", library_file)
645- self.assertEqual(archive, archive_file.archive)
646- self.assertEqual("foo", archive_file.container)
647- self.assertEqual("dists/foo", archive_file.path)
648- self.assertEqual(library_file, archive_file.library_file)
649- self.assertIsNone(archive_file.scheduled_deletion_date)
650+ self.assertThat(archive_file, MatchesStructure(
651+ archive=Equals(archive),
652+ container=Equals("foo"),
653+ path=Equals("dists/foo"),
654+ library_file=Equals(library_file),
655+ date_created=Equals(
656+ get_transaction_timestamp(Store.of(archive_file))),
657+ date_superseded=Is(None),
658+ scheduled_deletion_date=Is(None)))
659
660 def test_newFromFile(self):
661 root = self.makeTemporaryDirectory()
662@@ -48,24 +67,24 @@
663 with open(os.path.join(root, "dists/foo"), "rb") as f:
664 archive_file = getUtility(IArchiveFileSet).newFromFile(
665 archive, "foo", "dists/foo", f, 4, "text/plain")
666+ now = get_transaction_timestamp(Store.of(archive_file))
667 transaction.commit()
668- self.assertEqual(archive, archive_file.archive)
669- self.assertEqual("foo", archive_file.container)
670- self.assertEqual("dists/foo", archive_file.path)
671- archive_file.library_file.open()
672- try:
673- self.assertEqual("abc\n", archive_file.library_file.read())
674- finally:
675- archive_file.library_file.close()
676- self.assertIsNone(archive_file.scheduled_deletion_date)
677+ self.assertThat(archive_file, MatchesStructure(
678+ archive=Equals(archive),
679+ container=Equals("foo"),
680+ path=Equals("dists/foo"),
681+ library_file=AfterPreprocessing(
682+ read_library_file, Equals("abc\n")),
683+ date_created=Equals(now),
684+ date_superseded=Is(None),
685+ scheduled_deletion_date=Is(None)))
686
687 def test_getByArchive(self):
688 archives = [self.factory.makeArchive(), self.factory.makeArchive()]
689 archive_files = []
690- now = get_transaction_timestamp(Store.of(archives[0]))
691 for archive in archives:
692 archive_files.append(self.factory.makeArchiveFile(
693- archive=archive, scheduled_deletion_date=now))
694+ archive=archive))
695 archive_files.append(self.factory.makeArchiveFile(
696 archive=archive, container="foo"))
697 archive_file_set = getUtility(IArchiveFileSet)
698@@ -83,9 +102,6 @@
699 self.assertContentEqual(
700 [], archive_file_set.getByArchive(archives[0], path="other"))
701 self.assertContentEqual(
702- [archive_files[0]],
703- archive_file_set.getByArchive(archives[0], only_condemned=True))
704- self.assertContentEqual(
705 archive_files[2:], archive_file_set.getByArchive(archives[1]))
706 self.assertContentEqual(
707 [archive_files[3]],
708@@ -98,19 +114,11 @@
709 archives[1], path=archive_files[3].path))
710 self.assertContentEqual(
711 [], archive_file_set.getByArchive(archives[1], path="other"))
712- self.assertContentEqual(
713- [archive_files[2]],
714- archive_file_set.getByArchive(archives[1], only_condemned=True))
715
716 def test_scheduleDeletion(self):
717 archive_files = [self.factory.makeArchiveFile() for _ in range(3)]
718- expected_rows = [
719- (archive_file.container, archive_file.path,
720- archive_file.library_file.content.sha256)
721- for archive_file in archive_files[:2]]
722- rows = getUtility(IArchiveFileSet).scheduleDeletion(
723+ getUtility(IArchiveFileSet).scheduleDeletion(
724 archive_files[:2], timedelta(days=1))
725- self.assertContentEqual(expected_rows, rows)
726 flush_database_caches()
727 tomorrow = (
728 get_transaction_timestamp(Store.of(archive_files[0])) +
729@@ -124,17 +132,32 @@
730 now = get_transaction_timestamp(Store.of(archive_files[0]))
731 for archive_file in archive_files:
732 removeSecurityProxy(archive_file).scheduled_deletion_date = now
733- expected_rows = [
734- (archive_file.container, archive_file.path,
735- archive_file.library_file.content.sha256)
736- for archive_file in archive_files[:2]]
737- rows = getUtility(IArchiveFileSet).unscheduleDeletion(
738- archive_files[:2])
739- self.assertContentEqual(expected_rows, rows)
740+ archive_file_set = getUtility(IArchiveFileSet)
741+ archive_file_set.unscheduleDeletion(archive_files[:2])
742 flush_database_caches()
743- self.assertIsNone(archive_files[0].scheduled_deletion_date)
744- self.assertIsNone(archive_files[1].scheduled_deletion_date)
745- self.assertEqual(now, archive_files[2].scheduled_deletion_date)
746+ self.assertThat(
747+ archive_file_set.getByArchive(
748+ archive_files[0].archive,
749+ container=archive_files[0].container,
750+ path=archive_files[0].path),
751+ MatchesSetwise(
752+ MatchesStructure(scheduled_deletion_date=Equals(now)),
753+ MatchesStructure(scheduled_deletion_date=Is(None))))
754+ self.assertThat(
755+ archive_file_set.getByArchive(
756+ archive_files[1].archive,
757+ container=archive_files[1].container,
758+ path=archive_files[1].path),
759+ MatchesSetwise(
760+ MatchesStructure(scheduled_deletion_date=Equals(now)),
761+ MatchesStructure(scheduled_deletion_date=Is(None))))
762+ self.assertThat(
763+ archive_file_set.getByArchive(
764+ archive_files[2].archive,
765+ container=archive_files[2].container,
766+ path=archive_files[2].path),
767+ MatchesSetwise(
768+ MatchesStructure(scheduled_deletion_date=Equals(now))))
769
770 def test_getContainersToReap(self):
771 archive = self.factory.makeArchive()