Merge lp:~jelmer/launchpad/128259-async-1 into lp:launchpad
- 128259-async-1
- Merge into devel
Status: | Merged |
---|---|
Approved by: | Jelmer Vernooij |
Approved revision: | no longer in the source branch. |
Merged at revision: | 11399 |
Proposed branch: | lp:~jelmer/launchpad/128259-async-1 |
Merge into: | lp:launchpad |
Prerequisite: | lp:~jelmer/launchpad/refactor-uploadprocessor |
Diff against target: |
559 lines (+230/-44) 4 files modified
lib/lp/archiveuploader/tests/test_uploadprocessor.py (+112/-7) lib/lp/archiveuploader/uploadpolicy.py (+2/-1) lib/lp/archiveuploader/uploadprocessor.py (+108/-33) lib/lp/soyuz/scripts/soyuz_process_upload.py (+8/-3) |
To merge this branch: | bzr merge lp:~jelmer/launchpad/128259-async-1 |
Related bugs: |
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
Abel Deuring (community) | code | Approve | |
Review via email: mp+32439@code.launchpad.net |
Commit message
Add --builds option to process-upload.
Description of the change
This branch adds a --builds option to process-upload.
This new option makes process-upload interpret the name of the directory in which the upload was found as <buildid>, tying the upload to an existing build record.
This makes it possible for uploads of builds to no longer happen synchronously inside of the buildd manager, but rather for the buildd manager to move them into a separate queue where they can be processed by a process-upload independently without blocking the buildd manager.
Paul Hummer (rockstar) wrote : | # |
Abel Deuring (adeuring) wrote : | # |
Hi Jelmer,
a nice branch! I have just a few formal nitpicks, see below.
> === modified file 'lib/lp/archiveuploader/tests/test_uploadprocessor.py'
> --- lib/lp/archiveuploader/tests/test_uploadprocessor.py
> +++ lib/lp/archiveuploader/tests/test_uploadprocessor.py
[...]
> +    def testSuccess(self):
> +        # Upload a source package
> +        upload_dir = self.queueUpload("bar_1.0-1")
> +        self.processUpload(self.uploadprocessor, upload_dir)
> +        source_pub = self.publishPackage('bar', '1.0-1')
> +        [build] = source_pub.createMissingBuilds()
> +
> +        # Move the source from the accepted queue.
> +        [queue_item] = self.breezy.getQueueItems(
> +            status=PackageUploadStatus.ACCEPTED,
> +            version="1.0-1", name="bar")
> +        queue_item.setDone()
> +
> +        build.jobStarted()
> +        build.builder = self.factory.makeBuilder()
> +
> +        # Upload and accept a binary for the primary archive source.
> +        shutil.rmtree(upload_dir)
> +        self.layer.txn.commit()
> +        leaf_name = "%d-%d" % (build.id, 60)
> +        upload_dir = self.queueUpload("bar_1.0-1_binary",
> +            queue_entry=leaf_name)
> +        self.options.context = 'buildd'
> +        self.options.builds = True
> +        self.uploadprocessor.processBuildUpload(self.incoming_folder, leaf_name)
> +        self.layer.txn.commit()
> +        self.assertEquals(BuildStatus.FULLYBUILT, build.status)
> +        log_lines = build.upload_log.read().splitlines()
> +        self.assertTrue(
> +            'INFO: Processing upload bar_1.0-1_i386.changes' in log_lines)
> +        self.assertTrue(
> +            'INFO: Committing the transaction and any mails associated with'
> +            'this upload.')
As already mentioned on IRC, this should be something like
assertTrue(
    'INFO: Committing the transaction and any mails associated with '
    'this upload.' in log_lines)
(And I assume that a ' ' is missing after 'with'.)
> === modified file 'lib/lp/archiveuploader/uploadprocessor.py'
> --- lib/lp/archiveuploader/uploadprocessor.py
> +++ lib/lp/archiveuploader/uploadprocessor.py
[...]
> @@ -184,7 +257,8 @@
> for changes_file in changes_files:
>             self.log.debug("Considering changefile %s" % changes_file)
>             try:
> -                result = self.processChangesFile(upload_path, changes_file)
> +                result = self.processChangesFile(upload_path, changes_file,
> +                    self.log)
Our style guide says that we should put all parameters into the second
(and following) line if they don't fit completely into the first line.
[...]
> @@ -376,11 +451,11 @@
> except UploadPolicyError, e:
>                 upload.reject("UploadPolicyError escaped upload.process: "
>                     "%s " % e)
> -                self.log.debug("UploadPolicyError escaped upload.process",
> +                logger.debug("UploadPolicyError escaped upload.process",
>                     exc_info=True)
Again, please move all parameters to the second line.
> except FatalUploadError, e:
> ...
Abel Deuring (adeuring) wrote : | # |
ah, one more detail: line 269 of the diff is
+ self.ztm.commit()
could you add a comment why the commit() call is necessary?
Preview Diff
1 | === modified file 'lib/lp/archiveuploader/tests/test_uploadprocessor.py' |
2 | --- lib/lp/archiveuploader/tests/test_uploadprocessor.py 2010-08-18 14:03:15 +0000 |
3 | +++ lib/lp/archiveuploader/tests/test_uploadprocessor.py 2010-08-19 14:33:31 +0000 |
4 | @@ -25,7 +25,9 @@ |
5 | from lp.app.errors import NotFoundError |
6 | from lp.archiveuploader.uploadpolicy import ( |
7 | AbstractUploadPolicy, IArchiveUploadPolicy, findPolicyByName) |
8 | -from lp.archiveuploader.uploadprocessor import UploadProcessor |
9 | +from lp.archiveuploader.uploadprocessor import (UploadProcessor, |
10 | + parse_build_upload_leaf_name) |
11 | +from lp.buildmaster.interfaces.buildbase import BuildStatus |
12 | from canonical.config import config |
13 | from canonical.database.constants import UTC_NOW |
14 | from lp.soyuz.model.archivepermission import ArchivePermission |
15 | @@ -61,7 +63,10 @@ |
16 | ISourcePackageNameSet) |
17 | from lp.services.mail import stub |
18 | from canonical.launchpad.testing.fakepackager import FakePackager |
19 | -from lp.testing import TestCaseWithFactory |
20 | +from lp.testing import ( |
21 | + TestCase, |
22 | + TestCaseWithFactory, |
23 | + ) |
24 | from lp.testing.mail_helpers import pop_notifications |
25 | from canonical.launchpad.webapp.errorlog import ErrorReportingUtility |
26 | from canonical.testing import LaunchpadZopelessLayer |
27 | @@ -125,6 +130,7 @@ |
28 | |
29 | self.options = MockOptions() |
30 | self.options.base_fsroot = self.queue_folder |
31 | + self.options.builds = True |
32 | self.options.leafname = None |
33 | self.options.distro = "ubuntu" |
34 | self.options.distroseries = None |
35 | @@ -150,7 +156,7 @@ |
36 | return policy |
37 | return UploadProcessor( |
38 | self.options.base_fsroot, self.options.dryrun, |
39 | - self.options.nomails, |
40 | + self.options.nomails, self.options.builds, |
41 | self.options.keep, getPolicy, txn, self.log) |
42 | |
43 | def publishPackage(self, packagename, version, source=True, |
44 | @@ -434,7 +440,7 @@ |
45 | # Move it |
46 | self.options.base_fsroot = testdir |
47 | up = self.getUploadProcessor(None) |
48 | - up.moveUpload(upload, target_name) |
49 | + up.moveUpload(upload, target_name, self.log) |
50 | |
51 | # Check it moved |
52 | self.assertTrue(os.path.exists(os.path.join(target, upload_name))) |
53 | @@ -455,7 +461,7 @@ |
54 | # Remove it |
55 | self.options.base_fsroot = testdir |
56 | up = self.getUploadProcessor(None) |
57 | - up.moveProcessedUpload(upload, "accepted") |
58 | + up.moveProcessedUpload(upload, "accepted", self.log) |
59 | |
60 | # Check it was removed, not moved |
61 | self.assertFalse(os.path.exists(os.path.join( |
62 | @@ -478,7 +484,7 @@ |
63 | # Move it |
64 | self.options.base_fsroot = testdir |
65 | up = self.getUploadProcessor(None) |
66 | - up.moveProcessedUpload(upload, "rejected") |
67 | + up.moveProcessedUpload(upload, "rejected", self.log) |
68 | |
69 | # Check it moved |
70 | self.assertTrue(os.path.exists(os.path.join(testdir, |
71 | @@ -501,7 +507,7 @@ |
72 | # Remove it |
73 | self.options.base_fsroot = testdir |
74 | up = self.getUploadProcessor(None) |
75 | - up.removeUpload(upload) |
76 | + up.removeUpload(upload, self.log) |
77 | |
78 | # Check it was removed, not moved |
79 | self.assertFalse(os.path.exists(os.path.join( |
80 | @@ -1316,6 +1322,7 @@ |
81 | used. |
82 | That exception will then initiate the creation of an OOPS report. |
83 | """ |
84 | + self.options.builds = False |
85 | processor = self.getUploadProcessor(self.layer.txn) |
86 | |
87 | upload_dir = self.queueUpload("foocomm_1.0-1_proposed") |
88 | @@ -1825,3 +1832,101 @@ |
89 | pubrec.datepublished = UTC_NOW |
90 | queue_item.setDone() |
91 | self.PGPSignatureNotPreserved(archive=self.breezy.main_archive) |
92 | + |
93 | + |
94 | +class TestBuildUploadProcessor(TestUploadProcessorBase): |
95 | + """Test that processing build uploads works.""" |
96 | + |
97 | + def setUp(self): |
98 | + super(TestBuildUploadProcessor, self).setUp() |
99 | + self.uploadprocessor = self.setupBreezyAndGetUploadProcessor() |
100 | + |
101 | + def testInvalidLeafName(self): |
102 | + upload_dir = self.queueUpload("bar_1.0-1") |
103 | + self.uploadprocessor.processBuildUpload(upload_dir, "bar_1.0-1") |
104 | + self.assertLogContains('Unable to extract build id from leaf ' |
105 | + 'name bar_1.0-1, skipping.') |
106 | + |
107 | + def testNoBuildEntry(self): |
108 | + upload_dir = self.queueUpload("bar_1.0-1", queue_entry="42-60") |
109 | + self.assertRaises(NotFoundError, self.uploadprocessor.processBuildUpload, |
110 | + upload_dir, "42-60") |
111 | + |
112 | + def testNoFiles(self): |
113 | + # Upload a source package |
114 | + upload_dir = self.queueUpload("bar_1.0-1") |
115 | + self.processUpload(self.uploadprocessor, upload_dir) |
116 | + source_pub = self.publishPackage('bar', '1.0-1') |
117 | + [build] = source_pub.createMissingBuilds() |
118 | + |
119 | + # Move the source from the accepted queue. |
120 | + [queue_item] = self.breezy.getQueueItems( |
121 | + status=PackageUploadStatus.ACCEPTED, |
122 | + version="1.0-1", name="bar") |
123 | + queue_item.setDone() |
124 | + |
125 | + build.jobStarted() |
126 | + build.builder = self.factory.makeBuilder() |
127 | + |
128 | + # Upload and accept a binary for the primary archive source. |
129 | + shutil.rmtree(upload_dir) |
130 | + self.layer.txn.commit() |
131 | + leaf_name = "%d-%d" % (build.id, 60) |
132 | + os.mkdir(os.path.join(self.incoming_folder, leaf_name)) |
133 | + self.options.context = 'buildd' |
134 | + self.options.builds = True |
135 | + self.uploadprocessor.processBuildUpload(self.incoming_folder, leaf_name) |
136 | + self.layer.txn.commit() |
137 | + self.assertEquals(BuildStatus.FAILEDTOUPLOAD, build.status) |
138 | + log_contents = build.upload_log.read() |
139 | + self.assertTrue('ERROR: Exception while processing upload ' |
140 | + in log_contents) |
141 | + self.assertTrue('DEBUG: Moving upload directory ' |
142 | + in log_contents) |
143 | + |
144 | + def testSuccess(self): |
145 | + # Upload a source package |
146 | + upload_dir = self.queueUpload("bar_1.0-1") |
147 | + self.processUpload(self.uploadprocessor, upload_dir) |
148 | + source_pub = self.publishPackage('bar', '1.0-1') |
149 | + [build] = source_pub.createMissingBuilds() |
150 | + |
151 | + # Move the source from the accepted queue. |
152 | + [queue_item] = self.breezy.getQueueItems( |
153 | + status=PackageUploadStatus.ACCEPTED, |
154 | + version="1.0-1", name="bar") |
155 | + queue_item.setDone() |
156 | + |
157 | + build.jobStarted() |
158 | + build.builder = self.factory.makeBuilder() |
159 | + |
160 | + # Upload and accept a binary for the primary archive source. |
161 | + shutil.rmtree(upload_dir) |
162 | + self.layer.txn.commit() |
163 | + leaf_name = "%d-%d" % (build.id, 60) |
164 | + upload_dir = self.queueUpload("bar_1.0-1_binary", |
165 | + queue_entry=leaf_name) |
166 | + self.options.context = 'buildd' |
167 | + self.options.builds = True |
168 | + self.uploadprocessor.processBuildUpload(self.incoming_folder, leaf_name) |
169 | + self.layer.txn.commit() |
170 | + self.assertEquals(BuildStatus.FULLYBUILT, build.status) |
171 | + log_lines = build.upload_log.read().splitlines() |
172 | + self.assertTrue( |
173 | + 'INFO: Processing upload bar_1.0-1_i386.changes' in log_lines) |
174 | + self.assertTrue( |
175 | + 'INFO: Committing the transaction and any mails associated with ' |
176 | + 'this upload.' in log_lines) |
177 | + |
178 | + |
179 | +class ParseBuildUploadLeafNameTests(TestCase): |
180 | + """Tests for parse_build_upload_leaf_name.""" |
181 | + |
182 | + def test_valid(self): |
183 | + self.assertEquals((42, 60), parse_build_upload_leaf_name("42-60")) |
184 | + |
185 | + def test_invalid_chars(self): |
186 | + self.assertRaises(ValueError, parse_build_upload_leaf_name, "a42-460") |
187 | + |
188 | + def test_no_dash(self): |
189 | + self.assertRaises(ValueError, parse_build_upload_leaf_name, "32") |
190 | |
191 | === modified file 'lib/lp/archiveuploader/uploadpolicy.py' |
192 | --- lib/lp/archiveuploader/uploadpolicy.py 2010-08-18 16:12:28 +0000 |
193 | +++ lib/lp/archiveuploader/uploadpolicy.py 2010-08-19 14:33:31 +0000 |
194 | @@ -290,7 +290,8 @@ |
195 | def setOptions(self, options): |
196 | AbstractUploadPolicy.setOptions(self, options) |
197 | # We require a buildid to be provided |
198 | - if getattr(options, 'buildid', None) is None: |
199 | + if (getattr(options, 'buildid', None) is None and |
200 | + not getattr(options, 'builds', False)): |
201 | raise UploadPolicyError("BuildID required for buildd context") |
202 | |
203 | def policySpecificChecks(self, upload): |
204 | |
205 | === modified file 'lib/lp/archiveuploader/uploadprocessor.py' |
206 | --- lib/lp/archiveuploader/uploadprocessor.py 2010-08-17 18:33:26 +0000 |
207 | +++ lib/lp/archiveuploader/uploadprocessor.py 2010-08-19 14:33:31 +0000 |
208 | @@ -47,7 +47,9 @@ |
209 | |
210 | __metaclass__ = type |
211 | |
212 | +import datetime |
213 | import os |
214 | +import pytz |
215 | import shutil |
216 | import stat |
217 | import sys |
218 | @@ -63,9 +65,12 @@ |
219 | BuildDaemonUploadPolicy, |
220 | SOURCE_PACKAGE_RECIPE_UPLOAD_POLICY_NAME, |
221 | UploadPolicyError) |
222 | +from lp.buildmaster.interfaces.buildbase import BuildStatus |
223 | from lp.soyuz.interfaces.archive import IArchiveSet, NoSuchPPA |
224 | +from lp.soyuz.interfaces.binarypackagebuild import IBinaryPackageBuildSet |
225 | from lp.registry.interfaces.distribution import IDistributionSet |
226 | from lp.registry.interfaces.person import IPersonSet |
227 | +from canonical.launchpad.scripts.logger import BufferLogger |
228 | from canonical.launchpad.webapp.errorlog import ( |
229 | ErrorReportingUtility, ScriptRequest) |
230 | |
231 | @@ -73,6 +78,7 @@ |
232 | |
233 | __all__ = [ |
234 | 'UploadProcessor', |
235 | + 'parse_build_upload_leaf_name', |
236 | 'parse_upload_path', |
237 | ] |
238 | |
239 | @@ -86,6 +92,19 @@ |
240 | """) |
241 | |
242 | |
243 | +def parse_build_upload_leaf_name(name): |
244 | + """Parse the leaf directory name of a build upload. |
245 | + |
246 | + :param name: Directory name. |
247 | + :return: Tuple with build id and build queue record id. |
248 | + """ |
249 | + (build_id_str, queue_record_str) = name.split("-", 1) |
250 | + try: |
251 | + return int(build_id_str), int(queue_record_str) |
252 | + except TypeError: |
253 | + raise ValueError |
254 | + |
255 | + |
256 | class UploadStatusEnum: |
257 | """Possible results from processing an upload. |
258 | |
259 | @@ -110,8 +129,8 @@ |
260 | class UploadProcessor: |
261 | """Responsible for processing uploads. See module docstring.""" |
262 | |
263 | - def __init__(self, base_fsroot, dry_run, no_mails, keep, policy_for_distro, |
264 | - ztm, log): |
265 | + def __init__(self, base_fsroot, dry_run, no_mails, builds, keep, |
266 | + policy_for_distro, ztm, log): |
267 | """Create a new upload processor. |
268 | |
269 | :param base_fsroot: Root path for queue to use |
270 | @@ -130,6 +149,7 @@ |
271 | self.last_processed_upload = None |
272 | self.log = log |
273 | self.no_mails = no_mails |
274 | + self.builds = builds |
275 | self._getPolicyForDistro = policy_for_distro |
276 | self.ztm = ztm |
277 | |
278 | @@ -161,11 +181,63 @@ |
279 | self.log.debug("Skipping %s -- does not match %s" % ( |
280 | upload, leaf_name)) |
281 | continue |
282 | - self.processUpload(fsroot, upload) |
283 | + if self.builds: |
284 | + # Upload directories contain build results, |
285 | + # directories are named after build ids. |
286 | + self.processBuildUpload(fsroot, upload) |
287 | + else: |
288 | + self.processUpload(fsroot, upload) |
289 | finally: |
290 | self.log.debug("Rolling back any remaining transactions.") |
291 | self.ztm.abort() |
292 | |
293 | + def processBuildUpload(self, fsroot, upload): |
294 | + """Process an upload that is the result of a build. |
295 | + |
296 | + The name of the leaf is the build id of the build. |
297 | + Build uploads always contain a single package per leaf. |
298 | + """ |
299 | + try: |
300 | + (build_id, build_queue_record_id) = parse_build_upload_leaf_name( |
301 | + upload) |
302 | + except ValueError: |
303 | + self.log.warn("Unable to extract build id from leaf name %s," |
304 | + " skipping." % upload) |
305 | + return |
306 | + build = getUtility(IBinaryPackageBuildSet).getByBuildID(int(build_id)) |
307 | + self.log.debug("Build %s found" % build.id) |
308 | + logger = BufferLogger() |
309 | + upload_path = os.path.join(fsroot, upload) |
310 | + try: |
311 | + [changes_file] = self.locateChangesFiles(upload_path) |
312 | + logger.debug("Considering changefile %s" % changes_file) |
313 | + result = self.processChangesFile( |
314 | + upload_path, changes_file, logger) |
315 | + except (KeyboardInterrupt, SystemExit): |
316 | + raise |
317 | + except: |
318 | + info = sys.exc_info() |
319 | + message = ( |
320 | + 'Exception while processing upload %s' % upload_path) |
321 | + properties = [('error-explanation', message)] |
322 | + request = ScriptRequest(properties) |
323 | + error_utility = ErrorReportingUtility() |
324 | + error_utility.raising(info, request) |
325 | + logger.error('%s (%s)' % (message, request.oopsid)) |
326 | + result = UploadStatusEnum.FAILED |
327 | + destination = { |
328 | + UploadStatusEnum.FAILED: "failed", |
329 | + UploadStatusEnum.REJECTED: "rejected", |
330 | + UploadStatusEnum.ACCEPTED: "accepted"}[result] |
331 | + self.moveProcessedUpload(upload_path, destination, logger) |
332 | + if not (result == UploadStatusEnum.ACCEPTED and |
333 | + build.verifySuccessfulUpload() and |
334 | + build.status == BuildStatus.FULLYBUILT): |
335 | + build.status = BuildStatus.FAILEDTOUPLOAD |
336 | + build.date_finished = datetime.datetime.now(pytz.UTC) |
337 | + build.notify(extra_info="Uploading build %s failed." % upload) |
338 | + build.storeUploadLog(logger.buffer.getvalue()) |
339 | + |
340 | def processUpload(self, fsroot, upload): |
341 | """Process an upload's changes files, and move it to a new directory. |
342 | |
343 | @@ -186,7 +258,8 @@ |
344 | for changes_file in changes_files: |
345 | self.log.debug("Considering changefile %s" % changes_file) |
346 | try: |
347 | - result = self.processChangesFile(upload_path, changes_file) |
348 | + result = self.processChangesFile( |
349 | + upload_path, changes_file, self.log) |
350 | if result == UploadStatusEnum.FAILED: |
351 | some_failed = True |
352 | elif result == UploadStatusEnum.REJECTED: |
353 | @@ -216,8 +289,7 @@ |
354 | # There were no changes files at all. We consider |
355 | # the upload to be failed in this case. |
356 | destination = "failed" |
357 | - |
358 | - self.moveProcessedUpload(upload_path, destination) |
359 | + self.moveProcessedUpload(upload_path, destination, self.log) |
360 | |
361 | def locateDirectories(self, fsroot): |
362 | """Return a list of upload directories in a given queue. |
363 | @@ -280,7 +352,7 @@ |
364 | os.path.join(relative_path, filename)) |
365 | return self.orderFilenames(changes_files) |
366 | |
367 | - def processChangesFile(self, upload_path, changes_file): |
368 | + def processChangesFile(self, upload_path, changes_file, logger=None): |
369 | """Process a single changes file. |
370 | |
371 | This is done by obtaining the appropriate upload policy (according |
372 | @@ -297,6 +369,8 @@ |
373 | Returns a value from UploadStatusEnum, or re-raises an exception |
374 | from NascentUpload. |
375 | """ |
376 | + if logger is None: |
377 | + logger = self.log |
378 | # Calculate the distribution from the path within the upload |
379 | # Reject the upload since we could not process the path, |
380 | # Store the exception information as a rejection message. |
381 | @@ -333,7 +407,7 @@ |
382 | "Please check the documentation at " |
383 | "https://help.launchpad.net/Packaging/PPA#Uploading " |
384 | "and update your configuration."))) |
385 | - self.log.debug("Finding fresh policy") |
386 | + logger.debug("Finding fresh policy") |
387 | policy = self._getPolicyForDistro(distribution) |
388 | policy.archive = archive |
389 | |
390 | @@ -360,7 +434,7 @@ |
391 | "Invalid upload path (%s) for this policy (%s)" % |
392 | (relative_path, policy.name)) |
393 | upload.reject(error_message) |
394 | - self.log.error(error_message) |
395 | + logger.error(error_message) |
396 | |
397 | # Reject upload with path processing errors. |
398 | if upload_path_error is not None: |
399 | @@ -370,7 +444,7 @@ |
400 | self.last_processed_upload = upload |
401 | |
402 | try: |
403 | - self.log.info("Processing upload %s" % upload.changes.filename) |
404 | + logger.info("Processing upload %s" % upload.changes.filename) |
405 | result = UploadStatusEnum.ACCEPTED |
406 | |
407 | try: |
408 | @@ -378,12 +452,12 @@ |
409 | except UploadPolicyError, e: |
410 | upload.reject("UploadPolicyError escaped upload.process: " |
411 | "%s " % e) |
412 | - self.log.debug("UploadPolicyError escaped upload.process", |
413 | - exc_info=True) |
414 | + logger.debug( |
415 | + "UploadPolicyError escaped upload.process", exc_info=True) |
416 | except FatalUploadError, e: |
417 | upload.reject("UploadError escaped upload.process: %s" % e) |
418 | - self.log.debug("UploadError escaped upload.process", |
419 | - exc_info=True) |
420 | + logger.debug( |
421 | + "UploadError escaped upload.process", exc_info=True) |
422 | except (KeyboardInterrupt, SystemExit): |
423 | raise |
424 | except EarlyReturnUploadError: |
425 | @@ -400,7 +474,7 @@ |
426 | # the new exception will be handled by the caller just like |
427 | # the one we caught would have been, by failing the upload |
428 | # with no email. |
429 | - self.log.exception("Unhandled exception processing upload") |
430 | + logger.exception("Unhandled exception processing upload") |
431 | upload.reject("Unhandled exception processing upload: %s" % e) |
432 | |
433 | # XXX julian 2007-05-25 bug=29744: |
434 | @@ -418,21 +492,22 @@ |
435 | successful = upload.do_accept(notify=notify) |
436 | if not successful: |
437 | result = UploadStatusEnum.REJECTED |
438 | - self.log.info("Rejection during accept. " |
439 | - "Aborting partial accept.") |
440 | + logger.info( |
441 | + "Rejection during accept. Aborting partial accept.") |
442 | self.ztm.abort() |
443 | |
444 | if upload.is_rejected: |
445 | - self.log.warn("Upload was rejected:") |
446 | + logger.warn("Upload was rejected:") |
447 | for msg in upload.rejections: |
448 | - self.log.warn("\t%s" % msg) |
449 | + logger.warn("\t%s" % msg) |
450 | |
451 | if self.dry_run: |
452 | - self.log.info("Dry run, aborting transaction.") |
453 | + logger.info("Dry run, aborting transaction.") |
454 | self.ztm.abort() |
455 | else: |
456 | - self.log.info("Committing the transaction and any mails " |
457 | - "associated with this upload.") |
458 | + logger.info( |
459 | + "Committing the transaction and any mails associated " |
460 | + "with this upload.") |
461 | self.ztm.commit() |
462 | except: |
463 | self.ztm.abort() |
464 | @@ -440,47 +515,47 @@ |
465 | |
466 | return result |
467 | |
468 | - def removeUpload(self, upload): |
469 | + def removeUpload(self, upload, logger): |
470 | """Remove an upload that has succesfully been processed. |
471 | |
472 | This includes moving the given upload directory and moving the |
473 | matching .distro file, if it exists. |
474 | """ |
475 | if self.keep or self.dry_run: |
476 | - self.log.debug("Keeping contents untouched") |
477 | + logger.debug("Keeping contents untouched") |
478 | return |
479 | |
480 | - self.log.debug("Removing upload directory %s", upload) |
481 | + logger.debug("Removing upload directory %s", upload) |
482 | shutil.rmtree(upload) |
483 | |
484 | distro_filename = upload + ".distro" |
485 | if os.path.isfile(distro_filename): |
486 | - self.log.debug("Removing distro file %s", distro_filename) |
487 | + logger.debug("Removing distro file %s", distro_filename) |
488 | os.remove(distro_filename) |
489 | |
490 | - def moveProcessedUpload(self, upload_path, destination): |
491 | + def moveProcessedUpload(self, upload_path, destination, logger): |
492 | """Move or remove the upload depending on the status of the upload. |
493 | """ |
494 | if destination == "accepted": |
495 | - self.removeUpload(upload_path) |
496 | + self.removeUpload(upload_path, logger) |
497 | else: |
498 | - self.moveUpload(upload_path, destination) |
499 | + self.moveUpload(upload_path, destination, logger) |
500 | |
501 | - def moveUpload(self, upload, subdir_name): |
502 | + def moveUpload(self, upload, subdir_name, logger): |
503 | """Move the upload to the named subdir of the root, eg 'accepted'. |
504 | |
505 | This includes moving the given upload directory and moving the |
506 | matching .distro file, if it exists. |
507 | """ |
508 | if self.keep or self.dry_run: |
509 | - self.log.debug("Keeping contents untouched") |
510 | + logger.debug("Keeping contents untouched") |
511 | return |
512 | |
513 | pathname = os.path.basename(upload) |
514 | |
515 | target_path = os.path.join( |
516 | self.base_fsroot, subdir_name, pathname) |
517 | - self.log.debug("Moving upload directory %s to %s" % |
518 | + logger.debug("Moving upload directory %s to %s" % |
519 | (upload, target_path)) |
520 | shutil.move(upload, target_path) |
521 | |
522 | @@ -488,7 +563,7 @@ |
523 | if os.path.isfile(distro_filename): |
524 | target_path = os.path.join(self.base_fsroot, subdir_name, |
525 | os.path.basename(distro_filename)) |
526 | - self.log.debug("Moving distro file %s to %s" % (distro_filename, |
527 | + logger.debug("Moving distro file %s to %s" % (distro_filename, |
528 | target_path)) |
529 | shutil.move(distro_filename, target_path) |
530 | |
531 | |
532 | === modified file 'lib/lp/soyuz/scripts/soyuz_process_upload.py' |
533 | --- lib/lp/soyuz/scripts/soyuz_process_upload.py 2010-08-18 14:03:15 +0000 |
534 | +++ lib/lp/soyuz/scripts/soyuz_process_upload.py 2010-08-19 14:33:31 +0000 |
535 | @@ -35,6 +35,11 @@ |
536 | help="Whether to suppress the sending of mails or not.") |
537 | |
538 | self.parser.add_option( |
539 | + "--builds", action="store_true", |
540 | + dest="builds", default=False, |
541 | + help="Whether to interpret leaf names as build ids.") |
542 | + |
543 | + self.parser.add_option( |
544 | "-J", "--just-leaf", action="store", dest="leafname", |
545 | default=None, help="A specific leaf dir to limit to.", |
546 | metavar = "LEAF") |
547 | @@ -80,9 +85,9 @@ |
548 | policy = findPolicyByName(self.options.context) |
549 | policy.setOptions(self.options) |
550 | return policy |
551 | - processor = UploadProcessor(self.options.base_fsroot, |
552 | - self.options.dryrun, self.options.nomails, self.options.keep, |
553 | - getPolicy, self.txn, self.logger) |
554 | + processor = UploadProcessor(self.options.base_fsroot, |
555 | + self.options.dryrun, self.options.nomails, self.options.builds, |
556 | + self.options.keep, getPolicy, self.txn, self.logger) |
557 | processor.processUploadQueue(self.options.leafname) |
558 | |
559 | @property |
It looks like that there are conflicts with this branch. Could you please resolve them?