Merge lp:~jelmer/launchpad/128259-async-1 into lp:launchpad
- 128259-async-1
- Merge into devel
Status: | Merged |
---|---|
Approved by: | Jelmer Vernooij |
Approved revision: | no longer in the source branch. |
Merged at revision: | 11399 |
Proposed branch: | lp:~jelmer/launchpad/128259-async-1 |
Merge into: | lp:launchpad |
Prerequisite: | lp:~jelmer/launchpad/refactor-uploadprocessor |
Diff against target: |
559 lines (+230/-44) 4 files modified
lib/lp/archiveuploader/tests/test_uploadprocessor.py (+112/-7) lib/lp/archiveuploader/uploadpolicy.py (+2/-1) lib/lp/archiveuploader/uploadprocessor.py (+108/-33) lib/lp/soyuz/scripts/soyuz_process_upload.py (+8/-3) |
To merge this branch: | bzr merge lp:~jelmer/launchpad/128259-async-1 |
Related bugs: |
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
Abel Deuring (community) | code | Approve | |
Review via email: mp+32439@code.launchpad.net |
Commit message
Add --builds option to process-upload.
Description of the change
This branch adds a --builds option to process-upload.
This new option makes process-upload interpret the name of the directory in which the upload was found as a <buildid>.
This makes it possible for uploads of builds to no longer happen synchronously inside of the buildd manager, but rather for the buildd manager to move them into a separate queue where they can be processed by a process-upload independently without blocking the buildd manager.
Paul Hummer (rockstar) wrote : | # |
Abel Deuring (adeuring) wrote : | # |
Hi Jelmer,
a nice branch! I have just a few formal nitpicks, see below.
> === modified file 'lib/lp/archiveuploader/tests/test_uploadprocessor.py'
> --- lib/lp/archiveuploader/tests/test_uploadprocessor.py 2010-08-18 14:03:15 +0000
> +++ lib/lp/archiveuploader/tests/test_uploadprocessor.py 2010-08-19 14:33:31 +0000
[...]
> +    def testSuccess(self):
> +        # Upload a source package
> +        upload_dir = self.queueUpload("bar_1.0-1")
> +        self.processUpload(self.uploadprocessor, upload_dir)
> +        source_pub = self.publishPackage('bar', '1.0-1')
> +        [build] = source_pub.createMissingBuilds()
> +
> +        # Move the source from the accepted queue.
> +        [queue_item] = self.breezy.getQueueItems(
> +            status=PackageUploadStatus.ACCEPTED,
> +            version="1.0-1", name="bar")
> +        queue_item.setDone()
> +
> +        build.jobStarted()
> +        build.builder = self.factory.makeBuilder()
> +
> +        # Upload and accept a binary for the primary archive source.
> +        shutil.rmtree(upload_dir)
> +        self.layer.txn.commit()
> +        leaf_name = "%d-%d" % (build.id, 60)
> +        upload_dir = self.queueUpload("bar_1.0-1_binary",
> +            queue_entry=leaf_name)
> +        self.options.context = 'buildd'
> +        self.options.builds = True
> +        self.uploadprocessor.processBuildUpload(self.incoming_folder, leaf_name)
> +        self.layer.txn.commit()
> +        self.assertEquals(BuildStatus.FULLYBUILT, build.status)
> +        log_lines = build.upload_log.read().splitlines()
> +        self.assertTrue(
> +            'INFO: Processing upload bar_1.0-1_i386.changes' in log_lines)
> +        self.assertTrue(
> +            'INFO: Committing the transaction and any mails associated with'
> +            'this upload.')
As already mentioned on IRC, this should be something like
assertTrue(
    'INFO: Committing the transaction and any mails associated with '
    'this upload.' in log_lines)
(And I assume that a ' ' is missing after 'with'.)
> === modified file 'lib/lp/archiveuploader/uploadprocessor.py'
> --- lib/lp/archiveuploader/uploadprocessor.py 2010-08-17 18:33:26 +0000
> +++ lib/lp/archiveuploader/uploadprocessor.py 2010-08-19 14:33:31 +0000
[...]
> @@ -184,7 +257,8 @@
>          for changes_file in changes_files:
>              self.log.debug("Considering changefile %s" % changes_file)
>              try:
> -                result = self.processChangesFile(upload_path, changes_file)
> +                result = self.processChangesFile(upload_path, changes_file,
> +                    self.log)
Our style guide says that we should put all parameters into the second
(and following) line if they don't fit completely into the first line.
[...]
> @@ -376,11 +451,11 @@
>              except UploadPolicyError, e:
>                  upload.reject("UploadPolicyError escaped upload.process: "
>                                "%s " % e)
> -                self.log.debug("UploadPolicyError escaped upload.process",
> +                logger.debug("UploadPolicyError escaped upload.process",
>                                exc_info=True)
Again, please move all parameters to the second line.
> except FatalUploadError, e:
> ...
Abel Deuring (adeuring) wrote : | # |
ah, one more detail: line 269 of the diff is
+ self.ztm.commit()
could you add a comment why the commit() call is necessary?
Preview Diff
1 | === modified file 'lib/lp/archiveuploader/tests/test_uploadprocessor.py' | |||
2 | --- lib/lp/archiveuploader/tests/test_uploadprocessor.py 2010-08-18 14:03:15 +0000 | |||
3 | +++ lib/lp/archiveuploader/tests/test_uploadprocessor.py 2010-08-19 14:33:31 +0000 | |||
4 | @@ -25,7 +25,9 @@ | |||
5 | 25 | from lp.app.errors import NotFoundError | 25 | from lp.app.errors import NotFoundError |
6 | 26 | from lp.archiveuploader.uploadpolicy import ( | 26 | from lp.archiveuploader.uploadpolicy import ( |
7 | 27 | AbstractUploadPolicy, IArchiveUploadPolicy, findPolicyByName) | 27 | AbstractUploadPolicy, IArchiveUploadPolicy, findPolicyByName) |
9 | 28 | from lp.archiveuploader.uploadprocessor import UploadProcessor | 28 | from lp.archiveuploader.uploadprocessor import (UploadProcessor, |
10 | 29 | parse_build_upload_leaf_name) | ||
11 | 30 | from lp.buildmaster.interfaces.buildbase import BuildStatus | ||
12 | 29 | from canonical.config import config | 31 | from canonical.config import config |
13 | 30 | from canonical.database.constants import UTC_NOW | 32 | from canonical.database.constants import UTC_NOW |
14 | 31 | from lp.soyuz.model.archivepermission import ArchivePermission | 33 | from lp.soyuz.model.archivepermission import ArchivePermission |
15 | @@ -61,7 +63,10 @@ | |||
16 | 61 | ISourcePackageNameSet) | 63 | ISourcePackageNameSet) |
17 | 62 | from lp.services.mail import stub | 64 | from lp.services.mail import stub |
18 | 63 | from canonical.launchpad.testing.fakepackager import FakePackager | 65 | from canonical.launchpad.testing.fakepackager import FakePackager |
20 | 64 | from lp.testing import TestCaseWithFactory | 66 | from lp.testing import ( |
21 | 67 | TestCase, | ||
22 | 68 | TestCaseWithFactory, | ||
23 | 69 | ) | ||
24 | 65 | from lp.testing.mail_helpers import pop_notifications | 70 | from lp.testing.mail_helpers import pop_notifications |
25 | 66 | from canonical.launchpad.webapp.errorlog import ErrorReportingUtility | 71 | from canonical.launchpad.webapp.errorlog import ErrorReportingUtility |
26 | 67 | from canonical.testing import LaunchpadZopelessLayer | 72 | from canonical.testing import LaunchpadZopelessLayer |
27 | @@ -125,6 +130,7 @@ | |||
28 | 125 | 130 | ||
29 | 126 | self.options = MockOptions() | 131 | self.options = MockOptions() |
30 | 127 | self.options.base_fsroot = self.queue_folder | 132 | self.options.base_fsroot = self.queue_folder |
31 | 133 | self.options.builds = True | ||
32 | 128 | self.options.leafname = None | 134 | self.options.leafname = None |
33 | 129 | self.options.distro = "ubuntu" | 135 | self.options.distro = "ubuntu" |
34 | 130 | self.options.distroseries = None | 136 | self.options.distroseries = None |
35 | @@ -150,7 +156,7 @@ | |||
36 | 150 | return policy | 156 | return policy |
37 | 151 | return UploadProcessor( | 157 | return UploadProcessor( |
38 | 152 | self.options.base_fsroot, self.options.dryrun, | 158 | self.options.base_fsroot, self.options.dryrun, |
40 | 153 | self.options.nomails, | 159 | self.options.nomails, self.options.builds, |
41 | 154 | self.options.keep, getPolicy, txn, self.log) | 160 | self.options.keep, getPolicy, txn, self.log) |
42 | 155 | 161 | ||
43 | 156 | def publishPackage(self, packagename, version, source=True, | 162 | def publishPackage(self, packagename, version, source=True, |
44 | @@ -434,7 +440,7 @@ | |||
45 | 434 | # Move it | 440 | # Move it |
46 | 435 | self.options.base_fsroot = testdir | 441 | self.options.base_fsroot = testdir |
47 | 436 | up = self.getUploadProcessor(None) | 442 | up = self.getUploadProcessor(None) |
49 | 437 | up.moveUpload(upload, target_name) | 443 | up.moveUpload(upload, target_name, self.log) |
50 | 438 | 444 | ||
51 | 439 | # Check it moved | 445 | # Check it moved |
52 | 440 | self.assertTrue(os.path.exists(os.path.join(target, upload_name))) | 446 | self.assertTrue(os.path.exists(os.path.join(target, upload_name))) |
53 | @@ -455,7 +461,7 @@ | |||
54 | 455 | # Remove it | 461 | # Remove it |
55 | 456 | self.options.base_fsroot = testdir | 462 | self.options.base_fsroot = testdir |
56 | 457 | up = self.getUploadProcessor(None) | 463 | up = self.getUploadProcessor(None) |
58 | 458 | up.moveProcessedUpload(upload, "accepted") | 464 | up.moveProcessedUpload(upload, "accepted", self.log) |
59 | 459 | 465 | ||
60 | 460 | # Check it was removed, not moved | 466 | # Check it was removed, not moved |
61 | 461 | self.assertFalse(os.path.exists(os.path.join( | 467 | self.assertFalse(os.path.exists(os.path.join( |
62 | @@ -478,7 +484,7 @@ | |||
63 | 478 | # Move it | 484 | # Move it |
64 | 479 | self.options.base_fsroot = testdir | 485 | self.options.base_fsroot = testdir |
65 | 480 | up = self.getUploadProcessor(None) | 486 | up = self.getUploadProcessor(None) |
67 | 481 | up.moveProcessedUpload(upload, "rejected") | 487 | up.moveProcessedUpload(upload, "rejected", self.log) |
68 | 482 | 488 | ||
69 | 483 | # Check it moved | 489 | # Check it moved |
70 | 484 | self.assertTrue(os.path.exists(os.path.join(testdir, | 490 | self.assertTrue(os.path.exists(os.path.join(testdir, |
71 | @@ -501,7 +507,7 @@ | |||
72 | 501 | # Remove it | 507 | # Remove it |
73 | 502 | self.options.base_fsroot = testdir | 508 | self.options.base_fsroot = testdir |
74 | 503 | up = self.getUploadProcessor(None) | 509 | up = self.getUploadProcessor(None) |
76 | 504 | up.removeUpload(upload) | 510 | up.removeUpload(upload, self.log) |
77 | 505 | 511 | ||
78 | 506 | # Check it was removed, not moved | 512 | # Check it was removed, not moved |
79 | 507 | self.assertFalse(os.path.exists(os.path.join( | 513 | self.assertFalse(os.path.exists(os.path.join( |
80 | @@ -1316,6 +1322,7 @@ | |||
81 | 1316 | used. | 1322 | used. |
82 | 1317 | That exception will then initiate the creation of an OOPS report. | 1323 | That exception will then initiate the creation of an OOPS report. |
83 | 1318 | """ | 1324 | """ |
84 | 1325 | self.options.builds = False | ||
85 | 1319 | processor = self.getUploadProcessor(self.layer.txn) | 1326 | processor = self.getUploadProcessor(self.layer.txn) |
86 | 1320 | 1327 | ||
87 | 1321 | upload_dir = self.queueUpload("foocomm_1.0-1_proposed") | 1328 | upload_dir = self.queueUpload("foocomm_1.0-1_proposed") |
88 | @@ -1825,3 +1832,101 @@ | |||
89 | 1825 | pubrec.datepublished = UTC_NOW | 1832 | pubrec.datepublished = UTC_NOW |
90 | 1826 | queue_item.setDone() | 1833 | queue_item.setDone() |
91 | 1827 | self.PGPSignatureNotPreserved(archive=self.breezy.main_archive) | 1834 | self.PGPSignatureNotPreserved(archive=self.breezy.main_archive) |
92 | 1835 | |||
93 | 1836 | |||
94 | 1837 | class TestBuildUploadProcessor(TestUploadProcessorBase): | ||
95 | 1838 | """Test that processing build uploads works.""" | ||
96 | 1839 | |||
97 | 1840 | def setUp(self): | ||
98 | 1841 | super(TestBuildUploadProcessor, self).setUp() | ||
99 | 1842 | self.uploadprocessor = self.setupBreezyAndGetUploadProcessor() | ||
100 | 1843 | |||
101 | 1844 | def testInvalidLeafName(self): | ||
102 | 1845 | upload_dir = self.queueUpload("bar_1.0-1") | ||
103 | 1846 | self.uploadprocessor.processBuildUpload(upload_dir, "bar_1.0-1") | ||
104 | 1847 | self.assertLogContains('Unable to extract build id from leaf ' | ||
105 | 1848 | 'name bar_1.0-1, skipping.') | ||
106 | 1849 | |||
107 | 1850 | def testNoBuildEntry(self): | ||
108 | 1851 | upload_dir = self.queueUpload("bar_1.0-1", queue_entry="42-60") | ||
109 | 1852 | self.assertRaises(NotFoundError, self.uploadprocessor.processBuildUpload, | ||
110 | 1853 | upload_dir, "42-60") | ||
111 | 1854 | |||
112 | 1855 | def testNoFiles(self): | ||
113 | 1856 | # Upload a source package | ||
114 | 1857 | upload_dir = self.queueUpload("bar_1.0-1") | ||
115 | 1858 | self.processUpload(self.uploadprocessor, upload_dir) | ||
116 | 1859 | source_pub = self.publishPackage('bar', '1.0-1') | ||
117 | 1860 | [build] = source_pub.createMissingBuilds() | ||
118 | 1861 | |||
119 | 1862 | # Move the source from the accepted queue. | ||
120 | 1863 | [queue_item] = self.breezy.getQueueItems( | ||
121 | 1864 | status=PackageUploadStatus.ACCEPTED, | ||
122 | 1865 | version="1.0-1", name="bar") | ||
123 | 1866 | queue_item.setDone() | ||
124 | 1867 | |||
125 | 1868 | build.jobStarted() | ||
126 | 1869 | build.builder = self.factory.makeBuilder() | ||
127 | 1870 | |||
128 | 1871 | # Upload and accept a binary for the primary archive source. | ||
129 | 1872 | shutil.rmtree(upload_dir) | ||
130 | 1873 | self.layer.txn.commit() | ||
131 | 1874 | leaf_name = "%d-%d" % (build.id, 60) | ||
132 | 1875 | os.mkdir(os.path.join(self.incoming_folder, leaf_name)) | ||
133 | 1876 | self.options.context = 'buildd' | ||
134 | 1877 | self.options.builds = True | ||
135 | 1878 | self.uploadprocessor.processBuildUpload(self.incoming_folder, leaf_name) | ||
136 | 1879 | self.layer.txn.commit() | ||
137 | 1880 | self.assertEquals(BuildStatus.FAILEDTOUPLOAD, build.status) | ||
138 | 1881 | log_contents = build.upload_log.read() | ||
139 | 1882 | self.assertTrue('ERROR: Exception while processing upload ' | ||
140 | 1883 | in log_contents) | ||
141 | 1884 | self.assertTrue('DEBUG: Moving upload directory ' | ||
142 | 1885 | in log_contents) | ||
143 | 1886 | |||
144 | 1887 | def testSuccess(self): | ||
145 | 1888 | # Upload a source package | ||
146 | 1889 | upload_dir = self.queueUpload("bar_1.0-1") | ||
147 | 1890 | self.processUpload(self.uploadprocessor, upload_dir) | ||
148 | 1891 | source_pub = self.publishPackage('bar', '1.0-1') | ||
149 | 1892 | [build] = source_pub.createMissingBuilds() | ||
150 | 1893 | |||
151 | 1894 | # Move the source from the accepted queue. | ||
152 | 1895 | [queue_item] = self.breezy.getQueueItems( | ||
153 | 1896 | status=PackageUploadStatus.ACCEPTED, | ||
154 | 1897 | version="1.0-1", name="bar") | ||
155 | 1898 | queue_item.setDone() | ||
156 | 1899 | |||
157 | 1900 | build.jobStarted() | ||
158 | 1901 | build.builder = self.factory.makeBuilder() | ||
159 | 1902 | |||
160 | 1903 | # Upload and accept a binary for the primary archive source. | ||
161 | 1904 | shutil.rmtree(upload_dir) | ||
162 | 1905 | self.layer.txn.commit() | ||
163 | 1906 | leaf_name = "%d-%d" % (build.id, 60) | ||
164 | 1907 | upload_dir = self.queueUpload("bar_1.0-1_binary", | ||
165 | 1908 | queue_entry=leaf_name) | ||
166 | 1909 | self.options.context = 'buildd' | ||
167 | 1910 | self.options.builds = True | ||
168 | 1911 | self.uploadprocessor.processBuildUpload(self.incoming_folder, leaf_name) | ||
169 | 1912 | self.layer.txn.commit() | ||
170 | 1913 | self.assertEquals(BuildStatus.FULLYBUILT, build.status) | ||
171 | 1914 | log_lines = build.upload_log.read().splitlines() | ||
172 | 1915 | self.assertTrue( | ||
173 | 1916 | 'INFO: Processing upload bar_1.0-1_i386.changes' in log_lines) | ||
174 | 1917 | self.assertTrue( | ||
175 | 1918 | 'INFO: Committing the transaction and any mails associated with ' | ||
176 | 1919 | 'this upload.' in log_lines) | ||
177 | 1920 | |||
178 | 1921 | |||
179 | 1922 | class ParseBuildUploadLeafNameTests(TestCase): | ||
180 | 1923 | """Tests for parse_build_upload_leaf_name.""" | ||
181 | 1924 | |||
182 | 1925 | def test_valid(self): | ||
183 | 1926 | self.assertEquals((42, 60), parse_build_upload_leaf_name("42-60")) | ||
184 | 1927 | |||
185 | 1928 | def test_invalid_chars(self): | ||
186 | 1929 | self.assertRaises(ValueError, parse_build_upload_leaf_name, "a42-460") | ||
187 | 1930 | |||
188 | 1931 | def test_no_dash(self): | ||
189 | 1932 | self.assertRaises(ValueError, parse_build_upload_leaf_name, "32") | ||
190 | 1828 | 1933 | ||
191 | === modified file 'lib/lp/archiveuploader/uploadpolicy.py' | |||
192 | --- lib/lp/archiveuploader/uploadpolicy.py 2010-08-18 16:12:28 +0000 | |||
193 | +++ lib/lp/archiveuploader/uploadpolicy.py 2010-08-19 14:33:31 +0000 | |||
194 | @@ -290,7 +290,8 @@ | |||
195 | 290 | def setOptions(self, options): | 290 | def setOptions(self, options): |
196 | 291 | AbstractUploadPolicy.setOptions(self, options) | 291 | AbstractUploadPolicy.setOptions(self, options) |
197 | 292 | # We require a buildid to be provided | 292 | # We require a buildid to be provided |
199 | 293 | if getattr(options, 'buildid', None) is None: | 293 | if (getattr(options, 'buildid', None) is None and |
200 | 294 | not getattr(options, 'builds', False)): | ||
201 | 294 | raise UploadPolicyError("BuildID required for buildd context") | 295 | raise UploadPolicyError("BuildID required for buildd context") |
202 | 295 | 296 | ||
203 | 296 | def policySpecificChecks(self, upload): | 297 | def policySpecificChecks(self, upload): |
204 | 297 | 298 | ||
205 | === modified file 'lib/lp/archiveuploader/uploadprocessor.py' | |||
206 | --- lib/lp/archiveuploader/uploadprocessor.py 2010-08-17 18:33:26 +0000 | |||
207 | +++ lib/lp/archiveuploader/uploadprocessor.py 2010-08-19 14:33:31 +0000 | |||
208 | @@ -47,7 +47,9 @@ | |||
209 | 47 | 47 | ||
210 | 48 | __metaclass__ = type | 48 | __metaclass__ = type |
211 | 49 | 49 | ||
212 | 50 | import datetime | ||
213 | 50 | import os | 51 | import os |
214 | 52 | import pytz | ||
215 | 51 | import shutil | 53 | import shutil |
216 | 52 | import stat | 54 | import stat |
217 | 53 | import sys | 55 | import sys |
218 | @@ -63,9 +65,12 @@ | |||
219 | 63 | BuildDaemonUploadPolicy, | 65 | BuildDaemonUploadPolicy, |
220 | 64 | SOURCE_PACKAGE_RECIPE_UPLOAD_POLICY_NAME, | 66 | SOURCE_PACKAGE_RECIPE_UPLOAD_POLICY_NAME, |
221 | 65 | UploadPolicyError) | 67 | UploadPolicyError) |
222 | 68 | from lp.buildmaster.interfaces.buildbase import BuildStatus | ||
223 | 66 | from lp.soyuz.interfaces.archive import IArchiveSet, NoSuchPPA | 69 | from lp.soyuz.interfaces.archive import IArchiveSet, NoSuchPPA |
224 | 70 | from lp.soyuz.interfaces.binarypackagebuild import IBinaryPackageBuildSet | ||
225 | 67 | from lp.registry.interfaces.distribution import IDistributionSet | 71 | from lp.registry.interfaces.distribution import IDistributionSet |
226 | 68 | from lp.registry.interfaces.person import IPersonSet | 72 | from lp.registry.interfaces.person import IPersonSet |
227 | 73 | from canonical.launchpad.scripts.logger import BufferLogger | ||
228 | 69 | from canonical.launchpad.webapp.errorlog import ( | 74 | from canonical.launchpad.webapp.errorlog import ( |
229 | 70 | ErrorReportingUtility, ScriptRequest) | 75 | ErrorReportingUtility, ScriptRequest) |
230 | 71 | 76 | ||
231 | @@ -73,6 +78,7 @@ | |||
232 | 73 | 78 | ||
233 | 74 | __all__ = [ | 79 | __all__ = [ |
234 | 75 | 'UploadProcessor', | 80 | 'UploadProcessor', |
235 | 81 | 'parse_build_upload_leaf_name', | ||
236 | 76 | 'parse_upload_path', | 82 | 'parse_upload_path', |
237 | 77 | ] | 83 | ] |
238 | 78 | 84 | ||
239 | @@ -86,6 +92,19 @@ | |||
240 | 86 | """) | 92 | """) |
241 | 87 | 93 | ||
242 | 88 | 94 | ||
243 | 95 | def parse_build_upload_leaf_name(name): | ||
244 | 96 | """Parse the leaf directory name of a build upload. | ||
245 | 97 | |||
246 | 98 | :param name: Directory name. | ||
247 | 99 | :return: Tuple with build id and build queue record id. | ||
248 | 100 | """ | ||
249 | 101 | (build_id_str, queue_record_str) = name.split("-", 1) | ||
250 | 102 | try: | ||
251 | 103 | return int(build_id_str), int(queue_record_str) | ||
252 | 104 | except TypeError: | ||
253 | 105 | raise ValueError | ||
254 | 106 | |||
255 | 107 | |||
256 | 89 | class UploadStatusEnum: | 108 | class UploadStatusEnum: |
257 | 90 | """Possible results from processing an upload. | 109 | """Possible results from processing an upload. |
258 | 91 | 110 | ||
259 | @@ -110,8 +129,8 @@ | |||
260 | 110 | class UploadProcessor: | 129 | class UploadProcessor: |
261 | 111 | """Responsible for processing uploads. See module docstring.""" | 130 | """Responsible for processing uploads. See module docstring.""" |
262 | 112 | 131 | ||
265 | 113 | def __init__(self, base_fsroot, dry_run, no_mails, keep, policy_for_distro, | 132 | def __init__(self, base_fsroot, dry_run, no_mails, builds, keep, |
266 | 114 | ztm, log): | 133 | policy_for_distro, ztm, log): |
267 | 115 | """Create a new upload processor. | 134 | """Create a new upload processor. |
268 | 116 | 135 | ||
269 | 117 | :param base_fsroot: Root path for queue to use | 136 | :param base_fsroot: Root path for queue to use |
270 | @@ -130,6 +149,7 @@ | |||
271 | 130 | self.last_processed_upload = None | 149 | self.last_processed_upload = None |
272 | 131 | self.log = log | 150 | self.log = log |
273 | 132 | self.no_mails = no_mails | 151 | self.no_mails = no_mails |
274 | 152 | self.builds = builds | ||
275 | 133 | self._getPolicyForDistro = policy_for_distro | 153 | self._getPolicyForDistro = policy_for_distro |
276 | 134 | self.ztm = ztm | 154 | self.ztm = ztm |
277 | 135 | 155 | ||
278 | @@ -161,11 +181,63 @@ | |||
279 | 161 | self.log.debug("Skipping %s -- does not match %s" % ( | 181 | self.log.debug("Skipping %s -- does not match %s" % ( |
280 | 162 | upload, leaf_name)) | 182 | upload, leaf_name)) |
281 | 163 | continue | 183 | continue |
283 | 164 | self.processUpload(fsroot, upload) | 184 | if self.builds: |
284 | 185 | # Upload directories contain build results, | ||
285 | 186 | # directories are named after build ids. | ||
286 | 187 | self.processBuildUpload(fsroot, upload) | ||
287 | 188 | else: | ||
288 | 189 | self.processUpload(fsroot, upload) | ||
289 | 165 | finally: | 190 | finally: |
290 | 166 | self.log.debug("Rolling back any remaining transactions.") | 191 | self.log.debug("Rolling back any remaining transactions.") |
291 | 167 | self.ztm.abort() | 192 | self.ztm.abort() |
292 | 168 | 193 | ||
293 | 194 | def processBuildUpload(self, fsroot, upload): | ||
294 | 195 | """Process an upload that is the result of a build. | ||
295 | 196 | |||
296 | 197 | The name of the leaf is the build id of the build. | ||
297 | 198 | Build uploads always contain a single package per leaf. | ||
298 | 199 | """ | ||
299 | 200 | try: | ||
300 | 201 | (build_id, build_queue_record_id) = parse_build_upload_leaf_name( | ||
301 | 202 | upload) | ||
302 | 203 | except ValueError: | ||
303 | 204 | self.log.warn("Unable to extract build id from leaf name %s," | ||
304 | 205 | " skipping." % upload) | ||
305 | 206 | return | ||
306 | 207 | build = getUtility(IBinaryPackageBuildSet).getByBuildID(int(build_id)) | ||
307 | 208 | self.log.debug("Build %s found" % build.id) | ||
308 | 209 | logger = BufferLogger() | ||
309 | 210 | upload_path = os.path.join(fsroot, upload) | ||
310 | 211 | try: | ||
311 | 212 | [changes_file] = self.locateChangesFiles(upload_path) | ||
312 | 213 | logger.debug("Considering changefile %s" % changes_file) | ||
313 | 214 | result = self.processChangesFile( | ||
314 | 215 | upload_path, changes_file, logger) | ||
315 | 216 | except (KeyboardInterrupt, SystemExit): | ||
316 | 217 | raise | ||
317 | 218 | except: | ||
318 | 219 | info = sys.exc_info() | ||
319 | 220 | message = ( | ||
320 | 221 | 'Exception while processing upload %s' % upload_path) | ||
321 | 222 | properties = [('error-explanation', message)] | ||
322 | 223 | request = ScriptRequest(properties) | ||
323 | 224 | error_utility = ErrorReportingUtility() | ||
324 | 225 | error_utility.raising(info, request) | ||
325 | 226 | logger.error('%s (%s)' % (message, request.oopsid)) | ||
326 | 227 | result = UploadStatusEnum.FAILED | ||
327 | 228 | destination = { | ||
328 | 229 | UploadStatusEnum.FAILED: "failed", | ||
329 | 230 | UploadStatusEnum.REJECTED: "rejected", | ||
330 | 231 | UploadStatusEnum.ACCEPTED: "accepted"}[result] | ||
331 | 232 | self.moveProcessedUpload(upload_path, destination, logger) | ||
332 | 233 | if not (result == UploadStatusEnum.ACCEPTED and | ||
333 | 234 | build.verifySuccessfulUpload() and | ||
334 | 235 | build.status == BuildStatus.FULLYBUILT): | ||
335 | 236 | build.status = BuildStatus.FAILEDTOUPLOAD | ||
336 | 237 | build.date_finished = datetime.datetime.now(pytz.UTC) | ||
337 | 238 | build.notify(extra_info="Uploading build %s failed." % upload) | ||
338 | 239 | build.storeUploadLog(logger.buffer.getvalue()) | ||
339 | 240 | |||
340 | 169 | def processUpload(self, fsroot, upload): | 241 | def processUpload(self, fsroot, upload): |
341 | 170 | """Process an upload's changes files, and move it to a new directory. | 242 | """Process an upload's changes files, and move it to a new directory. |
342 | 171 | 243 | ||
343 | @@ -186,7 +258,8 @@ | |||
344 | 186 | for changes_file in changes_files: | 258 | for changes_file in changes_files: |
345 | 187 | self.log.debug("Considering changefile %s" % changes_file) | 259 | self.log.debug("Considering changefile %s" % changes_file) |
346 | 188 | try: | 260 | try: |
348 | 189 | result = self.processChangesFile(upload_path, changes_file) | 261 | result = self.processChangesFile( |
349 | 262 | upload_path, changes_file, self.log) | ||
350 | 190 | if result == UploadStatusEnum.FAILED: | 263 | if result == UploadStatusEnum.FAILED: |
351 | 191 | some_failed = True | 264 | some_failed = True |
352 | 192 | elif result == UploadStatusEnum.REJECTED: | 265 | elif result == UploadStatusEnum.REJECTED: |
353 | @@ -216,8 +289,7 @@ | |||
354 | 216 | # There were no changes files at all. We consider | 289 | # There were no changes files at all. We consider |
355 | 217 | # the upload to be failed in this case. | 290 | # the upload to be failed in this case. |
356 | 218 | destination = "failed" | 291 | destination = "failed" |
359 | 219 | 292 | self.moveProcessedUpload(upload_path, destination, self.log) | |
358 | 220 | self.moveProcessedUpload(upload_path, destination) | ||
360 | 221 | 293 | ||
361 | 222 | def locateDirectories(self, fsroot): | 294 | def locateDirectories(self, fsroot): |
362 | 223 | """Return a list of upload directories in a given queue. | 295 | """Return a list of upload directories in a given queue. |
363 | @@ -280,7 +352,7 @@ | |||
364 | 280 | os.path.join(relative_path, filename)) | 352 | os.path.join(relative_path, filename)) |
365 | 281 | return self.orderFilenames(changes_files) | 353 | return self.orderFilenames(changes_files) |
366 | 282 | 354 | ||
368 | 283 | def processChangesFile(self, upload_path, changes_file): | 355 | def processChangesFile(self, upload_path, changes_file, logger=None): |
369 | 284 | """Process a single changes file. | 356 | """Process a single changes file. |
370 | 285 | 357 | ||
371 | 286 | This is done by obtaining the appropriate upload policy (according | 358 | This is done by obtaining the appropriate upload policy (according |
372 | @@ -297,6 +369,8 @@ | |||
373 | 297 | Returns a value from UploadStatusEnum, or re-raises an exception | 369 | Returns a value from UploadStatusEnum, or re-raises an exception |
374 | 298 | from NascentUpload. | 370 | from NascentUpload. |
375 | 299 | """ | 371 | """ |
376 | 372 | if logger is None: | ||
377 | 373 | logger = self.log | ||
378 | 300 | # Calculate the distribution from the path within the upload | 374 | # Calculate the distribution from the path within the upload |
379 | 301 | # Reject the upload since we could not process the path, | 375 | # Reject the upload since we could not process the path, |
380 | 302 | # Store the exception information as a rejection message. | 376 | # Store the exception information as a rejection message. |
381 | @@ -333,7 +407,7 @@ | |||
382 | 333 | "Please check the documentation at " | 407 | "Please check the documentation at " |
383 | 334 | "https://help.launchpad.net/Packaging/PPA#Uploading " | 408 | "https://help.launchpad.net/Packaging/PPA#Uploading " |
384 | 335 | "and update your configuration."))) | 409 | "and update your configuration."))) |
386 | 336 | self.log.debug("Finding fresh policy") | 410 | logger.debug("Finding fresh policy") |
387 | 337 | policy = self._getPolicyForDistro(distribution) | 411 | policy = self._getPolicyForDistro(distribution) |
388 | 338 | policy.archive = archive | 412 | policy.archive = archive |
389 | 339 | 413 | ||
390 | @@ -360,7 +434,7 @@ | |||
391 | 360 | "Invalid upload path (%s) for this policy (%s)" % | 434 | "Invalid upload path (%s) for this policy (%s)" % |
392 | 361 | (relative_path, policy.name)) | 435 | (relative_path, policy.name)) |
393 | 362 | upload.reject(error_message) | 436 | upload.reject(error_message) |
395 | 363 | self.log.error(error_message) | 437 | logger.error(error_message) |
396 | 364 | 438 | ||
397 | 365 | # Reject upload with path processing errors. | 439 | # Reject upload with path processing errors. |
398 | 366 | if upload_path_error is not None: | 440 | if upload_path_error is not None: |
399 | @@ -370,7 +444,7 @@ | |||
400 | 370 | self.last_processed_upload = upload | 444 | self.last_processed_upload = upload |
401 | 371 | 445 | ||
402 | 372 | try: | 446 | try: |
404 | 373 | self.log.info("Processing upload %s" % upload.changes.filename) | 447 | logger.info("Processing upload %s" % upload.changes.filename) |
405 | 374 | result = UploadStatusEnum.ACCEPTED | 448 | result = UploadStatusEnum.ACCEPTED |
406 | 375 | 449 | ||
407 | 376 | try: | 450 | try: |
408 | @@ -378,12 +452,12 @@ | |||
409 | 378 | except UploadPolicyError, e: | 452 | except UploadPolicyError, e: |
410 | 379 | upload.reject("UploadPolicyError escaped upload.process: " | 453 | upload.reject("UploadPolicyError escaped upload.process: " |
411 | 380 | "%s " % e) | 454 | "%s " % e) |
414 | 381 | self.log.debug("UploadPolicyError escaped upload.process", | 455 | logger.debug( |
415 | 382 | exc_info=True) | 456 | "UploadPolicyError escaped upload.process", exc_info=True) |
416 | 383 | except FatalUploadError, e: | 457 | except FatalUploadError, e: |
417 | 384 | upload.reject("UploadError escaped upload.process: %s" % e) | 458 | upload.reject("UploadError escaped upload.process: %s" % e) |
420 | 385 | self.log.debug("UploadError escaped upload.process", | 459 | logger.debug( |
421 | 386 | exc_info=True) | 460 | "UploadError escaped upload.process", exc_info=True) |
422 | 387 | except (KeyboardInterrupt, SystemExit): | 461 | except (KeyboardInterrupt, SystemExit): |
423 | 388 | raise | 462 | raise |
424 | 389 | except EarlyReturnUploadError: | 463 | except EarlyReturnUploadError: |
425 | @@ -400,7 +474,7 @@ | |||
426 | 400 | # the new exception will be handled by the caller just like | 474 | # the new exception will be handled by the caller just like |
427 | 401 | # the one we caught would have been, by failing the upload | 475 | # the one we caught would have been, by failing the upload |
428 | 402 | # with no email. | 476 | # with no email. |
430 | 403 | self.log.exception("Unhandled exception processing upload") | 477 | logger.exception("Unhandled exception processing upload") |
431 | 404 | upload.reject("Unhandled exception processing upload: %s" % e) | 478 | upload.reject("Unhandled exception processing upload: %s" % e) |
432 | 405 | 479 | ||
433 | 406 | # XXX julian 2007-05-25 bug=29744: | 480 | # XXX julian 2007-05-25 bug=29744: |
434 | @@ -418,21 +492,22 @@ | |||
435 | 418 | successful = upload.do_accept(notify=notify) | 492 | successful = upload.do_accept(notify=notify) |
436 | 419 | if not successful: | 493 | if not successful: |
437 | 420 | result = UploadStatusEnum.REJECTED | 494 | result = UploadStatusEnum.REJECTED |
440 | 421 | self.log.info("Rejection during accept. " | 495 | logger.info( |
441 | 422 | "Aborting partial accept.") | 496 | "Rejection during accept. Aborting partial accept.") |
442 | 423 | self.ztm.abort() | 497 | self.ztm.abort() |
443 | 424 | 498 | ||
444 | 425 | if upload.is_rejected: | 499 | if upload.is_rejected: |
446 | 426 | self.log.warn("Upload was rejected:") | 500 | logger.warn("Upload was rejected:") |
447 | 427 | for msg in upload.rejections: | 501 | for msg in upload.rejections: |
449 | 428 | self.log.warn("\t%s" % msg) | 502 | logger.warn("\t%s" % msg) |
450 | 429 | 503 | ||
451 | 430 | if self.dry_run: | 504 | if self.dry_run: |
453 | 431 | self.log.info("Dry run, aborting transaction.") | 505 | logger.info("Dry run, aborting transaction.") |
454 | 432 | self.ztm.abort() | 506 | self.ztm.abort() |
455 | 433 | else: | 507 | else: |
458 | 434 | self.log.info("Committing the transaction and any mails " | 508 | logger.info( |
459 | 435 | "associated with this upload.") | 509 | "Committing the transaction and any mails associated " |
460 | 510 | "with this upload.") | ||
461 | 436 | self.ztm.commit() | 511 | self.ztm.commit() |
462 | 437 | except: | 512 | except: |
463 | 438 | self.ztm.abort() | 513 | self.ztm.abort() |
464 | @@ -440,47 +515,47 @@ | |||
465 | 440 | 515 | ||
466 | 441 | return result | 516 | return result |
467 | 442 | 517 | ||
469 | 443 | def removeUpload(self, upload): | 518 | def removeUpload(self, upload, logger): |
470 | 444 | """Remove an upload that has succesfully been processed. | 519 | """Remove an upload that has succesfully been processed. |
471 | 445 | 520 | ||
472 | 446 | This includes moving the given upload directory and moving the | 521 | This includes moving the given upload directory and moving the |
473 | 447 | matching .distro file, if it exists. | 522 | matching .distro file, if it exists. |
474 | 448 | """ | 523 | """ |
475 | 449 | if self.keep or self.dry_run: | 524 | if self.keep or self.dry_run: |
477 | 450 | self.log.debug("Keeping contents untouched") | 525 | logger.debug("Keeping contents untouched") |
478 | 451 | return | 526 | return |
479 | 452 | 527 | ||
481 | 453 | self.log.debug("Removing upload directory %s", upload) | 528 | logger.debug("Removing upload directory %s", upload) |
482 | 454 | shutil.rmtree(upload) | 529 | shutil.rmtree(upload) |
483 | 455 | 530 | ||
484 | 456 | distro_filename = upload + ".distro" | 531 | distro_filename = upload + ".distro" |
485 | 457 | if os.path.isfile(distro_filename): | 532 | if os.path.isfile(distro_filename): |
487 | 458 | self.log.debug("Removing distro file %s", distro_filename) | 533 | logger.debug("Removing distro file %s", distro_filename) |
488 | 459 | os.remove(distro_filename) | 534 | os.remove(distro_filename) |
489 | 460 | 535 | ||
491 | 461 | def moveProcessedUpload(self, upload_path, destination): | 536 | def moveProcessedUpload(self, upload_path, destination, logger): |
492 | 462 | """Move or remove the upload depending on the status of the upload. | 537 | """Move or remove the upload depending on the status of the upload. |
493 | 463 | """ | 538 | """ |
494 | 464 | if destination == "accepted": | 539 | if destination == "accepted": |
496 | 465 | self.removeUpload(upload_path) | 540 | self.removeUpload(upload_path, logger) |
497 | 466 | else: | 541 | else: |
499 | 467 | self.moveUpload(upload_path, destination) | 542 | self.moveUpload(upload_path, destination, logger) |
500 | 468 | 543 | ||
502 | 469 | def moveUpload(self, upload, subdir_name): | 544 | def moveUpload(self, upload, subdir_name, logger): |
503 | 470 | """Move the upload to the named subdir of the root, eg 'accepted'. | 545 | """Move the upload to the named subdir of the root, eg 'accepted'. |
504 | 471 | 546 | ||
505 | 472 | This includes moving the given upload directory and moving the | 547 | This includes moving the given upload directory and moving the |
506 | 473 | matching .distro file, if it exists. | 548 | matching .distro file, if it exists. |
507 | 474 | """ | 549 | """ |
508 | 475 | if self.keep or self.dry_run: | 550 | if self.keep or self.dry_run: |
510 | 476 | self.log.debug("Keeping contents untouched") | 551 | logger.debug("Keeping contents untouched") |
511 | 477 | return | 552 | return |
512 | 478 | 553 | ||
513 | 479 | pathname = os.path.basename(upload) | 554 | pathname = os.path.basename(upload) |
514 | 480 | 555 | ||
515 | 481 | target_path = os.path.join( | 556 | target_path = os.path.join( |
516 | 482 | self.base_fsroot, subdir_name, pathname) | 557 | self.base_fsroot, subdir_name, pathname) |
518 | 483 | self.log.debug("Moving upload directory %s to %s" % | 558 | logger.debug("Moving upload directory %s to %s" % |
519 | 484 | (upload, target_path)) | 559 | (upload, target_path)) |
520 | 485 | shutil.move(upload, target_path) | 560 | shutil.move(upload, target_path) |
521 | 486 | 561 | ||
522 | @@ -488,7 +563,7 @@ | |||
523 | 488 | if os.path.isfile(distro_filename): | 563 | if os.path.isfile(distro_filename): |
524 | 489 | target_path = os.path.join(self.base_fsroot, subdir_name, | 564 | target_path = os.path.join(self.base_fsroot, subdir_name, |
525 | 490 | os.path.basename(distro_filename)) | 565 | os.path.basename(distro_filename)) |
527 | 491 | self.log.debug("Moving distro file %s to %s" % (distro_filename, | 566 | logger.debug("Moving distro file %s to %s" % (distro_filename, |
528 | 492 | target_path)) | 567 | target_path)) |
529 | 493 | shutil.move(distro_filename, target_path) | 568 | shutil.move(distro_filename, target_path) |
530 | 494 | 569 | ||
531 | 495 | 570 | ||
532 | === modified file 'lib/lp/soyuz/scripts/soyuz_process_upload.py' | |||
533 | --- lib/lp/soyuz/scripts/soyuz_process_upload.py 2010-08-18 14:03:15 +0000 | |||
534 | +++ lib/lp/soyuz/scripts/soyuz_process_upload.py 2010-08-19 14:33:31 +0000 | |||
535 | @@ -35,6 +35,11 @@ | |||
536 | 35 | help="Whether to suppress the sending of mails or not.") | 35 | help="Whether to suppress the sending of mails or not.") |
537 | 36 | 36 | ||
538 | 37 | self.parser.add_option( | 37 | self.parser.add_option( |
539 | 38 | "--builds", action="store_true", | ||
540 | 39 | dest="builds", default=False, | ||
541 | 40 | help="Whether to interpret leaf names as build ids.") | ||
542 | 41 | |||
543 | 42 | self.parser.add_option( | ||
544 | 38 | "-J", "--just-leaf", action="store", dest="leafname", | 43 | "-J", "--just-leaf", action="store", dest="leafname", |
545 | 39 | default=None, help="A specific leaf dir to limit to.", | 44 | default=None, help="A specific leaf dir to limit to.", |
546 | 40 | metavar = "LEAF") | 45 | metavar = "LEAF") |
547 | @@ -80,9 +85,9 @@ | |||
548 | 80 | policy = findPolicyByName(self.options.context) | 85 | policy = findPolicyByName(self.options.context) |
549 | 81 | policy.setOptions(self.options) | 86 | policy.setOptions(self.options) |
550 | 82 | return policy | 87 | return policy |
554 | 83 | processor = UploadProcessor(self.options.base_fsroot, | 88 | processor = UploadProcessor(self.options.base_fsroot, |
555 | 84 | self.options.dryrun, self.options.nomails, self.options.keep, | 89 | self.options.dryrun, self.options.nomails, self.options.builds, |
556 | 85 | getPolicy, self.txn, self.logger) | 90 | self.options.keep, getPolicy, self.txn, self.logger) |
557 | 86 | processor.processUploadQueue(self.options.leafname) | 91 | processor.processUploadQueue(self.options.leafname) |
558 | 87 | 92 | ||
559 | 88 | @property | 93 | @property |
It looks like there are conflicts with this branch. Could you please resolve them?