Merge lp:~jelmer/launchpad/506256-remove-popen-2 into lp:launchpad

Proposed by Jelmer Vernooij
Status: Merged
Approved by: Jelmer Vernooij
Approved revision: no longer in the source branch.
Merged at revision: 11579
Proposed branch: lp:~jelmer/launchpad/506256-remove-popen-2
Merge into: lp:launchpad
Prerequisite: lp:~jelmer/launchpad/506256-remove-popen
Diff against target: 1585 lines (+374/-273)
27 files modified
database/schema/security.cfg (+2/-0)
lib/lp/archiveuploader/dscfile.py (+0/-29)
lib/lp/archiveuploader/nascentupload.py (+31/-16)
lib/lp/archiveuploader/nascentuploadfile.py (+59/-35)
lib/lp/archiveuploader/tests/__init__.py (+5/-7)
lib/lp/archiveuploader/tests/nascentupload.txt (+4/-5)
lib/lp/archiveuploader/tests/test_buildduploads.py (+7/-8)
lib/lp/archiveuploader/tests/test_nascentuploadfile.py (+61/-0)
lib/lp/archiveuploader/tests/test_ppauploadprocessor.py (+11/-12)
lib/lp/archiveuploader/tests/test_recipeuploads.py (+8/-12)
lib/lp/archiveuploader/tests/test_uploadprocessor.py (+99/-30)
lib/lp/archiveuploader/tests/uploadpolicy.txt (+1/-8)
lib/lp/archiveuploader/uploadpolicy.py (+12/-14)
lib/lp/archiveuploader/uploadprocessor.py (+16/-19)
lib/lp/buildmaster/interfaces/packagebuild.py (+8/-4)
lib/lp/buildmaster/model/packagebuild.py (+11/-2)
lib/lp/buildmaster/tests/test_packagebuild.py (+1/-1)
lib/lp/code/configure.zcml (+1/-5)
lib/lp/code/model/sourcepackagerecipebuild.py (+4/-29)
lib/lp/code/model/tests/test_sourcepackagerecipebuild.py (+6/-0)
lib/lp/soyuz/doc/build-failedtoupload-workflow.txt (+2/-3)
lib/lp/soyuz/doc/buildd-slavescanner.txt (+0/-3)
lib/lp/soyuz/doc/distroseriesqueue-translations.txt (+3/-5)
lib/lp/soyuz/doc/soyuz-set-of-uploads.txt (+3/-20)
lib/lp/soyuz/model/binarypackagebuild.py (+4/-0)
lib/lp/soyuz/scripts/soyuz_process_upload.py (+6/-6)
lib/lp/soyuz/tests/test_binarypackagebuild.py (+9/-0)
To merge this branch: bzr merge lp:~jelmer/launchpad/506256-remove-popen-2
Reviewer: Michael Nelson (community)
Review type: code
Status: Approve
Review via email: mp+35412@code.launchpad.net

This proposal supersedes a proposal from 2010-09-14.

Commit message

Fix handling of source recipe builds when processing build uploads asynchronously.

Description of the change

This MP has two prerequisites, both of which have been approved but not yet landed: lp:~jelmer/launchpad/506256-remove-popen and lp:~jelmer/launchpad/archiveuploader-build-handling. Since I can only set one as a prerequisite, I've set the first, as it is the larger of the two.

A bit of background: this branch is a follow-up to earlier work I did so that the builddmaster no longer has to popen("./archiveuploader --build-id --policy buildd") for each finished build - something that was very time-consuming - but instead moves finished builds out of the way, so that a cron job running a separate instance of the archiveuploader can process them later.
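For illustration only, here is a minimal sketch of what one pass of such a cron-driven processor could look like. The directory layout, the leaf-name encoding and the find_build/process_one callables are hypothetical stand-ins, not the actual Launchpad API:

    import os
    import shutil

    def process_build_uploads(fsroot, find_build, process_one):
        """Process builds that the builddmaster moved aside (sketch only)."""
        incoming = os.path.join(fsroot, "incoming")
        for leaf in sorted(os.listdir(incoming)):
            upload_path = os.path.join(incoming, leaf)
            try:
                # Assumption: the leaf name ends with the build's job id.
                job_id = int(leaf.rsplit("-", 1)[-1])
            except ValueError:
                continue  # Not a build upload directory; leave it alone.
            build = find_build(job_id)
            if build is None:
                continue  # Unknown build; skip it.
            accepted = process_one(upload_path, build)
            # Move the directory out of incoming/ so it is never reprocessed.
            target_dir = os.path.join(
                fsroot, "accepted" if accepted else "failed")
            if not os.path.isdir(target_dir):
                os.makedirs(target_dir)
            shutil.move(upload_path, os.path.join(target_dir, leaf))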

This branch fixes source package recipe build processing in the separate upload processor.

It does the following things:

 * The separate upload policy for source package recipe builds has been merged into the general buildd upload policy.
 * Relatedly, getUploader() now lives on the build class rather than on the upload policy, since it differs between binary package builds and recipe builds (see the sketch after this list).
 * The buildqueue record is cleaned up earlier, so the builder is not kept busy.
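
To make the second point concrete, here is a minimal sketch of the shape of the change. These are not the actual Launchpad classes: the NotImplementedError base method and the fall-back to the changes-file signer come from the diff below, while what each concrete build type returns is an assumption based on the description above.

    class PackageBuildSketch(object):

        def getUploader(self, changes):
            """See IPackageBuild; concrete build types must override this."""
            raise NotImplementedError(self.getUploader)


    class BinaryPackageBuildSketch(PackageBuildSketch):

        def getUploader(self, changes):
            # Assumption: a binary package build keeps using the changes
            # file signer.
            return changes.signer


    class SourcePackageRecipeBuildSketch(PackageBuildSketch):

        def __init__(self, requester):
            self.requester = requester

        def getUploader(self, changes):
            # Assumption: a recipe build (e.g. an unsigned daily build) is
            # attributed to the person who requested it.
            return self.requester


    def uploader_for(changes, build=None):
        # Mirrors the new verify_acl() logic: ask the build when there is
        # one, otherwise fall back to the changes file signer.
        if build is not None:
            return build.getUploader(changes)
        return changes.signer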

To test:
./bin/test lp.buildmaster
./bin/test lp.archiveuploader

Revision history for this message
Jelmer Vernooij (jelmer) wrote :

=== modified file 'database/schema/security.cfg'
--- database/schema/security.cfg 2010-09-16 00:33:37 +0000
+++ database/schema/security.cfg 2010-09-16 09:04:13 +0000
@@ -1130,9 +1130,12 @@
 public.packagebuild = SELECT, INSERT, UPDATE
 public.binarypackagebuild = SELECT, INSERT, UPDATE
 public.sourcepackagerecipebuild = SELECT, UPDATE
-public.buildqueue = SELECT, INSERT, UPDATE
-public.job = SELECT, INSERT, UPDATE
-public.buildpackagejob = SELECT, INSERT, UPDATE
+public.sourcepackagerecipebuildjob = SELECT, UPDATE
+public.sourcepackagerecipe = SELECT, UPDATE
+public.buildqueue = SELECT, INSERT, UPDATE, DELETE
+public.job = SELECT, INSERT, UPDATE, DELETE
+public.buildpackagejob = SELECT, INSERT, UPDATE, DELETE
+public.builder = SELECT

 # Thusly the librarian
 public.libraryfilecontent = SELECT, INSERT

=== modified file 'lib/lp/archiveuploader/nascentupload.py'
--- lib/lp/archiveuploader/nascentupload.py 2010-09-15 19:38:48 +0000
+++ lib/lp/archiveuploader/nascentupload.py 2010-09-16 09:05:43 +0000
@@ -498,10 +498,13 @@
         if self.binaryful:
             return

- # Set up some convenient shortcut variables.
-
- uploader = self.policy.getUploader(self.changes, build)
- archive = self.policy.archive
+ # The build can have an explicit uploader, which may be different
+ # from the changes file signer. (i.e in case of daily source package
+ # builds)
+ if build is not None:
+ uploader = build.getUploader(self.changes)
+ else:
+ uploader = self.changes.signer

         # If we have no signer, there's no ACL we can apply.
         if uploader is None:

=== modified file 'lib/lp/archiveuploader/tests/test_recipeuploads.py'
--- lib/lp/archiveuploader/tests/test_recipeuploads.py 2010-09-16 09:02:57 +0000
+++ lib/lp/archiveuploader/tests/test_recipeuploads.py 2010-09-16 09:05:48 +0000
@@ -42,7 +42,7 @@
             requester=self.recipe.owner)

         Store.of(self.build).flush()
- self.options.context = 'recipe'
+ self.options.context = 'buildd'

         self.uploadprocessor = self.getUploadProcessor(
             self.layer.txn)

=== modified file 'lib/lp/archiveuploader/tests/test_uploadprocessor.py'
--- lib/lp/archiveuploader/tests/test_uploadprocessor.py 2010-09-16 09:02:57 +0000
+++ lib/lp/archiveuploader/tests/test_uploadprocessor.py 2010-09-16 09:05:56 +0000
@@ -1884,7 +1884,7 @@
         self.assertLogContains(
             "Unable to find package build job with id 42. Skipping.")

- def testNoFiles(self):
+ def testBinaryPackageBuild_fail(self):
         # If the upload directory is empty, the upload
         # will fail.

@@ -1908,6 +1908,8 @@

         # Upload and accept a binary for the primary archive source.
         shutil.rmtree(upload_dir)
+
+ # Commit so the build cookie has the right ids.
         self.layer.txn.commit()
         leaf_name = build.getUploadDirLeaf(build.getBuildCo...

Revision history for this message
Michael Nelson (michael.nelson) wrote :

Hi Jelmer,

r=me, assuming you check the following:

* to check: the db permissions (can we get rid of DELETE perms elsewhere?)
* to fix: I don't see why we need to use removeSecurityProxy in your test instead of a small update to the factory method.
* to check: regarding the actual diff, my local db-devel seems to already have some of these changes.

Thanks!

> === modified file 'database/schema/security.cfg'
> --- database/schema/security.cfg 2010-09-16 00:33:37 +0000
> +++ database/schema/security.cfg 2010-09-16 09:04:13 +0000
> @@ -1130,9 +1130,12 @@
> public.packagebuild = SELECT, INSERT, UPDATE
> public.binarypackagebuild = SELECT, INSERT, UPDATE
> public.sourcepackagerecipebuild = SELECT, UPDATE
> -public.buildqueue = SELECT, INSERT, UPDATE
> -public.job = SELECT, INSERT, UPDATE
> -public.buildpackagejob = SELECT, INSERT, UPDATE
> +public.sourcepackagerecipebuildjob = SELECT, UPDATE
> +public.sourcepackagerecipe = SELECT, UPDATE
> +public.buildqueue = SELECT, INSERT, UPDATE, DELETE
> +public.job = SELECT, INSERT, UPDATE, DELETE
> +public.buildpackagejob = SELECT, INSERT, UPDATE, DELETE

Right - this is so the buildqueue record can be cleaned up earlier by the
uploader. Nice.

Should we be able to remove some DELETE perms elsewhere?

> +public.builder = SELECT
>
> # Thusly the librarian
> public.libraryfilecontent = SELECT, INSERT
>
> === modified file 'lib/lp/archiveuploader/nascentupload.py'
> --- lib/lp/archiveuploader/nascentupload.py 2010-09-15 19:38:48 +0000
> +++ lib/lp/archiveuploader/nascentupload.py 2010-09-16 09:05:43 +0000
> @@ -498,10 +498,13 @@
> if self.binaryful:
> return
>
> - # Set up some convenient shortcut variables.
> -
> - uploader = self.policy.getUploader(self.changes, build)
> - archive = self.policy.archive
> + # The build can have an explicit uploader, which may be different
> + # from the changes file signer. (i.e in case of daily source package
> + # builds)
> + if build is not None:
> + uploader = build.getUploader(self.changes)
> + else:
> + uploader = self.changes.signer

This seems strange - do we not need archive any more? In db-devel it's used
straight afterwards, but I'm assuming you've changed that code in a previous
branch. Checking your full diff shows that is the case (you're using
policy.archive).

>
> # If we have no signer, there's no ACL we can apply.
> if uploader is None:
>
> === modified file 'lib/lp/archiveuploader/tests/test_uploadprocessor.py'
> --- lib/lp/archiveuploader/tests/test_uploadprocessor.py 2010-09-16 09:02:57 +0000
> +++ lib/lp/archiveuploader/tests/test_uploadprocessor.py 2010-09-16 09:05:56 +0000
> @@ -1884,7 +1884,7 @@
> self.assertLogContains(
> "Unable to find package build job with id 42. Skipping.")
>
> - def testNoFiles(self):
> + def testBinaryPackageBuild_fail(self):
>...

review: Approve (code)
Revision history for this message
Jelmer Vernooij (jelmer) wrote :

=== modified file 'database/schema/security.cfg'
--- database/schema/security.cfg 2010-09-16 00:33:37 +0000
+++ database/schema/security.cfg 2010-09-16 12:27:46 +0000
@@ -1130,6 +1130,8 @@
 public.packagebuild = SELECT, INSERT, UPDATE
 public.binarypackagebuild = SELECT, INSERT, UPDATE
 public.sourcepackagerecipebuild = SELECT, UPDATE
+public.sourcepackagerecipebuildjob = SELECT, UPDATE
+public.sourcepackagerecipe = SELECT, UPDATE
 public.buildqueue = SELECT, INSERT, UPDATE
 public.job = SELECT, INSERT, UPDATE
 public.buildpackagejob = SELECT, INSERT, UPDATE

=== modified file 'lib/lp/archiveuploader/nascentupload.py'
--- lib/lp/archiveuploader/nascentupload.py 2010-09-15 19:38:48 +0000
+++ lib/lp/archiveuploader/nascentupload.py 2010-09-16 12:27:46 +0000
@@ -498,10 +498,13 @@
         if self.binaryful:
             return

- # Set up some convenient shortcut variables.
-
- uploader = self.policy.getUploader(self.changes, build)
- archive = self.policy.archive
+ # The build can have an explicit uploader, which may be different
+ # from the changes file signer. (i.e in case of daily source package
+ # builds)
+ if build is not None:
+ uploader = build.getUploader(self.changes)
+ else:
+ uploader = self.changes.signer

         # If we have no signer, there's no ACL we can apply.
         if uploader is None:

=== modified file 'lib/lp/archiveuploader/tests/test_recipeuploads.py'
--- lib/lp/archiveuploader/tests/test_recipeuploads.py 2010-09-16 09:02:57 +0000
+++ lib/lp/archiveuploader/tests/test_recipeuploads.py 2010-09-16 12:27:46 +0000
@@ -42,7 +42,7 @@
             requester=self.recipe.owner)

         Store.of(self.build).flush()
- self.options.context = 'recipe'
+ self.options.context = 'buildd'

         self.uploadprocessor = self.getUploadProcessor(
             self.layer.txn)

=== modified file 'lib/lp/archiveuploader/tests/test_uploadprocessor.py'
--- lib/lp/archiveuploader/tests/test_uploadprocessor.py 2010-09-16 09:02:57 +0000
+++ lib/lp/archiveuploader/tests/test_uploadprocessor.py 2010-09-16 13:37:25 +0000
@@ -18,6 +18,7 @@
 import tempfile
 import traceback

+from storm.locals import Store
 from zope.component import (
     getGlobalSiteManager,
     getUtility,
@@ -1884,7 +1885,7 @@
         self.assertLogContains(
             "Unable to find package build job with id 42. Skipping.")

- def testNoFiles(self):
+ def testBinaryPackageBuild_fail(self):
         # If the upload directory is empty, the upload
         # will fail.

@@ -1908,6 +1909,8 @@

         # Upload and accept a binary for the primary archive source.
         shutil.rmtree(upload_dir)
+
+ # Commit so the build cookie has the right ids.
         self.layer.txn.commit()
         leaf_name = build.getUploadDirLeaf(build.getBuildCookie())
         os.mkdir(os.path.join(self.incoming_folder, leaf_name))
@@ -1928,7 +1931,7 @@
         self.assertTrue('DEBUG: Moving upload directory '
             in log_contents)

- def t...

Preview Diff

1=== modified file 'database/schema/security.cfg'
2--- database/schema/security.cfg 2010-09-10 02:46:28 +0000
3+++ database/schema/security.cfg 2010-09-17 06:08:57 +0000
4@@ -1130,6 +1130,8 @@
5 public.packagebuild = SELECT, INSERT, UPDATE
6 public.binarypackagebuild = SELECT, INSERT, UPDATE
7 public.sourcepackagerecipebuild = SELECT, UPDATE
8+public.sourcepackagerecipebuildjob = SELECT, UPDATE
9+public.sourcepackagerecipe = SELECT, UPDATE
10 public.buildqueue = SELECT, INSERT, UPDATE
11 public.job = SELECT, INSERT, UPDATE
12 public.buildpackagejob = SELECT, INSERT, UPDATE
13
14=== modified file 'lib/lp/archiveuploader/dscfile.py'
15--- lib/lp/archiveuploader/dscfile.py 2010-09-09 17:02:33 +0000
16+++ lib/lp/archiveuploader/dscfile.py 2010-09-17 06:08:57 +0000
17@@ -630,35 +630,6 @@
18 cleanup_unpacked_dir(unpacked_dir)
19 self.logger.debug("Done")
20
21- def findBuild(self):
22- """Find and return the SourcePackageRecipeBuild, if one is specified.
23-
24- If by any chance an inconsistent build was found this method will
25- raise UploadError resulting in a upload rejection.
26- """
27- build_id = getattr(self.policy.options, 'buildid', None)
28- if build_id is None:
29- return None
30-
31- build = getUtility(ISourcePackageRecipeBuildSource).getById(build_id)
32-
33- # The master verifies the status to confirm successful upload.
34- build.status = BuildStatus.FULLYBUILT
35- # If this upload is successful, any existing log is wrong and
36- # unuseful.
37- build.upload_log = None
38-
39- # Sanity check; raise an error if the build we've been
40- # told to link to makes no sense.
41- if (build.pocket != self.policy.pocket or
42- build.distroseries != self.policy.distroseries or
43- build.archive != self.policy.archive):
44- raise UploadError(
45- "Attempt to upload source specifying "
46- "recipe build %s, where it doesn't fit." % build.id)
47-
48- return build
49-
50 def storeInDatabase(self, build):
51 """Store DSC information as a SourcePackageRelease record.
52
53
54=== modified file 'lib/lp/archiveuploader/nascentupload.py'
55--- lib/lp/archiveuploader/nascentupload.py 2010-08-27 14:27:22 +0000
56+++ lib/lp/archiveuploader/nascentupload.py 2010-09-17 06:08:57 +0000
57@@ -137,7 +137,7 @@
58 raise FatalUploadError(str(e))
59 return cls(changesfile, policy, logger)
60
61- def process(self):
62+ def process(self, build=None):
63 """Process this upload, checking it against policy, loading it into
64 the database if it seems okay.
65
66@@ -200,7 +200,7 @@
67 self.overrideArchive()
68
69 # Check upload rights for the signer of the upload.
70- self.verify_acl()
71+ self.verify_acl(build)
72
73 # Perform policy checks.
74 policy.checkUpload(self)
75@@ -483,7 +483,7 @@
76 #
77 # Signature and ACL stuff
78 #
79- def verify_acl(self):
80+ def verify_acl(self, build=None):
81 """Check the signer's upload rights.
82
83 The signer must have permission to upload to either the component
84@@ -498,10 +498,13 @@
85 if self.binaryful:
86 return
87
88- # Set up some convenient shortcut variables.
89-
90- uploader = self.policy.getUploader(self.changes)
91- archive = self.policy.archive
92+ # The build can have an explicit uploader, which may be different
93+ # from the changes file signer. (i.e in case of daily source package
94+ # builds)
95+ if build is not None:
96+ uploader = build.getUploader(self.changes)
97+ else:
98+ uploader = self.changes.signer
99
100 # If we have no signer, there's no ACL we can apply.
101 if uploader is None:
102@@ -511,7 +514,7 @@
103 source_name = getUtility(
104 ISourcePackageNameSet).queryByName(self.changes.dsc.package)
105
106- rejection_reason = archive.checkUpload(
107+ rejection_reason = self.policy.archive.checkUpload(
108 uploader, self.policy.distroseries, source_name,
109 self.changes.dsc.component, self.policy.pocket, not self.is_new)
110
111@@ -824,7 +827,7 @@
112 #
113 # Actually processing accepted or rejected uploads -- and mailing people
114 #
115- def do_accept(self, notify=True):
116+ def do_accept(self, notify=True, build=None):
117 """Accept the upload into the queue.
118
119 This *MAY* in extreme cases cause a database error and thus
120@@ -834,13 +837,14 @@
121 constraint.
122
123 :param notify: True to send an email, False to not send one.
124+ :param build: The build associated with this upload.
125 """
126 if self.is_rejected:
127 self.reject("Alas, someone called do_accept when we're rejected")
128 self.do_reject(notify)
129 return False
130 try:
131- self.storeObjectsInDatabase()
132+ self.storeObjectsInDatabase(build=build)
133
134 # Send the email.
135 # There is also a small corner case here where the DB transaction
136@@ -923,7 +927,7 @@
137 #
138 # Inserting stuff in the database
139 #
140- def storeObjectsInDatabase(self):
141+ def storeObjectsInDatabase(self, build=None):
142 """Insert this nascent upload into the database."""
143
144 # Queue entries are created in the NEW state by default; at the
145@@ -939,7 +943,8 @@
146 sourcepackagerelease = None
147 if self.sourceful:
148 assert self.changes.dsc, "Sourceful upload lacks DSC."
149- build = self.changes.dsc.findBuild()
150+ if build is not None:
151+ self.changes.dsc.checkBuild(build)
152 sourcepackagerelease = self.changes.dsc.storeInDatabase(build)
153 package_upload_source = self.queue_root.addSource(
154 sourcepackagerelease)
155@@ -980,11 +985,21 @@
156 sourcepackagerelease = (
157 binary_package_file.findSourcePackageRelease())
158
159- build = binary_package_file.findBuild(sourcepackagerelease)
160- assert self.queue_root.pocket == build.pocket, (
161+ # Find the build for this particular binary package file.
162+ if build is None:
163+ bpf_build = binary_package_file.findBuild(
164+ sourcepackagerelease)
165+ else:
166+ bpf_build = build
167+ if bpf_build.source_package_release != sourcepackagerelease:
168+ raise AssertionError(
169+ "Attempt to upload binaries specifying build %s, "
170+ "where they don't fit." % bpf_build.id)
171+ binary_package_file.checkBuild(bpf_build)
172+ assert self.queue_root.pocket == bpf_build.pocket, (
173 "Binary was not build for the claimed pocket.")
174- binary_package_file.storeInDatabase(build)
175- processed_builds.append(build)
176+ binary_package_file.storeInDatabase(bpf_build)
177+ processed_builds.append(bpf_build)
178
179 # Store the related builds after verifying they were built
180 # from the same source.
181
182=== modified file 'lib/lp/archiveuploader/nascentuploadfile.py'
183--- lib/lp/archiveuploader/nascentuploadfile.py 2010-09-02 16:28:50 +0000
184+++ lib/lp/archiveuploader/nascentuploadfile.py 2010-09-17 06:08:57 +0000
185@@ -33,6 +33,7 @@
186 from canonical.encoding import guess as guess_encoding
187 from canonical.launchpad.interfaces.librarian import ILibraryFileAliasSet
188 from canonical.librarian.utils import filechunks
189+from lp.app.errors import NotFoundError
190 from lp.archiveuploader.utils import (
191 determine_source_file_type,
192 prefix_multi_line_string,
193@@ -52,7 +53,6 @@
194 PackageUploadCustomFormat,
195 PackageUploadStatus,
196 )
197-from lp.soyuz.interfaces.binarypackagebuild import IBinaryPackageBuildSet
198 from lp.soyuz.interfaces.binarypackagename import IBinaryPackageNameSet
199 from lp.soyuz.interfaces.component import IComponentSet
200 from lp.soyuz.interfaces.section import ISectionSet
201@@ -338,6 +338,13 @@
202 """Return an ISection for self.section_name."""
203 return getUtility(ISectionSet)[self.section_name]
204
205+ def checkBuild(self, build):
206+ """Check the status of the build this file is part of.
207+
208+ :param build: an `IPackageBuild` instance
209+ """
210+ raise NotImplementedError(self.checkBuild)
211+
212 def extractUserDefinedFields(self, control):
213 """Extract the user defined fields out of a control file list.
214 """
215@@ -381,6 +388,23 @@
216 yield UploadError("%s: should be %s according to changes file."
217 % (filename_version, version_chopped))
218
219+ def checkBuild(self, build):
220+ """See PackageUploadFile."""
221+ # The master verifies the status to confirm successful upload.
222+ build.status = BuildStatus.FULLYBUILT
223+ # If this upload is successful, any existing log is wrong and
224+ # unuseful.
225+ build.upload_log = None
226+
227+ # Sanity check; raise an error if the build we've been
228+ # told to link to makes no sense.
229+ if (build.pocket != self.policy.pocket or
230+ build.distroseries != self.policy.distroseries or
231+ build.archive != self.policy.archive):
232+ raise UploadError(
233+ "Attempt to upload source specifying "
234+ "recipe build %s, where it doesn't fit." % build.id)
235+
236
237 class BaseBinaryUploadFile(PackageUploadFile):
238 """Base methods for binary upload modeling."""
239@@ -834,52 +858,52 @@
240 in this case, change this build to be FULLYBUILT.
241 - Create a new build in FULLYBUILT status.
242
243- If by any chance an inconsistent build was found this method will
244- raise UploadError resulting in a upload rejection.
245 """
246- build_id = getattr(self.policy.options, 'buildid', None)
247 dar = self.policy.distroseries[self.archtag]
248
249- if build_id is None:
250- # Check if there's a suitable existing build.
251- build = sourcepackagerelease.getBuildByArch(
252- dar, self.policy.archive)
253- if build is not None:
254- build.status = BuildStatus.FULLYBUILT
255- self.logger.debug("Updating build for %s: %s" % (
256- dar.architecturetag, build.id))
257- else:
258- # No luck. Make one.
259- # Usually happen for security binary uploads.
260- build = sourcepackagerelease.createBuild(
261- dar, self.policy.pocket, self.policy.archive,
262- status=BuildStatus.FULLYBUILT)
263- self.logger.debug("Build %s created" % build.id)
264- else:
265- build = getUtility(IBinaryPackageBuildSet).getByBuildID(build_id)
266- self.logger.debug("Build %s found" % build.id)
267- # Ensure gathered binary is related to a FULLYBUILT build
268- # record. It will be check in slave-scanner procedure to
269- # certify that the build was processed correctly.
270+ # Check if there's a suitable existing build.
271+ build = sourcepackagerelease.getBuildByArch(
272+ dar, self.policy.archive)
273+ if build is not None:
274 build.status = BuildStatus.FULLYBUILT
275- # Also purge any previous failed upload_log stored, so its
276- # content can be garbage-collected since it's not useful
277- # anymore.
278- build.upload_log = None
279+ self.logger.debug("Updating build for %s: %s" % (
280+ dar.architecturetag, build.id))
281+ else:
282+ # No luck. Make one.
283+ # Usually happen for security binary uploads.
284+ build = sourcepackagerelease.createBuild(
285+ dar, self.policy.pocket, self.policy.archive,
286+ status=BuildStatus.FULLYBUILT)
287+ self.logger.debug("Build %s created" % build.id)
288+ return build
289+
290+ def checkBuild(self, build):
291+ """See PackageUploadFile."""
292+ try:
293+ dar = self.policy.distroseries[self.archtag]
294+ except NotFoundError:
295+ raise UploadError(
296+ "Upload to unknown architecture %s for distroseries %s" %
297+ (self.archtag, self.policy.distroseries))
298+
299+ # Ensure gathered binary is related to a FULLYBUILT build
300+ # record. It will be check in slave-scanner procedure to
301+ # certify that the build was processed correctly.
302+ build.status = BuildStatus.FULLYBUILT
303+ # Also purge any previous failed upload_log stored, so its
304+ # content can be garbage-collected since it's not useful
305+ # anymore.
306+ build.upload_log = None
307
308 # Sanity check; raise an error if the build we've been
309- # told to link to makes no sense (ie. is not for the right
310- # source package).
311- if (build.source_package_release != sourcepackagerelease or
312- build.pocket != self.policy.pocket or
313+ # told to link to makes no sense.
314+ if (build.pocket != self.policy.pocket or
315 build.distro_arch_series != dar or
316 build.archive != self.policy.archive):
317 raise UploadError(
318 "Attempt to upload binaries specifying "
319 "build %s, where they don't fit." % build.id)
320
321- return build
322-
323 def storeInDatabase(self, build):
324 """Insert this binary release and build into the database."""
325 # Reencode everything we are supplying, because old packages
326
327=== modified file 'lib/lp/archiveuploader/tests/__init__.py'
328--- lib/lp/archiveuploader/tests/__init__.py 2010-08-26 20:08:43 +0000
329+++ lib/lp/archiveuploader/tests/__init__.py 2010-09-17 06:08:57 +0000
330@@ -64,17 +64,15 @@
331 class MockUploadOptions:
332 """Mock upload policy options helper"""
333
334- def __init__(self, distro='ubuntutest', distroseries=None, buildid=None):
335+ def __init__(self, distro='ubuntutest', distroseries=None):
336 self.distro = distro
337 self.distroseries = distroseries
338- self.buildid = buildid
339-
340-
341-def getPolicy(name='anything', distro='ubuntu', distroseries=None,
342- buildid=None):
343+
344+
345+def getPolicy(name='anything', distro='ubuntu', distroseries=None):
346 """Build and return an Upload Policy for the given context."""
347 policy = findPolicyByName(name)
348- options = MockUploadOptions(distro, distroseries, buildid)
349+ options = MockUploadOptions(distro, distroseries)
350 policy.setOptions(options)
351 return policy
352
353
354=== modified file 'lib/lp/archiveuploader/tests/nascentupload.txt'
355--- lib/lp/archiveuploader/tests/nascentupload.txt 2010-08-26 15:28:34 +0000
356+++ lib/lp/archiveuploader/tests/nascentupload.txt 2010-09-17 06:08:57 +0000
357@@ -27,7 +27,7 @@
358 ... datadir, getPolicy, mock_logger, mock_logger_quiet)
359
360 >>> buildd_policy = getPolicy(
361- ... name='buildd', distro='ubuntu', distroseries='hoary', buildid=1)
362+ ... name='buildd', distro='ubuntu', distroseries='hoary')
363
364 >>> sync_policy = getPolicy(
365 ... name='sync', distro='ubuntu', distroseries='hoary')
366@@ -216,7 +216,7 @@
367 # Use the buildd policy as it accepts unsigned changes files and binary
368 # uploads.
369 >>> modified_buildd_policy = getPolicy(
370- ... name='buildd', distro='ubuntu', distroseries='hoary', buildid=1)
371+ ... name='buildd', distro='ubuntu', distroseries='hoary')
372
373 >>> ed_mismatched_upload = NascentUpload.from_changesfile_path(
374 ... datadir("ed_0.2-20_i386.changes.mismatched-arch-unsigned"),
375@@ -640,13 +640,12 @@
376 the 'buildd' upload policy and the build record id.
377
378 >>> buildd_policy = getPolicy(
379- ... name='buildd', distro='ubuntu', distroseries='hoary',
380- ... buildid=multibar_build.id)
381+ ... name='buildd', distro='ubuntu', distroseries='hoary')
382
383 >>> multibar_bin_upload = NascentUpload.from_changesfile_path(
384 ... datadir('suite/multibar_1.0-1/multibar_1.0-1_i386.changes'),
385 ... buildd_policy, mock_logger_quiet)
386- >>> multibar_bin_upload.process()
387+ >>> multibar_bin_upload.process(build=multibar_build)
388 >>> success = multibar_bin_upload.do_accept()
389
390 Now that we have successfully processed the binaries coming from a
391
392=== modified file 'lib/lp/archiveuploader/tests/test_buildduploads.py'
393--- lib/lp/archiveuploader/tests/test_buildduploads.py 2010-08-26 15:28:34 +0000
394+++ lib/lp/archiveuploader/tests/test_buildduploads.py 2010-09-17 06:08:57 +0000
395@@ -112,7 +112,7 @@
396 # Store source queue item for future use.
397 self.source_queue = queue_item
398
399- def _uploadBinary(self, archtag):
400+ def _uploadBinary(self, archtag, build):
401 """Upload the base binary.
402
403 Ensure it got processed and has a respective queue record.
404@@ -121,7 +121,7 @@
405 self._prepareUpload(self.binary_dir)
406 self.uploadprocessor.processChangesFile(
407 os.path.join(self.queue_folder, "incoming", self.binary_dir),
408- self.getBinaryChangesfileFor(archtag))
409+ self.getBinaryChangesfileFor(archtag), build=build)
410 queue_item = self.uploadprocessor.last_processed_upload.queue_root
411 self.assertTrue(
412 queue_item is not None,
413@@ -205,10 +205,9 @@
414 pubrec.datepublished = UTC_NOW
415 queue_item.setDone()
416
417- def _setupUploadProcessorForBuild(self, build_candidate):
418+ def _setupUploadProcessorForBuild(self):
419 """Setup an UploadProcessor instance for a given buildd context."""
420 self.options.context = self.policy
421- self.options.buildid = str(build_candidate.id)
422 self.uploadprocessor = self.getUploadProcessor(
423 self.layer.txn)
424
425@@ -223,8 +222,8 @@
426 """
427 # Upload i386 binary.
428 build_candidate = self._createBuild('i386')
429- self._setupUploadProcessorForBuild(build_candidate)
430- build_used = self._uploadBinary('i386')
431+ self._setupUploadProcessorForBuild()
432+ build_used = self._uploadBinary('i386', build_candidate)
433
434 self.assertEqual(build_used.id, build_candidate.id)
435 self.assertBuildsCreated(1)
436@@ -239,8 +238,8 @@
437
438 # Upload powerpc binary
439 build_candidate = self._createBuild('powerpc')
440- self._setupUploadProcessorForBuild(build_candidate)
441- build_used = self._uploadBinary('powerpc')
442+ self._setupUploadProcessorForBuild()
443+ build_used = self._uploadBinary('powerpc', build_candidate)
444
445 self.assertEqual(build_used.id, build_candidate.id)
446 self.assertBuildsCreated(2)
447
448=== modified file 'lib/lp/archiveuploader/tests/test_nascentuploadfile.py'
449--- lib/lp/archiveuploader/tests/test_nascentuploadfile.py 2010-09-03 06:06:40 +0000
450+++ lib/lp/archiveuploader/tests/test_nascentuploadfile.py 2010-09-17 06:08:57 +0000
451@@ -20,8 +20,11 @@
452 from lp.archiveuploader.nascentuploadfile import (
453 CustomUploadFile,
454 DebBinaryUploadFile,
455+ UploadError,
456 )
457+from lp.registry.interfaces.pocket import PackagePublishingPocket
458 from lp.archiveuploader.tests import AbsolutelyAnythingGoesUploadPolicy
459+from lp.buildmaster.enums import BuildStatus
460 from lp.soyuz.enums import PackageUploadCustomFormat
461 from lp.testing import TestCaseWithFactory
462
463@@ -34,6 +37,7 @@
464 self.logger = BufferLogger()
465 self.policy = AbsolutelyAnythingGoesUploadPolicy()
466 self.distro = self.factory.makeDistribution()
467+ self.policy.pocket = PackagePublishingPocket.RELEASE
468 self.policy.archive = self.factory.makeArchive(
469 distribution=self.distro)
470
471@@ -217,6 +221,34 @@
472 release = uploadfile.storeInDatabase(None)
473 self.assertEquals(u"http://samba.org/~jelmer/bzr", release.homepage)
474
475+ def test_checkBuild(self):
476+ # checkBuild() verifies consistency with a build.
477+ build = self.factory.makeSourcePackageRecipeBuild(
478+ pocket=self.policy.pocket, distroseries=self.policy.distroseries,
479+ archive=self.policy.archive)
480+ dsc = self.getBaseDsc()
481+ uploadfile = self.createDSCFile(
482+ "foo.dsc", dsc, "main/net", "extra", "dulwich", "0.42",
483+ self.createChangesFile("foo.changes", self.getBaseChanges()))
484+ uploadfile.checkBuild(build)
485+ # checkBuild() sets the build status to FULLYBUILT and
486+ # removes the upload log.
487+ self.assertEquals(BuildStatus.FULLYBUILT, build.status)
488+ self.assertIs(None, build.upload_log)
489+
490+ def test_checkBuild_inconsistent(self):
491+ # checkBuild() raises UploadError if inconsistencies between build
492+ # and upload file are found.
493+ build = self.factory.makeSourcePackageRecipeBuild(
494+ pocket=self.policy.pocket,
495+ distroseries=self.factory.makeDistroSeries(),
496+ archive=self.policy.archive)
497+ dsc = self.getBaseDsc()
498+ uploadfile = self.createDSCFile(
499+ "foo.dsc", dsc, "main/net", "extra", "dulwich", "0.42",
500+ self.createChangesFile("foo.changes", self.getBaseChanges()))
501+ self.assertRaises(UploadError, uploadfile.checkBuild, build)
502+
503
504 class DebBinaryUploadFileTests(PackageUploadFileTestCase):
505 """Tests for DebBinaryUploadFile."""
506@@ -326,3 +358,32 @@
507 bpr = uploadfile.storeInDatabase(build)
508 self.assertEquals(
509 u"http://samba.org/~jelmer/dulwich", bpr.homepage)
510+
511+ def test_checkBuild(self):
512+ # checkBuild() verifies consistency with a build.
513+ das = self.factory.makeDistroArchSeries(
514+ distroseries=self.policy.distroseries, architecturetag="i386")
515+ build = self.factory.makeBinaryPackageBuild(
516+ distroarchseries=das,
517+ archive=self.policy.archive)
518+ uploadfile = self.createDebBinaryUploadFile(
519+ "foo_0.42_i386.deb", "main/python", "unknown", "mypkg", "0.42",
520+ None)
521+ uploadfile.checkBuild(build)
522+ # checkBuild() sets the build status to FULLYBUILT and
523+ # removes the upload log.
524+ self.assertEquals(BuildStatus.FULLYBUILT, build.status)
525+ self.assertIs(None, build.upload_log)
526+
527+ def test_checkBuild_inconsistent(self):
528+ # checkBuild() raises UploadError if inconsistencies between build
529+ # and upload file are found.
530+ das = self.factory.makeDistroArchSeries(
531+ distroseries=self.policy.distroseries, architecturetag="amd64")
532+ build = self.factory.makeBinaryPackageBuild(
533+ distroarchseries=das,
534+ archive=self.policy.archive)
535+ uploadfile = self.createDebBinaryUploadFile(
536+ "foo_0.42_i386.deb", "main/python", "unknown", "mypkg", "0.42",
537+ None)
538+ self.assertRaises(UploadError, uploadfile.checkBuild, build)
539
540=== modified file 'lib/lp/archiveuploader/tests/test_ppauploadprocessor.py'
541--- lib/lp/archiveuploader/tests/test_ppauploadprocessor.py 2010-08-31 11:11:09 +0000
542+++ lib/lp/archiveuploader/tests/test_ppauploadprocessor.py 2010-09-17 06:08:57 +0000
543@@ -355,10 +355,10 @@
544 builds = self.name16.archive.getBuildRecords(name="bar")
545 [build] = builds
546 self.options.context = 'buildd'
547- self.options.buildid = build.id
548 upload_dir = self.queueUpload(
549 "bar_1.0-1_binary_universe", "~name16/ubuntu")
550- self.processUpload(self.uploadprocessor, upload_dir)
551+ self.processUpload(
552+ self.uploadprocessor, upload_dir, build=build)
553
554 # No mails are sent for successful binary uploads.
555 self.assertEqual(len(stub.test_emails), 0,
556@@ -405,9 +405,9 @@
557
558 # Binary upload to the just-created build record.
559 self.options.context = 'buildd'
560- self.options.buildid = build.id
561 upload_dir = self.queueUpload("bar_1.0-1_binary", "~name16/ubuntu")
562- self.processUpload(self.uploadprocessor, upload_dir)
563+ self.processUpload(
564+ self.uploadprocessor, upload_dir, build=build)
565
566 # The binary upload was accepted and it's waiting in the queue.
567 queue_items = self.breezy.getQueueItems(
568@@ -459,9 +459,9 @@
569
570 # Binary upload to the just-created build record.
571 self.options.context = 'buildd'
572- self.options.buildid = build_bar_i386.id
573 upload_dir = self.queueUpload("bar_1.0-1_binary", "~cprov/ubuntu")
574- self.processUpload(self.uploadprocessor, upload_dir)
575+ self.processUpload(
576+ self.uploadprocessor, upload_dir, build=build_bar_i386)
577
578 # The binary upload was accepted and it's waiting in the queue.
579 queue_items = self.breezy.getQueueItems(
580@@ -760,9 +760,9 @@
581 builds = self.name16.archive.getBuildRecords(name='bar')
582 [build] = builds
583 self.options.context = 'buildd'
584- self.options.buildid = build.id
585 upload_dir = self.queueUpload("bar_1.0-1_binary", "~name16/ubuntu")
586- self.processUpload(self.uploadprocessor, upload_dir)
587+ self.processUpload(
588+ self.uploadprocessor, upload_dir, build=build)
589
590 # The binary upload was accepted and it's waiting in the queue.
591 queue_items = self.breezy.getQueueItems(
592@@ -804,10 +804,9 @@
593 # Binary uploads should exhibit the same behaviour:
594 [build] = self.name16.archive.getBuildRecords(name="bar")
595 self.options.context = 'buildd'
596- self.options.buildid = build.id
597 upload_dir = self.queueUpload(
598 "bar_1.0-1_contrib_binary", "~name16/ubuntu")
599- self.processUpload(self.uploadprocessor, upload_dir)
600+ self.processUpload(self.uploadprocessor, upload_dir, build=build)
601 queue_items = self.breezy.getQueueItems(
602 status=PackageUploadStatus.ACCEPTED, name="bar",
603 version="1.0-1", exact_match=True, archive=self.name16.archive)
604@@ -1306,14 +1305,14 @@
605 builds = self.name16.archive.getBuildRecords(name='bar')
606 [build] = builds
607 self.options.context = 'buildd'
608- self.options.buildid = build.id
609
610 # Stuff 1024 MiB in name16 PPA, so anything will be above the
611 # default quota limit, 1024 MiB.
612 self._fillArchive(self.name16.archive, 1024 * (2 ** 20))
613
614 upload_dir = self.queueUpload("bar_1.0-1_binary", "~name16/ubuntu")
615- self.processUpload(self.uploadprocessor, upload_dir)
616+ self.processUpload(
617+ self.uploadprocessor, upload_dir, build=build)
618
619 # The binary upload was accepted, and it's waiting in the queue.
620 queue_items = self.breezy.getQueueItems(
621
622=== modified file 'lib/lp/archiveuploader/tests/test_recipeuploads.py'
623--- lib/lp/archiveuploader/tests/test_recipeuploads.py 2010-08-27 11:19:54 +0000
624+++ lib/lp/archiveuploader/tests/test_recipeuploads.py 2010-09-17 06:08:57 +0000
625@@ -10,6 +10,9 @@
626 from storm.store import Store
627 from zope.component import getUtility
628
629+from lp.archiveuploader.uploadprocessor import (
630+ UploadStatusEnum,
631+ )
632 from lp.archiveuploader.tests.test_uploadprocessor import (
633 TestUploadProcessorBase,
634 )
635@@ -17,7 +20,6 @@
636 from lp.code.interfaces.sourcepackagerecipebuild import (
637 ISourcePackageRecipeBuildSource,
638 )
639-from lp.soyuz.enums import PackageUploadStatus
640
641
642 class TestSourcePackageRecipeBuildUploads(TestUploadProcessorBase):
643@@ -40,8 +42,7 @@
644 requester=self.recipe.owner)
645
646 Store.of(self.build).flush()
647- self.options.context = 'recipe'
648- self.options.buildid = self.build.id
649+ self.options.context = 'buildd'
650
651 self.uploadprocessor = self.getUploadProcessor(
652 self.layer.txn)
653@@ -54,19 +55,14 @@
654 self.assertIs(None, self.build.source_package_release)
655 self.assertEqual(False, self.build.verifySuccessfulUpload())
656 self.queueUpload('bar_1.0-1', '%d/ubuntu' % self.build.archive.id)
657- self.uploadprocessor.processChangesFile(
658+ result = self.uploadprocessor.processChangesFile(
659 os.path.join(self.queue_folder, "incoming", 'bar_1.0-1'),
660- '%d/ubuntu/bar_1.0-1_source.changes' % self.build.archive.id)
661+ '%d/ubuntu/bar_1.0-1_source.changes' % self.build.archive.id,
662+ build=self.build)
663 self.layer.txn.commit()
664
665- queue_item = self.uploadprocessor.last_processed_upload.queue_root
666- self.assertTrue(
667- queue_item is not None,
668+ self.assertEquals(UploadStatusEnum.ACCEPTED, result,
669 "Source upload failed\nGot: %s" % "\n".join(self.log.lines))
670
671- self.assertEqual(PackageUploadStatus.DONE, queue_item.status)
672- spr = queue_item.sources[0].sourcepackagerelease
673- self.assertEqual(self.build, spr.source_package_recipe_build)
674- self.assertEqual(spr, self.build.source_package_release)
675 self.assertEqual(BuildStatus.FULLYBUILT, self.build.status)
676 self.assertEqual(True, self.build.verifySuccessfulUpload())
677
678=== modified file 'lib/lp/archiveuploader/tests/test_uploadprocessor.py'
679--- lib/lp/archiveuploader/tests/test_uploadprocessor.py 2010-09-17 06:08:54 +0000
680+++ lib/lp/archiveuploader/tests/test_uploadprocessor.py 2010-09-17 06:08:57 +0000
681@@ -18,6 +18,7 @@
682 import tempfile
683 import traceback
684
685+from storm.locals import Store
686 from zope.component import (
687 getGlobalSiteManager,
688 getUtility,
689@@ -153,7 +154,7 @@
690
691 self.options = MockOptions()
692 self.options.base_fsroot = self.queue_folder
693- self.options.builds = True
694+ self.options.builds = False
695 self.options.leafname = None
696 self.options.distro = "ubuntu"
697 self.options.distroseries = None
698@@ -172,9 +173,13 @@
699 super(TestUploadProcessorBase, self).tearDown()
700
701 def getUploadProcessor(self, txn):
702- def getPolicy(distro):
703+ def getPolicy(distro, build):
704 self.options.distro = distro.name
705 policy = findPolicyByName(self.options.context)
706+ if self.options.builds:
707+ policy.distroseries = build.distro_series
708+ policy.pocket = build.pocket
709+ policy.archive = build.archive
710 policy.setOptions(self.options)
711 return policy
712 return UploadProcessor(
713@@ -288,7 +293,7 @@
714 shutil.copytree(upload_dir, target_path)
715 return os.path.join(self.incoming_folder, queue_entry)
716
717- def processUpload(self, processor, upload_dir):
718+ def processUpload(self, processor, upload_dir, build=None):
719 """Process an upload queue entry directory.
720
721 There is some duplication here with logic in UploadProcessor,
722@@ -298,7 +303,8 @@
723 results = []
724 changes_files = processor.locateChangesFiles(upload_dir)
725 for changes_file in changes_files:
726- result = processor.processChangesFile(upload_dir, changes_file)
727+ result = processor.processChangesFile(
728+ upload_dir, changes_file, build=build)
729 results.append(result)
730 return results
731
732@@ -693,10 +699,10 @@
733 # Upload and accept a binary for the primary archive source.
734 shutil.rmtree(upload_dir)
735 self.options.context = 'buildd'
736- self.options.buildid = bar_original_build.id
737 self.layer.txn.commit()
738 upload_dir = self.queueUpload("bar_1.0-1_binary")
739- self.processUpload(uploadprocessor, upload_dir)
740+ self.processUpload(uploadprocessor, upload_dir,
741+ build=bar_original_build)
742 self.assertEqual(
743 uploadprocessor.last_processed_upload.is_rejected, False)
744 bar_bin_pubs = self.publishPackage('bar', '1.0-1', source=False)
745@@ -724,10 +730,10 @@
746
747 shutil.rmtree(upload_dir)
748 self.options.context = 'buildd'
749- self.options.buildid = bar_copied_build.id
750 upload_dir = self.queueUpload(
751 "bar_1.0-1_binary", "%s/ubuntu" % copy_archive.id)
752- self.processUpload(uploadprocessor, upload_dir)
753+ self.processUpload(uploadprocessor, upload_dir,
754+ build=bar_copied_build)
755
756 # Make sure the upload succeeded.
757 self.assertEqual(
758@@ -796,9 +802,9 @@
759 [bar_original_build] = bar_source_pub.createMissingBuilds()
760
761 self.options.context = 'buildd'
762- self.options.buildid = bar_original_build.id
763 upload_dir = self.queueUpload("bar_1.0-1_binary")
764- self.processUpload(uploadprocessor, upload_dir)
765+ self.processUpload(
766+ uploadprocessor, upload_dir, build=bar_original_build)
767 [bar_binary_pub] = self.publishPackage("bar", "1.0-1", source=False)
768
769 # Prepare ubuntu/breezy-autotest to build sources in i386.
770@@ -818,10 +824,10 @@
771 # Re-upload the same 'bar-1.0-1' binary as if it was rebuilt
772 # in breezy-autotest context.
773 shutil.rmtree(upload_dir)
774- self.options.buildid = bar_copied_build.id
775 self.options.distroseries = breezy_autotest.name
776 upload_dir = self.queueUpload("bar_1.0-1_binary")
777- self.processUpload(uploadprocessor, upload_dir)
778+ self.processUpload(uploadprocessor, upload_dir,
779+ build=bar_copied_build)
780 [duplicated_binary_upload] = breezy_autotest.getQueueItems(
781 status=PackageUploadStatus.NEW, name='bar',
782 version='1.0-1', exact_match=True)
783@@ -859,9 +865,9 @@
784 [bar_original_build] = bar_source_pub.getBuilds()
785
786 self.options.context = 'buildd'
787- self.options.buildid = bar_original_build.id
788 upload_dir = self.queueUpload("bar_1.0-2_binary")
789- self.processUpload(uploadprocessor, upload_dir)
790+ self.processUpload(uploadprocessor, upload_dir,
791+ build=bar_original_build)
792 [bar_binary_pub] = self.publishPackage("bar", "1.0-2", source=False)
793
794 # Create a COPY archive for building in non-virtual builds.
795@@ -878,10 +884,10 @@
796 [bar_copied_build] = bar_copied_source.createMissingBuilds()
797
798 shutil.rmtree(upload_dir)
799- self.options.buildid = bar_copied_build.id
800 upload_dir = self.queueUpload(
801 "bar_1.0-1_binary", "%s/ubuntu" % copy_archive.id)
802- self.processUpload(uploadprocessor, upload_dir)
803+ self.processUpload(uploadprocessor, upload_dir,
804+ build=bar_copied_build)
805
806 # The binary just uploaded is accepted because it's destined for a
807 # copy archive and the PRIMARY and the COPY archives are isolated
808@@ -1034,9 +1040,9 @@
809 self.breezy['i386'], PackagePublishingPocket.RELEASE,
810 self.ubuntu.main_archive)
811 self.layer.txn.commit()
812- self.options.buildid = foocomm_build.id
813 upload_dir = self.queueUpload("foocomm_1.0-1_binary")
814- self.processUpload(uploadprocessor, upload_dir)
815+ self.processUpload(
816+ uploadprocessor, upload_dir, build=foocomm_build)
817
818 contents = [
819 "Subject: foocomm_1.0-1_i386.changes rejected",
820@@ -1044,10 +1050,8 @@
821 "where they don't fit."]
822 self.assertEmail(contents)
823
824- # Reset upload queue directory for a new upload and the
825- # uploadprocessor buildid option.
826+ # Reset upload queue directory for a new upload.
827 shutil.rmtree(upload_dir)
828- self.options.buildid = None
829
830 # Now upload a binary package of 'foocomm', letting a new build record
831 # with appropriate data be created by the uploadprocessor.
832@@ -1881,7 +1885,7 @@
833 self.assertLogContains(
834 "Unable to find package build job with id 42. Skipping.")
835
836- def testNoFiles(self):
837+ def testBinaryPackageBuild_fail(self):
838 # If the upload directory is empty, the upload
839 # will fail.
840
841@@ -1905,6 +1909,8 @@
842
843 # Upload and accept a binary for the primary archive source.
844 shutil.rmtree(upload_dir)
845+
846+ # Commit so the build cookie has the right ids.
847 self.layer.txn.commit()
848 leaf_name = build.getUploadDirLeaf(build.getBuildCookie())
849 os.mkdir(os.path.join(self.incoming_folder, leaf_name))
850@@ -1925,7 +1931,7 @@
851 self.assertTrue('DEBUG: Moving upload directory '
852 in log_contents)
853
854- def testSuccess(self):
855+ def testBinaryPackageBuilds(self):
856 # Properly uploaded binaries should result in the
857 # build status changing to FULLYBUILT.
858 # Upload a source package
859@@ -1946,6 +1952,8 @@
860
861 # Upload and accept a binary for the primary archive source.
862 shutil.rmtree(upload_dir)
863+
864+ # Commit so the build cookie has the right ids.
865 self.layer.txn.commit()
866 leaf_name = build.getUploadDirLeaf(build.getBuildCookie())
867 upload_dir = self.queueUpload("bar_1.0-1_binary",
868@@ -1959,13 +1967,74 @@
869 # No emails are sent on success
870 self.assertEquals(len(stub.test_emails), last_stub_mail_count)
871 self.assertEquals(BuildStatus.FULLYBUILT, build.status)
872- log_contents = build.upload_log.read()
873- log_lines = log_contents.splitlines()
874- self.assertTrue(
875- 'INFO: Processing upload bar_1.0-1_i386.changes' in log_lines)
876- self.assertTrue(
877- 'INFO: Committing the transaction and any mails associated with '
878- 'this upload.' in log_lines)
879+ # Upon full build the upload log is unset.
880+ self.assertIs(None, build.upload_log)
881+
882+ def testSourcePackageRecipeBuild(self):
883+ # Properly uploaded source packages should result in the
884+ # build status changing to FULLYBUILT.
885+
886+ # Upload a source package
887+ archive = self.factory.makeArchive()
888+ archive.require_virtualized = False
889+ build = self.factory.makeSourcePackageRecipeBuild(sourcename=u"bar",
890+ distroseries=self.breezy, archive=archive, requester=archive.owner)
891+ self.assertEquals(archive.owner, build.requester)
892+ bq = self.factory.makeSourcePackageRecipeBuildJob(recipe_build=build)
893+ # Commit so the build cookie has the right ids.
894+ self.layer.txn.commit()
895+ leaf_name = build.getUploadDirLeaf(build.getBuildCookie())
896+ relative_path = "~%s/%s/%s/%s" % (
897+ archive.owner.name, archive.name, self.breezy.distribution.name,
898+ self.breezy.name)
899+ upload_dir = self.queueUpload(
900+ "bar_1.0-1", queue_entry=leaf_name, relative_path=relative_path)
901+ self.options.context = 'buildd'
902+ self.options.builds = True
903+ build.jobStarted()
904+ # Commit so date_started is recorded and doesn't cause constraint
905+ # violations later.
906+ build.status = BuildStatus.UPLOADING
907+ Store.of(build).flush()
908+ self.uploadprocessor.processBuildUpload(
909+ self.incoming_folder, leaf_name)
910+ self.layer.txn.commit()
911+
912+ self.assertEquals(BuildStatus.FULLYBUILT, build.status)
913+ self.assertEquals(None, build.builder)
914+ self.assertIsNot(None, build.date_finished)
915+ self.assertIsNot(None, build.duration)
916+ # Upon full build the upload log is unset.
917+ self.assertIs(None, build.upload_log)
918+
919+ def testSourcePackageRecipeBuild_fail(self):
920+ # A source package recipe build will fail if no files are present.
921+
922+ # Upload a source package
923+ archive = self.factory.makeArchive()
924+ archive.require_virtualized = False
925+ build = self.factory.makeSourcePackageRecipeBuild(sourcename=u"bar",
926+ distroseries=self.breezy, archive=archive)
927+ bq = self.factory.makeSourcePackageRecipeBuildJob(recipe_build=build)
928+ # Commit so the build cookie has the right ids.
929+ Store.of(build).flush()
930+ leaf_name = build.getUploadDirLeaf(build.getBuildCookie())
931+ os.mkdir(os.path.join(self.incoming_folder, leaf_name))
932+ self.options.context = 'buildd'
933+ self.options.builds = True
934+ build.jobStarted()
935+ # Commit so date_started is recorded and doesn't cause constraint
936+ # violations later.
937+ Store.of(build).flush()
938+ build.status = BuildStatus.UPLOADING
939+ self.uploadprocessor.processBuildUpload(
940+ self.incoming_folder, leaf_name)
941+ self.layer.txn.commit()
942+ self.assertEquals(BuildStatus.FAILEDTOUPLOAD, build.status)
943+ self.assertEquals(None, build.builder)
944+ self.assertIsNot(None, build.date_finished)
945+ self.assertIsNot(None, build.duration)
946+ self.assertIsNot(None, build.upload_log)
947
948
949 class ParseBuildUploadLeafNameTests(TestCase):
950
951=== modified file 'lib/lp/archiveuploader/tests/uploadpolicy.txt'
952--- lib/lp/archiveuploader/tests/uploadpolicy.txt 2010-08-18 14:03:15 +0000
953+++ lib/lp/archiveuploader/tests/uploadpolicy.txt 2010-09-17 06:08:57 +0000
954@@ -53,23 +53,16 @@
955 ... distro = 'ubuntu'
956 ... distroseries = None
957 >>> class MockOptions(MockAbstractOptions):
958- ... buildid = 1
959+ ... builds = True
960
961 >>> ab_opts = MockAbstractOptions()
962 >>> bd_opts = MockOptions()
963
964 >>> insecure_policy.setOptions(ab_opts)
965- >>> insecure_policy.options is ab_opts
966- True
967 >>> insecure_policy.distro.name
968 u'ubuntu'
969 >>> buildd_policy.setOptions(ab_opts)
970- Traceback (most recent call last):
971- ...
972- UploadPolicyError: BuildID required for buildd context
973 >>> buildd_policy.setOptions(bd_opts)
974- >>> buildd_policy.options is bd_opts
975- True
976 >>> buildd_policy.distro.name
977 u'ubuntu'
978
979
980=== modified file 'lib/lp/archiveuploader/uploadpolicy.py'
981--- lib/lp/archiveuploader/uploadpolicy.py 2010-08-25 13:04:14 +0000
982+++ lib/lp/archiveuploader/uploadpolicy.py 2010-09-17 06:08:57 +0000
983@@ -11,7 +11,6 @@
984 "BuildDaemonUploadPolicy",
985 "findPolicyByName",
986 "IArchiveUploadPolicy",
987- "SOURCE_PACKAGE_RECIPE_UPLOAD_POLICY_NAME",
988 "UploadPolicyError",
989 ]
990
991@@ -34,8 +33,6 @@
992 from lazr.enum import EnumeratedType, Item
993
994
995-# Defined here so that uploadpolicy.py doesn't depend on lp.code.
996-SOURCE_PACKAGE_RECIPE_UPLOAD_POLICY_NAME = 'recipe'
997 # Number of seconds in an hour (used later)
998 HOURS = 3600
999
1000@@ -128,13 +125,8 @@
1001 raise AssertionError(
1002 "Upload is not sourceful, binaryful or mixed.")
1003
1004- def getUploader(self, changes):
1005- """Get the person who is doing the uploading."""
1006- return changes.signer
1007-
1008 def setOptions(self, options):
1009 """Store the options for later."""
1010- self.options = options
1011 # Extract and locate the distribution though...
1012 self.distro = getUtility(IDistributionSet)[options.distro]
1013 if options.distroseries is not None:
1014@@ -324,7 +316,6 @@
1015 """The build daemon upload policy is invoked by the slave scanner."""
1016
1017 name = 'buildd'
1018- accepted_type = ArchiveUploadType.BINARY_ONLY
1019
1020 def __init__(self):
1021 super(BuildDaemonUploadPolicy, self).__init__()
1022@@ -333,11 +324,9 @@
1023 self.unsigned_dsc_ok = True
1024
1025 def setOptions(self, options):
1026- AbstractUploadPolicy.setOptions(self, options)
1027- # We require a buildid to be provided
1028- if (getattr(options, 'buildid', None) is None and
1029- not getattr(options, 'builds', False)):
1030- raise UploadPolicyError("BuildID required for buildd context")
1031+ """Store the options for later."""
1032+ super(BuildDaemonUploadPolicy, self).setOptions(options)
1033+ options.builds = True
1034
1035 def policySpecificChecks(self, upload):
1036 """The buildd policy should enforce that the buildid matches."""
1037@@ -349,6 +338,15 @@
1038 """Buildd policy allows PPA upload."""
1039 return False
1040
1041+ def validateUploadType(self, upload):
1042+ if upload.sourceful and upload.binaryful:
1043+ if self.accepted_type != ArchiveUploadType.MIXED_ONLY:
1044+ upload.reject(
1045+ "Source/binary (i.e. mixed) uploads are not allowed.")
1046+ elif not upload.sourceful and not upload.binaryful:
1047+ raise AssertionError(
1048+ "Upload is not sourceful, binaryful or mixed.")
1049+
1050
1051 class SyncUploadPolicy(AbstractUploadPolicy):
1052 """This policy is invoked when processing sync uploads."""
1053
1054=== modified file 'lib/lp/archiveuploader/uploadprocessor.py'
1055--- lib/lp/archiveuploader/uploadprocessor.py 2010-09-17 06:08:54 +0000
1056+++ lib/lp/archiveuploader/uploadprocessor.py 2010-09-17 06:08:57 +0000
1057@@ -71,7 +71,6 @@
1058 )
1059 from lp.archiveuploader.uploadpolicy import (
1060 BuildDaemonUploadPolicy,
1061- SOURCE_PACKAGE_RECIPE_UPLOAD_POLICY_NAME,
1062 UploadPolicyError,
1063 )
1064 from lp.buildmaster.enums import (
1065@@ -207,6 +206,7 @@
1066 The name of the leaf is the build id of the build.
1067 Build uploads always contain a single package per leaf.
1068 """
1069+ upload_path = os.path.join(fsroot, upload)
1070 try:
1071 job_id = parse_build_upload_leaf_name(upload)
1072 except ValueError:
1073@@ -220,20 +220,20 @@
1074 "Unable to find package build job with id %d. Skipping." %
1075 job_id)
1076 return
1077+ logger = BufferLogger()
1078 build = buildfarm_job.getSpecificJob()
1079 if build.status != BuildStatus.UPLOADING:
1080 self.log.warn(
1081- "Expected build status to be 'UPLOADING', was %s. Skipping.",
1082- build.status.name)
1083+ "Expected build status to be 'UPLOADING', was %s. "
1084+ "Moving to failed.", build.status.name)
1085+ self.moveProcessedUpload(upload_path, "failed", logger)
1086 return
1087 self.log.debug("Build %s found" % build.id)
1088- logger = BufferLogger()
1089- upload_path = os.path.join(fsroot, upload)
1090 try:
1091 [changes_file] = self.locateChangesFiles(upload_path)
1092 logger.debug("Considering changefile %s" % changes_file)
1093 result = self.processChangesFile(
1094- upload_path, changes_file, logger)
1095+ upload_path, changes_file, logger, build)
1096 except (KeyboardInterrupt, SystemExit):
1097 raise
1098 except:
1099@@ -251,16 +251,13 @@
1100 UploadStatusEnum.REJECTED: "rejected",
1101 UploadStatusEnum.ACCEPTED: "accepted"}[result]
1102 self.moveProcessedUpload(upload_path, destination, logger)
1103+ build.date_finished = datetime.datetime.now(pytz.UTC)
1104 if not (result == UploadStatusEnum.ACCEPTED and
1105 build.verifySuccessfulUpload() and
1106 build.status == BuildStatus.FULLYBUILT):
1107 build.status = BuildStatus.FAILEDTOUPLOAD
1108- build.date_finished = datetime.datetime.now(pytz.UTC)
1109 build.notify(extra_info="Uploading build %s failed." % upload)
1110- build.storeUploadLog(logger.buffer.getvalue())
1111-
1112- # Remove BuildQueue record.
1113- build.buildqueue_record.destroySelf()
1114+ build.storeUploadLog(logger.buffer.getvalue())
1115
1116 def processUpload(self, fsroot, upload):
1117 """Process an upload's changes files, and move it to a new directory.
1118@@ -376,7 +373,8 @@
1119 os.path.join(relative_path, filename))
1120 return self.orderFilenames(changes_files)
1121
1122- def processChangesFile(self, upload_path, changes_file, logger=None):
1123+ def processChangesFile(self, upload_path, changes_file, logger=None,
1124+ build=None):
1125 """Process a single changes file.
1126
1127 This is done by obtaining the appropriate upload policy (according
1128@@ -432,7 +430,7 @@
1129 "https://help.launchpad.net/Packaging/PPA#Uploading "
1130 "and update your configuration.")))
1131 logger.debug("Finding fresh policy")
1132- policy = self._getPolicyForDistro(distribution)
1133+ policy = self._getPolicyForDistro(distribution, build)
1134 policy.archive = archive
1135
1136 # DistroSeries overriding respect the following precedence:
1137@@ -450,10 +448,8 @@
1138
1139 # Reject source upload to buildd upload paths.
1140 first_path = relative_path.split(os.path.sep)[0]
1141- is_not_buildd_nor_recipe_policy = policy.name not in [
1142- SOURCE_PACKAGE_RECIPE_UPLOAD_POLICY_NAME,
1143- BuildDaemonUploadPolicy.name]
1144- if first_path.isdigit() and is_not_buildd_nor_recipe_policy:
1145+ if (first_path.isdigit() and
1146+ policy.name != BuildDaemonUploadPolicy.name):
1147 error_message = (
1148 "Invalid upload path (%s) for this policy (%s)" %
1149 (relative_path, policy.name))
1150@@ -472,7 +468,7 @@
1151 result = UploadStatusEnum.ACCEPTED
1152
1153 try:
1154- upload.process()
1155+ upload.process(build)
1156 except UploadPolicyError, e:
1157 upload.reject("UploadPolicyError escaped upload.process: "
1158 "%s " % e)
1159@@ -513,7 +509,8 @@
1160 upload.do_reject(notify)
1161 self.ztm.abort()
1162 else:
1163- successful = upload.do_accept(notify=notify)
1164+ successful = upload.do_accept(
1165+ notify=notify, build=build)
1166 if not successful:
1167 result = UploadStatusEnum.REJECTED
1168 logger.info(
1169
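For reviewers following the processBuildUpload() changes above: uploads whose build is not in the UPLOADING state are now moved to "failed" rather than silently skipped, date_finished and the upload log are recorded for every outcome, and the BuildQueue cleanup has moved out of the processor (see packagebuild.py below). A minimal, self-contained sketch of the resulting flow; the stub class and helper arguments are illustrative assumptions, not Launchpad code:

    import datetime

    import pytz


    class FakeBuild:
        """Illustrative stand-in for a package build record."""

        def __init__(self):
            self.status = 'UPLOADING'
            self.date_finished = None
            self.upload_log = None

        def verifySuccessfulUpload(self):
            return True

        def storeUploadLog(self, content):
            self.upload_log = content


    def process_build_upload(build, process_changes, move_upload, log_text):
        """Heavily simplified mirror of the new processBuildUpload() flow."""
        if build.status != 'UPLOADING':
            # Unexpected state: move the upload aside instead of skipping it.
            move_upload('failed')
            return
        result = process_changes(build)  # 'accepted' or 'rejected'
        move_upload(result)
        # Recorded for every outcome, not only for failures.
        build.date_finished = datetime.datetime.now(pytz.UTC)
        if not (result == 'accepted' and build.verifySuccessfulUpload()):
            build.status = 'FAILEDTOUPLOAD'
        build.storeUploadLog(log_text)

    # Example: a successful upload leaves the sketch's status untouched.
    process_build_upload(
        FakeBuild(), lambda build: 'accepted', lambda dest: None, 'log text')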
1170=== modified file 'lib/lp/buildmaster/interfaces/packagebuild.py'
1171--- lib/lp/buildmaster/interfaces/packagebuild.py 2010-09-17 06:08:54 +0000
1172+++ lib/lp/buildmaster/interfaces/packagebuild.py 2010-09-17 06:08:57 +0000
1173@@ -71,10 +71,6 @@
1174 title=_('Build farm job'), schema=IBuildFarmJob, required=True,
1175 readonly=True, description=_('The base build farm job.'))
1176
1177- policy_name = TextLine(
1178- title=_("Policy name"), required=True,
1179- description=_("The upload policy to use for handling these builds."))
1180-
1181 current_component = Attribute(
1182 'Component where the source related to this build was last '
1183 'published.')
1184@@ -149,6 +145,14 @@
1185 created in a suspended state.
1186 """
1187
1188+ def getUploader(changes):
1189+ """Return the person responsible for the upload.
1190+
1191+ This is used to when checking permissions.
1191+        This is used when checking permissions.
1192+
1193+ :param changes: Changes file from the upload.
1194+ """
1195+
1196
1197 class IPackageBuildSource(Interface):
1198 """A utility of this interface used to create _things_."""
1199
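The new getUploader() contract moves the "who is the uploader?" question from the upload policy onto the build itself; the concrete answers appear further down in binarypackagebuild.py and sourcepackagerecipebuild.py. A simplified illustration of the dispatch this buys, using stub classes rather than the real models:

    class RecipeBuildStub:
        """Illustrative stand-in for SourcePackageRecipeBuild."""

        def __init__(self, requester):
            self.requester = requester

        def getUploader(self, changes):
            # Recipe builds are attributed to whoever requested the build.
            return self.requester


    class BinaryBuildStub:
        """Illustrative stand-in for BinaryPackageBuild."""

        def getUploader(self, changes):
            # Binary builds are attributed to the signer of the changes file.
            return changes.signer

The buildd policy can then ask build.getUploader(changes) for its ACL checks without caring which kind of build produced the upload.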
1200=== modified file 'lib/lp/buildmaster/model/packagebuild.py'
1201--- lib/lp/buildmaster/model/packagebuild.py 2010-09-17 06:08:54 +0000
1202+++ lib/lp/buildmaster/model/packagebuild.py 2010-09-17 06:08:57 +0000
1203@@ -94,8 +94,6 @@
1204 build_farm_job_id = Int(name='build_farm_job', allow_none=False)
1205 build_farm_job = Reference(build_farm_job_id, 'BuildFarmJob.id')
1206
1207- policy_name = 'buildd'
1208-
1209 # The following two properties are part of the IPackageBuild
1210 # interface, but need to be provided by derived classes.
1211 distribution = None
1212@@ -239,6 +237,10 @@
1213 """See `IPackageBuild`."""
1214 raise NotImplementedError
1215
1216+ def getUploader(self, changes):
1217+ """See `IPackageBuild`."""
1218+ raise NotImplementedError
1219+
1220
1221 class PackageBuildDerived:
1222 """Setup the delegation for package build.
1223@@ -352,6 +354,10 @@
1224 if not os.path.exists(target_dir):
1225 os.mkdir(target_dir)
1226
1227+        # Flush so the archiveuploader does not race with us over
1228+        # self.status.
1229+ Store.of(self).flush()
1230+
1231 # Move the directory used to grab the binaries into
1232 # the incoming directory so the upload processor never
1233 # sees half-finished uploads.
1234@@ -360,6 +366,9 @@
1235 # Release the builder for another job.
1236 self.buildqueue_record.builder.cleanSlave()
1237
1238+ # Remove BuildQueue record.
1239+ self.buildqueue_record.destroySelf()
1240+
1241 def _handleStatus_PACKAGEFAIL(self, librarian, slave_status, logger):
1242 """Handle a package that had failed to build.
1243
1244
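The ordering in _handleStatus_OK above is deliberate: the pending status change is flushed before the upload directory is moved into incoming, so the separate upload processor never sees a stale build, and the BuildQueue row is destroyed here (it used to be destroyed by the upload processor) so the builder is released as soon as its output has been grabbed. A rough sketch of that ordering; the helper names are assumptions, not Launchpad APIs:

    def handle_status_ok(build, store, move_into_incoming):
        # 1. Publish the build's new status before the upload is visible.
        store.flush()
        # 2. Only now expose the upload to the processor.
        move_into_incoming()
        # 3. Free the builder and drop the queue entry; the upload
        #    processor no longer does this.
        build.buildqueue_record.builder.cleanSlave()
        build.buildqueue_record.destroySelf()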
1245=== modified file 'lib/lp/buildmaster/tests/test_packagebuild.py'
1246--- lib/lp/buildmaster/tests/test_packagebuild.py 2010-09-17 06:08:54 +0000
1247+++ lib/lp/buildmaster/tests/test_packagebuild.py 2010-09-17 06:08:57 +0000
1248@@ -105,7 +105,6 @@
1249
1250 def test_default_values(self):
1251 # PackageBuild has a number of default values.
1252- self.failUnlessEqual('buildd', self.package_build.policy_name)
1253 self.failUnlessEqual(
1254 'multiverse', self.package_build.current_component.name)
1255 self.failUnlessEqual(None, self.package_build.distribution)
1256@@ -327,6 +326,7 @@
1257 })
1258 self.assertEqual(BuildStatus.FAILEDTOUPLOAD, self.build.status)
1259 self.assertResultCount(0, "failed")
1260+ self.assertIs(None, self.build.buildqueue_record)
1261
1262 def test_handleStatus_OK_relative_filepath(self):
1263 # A filemap that tries to write to files outside of
1264
1265=== modified file 'lib/lp/code/configure.zcml'
1266--- lib/lp/code/configure.zcml 2010-09-13 04:56:29 +0000
1267+++ lib/lp/code/configure.zcml 2010-09-17 06:08:57 +0000
1268@@ -923,7 +923,7 @@
1269 <require permission="launchpad.View" interface="lp.code.interfaces.sourcepackagerecipebuild.ISourcePackageRecipeBuild"/>
1270 <!-- This is needed for UploadProcessor to run. The permission isn't
1271 important; launchpad.Edit isn't actually held by anybody. -->
1272- <require permission="launchpad.Edit" set_attributes="status upload_log" />
1273+ <require permission="launchpad.Edit" set_attributes="status upload_log date_finished requester" />
1274 </class>
1275
1276 <securedutility
1277@@ -988,10 +988,6 @@
1278 name="RECIPEBRANCHBUILD"
1279 provides="lp.buildmaster.interfaces.buildfarmjob.IBuildFarmJob"/>
1280
1281- <call
1282- callable="lp.code.model.sourcepackagerecipebuild.register_archive_upload_policy_adapter"
1283- />
1284-
1285 <webservice:register module="lp.code.interfaces.webservice" />
1286 <adapter
1287 provides="lp.buildmaster.interfaces.buildfarmjob.ISpecificBuildFarmJob"
1288
1289=== modified file 'lib/lp/code/model/sourcepackagerecipebuild.py'
1290--- lib/lp/code/model/sourcepackagerecipebuild.py 2010-09-09 17:02:33 +0000
1291+++ lib/lp/code/model/sourcepackagerecipebuild.py 2010-09-17 06:08:57 +0000
1292@@ -22,7 +22,6 @@
1293 )
1294 from storm.store import Store
1295 from zope.component import (
1296- getGlobalSiteManager,
1297 getUtility,
1298 )
1299 from zope.interface import (
1300@@ -39,12 +38,6 @@
1301 )
1302 from canonical.launchpad.webapp import errorlog
1303 from lp.app.errors import NotFoundError
1304-from lp.archiveuploader.uploadpolicy import (
1305- ArchiveUploadType,
1306- BuildDaemonUploadPolicy,
1307- IArchiveUploadPolicy,
1308- SOURCE_PACKAGE_RECIPE_UPLOAD_POLICY_NAME,
1309- )
1310 from lp.buildmaster.enums import (
1311 BuildFarmJobType,
1312 BuildStatus,
1313@@ -77,25 +70,10 @@
1314 from lp.soyuz.model.sourcepackagerelease import SourcePackageRelease
1315
1316
1317-class SourcePackageRecipeUploadPolicy(BuildDaemonUploadPolicy):
1318- """Policy for uploading the results of a source package recipe build."""
1319-
1320- name = SOURCE_PACKAGE_RECIPE_UPLOAD_POLICY_NAME
1321- accepted_type = ArchiveUploadType.SOURCE_ONLY
1322-
1323- def getUploader(self, changes):
1324- """Return the person doing the upload."""
1325- build_id = int(getattr(self.options, 'buildid'))
1326- sprb = getUtility(ISourcePackageRecipeBuildSource).getById(build_id)
1327- return sprb.requester
1328-
1329-
1330 class SourcePackageRecipeBuild(PackageBuildDerived, Storm):
1331
1332 __storm_table__ = 'SourcePackageRecipeBuild'
1333
1334- policy_name = SourcePackageRecipeUploadPolicy.name
1335-
1336 implements(ISourcePackageRecipeBuild)
1337 classProvides(ISourcePackageRecipeBuildSource)
1338
1339@@ -333,6 +311,10 @@
1340 if self.status == BuildStatus.FULLYBUILT:
1341 self.notify()
1342
1343+ def getUploader(self, changes):
1344+ """See `IPackageBuild`."""
1345+ return self.requester
1346+
1347
1348 class SourcePackageRecipeBuildJob(BuildFarmJobOldDerived, Storm):
1349 classProvides(ISourcePackageRecipeBuildJobSource)
1350@@ -384,13 +366,6 @@
1351 return 2505 + self.build.archive.relative_build_score
1352
1353
1354-def register_archive_upload_policy_adapter():
1355- getGlobalSiteManager().registerUtility(
1356- component=SourcePackageRecipeUploadPolicy,
1357- provided=IArchiveUploadPolicy,
1358- name=SourcePackageRecipeUploadPolicy.name)
1359-
1360-
1361 def get_recipe_build_for_build_farm_job(build_farm_job):
1362 """Return the SourcePackageRecipeBuild associated with a BuildFarmJob."""
1363 store = Store.of(build_farm_job)
1364
1365=== modified file 'lib/lp/code/model/tests/test_sourcepackagerecipebuild.py'
1366--- lib/lp/code/model/tests/test_sourcepackagerecipebuild.py 2010-09-17 06:08:54 +0000
1367+++ lib/lp/code/model/tests/test_sourcepackagerecipebuild.py 2010-09-17 06:08:57 +0000
1368@@ -309,6 +309,12 @@
1369 job = sprb.build_farm_job.getSpecificJob()
1370 self.assertEqual(sprb, job)
1371
1372+ def test_getUploader(self):
1373+ # For ACL purposes the uploader is the build requester.
1374+ build = self.makeSourcePackageRecipeBuild()
1375+ self.assertEquals(build.requester,
1376+ build.getUploader(None))
1377+
1378
1379 class TestAsBuildmaster(TestCaseWithFactory):
1380
1381
1382=== modified file 'lib/lp/soyuz/doc/build-failedtoupload-workflow.txt'
1383--- lib/lp/soyuz/doc/build-failedtoupload-workflow.txt 2010-08-04 00:16:44 +0000
1384+++ lib/lp/soyuz/doc/build-failedtoupload-workflow.txt 2010-09-17 06:08:57 +0000
1385@@ -162,8 +162,7 @@
1386 >>> buildd_policy = getPolicy(
1387 ... name='buildd',
1388 ... distro=failedtoupload_candidate.distribution.name,
1389- ... distroseries=failedtoupload_candidate.distro_series.name,
1390- ... buildid=failedtoupload_candidate.id)
1391+ ... distroseries=failedtoupload_candidate.distro_series.name)
1392
1393 >>> cdrkit_bin_upload = NascentUpload.from_changesfile_path(
1394 ... datadir('suite/cdrkit_1.0/cdrkit_1.0_i386.changes'),
1395@@ -171,7 +170,7 @@
1396 >>> cdrkit_bin_upload.process()
1397 >>> cdrkit_bin_upload.is_rejected
1398 False
1399- >>> success = cdrkit_bin_upload.do_accept()
1400+ >>> success = cdrkit_bin_upload.do_accept(build=failedtoupload_candidate)
1401 >>> print cdrkit_bin_upload.queue_root.status.name
1402 NEW
1403
1404
1405=== modified file 'lib/lp/soyuz/doc/buildd-slavescanner.txt'
1406--- lib/lp/soyuz/doc/buildd-slavescanner.txt 2010-09-17 06:08:54 +0000
1407+++ lib/lp/soyuz/doc/buildd-slavescanner.txt 2010-09-17 06:08:57 +0000
1408@@ -339,8 +339,6 @@
1409 >>> build.status.title
1410 'Uploading build'
1411
1412- >>> bqItem10.destroySelf()
1413-
1414 === Successfully collected and uploaded (FULLYBUILT) ===
1415
1416 Build item 6 has binary packages available in the sample data, letting us test
1417@@ -1062,7 +1060,6 @@
1418 True
1419 >>> print lfa.filename
1420 buildlog_ubuntu-hoary-i386.mozilla-firefox_0.9_BUILDING.txt.gz
1421- >>> candidate.destroySelf()
1422
1423 The attempt to fetch the buildlog from the common librarian will fail
1424 since this is a build in a private archive and the buildlog was thus
1425
1426=== modified file 'lib/lp/soyuz/doc/distroseriesqueue-translations.txt'
1427--- lib/lp/soyuz/doc/distroseriesqueue-translations.txt 2010-08-24 15:29:01 +0000
1428+++ lib/lp/soyuz/doc/distroseriesqueue-translations.txt 2010-09-17 06:08:57 +0000
1429@@ -74,15 +74,14 @@
1430 ... dapper_amd64, PackagePublishingPocket.RELEASE, dapper.main_archive)
1431
1432 >>> buildd_policy = getPolicy(
1433- ... name='buildd', distro='ubuntu', distroseries='dapper',
1434- ... buildid=build.id)
1435+ ... name='buildd', distro='ubuntu', distroseries='dapper')
1436
1437 >>> pmount_upload = NascentUpload.from_changesfile_path(
1438 ... datadir('pmount_0.9.7-2ubuntu2_amd64.changes'),
1439 ... buildd_policy, mock_logger)
1440 DEBUG: Changes file can be unsigned.
1441
1442- >>> pmount_upload.process()
1443+ >>> pmount_upload.process(build=build)
1444 DEBUG: Beginning processing.
1445 DEBUG: Verifying the changes file.
1446 DEBUG: Verifying files in upload.
1447@@ -105,9 +104,8 @@
1448 >>> print len(dapper_pmount.getLatestTranslationsUploads())
1449 0
1450
1451- >>> success = pmount_upload.do_accept()
1452+ >>> success = pmount_upload.do_accept(build=build)
1453 DEBUG: Creating queue entry
1454- DEBUG: Build ... found
1455 ...
1456
1457 # And all things worked.
1458
1459=== modified file 'lib/lp/soyuz/doc/soyuz-set-of-uploads.txt'
1460--- lib/lp/soyuz/doc/soyuz-set-of-uploads.txt 2010-08-30 02:07:38 +0000
1461+++ lib/lp/soyuz/doc/soyuz-set-of-uploads.txt 2010-09-17 06:08:57 +0000
1462@@ -119,21 +119,17 @@
1463 >>> from lp.soyuz.scripts.soyuz_process_upload import (
1464 ... ProcessUpload)
1465 >>> from canonical.testing import LaunchpadZopelessLayer
1466- >>> def process_uploads(upload_policy, build_id, series, loglevel):
1467+ >>> def process_uploads(upload_policy, series, loglevel):
1468 ... """Simulate process-upload.py script run.
1469 ...
1470 ... :param upload_policy: context in which to consider the upload
1471 ... (equivalent to script's --context option).
1472- ... :param build_id: build to which to attach this upload.
1473- ... (equivalent to script's --buildid option).
1474 ... :param series: distro series to give back from.
1475 ... (equivalent to script's --series option).
1476 ... :param loglevel: logging level (as defined in logging module). Any
1477 ... log messages below this level will be suppressed.
1478 ... """
1479 ... args = [temp_dir, "-C", upload_policy]
1480- ... if build_id is not None:
1481- ... args.extend(["-b", build_id])
1482 ... if series is not None:
1483 ... args.extend(["-s", series])
1484 ... # Run script under 'uploader' DB user. The dbuser argument to the
1485@@ -230,11 +226,11 @@
1486 >>> from lp.services.mail import stub
1487
1488 >>> def simulate_upload(
1489- ... leafname, is_new=False, upload_policy='anything', build_id=None,
1490+ ... leafname, is_new=False, upload_policy='anything',
1491 ... series=None, distro="ubuntutest", loglevel=logging.WARN):
1492 ... """Process upload(s). Options are as for process_uploads()."""
1493 ... punt_upload_into_queue(leafname, distro=distro)
1494- ... process_uploads(upload_policy, build_id, series, loglevel)
1495+ ... process_uploads(upload_policy, series, loglevel)
1496 ... # We seem to be leaving a lock file behind here for some reason.
1497 ... # Naturally it doesn't count as an unprocessed incoming file, which
1498 ... # is what we're really looking for.
1499@@ -289,19 +285,6 @@
1500
1501 >>> simulate_upload('bar_1.0-2')
1502
1503-Check the rejection of bar_1.0-2_binary when uploaded to the wrong build id.
1504-
1505- >>> simulate_upload(
1506- ... 'bar_1.0-2_binary', upload_policy="buildd", build_id="2",
1507- ... loglevel=logging.ERROR)
1508- log> Exception while accepting:
1509- Attempt to upload binaries specifying build 2, where they don't fit.
1510- ...
1511- Rejected uploads: ['bar_1.0-2_binary']
1512-
1513-Try it again without the bogus build id. This succeeds without
1514-complaints.
1515-
1516 >>> simulate_upload('bar_1.0-2_binary')
1517
1518 Check the rejection of a malicious version of bar package which refers
1519
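With the --buildid plumbing gone, the doctest helpers above take only the policy, the series and the log level. Purely as an illustration of the new call shape (not part of the doctest, and shown without expected output):

    >>> process_uploads('anything', series=None, loglevel=logging.WARN)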
1520=== modified file 'lib/lp/soyuz/model/binarypackagebuild.py'
1521--- lib/lp/soyuz/model/binarypackagebuild.py 2010-09-17 06:08:54 +0000
1522+++ lib/lp/soyuz/model/binarypackagebuild.py 2010-09-17 06:08:57 +0000
1523@@ -760,6 +760,10 @@
1524 # package build, then don't hit the db.
1525 return self
1526
1527+ def getUploader(self, changes):
1528+ """See `IBinaryPackageBuild`."""
1529+ return changes.signer
1530+
1531
1532 class BinaryPackageBuildSet:
1533 implements(IBinaryPackageBuildSet)
1534
1535=== modified file 'lib/lp/soyuz/scripts/soyuz_process_upload.py'
1536--- lib/lp/soyuz/scripts/soyuz_process_upload.py 2010-08-20 20:31:18 +0000
1537+++ lib/lp/soyuz/scripts/soyuz_process_upload.py 2010-09-17 06:08:57 +0000
1538@@ -61,11 +61,6 @@
1539 help="Distro series to give back from.")
1540
1541 self.parser.add_option(
1542- "-b", "--buildid", action="store", type="int", dest="buildid",
1543- metavar="BUILD",
1544- help="The build ID to which to attach this upload.")
1545-
1546- self.parser.add_option(
1547 "-a", "--announce", action="store", dest="announcelist",
1548 metavar="ANNOUNCELIST", help="Override the announcement list")
1549
1550@@ -82,10 +77,15 @@
1551 "%s is not a directory" % self.options.base_fsroot)
1552
1553 self.logger.debug("Initialising connection.")
1554- def getPolicy(distro):
1555+ def getPolicy(distro, build):
1556 self.options.distro = distro.name
1557 policy = findPolicyByName(self.options.context)
1558 policy.setOptions(self.options)
1559+ if self.options.builds:
1560+ assert build, "--builds specified but no build"
1561+ policy.distroseries = build.distro_series
1562+ policy.pocket = build.pocket
1563+ policy.archive = build.archive
1564 return policy
1565 processor = UploadProcessor(self.options.base_fsroot,
1566 self.options.dryrun, self.options.nomails, self.options.builds,
1567
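Because the policy is now seeded from the build record rather than from a --buildid option, the buildd policy no longer has to look up its target itself: in builds mode the series, pocket and archive all come from the build that owns the upload directory. A reduced sketch of that wiring, with stub objects standing in for the real options, policy and build:

    class StubOptions:
        builds = True  # the processor is handling build uploads

    class StubBuild:
        distro_series = 'hoary'
        pocket = 'RELEASE'
        archive = 'primary'

    class StubPolicy:
        distroseries = pocket = archive = None

    def seed_policy(policy, options, build):
        # Mirrors getPolicy() above: the upload target comes from the build.
        if options.builds:
            assert build is not None, "builds mode but no build"
            policy.distroseries = build.distro_series
            policy.pocket = build.pocket
            policy.archive = build.archive
        return policy

    policy = seed_policy(StubPolicy(), StubOptions(), StubBuild())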
1568=== modified file 'lib/lp/soyuz/tests/test_binarypackagebuild.py'
1569--- lib/lp/soyuz/tests/test_binarypackagebuild.py 2010-09-09 17:02:33 +0000
1570+++ lib/lp/soyuz/tests/test_binarypackagebuild.py 2010-09-17 06:08:57 +0000
1571@@ -150,6 +150,15 @@
1572 self.assertStatementCount(
1573 0, self.build.getSpecificJob)
1574
1575+ def test_getUploader(self):
1576+ # For ACL purposes the uploader is the changes file signer.
1577+
1578+ class MockChanges:
1579+ signer = "Somebody <somebody@ubuntu.com>"
1580+
1581+ self.assertEquals("Somebody <somebody@ubuntu.com>",
1582+ self.build.getUploader(MockChanges()))
1583+
1584
1585 class TestBuildUpdateDependencies(TestCaseWithFactory):
1586