Merge lp:~jelmer/launchpad/506256-remove-popen-2 into lp:launchpad

Proposed by Jelmer Vernooij
Status: Merged
Approved by: Jelmer Vernooij
Approved revision: no longer in the source branch.
Merged at revision: 11579
Proposed branch: lp:~jelmer/launchpad/506256-remove-popen-2
Merge into: lp:launchpad
Prerequisite: lp:~jelmer/launchpad/506256-remove-popen
Diff against target: 1585 lines (+374/-273)
27 files modified
database/schema/security.cfg (+2/-0)
lib/lp/archiveuploader/dscfile.py (+0/-29)
lib/lp/archiveuploader/nascentupload.py (+31/-16)
lib/lp/archiveuploader/nascentuploadfile.py (+59/-35)
lib/lp/archiveuploader/tests/__init__.py (+5/-7)
lib/lp/archiveuploader/tests/nascentupload.txt (+4/-5)
lib/lp/archiveuploader/tests/test_buildduploads.py (+7/-8)
lib/lp/archiveuploader/tests/test_nascentuploadfile.py (+61/-0)
lib/lp/archiveuploader/tests/test_ppauploadprocessor.py (+11/-12)
lib/lp/archiveuploader/tests/test_recipeuploads.py (+8/-12)
lib/lp/archiveuploader/tests/test_uploadprocessor.py (+99/-30)
lib/lp/archiveuploader/tests/uploadpolicy.txt (+1/-8)
lib/lp/archiveuploader/uploadpolicy.py (+12/-14)
lib/lp/archiveuploader/uploadprocessor.py (+16/-19)
lib/lp/buildmaster/interfaces/packagebuild.py (+8/-4)
lib/lp/buildmaster/model/packagebuild.py (+11/-2)
lib/lp/buildmaster/tests/test_packagebuild.py (+1/-1)
lib/lp/code/configure.zcml (+1/-5)
lib/lp/code/model/sourcepackagerecipebuild.py (+4/-29)
lib/lp/code/model/tests/test_sourcepackagerecipebuild.py (+6/-0)
lib/lp/soyuz/doc/build-failedtoupload-workflow.txt (+2/-3)
lib/lp/soyuz/doc/buildd-slavescanner.txt (+0/-3)
lib/lp/soyuz/doc/distroseriesqueue-translations.txt (+3/-5)
lib/lp/soyuz/doc/soyuz-set-of-uploads.txt (+3/-20)
lib/lp/soyuz/model/binarypackagebuild.py (+4/-0)
lib/lp/soyuz/scripts/soyuz_process_upload.py (+6/-6)
lib/lp/soyuz/tests/test_binarypackagebuild.py (+9/-0)
To merge this branch: bzr merge lp:~jelmer/launchpad/506256-remove-popen-2
Reviewer: Michael Nelson (community)
Review type: code
Status: Approve
Review via email: mp+35412@code.launchpad.net

This proposal supersedes a proposal from 2010-09-14.

Commit message

Fix handling of source recipe builds when processing build uploads asynchronously.

Description of the change

This MP actually has two prerequisites, both of which have been approved but not yet landed: lp:~jelmer/launchpad/506256-remove-popen and lp:~jelmer/launchpad/archiveuploader-build-handling. Since I can only set one as the prerequisite, I've set the first, as it's the biggest.

A bit of background: this branch is a follow-up to earlier work I did to make it possible for the builddmaster to stop popen()ing "./archiveuploader --build-id --policy buildd" for each completed build - something that was very time-consuming - and instead move finished builds out of the way so that a cron job running a separate instance of the archiveuploader can process them later.
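
For illustration, the handoff works roughly like the sketch below. getUploadDirLeaf() and getBuildCookie() appear in the diff further down; the helper name hand_off_build_result, the shutil.move() call, and the directory layout are assumptions made purely for illustration, not the actual builddmaster code:

    # Hedged sketch: a finished build's output is parked in an incoming
    # directory keyed by the build cookie, instead of popen()ing
    # process-upload synchronously from the builddmaster.
    import os
    import shutil

    def hand_off_build_result(build, result_dir, incoming_folder):
        """Move a finished build's upload aside for later processing."""
        leaf_name = build.getUploadDirLeaf(build.getBuildCookie())
        target = os.path.join(incoming_folder, leaf_name)
        shutil.move(result_dir, target)
        return target

    # A cron job then runs process-upload over incoming_folder with the
    # 'buildd' policy, matching each leaf directory back to its build.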

This branch fixes source package recipe build processing in the separate upload processor.

It does the following things:

 * The separate upload policy for source package recipe builds has been merged into the overall buildd upload policy.
 * Relatedly, getUploader() no longer lives on the upload policy but on the build class, since it differs between binary package builds and recipe builds (see the sketch after this list).
 * The buildqueue record is cleaned up earlier so we don't keep the builder busy.
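
For illustration only, the getUploader() split could look roughly like this; the method name comes from the diff below, while the class bodies here are a hedged sketch rather than the exact Launchpad implementation:

    # Hedged sketch of the getUploader() dispatch described in the second
    # bullet above; the real methods live on the build models (e.g.
    # BinaryPackageBuild and SourcePackageRecipeBuild).

    class BinaryPackageBuild:
        def getUploader(self, changes):
            # Ordinary binary builds keep trusting the changes file signer.
            return changes.signer


    class SourcePackageRecipeBuild:
        def __init__(self, requester):
            self.requester = requester

        def getUploader(self, changes):
            # Recipe builds (e.g. daily builds) are uploaded on behalf of
            # the person who requested the build, which may differ from
            # the signer of the changes file.
            return self.requester

In verify_acl() the caller then uses build.getUploader(changes) when a build is passed and falls back to changes.signer otherwise, as the nascentupload.py diff below shows.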

To test:
./bin/test lp.buildmaster
./bin/test lp.archiveuploader

Revision history for this message
Jelmer Vernooij (jelmer) wrote :

=== modified file 'database/schema/security.cfg'
--- database/schema/security.cfg 2010-09-16 00:33:37 +0000
+++ database/schema/security.cfg 2010-09-16 09:04:13 +0000
@@ -1130,9 +1130,12 @@
 public.packagebuild = SELECT, INSERT, UPDATE
 public.binarypackagebuild = SELECT, INSERT, UPDATE
 public.sourcepackagerecipebuild = SELECT, UPDATE
-public.buildqueue = SELECT, INSERT, UPDATE
-public.job = SELECT, INSERT, UPDATE
-public.buildpackagejob = SELECT, INSERT, UPDATE
+public.sourcepackagerecipebuildjob = SELECT, UPDATE
+public.sourcepackagerecipe = SELECT, UPDATE
+public.buildqueue = SELECT, INSERT, UPDATE, DELETE
+public.job = SELECT, INSERT, UPDATE, DELETE
+public.buildpackagejob = SELECT, INSERT, UPDATE, DELETE
+public.builder = SELECT

 # Thusly the librarian
 public.libraryfilecontent = SELECT, INSERT

=== modified file 'lib/lp/archiveuploader/nascentupload.py'
--- lib/lp/archiveuploader/nascentupload.py 2010-09-15 19:38:48 +0000
+++ lib/lp/archiveuploader/nascentupload.py 2010-09-16 09:05:43 +0000
@@ -498,10 +498,13 @@
         if self.binaryful:
             return

-        # Set up some convenient shortcut variables.
-
-        uploader = self.policy.getUploader(self.changes, build)
-        archive = self.policy.archive
+        # The build can have an explicit uploader, which may be different
+        # from the changes file signer. (i.e in case of daily source package
+        # builds)
+        if build is not None:
+            uploader = build.getUploader(self.changes)
+        else:
+            uploader = self.changes.signer

         # If we have no signer, there's no ACL we can apply.
         if uploader is None:

=== modified file 'lib/lp/archiveuploader/tests/test_recipeuploads.py'
--- lib/lp/archiveuploader/tests/test_recipeuploads.py 2010-09-16 09:02:57 +0000
+++ lib/lp/archiveuploader/tests/test_recipeuploads.py 2010-09-16 09:05:48 +0000
@@ -42,7 +42,7 @@
             requester=self.recipe.owner)

         Store.of(self.build).flush()
-        self.options.context = 'recipe'
+        self.options.context = 'buildd'

         self.uploadprocessor = self.getUploadProcessor(
             self.layer.txn)

=== modified file 'lib/lp/archiveuploader/tests/test_uploadprocessor.py'
--- lib/lp/archiveuploader/tests/test_uploadprocessor.py 2010-09-16 09:02:57 +0000
+++ lib/lp/archiveuploader/tests/test_uploadprocessor.py 2010-09-16 09:05:56 +0000
@@ -1884,7 +1884,7 @@
         self.assertLogContains(
             "Unable to find package build job with id 42. Skipping.")

-    def testNoFiles(self):
+    def testBinaryPackageBuild_fail(self):
         # If the upload directory is empty, the upload
         # will fail.

@@ -1908,6 +1908,8 @@

         # Upload and accept a binary for the primary archive source.
         shutil.rmtree(upload_dir)
+
+        # Commit so the build cookie has the right ids.
         self.layer.txn.commit()
         leaf_name = build.getUploadDirLeaf(build.getBuildCo...

Revision history for this message
Michael Nelson (michael.nelson) wrote :

Hi Jelmer,

r=me, assuming you check the following:

* to check: the db permissions - can we get rid of DELETE perms elsewhere?
* to fix: I don't see why we need to use removeSecurityProxy in your test instead of a small update to the factory method.
* to check: regarding the actual diff - my local db-devel seems to already have some of these changes.

Thanks!

> === modified file 'database/schema/security.cfg'
> --- database/schema/security.cfg 2010-09-16 00:33:37 +0000
> +++ database/schema/security.cfg 2010-09-16 09:04:13 +0000
> @@ -1130,9 +1130,12 @@
> public.packagebuild = SELECT, INSERT, UPDATE
> public.binarypackagebuild = SELECT, INSERT, UPDATE
> public.sourcepackagerecipebuild = SELECT, UPDATE
> -public.buildqueue = SELECT, INSERT, UPDATE
> -public.job = SELECT, INSERT, UPDATE
> -public.buildpackagejob = SELECT, INSERT, UPDATE
> +public.sourcepackagerecipebuildjob = SELECT, UPDATE
> +public.sourcepackagerecipe = SELECT, UPDATE
> +public.buildqueue = SELECT, INSERT, UPDATE, DELETE
> +public.job = SELECT, INSERT, UPDATE, DELETE
> +public.buildpackagejob = SELECT, INSERT, UPDATE, DELETE

Right - so this is so the buildqueue record can be cleaned up earlier by the
uploader. Nice.

Should we be able to remove some DELETE perms elsewhere?

> +public.builder = SELECT
>
> # Thusly the librarian
> public.libraryfilecontent = SELECT, INSERT
>
> === modified file 'lib/lp/archiveuploader/nascentupload.py'
> --- lib/lp/archiveuploader/nascentupload.py 2010-09-15 19:38:48 +0000
> +++ lib/lp/archiveuploader/nascentupload.py 2010-09-16 09:05:43 +0000
> @@ -498,10 +498,13 @@
> if self.binaryful:
> return
>
> -        # Set up some convenient shortcut variables.
> -
> -        uploader = self.policy.getUploader(self.changes, build)
> -        archive = self.policy.archive
> +        # The build can have an explicit uploader, which may be different
> +        # from the changes file signer. (i.e in case of daily source package
> +        # builds)
> +        if build is not None:
> +            uploader = build.getUploader(self.changes)
> +        else:
> +            uploader = self.changes.signer

This seems strange - do we not need archive any more? In db-devel it's used
straight afterwards, but I'm assuming you've changed that code in a previous
branch. And checking your full diff shows that is the case (you're using
policy.archive).

>
> # If we have no signer, there's no ACL we can apply.
> if uploader is None:
>
> === modified file 'lib/lp/archiveuploader/tests/test_uploadprocessor.py'
> --- lib/lp/archiveuploader/tests/test_uploadprocessor.py 2010-09-16 09:02:57 +0000
> +++ lib/lp/archiveuploader/tests/test_uploadprocessor.py 2010-09-16 09:05:56 +0000
> @@ -1884,7 +1884,7 @@
> self.assertLogContains(
> "Unable to find package build job with id 42. Skipping.")
>
> -    def testNoFiles(self):
> +    def testBinaryPackageBuild_fail(self):
>...

review: Approve (code)
Revision history for this message
Jelmer Vernooij (jelmer) wrote :

=== modified file 'database/schema/security.cfg'
--- database/schema/security.cfg 2010-09-16 00:33:37 +0000
+++ database/schema/security.cfg 2010-09-16 12:27:46 +0000
@@ -1130,6 +1130,8 @@
 public.packagebuild = SELECT, INSERT, UPDATE
 public.binarypackagebuild = SELECT, INSERT, UPDATE
 public.sourcepackagerecipebuild = SELECT, UPDATE
+public.sourcepackagerecipebuildjob = SELECT, UPDATE
+public.sourcepackagerecipe = SELECT, UPDATE
 public.buildqueue = SELECT, INSERT, UPDATE
 public.job = SELECT, INSERT, UPDATE
 public.buildpackagejob = SELECT, INSERT, UPDATE

=== modified file 'lib/lp/archiveuploader/nascentupload.py'
--- lib/lp/archiveuploader/nascentupload.py 2010-09-15 19:38:48 +0000
+++ lib/lp/archiveuploader/nascentupload.py 2010-09-16 12:27:46 +0000
@@ -498,10 +498,13 @@
         if self.binaryful:
             return

-        # Set up some convenient shortcut variables.
-
-        uploader = self.policy.getUploader(self.changes, build)
-        archive = self.policy.archive
+        # The build can have an explicit uploader, which may be different
+        # from the changes file signer. (i.e in case of daily source package
+        # builds)
+        if build is not None:
+            uploader = build.getUploader(self.changes)
+        else:
+            uploader = self.changes.signer

         # If we have no signer, there's no ACL we can apply.
         if uploader is None:

=== modified file 'lib/lp/archiveuploader/tests/test_recipeuploads.py'
--- lib/lp/archiveuploader/tests/test_recipeuploads.py 2010-09-16 09:02:57 +0000
+++ lib/lp/archiveuploader/tests/test_recipeuploads.py 2010-09-16 12:27:46 +0000
@@ -42,7 +42,7 @@
             requester=self.recipe.owner)

         Store.of(self.build).flush()
-        self.options.context = 'recipe'
+        self.options.context = 'buildd'

         self.uploadprocessor = self.getUploadProcessor(
             self.layer.txn)

=== modified file 'lib/lp/archiveuploader/tests/test_uploadprocessor.py'
--- lib/lp/archiveuploader/tests/test_uploadprocessor.py 2010-09-16 09:02:57 +0000
+++ lib/lp/archiveuploader/tests/test_uploadprocessor.py 2010-09-16 13:37:25 +0000
@@ -18,6 +18,7 @@
 import tempfile
 import traceback

+from storm.locals import Store
 from zope.component import (
     getGlobalSiteManager,
     getUtility,
@@ -1884,7 +1885,7 @@
         self.assertLogContains(
             "Unable to find package build job with id 42. Skipping.")

-    def testNoFiles(self):
+    def testBinaryPackageBuild_fail(self):
         # If the upload directory is empty, the upload
         # will fail.

@@ -1908,6 +1909,8 @@

         # Upload and accept a binary for the primary archive source.
         shutil.rmtree(upload_dir)
+
+        # Commit so the build cookie has the right ids.
         self.layer.txn.commit()
         leaf_name = build.getUploadDirLeaf(build.getBuildCookie())
         os.mkdir(os.path.join(self.incoming_folder, leaf_name))
@@ -1928,7 +1931,7 @@
         self.assertTrue('DEBUG: Moving upload directory '
             in log_contents)

- def t...

Preview Diff

=== modified file 'database/schema/security.cfg'
--- database/schema/security.cfg 2010-09-10 02:46:28 +0000
+++ database/schema/security.cfg 2010-09-17 06:08:57 +0000
@@ -1130,6 +1130,8 @@
 public.packagebuild = SELECT, INSERT, UPDATE
 public.binarypackagebuild = SELECT, INSERT, UPDATE
 public.sourcepackagerecipebuild = SELECT, UPDATE
+public.sourcepackagerecipebuildjob = SELECT, UPDATE
+public.sourcepackagerecipe = SELECT, UPDATE
 public.buildqueue = SELECT, INSERT, UPDATE
 public.job = SELECT, INSERT, UPDATE
 public.buildpackagejob = SELECT, INSERT, UPDATE

=== modified file 'lib/lp/archiveuploader/dscfile.py'
--- lib/lp/archiveuploader/dscfile.py 2010-09-09 17:02:33 +0000
+++ lib/lp/archiveuploader/dscfile.py 2010-09-17 06:08:57 +0000
@@ -630,35 +630,6 @@
         cleanup_unpacked_dir(unpacked_dir)
         self.logger.debug("Done")

-    def findBuild(self):
-        """Find and return the SourcePackageRecipeBuild, if one is specified.
-
-        If by any chance an inconsistent build was found this method will
-        raise UploadError resulting in a upload rejection.
-        """
-        build_id = getattr(self.policy.options, 'buildid', None)
-        if build_id is None:
-            return None
-
-        build = getUtility(ISourcePackageRecipeBuildSource).getById(build_id)
-
-        # The master verifies the status to confirm successful upload.
-        build.status = BuildStatus.FULLYBUILT
-        # If this upload is successful, any existing log is wrong and
-        # unuseful.
-        build.upload_log = None
-
-        # Sanity check; raise an error if the build we've been
-        # told to link to makes no sense.
-        if (build.pocket != self.policy.pocket or
-            build.distroseries != self.policy.distroseries or
-            build.archive != self.policy.archive):
-            raise UploadError(
-                "Attempt to upload source specifying "
-                "recipe build %s, where it doesn't fit." % build.id)
-
-        return build
-
     def storeInDatabase(self, build):
         """Store DSC information as a SourcePackageRelease record.

=== modified file 'lib/lp/archiveuploader/nascentupload.py'
--- lib/lp/archiveuploader/nascentupload.py 2010-08-27 14:27:22 +0000
+++ lib/lp/archiveuploader/nascentupload.py 2010-09-17 06:08:57 +0000
@@ -137,7 +137,7 @@
             raise FatalUploadError(str(e))
         return cls(changesfile, policy, logger)

-    def process(self):
+    def process(self, build=None):
         """Process this upload, checking it against policy, loading it into
         the database if it seems okay.

@@ -200,7 +200,7 @@
         self.overrideArchive()

         # Check upload rights for the signer of the upload.
-        self.verify_acl()
+        self.verify_acl(build)

         # Perform policy checks.
         policy.checkUpload(self)
@@ -483,7 +483,7 @@
     #
     # Signature and ACL stuff
     #
-    def verify_acl(self):
+    def verify_acl(self, build=None):
         """Check the signer's upload rights.

         The signer must have permission to upload to either the component
@@ -498,10 +498,13 @@
         if self.binaryful:
             return

-        # Set up some convenient shortcut variables.
-
-        uploader = self.policy.getUploader(self.changes)
-        archive = self.policy.archive
+        # The build can have an explicit uploader, which may be different
+        # from the changes file signer. (i.e in case of daily source package
+        # builds)
+        if build is not None:
+            uploader = build.getUploader(self.changes)
+        else:
+            uploader = self.changes.signer

         # If we have no signer, there's no ACL we can apply.
         if uploader is None:
@@ -511,7 +514,7 @@
         source_name = getUtility(
             ISourcePackageNameSet).queryByName(self.changes.dsc.package)

-        rejection_reason = archive.checkUpload(
+        rejection_reason = self.policy.archive.checkUpload(
             uploader, self.policy.distroseries, source_name,
             self.changes.dsc.component, self.policy.pocket, not self.is_new)

@@ -824,7 +827,7 @@
     #
     # Actually processing accepted or rejected uploads -- and mailing people
     #
-    def do_accept(self, notify=True):
+    def do_accept(self, notify=True, build=None):
         """Accept the upload into the queue.

         This *MAY* in extreme cases cause a database error and thus
@@ -834,13 +837,14 @@
         constraint.

         :param notify: True to send an email, False to not send one.
+        :param build: The build associated with this upload.
         """
         if self.is_rejected:
             self.reject("Alas, someone called do_accept when we're rejected")
             self.do_reject(notify)
             return False
         try:
-            self.storeObjectsInDatabase()
+            self.storeObjectsInDatabase(build=build)

             # Send the email.
             # There is also a small corner case here where the DB transaction
@@ -923,7 +927,7 @@
     #
     # Inserting stuff in the database
     #
-    def storeObjectsInDatabase(self):
+    def storeObjectsInDatabase(self, build=None):
         """Insert this nascent upload into the database."""

         # Queue entries are created in the NEW state by default; at the
@@ -939,7 +943,8 @@
         sourcepackagerelease = None
         if self.sourceful:
             assert self.changes.dsc, "Sourceful upload lacks DSC."
-            build = self.changes.dsc.findBuild()
+            if build is not None:
+                self.changes.dsc.checkBuild(build)
             sourcepackagerelease = self.changes.dsc.storeInDatabase(build)
             package_upload_source = self.queue_root.addSource(
                 sourcepackagerelease)
@@ -980,11 +985,21 @@
                 sourcepackagerelease = (
                     binary_package_file.findSourcePackageRelease())

-                build = binary_package_file.findBuild(sourcepackagerelease)
-                assert self.queue_root.pocket == build.pocket, (
+                # Find the build for this particular binary package file.
+                if build is None:
+                    bpf_build = binary_package_file.findBuild(
+                        sourcepackagerelease)
+                else:
+                    bpf_build = build
+                if bpf_build.source_package_release != sourcepackagerelease:
+                    raise AssertionError(
+                        "Attempt to upload binaries specifying build %s, "
+                        "where they don't fit." % bpf_build.id)
+                binary_package_file.checkBuild(bpf_build)
+                assert self.queue_root.pocket == bpf_build.pocket, (
                     "Binary was not build for the claimed pocket.")
-                binary_package_file.storeInDatabase(build)
-                processed_builds.append(build)
+                binary_package_file.storeInDatabase(bpf_build)
+                processed_builds.append(bpf_build)

             # Store the related builds after verifying they were built
             # from the same source.

=== modified file 'lib/lp/archiveuploader/nascentuploadfile.py'
--- lib/lp/archiveuploader/nascentuploadfile.py 2010-09-02 16:28:50 +0000
+++ lib/lp/archiveuploader/nascentuploadfile.py 2010-09-17 06:08:57 +0000
@@ -33,6 +33,7 @@
 from canonical.encoding import guess as guess_encoding
 from canonical.launchpad.interfaces.librarian import ILibraryFileAliasSet
 from canonical.librarian.utils import filechunks
+from lp.app.errors import NotFoundError
 from lp.archiveuploader.utils import (
     determine_source_file_type,
     prefix_multi_line_string,
@@ -52,7 +53,6 @@
     PackageUploadCustomFormat,
     PackageUploadStatus,
     )
-from lp.soyuz.interfaces.binarypackagebuild import IBinaryPackageBuildSet
 from lp.soyuz.interfaces.binarypackagename import IBinaryPackageNameSet
 from lp.soyuz.interfaces.component import IComponentSet
 from lp.soyuz.interfaces.section import ISectionSet
@@ -338,6 +338,13 @@
         """Return an ISection for self.section_name."""
         return getUtility(ISectionSet)[self.section_name]

+    def checkBuild(self, build):
+        """Check the status of the build this file is part of.
+
+        :param build: an `IPackageBuild` instance
+        """
+        raise NotImplementedError(self.checkBuild)
+
     def extractUserDefinedFields(self, control):
         """Extract the user defined fields out of a control file list.
         """
@@ -381,6 +388,23 @@
             yield UploadError("%s: should be %s according to changes file."
                 % (filename_version, version_chopped))

+    def checkBuild(self, build):
+        """See PackageUploadFile."""
+        # The master verifies the status to confirm successful upload.
+        build.status = BuildStatus.FULLYBUILT
+        # If this upload is successful, any existing log is wrong and
+        # unuseful.
+        build.upload_log = None
+
+        # Sanity check; raise an error if the build we've been
+        # told to link to makes no sense.
+        if (build.pocket != self.policy.pocket or
+            build.distroseries != self.policy.distroseries or
+            build.archive != self.policy.archive):
+            raise UploadError(
+                "Attempt to upload source specifying "
+                "recipe build %s, where it doesn't fit." % build.id)
+

 class BaseBinaryUploadFile(PackageUploadFile):
     """Base methods for binary upload modeling."""
@@ -834,52 +858,52 @@
           in this case, change this build to be FULLYBUILT.
         - Create a new build in FULLYBUILT status.

-        If by any chance an inconsistent build was found this method will
-        raise UploadError resulting in a upload rejection.
         """
-        build_id = getattr(self.policy.options, 'buildid', None)
         dar = self.policy.distroseries[self.archtag]

-        if build_id is None:
-            # Check if there's a suitable existing build.
-            build = sourcepackagerelease.getBuildByArch(
-                dar, self.policy.archive)
-            if build is not None:
-                build.status = BuildStatus.FULLYBUILT
-                self.logger.debug("Updating build for %s: %s" % (
-                    dar.architecturetag, build.id))
-            else:
-                # No luck. Make one.
-                # Usually happen for security binary uploads.
-                build = sourcepackagerelease.createBuild(
-                    dar, self.policy.pocket, self.policy.archive,
-                    status=BuildStatus.FULLYBUILT)
-                self.logger.debug("Build %s created" % build.id)
-        else:
-            build = getUtility(IBinaryPackageBuildSet).getByBuildID(build_id)
-            self.logger.debug("Build %s found" % build.id)
-            # Ensure gathered binary is related to a FULLYBUILT build
-            # record. It will be check in slave-scanner procedure to
-            # certify that the build was processed correctly.
+        # Check if there's a suitable existing build.
+        build = sourcepackagerelease.getBuildByArch(
+            dar, self.policy.archive)
+        if build is not None:
             build.status = BuildStatus.FULLYBUILT
-            # Also purge any previous failed upload_log stored, so its
-            # content can be garbage-collected since it's not useful
-            # anymore.
-            build.upload_log = None
+            self.logger.debug("Updating build for %s: %s" % (
+                dar.architecturetag, build.id))
+        else:
+            # No luck. Make one.
+            # Usually happen for security binary uploads.
+            build = sourcepackagerelease.createBuild(
+                dar, self.policy.pocket, self.policy.archive,
+                status=BuildStatus.FULLYBUILT)
+            self.logger.debug("Build %s created" % build.id)
+        return build
+
+    def checkBuild(self, build):
+        """See PackageUploadFile."""
+        try:
+            dar = self.policy.distroseries[self.archtag]
+        except NotFoundError:
+            raise UploadError(
+                "Upload to unknown architecture %s for distroseries %s" %
+                (self.archtag, self.policy.distroseries))
+
+        # Ensure gathered binary is related to a FULLYBUILT build
+        # record. It will be check in slave-scanner procedure to
+        # certify that the build was processed correctly.
+        build.status = BuildStatus.FULLYBUILT
+        # Also purge any previous failed upload_log stored, so its
+        # content can be garbage-collected since it's not useful
+        # anymore.
+        build.upload_log = None

         # Sanity check; raise an error if the build we've been
-        # told to link to makes no sense (ie. is not for the right
-        # source package).
-        if (build.source_package_release != sourcepackagerelease or
-            build.pocket != self.policy.pocket or
+        # told to link to makes no sense.
+        if (build.pocket != self.policy.pocket or
             build.distro_arch_series != dar or
             build.archive != self.policy.archive):
             raise UploadError(
                 "Attempt to upload binaries specifying "
                 "build %s, where they don't fit." % build.id)

-        return build
-
     def storeInDatabase(self, build):
         """Insert this binary release and build into the database."""
         # Reencode everything we are supplying, because old packages

=== modified file 'lib/lp/archiveuploader/tests/__init__.py'
--- lib/lp/archiveuploader/tests/__init__.py 2010-08-26 20:08:43 +0000
+++ lib/lp/archiveuploader/tests/__init__.py 2010-09-17 06:08:57 +0000
@@ -64,17 +64,15 @@
 class MockUploadOptions:
     """Mock upload policy options helper"""

-    def __init__(self, distro='ubuntutest', distroseries=None, buildid=None):
+    def __init__(self, distro='ubuntutest', distroseries=None):
         self.distro = distro
         self.distroseries = distroseries
-        self.buildid = buildid
+

-
-def getPolicy(name='anything', distro='ubuntu', distroseries=None,
-              buildid=None):
+def getPolicy(name='anything', distro='ubuntu', distroseries=None):
     """Build and return an Upload Policy for the given context."""
     policy = findPolicyByName(name)
-    options = MockUploadOptions(distro, distroseries, buildid)
+    options = MockUploadOptions(distro, distroseries)
     policy.setOptions(options)
     return policy

=== modified file 'lib/lp/archiveuploader/tests/nascentupload.txt'
--- lib/lp/archiveuploader/tests/nascentupload.txt 2010-08-26 15:28:34 +0000
+++ lib/lp/archiveuploader/tests/nascentupload.txt 2010-09-17 06:08:57 +0000
@@ -27,7 +27,7 @@
     ...     datadir, getPolicy, mock_logger, mock_logger_quiet)

     >>> buildd_policy = getPolicy(
-    ...     name='buildd', distro='ubuntu', distroseries='hoary', buildid=1)
+    ...     name='buildd', distro='ubuntu', distroseries='hoary')

     >>> sync_policy = getPolicy(
     ...     name='sync', distro='ubuntu', distroseries='hoary')
@@ -216,7 +216,7 @@
     # Use the buildd policy as it accepts unsigned changes files and binary
     # uploads.
     >>> modified_buildd_policy = getPolicy(
-    ...     name='buildd', distro='ubuntu', distroseries='hoary', buildid=1)
+    ...     name='buildd', distro='ubuntu', distroseries='hoary')

     >>> ed_mismatched_upload = NascentUpload.from_changesfile_path(
     ...     datadir("ed_0.2-20_i386.changes.mismatched-arch-unsigned"),
@@ -640,13 +640,12 @@
 the 'buildd' upload policy and the build record id.

     >>> buildd_policy = getPolicy(
-    ...     name='buildd', distro='ubuntu', distroseries='hoary',
-    ...     buildid=multibar_build.id)
+    ...     name='buildd', distro='ubuntu', distroseries='hoary')

     >>> multibar_bin_upload = NascentUpload.from_changesfile_path(
     ...     datadir('suite/multibar_1.0-1/multibar_1.0-1_i386.changes'),
     ...     buildd_policy, mock_logger_quiet)
-    >>> multibar_bin_upload.process()
+    >>> multibar_bin_upload.process(build=multibar_build)
     >>> success = multibar_bin_upload.do_accept()

 Now that we have successfully processed the binaries coming from a

=== modified file 'lib/lp/archiveuploader/tests/test_buildduploads.py'
--- lib/lp/archiveuploader/tests/test_buildduploads.py 2010-08-26 15:28:34 +0000
+++ lib/lp/archiveuploader/tests/test_buildduploads.py 2010-09-17 06:08:57 +0000
@@ -112,7 +112,7 @@
         # Store source queue item for future use.
         self.source_queue = queue_item

-    def _uploadBinary(self, archtag):
+    def _uploadBinary(self, archtag, build):
         """Upload the base binary.

         Ensure it got processed and has a respective queue record.
@@ -121,7 +121,7 @@
         self._prepareUpload(self.binary_dir)
         self.uploadprocessor.processChangesFile(
             os.path.join(self.queue_folder, "incoming", self.binary_dir),
-            self.getBinaryChangesfileFor(archtag))
+            self.getBinaryChangesfileFor(archtag), build=build)
         queue_item = self.uploadprocessor.last_processed_upload.queue_root
         self.assertTrue(
             queue_item is not None,
@@ -205,10 +205,9 @@
             pubrec.datepublished = UTC_NOW
             queue_item.setDone()

-    def _setupUploadProcessorForBuild(self, build_candidate):
+    def _setupUploadProcessorForBuild(self):
         """Setup an UploadProcessor instance for a given buildd context."""
         self.options.context = self.policy
-        self.options.buildid = str(build_candidate.id)
         self.uploadprocessor = self.getUploadProcessor(
             self.layer.txn)

@@ -223,8 +222,8 @@
         """
         # Upload i386 binary.
         build_candidate = self._createBuild('i386')
-        self._setupUploadProcessorForBuild(build_candidate)
-        build_used = self._uploadBinary('i386')
+        self._setupUploadProcessorForBuild()
+        build_used = self._uploadBinary('i386', build_candidate)

         self.assertEqual(build_used.id, build_candidate.id)
         self.assertBuildsCreated(1)
@@ -239,8 +238,8 @@

         # Upload powerpc binary
         build_candidate = self._createBuild('powerpc')
-        self._setupUploadProcessorForBuild(build_candidate)
-        build_used = self._uploadBinary('powerpc')
+        self._setupUploadProcessorForBuild()
+        build_used = self._uploadBinary('powerpc', build_candidate)

         self.assertEqual(build_used.id, build_candidate.id)
         self.assertBuildsCreated(2)

=== modified file 'lib/lp/archiveuploader/tests/test_nascentuploadfile.py'
--- lib/lp/archiveuploader/tests/test_nascentuploadfile.py 2010-09-03 06:06:40 +0000
+++ lib/lp/archiveuploader/tests/test_nascentuploadfile.py 2010-09-17 06:08:57 +0000
@@ -20,8 +20,11 @@
 from lp.archiveuploader.nascentuploadfile import (
     CustomUploadFile,
     DebBinaryUploadFile,
+    UploadError,
     )
+from lp.registry.interfaces.pocket import PackagePublishingPocket
 from lp.archiveuploader.tests import AbsolutelyAnythingGoesUploadPolicy
+from lp.buildmaster.enums import BuildStatus
 from lp.soyuz.enums import PackageUploadCustomFormat
 from lp.testing import TestCaseWithFactory

@@ -34,6 +37,7 @@
         self.logger = BufferLogger()
         self.policy = AbsolutelyAnythingGoesUploadPolicy()
         self.distro = self.factory.makeDistribution()
+        self.policy.pocket = PackagePublishingPocket.RELEASE
         self.policy.archive = self.factory.makeArchive(
             distribution=self.distro)

@@ -217,6 +221,34 @@
         release = uploadfile.storeInDatabase(None)
         self.assertEquals(u"http://samba.org/~jelmer/bzr", release.homepage)

+    def test_checkBuild(self):
+        # checkBuild() verifies consistency with a build.
+        build = self.factory.makeSourcePackageRecipeBuild(
+            pocket=self.policy.pocket, distroseries=self.policy.distroseries,
+            archive=self.policy.archive)
+        dsc = self.getBaseDsc()
+        uploadfile = self.createDSCFile(
+            "foo.dsc", dsc, "main/net", "extra", "dulwich", "0.42",
+            self.createChangesFile("foo.changes", self.getBaseChanges()))
+        uploadfile.checkBuild(build)
+        # checkBuild() sets the build status to FULLYBUILT and
+        # removes the upload log.
+        self.assertEquals(BuildStatus.FULLYBUILT, build.status)
+        self.assertIs(None, build.upload_log)
+
+    def test_checkBuild_inconsistent(self):
+        # checkBuild() raises UploadError if inconsistencies between build
+        # and upload file are found.
+        build = self.factory.makeSourcePackageRecipeBuild(
+            pocket=self.policy.pocket,
+            distroseries=self.factory.makeDistroSeries(),
+            archive=self.policy.archive)
+        dsc = self.getBaseDsc()
+        uploadfile = self.createDSCFile(
+            "foo.dsc", dsc, "main/net", "extra", "dulwich", "0.42",
+            self.createChangesFile("foo.changes", self.getBaseChanges()))
+        self.assertRaises(UploadError, uploadfile.checkBuild, build)
+

 class DebBinaryUploadFileTests(PackageUploadFileTestCase):
     """Tests for DebBinaryUploadFile."""
@@ -326,3 +358,32 @@
         bpr = uploadfile.storeInDatabase(build)
         self.assertEquals(
             u"http://samba.org/~jelmer/dulwich", bpr.homepage)
+
+    def test_checkBuild(self):
+        # checkBuild() verifies consistency with a build.
+        das = self.factory.makeDistroArchSeries(
+            distroseries=self.policy.distroseries, architecturetag="i386")
+        build = self.factory.makeBinaryPackageBuild(
+            distroarchseries=das,
+            archive=self.policy.archive)
+        uploadfile = self.createDebBinaryUploadFile(
+            "foo_0.42_i386.deb", "main/python", "unknown", "mypkg", "0.42",
+            None)
+        uploadfile.checkBuild(build)
+        # checkBuild() sets the build status to FULLYBUILT and
+        # removes the upload log.
+        self.assertEquals(BuildStatus.FULLYBUILT, build.status)
+        self.assertIs(None, build.upload_log)
+
+    def test_checkBuild_inconsistent(self):
+        # checkBuild() raises UploadError if inconsistencies between build
+        # and upload file are found.
+        das = self.factory.makeDistroArchSeries(
+            distroseries=self.policy.distroseries, architecturetag="amd64")
+        build = self.factory.makeBinaryPackageBuild(
+            distroarchseries=das,
+            archive=self.policy.archive)
+        uploadfile = self.createDebBinaryUploadFile(
+            "foo_0.42_i386.deb", "main/python", "unknown", "mypkg", "0.42",
+            None)
+        self.assertRaises(UploadError, uploadfile.checkBuild, build)

=== modified file 'lib/lp/archiveuploader/tests/test_ppauploadprocessor.py'
--- lib/lp/archiveuploader/tests/test_ppauploadprocessor.py 2010-08-31 11:11:09 +0000
+++ lib/lp/archiveuploader/tests/test_ppauploadprocessor.py 2010-09-17 06:08:57 +0000
@@ -355,10 +355,10 @@
         builds = self.name16.archive.getBuildRecords(name="bar")
         [build] = builds
         self.options.context = 'buildd'
-        self.options.buildid = build.id
         upload_dir = self.queueUpload(
             "bar_1.0-1_binary_universe", "~name16/ubuntu")
-        self.processUpload(self.uploadprocessor, upload_dir)
+        self.processUpload(
+            self.uploadprocessor, upload_dir, build=build)

         # No mails are sent for successful binary uploads.
         self.assertEqual(len(stub.test_emails), 0,
@@ -405,9 +405,9 @@

         # Binary upload to the just-created build record.
         self.options.context = 'buildd'
-        self.options.buildid = build.id
         upload_dir = self.queueUpload("bar_1.0-1_binary", "~name16/ubuntu")
-        self.processUpload(self.uploadprocessor, upload_dir)
+        self.processUpload(
+            self.uploadprocessor, upload_dir, build=build)

         # The binary upload was accepted and it's waiting in the queue.
         queue_items = self.breezy.getQueueItems(
@@ -459,9 +459,9 @@

         # Binary upload to the just-created build record.
         self.options.context = 'buildd'
-        self.options.buildid = build_bar_i386.id
         upload_dir = self.queueUpload("bar_1.0-1_binary", "~cprov/ubuntu")
-        self.processUpload(self.uploadprocessor, upload_dir)
+        self.processUpload(
+            self.uploadprocessor, upload_dir, build=build_bar_i386)

         # The binary upload was accepted and it's waiting in the queue.
         queue_items = self.breezy.getQueueItems(
@@ -760,9 +760,9 @@
         builds = self.name16.archive.getBuildRecords(name='bar')
         [build] = builds
         self.options.context = 'buildd'
-        self.options.buildid = build.id
         upload_dir = self.queueUpload("bar_1.0-1_binary", "~name16/ubuntu")
-        self.processUpload(self.uploadprocessor, upload_dir)
+        self.processUpload(
+            self.uploadprocessor, upload_dir, build=build)

         # The binary upload was accepted and it's waiting in the queue.
         queue_items = self.breezy.getQueueItems(
@@ -804,10 +804,9 @@
         # Binary uploads should exhibit the same behaviour:
         [build] = self.name16.archive.getBuildRecords(name="bar")
         self.options.context = 'buildd'
-        self.options.buildid = build.id
         upload_dir = self.queueUpload(
             "bar_1.0-1_contrib_binary", "~name16/ubuntu")
-        self.processUpload(self.uploadprocessor, upload_dir)
+        self.processUpload(self.uploadprocessor, upload_dir, build=build)
         queue_items = self.breezy.getQueueItems(
             status=PackageUploadStatus.ACCEPTED, name="bar",
             version="1.0-1", exact_match=True, archive=self.name16.archive)
@@ -1306,14 +1305,14 @@
         builds = self.name16.archive.getBuildRecords(name='bar')
         [build] = builds
         self.options.context = 'buildd'
-        self.options.buildid = build.id

         # Stuff 1024 MiB in name16 PPA, so anything will be above the
         # default quota limit, 1024 MiB.
         self._fillArchive(self.name16.archive, 1024 * (2 ** 20))

         upload_dir = self.queueUpload("bar_1.0-1_binary", "~name16/ubuntu")
-        self.processUpload(self.uploadprocessor, upload_dir)
+        self.processUpload(
+            self.uploadprocessor, upload_dir, build=build)

         # The binary upload was accepted, and it's waiting in the queue.
         queue_items = self.breezy.getQueueItems(

=== modified file 'lib/lp/archiveuploader/tests/test_recipeuploads.py'
--- lib/lp/archiveuploader/tests/test_recipeuploads.py 2010-08-27 11:19:54 +0000
+++ lib/lp/archiveuploader/tests/test_recipeuploads.py 2010-09-17 06:08:57 +0000
@@ -10,6 +10,9 @@
 from storm.store import Store
 from zope.component import getUtility

+from lp.archiveuploader.uploadprocessor import (
+    UploadStatusEnum,
+    )
 from lp.archiveuploader.tests.test_uploadprocessor import (
     TestUploadProcessorBase,
     )
@@ -17,7 +20,6 @@
 from lp.code.interfaces.sourcepackagerecipebuild import (
     ISourcePackageRecipeBuildSource,
     )
-from lp.soyuz.enums import PackageUploadStatus


 class TestSourcePackageRecipeBuildUploads(TestUploadProcessorBase):
@@ -40,8 +42,7 @@
             requester=self.recipe.owner)

         Store.of(self.build).flush()
-        self.options.context = 'recipe'
-        self.options.buildid = self.build.id
+        self.options.context = 'buildd'

         self.uploadprocessor = self.getUploadProcessor(
             self.layer.txn)
@@ -54,19 +55,14 @@
         self.assertIs(None, self.build.source_package_release)
         self.assertEqual(False, self.build.verifySuccessfulUpload())
         self.queueUpload('bar_1.0-1', '%d/ubuntu' % self.build.archive.id)
-        self.uploadprocessor.processChangesFile(
+        result = self.uploadprocessor.processChangesFile(
             os.path.join(self.queue_folder, "incoming", 'bar_1.0-1'),
-            '%d/ubuntu/bar_1.0-1_source.changes' % self.build.archive.id)
+            '%d/ubuntu/bar_1.0-1_source.changes' % self.build.archive.id,
+            build=self.build)
         self.layer.txn.commit()

-        queue_item = self.uploadprocessor.last_processed_upload.queue_root
-        self.assertTrue(
-            queue_item is not None,
+        self.assertEquals(UploadStatusEnum.ACCEPTED, result,
             "Source upload failed\nGot: %s" % "\n".join(self.log.lines))

-        self.assertEqual(PackageUploadStatus.DONE, queue_item.status)
-        spr = queue_item.sources[0].sourcepackagerelease
-        self.assertEqual(self.build, spr.source_package_recipe_build)
-        self.assertEqual(spr, self.build.source_package_release)
         self.assertEqual(BuildStatus.FULLYBUILT, self.build.status)
         self.assertEqual(True, self.build.verifySuccessfulUpload())

=== modified file 'lib/lp/archiveuploader/tests/test_uploadprocessor.py'
--- lib/lp/archiveuploader/tests/test_uploadprocessor.py 2010-09-17 06:08:54 +0000
+++ lib/lp/archiveuploader/tests/test_uploadprocessor.py 2010-09-17 06:08:57 +0000
@@ -18,6 +18,7 @@
 import tempfile
 import traceback

+from storm.locals import Store
 from zope.component import (
     getGlobalSiteManager,
     getUtility,
@@ -153,7 +154,7 @@

         self.options = MockOptions()
         self.options.base_fsroot = self.queue_folder
-        self.options.builds = True
+        self.options.builds = False
         self.options.leafname = None
         self.options.distro = "ubuntu"
         self.options.distroseries = None
@@ -172,9 +173,13 @@
         super(TestUploadProcessorBase, self).tearDown()

     def getUploadProcessor(self, txn):
-        def getPolicy(distro):
+        def getPolicy(distro, build):
             self.options.distro = distro.name
             policy = findPolicyByName(self.options.context)
+            if self.options.builds:
+                policy.distroseries = build.distro_series
+                policy.pocket = build.pocket
+                policy.archive = build.archive
             policy.setOptions(self.options)
             return policy
         return UploadProcessor(
@@ -288,7 +293,7 @@
             shutil.copytree(upload_dir, target_path)
         return os.path.join(self.incoming_folder, queue_entry)

-    def processUpload(self, processor, upload_dir):
+    def processUpload(self, processor, upload_dir, build=None):
         """Process an upload queue entry directory.

         There is some duplication here with logic in UploadProcessor,
@@ -298,7 +303,8 @@
         results = []
         changes_files = processor.locateChangesFiles(upload_dir)
         for changes_file in changes_files:
-            result = processor.processChangesFile(upload_dir, changes_file)
+            result = processor.processChangesFile(
+                upload_dir, changes_file, build=build)
             results.append(result)
         return results

@@ -693,10 +699,10 @@
         # Upload and accept a binary for the primary archive source.
         shutil.rmtree(upload_dir)
         self.options.context = 'buildd'
-        self.options.buildid = bar_original_build.id
         self.layer.txn.commit()
         upload_dir = self.queueUpload("bar_1.0-1_binary")
-        self.processUpload(uploadprocessor, upload_dir)
+        self.processUpload(uploadprocessor, upload_dir,
+            build=bar_original_build)
         self.assertEqual(
             uploadprocessor.last_processed_upload.is_rejected, False)
         bar_bin_pubs = self.publishPackage('bar', '1.0-1', source=False)
@@ -724,10 +730,10 @@

         shutil.rmtree(upload_dir)
         self.options.context = 'buildd'
-        self.options.buildid = bar_copied_build.id
         upload_dir = self.queueUpload(
             "bar_1.0-1_binary", "%s/ubuntu" % copy_archive.id)
-        self.processUpload(uploadprocessor, upload_dir)
+        self.processUpload(uploadprocessor, upload_dir,
+            build=bar_copied_build)

         # Make sure the upload succeeded.
         self.assertEqual(
@@ -796,9 +802,9 @@
         [bar_original_build] = bar_source_pub.createMissingBuilds()

         self.options.context = 'buildd'
-        self.options.buildid = bar_original_build.id
         upload_dir = self.queueUpload("bar_1.0-1_binary")
-        self.processUpload(uploadprocessor, upload_dir)
+        self.processUpload(
+            uploadprocessor, upload_dir, build=bar_original_build)
         [bar_binary_pub] = self.publishPackage("bar", "1.0-1", source=False)

         # Prepare ubuntu/breezy-autotest to build sources in i386.
@@ -818,10 +824,10 @@
         # Re-upload the same 'bar-1.0-1' binary as if it was rebuilt
         # in breezy-autotest context.
         shutil.rmtree(upload_dir)
-        self.options.buildid = bar_copied_build.id
         self.options.distroseries = breezy_autotest.name
         upload_dir = self.queueUpload("bar_1.0-1_binary")
-        self.processUpload(uploadprocessor, upload_dir)
+        self.processUpload(uploadprocessor, upload_dir,
+            build=bar_copied_build)
         [duplicated_binary_upload] = breezy_autotest.getQueueItems(
             status=PackageUploadStatus.NEW, name='bar',
             version='1.0-1', exact_match=True)
@@ -859,9 +865,9 @@
         [bar_original_build] = bar_source_pub.getBuilds()

         self.options.context = 'buildd'
-        self.options.buildid = bar_original_build.id
         upload_dir = self.queueUpload("bar_1.0-2_binary")
-        self.processUpload(uploadprocessor, upload_dir)
+        self.processUpload(uploadprocessor, upload_dir,
+            build=bar_original_build)
         [bar_binary_pub] = self.publishPackage("bar", "1.0-2", source=False)

         # Create a COPY archive for building in non-virtual builds.
@@ -878,10 +884,10 @@
         [bar_copied_build] = bar_copied_source.createMissingBuilds()

         shutil.rmtree(upload_dir)
-        self.options.buildid = bar_copied_build.id
         upload_dir = self.queueUpload(
             "bar_1.0-1_binary", "%s/ubuntu" % copy_archive.id)
-        self.processUpload(uploadprocessor, upload_dir)
+        self.processUpload(uploadprocessor, upload_dir,
+            build=bar_copied_build)

         # The binary just uploaded is accepted because it's destined for a
         # copy archive and the PRIMARY and the COPY archives are isolated
@@ -1034,9 +1040,9 @@
             self.breezy['i386'], PackagePublishingPocket.RELEASE,
             self.ubuntu.main_archive)
         self.layer.txn.commit()
-        self.options.buildid = foocomm_build.id
         upload_dir = self.queueUpload("foocomm_1.0-1_binary")
-        self.processUpload(uploadprocessor, upload_dir)
+        self.processUpload(
+            uploadprocessor, upload_dir, build=foocomm_build)

         contents = [
             "Subject: foocomm_1.0-1_i386.changes rejected",
@@ -1044,10 +1050,8 @@
             "where they don't fit."]
         self.assertEmail(contents)

-        # Reset upload queue directory for a new upload and the
-        # uploadprocessor buildid option.
+        # Reset upload queue directory for a new upload.
         shutil.rmtree(upload_dir)
-        self.options.buildid = None

         # Now upload a binary package of 'foocomm', letting a new build record
         # with appropriate data be created by the uploadprocessor.
@@ -1881,7 +1885,7 @@
         self.assertLogContains(
             "Unable to find package build job with id 42. Skipping.")

-    def testNoFiles(self):
+    def testBinaryPackageBuild_fail(self):
         # If the upload directory is empty, the upload
         # will fail.

@@ -1905,6 +1909,8 @@

         # Upload and accept a binary for the primary archive source.
         shutil.rmtree(upload_dir)
+
+        # Commit so the build cookie has the right ids.
         self.layer.txn.commit()
         leaf_name = build.getUploadDirLeaf(build.getBuildCookie())
         os.mkdir(os.path.join(self.incoming_folder, leaf_name))
@@ -1925,7 +1931,7 @@
         self.assertTrue('DEBUG: Moving upload directory '
             in log_contents)

-    def testSuccess(self):
+    def testBinaryPackageBuilds(self):
         # Properly uploaded binaries should result in the
         # build status changing to FULLYBUILT.
         # Upload a source package
@@ -1946,6 +1952,8 @@

         # Upload and accept a binary for the primary archive source.
         shutil.rmtree(upload_dir)
+
+        # Commit so the build cookie has the right ids.
         self.layer.txn.commit()
         leaf_name = build.getUploadDirLeaf(build.getBuildCookie())
         upload_dir = self.queueUpload("bar_1.0-1_binary",
@@ -1959,13 +1967,74 @@
         # No emails are sent on success
         self.assertEquals(len(stub.test_emails), last_stub_mail_count)
         self.assertEquals(BuildStatus.FULLYBUILT, build.status)
-        log_contents = build.upload_log.read()
-        log_lines = log_contents.splitlines()
-        self.assertTrue(
-            'INFO: Processing upload bar_1.0-1_i386.changes' in log_lines)
+        # Upon full build the upload log is unset.
+        self.assertIs(None, build.upload_log)
+
+    def testSourcePackageRecipeBuild(self):
1966 self.assertTrue(1974 # Properly uploaded source packages should result in the
1967 'INFO: Committing the transaction and any mails associated with '1975 # build status changing to FULLYBUILT.
1968 'this upload.' in log_lines)1976
1977 # Upload a source package
1978 archive = self.factory.makeArchive()
1979 archive.require_virtualized = False
1980 build = self.factory.makeSourcePackageRecipeBuild(sourcename=u"bar",
1981 distroseries=self.breezy, archive=archive, requester=archive.owner)
1982 self.assertEquals(archive.owner, build.requester)
1983 bq = self.factory.makeSourcePackageRecipeBuildJob(recipe_build=build)
1984 # Commit so the build cookie has the right ids.
1985 self.layer.txn.commit()
1986 leaf_name = build.getUploadDirLeaf(build.getBuildCookie())
1987 relative_path = "~%s/%s/%s/%s" % (
1988 archive.owner.name, archive.name, self.breezy.distribution.name,
1989 self.breezy.name)
1990 upload_dir = self.queueUpload(
1991 "bar_1.0-1", queue_entry=leaf_name, relative_path=relative_path)
1992 self.options.context = 'buildd'
1993 self.options.builds = True
1994 build.jobStarted()
1995 # Commit so date_started is recorded and doesn't cause constraint
1996 # violations later.
1997 build.status = BuildStatus.UPLOADING
1998 Store.of(build).flush()
1999 self.uploadprocessor.processBuildUpload(
2000 self.incoming_folder, leaf_name)
2001 self.layer.txn.commit()
2002
2003 self.assertEquals(BuildStatus.FULLYBUILT, build.status)
2004 self.assertEquals(None, build.builder)
2005 self.assertIsNot(None, build.date_finished)
2006 self.assertIsNot(None, build.duration)
2007 # Upon full build the upload log is unset.
2008 self.assertIs(None, build.upload_log)
2009
2010 def testSourcePackageRecipeBuild_fail(self):
2011 # A source package recipe build will fail if no files are present.
2012
2013 # Upload a source package
2014 archive = self.factory.makeArchive()
2015 archive.require_virtualized = False
2016 build = self.factory.makeSourcePackageRecipeBuild(sourcename=u"bar",
2017 distroseries=self.breezy, archive=archive)
2018 bq = self.factory.makeSourcePackageRecipeBuildJob(recipe_build=build)
2019 # Commit so the build cookie has the right ids.
2020 Store.of(build).flush()
2021 leaf_name = build.getUploadDirLeaf(build.getBuildCookie())
2022 os.mkdir(os.path.join(self.incoming_folder, leaf_name))
2023 self.options.context = 'buildd'
2024 self.options.builds = True
2025 build.jobStarted()
2026 # Commit so date_started is recorded and doesn't cause constraint
2027 # violations later.
2028 Store.of(build).flush()
2029 build.status = BuildStatus.UPLOADING
2030 self.uploadprocessor.processBuildUpload(
2031 self.incoming_folder, leaf_name)
2032 self.layer.txn.commit()
2033 self.assertEquals(BuildStatus.FAILEDTOUPLOAD, build.status)
2034 self.assertEquals(None, build.builder)
2035 self.assertIsNot(None, build.date_finished)
2036 self.assertIsNot(None, build.duration)
2037 self.assertIsNot(None, build.upload_log)
19692038
19702039
1971class ParseBuildUploadLeafNameTests(TestCase):2040class ParseBuildUploadLeafNameTests(TestCase):
19722041
=== modified file 'lib/lp/archiveuploader/tests/uploadpolicy.txt'
--- lib/lp/archiveuploader/tests/uploadpolicy.txt 2010-08-18 14:03:15 +0000
+++ lib/lp/archiveuploader/tests/uploadpolicy.txt 2010-09-17 06:08:57 +0000
@@ -53,23 +53,16 @@
     ...     distro = 'ubuntu'
     ...     distroseries = None
     >>> class MockOptions(MockAbstractOptions):
-    ...     buildid = 1
+    ...     builds = True
 
     >>> ab_opts = MockAbstractOptions()
     >>> bd_opts = MockOptions()
 
     >>> insecure_policy.setOptions(ab_opts)
-    >>> insecure_policy.options is ab_opts
-    True
     >>> insecure_policy.distro.name
     u'ubuntu'
     >>> buildd_policy.setOptions(ab_opts)
-    Traceback (most recent call last):
-    ...
-    UploadPolicyError: BuildID required for buildd context
     >>> buildd_policy.setOptions(bd_opts)
-    >>> buildd_policy.options is bd_opts
-    True
     >>> buildd_policy.distro.name
     u'ubuntu'
 
 
=== modified file 'lib/lp/archiveuploader/uploadpolicy.py'
--- lib/lp/archiveuploader/uploadpolicy.py 2010-08-25 13:04:14 +0000
+++ lib/lp/archiveuploader/uploadpolicy.py 2010-09-17 06:08:57 +0000
@@ -11,7 +11,6 @@
     "BuildDaemonUploadPolicy",
     "findPolicyByName",
     "IArchiveUploadPolicy",
-    "SOURCE_PACKAGE_RECIPE_UPLOAD_POLICY_NAME",
     "UploadPolicyError",
     ]
 
@@ -34,8 +33,6 @@
 from lazr.enum import EnumeratedType, Item
 
 
-# Defined here so that uploadpolicy.py doesn't depend on lp.code.
-SOURCE_PACKAGE_RECIPE_UPLOAD_POLICY_NAME = 'recipe'
 # Number of seconds in an hour (used later)
 HOURS = 3600
 
@@ -128,13 +125,8 @@
             raise AssertionError(
                 "Upload is not sourceful, binaryful or mixed.")
 
-    def getUploader(self, changes):
-        """Get the person who is doing the uploading."""
-        return changes.signer
-
     def setOptions(self, options):
         """Store the options for later."""
-        self.options = options
         # Extract and locate the distribution though...
         self.distro = getUtility(IDistributionSet)[options.distro]
         if options.distroseries is not None:
@@ -324,7 +316,6 @@
     """The build daemon upload policy is invoked by the slave scanner."""
 
     name = 'buildd'
-    accepted_type = ArchiveUploadType.BINARY_ONLY
 
     def __init__(self):
         super(BuildDaemonUploadPolicy, self).__init__()
@@ -333,11 +324,9 @@
         self.unsigned_dsc_ok = True
 
     def setOptions(self, options):
-        AbstractUploadPolicy.setOptions(self, options)
-        # We require a buildid to be provided
-        if (getattr(options, 'buildid', None) is None and
-                not getattr(options, 'builds', False)):
-            raise UploadPolicyError("BuildID required for buildd context")
+        """Store the options for later."""
+        super(BuildDaemonUploadPolicy, self).setOptions(options)
+        options.builds = True
 
     def policySpecificChecks(self, upload):
         """The buildd policy should enforce that the buildid matches."""
@@ -349,6 +338,15 @@
         """Buildd policy allows PPA upload."""
         return False
 
+    def validateUploadType(self, upload):
+        if upload.sourceful and upload.binaryful:
+            if self.accepted_type != ArchiveUploadType.MIXED_ONLY:
+                upload.reject(
+                    "Source/binary (i.e. mixed) uploads are not allowed.")
+        elif not upload.sourceful and not upload.binaryful:
+            raise AssertionError(
+                "Upload is not sourceful, binaryful or mixed.")
+
 
 class SyncUploadPolicy(AbstractUploadPolicy):
     """This policy is invoked when processing sync uploads."""
 
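With the recipe-specific policy gone, the single buildd policy has to accept both
binary uploads (from package builds) and source uploads (from recipe builds) while
still refusing mixed ones. A minimal standalone sketch of that check, not Launchpad
code; FakeUpload below is a hypothetical stand-in for NascentUpload:

    # Minimal sketch of the validateUploadType behaviour added above.
    class FakeUpload:
        """Hypothetical stand-in for NascentUpload."""

        def __init__(self, sourceful, binaryful):
            self.sourceful = sourceful
            self.binaryful = binaryful
            self.rejections = []

        def reject(self, message):
            self.rejections.append(message)

    MIXED_ONLY = 'mixed'
    BINARY_ONLY = 'binary'

    def validate_upload_type(upload, accepted_type=BINARY_ONLY):
        # Only a mixed (source+binary) upload is rejected outright;
        # source-only and binary-only uploads both pass.
        if upload.sourceful and upload.binaryful:
            if accepted_type != MIXED_ONLY:
                upload.reject(
                    "Source/binary (i.e. mixed) uploads are not allowed.")
        elif not upload.sourceful and not upload.binaryful:
            raise AssertionError(
                "Upload is not sourceful, binaryful or mixed.")

    recipe_upload = FakeUpload(sourceful=True, binaryful=False)
    validate_upload_type(recipe_upload)
    assert recipe_upload.rejections == []    # source-only now gets through

    mixed_upload = FakeUpload(sourceful=True, binaryful=True)
    validate_upload_type(mixed_upload)
    assert mixed_upload.rejections != []     # mixed is still refused
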
=== modified file 'lib/lp/archiveuploader/uploadprocessor.py'
--- lib/lp/archiveuploader/uploadprocessor.py 2010-09-17 06:08:54 +0000
+++ lib/lp/archiveuploader/uploadprocessor.py 2010-09-17 06:08:57 +0000
@@ -71,7 +71,6 @@
     )
 from lp.archiveuploader.uploadpolicy import (
     BuildDaemonUploadPolicy,
-    SOURCE_PACKAGE_RECIPE_UPLOAD_POLICY_NAME,
     UploadPolicyError,
     )
 from lp.buildmaster.enums import (
@@ -207,6 +206,7 @@
         The name of the leaf is the build id of the build.
         Build uploads always contain a single package per leaf.
         """
+        upload_path = os.path.join(fsroot, upload)
         try:
             job_id = parse_build_upload_leaf_name(upload)
         except ValueError:
@@ -220,20 +220,20 @@
                 "Unable to find package build job with id %d. Skipping." %
                 job_id)
             return
+        logger = BufferLogger()
         build = buildfarm_job.getSpecificJob()
         if build.status != BuildStatus.UPLOADING:
             self.log.warn(
-                "Expected build status to be 'UPLOADING', was %s. Skipping.",
-                build.status.name)
+                "Expected build status to be 'UPLOADING', was %s. "
+                "Moving to failed.", build.status.name)
+            self.moveProcessedUpload(upload_path, "failed", logger)
             return
         self.log.debug("Build %s found" % build.id)
-        logger = BufferLogger()
-        upload_path = os.path.join(fsroot, upload)
         try:
             [changes_file] = self.locateChangesFiles(upload_path)
             logger.debug("Considering changefile %s" % changes_file)
             result = self.processChangesFile(
-                upload_path, changes_file, logger)
+                upload_path, changes_file, logger, build)
         except (KeyboardInterrupt, SystemExit):
             raise
         except:
@@ -251,16 +251,13 @@
             UploadStatusEnum.REJECTED: "rejected",
             UploadStatusEnum.ACCEPTED: "accepted"}[result]
         self.moveProcessedUpload(upload_path, destination, logger)
+        build.date_finished = datetime.datetime.now(pytz.UTC)
         if not (result == UploadStatusEnum.ACCEPTED and
                 build.verifySuccessfulUpload() and
                 build.status == BuildStatus.FULLYBUILT):
             build.status = BuildStatus.FAILEDTOUPLOAD
-            build.date_finished = datetime.datetime.now(pytz.UTC)
             build.notify(extra_info="Uploading build %s failed." % upload)
             build.storeUploadLog(logger.buffer.getvalue())
-
-        # Remove BuildQueue record.
-        build.buildqueue_record.destroySelf()
 
     def processUpload(self, fsroot, upload):
         """Process an upload's changes files, and move it to a new directory.
@@ -376,7 +373,8 @@
                     os.path.join(relative_path, filename))
         return self.orderFilenames(changes_files)
 
-    def processChangesFile(self, upload_path, changes_file, logger=None):
+    def processChangesFile(self, upload_path, changes_file, logger=None,
+                           build=None):
         """Process a single changes file.
 
         This is done by obtaining the appropriate upload policy (according
@@ -432,7 +430,7 @@
                     "https://help.launchpad.net/Packaging/PPA#Uploading "
                     "and update your configuration.")))
         logger.debug("Finding fresh policy")
-        policy = self._getPolicyForDistro(distribution)
+        policy = self._getPolicyForDistro(distribution, build)
         policy.archive = archive
 
         # DistroSeries overriding respect the following precedence:
@@ -450,10 +448,8 @@
 
         # Reject source upload to buildd upload paths.
         first_path = relative_path.split(os.path.sep)[0]
-        is_not_buildd_nor_recipe_policy = policy.name not in [
-            SOURCE_PACKAGE_RECIPE_UPLOAD_POLICY_NAME,
-            BuildDaemonUploadPolicy.name]
-        if first_path.isdigit() and is_not_buildd_nor_recipe_policy:
+        if (first_path.isdigit() and
+                policy.name != BuildDaemonUploadPolicy.name):
             error_message = (
                 "Invalid upload path (%s) for this policy (%s)" %
                 (relative_path, policy.name))
@@ -472,7 +468,7 @@
             result = UploadStatusEnum.ACCEPTED
 
             try:
-                upload.process()
+                upload.process(build)
             except UploadPolicyError, e:
                 upload.reject("UploadPolicyError escaped upload.process: "
                               "%s " % e)
@@ -513,7 +509,8 @@
                     upload.do_reject(notify)
                     self.ztm.abort()
                 else:
-                    successful = upload.do_accept(notify=notify)
+                    successful = upload.do_accept(
+                        notify=notify, build=build)
                     if not successful:
                         result = UploadStatusEnum.REJECTED
                         logger.info(
 
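The net effect of these hunks on processBuildUpload is easier to see as a whole.
A much-simplified control-flow sketch, with stand-in callables for the real
UploadProcessor collaborators and plain strings in place of the BuildStatus enum
(not the Launchpad implementation):

    import datetime
    import os

    import pytz

    def process_build_upload(fsroot, upload, build, process_changes,
                             move_upload, log):
        # Upload path and logger exist before the status check, so a build
        # that is not UPLOADING can be moved straight to "failed".
        upload_path = os.path.join(fsroot, upload)
        if build.status != 'UPLOADING':
            log("Expected build status to be 'UPLOADING', was %s. "
                "Moving to failed." % build.status)
            move_upload(upload_path, 'failed')
            return
        # The build is threaded through to the policy, so no --buildid
        # option is needed to find the target or the uploader.
        result = process_changes(upload_path, build)  # 'accepted'/'rejected'
        move_upload(upload_path, result)
        build.date_finished = datetime.datetime.now(pytz.UTC)
        if not (result == 'accepted' and build.verifySuccessfulUpload()):
            build.status = 'FAILEDTOUPLOAD'
            build.notify(extra_info="Uploading build %s failed." % upload)
        # The BuildQueue record is no longer destroyed here; that now happens
        # when the build is collected (see the packagebuild.py hunks below).
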
=== modified file 'lib/lp/buildmaster/interfaces/packagebuild.py'
--- lib/lp/buildmaster/interfaces/packagebuild.py 2010-09-17 06:08:54 +0000
+++ lib/lp/buildmaster/interfaces/packagebuild.py 2010-09-17 06:08:57 +0000
@@ -71,10 +71,6 @@
         title=_('Build farm job'), schema=IBuildFarmJob, required=True,
         readonly=True, description=_('The base build farm job.'))
 
-    policy_name = TextLine(
-        title=_("Policy name"), required=True,
-        description=_("The upload policy to use for handling these builds."))
-
     current_component = Attribute(
         'Component where the source related to this build was last '
         'published.')
@@ -149,6 +145,14 @@
         created in a suspended state.
         """
 
+    def getUploader(changes):
+        """Return the person responsible for the upload.
+
+        This is used to when checking permissions.
+
+        :param changes: Changes file from the upload.
+        """
+
 
 class IPackageBuildSource(Interface):
     """A utility of this interface used to create _things_."""
 
=== modified file 'lib/lp/buildmaster/model/packagebuild.py'
--- lib/lp/buildmaster/model/packagebuild.py 2010-09-17 06:08:54 +0000
+++ lib/lp/buildmaster/model/packagebuild.py 2010-09-17 06:08:57 +0000
@@ -94,8 +94,6 @@
     build_farm_job_id = Int(name='build_farm_job', allow_none=False)
     build_farm_job = Reference(build_farm_job_id, 'BuildFarmJob.id')
 
-    policy_name = 'buildd'
-
     # The following two properties are part of the IPackageBuild
     # interface, but need to be provided by derived classes.
     distribution = None
@@ -239,6 +237,10 @@
         """See `IPackageBuild`."""
         raise NotImplementedError
 
+    def getUploader(self, changes):
+        """See `IPackageBuild`."""
+        raise NotImplementedError
+
 
 class PackageBuildDerived:
     """Setup the delegation for package build.
@@ -352,6 +354,10 @@
         if not os.path.exists(target_dir):
             os.mkdir(target_dir)
 
+        # Flush so there are no race conditions with archiveuploader about
+        # self.status.
+        Store.of(self).flush()
+
         # Move the directory used to grab the binaries into
         # the incoming directory so the upload processor never
         # sees half-finished uploads.
@@ -360,6 +366,9 @@
         # Release the builder for another job.
         self.buildqueue_record.builder.cleanSlave()
 
+        # Remove BuildQueue record.
+        self.buildqueue_record.destroySelf()
+
     def _handleStatus_PACKAGEFAIL(self, librarian, slave_status, logger):
         """Handle a package that had failed to build.
 
 
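The ordering that these two _handleStatus_OK hunks establish matters: the status
flush happens before the upload directory becomes visible, and the BuildQueue
record is dropped right away so the builder is freed without waiting for the
asynchronous upload processor. A rough sketch with stand-in names (not the real
method; it assumes the build has already been marked UPLOADING by the caller):

    def handle_status_ok(build, store, move_upload_dir):
        # Flush first so archiveuploader cannot race against an unflushed
        # build.status.
        store.flush()
        # Only now move the grabbed files into incoming/, so the processor
        # never sees a half-finished upload.
        move_upload_dir()
        # Release the builder for another job...
        build.buildqueue_record.builder.cleanSlave()
        # ...and remove the BuildQueue record here, rather than keeping the
        # builder's slot tied up until the upload is processed.
        build.buildqueue_record.destroySelf()
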
=== modified file 'lib/lp/buildmaster/tests/test_packagebuild.py'
--- lib/lp/buildmaster/tests/test_packagebuild.py 2010-09-17 06:08:54 +0000
+++ lib/lp/buildmaster/tests/test_packagebuild.py 2010-09-17 06:08:57 +0000
@@ -105,7 +105,6 @@
 
     def test_default_values(self):
         # PackageBuild has a number of default values.
-        self.failUnlessEqual('buildd', self.package_build.policy_name)
         self.failUnlessEqual(
             'multiverse', self.package_build.current_component.name)
         self.failUnlessEqual(None, self.package_build.distribution)
@@ -327,6 +326,7 @@
             })
         self.assertEqual(BuildStatus.FAILEDTOUPLOAD, self.build.status)
         self.assertResultCount(0, "failed")
+        self.assertIs(None, self.build.buildqueue_record)
 
     def test_handleStatus_OK_relative_filepath(self):
         # A filemap that tries to write to files outside of
 
=== modified file 'lib/lp/code/configure.zcml'
--- lib/lp/code/configure.zcml 2010-09-13 04:56:29 +0000
+++ lib/lp/code/configure.zcml 2010-09-17 06:08:57 +0000
@@ -923,7 +923,7 @@
     <require permission="launchpad.View" interface="lp.code.interfaces.sourcepackagerecipebuild.ISourcePackageRecipeBuild"/>
     <!-- This is needed for UploadProcessor to run. The permission isn't
          important; launchpad.Edit isn't actually held by anybody. -->
-    <require permission="launchpad.Edit" set_attributes="status upload_log" />
+    <require permission="launchpad.Edit" set_attributes="status upload_log date_finished requester" />
   </class>
 
   <securedutility
@@ -988,10 +984,6 @@
       name="RECIPEBRANCHBUILD"
       provides="lp.buildmaster.interfaces.buildfarmjob.IBuildFarmJob"/>
 
-  <call
-    callable="lp.code.model.sourcepackagerecipebuild.register_archive_upload_policy_adapter"
-    />
-
   <webservice:register module="lp.code.interfaces.webservice" />
   <adapter
     provides="lp.buildmaster.interfaces.buildfarmjob.ISpecificBuildFarmJob"
 
=== modified file 'lib/lp/code/model/sourcepackagerecipebuild.py'
--- lib/lp/code/model/sourcepackagerecipebuild.py 2010-09-09 17:02:33 +0000
+++ lib/lp/code/model/sourcepackagerecipebuild.py 2010-09-17 06:08:57 +0000
@@ -22,7 +22,6 @@
     )
 from storm.store import Store
 from zope.component import (
-    getGlobalSiteManager,
     getUtility,
     )
 from zope.interface import (
@@ -39,12 +38,6 @@
     )
 from canonical.launchpad.webapp import errorlog
 from lp.app.errors import NotFoundError
-from lp.archiveuploader.uploadpolicy import (
-    ArchiveUploadType,
-    BuildDaemonUploadPolicy,
-    IArchiveUploadPolicy,
-    SOURCE_PACKAGE_RECIPE_UPLOAD_POLICY_NAME,
-    )
 from lp.buildmaster.enums import (
     BuildFarmJobType,
     BuildStatus,
@@ -77,25 +70,10 @@
 from lp.soyuz.model.sourcepackagerelease import SourcePackageRelease
 
 
-class SourcePackageRecipeUploadPolicy(BuildDaemonUploadPolicy):
-    """Policy for uploading the results of a source package recipe build."""
-
-    name = SOURCE_PACKAGE_RECIPE_UPLOAD_POLICY_NAME
-    accepted_type = ArchiveUploadType.SOURCE_ONLY
-
-    def getUploader(self, changes):
-        """Return the person doing the upload."""
-        build_id = int(getattr(self.options, 'buildid'))
-        sprb = getUtility(ISourcePackageRecipeBuildSource).getById(build_id)
-        return sprb.requester
-
-
 class SourcePackageRecipeBuild(PackageBuildDerived, Storm):
 
     __storm_table__ = 'SourcePackageRecipeBuild'
 
-    policy_name = SourcePackageRecipeUploadPolicy.name
-
     implements(ISourcePackageRecipeBuild)
     classProvides(ISourcePackageRecipeBuildSource)
 
@@ -333,6 +311,10 @@
         if self.status == BuildStatus.FULLYBUILT:
             self.notify()
 
+    def getUploader(self, changes):
+        """See `IPackageBuild`."""
+        return self.requester
+
 
 class SourcePackageRecipeBuildJob(BuildFarmJobOldDerived, Storm):
     classProvides(ISourcePackageRecipeBuildJobSource)
@@ -384,13 +366,6 @@
         return 2505 + self.build.archive.relative_build_score
 
 
-def register_archive_upload_policy_adapter():
-    getGlobalSiteManager().registerUtility(
-        component=SourcePackageRecipeUploadPolicy,
-        provided=IArchiveUploadPolicy,
-        name=SourcePackageRecipeUploadPolicy.name)
-
-
 def get_recipe_build_for_build_farm_job(build_farm_job):
     """Return the SourcePackageRecipeBuild associated with a BuildFarmJob."""
     store = Store.of(build_farm_job)
 
=== modified file 'lib/lp/code/model/tests/test_sourcepackagerecipebuild.py'
--- lib/lp/code/model/tests/test_sourcepackagerecipebuild.py 2010-09-17 06:08:54 +0000
+++ lib/lp/code/model/tests/test_sourcepackagerecipebuild.py 2010-09-17 06:08:57 +0000
@@ -309,6 +309,12 @@
         job = sprb.build_farm_job.getSpecificJob()
         self.assertEqual(sprb, job)
 
+    def test_getUploader(self):
+        # For ACL purposes the uploader is the build requester.
+        build = self.makeSourcePackageRecipeBuild()
+        self.assertEquals(build.requester,
+            build.getUploader(None))
+
 
 class TestAsBuildmaster(TestCaseWithFactory):
 
 
=== modified file 'lib/lp/soyuz/doc/build-failedtoupload-workflow.txt'
--- lib/lp/soyuz/doc/build-failedtoupload-workflow.txt 2010-08-04 00:16:44 +0000
+++ lib/lp/soyuz/doc/build-failedtoupload-workflow.txt 2010-09-17 06:08:57 +0000
@@ -162,8 +162,7 @@
     >>> buildd_policy = getPolicy(
     ...     name='buildd',
    ...     distro=failedtoupload_candidate.distribution.name,
-    ...     distroseries=failedtoupload_candidate.distro_series.name,
-    ...     buildid=failedtoupload_candidate.id)
+    ...     distroseries=failedtoupload_candidate.distro_series.name)
 
     >>> cdrkit_bin_upload = NascentUpload.from_changesfile_path(
     ...     datadir('suite/cdrkit_1.0/cdrkit_1.0_i386.changes'),
@@ -171,7 +170,7 @@
     >>> cdrkit_bin_upload.process()
     >>> cdrkit_bin_upload.is_rejected
     False
-    >>> success = cdrkit_bin_upload.do_accept()
+    >>> success = cdrkit_bin_upload.do_accept(build=failedtoupload_candidate)
     >>> print cdrkit_bin_upload.queue_root.status.name
     NEW
 
 
=== modified file 'lib/lp/soyuz/doc/buildd-slavescanner.txt'
--- lib/lp/soyuz/doc/buildd-slavescanner.txt 2010-09-17 06:08:54 +0000
+++ lib/lp/soyuz/doc/buildd-slavescanner.txt 2010-09-17 06:08:57 +0000
@@ -339,8 +339,6 @@
     >>> build.status.title
     'Uploading build'
 
-    >>> bqItem10.destroySelf()
-
 === Successfully collected and uploaded (FULLYBUILT) ===
 
 Build item 6 has binary packages available in the sample data, letting us test
@@ -1062,7 +1060,6 @@
     True
     >>> print lfa.filename
     buildlog_ubuntu-hoary-i386.mozilla-firefox_0.9_BUILDING.txt.gz
-    >>> candidate.destroySelf()
 
 The attempt to fetch the buildlog from the common librarian will fail
 since this is a build in a private archive and the buildlog was thus
 
=== modified file 'lib/lp/soyuz/doc/distroseriesqueue-translations.txt'
--- lib/lp/soyuz/doc/distroseriesqueue-translations.txt 2010-08-24 15:29:01 +0000
+++ lib/lp/soyuz/doc/distroseriesqueue-translations.txt 2010-09-17 06:08:57 +0000
@@ -74,15 +74,14 @@
     ...     dapper_amd64, PackagePublishingPocket.RELEASE, dapper.main_archive)
 
     >>> buildd_policy = getPolicy(
-    ...     name='buildd', distro='ubuntu', distroseries='dapper',
-    ...     buildid=build.id)
+    ...     name='buildd', distro='ubuntu', distroseries='dapper')
 
     >>> pmount_upload = NascentUpload.from_changesfile_path(
     ...     datadir('pmount_0.9.7-2ubuntu2_amd64.changes'),
     ...     buildd_policy, mock_logger)
     DEBUG: Changes file can be unsigned.
 
-    >>> pmount_upload.process()
+    >>> pmount_upload.process(build=build)
     DEBUG: Beginning processing.
     DEBUG: Verifying the changes file.
     DEBUG: Verifying files in upload.
@@ -105,9 +104,8 @@
     >>> print len(dapper_pmount.getLatestTranslationsUploads())
     0
 
-    >>> success = pmount_upload.do_accept()
+    >>> success = pmount_upload.do_accept(build=build)
     DEBUG: Creating queue entry
-    DEBUG: Build ... found
     ...
 
     # And all things worked.
 
=== modified file 'lib/lp/soyuz/doc/soyuz-set-of-uploads.txt'
--- lib/lp/soyuz/doc/soyuz-set-of-uploads.txt 2010-08-30 02:07:38 +0000
+++ lib/lp/soyuz/doc/soyuz-set-of-uploads.txt 2010-09-17 06:08:57 +0000
@@ -119,21 +119,17 @@
     >>> from lp.soyuz.scripts.soyuz_process_upload import (
     ...     ProcessUpload)
     >>> from canonical.testing import LaunchpadZopelessLayer
-    >>> def process_uploads(upload_policy, build_id, series, loglevel):
+    >>> def process_uploads(upload_policy, series, loglevel):
     ...     """Simulate process-upload.py script run.
     ...
     ...     :param upload_policy: context in which to consider the upload
     ...         (equivalent to script's --context option).
-    ...     :param build_id: build to which to attach this upload.
-    ...         (equivalent to script's --buildid option).
     ...     :param series: distro series to give back from.
     ...         (equivalent to script's --series option).
     ...     :param loglevel: logging level (as defined in logging module). Any
     ...         log messages below this level will be suppressed.
     ...     """
     ...     args = [temp_dir, "-C", upload_policy]
-    ...     if build_id is not None:
-    ...         args.extend(["-b", build_id])
     ...     if series is not None:
     ...         args.extend(["-s", series])
     ...     # Run script under 'uploader' DB user. The dbuser argument to the
@@ -230,11 +226,11 @@
     >>> from lp.services.mail import stub
 
     >>> def simulate_upload(
-    ...     leafname, is_new=False, upload_policy='anything', build_id=None,
+    ...     leafname, is_new=False, upload_policy='anything',
     ...     series=None, distro="ubuntutest", loglevel=logging.WARN):
     ...     """Process upload(s). Options are as for process_uploads()."""
     ...     punt_upload_into_queue(leafname, distro=distro)
-    ...     process_uploads(upload_policy, build_id, series, loglevel)
+    ...     process_uploads(upload_policy, series, loglevel)
     ...     # We seem to be leaving a lock file behind here for some reason.
     ...     # Naturally it doesn't count as an unprocessed incoming file, which
     ...     # is what we're really looking for.
@@ -289,19 +285,6 @@
 
     >>> simulate_upload('bar_1.0-2')
 
-Check the rejection of bar_1.0-2_binary when uploaded to the wrong build id.
-
-    >>> simulate_upload(
-    ...     'bar_1.0-2_binary', upload_policy="buildd", build_id="2",
-    ...     loglevel=logging.ERROR)
-    log> Exception while accepting:
-    Attempt to upload binaries specifying build 2, where they don't fit.
-    ...
-    Rejected uploads: ['bar_1.0-2_binary']
-
-Try it again without the bogus build id. This succeeds without
-complaints.
-
     >>> simulate_upload('bar_1.0-2_binary')
 
 Check the rejection of a malicious version of bar package which refers
 
=== modified file 'lib/lp/soyuz/model/binarypackagebuild.py'
--- lib/lp/soyuz/model/binarypackagebuild.py 2010-09-17 06:08:54 +0000
+++ lib/lp/soyuz/model/binarypackagebuild.py 2010-09-17 06:08:57 +0000
@@ -760,6 +760,10 @@
         # package build, then don't hit the db.
         return self
 
+    def getUploader(self, changes):
+        """See `IBinaryPackageBuild`."""
+        return changes.signer
+
 
 class BinaryPackageBuildSet:
     implements(IBinaryPackageBuildSet)
 
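Taken together with the SourcePackageRecipeBuild.getUploader hunk above, the ACL
lookup now dispatches on the build type rather than living on the upload policy.
A standalone sketch of that dispatch, with simplified stand-ins for the two build
classes (not Launchpad code):

    class FakeBinaryPackageBuild:
        def getUploader(self, changes):
            # Binary uploads are attributed to whoever signed the .changes.
            return changes.signer

    class FakeRecipeBuild:
        def __init__(self, requester):
            self.requester = requester

        def getUploader(self, changes):
            # Recipe builds attribute the upload to the build requester.
            return self.requester

    class FakeChanges:
        signer = "Somebody <somebody@ubuntu.com>"

    assert FakeBinaryPackageBuild().getUploader(FakeChanges()) == FakeChanges.signer
    assert FakeRecipeBuild("requester").getUploader(None) == "requester"
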
=== modified file 'lib/lp/soyuz/scripts/soyuz_process_upload.py'
--- lib/lp/soyuz/scripts/soyuz_process_upload.py 2010-08-20 20:31:18 +0000
+++ lib/lp/soyuz/scripts/soyuz_process_upload.py 2010-09-17 06:08:57 +0000
@@ -61,11 +61,6 @@
             help="Distro series to give back from.")
 
         self.parser.add_option(
-            "-b", "--buildid", action="store", type="int", dest="buildid",
-            metavar="BUILD",
-            help="The build ID to which to attach this upload.")
-
-        self.parser.add_option(
             "-a", "--announce", action="store", dest="announcelist",
             metavar="ANNOUNCELIST", help="Override the announcement list")
 
@@ -82,10 +77,15 @@
                 "%s is not a directory" % self.options.base_fsroot)
 
         self.logger.debug("Initialising connection.")
-        def getPolicy(distro):
+        def getPolicy(distro, build):
             self.options.distro = distro.name
             policy = findPolicyByName(self.options.context)
             policy.setOptions(self.options)
+            if self.options.builds:
+                assert build, "--builds specified but no build"
+                policy.distroseries = build.distro_series
+                policy.pocket = build.pocket
+                policy.archive = build.archive
             return policy
         processor = UploadProcessor(self.options.base_fsroot,
             self.options.dryrun, self.options.nomails, self.options.builds,
 
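With the -b/--buildid option removed, the policy's target is derived from the build
record whenever builds are being processed. A sketch of the reworked closure with
stand-in names for the script's collaborators (find_policy_by_name and the options
object are assumptions, not the real objects):

    def get_policy(options, distro, build, find_policy_by_name):
        options.distro = distro.name
        policy = find_policy_by_name(options.context)
        policy.setOptions(options)
        if options.builds:
            # The target series, pocket and archive come from the build
            # itself rather than from command-line options.
            assert build, "--builds specified but no build"
            policy.distroseries = build.distro_series
            policy.pocket = build.pocket
            policy.archive = build.archive
        return policy
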
=== modified file 'lib/lp/soyuz/tests/test_binarypackagebuild.py'
--- lib/lp/soyuz/tests/test_binarypackagebuild.py 2010-09-09 17:02:33 +0000
+++ lib/lp/soyuz/tests/test_binarypackagebuild.py 2010-09-17 06:08:57 +0000
@@ -150,6 +150,15 @@
         self.assertStatementCount(
             0, self.build.getSpecificJob)
 
+    def test_getUploader(self):
+        # For ACL purposes the uploader is the changes file signer.
+
+        class MockChanges:
+            signer = "Somebody <somebody@ubuntu.com>"
+
+        self.assertEquals("Somebody <somebody@ubuntu.com>",
+            self.build.getUploader(MockChanges()))
+
 
 class TestBuildUpdateDependencies(TestCaseWithFactory):
 