Hi Jelmer, r=me, assuming you check the following: * to check: the db permissions - can we get rid of delete perms elsewhere?, * to fix: don't see why we need to use removeSecurityProxy in your test instead of a small update to the factory method, * to check: regarding the actual diff - my local db-devel seems to already have some of these changes Thanks! > === modified file 'database/schema/security.cfg' > --- database/schema/security.cfg 2010-09-16 00:33:37 +0000 > +++ database/schema/security.cfg 2010-09-16 09:04:13 +0000 > @@ -1130,9 +1130,12 @@ > public.packagebuild = SELECT, INSERT, UPDATE > public.binarypackagebuild = SELECT, INSERT, UPDATE > public.sourcepackagerecipebuild = SELECT, UPDATE > -public.buildqueue = SELECT, INSERT, UPDATE > -public.job = SELECT, INSERT, UPDATE > -public.buildpackagejob = SELECT, INSERT, UPDATE > +public.sourcepackagerecipebuildjob = SELECT, UPDATE > +public.sourcepackagerecipe = SELECT, UPDATE > +public.buildqueue = SELECT, INSERT, UPDATE, DELETE > +public.job = SELECT, INSERT, UPDATE, DELETE > +public.buildpackagejob = SELECT, INSERT, UPDATE, DELETE Right - so this is so the buildqueue record can be cleaned up earlier by the uploader. Nice. Should we be able to remove some DELETE perms elsewhere? > +public.builder = SELECT > > # Thusly the librarian > public.libraryfilecontent = SELECT, INSERT > > === modified file 'lib/lp/archiveuploader/nascentupload.py' > --- lib/lp/archiveuploader/nascentupload.py 2010-09-15 19:38:48 +0000 > +++ lib/lp/archiveuploader/nascentupload.py 2010-09-16 09:05:43 +0000 > @@ -498,10 +498,13 @@ > if self.binaryful: > return > > - # Set up some convenient shortcut variables. > - > - uploader = self.policy.getUploader(self.changes, build) > - archive = self.policy.archive > + # The build can have an explicit uploader, which may be different > + # from the changes file signer. 
(i.e in case of daily source package > + # builds) > + if build is not None: > + uploader = build.getUploader(self.changes) > + else: > + uploader = self.changes.signer This seems strange? do we not need archive any more? In db-devel it's used straight afterwards, but I'm assuming you've changed that code in a previous branch. And checking your full diff shows that is the case (you're using policy.archive). > > # If we have no signer, there's no ACL we can apply. > if uploader is None: > > === modified file 'lib/lp/archiveuploader/tests/test_uploadprocessor.py' > --- lib/lp/archiveuploader/tests/test_uploadprocessor.py 2010-09-16 09:02:57 +0000 > +++ lib/lp/archiveuploader/tests/test_uploadprocessor.py 2010-09-16 09:05:56 +0000 > @@ -1884,7 +1884,7 @@ > self.assertLogContains( > "Unable to find package build job with id 42. Skipping.") > > - def testNoFiles(self): > + def testBinaryPackageBuild_fail(self): > # If the upload directory is empty, the upload > # will fail. > > @@ -1908,6 +1908,8 @@ > > # Upload and accept a binary for the primary archive source. > shutil.rmtree(upload_dir) > + > + # Commit so the build cookie has the right ids. I'm assuming you tried flushing the store etc. too. > self.layer.txn.commit() > leaf_name = build.getUploadDirLeaf(build.getBuildCookie()) > os.mkdir(os.path.join(self.incoming_folder, leaf_name)) > @@ -1928,7 +1930,7 @@ > self.assertTrue('DEBUG: Moving upload directory ' > in log_contents) > > - def testSuccess(self): > + def testBinaryPackageBuilds(self): > # Properly uploaded binaries should result in the > # build status changing to FULLYBUILT. > # Upload a source package > @@ -1949,6 +1951,8 @@ > > # Upload and accept a binary for the primary archive source. > shutil.rmtree(upload_dir) > + > + # Commit so the build cookie has the right ids. 
> self.layer.txn.commit() > leaf_name = build.getUploadDirLeaf(build.getBuildCookie()) > upload_dir = self.queueUpload("bar_1.0-1_binary", > @@ -1970,6 +1974,77 @@ > 'INFO: Committing the transaction and any mails associated with ' > 'this upload.' in log_lines) > > + def testSourcePackageRecipeBuild(self): > + # Properly uploaded source packages should result in the > + # build status changing to FULLYBUILT. > + > + # Upload a source package > + archive = self.factory.makeArchive() > + archive.require_virtualized = False No need now, but in the future it'd be nice to add that kind of thing to the factory method. > + build = self.factory.makeSourcePackageRecipeBuild(sourcename=u"bar", > + distroseries=self.breezy, archive=archive, requester=archive.owner) > + self.assertEquals(archive.owner, build.requester) > + bq = self.factory.makeSourcePackageRecipeBuildJob(recipe_build=build) > + # Commit so the build cookie has the right ids. > + self.layer.txn.commit() Ditto re. flush. > + leaf_name = build.getUploadDirLeaf(build.getBuildCookie()) > + relative_path = "~%s/%s/%s/%s" % ( > + archive.owner.name, archive.name, self.breezy.distribution.name, > + self.breezy.name) > + upload_dir = self.queueUpload( > + "bar_1.0-1", queue_entry=leaf_name, relative_path=relative_path) > + self.options.context = 'buildd' > + self.options.builds = True > + build.jobStarted() > + # Commit so date_started is recorded and doesn't cause constraint > + # violations later. > + build.status = BuildStatus.UPLOADING > + self.layer.txn.commit() ouch... 
we need multiple commits in the one test :/ > + self.uploadprocessor.processBuildUpload( > + self.incoming_folder, leaf_name) > + self.layer.txn.commit() > + > + self.assertEquals(BuildStatus.FULLYBUILT, build.status, > + build.upload_log.read()) > + self.assertEquals(None, build.builder) > + self.assertIsNot(None, build.date_finished) > + self.assertIsNot(None, build.duration) > + log_contents = build.upload_log.read() > + log_lines = log_contents.splitlines() > + self.assertTrue( > + 'INFO: Processing upload bar_1.0-1_source.changes' in log_lines) > + self.assertTrue( > + 'INFO: Committing the transaction and any mails associated with ' > + 'this upload.' in log_lines) Wow, that's some test! It's probably not worthwhile, but I tend to factor relevant bits out of big tests like that so that it could be as simple as: def testSourcePackageRecipeBuild(self): # ... build = self.uploadSourcePackageRecipeBuild() leaf_name = build.getUploadDirLeaf(build.getBuildCookie()) self.uploadprocessor.processBuildUpload( self.incoming_folder, leaf_name) self.layer.txn.commit() self.assertEquals(BuildStatus.FULLYBUILT, build.status, build.upload_log.read()) ... or similar. > + > + def testSourcePackageRecipeBuild_fail(self): > + # A source package recipe build will fail if no files are present. > + > + # Upload a source package > + build = self.factory.makeSourcePackageRecipeBuild(sourcename=u"bar", > + distroseries=self.breezy) > + removeSecurityProxy(build).package_build.archive = self.ubuntu.main_archive Hrm, I'm surprised that rSP(build).archive = ... doesn't work? (as the archive attribute is delegated from the SPRBuild to the underlying package build. But really, we shouldn't need rSP() here. makeSourcePackageRecipeBuild() already has archive as a kwarg, so it's just a matter of adding it to makeSourcePackageRecipeBuildJob() and passing it through. > + bq = self.factory.makeSourcePackageRecipeBuildJob(recipe_build=build) > + # Commit so the build cookie has the right ids. 
> + self.layer.txn.commit() > + leaf_name = build.getUploadDirLeaf(build.getBuildCookie()) > + os.mkdir(os.path.join(self.incoming_folder, leaf_name)) > + self.options.context = 'buildd' > + self.options.builds = True > + build.jobStarted() > + # Commit so date_started is recorded and doesn't cause constraint > + # violations later. > + self.layer.txn.commit() > + build.status = BuildStatus.UPLOADING Bonus points if you do decide to factor out parts of these tests and they can both re-use the same method, but not necessary :) > + self.uploadprocessor.processBuildUpload( > + self.incoming_folder, leaf_name) > + self.layer.txn.commit() > + > + self.assertEquals(BuildStatus.FAILEDTOUPLOAD, build.status) > + self.assertEquals(None, build.builder) > + self.assertIsNot(None, build.date_finished) > + self.assertIsNot(None, build.duration) > + > > class ParseBuildUploadLeafNameTests(TestCase): > """Tests for parse_build_upload_leaf_name.""" > > === modified file 'lib/lp/archiveuploader/uploadprocessor.py' > --- lib/lp/archiveuploader/uploadprocessor.py 2010-09-16 09:02:57 +0000 > +++ lib/lp/archiveuploader/uploadprocessor.py 2010-09-16 09:06:37 +0000 > @@ -71,7 +71,6 @@ > ) > from lp.archiveuploader.uploadpolicy import ( > BuildDaemonUploadPolicy, > - SOURCE_PACKAGE_RECIPE_UPLOAD_POLICY_NAME, > UploadPolicyError, > ) > from lp.buildmaster.enums import ( > @@ -207,6 +206,7 @@ > The name of the leaf is the build id of the build. > Build uploads always contain a single package per leaf. > """ > + upload_path = os.path.join(fsroot, upload) > try: > job_id = parse_build_upload_leaf_name(upload) > except ValueError: > @@ -220,15 +220,15 @@ > "Unable to find package build job with id %d. Skipping." % > job_id) > return > + logger = BufferLogger() > build = buildfarm_job.getSpecificJob() > if build.status != BuildStatus.UPLOADING: > self.log.warn( > - "Expected build status to be 'UPLOADING', was %s. 
Skipping.", > - build.status.name) > + "Expected build status to be 'UPLOADING', was %s. " > + "Moving to failed.", build.status.name) > + self.moveProcessedUpload(upload_path, "failed", logger) > return > self.log.debug("Build %s found" % build.id) > - logger = BufferLogger() > - upload_path = os.path.join(fsroot, upload) > try: > [changes_file] = self.locateChangesFiles(upload_path) > logger.debug("Considering changefile %s" % changes_file) > @@ -251,11 +251,11 @@ > UploadStatusEnum.REJECTED: "rejected", > UploadStatusEnum.ACCEPTED: "accepted"}[result] > self.moveProcessedUpload(upload_path, destination, logger) > + build.date_finished = datetime.datetime.now(pytz.UTC) Great... was that a case where it wasn't being set (because the condition below wasn't met)? > if not (result == UploadStatusEnum.ACCEPTED and > build.verifySuccessfulUpload() and > build.status == BuildStatus.FULLYBUILT): > build.status = BuildStatus.FAILEDTOUPLOAD > - build.date_finished = datetime.datetime.now(pytz.UTC) > build.notify(extra_info="Uploading build %s failed." % upload) > build.storeUploadLog(logger.buffer.getvalue()) > ... > === modified file 'lib/lp/buildmaster/model/packagebuild.py' > --- lib/lp/buildmaster/model/packagebuild.py 2010-09-16 00:33:37 +0000 > +++ lib/lp/buildmaster/model/packagebuild.py 2010-09-16 09:04:13 +0000 > @@ -94,8 +94,6 @@ > build_farm_job_id = Int(name='build_farm_job', allow_none=False) > build_farm_job = Reference(build_farm_job_id, 'BuildFarmJob.id') > > - policy_name = 'buildd' > - > # The following two properties are part of the IPackageBuild > # interface, but need to be provided by derived classes. > distribution = None > @@ -239,6 +237,10 @@ > """See `IPackageBuild`.""" > raise NotImplementedError > > + def getUploader(self, changes): > + """See `IPackageBuild`.""" > + raise NotImplementedError > + > > class PackageBuildDerived: > """Setup the delegation for package build. > @@ -360,6 +362,9 @@ > # Release the builder for another job. 
> self.buildqueue_record.builder.cleanSlave() > > + # Remove BuildQueue record. > + self.buildqueue_record.destroySelf() > + hrm, strange? I checked my local db-devel to get the context for this segment, and my db-devel already has these lines? Are you sure this diff is correct? > def _handleStatus_PACKAGEFAIL(self, librarian, slave_status, logger): > """Handle a package that had failed to build. > > ... > === modified file 'lib/lp/code/model/sourcepackagerecipebuild.py' > --- lib/lp/code/model/sourcepackagerecipebuild.py 2010-09-16 09:02:57 +0000 > +++ lib/lp/code/model/sourcepackagerecipebuild.py 2010-09-16 09:06:51 +0000 Niice... lots of code deletion :)