This branch looks very good. I have minor comments below.
merge-conditional
-Edwin
>=== added file 'lib/lp/soyuz/interfaces/archivejob.py'
>--- lib/lp/soyuz/interfaces/archivejob.py 1970-01-01 00:00:00 +0000
>+++ lib/lp/soyuz/interfaces/archivejob.py 2010-07-28 19:38:05 +0000
>@@ -0,0 +1,55 @@
>+from zope.interface import Attribute, Interface
>+from zope.schema import Int, Object
>+
>+from canonical.launchpad import _
>+
>+from lazr.enum import DBEnumeratedType, DBItem
>+
>+from lp.services.job.interfaces.job import IJob, IJobSource, IRunnableJob
>+from lp.soyuz.interfaces.archive import IArchive
>+
>+
>+class ArchiveJobType(DBEnumeratedType):
>+ """Values that IArchiveJob.job_type can take."""
>+
>+ COPY_ARCHIVE = DBItem(0, """
>+ Create a copy archive.
>+
>+ This job creates a copy archive from the current state of
>+ the archive.
>+ """)
>+
>+
>+class IArchiveJob(Interface):
>+ """A Job related to an Archive."""
>+
>+ id = Int(
>+ title=_('DB ID'), required=True, readonly=True,
>+ description=_("The tracking number for this job."))
>+
>+ archive = Object(
>+ title=_('The Archive this job is about.'), schema=IArchive,
It looks peculiar to occasionally capitalize the noun. For example,
"job" isn't capitalized here, but it is capitalized in the following title.
I think capitalizing to match the class names is better left to doc
strings, since field titles are often exposed to the user, even if that
would never happen for this interface.
This should be capitalized, otherwise, it looks like there might
have been some words before "constructor" that formed an actual
sentence.
>=== added file 'lib/lp/soyuz/tests/test_copyarchivejob.py'
>--- lib/lp/soyuz/tests/test_copyarchivejob.py 1970-01-01 00:00:00 +0000
>+++ lib/lp/soyuz/tests/test_copyarchivejob.py 2010-07-28 19:38:05 +0000
>+
>+ def checkPublishedSources(self, expected, archive, series):
>+ # We need to be admin as the archive is disabled at this point.
>+ with celebrity_logged_in('admin'):
>+ sources = archive.getPublishedSources(
>+ distroseries=series,
>+ status=(
>+ PackagePublishingStatus.PENDING,
>+ PackagePublishingStatus.PUBLISHED))
>+ actual = []
>+ for source in sources:
>+ actual.append(
>+ (source.source_package_name, source.source_package_version))
Line too long.
>+ self.assertEqual(sorted(expected), sorted(actual))
>+
>+ def test_run(self):
>+ """Test that CopyArchiveJob.run() actually copies the archive.
>+
>+ We just make a simple test here, and rely on PackageCloner tests
>+ to cover the functionality.
>+ """
>+ source_archive, target_archive, series = self.makeSourceAndTarget()
>+ job = CopyArchiveJob.create(
>+ target_archive, source_archive, series,
>+ PackagePublishingPocket.RELEASE, series,
>+ PackagePublishingPocket.RELEASE)
>+ job.run()
>+ self.checkPublishedSources([("bzr", "2.1")], target_archive, series)
>+
>+ def test_run_mergeCopy(self):
>+ """Test that CopyArchiveJob.run() when merge=True does a mergeCopy."""
>+ source_archive, target_archive, series = self.makeSourceAndTarget()
>+ # Create the copy archive
>+ job = CopyArchiveJob.create(
>+ target_archive, source_archive, series,
>+ PackagePublishingPocket.RELEASE, series,
>+ PackagePublishingPocket.RELEASE)
>+ job.start()
>+ job.run()
>+ job.complete()
>+ # Create a new package in the source
>+ self.factory.makeSourcePackagePublishingHistory(
>+ sourcepackagename=self.factory.getOrMakeSourcePackageName(
>+ name='apt'),
>+ distroseries=series, component=self.factory.makeComponent(),
>+ version="1.2", architecturehintlist='any',
>+ archive=source_archive, status=PackagePublishingStatus.PUBLISHED,
>+ pocket=PackagePublishingPocket.RELEASE)
Here you are creating a SPPH record for the source_archive, and
makeSourceAndTarget() creates a SPPH record for the source_archive. To
test the merge, shouldn't the two packages be in different archives?
>+ # Create a job to merge
>+ job = CopyArchiveJob.create(
>+ target_archive, source_archive, series,
>+ PackagePublishingPocket.RELEASE, series,
>+ PackagePublishingPocket.RELEASE, merge=True)
>+ job.run()
>+ self.checkPublishedSources(
>+ [("bzr", "2.1"), ("apt", "1.2")], target_archive, series)
>+
>+ def test_run_with_proc_families(self):
>+ """Test that a CopyArchiveJob job with proc_families uses them.
>+
>+ If we create a CopyArchiveJob with proc_families != None then
>+ they should be used when cloning packages.
>+ """
>+ source_archive, target_archive, series = self.makeSourceAndTarget()
>+ proc_families = [ProcessorFamilySet().getByName("x86")]
>+ job = CopyArchiveJob.create(
>+ target_archive, source_archive, series,
>+ PackagePublishingPocket.RELEASE, series,
>+ PackagePublishingPocket.RELEASE, proc_families=proc_families)
>+ job.run()
>+ builds = list(
>+ getUtility(IBinaryPackageBuildSet).getBuildsForArchive(
>+ target_archive, status=BuildStatus.NEEDSBUILD))
>+ actual_builds = list()
>+ for build in builds:
>+ naked_build = removeSecurityProxy(build)
>+ spr = naked_build.source_package_release
>+ actual_builds.append((spr.name, spr.version))
>+ # One build for the one package, as we specified one processor
>+ # family.
>+ self.assertEqual([("bzr", "2.1")], actual_builds)
I see that you passed in x86 for the proc_families. I don't see any
verification that it used that proc family when copying the package.
test_create_sets_proc_family_ids() tests that halfway through the
process, but it seems like you could check it here on the actual builds.
Hi James,
This branch looks very good. I have minor comments below.
merge-conditional
-Edwin
>=== added file 'lib/lp/soyuz/interfaces/archivejob.py'
>--- lib/lp/soyuz/interfaces/archivejob.py 1970-01-01 00:00:00 +0000
>+++ lib/lp/soyuz/interfaces/archivejob.py 2010-07-28 19:38:05 +0000
>@@ -0,0 +1,55 @@
>+from zope.interface import Attribute, Interface
>+from zope.schema import Int, Object
>+
>+from canonical.launchpad import _
>+
>+from lazr.enum import DBEnumeratedType, DBItem
>+
>+from lp.services.job.interfaces.job import IJob, IJobSource, IRunnableJob
>+from lp.soyuz.interfaces.archive import IArchive
>+
>+
>+class ArchiveJobType(DBEnumeratedType):
>+    """Values that IArchiveJob.job_type can take."""
>+
>+    COPY_ARCHIVE = DBItem(0, """
>+        Create a copy archive.
>+
>+        This job creates a copy archive from the current state of
>+        the archive.
>+        """)
>+
>+
>+class IArchiveJob(Interface):
>+    """A Job related to an Archive."""
>+
>+    id = Int(
>+        title=_('DB ID'), required=True, readonly=True,
>+        description=_("The tracking number for this job."))
>+
>+    archive = Object(
>+        title=_('The Archive this job is about.'), schema=IArchive,
It looks peculiar to occasionally capitalize the noun. For example,
"job" isn't capitalized here, but it is capitalized in the following title.
I think capitalizing to match the class names is better left to doc
strings, since field titles are often exposed to the user, even if that
would never happen for this interface.
>+        required=True)
>+
>+    job = Object(
>+        title=_('The common Job attributes'), schema=IJob, required=True)
>+
>+    metadata = Attribute('A dict of data about the job.')
>+
>+    def destroySelf():
>+        """Destroy this object."""
>+
>+
>+class IArchiveJobSource(IJobSource):
>+    """An interface for acquiring IArchiveJobs."""
>+
>+    def create(archive):
>+        """Create a new IArchiveJobs for an archive."""
>+
>+
>+class ICopyArchiveJob(IRunnableJob):
>+    """A Job to copy archives."""
>+
>+
>+class ICopyArchiveJobSource(IArchiveJobSource):
>+    """Interface for acquiring CopyArchiveJobs."""
>
>=== added file 'lib/lp/soyuz/model/archivejob.py'
>--- lib/lp/soyuz/model/archivejob.py 1970-01-01 00:00:00 +0000
>+++ lib/lp/soyuz/model/archivejob.py 2010-07-28 19:38:05 +0000
>@@ -0,0 +1,130 @@
>+
>+__metaclass__ = object
>+
>+import simplejson
>+
>+from sqlobject import SQLObjectNotFound
>+from storm.base import Storm
>+from storm.expr import And
>+from storm.locals import Int, Reference, Unicode
>+
>+from zope.component import getUtility
>+from zope.interface import classProvides, implements
>+
>+from canonical.database.enumcol import EnumCol
>+from canonical.launchpad.webapp.interfaces import (
>+    DEFAULT_FLAVOR, IStoreSelector, MAIN_STORE, MASTER_FLAVOR)
>+
>+from lazr.delegates import delegates
>+
>+from lp.services.job.model.job import Job
>+from lp.services.job.runner import BaseRunnableJob
>+from lp.soyuz.interfaces.archivejob import (
>+    ArchiveJobType, IArchiveJob, IArchiveJobSource)
>+from lp.soyuz.model.archive import Archive
>+
>+
>+class ArchiveJob(Storm):
>+    """Base class for jobs related to Archives."""
>+
>+    implements(IArchiveJob)
>+
>+    __storm_table__ = 'archivejob'
>+
>+    id = Int(primary=True)
>+
>+    job_id = Int(name='job')
>+    job = Reference(job_id, Job.id)
>+
>+    archive_id = Int(name='archive')
>+    archive = Reference(archive_id, Archive.id)
>+
>+    job_type = EnumCol(enum=ArchiveJobType, notNull=True)
>+
>+    _json_data = Unicode('json_data')
>+
>+    @property
>+    def metadata(self):
>+        return simplejson.loads(self._json_data)
>+
>+ def __init__(self, archive, job_type, metadata):
>+ """constructor.
This should be capitalized, otherwise, it looks like there might
have been some words before "constructor" that formed an actual
sentence.
>=== added file 'lib/lp/soyuz/tests/test_copyarchivejob.py'
>--- lib/lp/soyuz/tests/test_copyarchivejob.py 1970-01-01 00:00:00 +0000
>+++ lib/lp/soyuz/tests/test_copyarchivejob.py 2010-07-28 19:38:05 +0000
>+
>+    def checkPublishedSources(self, expected, archive, series):
>+        # We need to be admin as the archive is disabled at this point.
>+        with celebrity_logged_in('admin'):
>+            sources = archive.getPublishedSources(
>+                distroseries=series,
>+                status=(
>+                    PackagePublishingStatus.PENDING,
>+                    PackagePublishingStatus.PUBLISHED))
>+            actual = []
>+            for source in sources:
>+                actual.append(
>+                    (source.source_package_name, source.source_package_version))
Line too long.
>+        self.assertEqual(sorted(expected), sorted(actual))
>+
>+    def test_run(self):
>+        """Test that CopyArchiveJob.run() actually copies the archive.
>+
>+        We just make a simple test here, and rely on PackageCloner tests
>+        to cover the functionality.
>+        """
>+        source_archive, target_archive, series = self.makeSourceAndTarget()
>+        job = CopyArchiveJob.create(
>+            target_archive, source_archive, series,
>+            PackagePublishingPocket.RELEASE, series,
>+            PackagePublishingPocket.RELEASE)
>+        job.run()
>+        self.checkPublishedSources([("bzr", "2.1")], target_archive, series)
>+
>+    def test_run_mergeCopy(self):
>+        """Test that CopyArchiveJob.run() when merge=True does a mergeCopy."""
>+        source_archive, target_archive, series = self.makeSourceAndTarget()
>+        # Create the copy archive
>+        job = CopyArchiveJob.create(
>+            target_archive, source_archive, series,
>+            PackagePublishingPocket.RELEASE, series,
>+            PackagePublishingPocket.RELEASE)
>+        job.start()
>+        job.run()
>+        job.complete()
>+        # Create a new package in the source
>+        self.factory.makeSourcePackagePublishingHistory(
>+            sourcepackagename=self.factory.getOrMakeSourcePackageName(
>+                name='apt'),
>+            distroseries=series, component=self.factory.makeComponent(),
>+            version="1.2", architecturehintlist='any',
>+            archive=source_archive, status=PackagePublishingStatus.PUBLISHED,
>+            pocket=PackagePublishingPocket.RELEASE)
Here you are creating a SPPH record for the source_archive, and
makeSourceAndTarget() creates a SPPH record for the source_archive. To
test the merge, shouldn't the two packages be in different archives?
>+        # Create a job to merge
>+        job = CopyArchiveJob.create(
>+            target_archive, source_archive, series,
>+            PackagePublishingPocket.RELEASE, series,
>+            PackagePublishingPocket.RELEASE, merge=True)
>+        job.run()
>+        self.checkPublishedSources(
>+            [("bzr", "2.1"), ("apt", "1.2")], target_archive, series)
>+
>+    def test_run_with_proc_families(self):
>+        """Test that a CopyArchiveJob job with proc_families uses them.
>+
>+        If we create a CopyArchiveJob with proc_families != None then
>+        they should be used when cloning packages.
>+        """
>+        source_archive, target_archive, series = self.makeSourceAndTarget()
>+        proc_families = [ProcessorFamilySet().getByName("x86")]
>+        job = CopyArchiveJob.create(
>+            target_archive, source_archive, series,
>+            PackagePublishingPocket.RELEASE, series,
>+            PackagePublishingPocket.RELEASE, proc_families=proc_families)
>+        job.run()
>+        builds = list(
>+            getUtility(IBinaryPackageBuildSet).getBuildsForArchive(
>+                target_archive, status=BuildStatus.NEEDSBUILD))
>+        actual_builds = list()
>+        for build in builds:
>+            naked_build = removeSecurityProxy(build)
>+            spr = naked_build.source_package_release
>+            actual_builds.append((spr.name, spr.version))
>+        # One build for the one package, as we specified one processor
>+        # family.
>+        self.assertEqual([("bzr", "2.1")], actual_builds)
I see that you passed in x86 for the proc_families. I don't see any
verification that it used that proc family when copying the package.
test_create_sets_proc_family_ids() tests that halfway through the
process, but it seems like you could check it here on the actual builds.