Code review comment for lp:~james-w/launchpad/copy-archive-job

Revision history for this message
Edwin Grubbs (edwin-grubbs) wrote :

Hi James,

This branch looks very good. I have minor comments below.

merge-conditional

-Edwin

>=== added file 'lib/lp/soyuz/interfaces/archivejob.py'
>--- lib/lp/soyuz/interfaces/archivejob.py 1970-01-01 00:00:00 +0000
>+++ lib/lp/soyuz/interfaces/archivejob.py 2010-07-28 19:38:05 +0000
>@@ -0,0 +1,55 @@
>+from zope.interface import Attribute, Interface
>+from zope.schema import Int, Object
>+
>+from canonical.launchpad import _
>+
>+from lazr.enum import DBEnumeratedType, DBItem
>+
>+from lp.services.job.interfaces.job import IJob, IJobSource, IRunnableJob
>+from lp.soyuz.interfaces.archive import IArchive
>+
>+
>+class ArchiveJobType(DBEnumeratedType):
>+ """Values that IArchiveJob.job_type can take."""
>+
>+ COPY_ARCHIVE = DBItem(0, """
>+ Create a copy archive.
>+
>+ This job creates a copy archive from the current state of
>+ the archive.
>+ """)
>+
>+
>+class IArchiveJob(Interface):
>+ """A Job related to an Archive."""
>+
>+ id = Int(
>+ title=_('DB ID'), required=True, readonly=True,
>+ description=_("The tracking number for this job."))
>+
>+ archive = Object(
>+ title=_('The Archive this job is about.'), schema=IArchive,

It looks peculiar to occasionally capitalize the noun. For example,
"job" isn't capitalized here, but it is capitalized in the following title.
I think capitalizing to match the class names is better left to doc
strings, since field titles are often exposed to the user, even if that
would never happen for this interface.

>+ required=True)
>+
>+ job = Object(
>+ title=_('The common Job attributes'), schema=IJob, required=True)
>+
>+ metadata = Attribute('A dict of data about the job.')
>+
>+ def destroySelf():
>+ """Destroy this object."""
>+
>+
>+class IArchiveJobSource(IJobSource):
>+ """An interface for acquiring IArchiveJobs."""
>+
>+ def create(archive):
>+ """Create a new IArchiveJobs for an archive."""
>+
>+
>+class ICopyArchiveJob(IRunnableJob):
>+ """A Job to copy archives."""
>+
>+
>+class ICopyArchiveJobSource(IArchiveJobSource):
>+ """Interface for acquiring CopyArchiveJobs."""
>
>=== added file 'lib/lp/soyuz/model/archivejob.py'
>--- lib/lp/soyuz/model/archivejob.py 1970-01-01 00:00:00 +0000
>+++ lib/lp/soyuz/model/archivejob.py 2010-07-28 19:38:05 +0000
>@@ -0,0 +1,130 @@
>+
>+__metaclass__ = object
>+
>+import simplejson
>+
>+from sqlobject import SQLObjectNotFound
>+from storm.base import Storm
>+from storm.expr import And
>+from storm.locals import Int, Reference, Unicode
>+
>+from zope.component import getUtility
>+from zope.interface import classProvides, implements
>+
>+from canonical.database.enumcol import EnumCol
>+from canonical.launchpad.webapp.interfaces import (
>+ DEFAULT_FLAVOR, IStoreSelector, MAIN_STORE, MASTER_FLAVOR)
>+
>+from lazr.delegates import delegates
>+
>+from lp.services.job.model.job import Job
>+from lp.services.job.runner import BaseRunnableJob
>+from lp.soyuz.interfaces.archivejob import (
>+ ArchiveJobType, IArchiveJob, IArchiveJobSource)
>+from lp.soyuz.model.archive import Archive
>+
>+
>+class ArchiveJob(Storm):
>+ """Base class for jobs related to Archives."""
>+
>+ implements(IArchiveJob)
>+
>+ __storm_table__ = 'archivejob'
>+
>+ id = Int(primary=True)
>+
>+ job_id = Int(name='job')
>+ job = Reference(job_id, Job.id)
>+
>+ archive_id = Int(name='archive')
>+ archive = Reference(archive_id, Archive.id)
>+
>+ job_type = EnumCol(enum=ArchiveJobType, notNull=True)
>+
>+ _json_data = Unicode('json_data')
>+
>+ @property
>+ def metadata(self):
>+ return simplejson.loads(self._json_data)
>+
>+ def __init__(self, archive, job_type, metadata):
>+ """constructor.

This should be capitalized, otherwise, it looks like there might
have been some words before "constructor" that formed an actual
sentence.

>=== added file 'lib/lp/soyuz/tests/test_copyarchivejob.py'
>--- lib/lp/soyuz/tests/test_copyarchivejob.py 1970-01-01 00:00:00 +0000
>+++ lib/lp/soyuz/tests/test_copyarchivejob.py 2010-07-28 19:38:05 +0000
>+
>+ def checkPublishedSources(self, expected, archive, series):
>+ # We need to be admin as the archive is disabled at this point.
>+ with celebrity_logged_in('admin'):
>+ sources = archive.getPublishedSources(
>+ distroseries=series,
>+ status=(
>+ PackagePublishingStatus.PENDING,
>+ PackagePublishingStatus.PUBLISHED))
>+ actual = []
>+ for source in sources:
>+ actual.append(
>+ (source.source_package_name, source.source_package_version))

Line too long.

>+ self.assertEqual(sorted(expected), sorted(actual))
>+
>+ def test_run(self):
>+ """Test that CopyArchiveJob.run() actually copies the archive.
>+
>+ We just make a simple test here, and rely on PackageCloner tests
>+ to cover the functionality.
>+ """
>+ source_archive, target_archive, series = self.makeSourceAndTarget()
>+ job = CopyArchiveJob.create(
>+ target_archive, source_archive, series,
>+ PackagePublishingPocket.RELEASE, series,
>+ PackagePublishingPocket.RELEASE)
>+ job.run()
>+ self.checkPublishedSources([("bzr", "2.1")], target_archive, series)
>+
>+ def test_run_mergeCopy(self):
>+ """Test that CopyArchiveJob.run() when merge=True does a mergeCopy."""
>+ source_archive, target_archive, series = self.makeSourceAndTarget()
>+ # Create the copy archive
>+ job = CopyArchiveJob.create(
>+ target_archive, source_archive, series,
>+ PackagePublishingPocket.RELEASE, series,
>+ PackagePublishingPocket.RELEASE)
>+ job.start()
>+ job.run()
>+ job.complete()
>+ # Create a new package in the source
>+ self.factory.makeSourcePackagePublishingHistory(
>+ sourcepackagename=self.factory.getOrMakeSourcePackageName(
>+ name='apt'),
>+ distroseries=series, component=self.factory.makeComponent(),
>+ version="1.2", architecturehintlist='any',
>+ archive=source_archive, status=PackagePublishingStatus.PUBLISHED,
>+ pocket=PackagePublishingPocket.RELEASE)

Here you are creating a SPPH record for the source_archive, and
makeSourceAndTarget() creates a SPPH record for the source_archive. To
test the merge, shouldn't the two packages be in different archives?

>+ # Create a job to merge
>+ job = CopyArchiveJob.create(
>+ target_archive, source_archive, series,
>+ PackagePublishingPocket.RELEASE, series,
>+ PackagePublishingPocket.RELEASE, merge=True)
>+ job.run()
>+ self.checkPublishedSources(
>+ [("bzr", "2.1"), ("apt", "1.2")], target_archive, series)
>+
>+ def test_run_with_proc_families(self):
>+ """Test that a CopyArchiveJob job with proc_families uses them.
>+
>+ If we create a CopyArchiveJob with proc_families != None then
>+ they should be used when cloning packages.
>+ """
>+ source_archive, target_archive, series = self.makeSourceAndTarget()
>+ proc_families = [ProcessorFamilySet().getByName("x86")]
>+ job = CopyArchiveJob.create(
>+ target_archive, source_archive, series,
>+ PackagePublishingPocket.RELEASE, series,
>+ PackagePublishingPocket.RELEASE, proc_families=proc_families)
>+ job.run()
>+ builds = list(
>+ getUtility(IBinaryPackageBuildSet).getBuildsForArchive(
>+ target_archive, status=BuildStatus.NEEDSBUILD))
>+ actual_builds = list()
>+ for build in builds:
>+ naked_build = removeSecurityProxy(build)
>+ spr = naked_build.source_package_release
>+ actual_builds.append((spr.name, spr.version))
>+ # One build for the one package, as we specified one processor
>+ # family.
>+ self.assertEqual([("bzr", "2.1")], actual_builds)

I see that you passed in x86 for the proc_families. I don't see any
verification that it used that proc family when copying the package.
test_create_sets_proc_family_ids() tests that halfway through the
process, but it seems like you could check it here on the actual builds.

review: Approve (code)

« Back to merge proposal