Merge lp:~mwhudson/launchpad/no-hosted-area into lp:launchpad

Proposed by Michael Hudson-Doyle
Status: Merged
Approved by: Tim Penhey
Approved revision: no longer in the source branch.
Merged at revision: 10828
Proposed branch: lp:~mwhudson/launchpad/no-hosted-area
Merge into: lp:launchpad
Diff against target: 9694 lines (+3175/-2889)
146 files modified
Makefile (+4/-5)
bzrplugins/lpserve.py (+4/-4)
configs/development/launchpad-lazr.conf (+2/-2)
configs/testrunner/launchpad-lazr.conf (+3/-7)
cronscripts/merge-proposal-jobs.py (+22/-30)
cronscripts/mirror-prober.sh (+2/-2)
cronscripts/nightly.sh (+14/-14)
cronscripts/publishing/cron.germinate (+1/-1)
cronscripts/publishing/maintenance-check.py (+217/-29)
cronscripts/update_preview_diffs.py (+0/-38)
database/replication/Makefile (+15/-36)
database/replication/authdb_create.sql (+0/-885)
database/replication/authdb_drop.sql (+0/-14)
database/replication/authdb_sequences.sql (+0/-22)
database/replication/helpers.py (+13/-30)
database/replication/initialize.py (+7/-54)
database/replication/new-slave.py (+37/-43)
database/replication/populate_auth_replication_set.py (+0/-177)
database/replication/preamble.py (+1/-1)
database/replication/repair-restored-db.py (+1/-1)
database/replication/report.py (+1/-1)
database/replication/slon_ctl.py (+1/-1)
database/replication/sync.py (+1/-1)
database/schema/diagram.py (+1/-1)
database/schema/emptytables.py (+1/-1)
database/schema/fti.py (+1/-1)
database/schema/online_fti_updater.py (+1/-1)
database/schema/patch-2207-47-0.sql (+6/-0)
database/schema/patch-2207-48-0.sql (+27/-0)
database/schema/pending/add-mailing-list-experts.py (+1/-1)
database/schema/pending/create-openid-rp-configs.py (+1/-1)
database/schema/pending/gnu-savannah-celebrity.py (+1/-1)
database/schema/pending/migrate_kde_potemplates.py (+1/-1)
database/schema/pending/new-person-columns.py (+1/-1)
database/schema/pending/patch-2207-49-0.sql (+16/-0)
database/schema/pending/prune-nonce.py (+1/-1)
database/schema/pending/update-shippingrequest-types.py (+1/-1)
database/schema/pending/update-translation-credits.py (+3/-3)
database/schema/reset_sequences.py (+1/-1)
database/schema/security.cfg (+10/-96)
database/schema/security.py (+1/-1)
database/schema/sort_sql.py (+1/-1)
database/schema/trusted.sql (+33/-33)
database/schema/unautovacuumable.py (+1/-1)
database/schema/upgrade.py (+5/-4)
lib/canonical/config/schema-lazr.conf (+28/-0)
lib/canonical/launchpad/daemons/tachandler.py (+4/-4)
lib/canonical/launchpad/doc/product-update-remote-product-script.txt (+1/-1)
lib/canonical/launchpad/scripts/garbo.py (+2/-151)
lib/canonical/launchpad/scripts/tests/test_garbo.py (+0/-56)
lib/contrib/glock.py (+1/-1)
lib/lp/answers/doc/expiration.txt (+1/-1)
lib/lp/archivepublisher/publishing.py (+31/-1)
lib/lp/archivepublisher/scripts/generate_ppa_htaccess.py (+8/-1)
lib/lp/archivepublisher/tests/test_generate_ppa_htaccess.py (+42/-0)
lib/lp/archivepublisher/tests/test_publisher.py (+51/-31)
lib/lp/bugs/browser/bugwatch.py (+60/-1)
lib/lp/bugs/browser/configure.zcml (+6/-0)
lib/lp/bugs/browser/tests/bugwatch-views.txt (+83/-1)
lib/lp/bugs/configure.zcml (+4/-1)
lib/lp/bugs/doc/bug-watch-activity.txt (+9/-5)
lib/lp/bugs/doc/bugnotification-sending.txt (+1/-1)
lib/lp/bugs/doc/bugtask-expiration.txt (+1/-1)
lib/lp/bugs/doc/bugtask.txt (+1/-1)
lib/lp/bugs/doc/bugwatch.txt (+84/-0)
lib/lp/bugs/doc/checkwatches.txt (+1/-1)
lib/lp/bugs/doc/cve-update.txt (+2/-2)
lib/lp/bugs/interfaces/bugwatch.py (+52/-0)
lib/lp/bugs/model/bugwatch.py (+63/-3)
lib/lp/bugs/scripts/bugheat.py (+5/-5)
lib/lp/bugs/scripts/checkwatches/scheduler.py (+4/-4)
lib/lp/bugs/scripts/tests/test_bugheat.py (+3/-3)
lib/lp/bugs/stories/bugwatches/xx-bugwatch-errors.txt (+16/-2)
lib/lp/bugs/stories/bugwatches/xx-edit-bugwatch.txt (+123/-0)
lib/lp/bugs/templates/bugwatch-editform.pt (+4/-0)
lib/lp/bugs/templates/bugwatch-portlet-activity.pt (+44/-0)
lib/lp/bugs/tests/test_apportjob.py (+1/-1)
lib/lp/bugs/tests/test_bugheat.py (+1/-1)
lib/lp/code/configure.zcml (+68/-26)
lib/lp/code/doc/branch-merge-proposal-notifications.txt (+11/-5)
lib/lp/code/doc/codereviewcomment.txt (+18/-0)
lib/lp/code/interfaces/branchmergeproposal.py (+71/-13)
lib/lp/code/interfaces/codehosting.py (+12/-0)
lib/lp/code/interfaces/codereviewcomment.py (+4/-0)
lib/lp/code/mail/branch.py (+11/-12)
lib/lp/code/mail/branchmergeproposal.py (+8/-57)
lib/lp/code/mail/codereviewcomment.py (+14/-5)
lib/lp/code/mail/tests/test_branch.py (+33/-5)
lib/lp/code/mail/tests/test_branchmergeproposal.py (+150/-79)
lib/lp/code/mail/tests/test_codehandler.py (+22/-26)
lib/lp/code/mail/tests/test_codereviewcomment.py (+11/-2)
lib/lp/code/model/branchmergeproposal.py (+10/-13)
lib/lp/code/model/branchmergeproposaljob.py (+368/-25)
lib/lp/code/model/branchtarget.py (+0/-17)
lib/lp/code/model/codereviewcomment.py (+8/-0)
lib/lp/code/model/tests/test_branchcloud.py (+4/-3)
lib/lp/code/model/tests/test_branchmergeproposaljobs.py (+349/-0)
lib/lp/code/model/tests/test_branchmergeproposals.py (+30/-248)
lib/lp/code/model/tests/test_diff.py (+4/-0)
lib/lp/code/scripts/tests/test_create_merge_proposals.py (+3/-3)
lib/lp/code/scripts/tests/test_merge_proposal_jobs.py (+9/-47)
lib/lp/code/scripts/tests/test_reclaim_branch_space.py (+3/-4)
lib/lp/code/scripts/tests/test_scan_branches.py (+2/-2)
lib/lp/code/scripts/tests/test_sendbranchmail.py (+13/-8)
lib/lp/code/scripts/tests/test_update_preview_diffs.py (+0/-93)
lib/lp/code/scripts/tests/test_upgrade_branches.py (+4/-4)
lib/lp/code/stories/webservice/xx-code-import.txt (+1/-0)
lib/lp/code/subscribers/branchmergeproposal.py (+55/-0)
lib/lp/code/tests/helpers.py (+16/-0)
lib/lp/code/xmlrpc/codehosting.py (+33/-8)
lib/lp/code/xmlrpc/tests/test_codehosting.py (+86/-1)
lib/lp/codehosting/inmemory.py (+26/-3)
lib/lp/codehosting/scanner/tests/test_bzrsync.py (+6/-5)
lib/lp/codehosting/scanner/tests/test_mergedetection.py (+14/-2)
lib/lp/codehosting/sftp.py (+2/-5)
lib/lp/codehosting/tests/servers.py (+1/-1)
lib/lp/codehosting/tests/test_acceptance.py (+108/-114)
lib/lp/codehosting/vfs/branchfs.py (+106/-72)
lib/lp/codehosting/vfs/branchfsclient.py (+2/-2)
lib/lp/codehosting/vfs/tests/test_branchfs.py (+128/-51)
lib/lp/codehosting/vfs/tests/test_filesystem.py (+1/-1)
lib/lp/hardwaredb/doc/hwdb-submission.txt (+4/-4)
lib/lp/registry/doc/distribution-mirror.txt (+5/-5)
lib/lp/registry/doc/person-karma.txt (+1/-1)
lib/lp/registry/doc/sourceforge-remote-products.txt (+1/-1)
lib/lp/registry/doc/standing.txt (+2/-2)
lib/lp/services/job/runner.py (+29/-10)
lib/lp/services/job/tests/test_runner.py (+6/-2)
lib/lp/services/mail/sendmail.py (+27/-24)
lib/lp/soyuz/doc/buildd-slavescanner.txt (+2/-2)
lib/lp/soyuz/doc/gina.txt (+1/-1)
lib/lp/soyuz/doc/manage-chroot.txt (+1/-1)
lib/lp/soyuz/doc/package-cache-script.txt (+1/-1)
lib/lp/soyuz/scripts/publishdistro.py (+33/-20)
lib/lp/soyuz/scripts/tests/test_processupload.py (+1/-1)
lib/lp/testing/factory.py (+2/-0)
lib/lp/translations/doc/distroseries-translations-copy.txt (+4/-2)
lib/lp/translations/doc/fix_translation_credits.txt (+2/-1)
lib/lp/translations/doc/poexport-language-pack.txt (+2/-1)
lib/lp/translations/doc/poexport-request.txt (+1/-1)
lib/lp/translations/doc/pofile-verify-stats.txt (+2/-2)
lib/lp/translations/doc/rosetta-poimport-script.txt (+1/-1)
lib/lp/translations/doc/sourcepackagerelease-translations.txt (+3/-2)
lib/lp/translations/doc/translations-export-to-branch.txt (+1/-1)
lib/lp/translations/scripts/tests/test_translations_to_branch.py (+1/-1)
scripts/close-account.py (+19/-11)
To merge this branch: bzr merge lp:~mwhudson/launchpad/no-hosted-area
Reviewer: Tim Penhey (community)
Status: Approve
Review via email: mp+23643@code.launchpad.net

Description of the change

Hi Tim,

This branch makes the ssh codehosting server use only one area (the mirrored area) rather than both the mirrored and hosted areas.

In addition, on branch unlock, rather than requesting that the branch be mirrored, the codehosting server now calls a method that updates the same fields the puller updates for hosted branches, hopefully reducing latency.

In the next pipe I extend this branchChanged endpoint to also record the branch format.
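
In sketch form, branchChanged boils down to the following (condensed; the branch lookup and the stacked-on/mirror_status_message handling are elided, and the IBranchScanJobSource import path is assumed here just to keep the sketch self-contained):

    from canonical.database.constants import UTC_NOW
    from zope.component import getUtility
    # Assumed import path, for illustration only:
    from lp.code.interfaces.branchjob import IBranchScanJobSource

    def branchChanged(self, branch_id, stacked_on_location, last_revision_id):
        branch = self._getBranch(branch_id)  # hypothetical lookup helper
        # ... resolve stacked_on_location and record any error message ...
        branch.last_mirrored = UTC_NOW
        if branch.last_mirrored_id != last_revision_id:
            branch.last_mirrored_id = last_revision_id
            # A scan job is queued only when the tip revision actually moved.
            getUtility(IBranchScanJobSource).create(branch)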

There's obviously no way this branch can land on its own; it will break many, many tests.

Cheers,
mwh

Tim Penhey (thumper) wrote:

lib/lp/code/xmlrpc/codehosting.py
in: def branchChanged(self, branch_id, stacked_on_location, last_revision_id):
  + branch.last_mirrored = datetime.datetime.now(pytz.UTC)
should probably be using UTC_NOW

lib/lp/codehosting/inmemory.py: the branchChanged method should use UTC_NOW too.

then:
test_branchChanged_sets_last_mirrored
can use:
    self.assertSqlAttributeEqualsDate(
        branch, 'last_mirrored', UTC_NOW)
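
Putting it together, the whole test could then read roughly like this (a sketch; makeAnyBranch and the branchfs fixture are as in the existing test):

    def test_branchChanged_sets_last_mirrored(self):
        # branchChanged() sets last_mirrored to the current time.
        branch = self.factory.makeAnyBranch()
        self.branchfs.branchChanged(branch.id, '', '')
        self.assertSqlAttributeEqualsDate(
            branch, 'last_mirrored', UTC_NOW)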

def test_branchChanged_doesnt_create_scan_job_for_noop_change(self):
    # XXX Is this even the right thing to do?
It will be with your next pipe.

lib/lp/codehosting/vfs/branchfs.py
class LaunchpadServer (I think - around line 558)
the __init__ method still refers to the authserver, and there is also an
XXX comment that I'm wondering whether we can remove.

lib/lp/codehosting/vfs/tests/test_branchfs.py - still refers to an
authserver too.

# XXX Maaaybe we could complain on stderr here?
  - Not following our XXX format, and is this something we want to do?

# XXX: JonathanLange 2007-05-29: The 'chroot' line lacks a unit test.
I don't suppose you want to add a unit test for this?

General Note: we probably want to rename config.codehosting.mirrored_branches
at some stage.

Michael Hudson-Doyle (mwhudson) wrote:

On 19/04/10 16:20, Tim Penhey wrote:
> lib/lp/code/xmlrpc/codehosting.py
> in: def branchChanged(self, branch_id, stacked_on_location, last_revision_id):
> + branch.last_mirrored = datetime.datetime.now(pytz.UTC)
> should probably be using UTC_NOW
>
> lib/lp/codehosting/inmemory.py: the branchChanged method should use UTC_NOW too.
>
> then:
> test_branchChanged_sets_last_mirrored
> can use:
> self.assertSqlAttributeEqualsDate(
> branch, 'last_mirrored', UTC_NOW)

Yeah OK.

> def test_branchChanged_doesnt_create_scan_job_for_noop_change(self):
> # XXX Is this even the right thing to do?
> It will be with your next pipe.

Right, I'll delete the comment.

> lib/lp/codehosting/vfs/branchfs.py
> class LaunchpadServer (I think - around line 558)
> the __init__ method still refers to the authserver, and there is also an
> XXX comment that I'm wondering whether we can remove.

Grar, can I fix this in the later pipe that combines the two endpoints?

> lib/lp/codehosting/vfs/tests/test_branchfs.py - still refers to an
> authserver too.

This too.

> # XXX Maaaybe we could complain on stderr here?
> - Not following our XXX format, and is this something we want to do?

Nah, I'll delete the comment.

> # XXX: JonathanLange 2007-05-29: The 'chroot' line lacks a unit test.
> I don't suppose you want to add a unit test for this?

OK. It's a bit terrible though.

> General Note: we probably want to rename config.codehosting.mirrored_branches
> at some stage.

Given recent edge rollout funnies, I don't know how we'd do this :/ But
yeah, it doesn't really make sense.

Interdiff attached.

Cheers,
mwh

1=== modified file 'lib/lp/code/xmlrpc/codehosting.py'
2--- lib/lp/code/xmlrpc/codehosting.py 2010-04-19 10:45:14 +0000
3+++ lib/lp/code/xmlrpc/codehosting.py 2010-04-19 21:57:33 +0000
4@@ -23,6 +23,16 @@
5 from zope.security.proxy import removeSecurityProxy
6 from zope.security.management import endInteraction
7
8+from canonical.database.constants import UTC_NOW
9+from canonical.launchpad.validators import LaunchpadValidationError
10+from canonical.launchpad.webapp import LaunchpadXMLRPCView
11+from canonical.launchpad.webapp.authorization import check_permission
12+from canonical.launchpad.webapp.interaction import setupInteractionForPerson
13+from canonical.launchpad.webapp.interfaces import (
14+ NameLookupFailed, NotFoundError)
15+from canonical.launchpad.xmlrpc import faults
16+from canonical.launchpad.xmlrpc.helpers import return_fault
17+
18 from lp.code.errors import UnknownBranchTypeError
19 from lp.code.enums import BranchType
20 from lp.code.interfaces.branch import BranchCreationException
21@@ -38,14 +48,6 @@
22 from lp.registry.interfaces.product import NoSuchProduct
23 from lp.services.scripts.interfaces.scriptactivity import IScriptActivitySet
24 from lp.services.utils import iter_split
25-from canonical.launchpad.validators import LaunchpadValidationError
26-from canonical.launchpad.webapp import LaunchpadXMLRPCView
27-from canonical.launchpad.webapp.authorization import check_permission
28-from canonical.launchpad.webapp.interaction import setupInteractionForPerson
29-from canonical.launchpad.webapp.interfaces import (
30- NameLookupFailed, NotFoundError)
31-from canonical.launchpad.xmlrpc import faults
32-from canonical.launchpad.xmlrpc.helpers import return_fault
33
34
35 UTC = pytz.timezone('UTC')
36@@ -262,7 +264,7 @@
37 branch.mirror_status_message = (
38 'Invalid stacked on location: ' + stacked_on_location)
39 branch.stacked_on = stacked_on_branch
40- branch.last_mirrored = datetime.datetime.now(pytz.UTC)
41+ branch.last_mirrored = UTC_NOW
42 if branch.last_mirrored_id != last_revision_id:
43 branch.last_mirrored_id = last_revision_id
44 getUtility(IBranchScanJobSource).create(branch)
45
46=== modified file 'lib/lp/code/xmlrpc/tests/test_codehosting.py'
47--- lib/lp/code/xmlrpc/tests/test_codehosting.py 2010-04-19 04:06:23 +0000
48+++ lib/lp/code/xmlrpc/tests/test_codehosting.py 2010-04-19 22:09:34 +0000
49@@ -15,7 +15,6 @@
50 from zope.component import getUtility
51 from zope.security.proxy import removeSecurityProxy
52
53-from lp.codehosting.inmemory import InMemoryFrontend
54 from canonical.database.constants import UTC_NOW
55 from canonical.launchpad.ftests import ANONYMOUS, login, logout
56 from lp.services.scripts.interfaces.scriptactivity import (
57@@ -40,6 +39,8 @@
58 BranchFileSystem, BranchPuller, LAUNCHPAD_ANONYMOUS, LAUNCHPAD_SERVICES,
59 run_with_login)
60
61+from lp.codehosting.inmemory import InMemoryFrontend
62+
63
64 UTC = pytz.timezone('UTC')
65
66@@ -739,9 +740,11 @@
67 # current time.
68 branch = self.factory.makeAnyBranch()
69 self.branchfs.branchChanged(branch.id, '', '')
70- # We can't test "now" precisely, but lets check that last_mirrored was
71- # set to _something_.
72- self.assertIsNot(None, branch.last_mirrored)
73+ if self.frontend == LaunchpadDatabaseFrontend:
74+ self.assertSqlAttributeEqualsDate(
75+ branch, 'last_mirrored', UTC_NOW)
76+ else:
77+ self.assertIs(UTC_NOW, branch.last_mirrored)
78
79 def test_branchChanged_records_bogus_stacked_on_url(self):
80 # If a bogus location is passed in as the stacked_on parameter,
81@@ -772,7 +775,7 @@
82
83 def test_branchChanged_creates_scan_job(self):
84 # branchChanged() creates a scan job for the branch.
85- if not isinstance(self.frontend, LaunchpadDatabaseFrontend):
86+ if self.frontend != LaunchpadDatabaseFrontend:
87 return
88 branch = self.factory.makeAnyBranch()
89 jobs = list(getUtility(IBranchScanJobSource).iterReady())
90@@ -782,8 +785,7 @@
91 self.assertEqual(1, len(jobs))
92
93 def test_branchChanged_doesnt_create_scan_job_for_noop_change(self):
94- # XXX Is this even the right thing to do?
95- if not isinstance(self.frontend, LaunchpadDatabaseFrontend):
96+ if self.frontend != LaunchpadDatabaseFrontend:
97 return
98 branch = self.factory.makeAnyBranch()
99 removeSecurityProxy(branch).last_mirrored_id = 'rev1'
100
101=== modified file 'lib/lp/codehosting/inmemory.py'
102--- lib/lp/codehosting/inmemory.py 2010-04-19 04:06:23 +0000
103+++ lib/lp/codehosting/inmemory.py 2010-04-19 22:04:00 +0000
104@@ -9,18 +9,18 @@
105 'XMLRPCWrapper',
106 ]
107
108-import datetime
109 import operator
110 from xmlrpclib import Fault
111
112 from bzrlib.urlutils import escape, unescape
113
114-import pytz
115-
116 from zope.component import adapter, getSiteManager
117 from zope.interface import implementer
118
119 from canonical.database.constants import UTC_NOW
120+from canonical.launchpad.validators import LaunchpadValidationError
121+from canonical.launchpad.xmlrpc import faults
122+
123 from lp.code.errors import UnknownBranchTypeError
124 from lp.code.model.branchnamespace import BranchNamespaceSet
125 from lp.code.model.branchtarget import (
126@@ -31,12 +31,10 @@
127 from lp.code.interfaces.codehosting import (
128 BRANCH_TRANSPORT, CONTROL_TRANSPORT, LAUNCHPAD_ANONYMOUS,
129 LAUNCHPAD_SERVICES)
130+from lp.code.xmlrpc.codehosting import datetime_from_tuple
131 from lp.registry.interfaces.pocket import PackagePublishingPocket
132 from lp.services.utils import iter_split
133 from lp.testing.factory import ObjectFactory
134-from canonical.launchpad.validators import LaunchpadValidationError
135-from lp.code.xmlrpc.codehosting import datetime_from_tuple
136-from canonical.launchpad.xmlrpc import faults
137
138
139 class FakeStore:
140@@ -637,7 +635,7 @@
141 branch.mirror_status_message = (
142 'Invalid stacked on location: ' + stacked_on_location)
143 branch.stacked_on = stacked_on_branch
144- branch.last_mirrored = datetime.datetime.now(pytz.UTC)
145+ branch.last_mirrored = UTC_NOW
146 if branch.last_mirrored_id != last_revision_id:
147 branch.last_mirrored_id = last_revision_id
148 return True
149
150=== modified file 'lib/lp/codehosting/vfs/branchfs.py'
151--- lib/lp/codehosting/vfs/branchfs.py 2010-04-16 00:55:24 +0000
152+++ lib/lp/codehosting/vfs/branchfs.py 2010-04-19 22:33:52 +0000
153@@ -639,7 +639,6 @@
154 # Assume it's a relative path.
155 return stacked_on_url
156 uri = URI(stacked_on_url)
157- # XXX Maaaybe we could complain on stderr here?
158 if uri.scheme not in ['http', 'bzr+ssh', 'sftp']:
159 return stacked_on_url
160 launchpad_domain = config.vhost.mainsite.hostname
161@@ -710,7 +709,6 @@
162
163 branch_url = urlutils.local_path_to_url(branch_directory)
164 branchfs_client = xmlrpclib.ServerProxy(branchfs_endpoint_url)
165- # XXX: JonathanLange 2007-05-29: The 'chroot' line lacks a unit test.
166 branch_transport = get_chrooted_transport(branch_url)
167 lp_server = LaunchpadServer(
168 BlockingProxy(branchfs_client), user_id, branch_transport,
169
170=== modified file 'lib/lp/codehosting/vfs/tests/test_branchfs.py'
171--- lib/lp/codehosting/vfs/tests/test_branchfs.py 2010-04-09 06:28:34 +0000
172+++ lib/lp/codehosting/vfs/tests/test_branchfs.py 2010-04-19 22:33:25 +0000
173@@ -17,6 +17,7 @@
174 from bzrlib.transport import (
175 get_transport, _get_protocol_handlers, register_transport, Server,
176 unregister_transport)
177+from bzrlib.transport.chroot import ChrootTransport
178 from bzrlib.transport.memory import MemoryServer, MemoryTransport
179 from bzrlib.urlutils import escape, local_path_to_url
180
181@@ -26,7 +27,7 @@
182 from lp.codehosting.vfs.branchfs import (
183 AsyncLaunchpadTransport, BranchTransportDispatch,
184 DirectDatabaseLaunchpadServer, LaunchpadInternalServer, LaunchpadServer,
185- TransportDispatch, UnknownTransportType, branch_id_to_path)
186+ TransportDispatch, UnknownTransportType, branch_id_to_path, get_lp_server)
187 from lp.codehosting.inmemory import InMemoryFrontend, XMLRPCWrapper
188 from lp.codehosting.sftp import FatLocalTransport
189 from lp.codehosting.vfs.transport import AsyncVirtualTransport
190@@ -1005,6 +1006,17 @@
191 '/%s/.bzr/goodbye.txt' % self.read_only_branch)
192
193
194+class TestGetLPServer(TestCase):
195+ """Tests for `get_lp_server`."""
196+
197+ def test_chrooting(self):
198+ # Test that get_lp_server return a server that ultimately backs onto a
199+ # ChrootTransport.
200+ lp_server = get_lp_server(1, 'http://xmlrpc.example.invalid', '')
201+ transport = lp_server._transport_dispatch._rw_dispatch.base_transport
202+ self.assertIsInstance(transport, ChrootTransport)
203+
204+
205 def test_suite():
206 return unittest.TestLoader().loadTestsFromName(__name__)
207
Tim Penhey (thumper) wrote:

Merge approved.

On Tue, 20 Apr 2010 10:39:16 you wrote:
> On 19/04/10 16:20, Tim Penhey wrote:
> > lib/lp/codehosting/vfs/branchfs.py
> > class LaunchpadServer (I think - around line 558)
> > the __init__ method still refers to the authserver, and there is also an
> > XXX comment that I'm wondering whether we can remove.
>
> Grar, can I fix this in the later pipe that combines the two endpoints?

Yep, sure.

> > lib/lp/codehosting/vfs/tests/test_branchfs.py - still refers to an
> > authserver too.
>
> This too.

Yes.

Tim

review: Approve

Preview Diff

1=== modified file 'Makefile'
2--- Makefile 2010-04-20 19:10:35 +0000
3+++ Makefile 2010-04-27 02:13:38 +0000
4@@ -209,9 +209,9 @@
5 ftest_inplace: inplace
6 bin/test -f $(TESTFLAGS) $(TESTOPTS)
7
8-mpcreationjobs:
9- # Handle merge proposal creations.
10- $(PY) cronscripts/mpcreationjobs.py
11+merge-proposal-jobs:
12+ # Handle merge proposal email jobs.
13+ $(PY) cronscripts/merge-proposal-jobs.py -v
14
15 run: check_schema inplace stop
16 $(RM) thread*.request
17@@ -255,8 +255,7 @@
18 # Scan branches from the filesystem into the database.
19 $(PY) cronscripts/scan_branches.py
20
21-
22-sync_branches: pull_branches scan_branches mpcreationjobs
23+sync_branches: pull_branches scan_branches merge-proposal-jobs
24
25 $(BZR_VERSION_INFO):
26 scripts/update-bzr-version-info.sh
27
28=== modified file 'bzrplugins/lpserve.py'
29--- bzrplugins/lpserve.py 2010-03-24 00:43:45 +0000
30+++ bzrplugins/lpserve.py 2010-04-27 02:13:38 +0000
31@@ -85,8 +85,8 @@
32 finally:
33 ui.ui_factory = old_factory
34
35- def run(self, user_id, port=None, upload_directory=None,
36- mirror_directory=None, branchfs_endpoint_url=None, inet=False):
37+ def run(self, user_id, port=None, branch_directory=None,
38+ branchfs_endpoint_url=None, inet=False):
39 from lp.codehosting.bzrutils import install_oops_handler
40 from lp.codehosting.vfs import get_lp_server, hooks
41 install_oops_handler(user_id)
42@@ -94,8 +94,8 @@
43 resource.setrlimit(resource.RLIMIT_AS, (four_gig, four_gig))
44 seen_new_branch = hooks.SetProcTitleHook()
45 lp_server = get_lp_server(
46- int(user_id), branchfs_endpoint_url,
47- upload_directory, mirror_directory, seen_new_branch.seen)
48+ int(user_id), branchfs_endpoint_url, branch_directory,
49+ seen_new_branch.seen)
50 lp_server.start_server()
51
52 old_lockdir_timeout = lockdir._DEFAULT_TIMEOUT_SECONDS
53
54=== modified file 'configs/development/launchpad-lazr.conf'
55--- configs/development/launchpad-lazr.conf 2010-04-19 03:44:27 +0000
56+++ configs/development/launchpad-lazr.conf 2010-04-27 02:13:38 +0000
57@@ -201,9 +201,9 @@
58 port: 11217
59 memory_size: 1
60
61-[mpcreationjobs]
62+[merge_proposal_jobs]
63 error_dir: /var/tmp/codehosting.test
64-oops_prefix: DMPCR
65+oops_prefix: DMPJ
66
67 [personalpackagearchive]
68 root: /var/tmp/ppa/
69
70=== modified file 'configs/testrunner/launchpad-lazr.conf'
71--- configs/testrunner/launchpad-lazr.conf 2010-04-19 03:44:27 +0000
72+++ configs/testrunner/launchpad-lazr.conf 2010-04-27 02:13:38 +0000
73@@ -34,7 +34,7 @@
74 bzr_lp_prefix: lp://dev/
75 hosted_branches_root: /tmp/sftp-test/branches
76 host_key_pair_path: lib/lp/codehosting/sshserver/tests/keys
77-port: tcp:22222:interface=127.0.0.1
78+port: tcp:22222:interface=bazaar.launchpad.dev
79 error_dir: /var/tmp/codehosting.test
80 oops_prefix: SMPSSH
81 access_log: /tmp/test-codehosting-access.log
82@@ -171,12 +171,8 @@
83 # processes spawned through some other mechanism.
84 port: 11242
85
86-[mpcreationjobs]
87-oops_prefix: TMPCJ
88-error_dir: /var/tmp/codehosting.test
89-
90-[update_preview_diffs]
91-oops_prefix: TUPD
92+[merge_proposal_jobs]
93+oops_prefix: TMPJ
94 error_dir: /var/tmp/codehosting.test
95
96 [upgrade_branches]
97
98=== renamed file 'cronscripts/mpcreationjobs.py' => 'cronscripts/merge-proposal-jobs.py'
99--- cronscripts/mpcreationjobs.py 2010-02-16 15:25:52 +0000
100+++ cronscripts/merge-proposal-jobs.py 2010-04-27 02:13:38 +0000
101@@ -1,48 +1,40 @@
102 #!/usr/bin/python2.5 -S
103 #
104-# Copyright 2009 Canonical Ltd. This software is licensed under the
105+# Copyright 2009, 2010 Canonical Ltd. This software is licensed under the
106 # GNU Affero General Public License version 3 (see the file LICENSE).
107
108 # pylint: disable-msg=W0403
109
110-"""Handle new BranchMergeProposals.
111+"""Handle jobs for BranchMergeProposals.
112
113-This script generates a diff for the merge proposal if needed, then notifies
114-all interested parties about the merge proposal.
115+This script handles all job types for branch merge proposals.
116 """
117
118 __metaclass__ = type
119
120 import _pythonpath
121-from zope.component import getUtility
122
123-from canonical.config import config
124-from lp.codehosting.vfs import get_scanner_server
125-from lp.services.job.runner import JobRunner
126+# The following line is a horrible hack, but unfortunately necessary right now
127+# to stop import errors from circular imports.
128+import canonical.launchpad.interfaces
129 from lp.code.interfaces.branchmergeproposal import (
130- IMergeProposalCreatedJobSource,)
131-from lp.services.scripts.base import LaunchpadCronScript
132-from canonical.launchpad.webapp.errorlog import globalErrorUtility
133-
134-
135-class RunMergeProposalCreatedJobs(LaunchpadCronScript):
136- """Run merge proposal creation jobs."""
137-
138- def main(self):
139- globalErrorUtility.configure('mpcreationjobs')
140- job_source = getUtility(IMergeProposalCreatedJobSource)
141- runner = JobRunner.fromReady(job_source, self.logger)
142- server = get_scanner_server()
143- server.start_server()
144- try:
145- runner.runAll()
146- finally:
147- server.stop_server()
148- self.logger.info(
149- 'Ran %d MergeProposalCreatedJobs.', len(runner.completed_jobs))
150+ IBranchMergeProposalJobSource,
151+ )
152+from lp.services.job.runner import JobCronScript, TwistedJobRunner
153+
154+
155+class RunMergeProposalJobs(JobCronScript):
156+ """Run all merge proposal jobs."""
157+
158+ config_name = 'merge_proposal_jobs'
159+ source_interface = IBranchMergeProposalJobSource
160+
161+ def __init__(self):
162+ super(RunMergeProposalJobs, self).__init__(
163+ runner_class=TwistedJobRunner,
164+ script_name='merge-proposal-jobs')
165
166
167 if __name__ == '__main__':
168- script = RunMergeProposalCreatedJobs(
169- 'mpcreationjobs', config.mpcreationjobs.dbuser)
170+ script = RunMergeProposalJobs()
171 script.lock_and_run()
172
173=== modified file 'cronscripts/mirror-prober.sh'
174--- cronscripts/mirror-prober.sh 2009-10-17 14:11:40 +0000
175+++ cronscripts/mirror-prober.sh 2010-04-27 02:13:38 +0000
176@@ -39,10 +39,10 @@
177 cd /srv/launchpad.net/production/launchpad/cronscripts
178
179 echo '== Distribution mirror prober (archive)' `date` ==
180-python2.5 distributionmirror-prober.py --content-type=archive --max-mirrors=20
181+python2.5 -S distributionmirror-prober.py --content-type=archive --max-mirrors=20
182
183 echo '== Distribution mirror prober (cdimage)' `date` ==
184-python2.5 distributionmirror-prober.py --content-type=cdimage --max-mirrors=30
185+python2.5 -S distributionmirror-prober.py --content-type=cdimage --max-mirrors=30
186
187 rm -f $LOCK
188
189
190=== modified file 'cronscripts/nightly.sh'
191--- cronscripts/nightly.sh 2009-10-17 14:11:40 +0000
192+++ cronscripts/nightly.sh 2010-04-27 02:13:38 +0000
193@@ -3,11 +3,11 @@
194 # Copyright 2009 Canonical Ltd. This software is licensed under the
195 # GNU Affero General Public License version 3 (see the file LICENSE).
196
197-# This script performs nightly chores. It should be run from
198+# This script performs nightly chores. It should be run from
199 # cron as the launchpad user once a day. Typically the output
200 # will be sent to an email address for inspection.
201
202-# Note that http/ftp proxies are needed by the product
203+# Note that http/ftp proxies are needed by the product
204 # release finder
205
206 # Only run this script on loganberry
207@@ -42,41 +42,41 @@
208 cd /srv/launchpad.net/production/launchpad/cronscripts
209
210 echo == Expiring memberships `date` ==
211-python2.5 flag-expired-memberships.py -q
212+python2.5 -S flag-expired-memberships.py -q
213
214 echo == Allocating revision karma `date` ==
215-python2.5 allocate-revision-karma.py -q
216+python2.5 -S allocate-revision-karma.py -q
217
218 echo == Recalculating karma `date` ==
219-python2.5 foaf-update-karma-cache.py -q
220+python2.5 -S foaf-update-karma-cache.py -q
221
222 echo == Updating cached statistics `date` ==
223-python2.5 update-stats.py -q
224+python2.5 -S update-stats.py -q
225
226 echo == Expiring questions `date` ==
227-python2.5 expire-questions.py
228+python2.5 -S expire-questions.py
229
230 ### echo == Expiring bugs `date` ==
231-### python2.5 expire-bugtasks.py
232+### python2.5 -S expire-bugtasks.py
233
234 # checkwatches.py is scheduled in the /code/pqm/launchpad_crontabs branch.
235 ### echo == Updating bug watches `date` ==
236-### python2.5 checkwatches.py
237+### python2.5 -S checkwatches.py
238
239 echo == Updating bugtask target name caches `date` ==
240-python2.5 update-bugtask-targetnamecaches.py -q
241+python2.5 -S update-bugtask-targetnamecaches.py -q
242
243 echo == Updating personal standings `date` ==
244-python2.5 update-standing.py -q
245+python2.5 -S update-standing.py -q
246
247 echo == Updating CVE database `date` ==
248-python2.5 update-cve.py -q
249+python2.5 -S update-cve.py -q
250
251 echo == Updating package cache `date` ==
252-python2.5 update-pkgcache.py -q
253+python2.5 -S update-pkgcache.py -q
254
255 echo == Product Release Finder `date` ==
256-python2.5 product-release-finder.py -q
257+python2.5 -S product-release-finder.py -q
258
259
260 rm -f $LOCK
261
262=== modified file 'cronscripts/publishing/cron.germinate'
263--- cronscripts/publishing/cron.germinate 2010-03-02 09:58:34 +0000
264+++ cronscripts/publishing/cron.germinate 2010-04-27 02:13:38 +0000
265@@ -127,7 +127,7 @@
266 echo " done."
267
268 # now generate the Supported extra overrides
269-$MAINTAINCE_CHECK $suite > "$MISCROOT/more-extra.override.$suite.main.supported"
270+$MAINTAINCE_CHECK $suite > "$MISCROOT/more-extra.override.$suite.main.supported" 2> _maintenance-check.stderr
271 if [ $? -eq 0 ]; then
272 cat "$MISCROOT/more-extra.override.$suite.main.supported" >> "$MISCROOT/more-extra.override.$suite.main.new"
273 fi
274
275=== modified file 'cronscripts/publishing/maintenance-check.py'
276--- cronscripts/publishing/maintenance-check.py 2010-01-22 13:57:45 +0000
277+++ cronscripts/publishing/maintenance-check.py 2010-04-27 02:13:38 +0000
278@@ -6,9 +6,19 @@
279 # https://code.edge.launchpad.net/~mvo/ubuntu-maintenance-check/python-port
280 # (where it will vanish once taken here)
281
282+# this warning filter is only needed on older versions of python-apt,
283+# once the machine runs lucid it can be removed
284+import warnings
285+warnings.filterwarnings("ignore","apt API not stable yet")
286+import apt
287+warnings.resetwarnings()
288+
289+import apt_pkg
290 import logging
291+import os
292 import sys
293 import urllib2
294+import urlparse
295
296 from optparse import OptionParser
297
298@@ -31,8 +41,8 @@
299 SUPPORTED_ARCHES = PRIMARY_ARCHES + ["armel"]
300
301 # what defines the seeds is documented in wiki.ubuntu.com/SeedManagement
302-SERVER_SEEDS = [ "supported-server"]
303-DESKTOP_SEEDS = ["ship", "supported-desktop"]
304+SERVER_SEEDS = [ "supported-server", "server-ship"]
305+DESKTOP_SEEDS = ["ship", "supported-desktop", "supported-desktop-extra"]
306 SUPPORTED_SEEDS = [ "all" ]
307
308 # normal support timeframe
309@@ -51,32 +61,111 @@
310
311 # distro names and if they get LTS support (order is important)
312 DISTRO_NAMES_AND_LTS_SUPPORT = [ ("ubuntu", True),
313- ("kubuntu", False),
314- ("edubuntu", False),
315+ ("kubuntu", True),
316 ("netbook", False),
317 ]
318
319 # germinate output base directory
320 BASE_URL = "http://people.canonical.com/~ubuntu-archive/germinate-output/"
321
322+# hints dir url, hints file is "$distro.hints" by default
323+# (e.g. lucid.hints)
324+HINTS_DIR_URL = "http://people.canonical.com/~ubuntu-archive/seeds/platform.%s/SUPPORTED_HINTS"
325+
326+# we need the archive root to parse the Sources file to support
327+# by-source hints
328+ARCHIVE_ROOT = "http://archive.ubuntu.com/ubuntu"
329+
330 # support timeframe tag used in the Packages file
331 SUPPORT_TAG = "Supported"
332
333-
334-def get_structure(name, version):
335- """
336- get structure file for named distro and distro version
337- (e.g. kubuntu, lucid)
338- """
339- f = urllib2.urlopen("%s/%s.%s/structure" % (BASE_URL, name, version))
340+def get_binaries_for_source_pkg(srcname):
341+ """ Return all binary package names for the given source package name.
342+
343+ :param srcname: The source package name.
344+ :return: A list of binary package names.
345+ """
346+ pkgnames = set()
347+ recs = apt_pkg.GetPkgSrcRecords()
348+ while recs.Lookup(srcname):
349+ for binary in recs.Binaries:
350+ pkgnames.add(binary)
351+ return pkgnames
352+
353+def expand_src_pkgname(pkgname):
354+ """ Expand a package name if it is prefixed with src.
355+
356+ If the package name is prefixed with src it will be expanded
357+ to a list of binary package names. Otherwise the original
358+ package name will be returned.
359+
360+ :param pkgname: The package name (that may include src:prefix).
361+ :return: A list of binary package names (the list may be one element long).
362+ """
363+ if not pkgname.startswith("src:"):
364+ return [pkgname]
365+ return get_binaries_for_source_pkg(pkgname.split("src:")[1])
366+
367+def create_and_update_deb_src_source_list(distroseries):
368+ """ Create sources.list and update cache.
369+
370+ This creates a sources.list file with deb-src entries for a given
371+ distroseries and apt.Cache.update() to make sure the data is up-to-date.
372+ :param distro: The code name of the distribution series (e.g. lucid).
373+ :return: None
374+ :raises: IOError: When cache update fails.
375+ """
376+ # apt root dir
377+ rootdir="./aptroot.%s" % distroseries
378+ sources_list_dir = os.path.join(rootdir, "etc","apt")
379+ if not os.path.exists(sources_list_dir):
380+ os.makedirs(sources_list_dir)
381+ sources_list = open(os.path.join(sources_list_dir, "sources.list"),"w")
382+ for pocket in [
383+ "%s" % distroseries,
384+ "%s-updates" % distroseries,
385+ "%s-security" % distroseries]:
386+ sources_list.write(
387+ "deb-src %s %s main restricted\n" % (
388+ ARCHIVE_ROOT, pocket))
389+ sources_list.write(
390+ "deb %s %s main restricted\n" % (
391+ ARCHIVE_ROOT, pocket))
392+ sources_list.close()
393+ # create required dirs/files for apt.Cache(rootdir) to work on older
394+ # versions of python-apt. once lucid is used it can be removed
395+ for d in ["var/lib/dpkg",
396+ "var/cache/apt/archives/partial",
397+ "var/lib/apt/lists/partial"]:
398+ if not os.path.exists(os.path.join(rootdir,d)):
399+ os.makedirs(os.path.join(rootdir,d))
400+ if not os.path.exists(os.path.join(rootdir,"var/lib/dpkg/status")):
401+ open(os.path.join(rootdir,"var/lib/dpkg/status"),"w")
402+ # open cache with our just prepared rootdir
403+ cache = apt.Cache(rootdir=rootdir)
404+ try:
405+ cache.update(apt.progress.FetchProgress())
406+ except SystemError:
407+ logging.exception("cache.update() failed")
408+
409+def get_structure(distroname, version):
410+ """ Get structure file conent for named distro and distro version.
411+
412+ :param name: Name of the distribution (e.g. kubuntu, ubuntu, xubuntu).
413+ :param version: Code name of the distribution version (e.g. lucid).
414+ :return: List of strings with the structure file content
415+ """
416+ f = urllib2.urlopen("%s/%s.%s/structure" % (BASE_URL, distroname, version))
417 structure = f.readlines()
418 f.close()
419 return structure
420
421 def expand_seeds(structure, seedname):
422- """
423- expand seed by its dependencies using the strucure file
424- returns a set() for the seed dependencies (excluding the original seedname)
425+ """ Expand seed by its dependencies using the strucure file.
426+
427+ :param structure: The content of the STRUCTURE file as string list.
428+ :param seedname: The name of the seed as string that needs to be expanded.
429+ :return: a set() for the seed dependencies (excluding the original seedname)
430 """
431 seeds = []
432 for line in structure:
433@@ -122,6 +211,28 @@
434 in_seeds.add(s)
435 return in_seeds
436
437+def compare_support_level(x, y):
438+ """
439+ compare two support level strings of the form 18m, 3y etc
440+ :parm x: the first support level
441+ :parm y: the second support level
442+ :return: negative if x < y, zero if x==y, positive if x > y
443+ """
444+ def support_to_int(support_time):
445+ """
446+ helper that takes a support time string and converts it to
447+ a integer for cmp()
448+ """
449+ # allow strings like "5y (kubuntu-common)
450+ x = support_time.split()[0]
451+ if x.endswith("y"):
452+ return 12 * int(x[0:-1])
453+ elif x.endswith("m"):
454+ return int(x[0:-1])
455+ else:
456+ raise ValueError("support time '%s' has to end with y or m" % x)
457+ return cmp(support_to_int(x), support_to_int(y))
458+
459 def get_packages_support_time(structure, name, pkg_support_time, support_timeframe_list):
460 """
461 input a structure file and a list of pair<timeframe, seedlist>
462@@ -137,8 +248,15 @@
463 for pkg in pkgs_in_seeds[seed]:
464 if not pkg in pkg_support_time:
465 pkg_support_time[pkg] = timeframe
466- if options.with_seeds:
467- pkg_support_time[pkg] += " (%s)" % ", ".join(what_seeds(pkg, pkgs_in_seeds))
468+ else:
469+ old_timeframe = pkg_support_time[pkg]
470+ if compare_support_level(old_timeframe, timeframe) < 0:
471+ logging.debug("overwriting %s from %s to %s" % (
472+ pkg, old_timeframe, timeframe))
473+ pkg_support_time[pkg] = timeframe
474+ if options.with_seeds:
475+ pkg_support_time[pkg] += " (%s)" % ", ".join(what_seeds(pkg, pkgs_in_seeds))
476+
477
478 return pkg_support_time
479
480@@ -150,6 +268,8 @@
481 parser.add_option("--source-packages", "", default=False,
482 action="store_true",
483 help="show as source pkgs")
484+ parser.add_option("--hints-file", "", default=None,
485+ help="use diffenrt use hints file location")
486 (options, args) = parser.parse_args()
487
488 # init
489@@ -160,6 +280,17 @@
490 sys.exit(1)
491 else:
492 distro = "lucid"
493+
494+ # make sure our deb-src information is up-to-date
495+ create_and_update_deb_src_source_list(distro)
496+
497+ if options.hints_file:
498+ hints_file = options.hints_file
499+ (schema, netloc, path, query, fragment) = urlparse.urlsplit(hints_file)
500+ if not schema:
501+ hints_file = "file:%s" % path
502+ else:
503+ hints_file = HINTS_DIR_URL % distro
504
505 # go over the distros we need to check
506 pkg_support_time = {}
507@@ -175,20 +306,77 @@
508 else:
509 support_timeframe = SUPPORT_TIMEFRAME
510 get_packages_support_time(structure, name, pkg_support_time, support_timeframe)
511+
512+ # now go over the bits in main that we have not seen (because
513+ # they are not in any seed and got added manually into "main"
514+ for arch in PRIMARY_ARCHES:
515+ rootdir="./aptroot.%s" % distro
516+ apt_pkg.Config.Set("APT::Architecture", arch)
517+ cache = apt.Cache(rootdir=rootdir)
518+ try:
519+ cache.update(apt.progress.FetchProgress())
520+ except SystemError:
521+ logging.exception("cache.update() failed")
522+ cache.open(apt.progress.OpProgress())
523+ for pkg in cache:
524+ if not pkg.name in pkg_support_time:
525+ pkg_support_time[pkg.name] = support_timeframe[-1][0]
526+ logging.warn("add package in main but not in seeds %s with %s" %
527+ (pkg.name, pkg_support_time[pkg.name]))
528+
529+ # now check the hints file that is used to overwrite
530+ # the default seeds
531+ try:
532+ for line in urllib2.urlopen(hints_file):
533+ line = line.strip()
534+ if not line or line.startswith("#"):
535+ continue
536+ try:
537+ (raw_pkgname, support_time) = line.split()
538+ for pkgname in expand_src_pkgname(raw_pkgname):
539+ if support_time == 'unsupported':
540+ try:
541+ del pkg_support_time[pkgname]
542+ sys.stderr.write("hints-file: marking %s unsupported\n" % pkgname)
543+ except KeyError:
544+ pass
545+ else:
546+ if pkg_support_time.get(pkgname) != support_time:
547+ sys.stderr.write(
548+ "hints-file: changing %s from %s to %s\n" % (
549+ pkgname, pkg_support_time.get(pkgname),
550+ support_time))
551+ pkg_support_time[pkgname] = support_time
552+ except:
553+ logging.exception("can not parse line '%s'" % line)
554+ except urllib2.HTTPError, e:
555+ if e.getcode() != 404:
556+ raise
557+ sys.stderr.write("hints-file: %s gave 404 error\n" % hints_file)
558
559 # output suitable for the extra-override file
560 for pkgname in sorted(pkg_support_time.keys()):
561- # go over the supported arches, they are divided in
562- # first-class (PRIMARY) and second-class with different
563- # support levels
564- for arch in SUPPORTED_ARCHES:
565- # full LTS support
566- if arch in PRIMARY_ARCHES:
567- print "%s/%s %s %s" % (
568- pkgname, arch, SUPPORT_TAG, pkg_support_time[pkgname])
569- else:
570- # not a LTS supported architecture, gets only regular
571- # support_timeframe
572- print "%s/%s %s %s" % (
573- pkgname, arch, SUPPORT_TAG, SUPPORT_TIMEFRAME[0][0])
574+ # special case, the hints file may contain overrides that
575+ # are arch-specific (like zsh-doc/armel)
576+ if "/" in pkgname:
577+ print "%s %s %s" % (
578+ pkgname, SUPPORT_TAG, pkg_support_time[pkgname])
579+ else:
580+ # go over the supported arches, they are divided in
581+ # first-class (PRIMARY) and second-class with different
582+ # support levels
583+ for arch in SUPPORTED_ARCHES:
584+ # ensure we do not overwrite arch-specific overwrites
585+ pkgname_and_arch = "%s/%s" % (pkgname, arch)
586+ if pkgname_and_arch in pkg_support_time:
587+ break
588+ if arch in PRIMARY_ARCHES:
589+ # arch with full LTS support
590+ print "%s %s %s" % (
591+ pkgname_and_arch, SUPPORT_TAG, pkg_support_time[pkgname])
592+ else:
593+ # not a LTS supported architecture, gets only regular
594+ # support_timeframe
595+ print "%s %s %s" % (
596+ pkgname_and_arch, SUPPORT_TAG, SUPPORT_TIMEFRAME[0][0])
597
598
599=== removed file 'cronscripts/update_preview_diffs.py'
600--- cronscripts/update_preview_diffs.py 2010-02-16 15:25:52 +0000
601+++ cronscripts/update_preview_diffs.py 1970-01-01 00:00:00 +0000
602@@ -1,38 +0,0 @@
603-#!/usr/bin/python2.5 -S
604-#
605-# Copyright 2009 Canonical Ltd. This software is licensed under the
606-# GNU Affero General Public License version 3 (see the file LICENSE).
607-
608-# pylint: disable-msg=W0403
609-
610-"""Update or create previews diffs for branch merge proposals."""
611-
612-__metaclass__ = type
613-
614-import _pythonpath
615-
616-from lp.services.job.runner import JobCronScript, JobRunner, TwistedJobRunner
617-from lp.code.interfaces.branchmergeproposal import (
618- IUpdatePreviewDiffJobSource,)
619-
620-
621-class RunUpdatePreviewDiffJobs(JobCronScript):
622- """Run UpdatePreviewDiff jobs."""
623-
624- config_name = 'update_preview_diffs'
625- source_interface = IUpdatePreviewDiffJobSource
626-
627- def __init__(self):
628- super(RunUpdatePreviewDiffJobs, self).__init__()
629- if self.options.twisted:
630- self.runner_class = TwistedJobRunner
631- else:
632- self.runner_class = JobRunner
633-
634- def add_my_options(self):
635- self.parser.add_option('--twisted', action='store_true')
636-
637-
638-if __name__ == '__main__':
639- script = RunUpdatePreviewDiffJobs()
640- script.lock_and_run()
641
642=== modified file 'database/replication/Makefile'
643--- database/replication/Makefile 2010-03-26 08:43:39 +0000
644+++ database/replication/Makefile 2010-04-27 02:13:38 +0000
645@@ -44,14 +44,16 @@
646
647 PGMASSACRE=../../utilities/pgmassacre.py
648
649-# Turn off silencing for now so we details on staging deployments.
650+CREATEDB_83=createdb --encoding=UTF8
651+CREATEDB_84=createdb --encoding=UTF8 --locale=C --template=template0
652+CREATEDB=${CREATEDB_83}
653+
654+# Turn off output silencing so we can see details of staging deployments.
655+# Without the timestamps, we are unable to estimate production deployment
656+# times.
657 #SHHH=../../utilities/shhh.py
658 SHHH=
659
660-AUTHDB_TABLES=\
661- account accountpassword authkoken emailaddress \
662- openidassociation openidauthorization openidnonce openidrpsummary
663-
664 default:
665 echo Usage: make [start|stop|restart]
666
667@@ -76,7 +78,7 @@
668
669 # Replicate it again, so we can test with multiple slaves.
670 -${PGMASSACRE} launchpad_dev_slave2
671- createdb --encoding=UTF8 launchpad_dev_slave2
672+ ${CREATEDB} launchpad_dev_slave2
673 LPCONFIG=${DEV_CONFIG} ./slon_ctl.py start \
674 node3_node 'dbname=launchpad_dev_slave2 user=slony'
675 LPCONFIG=${DEV_CONFIG} ./new-slave.py 3 launchpad_dev_slave2
676@@ -96,23 +98,12 @@
677 _MASTER=lpmain_staging_new _SLAVE=lpmain_staging_slave_new \
678 LAG="0 seconds"
679 # Create the DB with the desired default tablespace.
680- createdb --encoding UTF8 --tablespace ${STAGING_TABLESPACE} \
681- lpmain_staging_new
682- # Restore the DB schema. We need to restore permissions, despite
683+ ${CREATEDB} --tablespace ${STAGING_TABLESPACE} lpmain_staging_new
684+ # Restore the database. We need to restore permissions, despite
685 # later running security.py, to pull in permissions granted on
686 # production to users not maintained by security.py.
687 pg_restore --dbname=lpmain_staging_new \
688 --no-owner --exit-on-error ${STAGING_DUMP}
689- psql -q -d lpmain_staging_new -f authdb_drop.sql
690- psql -q -d lpmain_staging_new -f authdb_create.sql \
691- 2>&1 | grep -v _sl || true
692- # Restore the authdb data.
693- for table in ${AUTHDB_TABLES}; do \
694- pg_restore --dbname=lpmain_staging_new \
695- --no-acl --no-owner --disable-triggers --data-only \
696- --table=$$table ${STAGING_DUMP}; \
697- done
698- psql -q -d lpmain_staging_new -f authdb_sequences.sql
699 # Uninstall Slony-I if it is installed - a pg_dump of a DB with
700 # Slony-I installed isn't usable without this step.
701 LPCONFIG=${NEW_STAGING_CONFIG} ./repair-restored-db.py
702@@ -144,17 +135,9 @@
703 LPCONFIG=${STAGING_CONFIG} ./slon_ctl.py --lag="${LAG}" start
704
705 dogfood:
706- createdb --encoding UTF8 ${DOGFOOD_DBNAME}
707+ ${CREATEDB} ${DOGFOOD_DBNAME}
708 pg_restore --dbname=${DOGFOOD_DBNAME} --no-acl --no-owner \
709 --exit-on-error ${DOGFOOD_DUMP}
710- psql -q -d ${DOGFOOD_DBNAME} -f authdb_drop.sql
711- psql -q -d ${DOGFOOD_DBNAME} -f authdb_create.sql \
712- 2>&1 | grep -v _sl || true
713- for table in ${AUTHDB_TABLES}; do \
714- pg_restore --dbname=${DOGFOOD_DBNAME} \
715- --no-acl --no-owner --disable-triggers --data-only \
716- --table=$$table ${DOGFOOD_DUMP}; \
717- done
718 ./repair-restored-db.py -d ${DOGFOOD_DBNAME}
719 ../schema/upgrade.py -d ${DOGFOOD_DBNAME}
720 ../schema/fti.py -d ${DOGFOOD_DBNAME}
721@@ -174,14 +157,15 @@
722 _replicate:
723 @echo LPCONFIG currently ${LPCONFIG}
724 # Start the slon daemon for the master.
725- ./slon_ctl.py start \
726+ ./slon_ctl.py --lag="0 seconds" start \
727 node1_node "dbname=${_MASTER} user=slony"
728 # Initialize the cluster and create replication sets.
729 ./initialize.py
730 # Create the soon-to-be-slave database, empty at this point.
731- createdb --encoding=UTF8 --tablespace=${_SLAVE_TABLESPACE} ${_SLAVE}
732+ ${CREATEDB} --tablespace=${_SLAVE_TABLESPACE} ${_SLAVE}
733 # Start the slon daemon for the slave
734- ./slon_ctl.py start node2_node "dbname=${_SLAVE} user=slony"
735+ ./slon_ctl.py --lag="0 seconds" start \
736+ node2_node "dbname=${_SLAVE} user=slony"
737 # Setup the slave
738 ./new-slave.py 2 "dbname=${_SLAVE}"
739 # Upgrade all databases in the cluster and reset security.
740@@ -192,12 +176,7 @@
741 @echo Running security.py `date`
742 ./slon_ctl.py stop # security.py can deadlock with slony
743 ${SHHH} ../schema/security.py --cluster -U slony
744- ./slon_ctl.py --lag="0 seconds" start
745- # Migrate tables to the authdb replication set, creating the set
746- # and subscribing nodes to it as necessary.
747- ./populate_auth_replication_set.py -U slony
748 # Restart slon daemons with default lag setting.
749- ./slon_ctl.py stop
750 ./slon_ctl.py --lag="${LAG}" start
751 # Generate a preamble for manual slonik(1) usage.
752 ./preamble.py > preamble.sk
753
754=== removed file 'database/replication/authdb_create.sql'
755--- database/replication/authdb_create.sql 2010-03-30 05:51:30 +0000
756+++ database/replication/authdb_create.sql 1970-01-01 00:00:00 +0000
757@@ -1,885 +0,0 @@
758--- Copyright 2009 Canonical Ltd. This software is licensed under the
759--- GNU Affero General Public License version 3 (see the file LICENSE).
760-
761--- Generated by:
762--- pg_dump --format=p --schema-only --no-owner --no-privileges \
763--- --table=Account --table=AccountPassword --table=AuthToken \
764--- --table=EmailAddress --table=OpenIDAssociation \
765--- --table=OpenIDAuthorization --table=OpenIDNonce \
766--- --table=OpenIDRPSummary --table=ValidPersonCache \
767--- --table=ValidPersonOrTeamCache launchpad_prod_4
768-
769---
770--- PostgreSQL database dump
771---
772-
773-SET client_encoding = 'UTF8';
774-SET standard_conforming_strings = off;
775-SET check_function_bodies = false;
776-SET client_min_messages = warning;
777-SET escape_string_warning = off;
778-
779-SET search_path = public, pg_catalog;
780-
781-SET default_tablespace = '';
782-
783-SET default_with_oids = false;
784-
785---
786--- Name: account; Type: TABLE; Schema: public; Owner: -; Tablespace:
787---
788-
789-CREATE TABLE account (
790- id integer NOT NULL,
791- date_created timestamp without time zone DEFAULT timezone('UTC'::text, now()) NOT NULL,
792- creation_rationale integer NOT NULL,
793- status integer NOT NULL,
794- date_status_set timestamp without time zone DEFAULT timezone('UTC'::text, now()) NOT NULL,
795- displayname text NOT NULL,
796- openid_identifier text DEFAULT generate_openid_identifier() NOT NULL,
797- status_comment text,
798- old_openid_identifier text
799-);
800-
801-
802---
803--- Name: TABLE account; Type: COMMENT; Schema: public; Owner: -
804---
805-
806-COMMENT ON TABLE account IS 'An account that may be used for authenticating to Canonical or other systems.';
807-
808-
809---
810--- Name: COLUMN account.status; Type: COMMENT; Schema: public; Owner: -
811---
812-
813-COMMENT ON COLUMN account.status IS 'The status of the account.';
814-
815-
816---
817--- Name: COLUMN account.date_status_set; Type: COMMENT; Schema: public; Owner: -
818---
819-
820-COMMENT ON COLUMN account.date_status_set IS 'When the status was last changed.';
821-
822-
823---
824--- Name: COLUMN account.displayname; Type: COMMENT; Schema: public; Owner: -
825---
826-
827-COMMENT ON COLUMN account.displayname IS 'Name to display when rendering information about this account.';
828-
829-
830---
831--- Name: COLUMN account.openid_identifier; Type: COMMENT; Schema: public; Owner: -
832---
833-
834-COMMENT ON COLUMN account.openid_identifier IS 'The key used to construct an OpenID identity URL for this account.';
835-
836-
837---
838--- Name: COLUMN account.status_comment; Type: COMMENT; Schema: public; Owner: -
839---
840-
841-COMMENT ON COLUMN account.status_comment IS 'The comment on the status of the account.';
842-
843-
844---
845--- Name: COLUMN account.old_openid_identifier; Type: COMMENT; Schema: public; Owner: -
846---
847-
848-COMMENT ON COLUMN account.old_openid_identifier IS 'The previous openid_identifier, used for transitions to the current openid_identifier.';
849-
850-
851---
852--- Name: account_id_seq; Type: SEQUENCE; Schema: public; Owner: -
853---
854-
855-CREATE SEQUENCE account_id_seq
856- INCREMENT BY 1
857- NO MAXVALUE
858- NO MINVALUE
859- CACHE 1;
860-
861-
862---
863--- Name: account_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: -
864---
865-
866-ALTER SEQUENCE account_id_seq OWNED BY account.id;
867-
868-
869---
870--- Name: accountpassword; Type: TABLE; Schema: public; Owner: -; Tablespace:
871---
872-
873-CREATE TABLE accountpassword (
874- id integer NOT NULL,
875- account integer NOT NULL,
876- password text NOT NULL
877-);
878-
879-
880---
881--- Name: TABLE accountpassword; Type: COMMENT; Schema: public; Owner: -
882---
883-
884-COMMENT ON TABLE accountpassword IS 'A password used to authenticate an Account.';
885-
886-
887---
888--- Name: COLUMN accountpassword.password; Type: COMMENT; Schema: public; Owner: -
889---
890-
891-COMMENT ON COLUMN accountpassword.password IS 'SSHA digest encrypted password.';
892-
893-
894---
895--- Name: accountpassword_id_seq; Type: SEQUENCE; Schema: public; Owner: -
896---
897-
898-CREATE SEQUENCE accountpassword_id_seq
899- INCREMENT BY 1
900- NO MAXVALUE
901- NO MINVALUE
902- CACHE 1;
903-
904-
905---
906--- Name: accountpassword_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: -
907---
908-
909-ALTER SEQUENCE accountpassword_id_seq OWNED BY accountpassword.id;
910-
911-
912---
913--- Name: authtoken; Type: TABLE; Schema: public; Owner: -; Tablespace:
914---
915-
916-CREATE TABLE authtoken (
917- id integer NOT NULL,
918- date_created timestamp without time zone DEFAULT timezone('UTC'::text, now()) NOT NULL,
919- date_consumed timestamp without time zone,
920- token_type integer NOT NULL,
921- token text NOT NULL,
922- requester integer,
923- requester_email text,
924- email text NOT NULL,
925- redirection_url text
926-);
927-
928-
929---
930--- Name: TABLE authtoken; Type: COMMENT; Schema: public; Owner: -
931---
932-
933-COMMENT ON TABLE authtoken IS 'AuthToken stores one time tokens used by the authentication service for validating email addresses and other tasks that require verifying an email address is valid such as password recovery and account merging. This table will be cleaned occasionally to remove expired tokens. Expiry time is not yet defined.';
934-
935-
936---
937--- Name: COLUMN authtoken.date_created; Type: COMMENT; Schema: public; Owner: -
938---
939-
940-COMMENT ON COLUMN authtoken.date_created IS 'The timestamp that this request was made.';
941-
942-
943---
944--- Name: COLUMN authtoken.date_consumed; Type: COMMENT; Schema: public; Owner: -
945---
946-
947-COMMENT ON COLUMN authtoken.date_consumed IS 'The date and time when this token was consumed. It''s NULL if it hasn''t been consumed yet.';
948-
949-
950---
951--- Name: COLUMN authtoken.token_type; Type: COMMENT; Schema: public; Owner: -
952---
953-
954-COMMENT ON COLUMN authtoken.token_type IS 'The type of request, as per dbschema.TokenType.';
955-
956-
957---
958--- Name: COLUMN authtoken.token; Type: COMMENT; Schema: public; Owner: -
959---
960-
961-COMMENT ON COLUMN authtoken.token IS 'The token (not the URL) emailed used to uniquely identify this request. This token will be used to generate a URL that when clicked on will continue a workflow.';
962-
963-
964---
965--- Name: COLUMN authtoken.requester; Type: COMMENT; Schema: public; Owner: -
966---
967-
968-COMMENT ON COLUMN authtoken.requester IS 'The Account that made this request. This will be null for password recovery requests.';
969-
970-
971---
972--- Name: COLUMN authtoken.requester_email; Type: COMMENT; Schema: public; Owner: -
973---
974-
975-COMMENT ON COLUMN authtoken.requester_email IS 'The email address that was used to login when making this request. This provides an audit trail to help the end user confirm that this is a valid request. It is not a link to the EmailAddress table as this may be changed after the request is made. This field will be null for password recovery requests.';
976-
977-
978---
979--- Name: COLUMN authtoken.email; Type: COMMENT; Schema: public; Owner: -
980---
981-
982-COMMENT ON COLUMN authtoken.email IS 'The email address that this request was sent to.';
983-
984-
985---
986--- Name: authtoken_id_seq; Type: SEQUENCE; Schema: public; Owner: -
987---
988-
989-CREATE SEQUENCE authtoken_id_seq
990- INCREMENT BY 1
991- NO MAXVALUE
992- NO MINVALUE
993- CACHE 1;
994-
995-
996---
997--- Name: authtoken_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: -
998---
999-
1000-ALTER SEQUENCE authtoken_id_seq OWNED BY authtoken.id;
1001-
1002-
1003---
1004--- Name: emailaddress; Type: TABLE; Schema: public; Owner: -; Tablespace:
1005---
1006-
1007-CREATE TABLE emailaddress (
1008- id integer NOT NULL,
1009- email text NOT NULL,
1010- person integer,
1011- status integer NOT NULL,
1012- date_created timestamp without time zone DEFAULT timezone('UTC'::text, now()) NOT NULL,
1013- account integer,
1014- CONSTRAINT emailaddress__is_linked__chk CHECK (((person IS NOT NULL) OR (account IS NOT NULL)))
1015-);
1016-
1017-
1018---
1019--- Name: COLUMN emailaddress.email; Type: COMMENT; Schema: public; Owner: -
1020---
1021-
1022-COMMENT ON COLUMN emailaddress.email IS 'An email address used by a Person. The email address is stored in a casesensitive way, but must be case insensitivly unique.';
1023-
1024-
1025---
1026--- Name: emailaddress_id_seq; Type: SEQUENCE; Schema: public; Owner: -
1027---
1028-
1029-CREATE SEQUENCE emailaddress_id_seq
1030- INCREMENT BY 1
1031- NO MAXVALUE
1032- NO MINVALUE
1033- CACHE 1;
1034-
1035-
1036---
1037--- Name: emailaddress_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: -
1038---
1039-
1040-ALTER SEQUENCE emailaddress_id_seq OWNED BY emailaddress.id;
1041-
1042-
1043---
1044--- Name: openidassociation; Type: TABLE; Schema: public; Owner: -; Tablespace:
1045---
1046-
1047-CREATE TABLE openidassociation (
1048- server_url character varying(2047) NOT NULL,
1049- handle character varying(255) NOT NULL,
1050- secret bytea,
1051- issued integer,
1052- lifetime integer,
1053- assoc_type character varying(64),
1054- CONSTRAINT secret_length_constraint CHECK ((length(secret) <= 128))
1055-);
1056-
1057-
1058---
1059--- Name: openidauthorization; Type: TABLE; Schema: public; Owner: -; Tablespace:
1060---
1061-
1062-CREATE TABLE openidauthorization (
1063- id integer NOT NULL,
1064- account integer NOT NULL,
1065- client_id text,
1066- date_created timestamp without time zone DEFAULT timezone('UTC'::text, now()) NOT NULL,
1067- date_expires timestamp without time zone NOT NULL,
1068- trust_root text NOT NULL
1069-);
1070-
1071-
1072---
1073--- Name: openidauthorization_id_seq; Type: SEQUENCE; Schema: public; Owner: -
1074---
1075-
1076-CREATE SEQUENCE openidauthorization_id_seq
1077- INCREMENT BY 1
1078- NO MAXVALUE
1079- NO MINVALUE
1080- CACHE 1;
1081-
1082-
1083---
1084--- Name: openidauthorization_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: -
1085---
1086-
1087-ALTER SEQUENCE openidauthorization_id_seq OWNED BY openidauthorization.id;
1088-
1089-
1090---
1091--- Name: openidnonce; Type: TABLE; Schema: public; Owner: -; Tablespace:
1092---
1093-
1094-CREATE TABLE openidnonce (
1095- server_url character varying(2047) NOT NULL,
1096- "timestamp" integer NOT NULL,
1097- salt character(40) NOT NULL
1098-);
1099-
1100-
1101---
1102--- Name: TABLE openidnonce; Type: COMMENT; Schema: public; Owner: -
1103---
1104-
1105-COMMENT ON TABLE openidnonce IS 'Nonces for our OpenID consumer.';
1106-
1107-
1108---
1109--- Name: openidrpsummary; Type: TABLE; Schema: public; Owner: -; Tablespace:
1110---
1111-
1112-CREATE TABLE openidrpsummary (
1113- id integer NOT NULL,
1114- account integer NOT NULL,
1115- openid_identifier text NOT NULL,
1116- trust_root text NOT NULL,
1117- date_created timestamp without time zone DEFAULT timezone('UTC'::text, now()) NOT NULL,
1118- date_last_used timestamp without time zone DEFAULT timezone('UTC'::text, now()) NOT NULL,
1119- total_logins integer DEFAULT 1 NOT NULL
1120-);
1121-
1122-
1123---
1124--- Name: TABLE openidrpsummary; Type: COMMENT; Schema: public; Owner: -
1125---
1126-
1127-COMMENT ON TABLE openidrpsummary IS 'The summary of the activity between a person and an RP.';
1128-
1129-
1130---
1131--- Name: COLUMN openidrpsummary.account; Type: COMMENT; Schema: public; Owner: -
1132---
1133-
1134-COMMENT ON COLUMN openidrpsummary.account IS 'The account that used the RP.';
1135-
1136-
1137---
1138--- Name: COLUMN openidrpsummary.openid_identifier; Type: COMMENT; Schema: public; Owner: -
1139---
1140-
1141-COMMENT ON COLUMN openidrpsummary.openid_identifier IS 'The OpenID identifier used to login.';
1142-
1143-
1144---
1145--- Name: COLUMN openidrpsummary.trust_root; Type: COMMENT; Schema: public; Owner: -
1146---
1147-
1148-COMMENT ON COLUMN openidrpsummary.trust_root IS 'The trust root for the RP.';
1149-
1150-
1151---
1152--- Name: COLUMN openidrpsummary.date_created; Type: COMMENT; Schema: public; Owner: -
1153---
1154-
1155-COMMENT ON COLUMN openidrpsummary.date_created IS 'The creation date of this summary; the first time the person used the RP.';
1156-
1157-
1158---
1159--- Name: COLUMN openidrpsummary.date_last_used; Type: COMMENT; Schema: public; Owner: -
1160---
1161-
1162-COMMENT ON COLUMN openidrpsummary.date_last_used IS 'The date the RP was last used.';
1163-
1164-
1165---
1166--- Name: COLUMN openidrpsummary.total_logins; Type: COMMENT; Schema: public; Owner: -
1167---
1168-
1169-COMMENT ON COLUMN openidrpsummary.total_logins IS 'The total number of times the RP was used by the person.';
1170-
1171-
1172---
1173--- Name: openidrpsummary_id_seq; Type: SEQUENCE; Schema: public; Owner: -
1174---
1175-
1176-CREATE SEQUENCE openidrpsummary_id_seq
1177- INCREMENT BY 1
1178- NO MAXVALUE
1179- NO MINVALUE
1180- CACHE 1;
1181-
1182-
1183---
1184--- Name: openidrpsummary_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: -
1185---
1186-
1187-ALTER SEQUENCE openidrpsummary_id_seq OWNED BY openidrpsummary.id;
1188-
1189-
1190---
1191--- Name: validpersoncache; Type: VIEW; Schema: public; Owner: -
1192---
1193-
1194-CREATE VIEW validpersoncache AS
1195- SELECT emailaddress.person AS id FROM emailaddress, account WHERE ((((emailaddress.account = account.id) AND (emailaddress.person IS NOT NULL)) AND (emailaddress.status = 4)) AND (account.status = 20));
1196-
1197-
1198---
1199--- Name: VIEW validpersoncache; Type: COMMENT; Schema: public; Owner: -
1200---
1201-
1202-COMMENT ON VIEW validpersoncache IS 'A materialized view listing the Person.ids of all valid people (but not teams).';
1203-
1204-
1205---
1206--- Name: validpersonorteamcache; Type: VIEW; Schema: public; Owner: -
1207---
1208-
1209-CREATE VIEW validpersonorteamcache AS
1210- SELECT person.id FROM ((person LEFT JOIN emailaddress ON ((person.id = emailaddress.person))) LEFT JOIN account ON ((emailaddress.account = account.id))) WHERE ((person.teamowner IS NOT NULL) OR ((account.status = 20) AND (emailaddress.status = 4)));
1211-
1212-
1213---
1214--- Name: id; Type: DEFAULT; Schema: public; Owner: -
1215---
1216-
1217-ALTER TABLE account ALTER COLUMN id SET DEFAULT nextval('account_id_seq'::regclass);
1218-
1219-
1220---
1221--- Name: id; Type: DEFAULT; Schema: public; Owner: -
1222---
1223-
1224-ALTER TABLE accountpassword ALTER COLUMN id SET DEFAULT nextval('accountpassword_id_seq'::regclass);
1225-
1226-
1227---
1228--- Name: id; Type: DEFAULT; Schema: public; Owner: -
1229---
1230-
1231-ALTER TABLE authtoken ALTER COLUMN id SET DEFAULT nextval('authtoken_id_seq'::regclass);
1232-
1233-
1234---
1235--- Name: id; Type: DEFAULT; Schema: public; Owner: -
1236---
1237-
1238-ALTER TABLE emailaddress ALTER COLUMN id SET DEFAULT nextval('emailaddress_id_seq'::regclass);
1239-
1240-
1241---
1242--- Name: id; Type: DEFAULT; Schema: public; Owner: -
1243---
1244-
1245-ALTER TABLE openidauthorization ALTER COLUMN id SET DEFAULT nextval('openidauthorization_id_seq'::regclass);
1246-
1247-
1248---
1249--- Name: id; Type: DEFAULT; Schema: public; Owner: -
1250---
1251-
1252-ALTER TABLE openidrpsummary ALTER COLUMN id SET DEFAULT nextval('openidrpsummary_id_seq'::regclass);
1253-
1254-
1255---
1256--- Name: account_openid_identifier_key; Type: CONSTRAINT; Schema: public; Owner: -; Tablespace:
1257---
1258-
1259-ALTER TABLE ONLY account
1260- ADD CONSTRAINT account_openid_identifier_key UNIQUE (openid_identifier);
1261-
1262-
1263---
1264--- Name: account_pkey; Type: CONSTRAINT; Schema: public; Owner: -; Tablespace:
1265---
1266-
1267-ALTER TABLE ONLY account
1268- ADD CONSTRAINT account_pkey PRIMARY KEY (id);
1269-
1270-
1271---
1272--- Name: accountpassword_account_key; Type: CONSTRAINT; Schema: public; Owner: -; Tablespace:
1273---
1274-
1275-ALTER TABLE ONLY accountpassword
1276- ADD CONSTRAINT accountpassword_account_key UNIQUE (account);
1277-
1278-
1279---
1280--- Name: accountpassword_pkey; Type: CONSTRAINT; Schema: public; Owner: -; Tablespace:
1281---
1282-
1283-ALTER TABLE ONLY accountpassword
1284- ADD CONSTRAINT accountpassword_pkey PRIMARY KEY (id);
1285-
1286-
1287---
1288--- Name: authtoken__token__key; Type: CONSTRAINT; Schema: public; Owner: -; Tablespace:
1289---
1290-
1291-ALTER TABLE ONLY authtoken
1292- ADD CONSTRAINT authtoken__token__key UNIQUE (token);
1293-
1294-
1295---
1296--- Name: authtoken_pkey; Type: CONSTRAINT; Schema: public; Owner: -; Tablespace:
1297---
1298-
1299-ALTER TABLE ONLY authtoken
1300- ADD CONSTRAINT authtoken_pkey PRIMARY KEY (id);
1301-
1302-
1303---
1304--- Name: emailaddress_pkey; Type: CONSTRAINT; Schema: public; Owner: -; Tablespace:
1305---
1306-
1307-ALTER TABLE ONLY emailaddress
1308- ADD CONSTRAINT emailaddress_pkey PRIMARY KEY (id);
1309-
1310-
1311---
1312--- Name: openidassociation_pkey; Type: CONSTRAINT; Schema: public; Owner: -; Tablespace:
1313---
1314-
1315-ALTER TABLE ONLY openidassociation
1316- ADD CONSTRAINT openidassociation_pkey PRIMARY KEY (server_url, handle);
1317-
1318-
1319---
1320--- Name: openidauthorization_pkey; Type: CONSTRAINT; Schema: public; Owner: -; Tablespace:
1321---
1322-
1323-ALTER TABLE ONLY openidauthorization
1324- ADD CONSTRAINT openidauthorization_pkey PRIMARY KEY (id);
1325-
1326-
1327---
1328--- Name: openidnonce_pkey; Type: CONSTRAINT; Schema: public; Owner: -; Tablespace:
1329---
1330-
1331-ALTER TABLE ONLY openidnonce
1332- ADD CONSTRAINT openidnonce_pkey PRIMARY KEY (server_url, "timestamp", salt);
1333-
1334-
1335---
1336--- Name: openidrpsummary__account__trust_root__openid_identifier__key; Type: CONSTRAINT; Schema: public; Owner: -; Tablespace:
1337---
1338-
1339-ALTER TABLE ONLY openidrpsummary
1340- ADD CONSTRAINT openidrpsummary__account__trust_root__openid_identifier__key UNIQUE (account, trust_root, openid_identifier);
1341-
1342-
1343---
1344--- Name: openidrpsummary_pkey; Type: CONSTRAINT; Schema: public; Owner: -; Tablespace:
1345---
1346-
1347-ALTER TABLE ONLY openidrpsummary
1348- ADD CONSTRAINT openidrpsummary_pkey PRIMARY KEY (id);
1349-
1350-
1351---
1352--- Name: account__old_openid_identifier__idx; Type: INDEX; Schema: public; Owner: -; Tablespace:
1353---
1354-
1355-CREATE INDEX account__old_openid_identifier__idx ON account USING btree (old_openid_identifier);
1356-
1357-
1358---
1359--- Name: authtoken__date_consumed__idx; Type: INDEX; Schema: public; Owner: -; Tablespace:
1360---
1361-
1362-CREATE INDEX authtoken__date_consumed__idx ON authtoken USING btree (date_consumed);
1363-
1364-
1365---
1366--- Name: authtoken__date_created__idx; Type: INDEX; Schema: public; Owner: -; Tablespace:
1367---
1368-
1369-CREATE INDEX authtoken__date_created__idx ON authtoken USING btree (date_created);
1370-
1371-
1372---
1373--- Name: authtoken__requester__idx; Type: INDEX; Schema: public; Owner: -; Tablespace:
1374---
1375-
1376-CREATE INDEX authtoken__requester__idx ON authtoken USING btree (requester);
1377-
1378-
1379---
1380--- Name: emailaddress__account__key; Type: INDEX; Schema: public; Owner: -; Tablespace:
1381---
1382-
1383-CREATE UNIQUE INDEX emailaddress__account__key ON emailaddress USING btree (account) WHERE ((status = 4) AND (account IS NOT NULL));
1384-
1385-
1386---
1387--- Name: INDEX emailaddress__account__key; Type: COMMENT; Schema: public; Owner: -
1388---
1389-
1390-COMMENT ON INDEX emailaddress__account__key IS 'Ensures that an Account only has one preferred email address';
1391-
1392-
1393---
1394--- Name: emailaddress__lower_email__key; Type: INDEX; Schema: public; Owner: -; Tablespace:
1395---
1396-
1397-CREATE INDEX emailaddress__lower_email__key ON emailaddress USING btree (lower(email));
1398-
1399-
1400---
1401--- Name: emailaddress__person__key; Type: INDEX; Schema: public; Owner: -; Tablespace:
1402---
1403-
1404-CREATE UNIQUE INDEX emailaddress__person__key ON emailaddress USING btree (person) WHERE ((status = 4) AND (person IS NOT NULL));
1405-
1406-
1407---
1408--- Name: INDEX emailaddress__person__key; Type: COMMENT; Schema: public; Owner: -
1409---
1410-
1411-COMMENT ON INDEX emailaddress__person__key IS 'Ensures that a Person only has one preferred email address';
1412-
1413-
1414---
1415--- Name: emailaddress__person__status__idx; Type: INDEX; Schema: public; Owner: -; Tablespace:
1416---
1417-
1418-CREATE INDEX emailaddress__person__status__idx ON emailaddress USING btree (person, status);
1419-
1420-
1421---
1422--- Name: openidauthorixation__account__troot__expires__client_id__idx; Type: INDEX; Schema: public; Owner: -; Tablespace:
1423---
1424-
1425-CREATE INDEX openidauthorixation__account__troot__expires__client_id__idx ON openidauthorization USING btree (account, trust_root, date_expires, client_id);
1426-
1427-
1428---
1429--- Name: openidauthorixation__account__trust_root__key; Type: INDEX; Schema: public; Owner: -; Tablespace:
1430---
1431-
1432-CREATE UNIQUE INDEX openidauthorixation__account__trust_root__key ON openidauthorization USING btree (account, trust_root) WHERE (client_id IS NULL);
1433-
1434-
1435---
1436--- Name: openidauthorization__account__client_id__trust_root__key; Type: INDEX; Schema: public; Owner: -; Tablespace:
1437---
1438-
1439-CREATE UNIQUE INDEX openidauthorization__account__client_id__trust_root__key ON openidauthorization USING btree (account, client_id, trust_root) WHERE (client_id IS NOT NULL);
1440-
1441-
1442---
1443--- Name: openidrpsummary__openid_identifier__idx; Type: INDEX; Schema: public; Owner: -; Tablespace:
1444---
1445-
1446-CREATE INDEX openidrpsummary__openid_identifier__idx ON openidrpsummary USING btree (openid_identifier);
1447-
1448-
1449---
1450--- Name: openidrpsummary__trust_root__idx; Type: INDEX; Schema: public; Owner: -; Tablespace:
1451---
1452-
1453-CREATE INDEX openidrpsummary__trust_root__idx ON openidrpsummary USING btree (trust_root);
1454-
1455-
1456---
1457--- Name: _sl_logtrigger_200; Type: TRIGGER; Schema: public; Owner: -
1458---
1459-
1460-CREATE TRIGGER _sl_logtrigger_200
1461- AFTER INSERT OR DELETE OR UPDATE ON account
1462- FOR EACH ROW
1463- EXECUTE PROCEDURE _sl.logtrigger('_sl', '200', 'kvvvvvvvv');
1464-
1465-
1466---
1467--- Name: _sl_logtrigger_201; Type: TRIGGER; Schema: public; Owner: -
1468---
1469-
1470-CREATE TRIGGER _sl_logtrigger_201
1471- AFTER INSERT OR DELETE OR UPDATE ON accountpassword
1472- FOR EACH ROW
1473- EXECUTE PROCEDURE _sl.logtrigger('_sl', '201', 'kvv');
1474-
1475-
1476---
1477--- Name: _sl_logtrigger_274; Type: TRIGGER; Schema: public; Owner: -
1478---
1479-
1480-CREATE TRIGGER _sl_logtrigger_274
1481- AFTER INSERT OR DELETE OR UPDATE ON emailaddress
1482- FOR EACH ROW
1483- EXECUTE PROCEDURE _sl.logtrigger('_sl', '274', 'kvvvvv');
1484-
1485-
1486---
1487--- Name: _sl_logtrigger_335; Type: TRIGGER; Schema: public; Owner: -
1488---
1489-
1490-CREATE TRIGGER _sl_logtrigger_335
1491- AFTER INSERT OR DELETE OR UPDATE ON openidauthorization
1492- FOR EACH ROW
1493- EXECUTE PROCEDURE _sl.logtrigger('_sl', '335', 'kvvvvv');
1494-
1495-
1496---
1497--- Name: _sl_logtrigger_337; Type: TRIGGER; Schema: public; Owner: -
1498---
1499-
1500-CREATE TRIGGER _sl_logtrigger_337
1501- AFTER INSERT OR DELETE OR UPDATE ON openidrpsummary
1502- FOR EACH ROW
1503- EXECUTE PROCEDURE _sl.logtrigger('_sl', '337', 'kvvvvvv');
1504-
1505-
1506---
1507--- Name: _sl_logtrigger_438; Type: TRIGGER; Schema: public; Owner: -
1508---
1509-
1510-CREATE TRIGGER _sl_logtrigger_438
1511- AFTER INSERT OR DELETE OR UPDATE ON authtoken
1512- FOR EACH ROW
1513- EXECUTE PROCEDURE _sl.logtrigger('_sl', '438', 'kvvvvvvvv');
1514-
1515-
1516---
1517--- Name: _sl_logtrigger_439; Type: TRIGGER; Schema: public; Owner: -
1518---
1519-
1520-CREATE TRIGGER _sl_logtrigger_439
1521- AFTER INSERT OR DELETE OR UPDATE ON openidassociation
1522- FOR EACH ROW
1523- EXECUTE PROCEDURE _sl.logtrigger('_sl', '439', 'kkvvvv');
1524-
1525-
1526---
1527--- Name: _sl_logtrigger_445; Type: TRIGGER; Schema: public; Owner: -
1528---
1529-
1530-CREATE TRIGGER _sl_logtrigger_445
1531- AFTER INSERT OR DELETE OR UPDATE ON openidnonce
1532- FOR EACH ROW
1533- EXECUTE PROCEDURE _sl.logtrigger('_sl', '445', 'kkk');
1534-
1535-
1536---
1537--- Name: set_date_status_set_t; Type: TRIGGER; Schema: public; Owner: -
1538---
1539-
1540-CREATE TRIGGER set_date_status_set_t
1541- BEFORE UPDATE ON account
1542- FOR EACH ROW
1543- EXECUTE PROCEDURE set_date_status_set();
1544-
1545-
1546---
1547--- Name: accountpassword_account_fkey; Type: FK CONSTRAINT; Schema: public; Owner: -
1548---
1549-
1550-ALTER TABLE ONLY accountpassword
1551- ADD CONSTRAINT accountpassword_account_fkey FOREIGN KEY (account) REFERENCES account(id) ON DELETE CASCADE;
1552-
1553-
1554---
1555--- Name: authtoken__requester__fk; Type: FK CONSTRAINT; Schema: public; Owner: -
1556---
1557-
1558-ALTER TABLE ONLY authtoken
1559- ADD CONSTRAINT authtoken__requester__fk FOREIGN KEY (requester) REFERENCES account(id);
1560-
1561-
1562---
1563--- Name: emailaddress__account__fk; Type: FK CONSTRAINT; Schema: public; Owner: -
1564---
1565-
1566-ALTER TABLE ONLY emailaddress
1567- ADD CONSTRAINT emailaddress__account__fk FOREIGN KEY (account) REFERENCES account(id) ON DELETE SET NULL;
1568-
1569-
1570---
1571--- Name: openidauthorization__account__fk; Type: FK CONSTRAINT; Schema: public; Owner: -
1572---
1573-
1574-ALTER TABLE ONLY openidauthorization
1575- ADD CONSTRAINT openidauthorization__account__fk FOREIGN KEY (account) REFERENCES account(id);
1576-
1577-
1578---
1579--- Name: openidrpsummary_account_fkey; Type: FK CONSTRAINT; Schema: public; Owner: -
1580---
1581-
1582-ALTER TABLE ONLY openidrpsummary
1583- ADD CONSTRAINT openidrpsummary_account_fkey FOREIGN KEY (account) REFERENCES account(id);
1584-
1585-
1586---
1587--- PostgreSQL database dump complete
1588---
1589-
1590-CREATE INDEX emailaddress__account__status__idx
1591- ON EmailAddress(account, status);
1592-
1593-
1594--- Permissions for Ubuntu SSO server testing on staging.
1595-
1596--- Mirrored from sso_auth user 2010-01-12.
1597--- These tables will eventually no longer be available.
1598---
1599-GRANT SELECT,INSERT,DELETE,UPDATE ON TABLE account TO ubuntu_sso;
1600-GRANT USAGE ON SEQUENCE account_id_seq TO ubuntu_sso;
1601-GRANT SELECT,INSERT,DELETE,UPDATE ON TABLE accountpassword TO ubuntu_sso;
1602-GRANT USAGE ON SEQUENCE accountpassword_id_seq TO ubuntu_sso;
1603-GRANT SELECT,INSERT,DELETE,UPDATE ON TABLE authtoken TO ubuntu_sso;
1604-GRANT USAGE ON SEQUENCE authtoken_id_seq TO ubuntu_sso;
1605-GRANT SELECT ON TABLE person TO ubuntu_sso;
1606-GRANT SELECT,INSERT,DELETE,UPDATE ON TABLE emailaddress TO ubuntu_sso;
1607-GRANT USAGE ON SEQUENCE emailaddress_id_seq TO ubuntu_sso;
1608-GRANT SELECT,INSERT,DELETE ON TABLE openidassociation TO ubuntu_sso;
1609-GRANT SELECT,INSERT,DELETE,UPDATE ON TABLE openidauthorization TO ubuntu_sso;
1610-GRANT USAGE ON SEQUENCE openidauthorization_id_seq TO ubuntu_sso;
1611-GRANT SELECT,INSERT,DELETE ON TABLE openidnonce TO ubuntu_sso;
1612-GRANT SELECT,INSERT,UPDATE ON TABLE openidrpsummary TO ubuntu_sso;
1613-GRANT USAGE ON SEQUENCE openidrpsummary_id_seq TO ubuntu_sso;
1614-GRANT SELECT ON SEQUENCE person_id_seq TO ubuntu_sso;
1615-GRANT SELECT ON TABLE personlocation TO ubuntu_sso;
1616-GRANT SELECT ON SEQUENCE personlocation_id_seq TO ubuntu_sso;
1617-GRANT SELECT ON TABLE teamparticipation TO ubuntu_sso;
1618-GRANT SELECT ON SEQUENCE teamparticipation_id_seq TO ubuntu_sso;
1619-
1620--- Permissions on the Ubuntu SSO tables.
1621---
1622-GRANT SELECT, INSERT, DELETE, UPDATE ON TABLE auth_permission,
1623-auth_group_permissions, auth_group, auth_user, auth_user_groups,
1624-auth_user_user_permissions, auth_message, django_content_type,
1625-django_session, django_site, django_admin_log,
1626-ssoopenidrpconfig TO ubuntu_sso;
1627-
1628-GRANT USAGE ON SEQUENCE auth_group_id_seq,
1629-auth_group_permissions_id_seq, auth_message_id_seq,
1630-auth_permission_id_seq, auth_user_groups_id_seq, auth_user_id_seq,
1631-auth_user_user_permissions_id_seq, django_admin_log_id_seq,
1632-django_content_type_id_seq, django_site_id_seq,
1633-ssoopenidrpconfig_id_seq TO ubuntu_sso;
1634-
1635--- Permissions on the lpmirror tables (mirrors of relevant Launchpad
1636--- information, available even when Launchpad database upgrades are in
1637--- progress).
1638-GRANT SELECT
1639-ON TABLE
1640- lp_person, lp_personlocation, lp_teamparticipation, lp_account
1641-TO ubuntu_sso;
1642-
1643
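As a side note on the GRANT block above: a quick way to audit what a role actually ended up with is the standard information_schema view below (a sketch, not part of this branch; run it on the restored database).

    -- List every table privilege held by ubuntu_sso; useful for checking
    -- that the grants above survived a dump/restore cycle.
    SELECT table_name, privilege_type
    FROM information_schema.role_table_grants
    WHERE grantee = 'ubuntu_sso'
    ORDER BY table_name, privilege_type;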
1644=== removed file 'database/replication/authdb_drop.sql'
1645--- database/replication/authdb_drop.sql 2009-11-11 10:32:35 +0000
1646+++ database/replication/authdb_drop.sql 1970-01-01 00:00:00 +0000
1647@@ -1,14 +0,0 @@
1648--- Copyright 2009 Canonical Ltd. This software is licensed under the
1649--- GNU Affero General Public License version 3 (see the file LICENSE).
1650-
1651-SET client_min_messages=ERROR;
1652-
1653--- Drop everything in the authdb replication set.
1654-DROP TABLE IF EXISTS Account CASCADE;
1655-DROP TABLE IF EXISTS AccountPassword CASCADE;
1656-DROP TABLE IF EXISTS AuthToken CASCADE;
1657-DROP TABLE IF EXISTS EmailAddress CASCADE;
1658-DROP TABLE IF EXISTS OpenIDAssociation CASCADE;
1659-DROP TABLE IF EXISTS OpenIDAuthorization CASCADE;
1660-DROP TABLE IF EXISTS OpenIDNonce CASCADE;
1661-DROP TABLE IF EXISTS OpenIDRPSummary;
1662
1663=== removed file 'database/replication/authdb_sequences.sql'
1664--- database/replication/authdb_sequences.sql 2010-01-13 06:54:32 +0000
1665+++ database/replication/authdb_sequences.sql 1970-01-01 00:00:00 +0000
1666@@ -1,22 +0,0 @@
1667--- Repair sequences in the authdb replication set. We need to do this because
1668--- we cannot restore the sequence values from the dump when restoring the
1669--- data using pg_restore --data-only.
1670-
1671-SELECT setval('account_id_seq', max(id)) AS Account
1672-FROM Account;
1673-
1674-SELECT setval('accountpassword_id_seq', max(id)) AS AccountPassword
1675-FROM AccountPassword;
1676-
1677-SELECT setval('authtoken_id_seq', max(id)) AS AuthToken
1678-FROM AuthToken;
1679-
1680-SELECT setval('emailaddress_id_seq', max(id)) AS EmailAddress
1681-FROM EmailAddress;
1682-
1683-SELECT setval('openidauthorization_id_seq', max(id)) AS OpenIDAuthorization
1684-FROM OpenIDAuthorization;
1685-
1686-SELECT setval('openidrpsummary_id_seq', max(id)) AS OpenIDRPSummary
1687-FROM OpenIDRPSummary;
1688-
1689
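The removed script above repairs sequences with setval() after a data-only restore. A more defensive variant of the same pattern (a sketch, not part of this branch) guards against empty tables, where max(id) is NULL and the sequence would not actually be repaired:

    -- Safe form: fall back to 1 when the table is empty.
    SELECT setval('account_id_seq', coalesce(max(id), 1))
    FROM Account;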
1690=== modified file 'database/replication/helpers.py'
1691--- database/replication/helpers.py 2010-02-26 03:34:49 +0000
1692+++ database/replication/helpers.py 2010-04-27 02:13:38 +0000
1693@@ -28,20 +28,15 @@
1694
1695 # Replication set id constants. Don't change these without DBA help.
1696 LPMAIN_SET_ID = 1
1697-AUTHDB_SET_ID = 2
1698 HOLDING_SET_ID = 666
1699+LPMIRROR_SET_ID = 4
1700
1701-# Seed tables for the authdb replication set to be passed to
1702+# Seed tables for the lpmain replication set to be passed to
1703 # calculate_replication_set().
1704-AUTHDB_SEED = frozenset([
1705+LPMAIN_SEED = frozenset([
1706 ('public', 'account'),
1707+ ('public', 'openidnonce'),
1708 ('public', 'openidassociation'),
1709- ('public', 'openidnonce'),
1710- ])
1711-
1712-# Seed tables for the lpmain replication set to be passed to
1713-# calculate_replication_set().
1714-LPMAIN_SEED = frozenset([
1715 ('public', 'person'),
1716 ('public', 'launchpaddatabaserevision'),
1717 ('public', 'databasereplicationlag'),
1718@@ -57,7 +52,6 @@
1719 ('public', 'launchpadstatistic'),
1720 ('public', 'parsedapachelog'),
1721 ('public', 'shipitsurvey'),
1722- ('public', 'openidassociations'), # Remove this in April 2009 or later.
1723 ('public', 'databasereplicationlag'),
1724 ])
1725
1726@@ -70,6 +64,7 @@
1727 'public.secret', 'public.sessiondata', 'public.sessionpkgdata',
1728 # Mirror tables, per Bug #489078. These tables have their own private
1729 # replication set that is setup manually.
1730+ 'public.lp_account',
1731 'public.lp_person',
1732 'public.lp_personlocation',
1733 'public.lp_teamparticipation',
1734@@ -176,12 +171,13 @@
1735 script = preamble() + script
1736
1737 if sync is not None:
1738- script = script + dedent("""\
1739+ sync_script = dedent("""\
1740 sync (id = @master_node);
1741 wait for event (
1742- origin = ALL, confirmed = ALL,
1743+ origin = @master_node, confirmed = ALL,
1744 wait on = @master_node, timeout = %d);
1745 """ % sync)
1746+ script = script + sync_script
1747
1748 # Copy the script to a NamedTemporaryFile rather than just pumping it
1749 # to slonik via stdin. This way it can be examined if slonik appears
1750@@ -192,7 +188,7 @@
1751
1752 # Run slonik
1753 log.debug("Executing slonik script %s" % script_on_disk.name)
1754- log.log(DEBUG2, script)
1755+ log.log(DEBUG2, 'Running script:\n%s' % script)
1756 returncode = subprocess.call(['slonik', script_on_disk.name])
1757
1758 if returncode != 0:
1759@@ -323,10 +319,10 @@
1760 cluster name = sl;
1761
1762 # Symbolic ids for replication sets.
1763- define lpmain_set %d;
1764- define authdb_set %d;
1765- define holding_set %d;
1766- """ % (LPMAIN_SET_ID, AUTHDB_SET_ID, HOLDING_SET_ID))]
1767+ define lpmain_set %d;
1768+ define holding_set %d;
1769+ define lpmirror_set %d;
1770+ """ % (LPMAIN_SET_ID, HOLDING_SET_ID, LPMIRROR_SET_ID))]
1771
1772 if master_node is not None:
1773 preamble.append(dedent("""\
1774@@ -503,19 +499,6 @@
1775 raise ReplicationConfigError(
1776 "Unreplicated sequences: %s" % repr(unrepl_sequences))
1777
1778- authdb_tables, authdb_sequences = calculate_replication_set(
1779- cur, AUTHDB_SEED)
1780 lpmain_tables, lpmain_sequences = calculate_replication_set(
1781 cur, LPMAIN_SEED)
1782
1783- confused_tables = authdb_tables.intersection(lpmain_tables)
1784- if confused_tables:
1785- raise ReplicationConfigError(
1786- "Tables exist in multiple replication sets: %s"
1787- % repr(confused_tables))
1788- confused_sequences = authdb_sequences.intersection(lpmain_sequences)
1789- if confused_sequences:
1790- raise ReplicationConfigError(
1791- "Sequences exist in multiple replication sets: %s"
1792- % repr(confused_sequences))
1793-
1794
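The sync/wait-for-event change above tightens what the slonik script waits on. For a manual check that the cluster is actually caught up, the helper functions granted EXECUTE in security.cfg later in this diff can be queried directly (a sketch):

    -- Overall lag across the cluster; the one-argument form reports a
    -- single node. Large or NULL values mean replication is not caught up.
    SELECT replication_lag();
    SELECT replication_lag(2);  -- node id 2 is a hypothetical example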
1795=== modified file 'database/replication/initialize.py'
1796--- database/replication/initialize.py 2010-01-22 06:25:48 +0000
1797+++ database/replication/initialize.py 2010-04-27 02:13:38 +0000
1798@@ -1,4 +1,4 @@
1799-#!/usr/bin/python2.5
1800+#!/usr/bin/python2.5 -S
1801 #
1802 # Copyright 2009 Canonical Ltd. This software is licensed under the
1803 # GNU Affero General Public License version 3 (see the file LICENSE).
1804@@ -88,54 +88,13 @@
1805 helpers.sync(120) # Will exit on failure.
1806
1807
1808-def create_replication_sets(
1809- authdb_tables, authdb_sequences, lpmain_tables, lpmain_sequences):
1810+def create_replication_sets(lpmain_tables, lpmain_sequences):
1811 """Create the replication sets."""
1812 log.info('Creating Slony-I replication sets.')
1813
1814- # Instead of creating both the authdb and lpmain replication sets,
1815- # we just create the lpmain replication set containing everything.
1816- # This way, we can then test the populate_auth_replication_set.py
1817- # migration script that moves the relevant tables from the lpmain
1818- # replication set to the authdb replication set.
1819- # We will turn this behavior off once we are running two
1820- # replication sets in production and remove the migration script.
1821- lpmain_tables = lpmain_tables.union(authdb_tables)
1822- lpmain_sequences = lpmain_sequences.union(authdb_sequences)
1823-
1824 script = ["try {"]
1825- # script,append("""
1826- # echo 'Creating AuthDB replication set (@authdb_set)';
1827- # create set (
1828- # id=@authdb_set, origin=@master_node,
1829- # comment='AuthDB tables and sequences');
1830- # """)
1831
1832- # entry_id = 1
1833- # for table in sorted(authdb_tables):
1834- # script.append("""
1835- # echo 'Adding %(table)s to replication set @authdb_set';
1836- # set add table (
1837- # set id=@authdb_set,
1838- # origin=@master_node,
1839- # id=%(entry_id)d,
1840- # fully qualified name='%(table)s');
1841- # """ % vars())
1842- # entry_id += 1
1843- # entry_id = 1
1844- # for sequence in sorted(authdb_sequences):
1845- # script.append("""
1846- # echo 'Adding %(sequence)s to replication set @authdb_set';
1847- # set add sequence (
1848- # set id=@authdb_set,
1849- # origin=@master_node,
1850- # id=%(entry_id)d,
1851- # fully qualified name='%(sequence)s');
1852- # """ % vars())
1853- # entry_id += 1
1854- #
1855-    # assert entry_id < 200, 'authdb replication set has > 200 objects???'
1856- entry_id = 200
1857+ entry_id = 1
1858
1859 script.append("""
1860 echo 'Creating LPMain replication set (@lpmain_set)';
1861@@ -157,7 +116,7 @@
1862 """ % vars())
1863 entry_id += 1
1864
1865- entry_id = 200
1866+ entry_id = 1
1867 script.append(
1868 "echo 'Adding %d sequences to replication set @lpmain_set';"
1869 % len(lpmain_sequences))
1870@@ -199,9 +158,6 @@
1871 con.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)
1872 global cur
1873 cur = con.cursor()
1874- log.debug("Calculating authdb replication set.")
1875- authdb_tables, authdb_sequences = helpers.calculate_replication_set(
1876- cur, helpers.AUTHDB_SEED)
1877 log.debug("Calculating lpmain replication set.")
1878 lpmain_tables, lpmain_sequences = helpers.calculate_replication_set(
1879 cur, helpers.LPMAIN_SEED)
1880@@ -212,8 +168,7 @@
1881 fails = 0
1882 for table in all_tables_in_schema(cur, 'public'):
1883 times_seen = 0
1884- for table_set in [
1885- authdb_tables, lpmain_tables, helpers.IGNORED_TABLES]:
1886+ for table_set in [lpmain_tables, helpers.IGNORED_TABLES]:
1887 if table in table_set:
1888 times_seen += 1
1889 if times_seen == 0:
1890@@ -224,8 +179,7 @@
1891 fails += 1
1892 for sequence in all_sequences_in_schema(cur, 'public'):
1893 times_seen = 0
1894- for sequence_set in [
1895- authdb_sequences, lpmain_sequences, helpers.IGNORED_SEQUENCES]:
1896+ for sequence_set in [lpmain_sequences, helpers.IGNORED_SEQUENCES]:
1897 if sequence in sequence_set:
1898 times_seen += 1
1899 if times_seen == 0:
1900@@ -242,8 +196,7 @@
1901
1902 ensure_live()
1903
1904- create_replication_sets(
1905- authdb_tables, authdb_sequences, lpmain_tables, lpmain_sequences)
1906+ create_replication_sets(lpmain_tables, lpmain_sequences)
1907
1908 helpers.sync(0)
1909
1910
1911=== modified file 'database/replication/new-slave.py'
1912--- database/replication/new-slave.py 2010-01-13 08:40:48 +0000
1913+++ database/replication/new-slave.py 2010-04-27 02:13:38 +0000
1914@@ -1,4 +1,4 @@
1915-#!/usr/bin/python2.5
1916+#!/usr/bin/python2.5 -S
1917 #
1918 # Copyright 2009 Canonical Ltd. This software is licensed under the
1919 # GNU Affero General Public License version 3 (see the file LICENSE).
1920@@ -22,10 +22,9 @@
1921 from canonical.database.sqlbase import (
1922 connect_string, ISOLATION_LEVEL_AUTOCOMMIT)
1923 from canonical.launchpad.scripts import db_options, logger_options, logger
1924-from canonical.launchpad.webapp.adapter import _auth_store_tables
1925
1926 import replication.helpers
1927-from replication.helpers import AUTHDB_SET_ID, LPMAIN_SET_ID
1928+from replication.helpers import LPMAIN_SET_ID
1929
1930 def main():
1931 parser = OptionParser(
1932@@ -77,8 +76,6 @@
1933 # Get the connection string for masters.
1934 lpmain_connection_string = get_master_connection_string(
1935 source_connection, parser, LPMAIN_SET_ID) or source_connection_string
1936- authdb_connection_string = get_master_connection_string(
1937- source_connection, parser, AUTHDB_SET_ID) or source_connection_string
1938
1939 # Sanity check the target connection string.
1940 target_connection_string = ConnectionString(raw_target_connection_string)
1941@@ -130,31 +127,6 @@
1942 log.error("Failed to duplicate database schema.")
1943 return 1
1944
1945- # Drop the authdb replication set tables we just restored, as they
1946-    # will be broken if the authdb master is a separate database to the
1947- # lpmain master.
1948- log.debug("Dropping (possibly corrupt) authdb tables.")
1949- cur = target_con.cursor()
1950- for table_name in _auth_store_tables:
1951- cur.execute("DROP TABLE IF EXISTS %s CASCADE" % table_name)
1952- target_con.commit()
1953-
1954- # Duplicate the authdb schema.
1955- log.info("Duplicating authdb schema from '%s' to '%s'" % (
1956- authdb_connection_string, target_connection_string))
1957- table_args = ["--table=%s" % table for table in _auth_store_tables]
1958-    # We need to restore the two cross-replication-set views that were
1959- # dropped as a side effect of dropping the auth store tables.
1960- table_args.append("--table=ValidPersonCache")
1961- table_args.append("--table=ValidPersonOrTeamCache")
1962- cmd = "pg_dump --schema-only --no-privileges %s %s | psql -1 -q %s" % (
1963- ' '.join(table_args),
1964- source_connection_string.asPGCommandLineArgs(),
1965- target_connection_string.asPGCommandLineArgs())
1966- if subprocess.call(cmd, shell=True) != 0:
1967- log.error("Failed to duplicate database schema.")
1968- return 1
1969-
1970 # Trash the broken Slony tables we just duplicated.
1971 log.debug("Removing slony cruft.")
1972 cur = target_con.cursor()
1973@@ -163,21 +135,30 @@
1974 del target_con
1975
1976 # Get a list of existing set ids that can be subscribed too. This
1977- # is all sets where the origin is the master_node, and set 2 if
1978- # the master happens to be configured as a forwarding slave. We
1979+ # is all sets where the origin is the master_node. We
1980 # don't allow other sets where the master is configured as a
1981 # forwarding slave as we have to special case rebuilding the database
1982- # schema (such as we do for the authdb replication set 2).
1983+ # schema, and we want to avoid cascading slave configurations anyway
1984+ # since we are running an antique Slony-I at the moment - keep it
1985+ # simple!
1986+ # We order the sets smallest to largest by number of tables.
1987+ # This should let us subscribe the quickest sets first for more
1988+ # immediate feedback.
1989 source_connection.rollback()
1990 master_node = replication.helpers.get_master_node(source_connection)
1991 cur = source_connection.cursor()
1992 cur.execute("""
1993- SELECT set_id FROM _sl.sl_set WHERE set_origin=%d
1994- UNION
1995- SELECT sub_set AS set_id FROM _sl.sl_subscribe
1996- WHERE sub_receiver=%d AND sub_forward IS TRUE AND sub_active IS TRUE
1997- AND sub_set=2
1998- """ % (master_node.node_id, master_node.node_id))
1999+ SELECT set_id
2000+ FROM _sl.sl_set, (
2001+ SELECT tab_set, count(*) AS tab_count
2002+ FROM _sl.sl_table GROUP BY tab_set
2003+ ) AS TableCounts
2004+ WHERE
2005+ set_origin=%d
2006+ AND tab_set = set_id
2007+ ORDER BY tab_count
2008+ """
2009+ % (master_node.node_id,))
2010 set_ids = [set_id for set_id, in cur.fetchall()]
2011 log.debug("Discovered set ids %s" % repr(list(set_ids)))
2012
2013@@ -209,19 +190,32 @@
2014 } on error { echo 'Failed.'; exit 1; }
2015 """)
2016
2017+ full_sync = []
2018+ sync_nicknames = [node.nickname for node in existing_nodes]
2019+    sync_nicknames.append('new_node')
2020+ for nickname in sync_nicknames:
2021+ full_sync.append(dedent("""\
2022+ echo 'Waiting for %(nickname)s sync.';
2023+ sync (id=@%(nickname)s);
2024+ wait for event (
2025+ origin = @%(nickname)s, confirmed=ALL,
2026+ wait on = @%(nickname)s, timeout=0);
2027+ """ % {'nickname': nickname}))
2028+ full_sync = '\n'.join(full_sync)
2029+ script += full_sync
2030+
2031 for set_id in set_ids:
2032-
2033 script += dedent("""\
2034 echo 'Subscribing new node to set %d.';
2035 subscribe set (
2036 id=%d, provider=@master_node, receiver=@new_node, forward=yes);
2037-
2038- echo 'Waiting for sync... this might take a while...';
2039+ echo 'Waiting for subscribe to start processing.';
2040 sync (id = @master_node);
2041 wait for event (
2042- origin = ALL, confirmed = ALL,
2043+ origin = @master_node, confirmed = ALL,
2044 wait on = @master_node, timeout = 0);
2045 """ % (set_id, set_id))
2046+ script += full_sync
2047
2048 replication.helpers.execute_slonik(script)
2049
2050
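After the generated slonik script from the hunk above has run, subscription state can be inspected straight from the Slony catalog, using the same _sl tables the script itself queries (a sketch):

    -- One row per (set, receiver); sub_active becomes true once the
    -- initial copy for that set has completed.
    SELECT sub_set, sub_receiver, sub_forward, sub_active
    FROM _sl.sl_subscribe
    ORDER BY sub_set, sub_receiver;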
2051=== removed file 'database/replication/populate_auth_replication_set.py'
2052--- database/replication/populate_auth_replication_set.py 2009-10-17 14:06:03 +0000
2053+++ database/replication/populate_auth_replication_set.py 1970-01-01 00:00:00 +0000
2054@@ -1,177 +0,0 @@
2055-#!/usr/bin/python2.5
2056-#
2057-# Copyright 2009 Canonical Ltd. This software is licensed under the
2058-# GNU Affero General Public License version 3 (see the file LICENSE).
2059-
2060-"""Populate the auth replication set.
2061-
2062-This script moves the SSO tables from the main replication set to
2063-the auth replication set.
2064-
2065-Once it has been run on production, these tables can no longer be
2066-maintained using the Launchpad database maintenance scripts
2067-(upgrade.py, security.py etc.).
2068-
2069-We do this so Launchpad database upgrades do not lock the SSO tables,
2070-allowing the SSO service to continue to operate.
2071-
2072-This is a single shot script.
2073-"""
2074-
2075-__metaclass__ = type
2076-__all__ = []
2077-
2078-import _pythonpath
2079-
2080-import sys
2081-from textwrap import dedent
2082-from optparse import OptionParser
2083-
2084-from canonical.database.sqlbase import (
2085- connect, ISOLATION_LEVEL_AUTOCOMMIT, sqlvalues)
2086-from canonical.launchpad.scripts import db_options, logger_options, logger
2087-
2088-import replication.helpers
2089-
2090-def create_auth_set(cur):
2091- """Create the auth replication set if it doesn't already exist."""
2092- cur.execute("SELECT TRUE FROM _sl.sl_set WHERE set_id=2")
2093- if cur.fetchone() is not None:
2094- log.info("Auth set already exists.")
2095- return
2096- slonik_script = dedent("""\
2097- create set (
2098- id=@authdb_set, origin=@master_node,
2099- comment='SSO service tables');
2100- """)
2101- log.info("Creating authdb replication set.")
2102- replication.helpers.execute_slonik(slonik_script, sync=0)
2103-
2104-
2105-def subscribe_auth_set(cur):
2106- """The authdb set subscription much match the lpmain set subscription.
2107-
2108- This is a requirement to move stuff between replication sets. It
2109- is also what we want (all nodes replicating everything).
2110- """
2111- cur.execute("""
2112- SELECT sub_receiver FROM _sl.sl_subscribe WHERE sub_set = 1
2113- EXCEPT
2114- SELECT sub_receiver FROM _sl.sl_subscribe WHERE sub_set = 2
2115- """)
2116- for node_id in (node_id for node_id, in cur.fetchall()):
2117- log.info("Subscribing Node #%d to authdb replication set" % node_id)
2118- success = replication.helpers.execute_slonik(dedent("""\
2119- subscribe set (
2120- id = @authdb_set, provider = @master_node,
2121- receiver = %d, forward = yes);
2122- """ % node_id), sync=0)
2123- if not success:
2124- log.error("Slonik failed. Exiting.")
2125- sys.exit(1)
2126-
2127-
2128-def migrate_tables_and_sequences(cur):
2129- auth_tables, auth_sequences = (
2130- replication.helpers.calculate_replication_set(
2131- cur, replication.helpers.AUTHDB_SEED))
2132-
2133- slonik_script = ["try {"]
2134- for table_fqn in auth_tables:
2135- namespace, table_name = table_fqn.split('.')
2136- cur.execute("""
2137- SELECT tab_id, tab_set
2138- FROM _sl.sl_table
2139- WHERE tab_nspname = %s AND tab_relname = %s
2140- """ % sqlvalues(namespace, table_name))
2141- try:
2142- table_id, set_id = cur.fetchone()
2143- except IndexError:
2144- log.error("Table %s not found in _sl.sl_tables" % table_fqn)
2145- sys.exit(1)
2146- if set_id == 1:
2147- slonik_script.append("echo 'Moving table %s';" % table_fqn)
2148- slonik_script.append(
2149- "set move table "
2150- "(origin=@master_node, id=%d, new set=@authdb_set);"
2151- % table_id)
2152- elif set_id == 2:
2153- log.warn(
2154- "Table %s already in authdb replication set"
2155- % table_fqn)
2156- else:
2157- log.error("Unknown replication set %s" % set_id)
2158- sys.exit(1)
2159-
2160- for sequence_fqn in auth_sequences:
2161- namespace, sequence_name = sequence_fqn.split('.')
2162- cur.execute("""
2163- SELECT seq_id, seq_set
2164- FROM _sl.sl_sequence
2165- WHERE seq_nspname = %s AND seq_relname = %s
2166- """ % sqlvalues(namespace, sequence_name))
2167- try:
2168- sequence_id, set_id = cur.fetchone()
2169- except IndexError:
2170- log.error(
2171- "Sequence %s not found in _sl.sl_sequences" % sequence_fqn)
2172- sys.exit(1)
2173- if set_id == 1:
2174- slonik_script.append("echo 'Moving sequence %s';" % sequence_fqn)
2175- slonik_script.append(
2176- "set move sequence "
2177- "(origin=@master_node, id=%d, new set=@authdb_set);"
2178- % sequence_id)
2179-        elif set_id == 2:
2180- log.warn(
2181- "Sequence %s already in authdb replication set."
2182- % sequence_fqn)
2183- else:
2184- log.error("Unknown replication set %s" % set_id)
2185- sys.exit(1)
2186-
2187- if len(slonik_script) == 1:
2188- log.warn("No tables or sequences to migrate.")
2189- return
2190-
2191- slonik_script.append(dedent("""\
2192- } on error {
2193- echo 'Failed to move one or more tables or sequences.';
2194- exit 1;
2195- }
2196- """))
2197-
2198- slonik_script = "\n".join(slonik_script)
2199-
2200- log.info("Running migration script...")
2201- if not replication.helpers.execute_slonik(slonik_script, sync=0):
2202- log.error("Slonik failed. Exiting.")
2203- sys.exit(1)
2204-
2205-
2206-def main():
2207- parser = OptionParser()
2208- db_options(parser)
2209- logger_options(parser)
2210- options, args = parser.parse_args()
2211-
2212- global log
2213- log = logger(options)
2214-
2215- con = connect('slony', isolation=ISOLATION_LEVEL_AUTOCOMMIT)
2216- cur = con.cursor()
2217-
2218- # Don't start until cluster is synced.
2219- log.info("Waiting for sync.")
2220- replication.helpers.sync(0)
2221-
2222- create_auth_set(cur)
2223- subscribe_auth_set(cur)
2224- migrate_tables_and_sequences(cur)
2225-
2226-
2227-log = None # Global log
2228-
2229-
2230-if __name__ == '__main__':
2231- main()
2232
2233=== modified file 'database/replication/preamble.py'
2234--- database/replication/preamble.py 2009-10-17 14:06:03 +0000
2235+++ database/replication/preamble.py 2010-04-27 02:13:38 +0000
2236@@ -1,4 +1,4 @@
2237-#!/usr/bin/python2.5
2238+#!/usr/bin/python2.5 -S
2239 #
2240 # Copyright 2009 Canonical Ltd. This software is licensed under the
2241 # GNU Affero General Public License version 3 (see the file LICENSE).
2242
2243=== modified file 'database/replication/repair-restored-db.py'
2244--- database/replication/repair-restored-db.py 2010-01-22 06:25:48 +0000
2245+++ database/replication/repair-restored-db.py 2010-04-27 02:13:38 +0000
2246@@ -1,4 +1,4 @@
2247-#!/usr/bin/python2.5
2248+#!/usr/bin/python2.5 -S
2249 #
2250 # Copyright 2009 Canonical Ltd. This software is licensed under the
2251 # GNU Affero General Public License version 3 (see the file LICENSE).
2252
2253=== modified file 'database/replication/report.py'
2254--- database/replication/report.py 2009-10-17 14:06:03 +0000
2255+++ database/replication/report.py 2010-04-27 02:13:38 +0000
2256@@ -1,4 +1,4 @@
2257-#!/usr/bin/python2.5
2258+#!/usr/bin/python2.5 -S
2259 #
2260 # Copyright 2009 Canonical Ltd. This software is licensed under the
2261 # GNU Affero General Public License version 3 (see the file LICENSE).
2262
2263=== modified file 'database/replication/slon_ctl.py'
2264--- database/replication/slon_ctl.py 2009-10-17 14:06:03 +0000
2265+++ database/replication/slon_ctl.py 2010-04-27 02:13:38 +0000
2266@@ -1,4 +1,4 @@
2267-#!/usr/bin/python2.5
2268+#!/usr/bin/python2.5 -S
2269 #
2270 # Copyright 2009 Canonical Ltd. This software is licensed under the
2271 # GNU Affero General Public License version 3 (see the file LICENSE).
2272
2273=== modified file 'database/replication/sync.py'
2274--- database/replication/sync.py 2010-01-06 15:52:31 +0000
2275+++ database/replication/sync.py 2010-04-27 02:13:38 +0000
2276@@ -1,4 +1,4 @@
2277-#!/usr/bin/python2.5
2278+#!/usr/bin/python2.5 -S
2279 #
2280 # Copyright 2010 Canonical Ltd. This software is licensed under the
2281 # GNU Affero General Public License version 3 (see the file LICENSE).
2282
2283=== modified file 'database/schema/diagram.py'
2284--- database/schema/diagram.py 2010-02-09 01:31:05 +0000
2285+++ database/schema/diagram.py 2010-04-27 02:13:38 +0000
2286@@ -1,4 +1,4 @@
2287-#!/usr/bin/python2.5
2288+#!/usr/bin/python2.5 -S
2289 #
2290 # Copyright 2009 Canonical Ltd. This software is licensed under the
2291 # GNU Affero General Public License version 3 (see the file LICENSE).
2292
2293=== modified file 'database/schema/emptytables.py'
2294--- database/schema/emptytables.py 2009-10-17 14:06:03 +0000
2295+++ database/schema/emptytables.py 2010-04-27 02:13:38 +0000
2296@@ -1,4 +1,4 @@
2297-#!/usr/bin/python2.5
2298+#!/usr/bin/python2.5 -S
2299 #
2300 # Copyright 2009 Canonical Ltd. This software is licensed under the
2301 # GNU Affero General Public License version 3 (see the file LICENSE).
2302
2303=== modified file 'database/schema/fti.py'
2304--- database/schema/fti.py 2009-11-06 21:58:50 +0000
2305+++ database/schema/fti.py 2010-04-27 02:13:38 +0000
2306@@ -1,4 +1,4 @@
2307-#!/usr/bin/python2.5
2308+#!/usr/bin/python2.5 -S
2309 #
2310 # Copyright 2009 Canonical Ltd. This software is licensed under the
2311 # GNU Affero General Public License version 3 (see the file LICENSE).
2312
2313=== modified file 'database/schema/online_fti_updater.py'
2314--- database/schema/online_fti_updater.py 2009-10-17 14:06:03 +0000
2315+++ database/schema/online_fti_updater.py 2010-04-27 02:13:38 +0000
2316@@ -1,4 +1,4 @@
2317-#!/usr/bin/python2.5
2318+#!/usr/bin/python2.5 -S
2319 #
2320 # Copyright 2009 Canonical Ltd. This software is licensed under the
2321 # GNU Affero General Public License version 3 (see the file LICENSE).
2322
2323=== added file 'database/schema/patch-2207-47-0.sql'
2324--- database/schema/patch-2207-47-0.sql 1970-01-01 00:00:00 +0000
2325+++ database/schema/patch-2207-47-0.sql 2010-04-27 02:13:38 +0000
2326@@ -0,0 +1,6 @@
2327+SET client_min_messages=ERROR;
2328+
2329+UPDATE BugWatchActivity SET result = 9 WHERE result IS NULL;
2330+ALTER TABLE BugWatchActivity ALTER COLUMN result SET NOT NULL;
2331+
2332+INSERT INTO LaunchpadDatabaseRevision VALUES (2207, 47, 0);
2333
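patch-2207-47-0.sql uses the usual two-step for tightening a column: backfill the NULLs, then add the constraint. A pre-flight check along these lines (a sketch, not part of the patch) confirms the UPDATE caught everything before SET NOT NULL runs:

    -- Must return 0, or the ALTER TABLE above would fail.
    SELECT count(*) FROM BugWatchActivity WHERE result IS NULL;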
2334=== added file 'database/schema/patch-2207-48-0.sql'
2335--- database/schema/patch-2207-48-0.sql 1970-01-01 00:00:00 +0000
2336+++ database/schema/patch-2207-48-0.sql 2010-04-27 02:13:38 +0000
2337@@ -0,0 +1,27 @@
2338+SET client_min_messages=ERROR;
2339+
2340+ALTER TABLE EmailAddress ADD CONSTRAINT emailaddress__person__fk
2341+ FOREIGN KEY (person) REFERENCES Person;
2342+
2343+CREATE TEMPORARY TABLE DudAccountLinks AS
2344+SELECT Person.id
2345+FROM Person
2346+LEFT OUTER JOIN Account ON Person.account = Account.id
2347+WHERE Person.account IS NOT NULL AND Account.id IS NULL;
2348+
2349+UPDATE Person SET account = NULL
2350+FROM DudAccountLinks
2351+WHERE Person.id = DudAccountLinks.id;
2352+
2353+DROP TABLE DudAccountLinks;
2354+
2355+ALTER TABLE Person ADD CONSTRAINT person__account__fk
2356+ FOREIGN KEY (account) REFERENCES Account;
2357+
2358+ALTER TABLE MailingListSubscription
2359+ ADD CONSTRAINT mailinglistsubscription__email_address_fk
2360+ FOREIGN KEY (email_address) REFERENCES EmailAddress
2361+ ON DELETE CASCADE;
2362+
2363+INSERT INTO LaunchpadDatabaseRevision VALUES (2207, 48, 0);
2364+
2365
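patch-2207-48-0.sql clears dangling Person.account references via a temporary table before adding the foreign key. The same LEFT JOIN it builds the temp table from doubles as a verification query afterwards (a sketch):

    -- Should return 0 once the DudAccountLinks cleanup has run; any
    -- remaining rows would make the person__account__fk ALTER fail.
    SELECT count(*)
    FROM Person
    LEFT OUTER JOIN Account ON Person.account = Account.id
    WHERE Person.account IS NOT NULL AND Account.id IS NULL;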
2366=== modified file 'database/schema/pending/add-mailing-list-experts.py'
2367--- database/schema/pending/add-mailing-list-experts.py 2009-10-17 14:06:03 +0000
2368+++ database/schema/pending/add-mailing-list-experts.py 2010-04-27 02:13:38 +0000
2369@@ -1,4 +1,4 @@
2370-#!/usr/bin/python2.5
2371+#!/usr/bin/python2.5 -S
2372 #
2373 # Copyright 2009 Canonical Ltd. This software is licensed under the
2374 # GNU Affero General Public License version 3 (see the file LICENSE).
2375
2376=== modified file 'database/schema/pending/create-openid-rp-configs.py'
2377--- database/schema/pending/create-openid-rp-configs.py 2009-10-17 14:06:03 +0000
2378+++ database/schema/pending/create-openid-rp-configs.py 2010-04-27 02:13:38 +0000
2379@@ -1,4 +1,4 @@
2380-#!/usr/bin/python2.5
2381+#!/usr/bin/python2.5 -S
2382 #
2383 # Copyright 2009 Canonical Ltd. This software is licensed under the
2384 # GNU Affero General Public License version 3 (see the file LICENSE).
2385
2386=== modified file 'database/schema/pending/gnu-savannah-celebrity.py'
2387--- database/schema/pending/gnu-savannah-celebrity.py 2009-10-17 14:06:03 +0000
2388+++ database/schema/pending/gnu-savannah-celebrity.py 2010-04-27 02:13:38 +0000
2389@@ -1,4 +1,4 @@
2390-#!/usr/bin/python2.5
2391+#!/usr/bin/python2.5 -S
2392 #
2393 # Copyright 2009 Canonical Ltd. This software is licensed under the
2394 # GNU Affero General Public License version 3 (see the file LICENSE).
2395
2396=== modified file 'database/schema/pending/migrate_kde_potemplates.py'
2397--- database/schema/pending/migrate_kde_potemplates.py 2009-10-17 14:06:03 +0000
2398+++ database/schema/pending/migrate_kde_potemplates.py 2010-04-27 02:13:38 +0000
2399@@ -1,4 +1,4 @@
2400-#!/usr/bin/python2.5
2401+#!/usr/bin/python2.5 -S
2402 #
2403 # Copyright 2009 Canonical Ltd. This software is licensed under the
2404 # GNU Affero General Public License version 3 (see the file LICENSE).
2405
2406=== modified file 'database/schema/pending/new-person-columns.py'
2407--- database/schema/pending/new-person-columns.py 2009-10-17 14:06:03 +0000
2408+++ database/schema/pending/new-person-columns.py 2010-04-27 02:13:38 +0000
2409@@ -1,4 +1,4 @@
2410-#!/usr/bin/python2.5
2411+#!/usr/bin/python2.5 -S
2412 #
2413 # Copyright 2009 Canonical Ltd. This software is licensed under the
2414 # GNU Affero General Public License version 3 (see the file LICENSE).
2415
2416=== added file 'database/schema/pending/patch-2207-49-0.sql'
2417--- database/schema/pending/patch-2207-49-0.sql 1970-01-01 00:00:00 +0000
2418+++ database/schema/pending/patch-2207-49-0.sql 2010-04-27 02:13:38 +0000
2419@@ -0,0 +1,16 @@
2420+SET client_min_messages=ERROR;
2421+
2422+DROP VIEW RevisionNumber;
2423+
2424+ALTER TABLE BranchRevision DROP COLUMN id;
2425+ALTER TABLE BranchRevision
2426+ ADD CONSTRAINT branchrevision_pkey
2427+ PRIMARY KEY (branch, revision);
2428+ALTER TABLE BranchRevision
2429+ DROP CONSTRAINT revision__branch__revision__key,
2430+ DROP CONSTRAINT revision__revision__branch__key,
2431+ DROP CONSTRAINT revisionnumber_branch_sequence_unique;
2432+CREATE UNIQUE INDEX branchrevision__branch__sequence__key
2433+ ON BranchRevision (branch, sequence) WHERE sequence IS NOT NULL;
2434+
2435+INSERT INTO LaunchpadDatabaseRevision VALUES (2207, 49, 0);
2436
2437=== modified file 'database/schema/pending/prune-nonce.py'
2438--- database/schema/pending/prune-nonce.py 2009-10-17 14:06:03 +0000
2439+++ database/schema/pending/prune-nonce.py 2010-04-27 02:13:38 +0000
2440@@ -1,4 +1,4 @@
2441-#!/usr/bin/python2.5
2442+#!/usr/bin/python2.5 -S
2443 #
2444 # Copyright 2009 Canonical Ltd. This software is licensed under the
2445 # GNU Affero General Public License version 3 (see the file LICENSE).
2446
2447=== modified file 'database/schema/pending/update-shippingrequest-types.py'
2448--- database/schema/pending/update-shippingrequest-types.py 2009-10-17 14:06:03 +0000
2449+++ database/schema/pending/update-shippingrequest-types.py 2010-04-27 02:13:38 +0000
2450@@ -1,4 +1,4 @@
2451-#!/usr/bin/python2.5
2452+#!/usr/bin/python2.5 -S
2453 #
2454 # Copyright 2009 Canonical Ltd. This software is licensed under the
2455 # GNU Affero General Public License version 3 (see the file LICENSE).
2456
2457=== modified file 'database/schema/pending/update-translation-credits.py'
2458--- database/schema/pending/update-translation-credits.py 2009-10-17 14:06:03 +0000
2459+++ database/schema/pending/update-translation-credits.py 2010-04-27 02:13:38 +0000
2460@@ -1,4 +1,4 @@
2461-#!/usr/bin/python2.5
2462+#!/usr/bin/python2.5 -S
2463 #
2464 # Copyright 2009 Canonical Ltd. This software is licensed under the
2465 # GNU Affero General Public License version 3 (see the file LICENSE).
2466@@ -54,7 +54,7 @@
2467 pomsgid
2468 WHERE
2469 posubmission.active IS TRUE AND
2470- posubmission.pomsgset=pomsgset.id AND
2471+ posubmission.pomsgset=pomsgset.id AND
2472 potmsgset=potmsgset.id AND
2473 primemsgid=pomsgid.id AND
2474 published IS NOT TRUE AND
2475@@ -77,7 +77,7 @@
2476 pomsgid
2477 WHERE
2478 posubmission.active IS FALSE AND
2479- posubmission.pomsgset=pomsgset.id AND
2480+ posubmission.pomsgset=pomsgset.id AND
2481 pomsgset.potmsgset=potmsgset.id AND
2482 potmsgset.primemsgid=pomsgid.id AND
2483 posubmission.published IS TRUE AND
2484
2485=== modified file 'database/schema/reset_sequences.py'
2486--- database/schema/reset_sequences.py 2009-10-17 14:06:03 +0000
2487+++ database/schema/reset_sequences.py 2010-04-27 02:13:38 +0000
2488@@ -1,4 +1,4 @@
2489-#!/usr/bin/python2.5
2490+#!/usr/bin/python2.5 -S
2491 #
2492 # Copyright 2009 Canonical Ltd. This software is licensed under the
2493 # GNU Affero General Public License version 3 (see the file LICENSE).
2494
2495=== modified file 'database/schema/security.cfg'
2496--- database/schema/security.cfg 2010-04-21 19:41:18 +0000
2497+++ database/schema/security.cfg 2010-04-27 02:13:38 +0000
2498@@ -35,20 +35,27 @@
2499 public.is_printable_ascii(text) = EXECUTE
2500 public.launchpaddatabaserevision = SELECT
2501 public.name_blacklist_match(text) = EXECUTE
2502-public.fticache =
2503 public.pillarname = SELECT
2504 public.ulower(text) = EXECUTE
2505-public._killall_backends(text) =
2506 public.generate_openid_identifier() = EXECUTE
2507 public.getlocalnodeid() = EXECUTE
2508 public.replication_lag() = EXECUTE
2509 public.replication_lag(integer) = EXECUTE
2510 public.assert_patch_applied(integer, integer, integer) = EXECUTE
2511+# Explicitly state 'no permissions on these objects' to silence
2512+# security.py warnings.
2513+public.fticache =
2514+public._killall_backends(text) =
2515 public.exclusivelocks =
2516 public.alllocks =
2517 public.pgstattuple(oid) =
2518 public.pgstattuple(text) =
2519 public.bugnotificationarchive =
2520+public.lp_account =
2521+public.lp_personlocation =
2522+public.lp_person =
2523+public.lp_teamparticipation =
2524+public.bug_update_latest_patch_uploaded(integer) =
2525
2526 [ro]
2527 # A user with full readonly access to the database. Generally used for
2528@@ -61,59 +68,6 @@
2529 type=user
2530 groups=admin
2531
2532-[sso_auth]
2533-# authdb replication set write access from the SSO service.
2534-type=user
2535-public.account = SELECT, INSERT, UPDATE, DELETE
2536-public.accountpassword = SELECT, INSERT, UPDATE, DELETE
2537-public.authtoken = SELECT, INSERT, UPDATE, DELETE
2538-public.emailaddress = SELECT, INSERT, UPDATE, DELETE
2539-public.openidrpsummary = SELECT, INSERT, UPDATE
2540-public.openidassociation = SELECT, INSERT, DELETE
2541-public.openidnonce = SELECT, INSERT, DELETE
2542-public.openidauthorization = SELECT, INSERT, UPDATE, DELETE
2543-public.person = SELECT
2544-public.personlocation = SELECT
2545-public.teamparticipation = SELECT
2546-
2547-[sso_main]
2548-# main replication set access from the SSO service login.launchpad.net
2549-type=user
2550-public.language = SELECT
2551-public.openidrpconfig = SELECT
2552-public.person = SELECT
2553-public.personlanguage = SELECT
2554-public.personlocation = SELECT
2555-public.shippingrequest = SELECT
2556-public.teammembership = SELECT
2557-public.teamparticipation = SELECT
2558-public.validpersoncache = SELECT
2559-# Needed for person.preferredemail to work.
2560-public.emailaddress = SELECT
2561-# Needed for OpenID login to work - Bug #352727
2562-public.country = SELECT
2563-# Needed for display of OpenID consumer logo per Bug #353926
2564-public.libraryfilealias = SELECT
2565-public.libraryfilecontent = SELECT
2566-
2567-[launchpad_auth]
2568-# authdb replication set access from the main Z3 application.
2569-type=user
2570-public.account = SELECT, INSERT, UPDATE, DELETE
2571-public.accountpassword = SELECT, INSERT, UPDATE, DELETE
2572-public.authtoken = SELECT, INSERT, UPDATE
2573-public.emailaddress = SELECT, INSERT, UPDATE, DELETE
2574-public.language = SELECT
2575-public.openidrpconfig = SELECT
2576-public.openidrpsummary = SELECT
2577-public.person = SELECT
2578-public.personlanguage = SELECT
2579-public.teammembership = SELECT
2580-public.teamparticipation = SELECT
2581-# XXX 2009-05-07 stub bug=373252: SELECT and DELETE permissions required
2582-# for garbo.py. INSERT permission needed for the tests.
2583-public.openidassociation = SELECT, INSERT, DELETE
2584-
2585 [launchpad_main]
2586 # lpmain replication set access from the main Z3 application.
2587 type=user
2588@@ -884,34 +838,6 @@
2589 public.country = SELECT
2590 public.parsedapachelog = SELECT, INSERT, UPDATE
2591
2592-[sourcerer]
2593-type=user
2594-groups=script
2595-public.archive = SELECT
2596-public.archivearch = SELECT
2597-public.branch = SELECT, INSERT, UPDATE
2598-public.revision = SELECT, INSERT, UPDATE
2599-# Karma
2600-public.karma = SELECT, INSERT
2601-public.karmaaction = SELECT
2602-# To get at a source package's manifest
2603-public.distribution = SELECT
2604-public.distroseries = SELECT
2605-public.sourcepackagename = SELECT
2606-public.sourcepackagepublishinghistory = SELECT
2607-public.sourcepackagerelease = SELECT, UPDATE
2608-public.sourcepackagereleasefile = SELECT
2609-# To get at an upstream product's manifest
2610-public.product = SELECT
2611-public.productseries = SELECT
2612-public.productrelease = SELECT, UPDATE
2613-public.productreleasefile = SELECT
2614-# To get from source package to upstream
2615-public.packaging = SELECT
2616-# To get stuff from the librarian
2617-public.libraryfilealias = SELECT
2618-public.libraryfilecontent = SELECT
2619-
2620 [write]
2621 type=group
2622 # Full access except for tables that are exclusively updated by
2623@@ -1688,7 +1614,7 @@
2624 public.teamparticipation = SELECT
2625 public.validpersoncache = SELECT
2626
2627-[mp-creation-job]
2628+[merge-proposal-jobs]
2629 type=user
2630 groups=script
2631 public.account = SELECT
2632@@ -1725,18 +1651,6 @@
2633 public.teamparticipation = SELECT
2634 public.validpersoncache = SELECT
2635
2636-[update-preview-diffs]
2637-type=user
2638-groups=script
2639-public.branch = SELECT
2640-public.branchmergeproposal = SELECT, UPDATE
2641-public.branchmergeproposaljob = SELECT
2642-public.diff = SELECT, INSERT
2643-public.job = SELECT, UPDATE
2644-public.libraryfilealias = SELECT, INSERT
2645-public.libraryfilecontent = SELECT, INSERT
2646-public.previewdiff = SELECT, INSERT
2647-
2648 [upgrade-branches]
2649 type=user
2650 groups=script
2651
2652=== modified file 'database/schema/security.py'
2653--- database/schema/security.py 2010-02-09 01:31:05 +0000
2654+++ database/schema/security.py 2010-04-27 02:13:38 +0000
2655@@ -1,4 +1,4 @@
2656-#!/usr/bin/python2.5
2657+#!/usr/bin/python2.5 -S
2658 #
2659 # Copyright 2009 Canonical Ltd. This software is licensed under the
2660 # GNU Affero General Public License version 3 (see the file LICENSE).
2661
2662=== modified file 'database/schema/sort_sql.py'
2663--- database/schema/sort_sql.py 2009-10-17 14:06:03 +0000
2664+++ database/schema/sort_sql.py 2010-04-27 02:13:38 +0000
2665@@ -1,4 +1,4 @@
2666-#!/usr/bin/python2.5
2667+#!/usr/bin/python2.5 -S
2668 #
2669 # Copyright 2009 Canonical Ltd. This software is licensed under the
2670 # GNU Affero General Public License version 3 (see the file LICENSE).
2671
2672=== modified file 'database/schema/trusted.sql'
2673--- database/schema/trusted.sql 2010-03-26 08:25:24 +0000
2674+++ database/schema/trusted.sql 2010-04-27 02:13:38 +0000
2675@@ -1470,36 +1470,36 @@
2676
2677 -- Update the (redundant) column bug.latest_patch_uploaded when
2678 -- a bug attachment is added or removed or if its type is changed.
2679-CREATE OR REPLACE FUNCTION bug_update_latest_patch_uploaded(integer) RETURNS VOID
2680- SECURITY DEFINER LANGUAGE plpgsql AS
2681- $$
2682- BEGIN
2683- UPDATE bug SET latest_patch_uploaded =
2684- (SELECT max(message.datecreated)
2685- FROM message, bugattachment
2686- WHERE bugattachment.message=message.id AND
2687- bugattachment.bug=$1 AND
2688- bugattachment.type=1)
2689- WHERE bug.id=$1;
2690- END;
2691- $$;
2692-
2693-
2694-CREATE OR REPLACE FUNCTION bug_update_latest_patch_uploaded_on_insert_update() RETURNS trigger
2695- SECURITY DEFINER LANGUAGE plpgsql AS
2696- $$
2697- BEGIN
2698- PERFORM bug_update_latest_patch_uploaded(NEW.bug);
2699- RETURN NULL; -- Ignored - this is an AFTER trigger
2700- END;
2701- $$;
2702-
2703-
2704-CREATE OR REPLACE FUNCTION bug_update_latest_patch_uploaded_on_delete() RETURNS trigger
2705- SECURITY DEFINER LANGUAGE plpgsql AS
2706- $$
2707- BEGIN
2708- PERFORM bug_update_latest_patch_uploaded(OLD.bug);
2709- RETURN NULL; -- Ignored - this is an AFTER trigger
2710- END;
2711- $$;
2712+CREATE OR REPLACE FUNCTION bug_update_latest_patch_uploaded(integer)
2713+RETURNS VOID SECURITY DEFINER LANGUAGE plpgsql AS
2714+$$
2715+BEGIN
2716+ UPDATE bug SET latest_patch_uploaded =
2717+ (SELECT max(message.datecreated)
2718+ FROM message, bugattachment
2719+ WHERE bugattachment.message=message.id AND
2720+ bugattachment.bug=$1 AND
2721+ bugattachment.type=1)
2722+ WHERE bug.id=$1;
2723+END;
2724+$$;
2725+
2726+
2727+CREATE OR REPLACE FUNCTION bug_update_latest_patch_uploaded_on_insert_update()
2728+RETURNS trigger SECURITY DEFINER LANGUAGE plpgsql AS
2729+$$
2730+BEGIN
2731+ PERFORM bug_update_latest_patch_uploaded(NEW.bug);
2732+ RETURN NULL; -- Ignored - this is an AFTER trigger
2733+END;
2734+$$;
2735+
2736+
2737+CREATE OR REPLACE FUNCTION bug_update_latest_patch_uploaded_on_delete()
2738+RETURNS trigger SECURITY DEFINER LANGUAGE plpgsql AS
2739+$$
2740+BEGIN
2741+ PERFORM bug_update_latest_patch_uploaded(OLD.bug);
2742+ RETURN NULL; -- Ignored - this is an AFTER trigger
2743+END;
2744+$$;
2745
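As an aside, a minimal sketch of exercising the function above from Python, assuming a psycopg2 connection to a database where trusted.sql has been applied (the connection string and bug id are placeholders):

    import psycopg2

    # Placeholder connection details; adjust for the local setup.
    conn = psycopg2.connect("dbname=launchpad_dev")
    cur = conn.cursor()
    bug_id = 1  # hypothetical bug id
    # Recompute bug.latest_patch_uploaded for one bug by calling the
    # SECURITY DEFINER function defined above.
    cur.execute("SELECT bug_update_latest_patch_uploaded(%s)", (bug_id,))
    conn.commit()
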
2746=== modified file 'database/schema/unautovacuumable.py'
2747--- database/schema/unautovacuumable.py 2009-11-06 21:58:50 +0000
2748+++ database/schema/unautovacuumable.py 2010-04-27 02:13:38 +0000
2749@@ -1,4 +1,4 @@
2750-#!/usr/bin/python2.5
2751+#!/usr/bin/python2.5 -S
2752 #
2753 # Copyright 2009 Canonical Ltd. This software is licensed under the
2754 # GNU Affero General Public License version 3 (see the file LICENSE).
2755
2756=== modified file 'database/schema/upgrade.py'
2757--- database/schema/upgrade.py 2009-10-17 14:06:03 +0000
2758+++ database/schema/upgrade.py 2010-04-27 02:13:38 +0000
2759@@ -1,4 +1,4 @@
2760-#!/usr/bin/python2.5
2761+#!/usr/bin/python2.5 -S
2762 #
2763 # Copyright 2009 Canonical Ltd. This software is licensed under the
2764 # GNU Affero General Public License version 3 (see the file LICENSE).
2765@@ -249,9 +249,10 @@
2766 id=@holding_set,
2767 provider=@master_node, receiver=@node%d_node, forward=yes);
2768 echo 'Waiting for sync';
2769- sync (id=1);
2770+ sync (id=@master_node);
2771 wait for event (
2772- origin=ALL, confirmed=ALL, wait on=@master_node, timeout=0
2773+ origin=@master_node, confirmed=ALL,
2774+ wait on=@master_node, timeout=0
2775 );
2776 """ % (slave_node.node_id, slave_node.node_id))
2777
2778@@ -281,7 +282,7 @@
2779 (fqn(nspname, relname), tab_id)
2780 for nspname, relname, tab_id in cur.fetchall())
2781
2782- # Generate a slonik script to remove tables from the replication set,
2783+ # Generate a slonik script to remove tables from the replication set,
2784 # and a DROP TABLE/DROP SEQUENCE sql script to run after.
2785 if tabs_to_drop:
2786 log.info("Dropping tables: %s" % ', '.join(
2787
2788=== modified file 'lib/canonical/config/schema-lazr.conf'
2789--- lib/canonical/config/schema-lazr.conf 2010-04-19 03:44:27 +0000
2790+++ lib/canonical/config/schema-lazr.conf 2010-04-27 02:13:38 +0000
2791@@ -402,6 +402,11 @@
2792 # mapping done by branch-rewrite.py for.
2793 branch_rewrite_cache_lifetime: 10
2794
2795+# Update preview diff ready timeout
2796+#
2797+# How long, in minutes, we wait for a branch to be ready in order to
2798+# generate a diff for a merge proposal (in the UpdatePreviewDiffJob).
2799+update_preview_diff_ready_timeout: 15
2800
2801 [codeimport]
2802 # Where the Bazaar imports are stored.
2803@@ -1419,6 +1424,25 @@
2804 port: 11217
2805
2806
2807+[merge_proposal_jobs]
2808+# The database user which will be used by this process.
2809+# datatype: string
2810+dbuser: merge-proposal-jobs
2811+storm_cache: generational
2812+storm_cache_size: 500
2813+
2814+# See [error_reports].
2815+error_dir: none
2816+
2817+# See [error_reports].
2818+oops_prefix: none
2819+
2820+# See [error_reports].
2821+copy_to_zlog: false
2822+
2823+##
2824+## TODO: delete mpcreationjobs section after 10.04 rollout.
2825+##
2826 [mpcreationjobs]
2827 # The database user which will be used by this process.
2828 # datatype: string
2829@@ -1435,6 +1459,9 @@
2830 # See [error_reports].
2831 copy_to_zlog: false
2832
2833+##
2834+## TODO: delete update_preview_diffs section after 10.04 rollout.
2835+##
2836 [update_preview_diffs]
2837 dbuser: update-preview-diffs
2838
2839@@ -1447,6 +1474,7 @@
2840 # See [error_reports].
2841 copy_to_zlog: false
2842
2843+
2844 [upgrade_branches]
2845 dbuser: upgrade-branches
2846
2847
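As a rough sketch of how the new [merge_proposal_jobs] section is consumed at runtime, assuming the attribute-style access that canonical.config provides (the printed values mirror the keys defined above):

    from canonical.config import config

    # Each schema section is exposed as an attribute of the global
    # config object, and each key as an attribute of the section.
    section = config.merge_proposal_jobs
    print section.dbuser       # merge-proposal-jobs
    print section.storm_cache  # generational
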
2848=== added directory 'lib/canonical/launchpad/apidoc'
2849=== removed directory 'lib/canonical/launchpad/apidoc'
2850=== modified file 'lib/canonical/launchpad/daemons/tachandler.py'
2851--- lib/canonical/launchpad/daemons/tachandler.py 2010-04-05 09:22:54 +0000
2852+++ lib/canonical/launchpad/daemons/tachandler.py 2010-04-27 02:13:38 +0000
2853@@ -164,8 +164,8 @@
2854 def _waitForDaemonStartup(self):
2855 """ Wait for the daemon to fully start.
2856
2857- Times out after 20 seconds. If that happens, the log file will
2858- not be cleaned up so the user can post-mortem it.
2859+ Times out after 20 seconds. If that happens, the log file content
2860+ will be included in the exception message for debugging purposes.
2861
2862 :raises TacException: Timeout.
2863 """
2864@@ -178,8 +178,8 @@
2865 now = time.time()
2866
2867 if now >= deadline:
2868- raise TacException('Unable to start %s. Check %s.' % (
2869- self.tacfile, self.logfile))
2870+ raise TacException('Unable to start %s. Content of %s:\n%s' % (
2871+ self.tacfile, self.logfile, open(self.logfile).read()))
2872
2873 def tearDown(self):
2874 self.killTac()
2875
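The startup wait above is an instance of the common poll-with-deadline pattern; a minimal generic sketch of that pattern (not the Launchpad implementation) might look like:

    import time

    def wait_until(predicate, timeout=20.0, poll=0.1):
        """Poll predicate until it returns True or the deadline passes.

        Returns False on timeout so the caller can attach diagnostics
        (such as a daemon's log file contents) to its own exception.
        """
        deadline = time.time() + timeout
        while time.time() < deadline:
            if predicate():
                return True
            time.sleep(poll)
        return False
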
2876=== modified file 'lib/canonical/launchpad/doc/product-update-remote-product-script.txt'
2877--- lib/canonical/launchpad/doc/product-update-remote-product-script.txt 2009-03-27 03:29:31 +0000
2878+++ lib/canonical/launchpad/doc/product-update-remote-product-script.txt 2010-04-27 02:13:38 +0000
2879@@ -14,7 +14,7 @@
2880 0
2881
2882 >>> print err
2883- INFO creating lockfile
2884+ INFO Creating lockfile: /var/lock/launchpad-updateremoteproduct.lock
2885 INFO 0 projects using BUGZILLA needing updating.
2886 ...
2887 INFO 0 projects using RT needing updating.
2888
2889=== modified file 'lib/canonical/launchpad/scripts/garbo.py'
2890--- lib/canonical/launchpad/scripts/garbo.py 2010-04-08 08:55:10 +0000
2891+++ lib/canonical/launchpad/scripts/garbo.py 2010-04-27 02:13:38 +0000
2892@@ -26,9 +26,7 @@
2893 from canonical.launchpad.database.openidconsumer import OpenIDConsumerNonce
2894 from canonical.launchpad.interfaces import IMasterStore
2895 from canonical.launchpad.interfaces.emailaddress import EmailAddressStatus
2896-from canonical.launchpad.interfaces.looptuner import ITunableLoop
2897-from canonical.launchpad.utilities.looptuner import (
2898- DBLoopTuner, TunableLoop)
2899+from canonical.launchpad.utilities.looptuner import TunableLoop
2900 from canonical.launchpad.webapp.interfaces import (
2901 IStoreSelector, MAIN_STORE, MASTER_FLAVOR)
2902 from lp.bugs.interfaces.bug import IBugSet
2903@@ -42,7 +40,6 @@
2904 from lp.code.model.branchjob import BranchJob
2905 from lp.code.model.codeimportresult import CodeImportResult
2906 from lp.code.model.revision import RevisionAuthor, RevisionCache
2907-from lp.registry.model.mailinglist import MailingListSubscription
2908 from lp.registry.model.person import Person
2909 from lp.services.job.model.job import Job
2910 from lp.services.scripts.base import (
2911@@ -346,150 +343,6 @@
2912 transaction.commit()
2913
2914
2915-class MailingListSubscriptionPruner(TunableLoop):
2916- """Prune `MailingListSubscription`s pointing at deleted email addresses.
2917-
2918- Users subscribe to mailing lists with one of their verified email
2919- addresses. When they remove an address, the mailing list
2920- subscription should go away too.
2921- """
2922-
2923- maximum_chunk_size = 1000
2924-
2925- def __init__(self, log, abort_time=None):
2926- super(MailingListSubscriptionPruner, self).__init__(log, abort_time)
2927- self.subscription_store = IMasterStore(MailingListSubscription)
2928- self.email_store = IMasterStore(EmailAddress)
2929-
2930- (self.min_subscription_id,
2931- self.max_subscription_id) = self.subscription_store.find(
2932- (Min(MailingListSubscription.id),
2933- Max(MailingListSubscription.id))).one()
2934-
2935- self.next_subscription_id = self.min_subscription_id
2936-
2937- def isDone(self):
2938- return (self.min_subscription_id is None or
2939- self.next_subscription_id > self.max_subscription_id)
2940-
2941- def __call__(self, chunk_size):
2942- result = self.subscription_store.find(
2943- MailingListSubscription,
2944- MailingListSubscription.id >= self.next_subscription_id,
2945- MailingListSubscription.id < (self.next_subscription_id +
2946- chunk_size))
2947- used_ids = set(result.values(MailingListSubscription.email_addressID))
2948- existing_ids = set(self.email_store.find(
2949- EmailAddress.id, EmailAddress.id.is_in(used_ids)))
2950- deleted_ids = used_ids - existing_ids
2951-
2952- self.subscription_store.find(
2953- MailingListSubscription,
2954- MailingListSubscription.id >= self.next_subscription_id,
2955- MailingListSubscription.id < (self.next_subscription_id +
2956- chunk_size),
2957- MailingListSubscription.email_addressID.is_in(deleted_ids)
2958- ).remove()
2959-
2960- self.next_subscription_id += chunk_size
2961- transaction.commit()
2962-
2963-
2964-class PersonEmailAddressLinkChecker(TunableLoop):
2965- """Report invalid references between the authdb and main replication sets.
2966-
2967- We can't use referential integrity to ensure references remain valid,
2968- so we have to check regularly for any bugs that creep into our code.
2969-
2970- We don't repair links yet, but could add this feature. I'd
2971- rather track down the source of problems and fix problems there
2972- and avoid automatic repair, which might be dangerous. In particular,
2973- replication lag introduces a number of race conditions that would
2974- need to be addressed.
2975- """
2976- maximum_chunk_size = 1000
2977-
2978- def __init__(self, log, abort_time=None):
2979- super(PersonEmailAddressLinkChecker, self).__init__(log, abort_time)
2980-
2981- self.person_store = IMasterStore(Person)
2982- self.email_store = IMasterStore(EmailAddress)
2983-
2984- # This query detects invalid links between Person and EmailAddress.
2985- # The first part detects difference in opionion about what Account
2986- # is linked to. The second part detects EmailAddresses linked to
2987- # non existent Person records.
2988- query = """
2989- SELECT Person.id, EmailAddress.id
2990- FROM EmailAddress, Person
2991- WHERE EmailAddress.person = Person.id
2992- AND Person.account IS DISTINCT FROM EmailAddress.account
2993- UNION
2994- SELECT NULL, EmailAddress.id
2995- FROM EmailAddress LEFT OUTER JOIN Person
2996- ON EmailAddress.person = Person.id
2997- WHERE EmailAddress.person IS NOT NULL
2998- AND Person.id IS NULL
2999- """
3000- # We need to issue this query twice, waiting between calls
3001- # for all pending database changes to replicate. The known
3002- # bad set are the entries common in both results.
3003- bad_links_1 = set(self.person_store.execute(query))
3004- transaction.abort()
3005-
3006- self.blockForReplication()
3007-
3008- bad_links_2 = set(self.person_store.execute(query))
3009- transaction.abort()
3010-
3011- self.bad_links = bad_links_1.intersection(bad_links_2)
3012-
3013- def blockForReplication(self):
3014- start = time.time()
3015- while True:
3016- lag = self.person_store.execute(
3017- "SELECT COALESCE(EXTRACT(EPOCH FROM replication_lag()), 0);"
3018- ).get_one()[0]
3019- if lag < (time.time() - start):
3020- return
3021- # Guestimate on how long we should wait for. We cap
3022- # it as several hours of lag can clear in an instant
3023- # in some cases.
3024- naptime = min(300, lag)
3025- self.log.debug(
3026- "Waiting for replication. Lagged %s secs. Napping %s secs."
3027- % (lag, naptime))
3028- time.sleep(naptime)
3029-
3030- def isDone(self):
3031- return not self.bad_links
3032-
3033- def __call__(self, chunksize):
3034- for counter in range(0, int(chunksize)):
3035- if not self.bad_links:
3036- return
3037- person_id, emailaddress_id = self.bad_links.pop()
3038- if person_id is None:
3039- person = None
3040- else:
3041- person = self.person_store.get(Person, person_id)
3042- emailaddress = self.email_store.get(EmailAddress, emailaddress_id)
3043- self.report(person, emailaddress)
3044- # We don't repair... yet.
3045- # self.repair(person, emailaddress)
3046- transaction.abort()
3047-
3048- def report(self, person, emailaddress):
3049- if person is None:
3050- self.log.error(
3051- "Corruption - '%s' is linked to a non-existant Person."
3052- % emailaddress.email)
3053- else:
3054- self.log.error(
3055- "Corruption - '%s' and '%s' reference different Accounts."
3056- % (emailaddress.email, person.name))
3057-
3058-
3059 class PersonPruner(TunableLoop):
3060
3061 maximum_chunk_size = 1000
3062@@ -662,7 +515,7 @@
3063 def __call__(self, chunk_size):
3064 chunk_size = int(chunk_size)
3065 ids_to_remove = list(self._ids_to_remove()[:chunk_size])
3066- num_removed = self.job_store.find(
3067+ self.job_store.find(
3068 BranchJob,
3069 In(BranchJob.id, ids_to_remove)).remove()
3070 transaction.commit()
3071@@ -918,8 +771,6 @@
3072 CodeImportResultPruner,
3073 RevisionAuthorEmailLinker,
3074 HWSubmissionEmailLinker,
3075- MailingListSubscriptionPruner,
3076- PersonEmailAddressLinkChecker,
3077 BugNotificationPruner,
3078 BranchJobPruner,
3079 BugWatchActivityPruner,
3080
3081=== modified file 'lib/canonical/launchpad/scripts/tests/test_garbo.py'
3082--- lib/canonical/launchpad/scripts/tests/test_garbo.py 2010-04-13 01:49:42 +0000
3083+++ lib/canonical/launchpad/scripts/tests/test_garbo.py 2010-04-27 02:13:38 +0000
3084@@ -351,26 +351,6 @@
3085 LaunchpadZopelessLayer.switchDbUser('testadmin')
3086 self.assertEqual(sub3.owner, person3)
3087
3088- def test_MailingListSubscriptionPruner(self):
3089- LaunchpadZopelessLayer.switchDbUser('testadmin')
3090- team, mailing_list = self.factory.makeTeamAndMailingList(
3091- 'mlist-team', 'mlist-owner')
3092- person = self.factory.makePerson(email='preferred@example.org')
3093- email = self.factory.makeEmail('secondary@example.org', person)
3094- transaction.commit()
3095- mailing_list.subscribe(person, email)
3096-
3097- # User remains subscribed if we run the garbage collector.
3098- self.runDaily()
3099- self.assertNotEqual(mailing_list.getSubscription(person), None)
3100-
3101- # If we remove the email address that was subscribed, the
3102- # garbage collector removes the subscription.
3103- LaunchpadZopelessLayer.switchDbUser('testadmin')
3104- Store.of(email).remove(email)
3105- self.runDaily()
3106- self.assertEqual(mailing_list.getSubscription(person), None)
3107-
3108 def test_PersonPruner(self):
3109 personset = getUtility(IPersonSet)
3110 # Switch the DB user because the garbo_daily user isn't allowed to
3111@@ -466,42 +446,6 @@
3112 BugNotification.date_emailed < THIRTY_DAYS_AGO).count(),
3113 0)
3114
3115- def test_PersonEmailAddressLinkChecker(self):
3116- LaunchpadZopelessLayer.switchDbUser('testadmin')
3117-
3118- # Make an EmailAddress record reference a non-existant Person.
3119- emailaddress = IMasterStore(EmailAddress).get(EmailAddress, 16)
3120- emailaddress.personID = -1
3121-
3122- # Make a Person record reference a different Account to its
3123- # EmailAddress records.
3124- person = IMasterStore(Person).get(Person, 1)
3125- person_email = Store.of(person).find(
3126- EmailAddress, person=person).any()
3127- person.accountID = -1
3128-
3129- # Run the garbage collector. We should get two ERROR reports
3130- # about the corrupt data.
3131- collector = self.runDaily()
3132-
3133- # The PersonEmailAddressLinkChecker is not intelligent enough
3134- # to repair corruption. It is only there to alert us to the
3135- # issue so data can be manually repaired and the cause
3136- # tracked down and fixed.
3137- self.assertEqual(emailaddress.personID, -1)
3138- self.assertNotEqual(person.accountID, person_email.accountID)
3139-
3140- # The corruption has been reported though as a ERROR messages.
3141- log_output = collector.logger.output_file.getvalue()
3142- error_message_1 = (
3143- "ERROR Corruption - "
3144- "'test@canonical.com' is linked to a non-existant Person.")
3145- self.assertNotEqual(log_output.find(error_message_1), -1)
3146- error_message_2 = (
3147- "ERROR Corruption - "
3148- "'mark@example.com' and 'mark' reference different Accounts")
3149- self.assertNotEqual(log_output.find(error_message_2), -1)
3150-
3151 def test_BranchJobPruner(self):
3152 # Garbo should remove jobs completed over 30 days ago.
3153 self.useBzrBranches()
3154
3155=== modified file 'lib/contrib/glock.py'
3156--- lib/contrib/glock.py 2007-01-29 18:48:21 +0000
3157+++ lib/contrib/glock.py 2010-04-27 02:13:38 +0000
3158@@ -157,7 +157,7 @@
3159 the caller decided not to block.
3160 """
3161 if self.logger:
3162- self.logger.info('creating lockfile')
3163+ self.logger.info('Creating lockfile: %s', self.fpath)
3164 if _windows:
3165 if blocking:
3166 timeout = win32event.INFINITE
3167
3168=== modified file 'lib/lp/answers/doc/expiration.txt'
3169--- lib/lp/answers/doc/expiration.txt 2009-07-23 17:49:31 +0000
3170+++ lib/lp/answers/doc/expiration.txt 2010-04-27 02:13:38 +0000
3171@@ -132,7 +132,7 @@
3172 ... stderr=subprocess.PIPE)
3173 >>> (out, err) = process.communicate()
3174 >>> print err
3175- INFO creating lockfile
3176+ INFO Creating lockfile: /var/lock/launchpad-expire-questions.lock
3177 INFO Expiring OPEN and NEEDSINFO questions without activity for the
3178 last 15 days.
3179 INFO Found 5 questions to expire.
3180
3181=== modified file 'lib/lp/archivepublisher/publishing.py'
3182--- lib/lp/archivepublisher/publishing.py 2010-02-09 00:17:40 +0000
3183+++ lib/lp/archivepublisher/publishing.py 2010-04-27 02:13:38 +0000
3184@@ -12,6 +12,7 @@
3185 import hashlib
3186 import logging
3187 import os
3188+import shutil
3189
3190 from datetime import datetime
3191
3192@@ -29,7 +30,7 @@
3193 from canonical.database.sqlbase import sqlvalues
3194 from lp.registry.interfaces.pocket import (
3195 PackagePublishingPocket, pocketsuffix)
3196-from lp.soyuz.interfaces.archive import ArchivePurpose
3197+from lp.soyuz.interfaces.archive import ArchivePurpose, ArchiveStatus
3198 from lp.soyuz.interfaces.binarypackagerelease import (
3199 BinaryPackageFormat)
3200 from lp.soyuz.interfaces.component import IComponentSet
3201@@ -596,3 +597,32 @@
3202 in_file.close()
3203
3204 out_file.write(" %s % 16d %s\n" % (checksum, length, file_name))
3205+
3206+ def deleteArchive(self):
3207+ """Delete the archive.
3208+
3209+ Physically remove the entire archive from disk and set the archive's
3210+ status to DELETED.
3211+
3212+ Any errors encountered while removing the archive from disk will
3213+ be caught and logged as warnings.
3214+ """
3215+
3216+ root_dir = os.path.join(
3217+ self._config.distroroot, self.archive.owner.name,
3218+ self.archive.name)
3219+
3220+ self.log.info(
3221+ "Attempting to delete archive '%s/%s' at '%s'." % (
3222+ self.archive.owner.name, self.archive.name, root_dir))
3223+
3224+ try:
3225+ shutil.rmtree(root_dir)
3226+ except (shutil.Error, OSError), e:
3227+ self.log.warning(
3228+ "Failed to delete directory '%s' for archive '%s/%s'\n%s" % (
3229+ root_dir, self.archive.owner.name,
3230+ self.archive.name, e))
3231+
3232+ self.archive.status = ArchiveStatus.DELETED
3233+ self.archive.publish = False
3234
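deleteArchive is written so that a second call does not fail even when the tree is already gone; a hypothetical standalone helper illustrating that idempotent-removal pattern (the helper name is illustrative, not Launchpad API):

    import errno
    import shutil

    def remove_tree_if_present(path, log):
        # Remove path recursively, tolerating its prior removal so
        # that repeated calls remain safe.
        try:
            shutil.rmtree(path)
        except OSError, e:
            if e.errno == errno.ENOENT:
                log.info("Directory '%s' already removed." % path)
            else:
                log.warning("Failed to remove '%s': %s" % (path, e))
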
3235=== modified file 'lib/lp/archivepublisher/scripts/generate_ppa_htaccess.py'
3236--- lib/lp/archivepublisher/scripts/generate_ppa_htaccess.py 2009-10-17 14:06:03 +0000
3237+++ lib/lp/archivepublisher/scripts/generate_ppa_htaccess.py 2010-04-27 02:13:38 +0000
3238@@ -17,7 +17,7 @@
3239 from canonical.launchpad.webapp import canonical_url
3240
3241 from lp.archivepublisher.config import getPubConfig
3242-from lp.soyuz.interfaces.archive import IArchiveSet
3243+from lp.soyuz.interfaces.archive import IArchiveSet, ArchiveStatus
3244 from lp.soyuz.interfaces.archiveauthtoken import (
3245 IArchiveAuthTokenSet)
3246 from lp.soyuz.interfaces.archivesubscriber import (
3247@@ -241,6 +241,13 @@
3248 ppa.name,
3249 ppa.owner.displayname)
3250 continue
3251+ elif ppa.status == ArchiveStatus.DELETED or ppa.enabled is False:
3252+ self.logger.info(
3253+ "Skipping htacess updates for deleted or disabled PPA "
3254+ " '%s' owned by %s.",
3255+ ppa.name,
3256+ ppa.owner.displayname)
3257+ continue
3258
3259 self.ensureHtaccess(ppa)
3260 temp_htpasswd = self.generateHtpasswd(ppa, valid_tokens)
3261
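The new skip rule in the loop above condenses to a one-line predicate; a hypothetical helper expressing it (the helper name is illustrative; ArchiveStatus is imported as in the diff):

    from lp.soyuz.interfaces.archive import ArchiveStatus

    def should_skip_htaccess_update(ppa):
        # Deleted or disabled PPAs no longer receive htaccess/htpasswd
        # updates; this mirrors the elif branch added above.
        return ppa.status == ArchiveStatus.DELETED or not ppa.enabled
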
3262=== modified file 'lib/lp/archivepublisher/tests/test_generate_ppa_htaccess.py'
3263--- lib/lp/archivepublisher/tests/test_generate_ppa_htaccess.py 2010-03-09 07:29:18 +0000
3264+++ lib/lp/archivepublisher/tests/test_generate_ppa_htaccess.py 2010-04-27 02:13:38 +0000
3265@@ -24,6 +24,7 @@
3266 from lp.archivepublisher.config import getPubConfig
3267 from lp.archivepublisher.scripts.generate_ppa_htaccess import (
3268 HtaccessTokenGenerator)
3269+from lp.soyuz.interfaces.archive import ArchiveStatus
3270 from lp.soyuz.interfaces.archivesubscriber import (
3271 ArchiveSubscriberStatus)
3272 from lp.testing import TestCaseWithFactory
3273@@ -476,6 +477,47 @@
3274 self.assertFalse(os.path.isfile(htaccess))
3275 self.assertFalse(os.path.isfile(htpasswd))
3276
3277+ def testSkippingOfDisabledPPAs(self):
3278+ """Test that the htaccess for disabled PPAs are not touched."""
3279+ subs, tokens = self.setupDummyTokens()
3280+ htaccess, htpasswd = self.ensureNoFiles()
3281+
3282+ # Set up a subscription so that htaccess/htpasswd is pending generation.
3283+ now = datetime.now(pytz.UTC)
3284+ subs[0].date_expires = now + timedelta(minutes=3)
3285+ self.assertEqual(subs[0].status, ArchiveSubscriberStatus.CURRENT)
3286+
3287+ # Set the PPA as disabled.
3288+ self.ppa.disable()
3289+ self.assertFalse(self.ppa.enabled)
3290+
3291+ script = self.getScript()
3292+ script.main()
3293+
3294+ # The htaccess and htpasswd files should not be generated.
3295+ self.assertFalse(os.path.isfile(htaccess))
3296+ self.assertFalse(os.path.isfile(htpasswd))
3297+
3298+ def testSkippingOfDeletedPPAs(self):
3299+ """Test that the htaccess for deleted PPAs are not touched."""
3300+ subs, tokens = self.setupDummyTokens()
3301+ htaccess, htpasswd = self.ensureNoFiles()
3302+
3303+ # Set up a subscription so that htaccess/htpasswd is pending generation.
3304+ now = datetime.now(pytz.UTC)
3305+ subs[0].date_expires = now + timedelta(minutes=3)
3306+ self.assertEqual(subs[0].status, ArchiveSubscriberStatus.CURRENT)
3307+
3308+ # Set the PPA as deleted.
3309+ self.ppa.status = ArchiveStatus.DELETED
3310+
3311+ script = self.getScript()
3312+ script.main()
3313+
3314+ # The htaccess and htpasswd files should not be generated.
3315+ self.assertFalse(os.path.isfile(htaccess))
3316+ self.assertFalse(os.path.isfile(htpasswd))
3317+
3318 def testSendingCancellationEmail(self):
3319 """Test that when a token is deactivated, its user gets an email.
3320
3321
3322=== modified file 'lib/lp/archivepublisher/tests/test_publisher.py'
3323--- lib/lp/archivepublisher/tests/test_publisher.py 2010-02-10 00:25:55 +0000
3324+++ lib/lp/archivepublisher/tests/test_publisher.py 2010-04-27 02:13:38 +0000
3325@@ -8,7 +8,6 @@
3326
3327 import bz2
3328 import gzip
3329-import hashlib
3330 import os
3331 import shutil
3332 import stat
3333@@ -26,7 +25,7 @@
3334 from canonical.database.constants import UTC_NOW
3335 from canonical.launchpad.ftests.keys_for_tests import gpgkeysdir
3336 from lp.soyuz.interfaces.archive import (
3337- ArchivePurpose, IArchiveSet)
3338+ ArchivePurpose, ArchiveStatus, IArchiveSet)
3339 from lp.soyuz.interfaces.binarypackagerelease import (
3340 BinaryPackageFormat)
3341 from lp.registry.interfaces.distribution import IDistributionSet
3342@@ -37,7 +36,6 @@
3343 from lp.soyuz.interfaces.publishing import PackagePublishingStatus
3344 from lp.archivepublisher.interfaces.archivesigningkey import (
3345 IArchiveSigningKey)
3346-from lp.testing import get_lsb_information
3347 from lp.soyuz.tests.test_publishing import TestNativePublishingBase
3348 from canonical.zeca.ftests.harness import ZecaTestSetup
3349
3350@@ -95,6 +93,32 @@
3351 foo_path = "%s/main/f/foo/foo_666.dsc" % self.pool_dir
3352 self.assertEqual(open(foo_path).read().strip(), 'Hello world')
3353
3354+ def testDeletingPPA(self):
3355+ """Test deleting a PPA"""
3356+ ubuntu_team = getUtility(IPersonSet).getByName('ubuntu-team')
3357+ test_archive = getUtility(IArchiveSet).new(
3358+ distribution=self.ubuntutest, owner=ubuntu_team,
3359+ purpose=ArchivePurpose.PPA)
3360+ publisher = getPublisher(test_archive, None, self.logger)
3361+
3362+ self.assertTrue(os.path.exists(publisher._config.archiveroot))
3363+
3364+ # Create a file inside archiveroot to ensure we're recursive.
3365+ open(os.path.join(
3366+ publisher._config.archiveroot, 'test_file'), 'w').close()
3367+
3368+ publisher.deleteArchive()
3369+ root_dir = os.path.join(
3370+ publisher._config.distroroot, test_archive.owner.name,
3371+ test_archive.name)
3372+ self.assertFalse(os.path.exists(root_dir))
3373+ self.assertEqual(test_archive.status, ArchiveStatus.DELETED)
3374+ self.assertEqual(test_archive.publish, False)
3375+
3376+ # Deleting the archive again must not fail, covering the corner
3377+ # case where an admin has manually removed the repository on disk.
3378+ publisher.deleteArchive()
3379+
3380 def testPublishPartner(self):
3381 """Test that a partner package is published to the right place."""
3382 archive = self.ubuntutest.getArchiveByComponent('partner')
3383@@ -104,8 +128,7 @@
3384 pub_config.poolroot, pub_config.temproot, self.logger)
3385 publisher = Publisher(
3386 self.logger, pub_config, disk_pool, archive)
3387- pub_source = self.getPubSource(archive=archive,
3388- filecontent="I am partner")
3389+ self.getPubSource(archive=archive, filecontent="I am partner")
3390
3391 publisher.A_publish(False)
3392
3393@@ -143,7 +166,7 @@
3394 disk_pool = DiskPool(
3395 pub_config.poolroot, pub_config.temproot, self.logger)
3396 publisher = Publisher(self.logger, pub_config, disk_pool, archive)
3397- pub_source = self.getPubSource(
3398+ self.getPubSource(
3399 archive=archive, filecontent="I am partner",
3400 status=PackagePublishingStatus.PENDING)
3401
3402@@ -230,8 +253,7 @@
3403 self.logger, self.config, self.disk_pool,
3404 self.ubuntutest.main_archive)
3405
3406- pub_source = self.getPubSource(
3407- status=PackagePublishingStatus.PUBLISHED)
3408+ self.getPubSource(status=PackagePublishingStatus.PUBLISHED)
3409
3410 # a new non-careful publisher won't find anything to publish, thus
3411 # no pockets will be *dirtied*.
3412@@ -251,7 +273,7 @@
3413 self.logger, self.config, self.disk_pool,
3414 self.ubuntutest.main_archive)
3415
3416- pub_source = self.getPubSource(
3417+ self.getPubSource(
3418 filecontent='Hello world',
3419 status=PackagePublishingStatus.PUBLISHED)
3420
3421@@ -394,17 +416,17 @@
3422 ubuntu = getUtility(IDistributionSet)['ubuntu']
3423
3424 spiv = person_set.getByName('spiv')
3425- spiv_archive = archive_set.new(
3426+ archive_set.new(
3427 owner=spiv, distribution=ubuntu, purpose=ArchivePurpose.PPA)
3428 name16 = person_set.getByName('name16')
3429- name16_archive = archive_set.new(
3430+ archive_set.new(
3431 owner=name16, distribution=ubuntu, purpose=ArchivePurpose.PPA)
3432
3433- pub_source = self.getPubSource(
3434+ self.getPubSource(
3435 sourcename="foo", filename="foo_1.dsc", filecontent='Hello world',
3436 status=PackagePublishingStatus.PENDING, archive=spiv.archive)
3437
3438- pub_source = self.getPubSource(
3439+ self.getPubSource(
3440 sourcename="foo", filename="foo_1.dsc", filecontent='Hello world',
3441 status=PackagePublishingStatus.PUBLISHED, archive=name16.archive)
3442
3443@@ -467,7 +489,7 @@
3444 pub_source = self.getPubSource(
3445 sourcename="foo", filename="foo_1.dsc", filecontent='Hello world',
3446 status=PackagePublishingStatus.PENDING, archive=cprov.archive)
3447- pub_bin = self.getPubBinaries(
3448+ self.getPubBinaries(
3449 pub_source=pub_source,
3450 description=" My leading spaces are normalised to a single "
3451 "space but not trailing. \n It does nothing, "
3452@@ -478,7 +500,7 @@
3453 ignored_source = self.getPubSource(
3454 status=PackagePublishingStatus.DELETED,
3455 archive=cprov.archive)
3456- pub_udeb = self.getPubBinaries(
3457+ self.getPubBinaries(
3458 pub_source=ignored_source, binaryname='bingo',
3459 description='nice udeb', format=BinaryPackageFormat.UDEB)[0]
3460
3461@@ -609,27 +631,27 @@
3462 # waiting to be deleted, each in different pockets. The deleted
3463 # source in the release pocket should not be processed. We'll
3464 # also have a binary waiting to be deleted.
3465- published_source = self.getPubSource(
3466+ self.getPubSource(
3467 pocket=PackagePublishingPocket.RELEASE,
3468 status=PackagePublishingStatus.PUBLISHED)
3469
3470- deleted_source_in_release_pocket = self.getPubSource(
3471+ self.getPubSource(
3472 pocket=PackagePublishingPocket.RELEASE,
3473 status=PackagePublishingStatus.DELETED)
3474
3475- removed_source = self.getPubSource(
3476+ self.getPubSource(
3477 scheduleddeletiondate=UTC_NOW,
3478 dateremoved=UTC_NOW,
3479 pocket=PackagePublishingPocket.UPDATES,
3480 status=PackagePublishingStatus.DELETED)
3481
3482- deleted_source = self.getPubSource(
3483+ self.getPubSource(
3484 pocket=PackagePublishingPocket.SECURITY,
3485 status=PackagePublishingStatus.DELETED)
3486
3487- deleted_binary = self.getPubBinaries(
3488+ self.getPubBinaries(
3489 pocket=PackagePublishingPocket.BACKPORTS,
3490- status=PackagePublishingStatus.DELETED)[0]
3491+ status=PackagePublishingStatus.DELETED)
3492
3493 # Run the deletion detection.
3494 publisher.A2_markPocketsWithDeletionsDirty()
3495@@ -681,19 +703,19 @@
3496
3497 # Create pending deletions in RELEASE, BACKPORTS, SECURITY and
3498 # UPDATES pockets.
3499- deleted_source = self.getPubSource(
3500+ self.getPubSource(
3501 pocket=PackagePublishingPocket.RELEASE,
3502 status=PackagePublishingStatus.DELETED)
3503
3504- deleted_binary = self.getPubBinaries(
3505+ self.getPubBinaries(
3506 pocket=PackagePublishingPocket.BACKPORTS,
3507 status=PackagePublishingStatus.DELETED)[0]
3508
3509- allowed_source_deletion = self.getPubSource(
3510+ self.getPubSource(
3511 pocket=PackagePublishingPocket.SECURITY,
3512 status=PackagePublishingStatus.DELETED)
3513
3514- allowed_binary_deletion = self.getPubBinaries(
3515+ self.getPubBinaries(
3516 pocket=PackagePublishingPocket.UPDATES,
3517 status=PackagePublishingStatus.DELETED)[0]
3518
3519@@ -763,7 +785,7 @@
3520 self.logger, self.config, self.disk_pool,
3521 self.ubuntutest.main_archive)
3522
3523- pub_source = self.getPubSource(filecontent='Hello world')
3524+ self.getPubSource(filecontent='Hello world')
3525
3526 publisher.A_publish(False)
3527 publisher.C_doFTPArchive(False)
3528@@ -848,8 +870,7 @@
3529 archive_publisher = getPublisher(
3530 cprov.archive, allowed_suites, self.logger)
3531
3532- pub_source = self.getPubSource(
3533- filecontent='Hello world', archive=cprov.archive)
3534+ self.getPubSource(filecontent='Hello world', archive=cprov.archive)
3535
3536 archive_publisher.A_publish(False)
3537 self.layer.txn.commit()
3538@@ -952,8 +973,7 @@
3539 allowed_suites = []
3540 archive_publisher = getPublisher(
3541 named_ppa, allowed_suites, self.logger)
3542- pub_source = self.getPubSource(
3543- filecontent='Hello world', archive=named_ppa)
3544+ self.getPubSource(filecontent='Hello world', archive=named_ppa)
3545
3546 archive_publisher.A_publish(False)
3547 self.layer.txn.commit()
3548@@ -1062,7 +1082,7 @@
3549 Publish files in pool, generate archive indexes and release files.
3550 """
3551 self.setupPublisher(archive)
3552- pub_source = self.getPubSource(archive=archive)
3553+ self.getPubSource(archive=archive)
3554
3555 self.archive_publisher.A_publish(False)
3556 transaction.commit()
3557
3558=== modified file 'lib/lp/bugs/browser/bugwatch.py'
3559--- lib/lp/bugs/browser/bugwatch.py 2010-01-15 03:32:46 +0000
3560+++ lib/lp/bugs/browser/bugwatch.py 2010-04-27 02:13:38 +0000
3561@@ -6,12 +6,15 @@
3562 __metaclass__ = type
3563 __all__ = [
3564 'BugWatchSetNavigation',
3565+ 'BugWatchActivityPortletView',
3566 'BugWatchEditView',
3567 'BugWatchView']
3568
3569+
3570 from zope.component import getUtility
3571 from zope.interface import Interface
3572
3573+from canonical.database.constants import UTC_NOW
3574 from canonical.widgets.textwidgets import URIWidget
3575
3576 from canonical.launchpad import _
3577@@ -21,7 +24,8 @@
3578 from canonical.launchpad.fields import URIField
3579 from canonical.launchpad.webapp.interfaces import ILaunchBag
3580 from lp.bugs.interfaces.bugwatch import (
3581- IBugWatch, IBugWatchSet, NoBugTrackerFound, UnrecognizedBugTrackerURL)
3582+ BUG_WATCH_ACTIVITY_SUCCESS_STATUSES, IBugWatch, IBugWatchSet,
3583+ NoBugTrackerFound, UnrecognizedBugTrackerURL)
3584 from canonical.launchpad.webapp import (
3585 action, canonical_url, custom_widget, GetitemNavigation,
3586 LaunchpadFormView, LaunchpadView)
3587@@ -99,6 +103,11 @@
3588 """See `LaunchpadFormView.`"""
3589 return {'url' : self.context.url}
3590
3591+ @property
3592+ def watch_has_activity(self):
3593+ """Return True if there has been activity on the bug watch."""
3594+ return not self.context.activity.is_empty()
3595+
3596 def validate(self, data):
3597 """See `LaunchpadFormView.`"""
3598 if 'url' not in data:
3599@@ -136,3 +145,53 @@
3600 return canonical_url(getUtility(ILaunchBag).bug)
3601
3602 cancel_url = next_url
3603+
3604+
3605+class BugWatchActivityPortletView(LaunchpadFormView):
3606+ """A portlet for displaying the activity of a bug watch."""
3607+
3608+ schema = BugWatchEditForm
3609+
3610+ def userCanReschedule(self, action=None):
3611+ """Return True if the current user can reschedule the bug watch."""
3612+ return self.context.can_be_rescheduled
3613+
3614+ @action('Update Now', name='reschedule', condition=userCanReschedule)
3615+ def reschedule_action(self, action, data):
3616+ """Schedule the current bug watch for immediate checking."""
3617+ bugwatch = self.context
3618+ bugwatch.setNextCheck(UTC_NOW)
3619+ self.request.response.addInfoNotification(
3620+ structured(
3621+ 'The <a href="%(url)s">%(bugtracker)s #%(remote_bug)s</a> '
3622+ 'bug watch has been scheduled for immediate checking.',
3623+ url=bugwatch.url, bugtracker=bugwatch.bugtracker.name,
3624+ remote_bug=bugwatch.remotebug))
3625+
3626+ @property
3627+ def next_url(self):
3628+ return canonical_url(getUtility(ILaunchBag).bug)
3629+
3630+ cancel_url = next_url
3631+
3632+ @property
3633+ def recent_watch_activity(self):
3634+ """Return a list of dicts representing recent watch activity."""
3635+ activity_items = []
3636+ for activity in self.context.activity:
3637+ if activity.result in BUG_WATCH_ACTIVITY_SUCCESS_STATUSES:
3638+ icon = "/@@/yes"
3639+ completion_message = "completed successfully"
3640+ else:
3641+ icon = "/@@/no"
3642+ completion_message = (
3643+ "failed with error '%s'" % activity.result.title)
3644+
3645+ activity_items.append({
3646+ 'icon': icon,
3647+ 'date': activity.activity_date,
3648+ 'completion_message': completion_message,
3649+ 'result_text': activity.result.title,
3650+ })
3651+
3652+ return activity_items
3653
3654=== modified file 'lib/lp/bugs/browser/configure.zcml'
3655--- lib/lp/bugs/browser/configure.zcml 2010-03-11 01:39:25 +0000
3656+++ lib/lp/bugs/browser/configure.zcml 2010-04-27 02:13:38 +0000
3657@@ -1103,6 +1103,12 @@
3658 class="lp.bugs.browser.bugwatch.BugWatchEditView"
3659 permission="launchpad.AnyPerson"
3660 template="../templates/bugwatch-editform.pt"/>
3661+ <browser:page
3662+ for="lp.bugs.interfaces.bugwatch.IBugWatch"
3663+ name="+portlet-activity"
3664+ class="lp.bugs.browser.bugwatch.BugWatchActivityPortletView"
3665+ permission="launchpad.AnyPerson"
3666+ template="../templates/bugwatch-portlet-activity.pt"/>
3667 <browser:pages
3668 for="lp.bugs.interfaces.bugwatch.IBugWatch"
3669 permission="launchpad.AnyPerson">
3670
3671=== modified file 'lib/lp/bugs/browser/tests/bugwatch-views.txt'
3672--- lib/lp/bugs/browser/tests/bugwatch-views.txt 2009-10-22 11:55:51 +0000
3673+++ lib/lp/bugs/browser/tests/bugwatch-views.txt 2010-04-27 02:13:38 +0000
3674@@ -1,4 +1,5 @@
3675-= Bug Watch Edit Page =
3676+Bug Watch Edit Page
3677+===================
3678
3679 It's possible to edit a bug watch on +edit, as well as deleting it.
3680 Deleting a bug watch is only possible when the bug watch isn't linked to
3681@@ -28,3 +29,84 @@
3682 >>> [action.label for action in unlinked_bugwatch_view.actions
3683 ... if action.available()]
3684 ['Change', 'Delete Bug Watch']
3685+
3686+
3687+Recent activity
3688+---------------
3689+
3690+The Bug Watch +edit page displays a list of the recent activity for the
3691+watch. This is provided by the BugWatch activity portlet view and can be
3692+accessed via its recent_watch_activity property.
3693+
3694+We'll create a new watch in order to demonstrate this.
3695+
3696+ >>> from canonical.launchpad.ftests import login
3697+ >>> login('foo.bar@canonical.com')
3698+ >>> new_watch = factory.makeBugWatch()
3699+
3700+The view for the new watch will have an empty recent_watch_activity list
3701+since it hasn't been updated yet.
3702+
3703+ >>> new_watch_view = create_initialized_view(
3704+ ... new_watch, '+portlet-activity')
3705+ >>> len(new_watch_view.recent_watch_activity)
3706+ 0
3707+
3708+The BugWatch +edit view has a watch_has_activity property, which is used
3709+to determine whether the recent activity portlet should be displayed.
3710+
3711+ >>> new_watch_edit_view = create_initialized_view(
3712+ ... new_watch, '+edit')
3713+ >>> print new_watch_edit_view.watch_has_activity
3714+ False
3715+
3716+Adding a successful activity entry for the watch will cause it to show
3717+up in the portlet view's recent_watch_activity property.
3718+
3719+ >>> new_watch.addActivity()
3720+ >>> len(new_watch_view.recent_watch_activity)
3721+ 1
3722+
3723+The BugWatch +edit view's watch_has_activity property will also have
3724+changed.
3725+
3726+ >>> new_watch_edit_view = create_initialized_view(
3727+ ... new_watch, '+edit')
3728+ >>> print new_watch_edit_view.watch_has_activity
3729+ True
3730+
3731+Each entry in the recent_watch_activity list is a dict containing data
3732+about the activity.
3733+
3734+ >>> from pprint import pprint
3735+ >>> for activity_dict in new_watch_view.recent_watch_activity:
3736+ ... pprint(activity_dict)
3737+ {'completion_message': 'completed successfully',
3738+ 'date': datetime.datetime(...tzinfo=<UTC>),
3739+ 'icon': '/@@/yes',
3740+ 'result_text': 'Synchronisation succeeded'}
3741+
3742+If an activity entry records a failure, the 'icon' entry in the dict
3743+will point to the 'no' icon and the completion_message will explain the
3744+failure.
3745+
3746+We'll commit the transaction to make sure that the two activities have
3747+different dates.
3748+
3749+ >>> import transaction
3750+ >>> transaction.commit()
3751+
3752+ >>> from lp.bugs.interfaces.bugwatch import BugWatchActivityStatus
3753+ >>> new_watch.addActivity(result=BugWatchActivityStatus.BUG_NOT_FOUND)
3754+ >>> for activity_dict in new_watch_view.recent_watch_activity:
3755+ ... pprint(activity_dict)
3756+ {'completion_message': "failed with error 'Bug Not Found'",
3757+ 'date': datetime.datetime(...tzinfo=<UTC>),
3758+ 'icon': '/@@/no',
3759+ 'result_text': 'Bug Not Found'}
3760+ {'completion_message': 'completed successfully',
3761+ 'date': datetime.datetime(...tzinfo=<UTC>),
3762+ 'icon': '/@@/yes',
3763+ 'result_text': 'Synchronisation succeeded'}
3764+
3765+
3766
3767=== modified file 'lib/lp/bugs/configure.zcml'
3768--- lib/lp/bugs/configure.zcml 2010-04-14 12:55:44 +0000
3769+++ lib/lp/bugs/configure.zcml 2010-04-27 02:13:38 +0000
3770@@ -849,7 +849,9 @@
3771 bug
3772 bugtasks
3773 bugtracker
3774+ can_be_rescheduled
3775 datecreated
3776+ failed_activity
3777 getLastErrorMessage
3778 hasComment
3779 unpushed_comments
3780@@ -870,7 +872,8 @@
3781 permission="launchpad.AnyPerson"
3782 attributes="
3783 destroySelf
3784- addActivity"
3785+ addActivity
3786+ setNextCheck"
3787 set_attributes="bugtracker remotebug"/>
3788 <require
3789 permission="launchpad.Admin"
3790
3791=== modified file 'lib/lp/bugs/doc/bug-watch-activity.txt'
3792--- lib/lp/bugs/doc/bug-watch-activity.txt 2010-04-21 10:30:24 +0000
3793+++ lib/lp/bugs/doc/bug-watch-activity.txt 2010-04-27 02:13:38 +0000
3794@@ -50,11 +50,14 @@
3795 >>> activity.activity_date
3796 datetime.datetime...
3797
3798+The BugWatchActivity's result will be BugWatchActivityStatus.SYNC_SUCCEEDED.
3799+
3800+ >>> print activity.result.title
3801+ Synchronisation succeeded
3802+
3803 The other fields on the BugWatchActivity record, which aren't required,
3804 will all be None.
3805
3806- >>> print activity.result
3807- None
3808 >>> print activity.message
3809 None
3810 >>> print activity.oops_id
3811@@ -83,12 +86,13 @@
3812 >>> print bug_watch.activity.count()
3813 2
3814
3815-The most recent activity entry will have a result of None since it was
3816+The most recent activity entry will have a result of
3817+BugWatchActivityStatus.SYNC_SUCCEEDED since it was
3818 successful.
3819
3820 >>> most_recent_activity = bug_watch.activity.first()
3821- >>> print most_recent_activity.result
3822- None
3823+ >>> print most_recent_activity.result.title
3824+ Synchronisation succeeded
3825
3826 Its message will also be empty
3827
3828
3829=== modified file 'lib/lp/bugs/doc/bugnotification-sending.txt'
3830--- lib/lp/bugs/doc/bugnotification-sending.txt 2010-04-15 10:58:02 +0000
3831+++ lib/lp/bugs/doc/bugnotification-sending.txt 2010-04-27 02:13:38 +0000
3832@@ -984,7 +984,7 @@
3833 >>> process.returncode
3834 0
3835 >>> print err
3836- INFO creating lockfile
3837+ INFO Creating lockfile: /var/lock/launchpad-send-bug-notifications.lock
3838 INFO Notifying mark@example.com about bug 2.
3839 ...
3840 INFO Notifying support@ubuntu.com about bug 2.
3841
3842=== modified file 'lib/lp/bugs/doc/bugtask-expiration.txt'
3843--- lib/lp/bugs/doc/bugtask-expiration.txt 2010-04-14 13:23:02 +0000
3844+++ lib/lp/bugs/doc/bugtask-expiration.txt 2010-04-27 02:13:38 +0000
3845@@ -445,7 +445,7 @@
3846 ... stderr=subprocess.PIPE)
3847 >>> (out, err) = process.communicate()
3848 >>> print err
3849- INFO creating lockfile
3850+ INFO Creating lockfile: /var/lock/launchpad-expire-bugtasks.lock
3851 INFO Expiring unattended, INCOMPLETE bugtasks older than
3852 60 days for projects that use Launchpad Bugs.
3853 INFO Found 3 bugtasks to expire.
3854
3855=== modified file 'lib/lp/bugs/doc/bugtask.txt'
3856--- lib/lp/bugs/doc/bugtask.txt 2010-01-21 17:40:23 +0000
3857+++ lib/lp/bugs/doc/bugtask.txt 2010-04-27 02:13:38 +0000
3858@@ -1104,7 +1104,7 @@
3859 >>> (out, err) = process.communicate()
3860
3861 >>> print err
3862- INFO creating lockfile
3863+ INFO Creating lockfile: /var/lock/launchpad-launchpad-targetnamecacheupdater.lock
3864 INFO Updating targetname cache of bugtasks.
3865 INFO Updating 1 BugTasks (starting id: 2).
3866 INFO Updating ...BugTasks...
3867
3868=== modified file 'lib/lp/bugs/doc/bugwatch.txt'
3869--- lib/lp/bugs/doc/bugwatch.txt 2010-04-21 10:30:24 +0000
3870+++ lib/lp/bugs/doc/bugwatch.txt 2010-04-27 02:13:38 +0000
3871@@ -513,3 +513,87 @@
3872 >>> bug.removeWatch(bug_watch, factory.makePerson())
3873 >>> [bug_watch.remotebug for bug_watch in bug.watches]
3874 []
3875+
3876+
3877+Checking if a watch can be rescheduled
3878+--------------------------------------
3879+
3880+IBugWatch provides an attribute, can_be_rescheduled, which indicates
3881+whether or not the watch can be rescheduled. For a new bug watch this
3882+will be False.
3883+
3884+ >>> schedulable_watch = factory.makeBugWatch()
3885+ >>> schedulable_watch.next_check = None
3886+ >>> schedulable_watch.can_be_rescheduled
3887+ False
3888+
3889+If there's been activity on the watch but it's always been successful,
3890+can_be_rescheduled will be False.
3891+
3892+ >>> schedulable_watch.addActivity()
3893+ >>> schedulable_watch.can_be_rescheduled
3894+ False
3895+
3896+If the watch's updates have failed no more than 60% of the time,
3897+can_be_rescheduled will be True.
3898+
3899+ >>> from lp.bugs.interfaces.bugwatch import BugWatchActivityStatus
3900+ >>> schedulable_watch.addActivity(
3901+ ... result=BugWatchActivityStatus.BUG_NOT_FOUND)
3902+ >>> schedulable_watch.can_be_rescheduled
3903+ True
3904+
3905+If the watch is rescheduled, can_be_rescheduled will be False, since the
3906+next_check time for the watch will be in the past (or, in this case,
3907+now) and it will therefore be checked by the next checkwatches run.
3908+
3909+ >>> from pytz import utc
3910+ >>> from datetime import datetime
3911+ >>> schedulable_watch.next_check = datetime.now(utc)
3912+ >>> schedulable_watch.can_be_rescheduled
3913+ False
3914+
3915+However, if the watch has failed more than 60% of the time,
3916+can_be_rescheduled will be False, since it's assumed that the watch
3917+needs attention before it can work again.
3918+
3919+ >>> schedulable_watch.next_check = None
3920+ >>> schedulable_watch.addActivity(
3921+ ... result=BugWatchActivityStatus.BUG_NOT_FOUND)
3922+ >>> schedulable_watch.can_be_rescheduled
3923+ False
3924+
3925+
3926+Rescheduling a watch
3927+--------------------
3928+
3929+The rescheduling of a watch is done via IBugWatch.setNextCheck(). This
3930+is to ensure that watches are only rescheduled when can_be_rescheduled
3931+is True (note that the BugWatch Scheduler bypasses setNextCheck() and
3932+sets next_check directly because it has admin privileges).
3933+
3934+The schedulable_watch that we used in the previous test cannot currently
3935+be rescheduled.
3936+
3937+ >>> schedulable_watch.can_be_rescheduled
3938+ False
3939+
3940+Calling setNextCheck() on this watch will raise
3941+BugWatchCannotBeRescheduled.
3942+
3943+ >>> schedulable_watch.setNextCheck(datetime.now(utc))
3944+ Traceback (most recent call last):
3945+ ...
3946+ BugWatchCannotBeRescheduled...
3947+
3948+If we add some activity to the watch, to make its can_be_rescheduled
3949+property become True, setNextCheck() will succeed.
3950+
3951+ >>> schedulable_watch.addActivity()
3952+ >>> schedulable_watch.can_be_rescheduled
3953+ True
3954+
3955+ >>> next_check = datetime.now(utc)
3956+ >>> schedulable_watch.setNextCheck(next_check)
3957+ >>> schedulable_watch.next_check == next_check
3958+ True
3959
3960=== modified file 'lib/lp/bugs/doc/checkwatches.txt'
3961--- lib/lp/bugs/doc/checkwatches.txt 2010-04-21 10:30:24 +0000
3962+++ lib/lp/bugs/doc/checkwatches.txt 2010-04-27 02:13:38 +0000
3963@@ -44,7 +44,7 @@
3964 0
3965
3966 >>> print err
3967- INFO creating lockfile
3968+ INFO Creating lockfile: /var/lock/launchpad-checkwatches.lock
3969 DEBUG No global batch size specified.
3970 DEBUG Skipping updating Ubuntu Bugzilla watches.
3971 DEBUG No watches to update on http://bugs.debian.org
3972
3973=== modified file 'lib/lp/bugs/doc/cve-update.txt'
3974--- lib/lp/bugs/doc/cve-update.txt 2009-06-12 16:36:02 +0000
3975+++ lib/lp/bugs/doc/cve-update.txt 2010-04-27 02:13:38 +0000
3976@@ -37,7 +37,7 @@
3977 ... )
3978 >>> (output, empty) = process.communicate()
3979 >>> print output
3980- INFO creating lockfile
3981+ INFO Creating lockfile: /var/lock/launchpad-updatecve.lock
3982 ...
3983 INFO CVE-1999-0002 created
3984 INFO Creating new SGI reference for 1999-0002
3985@@ -91,7 +91,7 @@
3986 ... )
3987 >>> (output, empty) = process.communicate()
3988 >>> print output
3989- INFO creating lockfile
3990+ INFO Creating lockfile: /var/lock/launchpad-updatecve.lock
3991 ...
3992 INFO Creating new CERT reference for 1999-0002
3993 INFO Creating new CIAC reference for 1999-0002
3994
3995=== modified file 'lib/lp/bugs/interfaces/bugwatch.py'
3996--- lib/lp/bugs/interfaces/bugwatch.py 2010-03-23 12:55:05 +0000
3997+++ lib/lp/bugs/interfaces/bugwatch.py 2010-04-27 02:13:38 +0000
3998@@ -8,7 +8,9 @@
3999 __metaclass__ = type
4000
4001 __all__ = [
4002+ 'BUG_WATCH_ACTIVITY_SUCCESS_STATUSES',
4003 'BugWatchActivityStatus',
4004+ 'BugWatchCannotBeRescheduled',
4005 'IBugWatch',
4006 'IBugWatchActivity',
4007 'IBugWatchSet',
4008@@ -93,6 +95,42 @@
4009 Launchpad cannot import the status of private remote bugs.
4010 """)
4011
4012+ SYNC_SUCCEEDED = DBItem(9, """
4013+ Synchronisation succeeded
4014+
4015+ The remote bug's status was successfully synchronized to Launchpad.
4016+ """)
4017+
4018+ COMMENT_IMPORT_FAILED = DBItem(10, """
4019+ Unable to import comments
4020+
4021+ The remote bug's status was synchronized successfully but
4022+ comments could not be imported from the remote bug.
4023+ """)
4024+
4025+ COMMENT_PUSH_FAILED = DBItem(11, """
4026+ Unable to push comments
4027+
4028+ The remote bug's status was synchronized successfully and
4029+ its comments were successfully imported but Launchpad was unable
4030+ to push comments back to the remote bug.
4031+ """)
4032+
4033+ BACKLINK_FAILED = DBItem(12, """
4034+ Unable to link remote bug to Launchpad
4035+
4036+ The remote bug's status and comments were synchronized
4037+ successfully with Launchpad, but Launchpad was unable to set the
4038+ remote bug's link back to the relevant Launchpad bug.
4039+ """)
4040+
4041+
4042+# The set of BugWatchActivityStatuses that are considered to indicate
4043+# success.
4044+BUG_WATCH_ACTIVITY_SUCCESS_STATUSES = [
4045+ BugWatchActivityStatus.SYNC_SUCCEEDED,
4046+ ]
4047+
4048
4049 class IBugWatch(IHasBug):
4050 """A bug on a remote system."""
4051@@ -173,6 +211,10 @@
4052 Text(title=_('The URL at which to view the remote bug.'),
4053 readonly=True))
4054
4055+ can_be_rescheduled = Attribute(
4056+ "A True or False indicator of whether or not this watch can be "
4057+ "rescheduled.")
4058+
4059 def updateImportance(remote_importance, malone_importance):
4060 """Update the importance of the bug watch and any linked bug task.
4061
4062@@ -213,6 +255,13 @@
4063 def addActivity(result=None, message=None, oops_id=None):
4064 """Add an `IBugWatchActivity` record for this BugWatch."""
4065
4066+ def setNextCheck(next_check):
4067+ """Set the next_check time of the watch.
4068+
4069+ :raises: `BugWatchCannotBeRescheduled` if
4070+ `IBugWatch.can_be_rescheduled` is False.
4071+ """
4072+
4073
4074 # Defined here because of circular imports.
4075 IBugTracker['watches'].value_type.schema = IBugWatch
4076@@ -326,3 +375,6 @@
4077 title=_('OOPS ID'), readonly=True,
4078 description=_("The OOPS ID associated with this activity."))
4079
4080+
4081+class BugWatchCannotBeRescheduled(Exception):
4082+ """The current `IBugWatch` can't be rescheduled."""
4083
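A small sketch of how the new success-status list is intended to be used when classifying a watch's activity, mirroring the portlet view earlier in this diff (the helper name is illustrative):

    from lp.bugs.interfaces.bugwatch import (
        BUG_WATCH_ACTIVITY_SUCCESS_STATUSES)

    def activity_succeeded(activity):
        # Only SYNC_SUCCEEDED counts as success; the partial-failure
        # statuses added above (COMMENT_IMPORT_FAILED and friends)
        # all count as failures.
        return activity.result in BUG_WATCH_ACTIVITY_SUCCESS_STATUSES
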
4084=== modified file 'lib/lp/bugs/model/bugwatch.py'
4085--- lib/lp/bugs/model/bugwatch.py 2010-04-09 15:04:19 +0000
4086+++ lib/lp/bugs/model/bugwatch.py 2010-04-27 02:13:38 +0000
4087@@ -12,6 +12,9 @@
4088
4089 import re
4090 import urllib
4091+
4092+from datetime import datetime
4093+from pytz import utc
4094 from urlparse import urlunsplit
4095
4096 from zope.event import notify
4097@@ -44,8 +47,9 @@
4098
4099 from lp.bugs.interfaces.bugtracker import BugTrackerType, IBugTrackerSet
4100 from lp.bugs.interfaces.bugwatch import (
4101- BugWatchActivityStatus, IBugWatch, IBugWatchActivity, IBugWatchSet,
4102- NoBugTrackerFound, UnrecognizedBugTrackerURL)
4103+ BUG_WATCH_ACTIVITY_SUCCESS_STATUSES, BugWatchActivityStatus,
4104+ BugWatchCannotBeRescheduled, IBugWatch, IBugWatchActivity,
4105+ IBugWatchSet, NoBugTrackerFound, UnrecognizedBugTrackerURL)
4106 from lp.bugs.model.bugmessage import BugMessage
4107 from lp.bugs.model.bugset import BugSetBase
4108 from lp.bugs.model.bugtask import BugTask
4109@@ -66,6 +70,9 @@
4110 }
4111
4112
4113+WATCH_RESCHEDULE_THRESHOLD = 0.6
4114+
4115+
4116 class BugWatch(SQLBase):
4117 """See `IBugWatch`."""
4118 implements(IBugWatch)
4119@@ -212,6 +219,8 @@
4120
4121 if self.last_error_type in error_message_mapping:
4122 message = error_message_mapping[self.last_error_type]
4123+ elif self.last_error_type != BugWatchActivityStatus.UNKNOWN:
4124+ message = self.last_error_type.description
4125 else:
4126 message = ("Launchpad couldn't import bug #%(bug)s from "
4127 "%(bugtracker)s.")
4128@@ -284,7 +293,12 @@
4129 """See `IBugWatch`."""
4130 activity = BugWatchActivity()
4131 activity.bug_watch = self
4132- activity.result = result
4133+ if result is None:
4134+ # If no result is passed, we assume that the activity
4135+ # succeeded and set the result field accordingly.
4136+ activity.result = BugWatchActivityStatus.SYNC_SUCCEEDED
4137+ else:
4138+ activity.result = result
4139 if message is not None:
4140 activity.message = unicode(message)
4141 if oops_id is not None:
4142@@ -300,6 +314,52 @@
4143 BugWatchActivity.bug_watch == self).order_by(
4144 Desc('activity_date'))
4145
4146+ @property
4147+ def can_be_rescheduled(self):
4148+ """See `IBugWatch`."""
4149+ if (self.next_check is not None and
4150+ self.next_check <= datetime.now(utc)):
4151+ # If the watch is already scheduled for a time in the past
4152+ # (or for right now), it can't be rescheduled, since it
4153+ # should be checked by the next checkwatches run anyway.
4154+ return False
4155+
4156+ if self.activity.is_empty():
4157+ # Don't show the reschedule button if the watch has never
4158+ # been checked.
4159+ return False
4160+
4161+ if self.failed_activity.is_empty():
4162+ # Don't show the reschedule button if the watch has never
4163+ # failed.
4164+ return False
4165+
4166+        # If the failure ratio is at or below the reschedule
4167+        # threshold, we can show the button.
4168+ failure_ratio = (
4169+ float(self.failed_activity.count()) /
4170+ self.activity.count())
4171+ return failure_ratio <= WATCH_RESCHEDULE_THRESHOLD
4172+
4173+ @property
4174+ def failed_activity(self):
4175+ store = Store.of(self)
4176+ success_status_ids = [
4177+ status.value for status in BUG_WATCH_ACTIVITY_SUCCESS_STATUSES]
4178+
4179+ return store.find(
4180+ BugWatchActivity,
4181+ BugWatchActivity.bug_watch == self,
4182+ Not(In(BugWatchActivity.result, success_status_ids))).order_by(
4183+ Desc('activity_date'))
4184+
4185+ def setNextCheck(self, next_check):
4186+ """See `IBugWatch`."""
4187+ if not self.can_be_rescheduled:
4188+ raise BugWatchCannotBeRescheduled()
4189+
4190+ self.next_check = next_check
4191+
4192
4193 class BugWatchSet(BugSetBase):
4194 """A set for BugWatch"""
4195
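
The guard above boils down to a ratio test against WATCH_RESCHEDULE_THRESHOLD. A standalone sketch of the same rule, for illustration only (the constant is copied from the model code above):

    WATCH_RESCHEDULE_THRESHOLD = 0.6

    def may_reschedule(total_checks, failed_checks, already_due):
        # Mirrors BugWatch.can_be_rescheduled, for illustration only.
        if already_due:         # next_check is now or in the past
            return False
        if total_checks == 0:   # never checked
            return False
        if failed_checks == 0:  # never failed
            return False
        ratio = float(failed_checks) / total_checks
        return ratio <= WATCH_RESCHEDULE_THRESHOLD

For example, one failure out of two checks gives a ratio of 0.5, which is at or below the 0.6 threshold, so rescheduling is allowed.
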
4196=== modified file 'lib/lp/bugs/scripts/bugheat.py'
4197--- lib/lp/bugs/scripts/bugheat.py 2010-04-14 12:55:44 +0000
4198+++ lib/lp/bugs/scripts/bugheat.py 2010-04-27 02:13:38 +0000
4199@@ -79,12 +79,12 @@
4200 self._getHeatFromSubscribers(),
4201 ])
4202
4203- # Bugs decay over time. Every month the bug isn't touched its heat
4204- # decreases by 10%.
4205- months = (
4206+ # Bugs decay over time. Every day the bug isn't touched its heat
4207+ # decreases by 1%.
4208+ days = (
4209 datetime.utcnow() -
4210- self.bug.date_last_updated.replace(tzinfo=None)).days / 30
4211- total_heat = int(total_heat * (0.9 ** months))
4212+ self.bug.date_last_updated.replace(tzinfo=None)).days
4213+ total_heat = int(total_heat * (0.99 ** days))
4214
4215 return total_heat
4216
4217
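
To get a feel for the new decay curve, compare both rules on the same bug (plain arithmetic; the old rule used whole-month buckets via integer division):

    fresh_heat = 1000

    # After 30 untouched days:
    old_heat = int(fresh_heat * 0.9 ** (30 // 30))   # 900
    new_heat = int(fresh_heat * 0.99 ** 30)          # 739

    # The daily rule also decays between month boundaries:
    one_day = int(fresh_heat * 0.99 ** 1)            # 990

So the daily 1% rule is smoother and slightly steeper over a month: 0.99 ** 30 is about 0.74, against 0.9 for the old monthly rule.
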
4218=== modified file 'lib/lp/bugs/scripts/checkwatches/scheduler.py'
4219--- lib/lp/bugs/scripts/checkwatches/scheduler.py 2010-03-26 14:33:46 +0000
4220+++ lib/lp/bugs/scripts/checkwatches/scheduler.py 2010-04-27 02:13:38 +0000
4221@@ -10,12 +10,11 @@
4222
4223 import transaction
4224
4225-from storm.expr import Not
4226-
4227 from canonical.database.sqlbase import sqlvalues
4228 from canonical.launchpad.utilities.looptuner import TunableLoop
4229 from canonical.launchpad.interfaces import IMasterStore
4230
4231+from lp.bugs.interfaces.bugwatch import BUG_WATCH_ACTIVITY_SUCCESS_STATUSES
4232 from lp.bugs.model.bugwatch import BugWatch
4233
4234
4235@@ -70,7 +69,7 @@
4236 FROM (SELECT 1
4237 FROM bugwatchactivity
4238 WHERE bugwatchactivity.bug_watch = bug_watch.id
4239- AND bugwatchactivity.result IS NOT NULL
4240+ AND bugwatchactivity.result NOT IN (%s)
4241 ORDER BY bugwatchactivity.id DESC
4242 LIMIT %s) AS recent_failures
4243 ) AS recent_failure_count
4244@@ -80,7 +79,8 @@
4245 ) AS counts
4246 WHERE BugWatch.id = counts.id
4247 """ % sqlvalues(
4248- self.delay_coefficient, self.max_sample_size, chunk_size)
4249+ self.delay_coefficient, BUG_WATCH_ACTIVITY_SUCCESS_STATUSES,
4250+ self.max_sample_size, chunk_size)
4251 self.transaction.begin()
4252 result = self.store.execute(query)
4253 self.log.debug("Scheduled %s watches" % result.rowcount)
4254
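
For context, the NOT IN (%s) slot is filled from the success-status collection, so only genuinely failed activity rows are counted. A rough DB-API sketch of the same predicate (illustrative only, not the Launchpad helper; psycopg2 renders a non-empty tuple parameter as a parenthesised list):

    import psycopg2  # assumed driver for this sketch

    def count_recent_failures(conn, watch_id, success_status_ids, sample=5):
        # success_status_ids is assumed non-empty, as in the query above.
        cur = conn.cursor()
        cur.execute("""
            SELECT COUNT(*)
              FROM (SELECT 1
                      FROM bugwatchactivity
                     WHERE bug_watch = %s
                       AND result NOT IN %s
                     ORDER BY id DESC
                     LIMIT %s) AS recent_failures
            """, (watch_id, tuple(success_status_ids), sample))
        return cur.fetchone()[0]
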
4255=== modified file 'lib/lp/bugs/scripts/tests/test_bugheat.py'
4256--- lib/lp/bugs/scripts/tests/test_bugheat.py 2010-04-14 12:55:44 +0000
4257+++ lib/lp/bugs/scripts/tests/test_bugheat.py 2010-04-27 02:13:38 +0000
4258@@ -215,12 +215,12 @@
4259 "Expected %s, got %s" % (0, heat))
4260
4261 def test_getBugHeat_decay(self):
4262- # Every month, a bug that wasn't touched has its heat reduced by 10%.
4263+ # Every day, a bug that wasn't touched has its heat reduced by 1%.
4264 aging_bug = self.factory.makeBug()
4265 fresh_heat = BugHeatCalculator(aging_bug).getBugHeat()
4266 aging_bug.date_last_updated = (
4267- aging_bug.date_last_updated - timedelta(days=32))
4268- expected = int(fresh_heat * 0.9)
4269+ aging_bug.date_last_updated - timedelta(days=1))
4270+ expected = int(fresh_heat * 0.99)
4271 heat = BugHeatCalculator(aging_bug).getBugHeat()
4272 self.assertEqual(
4273 expected, heat,
4274
4275=== modified file 'lib/lp/bugs/stories/bugwatches/xx-bugwatch-errors.txt'
4276--- lib/lp/bugs/stories/bugwatches/xx-bugwatch-errors.txt 2010-04-16 11:20:33 +0000
4277+++ lib/lp/bugs/stories/bugwatches/xx-bugwatch-errors.txt 2010-04-27 02:13:38 +0000
4278@@ -62,11 +62,17 @@
4279 The Mozilla.org Bug Tracker bug #900 appears not to exist. Check
4280 that the bug number is correct.
4281
4282-We can observe this for each of the BugWatchActivityStatus values:
4283+We can observe this for each of the BugWatchActivityStatus failure values:
4284
4285+ >>> from lp.bugs.interfaces.bugwatch import (
4286+ ... BUG_WATCH_ACTIVITY_SUCCESS_STATUSES)
4287 >>> from lp.bugs.tests.externalbugtracker import (
4288 ... set_bugwatch_error_type)
4289- >>> for item in sorted(BugWatchActivityStatus.items):
4290+
4291+ >>> failure_values = [
4292+ ... value for value in sorted(BugWatchActivityStatus.items) if
4293+ ... value not in BUG_WATCH_ACTIVITY_SUCCESS_STATUSES]
4294+ >>> for item in failure_values:
4295 ... set_bugwatch_error_type(watch, item)
4296 ... user_browser.open('http://bugs.launchpad.dev/thunderbird/+bug/12')
4297 ... for tag in find_tags_by_class(user_browser.contents,
4298@@ -86,6 +92,14 @@
4299 Launchpad doesn't support importing bugs from Bugzilla bug trackers.
4300 The bug is marked as private on the remote bug tracker. Launchpad
4301 cannot import the status of private remote bugs.
4302+ The remote bug's status was synchronized successfully but comments
4303+ could not be imported from the remote bug.
4304+ The remote bug's status was synchronized successfully and its
4305+ comments were successfully imported but Launchpad was unable to push
4306+ comments back to the remote bug.
4307+    The remote bug's status and comments were synchronized successfully
4308+ with Launchpad but Launchpad was unable to set the remote bug's link
4309+ back to the relevant Launchpad bug.
4310
4311 Finally, if the error gets solved (or no error occurs), the error
4312 message will go away.
4313
4314=== modified file 'lib/lp/bugs/stories/bugwatches/xx-edit-bugwatch.txt'
4315--- lib/lp/bugs/stories/bugwatches/xx-edit-bugwatch.txt 2010-04-09 12:00:54 +0000
4316+++ lib/lp/bugs/stories/bugwatches/xx-edit-bugwatch.txt 2010-04-27 02:13:38 +0000
4317@@ -77,3 +77,126 @@
4318 ... admin_browser.contents, 'bugwatch-next_check')
4319 >>> print extract_text(data_tag.renderContents())
4320 Next check: 2010-04-08...
4321+
4322+
4323+Recent activity
4324+---------------
4325+
4326+Recent activity on a bug watch is shown on the page as a list of
4327+activity entries. When a watch has not been checked, no activity is
4328+shown.
4329+
4330+ >>> admin_browser.open('http://bugs.launchpad.dev/bugs/1/+watch/2')
4331+ >>> recent_activity_list = find_tag_by_id(
4332+ ... admin_browser.contents, 'recent-watch-activity')
4333+ >>> print recent_activity_list
4334+ None
4335+
4336+Adding some activity to the watch will cause it to show up in the recent
4337+activity list.
4338+
4339+ >>> login('foo.bar@canonical.com')
4340+ >>> watch = getUtility(IBugWatchSet).get(2)
4341+ >>> watch.addActivity()
4342+ >>> logout()
4343+
4344+ >>> admin_browser.open('http://bugs.launchpad.dev/bugs/1/+watch/2')
4345+ >>> recent_activity_list = find_tag_by_id(
4346+ ... admin_browser.contents, 'recent-watch-activity')
4347+ >>> print extract_text(recent_activity_list)
4348+ Update completed successfully ... ago
4349+
4350+If an update fails, that too will be reflected in the list.
4351+
4352+ >>> from lp.bugs.interfaces.bugwatch import BugWatchActivityStatus
4353+ >>> login('foo.bar@canonical.com')
4354+ >>> watch = getUtility(IBugWatchSet).get(2)
4355+ >>> watch.addActivity(result=BugWatchActivityStatus.BUG_NOT_FOUND)
4356+ >>> logout()
4357+
4358+ >>> admin_browser.open('http://bugs.launchpad.dev/bugs/1/+watch/2')
4359+ >>> recent_activity_list = find_tag_by_id(
4360+ ... admin_browser.contents, 'recent-watch-activity')
4361+ >>> print extract_text(recent_activity_list)
4362+ Update failed with error 'Bug Not Found' ... ago
4363+ Update completed successfully ... ago
4364+
4365+
4366+Rescheduling a watch
4367+--------------------
4368+
4369+It's possible to reschedule a failing watch via the BugWatch +edit page
4370+by clicking the "Update Now" button.
4371+
4372+For a new watch, the "Update Now" button isn't shown.
4373+
4374+ >>> from pytz import utc
4375+ >>> from datetime import datetime, timedelta
4376+ >>> login('foo.bar@canonical.com')
4377+ >>> bug_watch = factory.makeBugWatch()
4378+ >>> bug_watch.next_check = None
4379+ >>> watch_url = (
4380+ ... 'http://bugs.launchpad.dev/bugs/%s/+watch/%s' %
4381+ ... (bug_watch.bug.id, bug_watch.id))
4382+ >>> logout()
4383+
4384+ >>> user_browser.open(watch_url)
4385+ >>> user_browser.getControl('Update Now')
4386+ Traceback (most recent call last):
4387+ ...
4388+ LookupError: label 'Update Now'
4389+
4390+If the watch has been checked but has never failed, the button will
4391+remain hidden.
4392+
4393+ >>> login('foo.bar@canonical.com')
4394+ >>> bug_watch.addActivity()
4395+ >>> logout()
4396+
4397+ >>> user_browser.open(watch_url)
4398+ >>> user_browser.getControl('Update Now')
4399+ Traceback (most recent call last):
4400+ ...
4401+ LookupError: label 'Update Now'
4402+
4403+If the watch has failed in 60% or fewer of its recent checks (here,
4404+one failure out of two checks), the button will appear on the page.
4405+
4406+ >>> login('foo.bar@canonical.com')
4407+ >>> bug_watch.addActivity(result=BugWatchActivityStatus.BUG_NOT_FOUND)
4408+ >>> logout()
4409+
4410+ >>> user_browser.open(watch_url)
4411+ >>> reschedule_button = user_browser.getControl('Update Now')
4412+
4413+ >>> data_tag = find_tag_by_id(
4414+ ... user_browser.contents, 'bugwatch-next_check')
4415+ >>> print extract_text(data_tag.renderContents())
4416+ Next check: Not yet scheduled
4417+
4418+Clicking the Update Now button will schedule it to be checked
4419+immediately.
4420+
4421+ >>> reschedule_button.click()
4422+
4423+ >>> for message in find_tags_by_class(
4424+ ... user_browser.contents, 'informational message'):
4425+ ... print extract_text(message)
4426+ The ... bug watch has been scheduled for immediate checking.
4427+
4428+Looking at the watch +edit page again, we can see that the watch has
4429+been scheduled.
4430+
4431+ >>> user_browser.open(watch_url)
4432+ >>> data_tag = find_tag_by_id(
4433+ ... user_browser.contents, 'bugwatch-next_check')
4434+ >>> print extract_text(data_tag.renderContents())
4435+ Next check: 2...
4436+
4437+The button will no longer be shown on the page.
4438+
4439+ >>> reschedule_button = user_browser.getControl('Update Now')
4440+ Traceback (most recent call last):
4441+ ...
4442+ LookupError: label 'Update Now'
4443+
4444
4445=== modified file 'lib/lp/bugs/templates/bugwatch-editform.pt'
4446--- lib/lp/bugs/templates/bugwatch-editform.pt 2009-09-03 12:43:53 +0000
4447+++ lib/lp/bugs/templates/bugwatch-editform.pt 2010-04-27 02:13:38 +0000
4448@@ -21,6 +21,10 @@
4449 </div>
4450 </div>
4451
4452+ <div class="yui-g" tal:condition="view/watch_has_activity">
4453+ <div tal:replace="structure context/@@+portlet-activity" />
4454+ </div>
4455+
4456 <div class="yui-g">
4457 <div metal:use-macro="context/@@launchpad_form/form"/>
4458 </div>
4459
4460=== added file 'lib/lp/bugs/templates/bugwatch-portlet-activity.pt'
4461--- lib/lp/bugs/templates/bugwatch-portlet-activity.pt 1970-01-01 00:00:00 +0000
4462+++ lib/lp/bugs/templates/bugwatch-portlet-activity.pt 2010-04-27 02:13:38 +0000
4463@@ -0,0 +1,44 @@
4464+<div
4465+ xmlns:tal="http://xml.zope.org/namespaces/tal"
4466+ xmlns:metal="http://xml.zope.org/namespaces/metal"
4467+ xmlns:i18n="http://xml.zope.org/namespaces/i18n"
4468+ class="portlet" id="portlet-watches">
4469+ <h2>Recent activity</h2>
4470+ <div id="recent-watch-activity">
4471+ <div>
4472+ <form
4473+ tal:attributes="action view/action_url;"
4474+ tal:condition="view/userCanReschedule"
4475+ name="launchpadform"
4476+ id="reschedule-form"
4477+ method="post"
4478+ enctype="multipart/form-data"
4479+ accept-charset="UTF-8">
4480+ <div>
4481+        This watch has failed to update in
4482+ <tal:fail-count
4483+ replace="context/failed_activity/count" />
4484+ out of the last
4485+          <tal:total-count
4486+ replace="context/activity/count" />
4487+ attempts.
4488+ </div>
4489+ <div>
4490+ The next update will occur
4491+ <tal:next-check
4492+ replace="view/context/next_check/fmt:approximatedate" />
4493+ <tal:reschedule-button
4494+ replace="structure view/reschedule_action/render" />
4495+ </div>
4496+ </form>
4497+ </div>
4498+ <tal:activity repeat="activity view/recent_watch_activity">
4499+ <div>
4500+ <img tal:attributes="src activity/icon; title activity/result_text" />
4501+ Update
4502+ <tal:message replace="activity/completion_message" />
4503+ <tal:time replace="activity/date/fmt:displaydate" />
4504+ </div>
4505+ </tal:activity>
4506+ </div>
4507+</div>
4508
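
The portlet binds to a handful of view attributes: action_url, userCanReschedule, recent_watch_activity and reschedule_action. The real browser view lives elsewhere in this branch; purely as a hypothetical sketch of the contract the template assumes:

    class BugWatchActivityPortletContract:
        # Hypothetical shape only; names taken from the template above.

        def __init__(self, context, user):
            self.context = context  # an IBugWatch
            self.user = user        # the logged-in person, or None

        @property
        def userCanReschedule(self):
            # Hide the reschedule form from anonymous users and for
            # watches that cannot be rescheduled at all.
            return (
                self.user is not None and self.context.can_be_rescheduled)
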
4509=== modified file 'lib/lp/bugs/tests/test_apportjob.py'
4510--- lib/lp/bugs/tests/test_apportjob.py 2010-04-08 13:26:26 +0000
4511+++ lib/lp/bugs/tests/test_apportjob.py 2010-04-27 02:13:38 +0000
4512@@ -273,7 +273,7 @@
4513 expect_returncode=0)
4514 self.assertEqual('', stdout)
4515 self.assertIn(
4516- 'INFO Ran 1 IProcessApportBlobJobSource jobs.\n', stderr)
4517+ 'INFO Ran 1 ProcessApportBlobJob jobs.\n', stderr)
4518
4519 def test_getFileBugData(self):
4520 # The IProcessApportBlobJobSource.getFileBugData() method
4521
4522=== modified file 'lib/lp/bugs/tests/test_bugheat.py'
4523--- lib/lp/bugs/tests/test_bugheat.py 2010-02-25 21:37:02 +0000
4524+++ lib/lp/bugs/tests/test_bugheat.py 2010-04-27 02:13:38 +0000
4525@@ -96,7 +96,7 @@
4526 expect_returncode=0)
4527 self.assertEqual('', stdout)
4528 self.assertIn(
4529- 'INFO Ran 1 ICalculateBugHeatJobSource jobs.\n', stderr)
4530+ 'INFO Ran 1 CalculateBugHeatJob jobs.\n', stderr)
4531
4532 def test_getOopsVars(self):
4533 # BugJobDerived.getOopsVars() returns the variables to be used
4534
4535=== modified file 'lib/lp/code/configure.zcml'
4536--- lib/lp/code/configure.zcml 2010-04-23 02:35:47 +0000
4537+++ lib/lp/code/configure.zcml 2010-04-27 02:13:38 +0000
4538@@ -38,7 +38,7 @@
4539 <subscriber
4540 for="lp.code.interfaces.codereviewvote.ICodeReviewVoteReference
4541 lp.code.interfaces.event.IReviewerNominatedEvent"
4542- handler="lp.code.mail.branchmergeproposal.send_review_requested_notifications"/>
4543+ handler="lp.code.subscribers.branchmergeproposal.review_requested"/>
4544
4545 <!-- CodeImportMachine -->
4546
4547@@ -272,19 +272,84 @@
4548 factory="lp.code.browser.branchmergeproposal.text_xhtml_representation"
4549 name="description"/>
4550
4551+ <!-- Branch Merge Proposal Jobs -->
4552
4553 <class class="lp.code.model.branchmergeproposaljob.CreateMergeProposalJob">
4554 <allow interface="canonical.launchpad.interfaces.IMessageJob"/>
4555 <allow interface="lp.code.interfaces.branchmergeproposal.ICreateMergeProposalJob"/>
4556 </class>
4557+ <securedutility
4558+ component="lp.code.model.branchmergeproposaljob.CreateMergeProposalJob"
4559+ provides="lp.code.interfaces.branchmergeproposal.ICreateMergeProposalJobSource">
4560+ <allow interface="lp.code.interfaces.branchmergeproposal.ICreateMergeProposalJobSource"/>
4561+ </securedutility>
4562+
4563+ <class class="lp.code.model.branchmergeproposaljob.MergeProposalCreatedJob">
4564+ <allow interface="lp.code.interfaces.branchmergeproposal.IBranchMergeProposalJob"/>
4565+ <allow interface="lp.code.interfaces.branchmergeproposal.IMergeProposalCreatedJob"/>
4566+ </class>
4567+ <securedutility
4568+ component="lp.code.model.branchmergeproposaljob.MergeProposalCreatedJob"
4569+ provides="lp.code.interfaces.branchmergeproposal.IMergeProposalCreatedJobSource">
4570+ <allow interface="lp.code.interfaces.branchmergeproposal.IMergeProposalCreatedJobSource"/>
4571+ </securedutility>
4572+
4573+ <securedutility
4574+ component="lp.code.model.branchmergeproposaljob.BranchMergeProposalJobSource"
4575+ provides="lp.code.interfaces.branchmergeproposal.IBranchMergeProposalJobSource">
4576+ <allow interface="lp.code.interfaces.branchmergeproposal.IBranchMergeProposalJobSource"/>
4577+ </securedutility>
4578+
4579+ <class class="lp.code.model.branchmergeproposaljob.UpdatePreviewDiffJob">
4580+ <allow interface="lp.code.interfaces.branchmergeproposal.IUpdatePreviewDiffJob" />
4581+ <allow interface="lp.code.interfaces.branchmergeproposal.IBranchMergeProposalJob" />
4582+ </class>
4583+ <securedutility
4584+ component="lp.code.model.branchmergeproposaljob.UpdatePreviewDiffJob"
4585+ provides="lp.code.interfaces.branchmergeproposal.IUpdatePreviewDiffJobSource">
4586+ <allow interface="lp.code.interfaces.branchmergeproposal.IUpdatePreviewDiffJobSource"/>
4587+ </securedutility>
4588+
4589+ <class class="lp.code.model.branchmergeproposaljob.CodeReviewCommentEmailJob">
4590+ <allow interface="lp.code.interfaces.branchmergeproposal.ICodeReviewCommentEmailJob" />
4591+ <allow interface="lp.code.interfaces.branchmergeproposal.IBranchMergeProposalJob" />
4592+ </class>
4593+ <securedutility
4594+ component="lp.code.model.branchmergeproposaljob.CodeReviewCommentEmailJob"
4595+ provides="lp.code.interfaces.branchmergeproposal.ICodeReviewCommentEmailJobSource">
4596+ <allow interface="lp.code.interfaces.branchmergeproposal.ICodeReviewCommentEmailJobSource"/>
4597+ </securedutility>
4598+
4599+ <class class="lp.code.model.branchmergeproposaljob.ReviewRequestedEmailJob">
4600+ <allow interface="lp.code.interfaces.branchmergeproposal.IReviewRequestedEmailJob" />
4601+ <allow interface="lp.code.interfaces.branchmergeproposal.IBranchMergeProposalJob" />
4602+ </class>
4603+ <securedutility
4604+ component="lp.code.model.branchmergeproposaljob.ReviewRequestedEmailJob"
4605+ provides="lp.code.interfaces.branchmergeproposal.IReviewRequestedEmailJobSource">
4606+ <allow interface="lp.code.interfaces.branchmergeproposal.IReviewRequestedEmailJobSource"/>
4607+ </securedutility>
4608+
4609+ <class class="lp.code.model.branchmergeproposaljob.MergeProposalUpdatedEmailJob">
4610+ <allow interface="lp.services.job.interfaces.job.IRunnableJob" />
4611+ <allow interface="lp.code.interfaces.branchmergeproposal.IBranchMergeProposalJob" />
4612+ </class>
4613+ <securedutility
4614+ component="lp.code.model.branchmergeproposaljob.MergeProposalUpdatedEmailJob"
4615+ provides="lp.code.interfaces.branchmergeproposal.IMergeProposalUpdatedEmailJobSource">
4616+ <allow interface="lp.code.interfaces.branchmergeproposal.IMergeProposalUpdatedEmailJobSource"/>
4617+ </securedutility>
4618+
4619+ <!-- Branch Merge Proposal Subscribers -->
4620+
4621 <subscriber
4622 for="lp.code.interfaces.branchmergeproposal.IBranchMergeProposal
4623 lp.code.interfaces.event.INewBranchMergeProposalEvent"
4624- handler="lp.code.mail.branchmergeproposal.send_merge_proposal_created_notifications"/>
4625+ handler="lp.code.subscribers.branchmergeproposal.merge_proposal_created"/>
4626 <subscriber
4627 for="lp.code.interfaces.branchmergeproposal.IBranchMergeProposal
4628 lazr.lifecycle.interfaces.IObjectModifiedEvent"
4629- handler="lp.code.mail.branchmergeproposal.send_merge_proposal_modified_notifications"/>
4630+ handler="lp.code.subscribers.branchmergeproposal.merge_proposal_modified"/>
4631 <subscriber
4632 for="lp.code.interfaces.branchmergeproposal.IBranchMergeProposal
4633 lp.code.interfaces.event.INewBranchMergeProposalEvent"
4634@@ -297,10 +362,6 @@
4635 for="lp.code.interfaces.branchmergeproposal.IBranchMergeProposal"
4636 provides="canonical.launchpad.webapp.interfaces.IPrimaryContext"
4637 factory="lp.code.browser.branchmergeproposal.BranchMergeProposalPrimaryContext"/>
4638- <class class="lp.code.model.branchmergeproposaljob.MergeProposalCreatedJob">
4639- <allow interface="lp.code.interfaces.branchmergeproposal.IBranchMergeProposalJob"/>
4640- <allow interface="lp.code.interfaces.branchmergeproposal.IMergeProposalCreatedJob"/>
4641- </class>
4642
4643 <!-- hierarchy -->
4644
4645@@ -309,16 +370,6 @@
4646 provides="lp.code.interfaces.branchmergeproposal.IBranchMergeProposalGetter">
4647 <allow interface="lp.code.interfaces.branchmergeproposal.IBranchMergeProposalGetter"/>
4648 </securedutility>
4649- <securedutility
4650- component="lp.code.model.branchmergeproposaljob.CreateMergeProposalJob"
4651- provides="lp.code.interfaces.branchmergeproposal.ICreateMergeProposalJobSource">
4652- <allow interface="lp.code.interfaces.branchmergeproposal.ICreateMergeProposalJobSource"/>
4653- </securedutility>
4654- <securedutility
4655- component="lp.code.model.branchmergeproposaljob.MergeProposalCreatedJob"
4656- provides="lp.code.interfaces.branchmergeproposal.IMergeProposalCreatedJobSource">
4657- <allow interface="lp.code.interfaces.branchmergeproposal.IMergeProposalCreatedJobSource"/>
4658- </securedutility>
4659 <class
4660 class="lp.code.model.seriessourcepackagebranch.SeriesSourcePackageBranch">
4661 <allow interface="lp.code.interfaces.seriessourcepackagebranch.ISeriesSourcePackageBranch"/>
4662@@ -907,15 +958,6 @@
4663 <allow interface="lp.code.interfaces.revision.IRevisionSet" />
4664 </securedutility>
4665
4666- <securedutility
4667- component="lp.code.model.branchmergeproposaljob.UpdatePreviewDiffJob"
4668- provides="lp.code.interfaces.branchmergeproposal.IUpdatePreviewDiffJobSource">
4669- <allow interface="lp.code.interfaces.branchmergeproposal.IUpdatePreviewDiffJobSource"/>
4670- </securedutility>
4671- <class class="lp.code.model.branchmergeproposaljob.UpdatePreviewDiffJob">
4672- <allow interface="lp.services.job.interfaces.job.IRunnableJob" />
4673- <allow interface="lp.code.interfaces.branchmergeproposal.IBranchMergeProposalJob" />
4674- </class>
4675
4676 <securedutility
4677 component="lp.code.model.branchjob.BranchUpgradeJob"
4678
4679=== modified file 'lib/lp/code/doc/branch-merge-proposal-notifications.txt'
4680--- lib/lp/code/doc/branch-merge-proposal-notifications.txt 2010-02-19 02:15:27 +0000
4681+++ lib/lp/code/doc/branch-merge-proposal-notifications.txt 2010-04-27 02:13:38 +0000
4682@@ -15,7 +15,7 @@
4683 ... BranchSubscriptionDiffSize, BranchSubscriptionNotificationLevel,
4684 ... CodeReviewNotificationLevel)
4685 >>> from lp.code.interfaces.branchmergeproposal import (
4686- ... IMergeProposalCreatedJobSource)
4687+ ... IBranchMergeProposalJobSource)
4688 >>> from lp.code.model.diff import PreviewDiff
4689 >>> from lp.testing.mail_helpers import pop_notifications
4690 >>> import transaction
4691@@ -103,8 +103,11 @@
4692 >>> bmp = source_branch.addLandingTarget(
4693 ... registrant, target_branch)
4694 >>> removeSecurityProxy(bmp).preview_diff = preview_diff
4695- >>> [job,] = list(getUtility(IMergeProposalCreatedJobSource).iterReady())
4696- >>> job.run(_create_preview=False)
4697+ >>> # Fake the update preview diff as done.
4698+ >>> bmp.next_preview_diff_job.start()
4699+ >>> bmp.next_preview_diff_job.complete()
4700+ >>> [job] = list(getUtility(IBranchMergeProposalJobSource).iterReady())
4701+ >>> job.run()
4702 >>> notifications = pop_notifications(
4703 ... sort_key=lambda n: n.get('X-Envelope-To'))
4704
4705@@ -155,8 +158,11 @@
4706 ... registrant, target_branch,
4707 ... description=initial_comment, review_requests=reviewers)
4708 >>> removeSecurityProxy(bmp).preview_diff = preview_diff
4709- >>> [job,] = list(getUtility(IMergeProposalCreatedJobSource).iterReady())
4710- >>> job.run(_create_preview=False)
4711+ >>> # Fake the update preview diff as done.
4712+ >>> bmp.next_preview_diff_job.start()
4713+ >>> bmp.next_preview_diff_job.complete()
4714+ >>> [job] = list(getUtility(IBranchMergeProposalJobSource).iterReady())
4715+ >>> job.run()
4716 >>> notifications = pop_notifications(
4717 ... sort_key=lambda n: n.get('X-Envelope-To'))
4718 >>> for notification in notifications:
4719
4720=== modified file 'lib/lp/code/doc/codereviewcomment.txt'
4721--- lib/lp/code/doc/codereviewcomment.txt 2010-02-18 00:35:39 +0000
4722+++ lib/lp/code/doc/codereviewcomment.txt 2010-04-27 02:13:38 +0000
4723@@ -45,6 +45,15 @@
4724 >>> comment3.message.parent == comment2.message
4725 True
4726
4727+When comments are added, a job is created to send the emails to the
4728+subscribers of the merge proposal.
4729+
4730+    >>> # Needed for now so that iterReady will see the jobs.
4731+ >>> factory.makeRevisionsForBranch(merge_proposal.source_branch)
4732+ >>> factory.makeRevisionsForBranch(merge_proposal.target_branch)
4733+ >>> from lp.code.tests.helpers import mark_all_merge_proposal_jobs_done
4734+ >>> mark_all_merge_proposal_jobs_done()
4735+
4736 If there is a subscriber to any of the branches involved in the merge,
4737 a notification is produced when the comment is created.
4738
4739@@ -60,9 +69,18 @@
4740 >>> from lp.testing.mail_helpers import (
4741 ... pop_notifications, print_emails)
4742 >>> _unused = pop_notifications()
4743+ >>> merge_proposal.root_message_id = '<201003111740.test.root@example.com>'
4744 >>> comment = merge_proposal.createComment(
4745 ... sender, 'Please merge', 'This patch is very nice.',
4746 ... vote=CodeReviewVote.APPROVE, review_type='DB')
4747+
4748+Now run the pending job to send the email.
4749+
4750+ >>> from lp.code.interfaces.branchmergeproposal import (
4751+ ... IBranchMergeProposalJobSource)
4752+ >>> [job] = list(getUtility(IBranchMergeProposalJobSource).iterReady())
4753+ >>> job.run()
4754+
4755 >>> notifications = pop_notifications()
4756 >>> notifications = [email for email in notifications if
4757 ... email['X-Launchpad-Message-Rationale'] == 'Owner']
4758
4759=== modified file 'lib/lp/code/interfaces/branchmergeproposal.py'
4760--- lib/lp/code/interfaces/branchmergeproposal.py 2010-03-05 03:35:10 +0000
4761+++ lib/lp/code/interfaces/branchmergeproposal.py 2010-04-27 02:13:38 +0000
4762@@ -11,11 +11,19 @@
4763 'IBranchMergeProposal',
4764 'IBranchMergeProposalGetter',
4765 'IBranchMergeProposalJob',
4766+ 'IBranchMergeProposalJobSource',
4767 'IBranchMergeProposalListingBatchNavigator',
4768+ 'ICodeReviewCommentEmailJob',
4769+ 'ICodeReviewCommentEmailJobSource',
4770 'ICreateMergeProposalJob',
4771 'ICreateMergeProposalJobSource',
4772 'IMergeProposalCreatedJob',
4773 'IMergeProposalCreatedJobSource',
4774+ 'IMergeProposalUpdatedEmailJob',
4775+ 'IMergeProposalUpdatedEmailJobSource',
4776+ 'IReviewRequestedEmailJob',
4777+ 'IReviewRequestedEmailJobSource',
4778+ 'IUpdatePreviewDiffJob',
4779 'IUpdatePreviewDiffJobSource',
4780 'notify_modified',
4781 ]
4782@@ -29,12 +37,13 @@
4783
4784 from canonical.launchpad import _
4785 from canonical.launchpad.fields import PublicPersonChoice, Summary, Whiteboard
4786-from canonical.launchpad.interfaces import IBug, IPrivacy
4787+from canonical.launchpad.interfaces.launchpad import IPrivacy
4788+from lp.bugs.interfaces.bug import IBug
4789 from lp.code.enums import BranchMergeProposalStatus, CodeReviewVote
4790 from lp.code.interfaces.branch import IBranch
4791 from lp.registry.interfaces.person import IPerson
4792 from lp.code.interfaces.diff import IPreviewDiff, IStaticDiff
4793-from lp.services.job.interfaces.job import IJob, IRunnableJob
4794+from lp.services.job.interfaces.job import IJob, IJobSource, IRunnableJob
4795 from canonical.launchpad.webapp.interfaces import ITableBatchNavigator
4796 from lazr.restful.fields import CollectionField, Reference
4797 from lazr.restful.declarations import (
4798@@ -500,6 +509,10 @@
4799 """Destroy this object."""
4800
4801
4802+class IBranchMergeProposalJobSource(IJobSource):
4803+ """A job source that will get all supported merge proposal jobs."""
4804+
4805+
4806 class IBranchMergeProposalListingBatchNavigator(ITableBatchNavigator):
4807 """A marker interface for registering the appropriate listings."""
4808
4809@@ -570,15 +583,12 @@
4810 """
4811
4812
4813-class ICreateMergeProposalJobSource(Interface):
4814+class ICreateMergeProposalJobSource(IJobSource):
4815 """Acquire MergeProposalJobs."""
4816
4817 def create(message_bytes):
4818 """Return a CreateMergeProposalJob for this message."""
4819
4820- def iterReady():
4821- """Iterate through jobs that are ready to run."""
4822-
4823
4824 class IMergeProposalCreatedJob(IRunnableJob):
4825 """Interface for review diffs."""
4826@@ -590,8 +600,12 @@
4827 def create(bmp):
4828 """Create a MergeProposalCreatedJob for the specified Job."""
4829
4830- def iterReady():
4831- """Iterate through all ready MergeProposalCreatedJobs."""
4832+
4833+class IUpdatePreviewDiffJob(IRunnableJob):
4834+ """Interface for the job to update the diff for a merge proposal."""
4835+
4836+ def checkReady():
4837+ """Check to see if this job is ready to run."""
4838
4839
4840 class IUpdatePreviewDiffJobSource(Interface):
4841@@ -603,11 +617,55 @@
4842 def get(id):
4843 """Return the UpdatePreviewDiffJob with this id."""
4844
4845- def iterReady():
4846- """Iterate through jobs ready to update preview diffs."""
4847-
4848- def contextManager():
4849- """Get a context for running this kind of job in."""
4850+
4851+class ICodeReviewCommentEmailJob(IRunnableJob):
4852+ """Interface for the job to send code review comment email."""
4853+
4854+ code_review_comment = Attribute('The code review comment.')
4855+
4856+
4857+class ICodeReviewCommentEmailJobSource(Interface):
4858+    """Create or retrieve jobs that send code review comment email."""
4859+
4860+ def create(code_review_comment):
4861+ """Create a job to email subscribers about the comment."""
4862+
4863+
4864+class IReviewRequestedEmailJob(IRunnableJob):
4865+    """Interface for the job that sends review request emails."""
4866+
4867+ reviewer = Attribute('The person or team asked to do the review.')
4868+ requester = Attribute('The person who has asked for the review.')
4869+
4870+
4871+class IReviewRequestedEmailJobSource(Interface):
4872+ """Create or retrieve jobs that email review requests."""
4873+
4874+ def create(review_request):
4875+ """Create a job to email a review request.
4876+
4877+ :param review_request: A vote reference for the requested review.
4878+ """
4879+
4880+
4881+class IMergeProposalUpdatedEmailJob(IRunnableJob):
4882+    """Interface for the job that sends email about merge proposal updates."""
4883+
4884+ editor = Attribute('The person that did the editing.')
4885+ delta_text = Attribute(
4886+ 'The textual representation of the changed fields.')
4887+
4888+
4889+class IMergeProposalUpdatedEmailJobSource(Interface):
4890+ """Create or retrieve jobs that email about merge proposal updates."""
4891+
4892+ def create(merge_proposal, delta_text, editor):
4893+ """Create a job to email merge proposal updates to subscribers.
4894+
4895+ :param merge_proposal: The merge proposal that has been edited.
4896+ :param delta_text: The text representation of the changed fields.
4897+ :param editor: The person who did the editing.
4898+ """
4899
4900
4901 # XXX: JonathanLange 2010-01-06: This is only used in the scanner, perhaps it
4902
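
A consumer of the new IBranchMergeProposalJobSource drains whatever jobs are ready, as the doctests above do by hand. A minimal sketch of that loop (error handling elided; the merge-proposal-jobs cron script is the real entry point):

    from zope.component import getUtility

    from lp.code.interfaces.branchmergeproposal import (
        IBranchMergeProposalJobSource)

    def run_ready_jobs(log):
        """Run every merge proposal job that is ready."""
        for job in getUtility(IBranchMergeProposalJobSource).iterReady():
            log.debug('Running %r', job)
            job.run()
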
4903=== modified file 'lib/lp/code/interfaces/codehosting.py'
4904--- lib/lp/code/interfaces/codehosting.py 2010-02-24 04:25:38 +0000
4905+++ lib/lp/code/interfaces/codehosting.py 2010-04-27 02:13:38 +0000
4906@@ -179,6 +179,18 @@
4907 :param branchID: a branch ID.
4908 """
4909
4910+ def branchChanged(branch_id, stacked_on_url, last_revision_id):
4911+ """Record that a branch has been changed.
4912+
4913+ This method records the stacked on branch and tip revision id of the
4914+ branch and creates a scan job if the tip revision id has changed.
4915+
4916+        :param branch_id: The database id of the branch to operate on.
4917+ :param stacked_on_url: The unique name of the branch this branch is
4918+ stacked on, or '' if this branch is not stacked.
4919+ :param last_revision_id: The tip revision ID of the branch.
4920+ """
4921+
4922 def translatePath(requester_id, path):
4923 """Translate 'path' so that the codehosting transport can access it.
4924
4925
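
As a rough illustration of the contract branchChanged documents (the attribute and helper names below are assumptions, not the real endpoint):

    def branch_changed(branch, stacked_on_url, last_revision_id):
        # Record the stacked-on branch; '' means not stacked.
        branch.stacked_on = stacked_on_url or None      # assumed attribute
        # Only queue a scan when the tip has actually moved.
        if branch.tip_revision_id != last_revision_id:  # assumed attribute
            branch.tip_revision_id = last_revision_id
            queue_scan_job(branch)  # hypothetical helper
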
4926=== modified file 'lib/lp/code/interfaces/codereviewcomment.py'
4927--- lib/lp/code/interfaces/codereviewcomment.py 2010-04-22 04:40:15 +0000
4928+++ lib/lp/code/interfaces/codereviewcomment.py 2010-04-27 02:13:38 +0000
4929@@ -74,12 +74,16 @@
4930 attachments.
4931 """
4932
4933+ def getOriginalEmail():
4934+        """Return the original email as an email message object, if any."""
4935+
4936 as_quoted_email = exported(
4937 TextLine(
4938 title=_('The message as quoted in email.'),
4939 readonly=True))
4940
4941
4942+
4943 class ICodeReviewCommentDeletion(Interface):
4944 """This interface provides deletion of CodeReviewComments.
4945
4946
4947=== modified file 'lib/lp/code/mail/branch.py'
4948--- lib/lp/code/mail/branch.py 2010-03-09 16:58:30 +0000
4949+++ lib/lp/code/mail/branch.py 2010-04-27 02:13:38 +0000
4950@@ -69,23 +69,23 @@
4951 review_level=subscription.review_level)
4952
4953 @classmethod
4954- def forReviewer(cls, vote_reference, recipient,
4955+ def forReviewer(cls, branch_merge_proposal, pending_review, reviewer,
4956 branch_identity_cache=None):
4957 """Construct RecipientReason for a reviewer.
4958
4959 The reviewer will be the sole recipient.
4960 """
4961- merge_proposal = vote_reference.branch_merge_proposal
4962- branch = merge_proposal.source_branch
4963- if vote_reference.comment is None:
4964+ branch = branch_merge_proposal.source_branch
4965+ if pending_review:
4966 reason_template = (
4967 '%(entity_is)s requested to review %(merge_proposal)s.')
4968 else:
4969 reason_template = (
4970 '%(entity_is)s reviewing %(merge_proposal)s.')
4971- return cls(vote_reference.reviewer, recipient, branch,
4972- 'Reviewer', reason_template, merge_proposal,
4973- branch_identity_cache=branch_identity_cache)
4974+ return cls(reviewer, reviewer, branch,
4975+ cls.makeRationale('Reviewer', reviewer),
4976+ reason_template, branch_merge_proposal,
4977+ branch_identity_cache=branch_identity_cache)
4978
4979 @classmethod
4980 def forRegistrant(cls, merge_proposal, branch_identity_cache=None):
4981@@ -93,7 +93,6 @@
4982
4983 The registrant will be the sole recipient.
4984 """
4985- branch = merge_proposal.source_branch
4986 reason_template = 'You proposed %(branch_name)s for merging.'
4987 return cls(merge_proposal.registrant, merge_proposal.registrant,
4988 merge_proposal.source_branch,
4989@@ -124,16 +123,16 @@
4990 The owner will be the sole recipient.
4991 """
4992 return cls(branch.owner, recipient, branch,
4993- cls.makeRationale('Owner', branch.owner, recipient),
4994+ cls.makeRationale('Owner', branch.owner),
4995 'You are getting this email as %(lc_entity_is)s the'
4996 ' owner of the branch and someone has edited the'
4997 ' details.',
4998 branch_identity_cache=branch_identity_cache)
4999
5000 @staticmethod
The diff has been truncated for viewing.