Merge launchpad:master into launchpad:db-devel
- Git
- lp:launchpad
- master
- Merge into db-devel
Proposed by
Ines Almeida
Status: | Merged |
---|---|
Approved by: | Simone Pelosi |
Approved revision: | b05306ed0da998336b30540a42d9826b8df69fca |
Merge reported by: | Otto Co-Pilot |
Merged at revision: | not available |
Proposed branch: | launchpad:master |
Merge into: | launchpad:db-devel |
Diff against target: |
7443 lines (+1371/-1501) 163 files modified
.pre-commit-config.yaml (+5/-2) charm/launchpad-ppa-publisher/charmcraft.yaml (+1/-0) charm/launchpad-ppa-uploader/charmcraft.yaml (+1/-0) cronscripts/parse-librarian-apache-access-logs.py (+6/-5) database/replication/Makefile (+27/-18) database/schema/security.cfg (+3/-3) dev/null (+0/-14) doc/reference/python.rst (+1/-16) lib/lp/answers/browser/questiontarget.py (+1/-1) lib/lp/answers/interfaces/question.py (+2/-2) lib/lp/answers/interfaces/questionmessage.py (+1/-9) lib/lp/app/doc/batch-navigation.rst (+1/-2) lib/lp/app/validators/README.txt (+2/-4) lib/lp/archivepublisher/tests/test_sync_signingkeys.py (+4/-1) lib/lp/archiveuploader/tests/test_nascentupload_documentation.py (+2/-1) lib/lp/archiveuploader/uploadprocessor.py (+2/-4) lib/lp/blueprints/doc/specification.rst (+5/-4) lib/lp/blueprints/model/specificationworkitem.py (+6/-3) lib/lp/blueprints/model/sprint.py (+1/-6) lib/lp/blueprints/model/sprintattendance.py (+5/-4) lib/lp/blueprints/vocabularies/specificationdependency.py (+1/-1) lib/lp/bugs/browser/tests/buglinktarget-views.rst (+1/-1) lib/lp/bugs/model/bug.py (+2/-2) lib/lp/bugs/model/bugtask.py (+1/-1) lib/lp/bugs/model/bugtasksearch.py (+34/-38) lib/lp/bugs/stories/bugs/xx-bug-text-pages.rst (+1/-1) lib/lp/bugs/stories/webservice/xx-bug.rst (+45/-0) lib/lp/buildmaster/interfaces/processor.py (+1/-1) lib/lp/charms/model/charmrecipebuild.py (+2/-2) lib/lp/code/doc/codeimport-machine.rst (+1/-1) lib/lp/code/interfaces/branch.py (+1/-1) lib/lp/code/mail/tests/test_codehandler.py (+1/-1) lib/lp/code/model/cibuild.py (+1/-1) lib/lp/code/model/sourcepackagerecipe.py (+3/-3) lib/lp/code/model/sourcepackagerecipebuild.py (+1/-1) lib/lp/code/model/tests/test_codereviewkarma.py (+1/-1) lib/lp/code/model/tests/test_revisionauthor.py (+6/-6) lib/lp/code/xmlrpc/tests/test_git.py (+14/-25) lib/lp/codehosting/tests/test_acceptance.py (+7/-5) lib/lp/oci/model/ocirecipebuild.py (+3/-3) lib/lp/registry/browser/person.py (+5/-6) 
lib/lp/registry/browser/tests/test_person_webservice.py (+16/-0) lib/lp/registry/doc/person-account.rst (+5/-3) lib/lp/registry/doc/person-merge.rst (+4/-0) lib/lp/registry/doc/pillar.rst (+7/-3) lib/lp/registry/doc/vocabularies.rst (+0/-1) lib/lp/registry/interfaces/person.py (+7/-6) lib/lp/registry/model/distribution.py (+12/-15) lib/lp/registry/model/distributionsourcepackage.py (+2/-2) lib/lp/registry/model/distroseries.py (+1/-1) lib/lp/registry/model/distroseriesdifference.py (+5/-6) lib/lp/registry/model/mailinglist.py (+1/-2) lib/lp/registry/model/person.py (+180/-142) lib/lp/registry/model/pillar.py (+20/-15) lib/lp/registry/model/productrelease.py (+1/-1) lib/lp/registry/model/sharingjob.py (+3/-3) lib/lp/registry/model/teammembership.py (+14/-1) lib/lp/registry/personmerge.py (+1/-2) lib/lp/registry/scripts/closeaccount.py (+1/-1) lib/lp/registry/scripts/populate_distroseriesdiff.py (+1/-1) lib/lp/registry/security.py (+19/-1) lib/lp/registry/stories/distributionmirror/xx-reassign-distributionmirror.rst (+5/-2) lib/lp/registry/stories/person/xx-approve-members.rst (+3/-1) lib/lp/registry/stories/productrelease/xx-productrelease-basics.rst (+1/-1) lib/lp/registry/stories/productrelease/xx-productrelease-view.rst (+5/-2) lib/lp/registry/stories/teammembership/xx-add-member.rst (+5/-3) lib/lp/registry/stories/teammembership/xx-teammembership.rst (+5/-4) lib/lp/registry/tests/test_person.py (+6/-6) lib/lp/registry/tests/test_teammembership.py (+1/-1) lib/lp/registry/vocabularies.py (+18/-20) lib/lp/scripts/garbo.py (+1/-1) lib/lp/scripts/harness.py (+1/-1) lib/lp/services/apachelogparser/model/parsedapachelog.py (+6/-3) lib/lp/services/auth/tests/test_model.py (+57/-24) lib/lp/services/authserver/tests/test_authserver.py (+2/-2) lib/lp/services/authserver/xmlrpc.py (+2/-2) lib/lp/services/database/doc/security-proxies.rst (+12/-10) lib/lp/services/database/doc/storm-security-proxies.rst (+2/-2) lib/lp/services/database/interfaces.py (+0/-11) 
lib/lp/services/database/multitablecopy.py (+5/-5) lib/lp/services/database/postgresql.py (+3/-3) lib/lp/services/database/sqlbase.py (+40/-223) lib/lp/services/database/sqlobject/__init__.py (+1/-26) lib/lp/services/database/tests/test_transaction_decorators.py (+3/-1) lib/lp/services/features/model.py (+1/-2) lib/lp/services/librarian/client.py (+4/-8) lib/lp/services/librarian/model.py (+81/-69) lib/lp/services/librarian/tests/test_client.py (+6/-5) lib/lp/services/librarianserver/db.py (+19/-12) lib/lp/services/librarianserver/librariangc.py (+3/-3) lib/lp/services/librarianserver/storage.py (+4/-3) lib/lp/services/librarianserver/testing/fake.py (+6/-3) lib/lp/services/librarianserver/testing/server.py (+2/-1) lib/lp/services/librarianserver/tests/test_db.py (+9/-8) lib/lp/services/librarianserver/tests/test_gc.py (+78/-69) lib/lp/services/librarianserver/tests/test_storage.py (+8/-3) lib/lp/services/librarianserver/tests/test_storage_db.py (+10/-4) lib/lp/services/librarianserver/tests/test_web.py (+1/-1) lib/lp/services/librarianserver/web.py (+1/-1) lib/lp/services/messages/interfaces/message.py (+8/-2) lib/lp/services/messages/model/message.py (+1/-2) lib/lp/services/openid/model/openididentifier.py (+6/-3) lib/lp/services/session/model.py (+5/-4) lib/lp/services/statistics/tests/test_update_stats.py (+1/-1) lib/lp/services/tarfile_helpers.py (+0/-10) lib/lp/services/verification/doc/logintoken.rst (+6/-5) lib/lp/services/webapp/configure.zcml (+0/-8) lib/lp/services/webapp/database.zcml (+0/-3) lib/lp/services/webapp/marshallers.py (+2/-3) lib/lp/services/webapp/snapshot.py (+4/-5) lib/lp/services/webapp/tests/test_servers.py (+26/-14) lib/lp/services/webapp/vocabulary.py (+1/-164) lib/lp/services/webservice/configure.zcml (+0/-14) lib/lp/services/webservice/doc/webservice-marshallers.rst (+7/-7) lib/lp/services/worlddata/interfaces/language.py (+2/-8) lib/lp/snappy/browser/snapbase.py (+2/-4) lib/lp/snappy/model/snap.py (+1/-1) 
lib/lp/snappy/model/snapbuild.py (+3/-3) lib/lp/snappy/templates/snap-new.pt (+4/-4) lib/lp/soyuz/browser/archive.py (+2/-4) lib/lp/soyuz/browser/queue.py (+2/-2) lib/lp/soyuz/browser/tests/distroseriesqueue-views.rst (+2/-1) lib/lp/soyuz/doc/gina-multiple-arch.rst (+2/-2) lib/lp/soyuz/doc/gina.rst (+8/-5) lib/lp/soyuz/doc/package-diff.rst (+3/-2) lib/lp/soyuz/doc/soyuz-set-of-uploads.rst (+2/-2) lib/lp/soyuz/model/archive.py (+153/-117) lib/lp/soyuz/model/archivefile.py (+2/-2) lib/lp/soyuz/model/archivesubscriber.py (+1/-1) lib/lp/soyuz/model/binarypackagebuild.py (+4/-4) lib/lp/soyuz/model/distributionsourcepackagerelease.py (+2/-2) lib/lp/soyuz/model/initializedistroseriesjob.py (+3/-1) lib/lp/soyuz/model/livefsbuild.py (+2/-2) lib/lp/soyuz/model/packagecloner.py (+8/-8) lib/lp/soyuz/model/packagediff.py (+1/-1) lib/lp/soyuz/model/publishing.py (+9/-13) lib/lp/soyuz/model/queue.py (+2/-2) lib/lp/soyuz/model/sourcepackagerelease.py (+1/-1) lib/lp/soyuz/scripts/gina/README (+1/-1) lib/lp/soyuz/scripts/initialize_distroseries.py (+2/-2) lib/lp/soyuz/scripts/packagecopier.py (+1/-1) lib/lp/soyuz/tests/test_archive.py (+14/-15) lib/lp/soyuz/tests/test_initializedistroseriesjob.py (+48/-25) lib/lp/soyuz/tests/test_packagediff.py (+1/-1) lib/lp/soyuz/vocabularies.py (+16/-19) lib/lp/testing/__init__.py (+6/-3) lib/lp/testing/factory.py (+2/-0) lib/lp/testing/html5browser.py (+1/-1) lib/lp/testing/layers.py (+28/-2) lib/lp/testing/tests/test_html5browser.py (+7/-7) lib/lp/translations/doc/rosetta-karma.rst (+2/-2) lib/lp/translations/doc/rosetta-translation.rst (+4/-4) lib/lp/translations/doc/translationmessage-destroy.rst (+1/-2) lib/lp/translations/model/distroserieslanguage.py (+1/-1) lib/lp/translations/model/poexportrequest.py (+1/-1) lib/lp/translations/model/pofile.py (+2/-2) lib/lp/translations/model/potemplate.py (+1/-1) lib/lp/translations/model/translationgroup.py (+5/-5) lib/lp/translations/scripts/fix_plural_forms.py (+2/-5) 
lib/lp/translations/scripts/tests/test_remove_translations.py (+4/-4) scripts/librarian-report.py (+3/-3) utilities/make-dummy-hosted-branches (+3/-3) utilities/snakefood/Makefile (+2/-2) |
Related bugs: |
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
Simone Pelosi | Approve | ||
Review via email: mp+452145@code.launchpad.net |
Commit message
Merge branch 'master' into db-devel
This merge is needed to help reduce how often the buildbot tests fail (a test fix has been merged into master, along with a few other changes that could potentially help with the tests).
Description of the change
To post a comment you must log in.
Preview Diff
[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1 | diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml |
2 | index eafc49d..12369b7 100644 |
3 | --- a/.pre-commit-config.yaml |
4 | +++ b/.pre-commit-config.yaml |
5 | @@ -35,8 +35,8 @@ repos: |
6 | lib/contrib/.* |
7 | |utilities/community-contributions\.py |
8 | )$ |
9 | -- repo: https://github.com/psf/black |
10 | - rev: 23.3.0 |
11 | +- repo: https://github.com/psf/black-pre-commit-mirror |
12 | + rev: 23.9.1 |
13 | hooks: |
14 | - id: black |
15 | exclude: | |
16 | @@ -65,6 +65,9 @@ repos: |
17 | hooks: |
18 | - id: eslint |
19 | args: [--quiet] |
20 | + # 20.7.0 upgraded from npm 9.8.1 to 10.1.0, which appears to fail to |
21 | + # use the proxy correctly in Launchpad CI builds. |
22 | + language_version: "20.6.1" |
23 | - repo: https://github.com/keewis/blackdoc |
24 | rev: v0.3.8 |
25 | hooks: |
26 | diff --git a/charm/launchpad-ppa-publisher/charmcraft.yaml b/charm/launchpad-ppa-publisher/charmcraft.yaml |
27 | index 76dc78c..13d0d15 100644 |
28 | --- a/charm/launchpad-ppa-publisher/charmcraft.yaml |
29 | +++ b/charm/launchpad-ppa-publisher/charmcraft.yaml |
30 | @@ -73,3 +73,4 @@ parts: |
31 | - CHARM_INTERFACES_DIR: $CRAFT_STAGE/layers/interface |
32 | - PIP_NO_INDEX: "true" |
33 | - PIP_FIND_LINKS: $CRAFT_STAGE/charm-wheels |
34 | + reactive-charm-build-arguments: [--binary-wheels-from-source] |
35 | diff --git a/charm/launchpad-ppa-uploader/charmcraft.yaml b/charm/launchpad-ppa-uploader/charmcraft.yaml |
36 | index 4ff69d8..392d3ad 100644 |
37 | --- a/charm/launchpad-ppa-uploader/charmcraft.yaml |
38 | +++ b/charm/launchpad-ppa-uploader/charmcraft.yaml |
39 | @@ -61,3 +61,4 @@ parts: |
40 | - CHARM_INTERFACES_DIR: $CRAFT_STAGE/layers/interface |
41 | - PIP_NO_INDEX: "true" |
42 | - PIP_FIND_LINKS: $CRAFT_STAGE/charm-wheels |
43 | + reactive-charm-build-arguments: [--binary-wheels-from-source] |
44 | diff --git a/cronscripts/parse-librarian-apache-access-logs.py b/cronscripts/parse-librarian-apache-access-logs.py |
45 | index 935f12e..1a724b5 100755 |
46 | --- a/cronscripts/parse-librarian-apache-access-logs.py |
47 | +++ b/cronscripts/parse-librarian-apache-access-logs.py |
48 | @@ -16,9 +16,9 @@ updating the counts of every LFA, in order to get through the backlog. |
49 | |
50 | import _pythonpath # noqa: F401 |
51 | |
52 | -from storm.sqlobject import SQLObjectNotFound |
53 | from zope.component import getUtility |
54 | |
55 | +from lp.app.errors import NotFoundError |
56 | from lp.services.apachelogparser.script import ParseApacheLogs |
57 | from lp.services.config import config |
58 | from lp.services.librarian.interfaces import ILibraryFileAliasSet |
59 | @@ -47,10 +47,11 @@ class ParseLibrarianApacheLogs(ParseApacheLogs): |
60 | def getDownloadCountUpdater(self, file_id): |
61 | """See `ParseApacheLogs`.""" |
62 | try: |
63 | - return self.libraryfilealias_set[file_id].updateDownloadCount |
64 | - except SQLObjectNotFound: |
65 | - # This file has been deleted from the librarian, so don't |
66 | - # try to store download counters for it. |
67 | + return self.libraryfilealias_set[int(file_id)].updateDownloadCount |
68 | + except (ValueError, NotFoundError): |
69 | + # Either this isn't a valid file ID or this file has been |
70 | + # deleted from the librarian, so don't try to store download |
71 | + # counters for it. |
72 | return None |
73 | |
74 | |
75 | diff --git a/database/replication/Makefile b/database/replication/Makefile |
76 | index 8f63dc0..7cbfa77 100644 |
77 | --- a/database/replication/Makefile |
78 | +++ b/database/replication/Makefile |
79 | @@ -31,9 +31,18 @@ STAGING_CONFIG=staging-db # For swapping fresh db into place. |
80 | STAGING_DUMP=launchpad.dump # Dumpfile to build new staging from. |
81 | STAGING_TABLESPACE=pg_default # 'pg_default' for default |
82 | STAGING_LOGDIR=/srv/staging.launchpad.net/staging-logs |
83 | -DOGFOOD_DBNAME=launchpad_dogfood |
84 | +STAGING_POSTGRESQL_VERSION=10 |
85 | DOGFOOD_DUMP=launchpad.dump |
86 | |
87 | +# Names of underlying PostgreSQL databases. |
88 | +STAGING_DBNAME_MAIN=lpmain_staging |
89 | +STAGING_DBNAME_SESSION=session_staging |
90 | +DOGFOOD_DBNAME=launchpad_dogfood |
91 | + |
92 | +# Names in pgbouncer.ini's `[databases]` section. |
93 | +STAGING_PGBOUNCER_MAIN=launchpad_staging launchpad_staging_slave |
94 | +STAGING_PGBOUNCER_SESSION=session_staging |
95 | + |
96 | STAGING_PGBOUNCER=psql -p 6432 -U pgbouncer -d pgbouncer |
97 | |
98 | PGMASSACRE=../../utilities/pgmassacre.py |
99 | @@ -60,28 +69,27 @@ stagingsetup: |
100 | grep -v -E 'TRIGGER public [^ ]+ _sl_' > ${DUMPLIST} |
101 | |
102 | # Deny new connections to the main DBs and kill any leftovers. |
103 | - ${STAGING_PGBOUNCER} -c 'DISABLE launchpad_staging' |
104 | - ${STAGING_PGBOUNCER} -c 'DISABLE launchpad_staging_slave' |
105 | - ${STAGING_PGBOUNCER} -c 'KILL launchpad_staging' |
106 | - ${STAGING_PGBOUNCER} -c 'KILL launchpad_staging_slave' |
107 | - ${STAGING_PGBOUNCER} -c 'RESUME launchpad_staging' |
108 | - ${STAGING_PGBOUNCER} -c 'RESUME launchpad_staging_slave' |
109 | - -${PGMASSACRE} lpmain_staging |
110 | + set -e; for verb in DISABLE KILL RESUME; do \ |
111 | + for db in ${STAGING_PGBOUNCER_MAIN}; do \ |
112 | + ${STAGING_PGBOUNCER} -c "$$verb $$db"; \ |
113 | + done; \ |
114 | + done |
115 | + -${PGMASSACRE} ${STAGING_DBNAME_MAIN} |
116 | |
117 | # Quickly clear out the session DB. No need to DISABLE here, as |
118 | # we bring the DB back quickly. |
119 | - ${STAGING_PGBOUNCER} -c 'KILL session_staging' |
120 | - psql -d session_staging -c 'TRUNCATE sessiondata CASCADE;' |
121 | - ${STAGING_PGBOUNCER} -c 'RESUME session_staging' |
122 | + ${STAGING_PGBOUNCER} -c 'KILL ${STAGING_PGBOUNCER_SESSION}' |
123 | + psql -d ${STAGING_DBNAME_SESSION} -c 'TRUNCATE sessiondata CASCADE;' |
124 | + ${STAGING_PGBOUNCER} -c 'RESUME ${STAGING_PGBOUNCER_SESSION}' |
125 | |
126 | # Create the DB with the desired default tablespace. |
127 | - ${CREATEDB} --tablespace ${STAGING_TABLESPACE} lpmain_staging |
128 | + ${CREATEDB} --tablespace ${STAGING_TABLESPACE} ${STAGING_DBNAME_MAIN} |
129 | # Restore the database. We need to restore permissions, despite |
130 | # later running security.py, to pull in permissions granted on |
131 | # production to users not maintained by security.py. |
132 | cat ${STAGING_DUMP} \ |
133 | - | ./walblock.py -n 5000 -d /var/lib/postgresql/10/staging/pg_wal \ |
134 | - | pg_restore --dbname=lpmain_staging --no-owner ${EXIT_ON_ERROR} \ |
135 | + | ./walblock.py -n 5000 -d /var/lib/postgresql/${STAGING_POSTGRESQL_VERSION}/staging/pg_wal \ |
136 | + | pg_restore --dbname=${STAGING_DBNAME_MAIN} --no-owner ${EXIT_ON_ERROR} \ |
137 | --use-list=${DUMPLIST} -v |
138 | rm ${DUMPLIST} |
139 | # Apply database patches. |
140 | @@ -92,10 +100,11 @@ stagingsetup: |
141 | LPCONFIG=${STAGING_CONFIG} ${SHHH} ../schema/security.py \ |
142 | --log-file=INFO:${STAGING_LOGDIR}/dbupgrade.log |
143 | @echo Setting feature flags |
144 | - psql -d lpmain_staging -c "INSERT INTO featureflag (flag, scope, priority, value) VALUES ('profiling.enabled', 'team:launchpad', 0, 'on') ON CONFLICT DO NOTHING" |
145 | - psql -d lpmain_staging -c "INSERT INTO featureflag (flag, scope, priority, value) VALUES ('librarian.swift.enabled', 'default', 0, 'on') ON CONFLICT DO NOTHING" |
146 | - ${STAGING_PGBOUNCER} -c 'ENABLE launchpad_staging' |
147 | - ${STAGING_PGBOUNCER} -c 'ENABLE launchpad_staging_slave' |
148 | + psql -d ${STAGING_DBNAME_MAIN} -c "INSERT INTO featureflag (flag, scope, priority, value) VALUES ('profiling.enabled', 'team:launchpad', 0, 'on') ON CONFLICT DO NOTHING" |
149 | + psql -d ${STAGING_DBNAME_MAIN} -c "INSERT INTO featureflag (flag, scope, priority, value) VALUES ('librarian.swift.enabled', 'default', 0, 'on') ON CONFLICT DO NOTHING" |
150 | + set -e; for db in ${STAGING_PGBOUNCER_MAIN}; do \ |
151 | + ${STAGING_PGBOUNCER} -c "ENABLE $$db"; \ |
152 | + done |
153 | |
154 | stagingswitch: |
155 | echo Nothing to do. Staging already built inplace. |
156 | diff --git a/database/schema/security.cfg b/database/schema/security.cfg |
157 | index ab71472..50328ff 100644 |
158 | --- a/database/schema/security.cfg |
159 | +++ b/database/schema/security.cfg |
160 | @@ -3,9 +3,9 @@ |
161 | # |
162 | # Possible permissions: SELECT, INSERT, UPDATE, EXECUTE |
163 | # |
164 | -# Note that we cannot have INSERT only tables if we are using SQLObject, as it |
165 | -# creates new entries by first doing an insert (to get the id) and then |
166 | -# issuing an update |
167 | +# Note that we cannot have INSERT only tables if we are using Storm, as it |
168 | +# sometimes creates new entries by first doing an insert (to get the id) and |
169 | +# then issuing an update. |
170 | [DEFAULT] |
171 | public_schemas= |
172 | |
173 | diff --git a/doc/reference/python.rst b/doc/reference/python.rst |
174 | index e3a76ab..a616784 100644 |
175 | --- a/doc/reference/python.rst |
176 | +++ b/doc/reference/python.rst |
177 | @@ -264,21 +264,6 @@ passes and returns them easier to debug. |
178 | Database-related |
179 | ================ |
180 | |
181 | -Storm |
182 | ------ |
183 | - |
184 | -We use two database ORM (object-relational mapper) APIs in Launchpad, the |
185 | -older and deprecated SQLObject API and the new and improved `Storm |
186 | -<https://storm.canonical.com>`_ API. All new code should use the Storm API, |
187 | -and you are encouraged to convert existing code to Storm as part of your |
188 | -tech-debt payments. |
189 | - |
190 | -.. note:: |
191 | - |
192 | - The SQLObject and Storm ``ResultSet`` interfaces are not compatible, so |
193 | - e.g. if you need to ``UNION`` between these two, you will run into |
194 | - trouble. We are looking into ways to address this. |
195 | - |
196 | Field attributes |
197 | ---------------- |
198 | |
199 | @@ -298,7 +283,7 @@ queries or fragments, e.g.: |
200 | FROM TeamParticipation |
201 | INNER JOIN Person ON TeamParticipation.team = Person.id |
202 | WHERE TeamParticipation.person = %s |
203 | - """ % sqlvalues(personID) |
204 | + """ % sqlvalues(person_id) |
205 | |
206 | This is also easy to cut-and-paste into ``psql`` for interactive testing, |
207 | unlike if you use several lines of single quoted strings. |
208 | diff --git a/lib/lp/answers/browser/questiontarget.py b/lib/lp/answers/browser/questiontarget.py |
209 | index ed993fb..e9556fc 100644 |
210 | --- a/lib/lp/answers/browser/questiontarget.py |
211 | +++ b/lib/lp/answers/browser/questiontarget.py |
212 | @@ -510,7 +510,7 @@ class SearchQuestionsView(UserSupportLanguagesMixin, LaunchpadFormView): |
213 | to question or mdash if there is no related source package. |
214 | """ |
215 | # XXX sinzui 2007-11-27 bug=164435: |
216 | - # SQLObject can refetch the question, so we are comparing ids. |
217 | + # Storm can refetch the question, so we are comparing ids. |
218 | assert self.context.id == question.distribution.id, ( |
219 | "The question.distribution (%s) must be equal to the context (%s)" |
220 | % (question.distribution, self.context) |
221 | diff --git a/lib/lp/answers/interfaces/question.py b/lib/lp/answers/interfaces/question.py |
222 | index dea2c45..00f27ff 100644 |
223 | --- a/lib/lp/answers/interfaces/question.py |
224 | +++ b/lib/lp/answers/interfaces/question.py |
225 | @@ -482,8 +482,8 @@ class IQuestion(IHasOwner): |
226 | |
227 | Return the created IQuestionMessage. |
228 | |
229 | - (Note this method is named expireQuestion and not expire because of |
230 | - conflicts with SQLObject.) |
231 | + (Note this method is named expireQuestion and not expire because it |
232 | + used to conflict with SQLObject.) |
233 | |
234 | This method should fire an IObjectCreatedEvent for the created |
235 | IQuestionMessage and an IObjectModifiedEvent for the question. |
236 | diff --git a/lib/lp/answers/interfaces/questionmessage.py b/lib/lp/answers/interfaces/questionmessage.py |
237 | index a35a787..e9f8610 100644 |
238 | --- a/lib/lp/answers/interfaces/questionmessage.py |
239 | +++ b/lib/lp/answers/interfaces/questionmessage.py |
240 | @@ -10,7 +10,7 @@ __all__ = [ |
241 | from lazr.restful.declarations import exported, exported_as_webservice_entry |
242 | from lazr.restful.fields import Reference |
243 | from zope.interface import Interface |
244 | -from zope.schema import Bool, Choice, Int |
245 | +from zope.schema import Choice, Int |
246 | |
247 | from lp import _ |
248 | from lp.answers.enums import QuestionAction, QuestionStatus |
249 | @@ -74,14 +74,6 @@ class IQuestionMessageView(IMessageView): |
250 | ), |
251 | exported_as="index", |
252 | ) |
253 | - visible = exported( |
254 | - Bool( |
255 | - title=_("Message visibility."), |
256 | - description=_("Whether or not the message is visible."), |
257 | - readonly=True, |
258 | - ), |
259 | - as_of="devel", |
260 | - ) |
261 | |
262 | |
263 | @exported_as_webservice_entry(as_of="devel") |
264 | diff --git a/lib/lp/app/doc/batch-navigation.rst b/lib/lp/app/doc/batch-navigation.rst |
265 | index 25274f1..ab762aa 100644 |
266 | --- a/lib/lp/app/doc/batch-navigation.rst |
267 | +++ b/lib/lp/app/doc/batch-navigation.rst |
268 | @@ -8,8 +8,7 @@ This documents and tests the Launchpad-specific elements of its usage. |
269 | |
270 | Note that our use of the batching code relies on the registration of |
271 | lp.services.webapp.batching.FiniteSequenceAdapter for |
272 | -storm.zope.interfaces.IResultSet and |
273 | -storm.zope.interfaces.ISQLObjectResultSet. |
274 | +storm.zope.interfaces.IResultSet. |
275 | |
276 | Batch navigation provides a way to navigate batch results in a web |
277 | page by providing URL links to the next, previous and numbered pages |
278 | diff --git a/lib/lp/app/validators/README.txt b/lib/lp/app/validators/README.txt |
279 | index 2efe490..bae777f 100644 |
280 | --- a/lib/lp/app/validators/README.txt |
281 | +++ b/lib/lp/app/validators/README.txt |
282 | @@ -1,4 +1,2 @@ |
283 | -Validators in this directory are either simple functions that correspond |
284 | -to database constraints such as valid_name(name), or they can be |
285 | -subclasses of sqlobject.include.validators.Validator such as |
286 | -PersonValidatorBase. |
287 | +Validators in this directory are simple functions that correspond to |
288 | +database constraints such as valid_name(name). |
289 | diff --git a/lib/lp/archivepublisher/tests/test_sync_signingkeys.py b/lib/lp/archivepublisher/tests/test_sync_signingkeys.py |
290 | index 3c73754..7e8a1bb 100644 |
291 | --- a/lib/lp/archivepublisher/tests/test_sync_signingkeys.py |
292 | +++ b/lib/lp/archivepublisher/tests/test_sync_signingkeys.py |
293 | @@ -53,7 +53,10 @@ from lp.testing.script import run_script |
294 | |
295 | class TestSyncSigningKeysScript(TestCaseWithFactory): |
296 | layer = ZopelessDatabaseLayer |
297 | - run_tests_with = AsynchronousDeferredRunTest.make_factory(timeout=30) |
298 | + # A timeout of 30 seconds is slightly too short and can lead to |
299 | + # non-relevant test failures. 45 seconds is a value estimated from trial |
300 | + # and error. |
301 | + run_tests_with = AsynchronousDeferredRunTest.make_factory(timeout=45) |
302 | |
303 | def setUp(self): |
304 | super().setUp() |
305 | diff --git a/lib/lp/archiveuploader/tests/test_nascentupload_documentation.py b/lib/lp/archiveuploader/tests/test_nascentupload_documentation.py |
306 | index 1b27830..f163b4f 100644 |
307 | --- a/lib/lp/archiveuploader/tests/test_nascentupload_documentation.py |
308 | +++ b/lib/lp/archiveuploader/tests/test_nascentupload_documentation.py |
309 | @@ -12,6 +12,7 @@ from lp.archiveuploader.nascentupload import NascentUpload |
310 | from lp.archiveuploader.tests import datadir, getPolicy |
311 | from lp.archiveuploader.uploadpolicy import ArchiveUploadType |
312 | from lp.registry.interfaces.distribution import IDistributionSet |
313 | +from lp.services.database.interfaces import IStore |
314 | from lp.services.librarian.model import LibraryFileAlias |
315 | from lp.services.log.logger import DevNullLogger |
316 | from lp.soyuz.interfaces.component import IComponentSet |
317 | @@ -76,7 +77,7 @@ def prepareHoaryForUploads(test): |
318 | ComponentSelection(distroseries=hoary, component=universe) |
319 | |
320 | # Create a fake hoary/i386 chroot. |
321 | - fake_chroot = LibraryFileAlias.get(1) |
322 | + fake_chroot = IStore(LibraryFileAlias).get(LibraryFileAlias, 1) |
323 | hoary["i386"].addOrUpdateChroot(fake_chroot) |
324 | |
325 | LaunchpadZopelessLayer.txn.commit() |
326 | diff --git a/lib/lp/archiveuploader/uploadprocessor.py b/lib/lp/archiveuploader/uploadprocessor.py |
327 | index f7cd1ae..9093bdc 100644 |
328 | --- a/lib/lp/archiveuploader/uploadprocessor.py |
329 | +++ b/lib/lp/archiveuploader/uploadprocessor.py |
330 | @@ -77,7 +77,6 @@ from lp.code.interfaces.sourcepackagerecipebuild import ( |
331 | from lp.oci.interfaces.ocirecipebuild import IOCIRecipeBuild |
332 | from lp.registry.interfaces.distribution import IDistributionSet |
333 | from lp.registry.interfaces.person import IPersonSet |
334 | -from lp.services.database.sqlobject import SQLObjectNotFound |
335 | from lp.services.log.logger import BufferLogger |
336 | from lp.services.statsd.interfaces.statsd_client import IStatsdClient |
337 | from lp.services.webapp.adapter import ( |
338 | @@ -1020,9 +1019,8 @@ def parse_upload_path(relative_path): |
339 | |
340 | elif first_path.isdigit(): |
341 | # This must be a binary upload from a build worker. |
342 | - try: |
343 | - archive = getUtility(IArchiveSet).get(int(first_path)) |
344 | - except SQLObjectNotFound: |
345 | + archive = getUtility(IArchiveSet).get(int(first_path)) |
346 | + if archive is None: |
347 | raise UploadPathError( |
348 | "Could not find archive with id=%s." % first_path |
349 | ) |
350 | diff --git a/lib/lp/blueprints/doc/specification.rst b/lib/lp/blueprints/doc/specification.rst |
351 | index 7ba3314..ede5c54 100644 |
352 | --- a/lib/lp/blueprints/doc/specification.rst |
353 | +++ b/lib/lp/blueprints/doc/specification.rst |
354 | @@ -21,11 +21,12 @@ IMilestoneSet can be accessed as a utility. |
355 | To create a new Specification, use ISpecificationSet.new: |
356 | |
357 | >>> from lp.registry.interfaces.product import IProductSet |
358 | + >>> from lp.registry.model.person import Person |
359 | + >>> from lp.services.database.interfaces import IStore |
360 | |
361 | >>> productset = getUtility(IProductSet) |
362 | >>> upstream_firefox = productset.get(4) |
363 | - >>> from lp.registry.model.person import Person |
364 | - >>> mark = Person.byName("mark") |
365 | + >>> mark = IStore(Person).find(Person, name="mark").one() |
366 | >>> newspec = specset.new( |
367 | ... "mng", |
368 | ... "Support MNG Format", |
369 | @@ -78,7 +79,7 @@ We attach now a spec to a distribution. |
370 | |
371 | >>> from lp.app.interfaces.launchpad import ILaunchpadCelebrities |
372 | >>> ubuntu = getUtility(ILaunchpadCelebrities).ubuntu |
373 | - >>> mark = Person.byName("mark") |
374 | + >>> mark = IStore(Person).find(Person, name="mark").one() |
375 | >>> ubuspec = specset.new( |
376 | ... "fix-spec-permissions", |
377 | ... "Fix Specification Permissions", |
378 | @@ -99,7 +100,7 @@ member, and therefore should be able to edit any spec attached to it |
379 | >>> print(ubuntu.owner.name) |
380 | ubuntu-team |
381 | |
382 | - >>> jdub = Person.byName("jdub") |
383 | + >>> jdub = IStore(Person).find(Person, name="jdub").one() |
384 | >>> jdub.inTeam(ubuntu.owner) |
385 | True |
386 | |
387 | diff --git a/lib/lp/blueprints/model/specificationworkitem.py b/lib/lp/blueprints/model/specificationworkitem.py |
388 | index 79f9316..23ac31b 100644 |
389 | --- a/lib/lp/blueprints/model/specificationworkitem.py |
390 | +++ b/lib/lp/blueprints/model/specificationworkitem.py |
391 | @@ -5,7 +5,9 @@ __all__ = [ |
392 | "SpecificationWorkItem", |
393 | ] |
394 | |
395 | -from storm.locals import Bool, Int, Reference, Unicode |
396 | +from datetime import timezone |
397 | + |
398 | +from storm.locals import Bool, DateTime, Int, Reference, Unicode |
399 | from storm.store import Store |
400 | from zope.interface import implementer |
401 | |
402 | @@ -16,7 +18,6 @@ from lp.blueprints.interfaces.specificationworkitem import ( |
403 | ) |
404 | from lp.registry.interfaces.person import validate_public_person |
405 | from lp.services.database.constants import DEFAULT |
406 | -from lp.services.database.datetimecol import UtcDateTimeCol |
407 | from lp.services.database.enumcol import DBEnum |
408 | from lp.services.database.stormbase import StormBase |
409 | from lp.services.helpers import backslashreplace |
410 | @@ -40,7 +41,9 @@ class SpecificationWorkItem(StormBase): |
411 | allow_none=False, |
412 | default=SpecificationWorkItemStatus.TODO, |
413 | ) |
414 | - date_created = UtcDateTimeCol(notNull=True, default=DEFAULT) |
415 | + date_created = DateTime( |
416 | + allow_none=False, default=DEFAULT, tzinfo=timezone.utc |
417 | + ) |
418 | sequence = Int(allow_none=False) |
419 | deleted = Bool(allow_none=False, default=False) |
420 | |
421 | diff --git a/lib/lp/blueprints/model/sprint.py b/lib/lp/blueprints/model/sprint.py |
422 | index 3a45f35..15f20ea 100644 |
423 | --- a/lib/lp/blueprints/model/sprint.py |
424 | +++ b/lib/lp/blueprints/model/sprint.py |
425 | @@ -425,12 +425,7 @@ class HasSprintsMixin: |
426 | |
427 | Subclasses must overwrite this method if it doesn't suit them. |
428 | """ |
429 | - try: |
430 | - table = getattr(self, "__storm_table__") |
431 | - except AttributeError: |
432 | - # XXX cjwatson 2020-09-10: Remove this once all inheritors have |
433 | - # been converted from SQLObject to Storm. |
434 | - table = getattr(self, "_table") |
435 | + table = getattr(self, "__storm_table__") |
436 | return [ |
437 | getattr(Specification, table.lower()) == self, |
438 | Specification.id == SprintSpecification.specification_id, |
439 | diff --git a/lib/lp/blueprints/model/sprintattendance.py b/lib/lp/blueprints/model/sprintattendance.py |
440 | index 693d4f3..82b677b 100644 |
441 | --- a/lib/lp/blueprints/model/sprintattendance.py |
442 | +++ b/lib/lp/blueprints/model/sprintattendance.py |
443 | @@ -3,12 +3,13 @@ |
444 | |
445 | __all__ = ["SprintAttendance"] |
446 | |
447 | -from storm.locals import Bool, Int, Reference |
448 | +from datetime import timezone |
449 | + |
450 | +from storm.locals import Bool, DateTime, Int, Reference |
451 | from zope.interface import implementer |
452 | |
453 | from lp.blueprints.interfaces.sprintattendance import ISprintAttendance |
454 | from lp.registry.interfaces.person import validate_public_person |
455 | -from lp.services.database.datetimecol import UtcDateTimeCol |
456 | from lp.services.database.stormbase import StormBase |
457 | |
458 | |
459 | @@ -26,8 +27,8 @@ class SprintAttendance(StormBase): |
460 | attendeeID = Int(name="attendee", validator=validate_public_person) |
461 | attendee = Reference(attendeeID, "Person.id") |
462 | |
463 | - time_starts = UtcDateTimeCol(notNull=True) |
464 | - time_ends = UtcDateTimeCol(notNull=True) |
465 | + time_starts = DateTime(allow_none=False, tzinfo=timezone.utc) |
466 | + time_ends = DateTime(allow_none=False, tzinfo=timezone.utc) |
467 | _is_physical = Bool(name="is_physical", default=True) |
468 | |
469 | def __init__(self, sprint, attendee): |
470 | diff --git a/lib/lp/blueprints/vocabularies/specificationdependency.py b/lib/lp/blueprints/vocabularies/specificationdependency.py |
471 | index d458c05..f6edb5f 100644 |
472 | --- a/lib/lp/blueprints/vocabularies/specificationdependency.py |
473 | +++ b/lib/lp/blueprints/vocabularies/specificationdependency.py |
474 | @@ -168,7 +168,7 @@ class SpecificationDepCandidatesVocabulary(StormVocabularyBase): |
475 | raise LookupError(token) |
476 | |
477 | def search(self, query, vocab_filter=None): |
478 | - """See `SQLObjectVocabularyBase.search`. |
479 | + """See `StormVocabularyBase.search`. |
480 | |
481 | We find specs where query is in the text of name or title, or matches |
482 | the full text index and then filter out ineligible specs using |
483 | diff --git a/lib/lp/bugs/browser/tests/buglinktarget-views.rst b/lib/lp/bugs/browser/tests/buglinktarget-views.rst |
484 | index d3a139a..caebb44 100644 |
485 | --- a/lib/lp/bugs/browser/tests/buglinktarget-views.rst |
486 | +++ b/lib/lp/bugs/browser/tests/buglinktarget-views.rst |
487 | @@ -94,7 +94,7 @@ IBugLinkTarget. |
488 | >>> print(view.cancel_url) |
489 | http://bugs.launchpad.test/bugs/cve/2005-2730 |
490 | |
491 | -After removing the bugs, it sends a SQLObjectModified event. |
492 | +After removing the bugs, it sends an ObjectModifiedEvent. |
493 | |
494 | >>> request = LaunchpadTestRequest( |
495 | ... method="POST", |
496 | diff --git a/lib/lp/bugs/model/bug.py b/lib/lp/bugs/model/bug.py |
497 | index 5da7f9b..989abe8 100644 |
498 | --- a/lib/lp/bugs/model/bug.py |
499 | +++ b/lib/lp/bugs/model/bug.py |
500 | @@ -2610,7 +2610,7 @@ class Bug(StormBase, InformationTypeMixin): |
501 | ), |
502 | LeftJoin( |
503 | LibraryFileContent, |
504 | - LibraryFileContent.id == LibraryFileAlias.contentID, |
505 | + LibraryFileContent.id == LibraryFileAlias.content_id, |
506 | ), |
507 | ) |
508 | .find( |
509 | @@ -2618,7 +2618,7 @@ class Bug(StormBase, InformationTypeMixin): |
510 | BugAttachment.bug == self, |
511 | Or( |
512 | BugAttachment.url != None, |
513 | - LibraryFileAlias.contentID != None, |
514 | + LibraryFileAlias.content_id != None, |
515 | ), |
516 | ) |
517 | .order_by(BugAttachment.id) |
518 | diff --git a/lib/lp/bugs/model/bugtask.py b/lib/lp/bugs/model/bugtask.py |
519 | index d66c62a..2d9845e 100644 |
520 | --- a/lib/lp/bugs/model/bugtask.py |
521 | +++ b/lib/lp/bugs/model/bugtask.py |
522 | @@ -932,7 +932,7 @@ class BugTask(StormBase): |
523 | for synched_attr in self._CONJOINED_ATTRIBUTES: |
524 | replica_attr_value = getattr(conjoined_replica, synched_attr) |
525 | # Bypass our checks that prevent setting attributes on |
526 | - # conjoined primaries by calling the underlying sqlobject |
527 | + # conjoined primaries by calling the underlying Storm |
528 | # setter methods directly. |
529 | setattr(self, synched_attr, PassthroughValue(replica_attr_value)) |
530 | |
531 | diff --git a/lib/lp/bugs/model/bugtasksearch.py b/lib/lp/bugs/model/bugtasksearch.py |
532 | index 8de8c55..d8170b6 100644 |
533 | --- a/lib/lp/bugs/model/bugtasksearch.py |
534 | +++ b/lib/lp/bugs/model/bugtasksearch.py |
535 | @@ -29,8 +29,9 @@ from storm.expr import ( |
536 | Row, |
537 | Select, |
538 | Union, |
539 | + With, |
540 | ) |
541 | -from storm.info import ClassAlias |
542 | +from storm.info import ClassAlias, get_cls_info |
543 | from storm.references import Reference |
544 | from zope.component import getUtility |
545 | from zope.security.proxy import isinstance as zope_isinstance |
546 | @@ -77,10 +78,6 @@ from lp.registry.model.teammembership import TeamParticipation |
547 | from lp.services.database.bulk import load |
548 | from lp.services.database.decoratedresultset import DecoratedResultSet |
549 | from lp.services.database.interfaces import IStore |
550 | -from lp.services.database.sqlbase import ( |
551 | - convert_storm_clause_to_string, |
552 | - sqlvalues, |
553 | -) |
554 | from lp.services.database.stormexpr import ( |
555 | ArrayAgg, |
556 | ArrayIntersects, |
557 | @@ -224,10 +221,10 @@ def search_bugs(pre_iter_hook, alternatives, just_bug_ids=False): |
558 | clauseTables, |
559 | bugtask_decorator, |
560 | join_tables, |
561 | - with_clause, |
562 | + with_clauses, |
563 | ] = _build_query(alternatives[0]) |
564 | - if with_clause: |
565 | - store = store.with_(with_clause) |
566 | + if with_clauses: |
567 | + store = store.with_(with_clauses) |
568 | decorators.append(bugtask_decorator) |
569 | origin = _build_origin( |
570 | join_tables + orderby_joins, clauseTables, start |
571 | @@ -242,12 +239,12 @@ def search_bugs(pre_iter_hook, alternatives, just_bug_ids=False): |
572 | clauseTables, |
573 | decorator, |
574 | join_tables, |
575 | - with_clause, |
576 | + with_clauses, |
577 | ] = _build_query(params) |
578 | origin = _build_origin(join_tables, clauseTables, start) |
579 | localstore = store |
580 | - if with_clause: |
581 | - localstore = store.with_(with_clause) |
582 | + if with_clauses: |
583 | + localstore = store.with_(with_clauses) |
584 | next_result = localstore.using(*origin).find(BugTaskFlat, query) |
585 | results.append(next_result) |
586 | # NB: assumes the decorators are all compatible. |
587 | @@ -337,8 +334,8 @@ def _build_query(params): |
588 | # * a searchbuilder.any object, representing a set of acceptable |
589 | # filter values |
590 | # * a searchbuilder.NULL object |
591 | - # * an sqlobject |
592 | - # * a dbschema item |
593 | + # * a Storm instance |
594 | + # * a `DBItem` |
595 | # * None (meaning no filter criteria specified for that arg_name) |
596 | # |
597 | # XXX: kiko 2006-03-16: |
598 | @@ -492,9 +489,16 @@ def _build_query(params): |
599 | |
600 | if params.structural_subscriber is not None: |
601 | with_clauses.append( |
602 | - """ss as (SELECT * from StructuralSubscription |
603 | - WHERE StructuralSubscription.subscriber = %s)""" |
604 | - % sqlvalues(params.structural_subscriber) |
605 | + With( |
606 | + "ss", |
607 | + Select( |
608 | + get_cls_info(StructuralSubscription).columns, |
609 | + where=( |
610 | + StructuralSubscription.subscriber |
611 | + == params.structural_subscriber |
612 | + ), |
613 | + ), |
614 | + ) |
615 | ) |
616 | |
617 | class StructuralSubscriptionCTE(StructuralSubscription): |
618 | @@ -761,27 +765,23 @@ def _build_query(params): |
619 | ) |
620 | store = IStore(Bug) |
621 | with_clauses.append( |
622 | - convert_storm_clause_to_string( |
623 | - WithMaterialized( |
624 | - "commented_bug_ids", |
625 | - store, |
626 | - Union(commented_messages, commented_activities), |
627 | - ) |
628 | + WithMaterialized( |
629 | + "commented_bug_ids", |
630 | + store, |
631 | + Union(commented_messages, commented_activities), |
632 | ) |
633 | ) |
634 | with_clauses.append( |
635 | - convert_storm_clause_to_string( |
636 | - WithMaterialized( |
637 | - "commented_bugtask_ids", |
638 | - store, |
639 | - Select( |
640 | - BugTaskFlat.bugtask_id, |
641 | - tables=[BugTaskFlat], |
642 | - where=BugTaskFlat.bug_id.is_in( |
643 | - Select(Column("bug", "commented_bug_ids")) |
644 | - ), |
645 | + WithMaterialized( |
646 | + "commented_bugtask_ids", |
647 | + store, |
648 | + Select( |
649 | + BugTaskFlat.bugtask_id, |
650 | + tables=[BugTaskFlat], |
651 | + where=BugTaskFlat.bug_id.is_in( |
652 | + Select(Column("bug", "commented_bug_ids")) |
653 | ), |
654 | - ) |
655 | + ), |
656 | ) |
657 | ) |
658 | extra_clauses.append( |
659 | @@ -921,11 +921,7 @@ def _build_query(params): |
660 | obj = decor(obj) |
661 | return obj |
662 | |
663 | - if with_clauses: |
664 | - with_clause = SQL(", ".join(with_clauses)) |
665 | - else: |
666 | - with_clause = None |
667 | - return (query, clauseTables, decorator, join_tables, with_clause) |
668 | + return (query, clauseTables, decorator, join_tables, with_clauses) |
669 | |
670 | |
671 | def _process_order_by(params): |
672 | diff --git a/lib/lp/bugs/stories/bugs/xx-bug-text-pages.rst b/lib/lp/bugs/stories/bugs/xx-bug-text-pages.rst |
673 | index 116baaa..9b64266 100644 |
674 | --- a/lib/lp/bugs/stories/bugs/xx-bug-text-pages.rst |
675 | +++ b/lib/lp/bugs/stories/bugs/xx-bug-text-pages.rst |
676 | @@ -15,7 +15,7 @@ We'll start by adding some attachments to the bug: |
677 | >>> from lp.services.database.sqlbase import flush_database_updates |
678 | >>> from lp.testing import login, logout |
679 | >>> login("foo.bar@canonical.com") |
680 | - >>> mark = Person.selectOneBy(name="mark") |
681 | + >>> mark = IStore(Person).find(Person, name="mark").one() |
682 | >>> mark.display_name = "M\xe1rk Sh\xfattlew\xf2rth" |
683 | >>> bug = IStore(Bug).get(Bug, 1) |
684 | >>> content = BytesIO(b"<html><body>bogus</body></html>") |
685 | diff --git a/lib/lp/bugs/stories/webservice/xx-bug.rst b/lib/lp/bugs/stories/webservice/xx-bug.rst |
686 | index 0bc2373..0aa7da2 100644 |
687 | --- a/lib/lp/bugs/stories/webservice/xx-bug.rst |
688 | +++ b/lib/lp/bugs/stories/webservice/xx-bug.rst |
689 | @@ -387,6 +387,51 @@ We don't have to submit a subject when we add a new message. |
690 | subject: 'Re: Firefox install instructions should be complete' |
691 | web_link: '...' |
692 | |
693 | +The "visible" field is exported in the "devel" version of the web service API |
694 | +and it defaults to True. |
695 | + |
696 | + >>> response = webservice.get("/bugs/5/messages", api_version="devel") |
697 | + >>> messages = response.jsonBody()["entries"] |
698 | + >>> pprint_entry(messages[0]) |
699 | + bug_attachments_collection_link: |
700 | + 'http://.../firefox/+bug/5/comments/0/bug_attachments' |
701 | + content: 'All ways of downloading firefox should provide...' |
702 | + date_created: '2005-01-14T17:27:03.702622+00:00' |
703 | + date_deleted: None |
704 | + date_last_edited: None |
705 | + owner_link: 'http://.../~name12' |
706 | + parent_link: None |
707 | + resource_type_link: 'http://.../#message' |
708 | + revisions_collection_link: 'http://.../firefox/+bug/5/comments/0/revisions' |
709 | + self_link: 'http://.../firefox/+bug/5/comments/0' |
710 | + subject: 'Firefox install instructions should be complete' |
711 | + visible: True |
712 | + web_link: 'http://bugs.../firefox/+bug/5/comments/0' |
713 | + |
714 | +The "visible" field will be False when a comment is hidden. |
715 | + |
716 | + >>> response = webservice.named_post( |
717 | + ... "/bugs/5", "setCommentVisibility", comment_number=0, visible=False |
718 | + ... ) |
719 | + >>> response.status |
720 | + 200 |
721 | + >>> response = webservice.get("/bugs/5/messages", api_version="devel") |
722 | + >>> messages = response.jsonBody()["entries"] |
723 | + >>> pprint_entry(messages[0]) |
724 | + bug_attachments_collection_link: |
725 | + 'http://.../firefox/+bug/5/comments/0/bug_attachments' |
726 | + content: 'All ways of downloading firefox should provide...' |
727 | + date_created: '2005-01-14T17:27:03.702622+00:00' |
728 | + date_deleted: None |
729 | + date_last_edited: None |
730 | + owner_link: 'http://.../~name12' |
731 | + parent_link: None |
732 | + resource_type_link: 'http://.../#message' |
733 | + revisions_collection_link: 'http://.../firefox/+bug/5/comments/0/revisions' |
734 | + self_link: 'http://.../firefox/+bug/5/comments/0' |
735 | + subject: 'Firefox install instructions should be complete' |
736 | + visible: False |
737 | + web_link: 'http://bugs.../firefox/+bug/5/comments/0' |
738 | |
739 | Bug tasks |
740 | --------- |
741 | diff --git a/lib/lp/buildmaster/interfaces/processor.py b/lib/lp/buildmaster/interfaces/processor.py |
742 | index 67f0a58..b9bb853 100644 |
743 | --- a/lib/lp/buildmaster/interfaces/processor.py |
744 | +++ b/lib/lp/buildmaster/interfaces/processor.py |
745 | @@ -40,7 +40,7 @@ class ProcessorNotFound(NameLookupFailed): |
746 | # 'devel' as their version. |
747 | @exported_as_webservice_entry(publish_web_link=False, as_of="beta") |
748 | class IProcessor(Interface): |
749 | - """The SQLObject Processor Interface""" |
750 | + """The Storm Processor Interface""" |
751 | |
752 | id = Attribute("The Processor ID") |
753 | name = exported( |
754 | diff --git a/lib/lp/charms/model/charmrecipebuild.py b/lib/lp/charms/model/charmrecipebuild.py |
755 | index 6024b31..e63743e 100644 |
756 | --- a/lib/lp/charms/model/charmrecipebuild.py |
757 | +++ b/lib/lp/charms/model/charmrecipebuild.py |
758 | @@ -377,7 +377,7 @@ class CharmRecipeBuild(PackageBuildMixin, StormBase): |
759 | (CharmFile, LibraryFileAlias, LibraryFileContent), |
760 | CharmFile.build == self.id, |
761 | LibraryFileAlias.id == CharmFile.library_file_id, |
762 | - LibraryFileContent.id == LibraryFileAlias.contentID, |
763 | + LibraryFileContent.id == LibraryFileAlias.content_id, |
764 | ) |
765 | return result.order_by([LibraryFileAlias.filename, CharmFile.id]) |
766 | |
767 | @@ -520,7 +520,7 @@ class CharmRecipeBuildSet(SpecificBuildFarmJobSourceMixin): |
768 | |
769 | load_related(Person, builds, ["requester_id"]) |
770 | lfas = load_related(LibraryFileAlias, builds, ["log_id"]) |
771 | - load_related(LibraryFileContent, lfas, ["contentID"]) |
772 | + load_related(LibraryFileContent, lfas, ["content_id"]) |
773 | distroarchserieses = load_related( |
774 | DistroArchSeries, builds, ["distro_arch_series_id"] |
775 | ) |
776 | diff --git a/lib/lp/code/doc/codeimport-machine.rst b/lib/lp/code/doc/codeimport-machine.rst |
777 | index 1b7b523..22fedca 100644 |
778 | --- a/lib/lp/code/doc/codeimport-machine.rst |
779 | +++ b/lib/lp/code/doc/codeimport-machine.rst |
780 | @@ -213,7 +213,7 @@ setQuiescing methods must fail. |
781 | |
782 | Since our scripts and daemons run at "READ COMMITTED" isolation level, |
783 | there are races that we cannot easily detect within the limitation of |
784 | -SQLObject, when the watchdog process and the controller daemon |
785 | +Storm, when the watchdog process and the controller daemon |
786 | concurrently call setOffline. Those undetected races will lead to the |
787 | creation of redundant OFFLINE events with different reason values, where |
788 | one of the reasons will be WATCHDOG. Those races should not have any |
789 | diff --git a/lib/lp/code/interfaces/branch.py b/lib/lp/code/interfaces/branch.py |
790 | index 0bf790f..16db888 100644 |
791 | --- a/lib/lp/code/interfaces/branch.py |
792 | +++ b/lib/lp/code/interfaces/branch.py |
793 | @@ -1045,7 +1045,7 @@ class IBranchView( |
794 | |
795 | :param notification_levels: An iterable of |
796 | `BranchSubscriptionNotificationLevel`s |
797 | - :return: An SQLObject query result. |
798 | + :return: A `ResultSet` of `BranchSubscription`s. |
799 | """ |
800 | |
801 | def getBranchRevision(sequence=None, revision=None, revision_id=None): |
802 | diff --git a/lib/lp/code/mail/tests/test_codehandler.py b/lib/lp/code/mail/tests/test_codehandler.py |
803 | index 82d770d..eeb65e1 100644 |
804 | --- a/lib/lp/code/mail/tests/test_codehandler.py |
805 | +++ b/lib/lp/code/mail/tests/test_codehandler.py |
806 | @@ -149,7 +149,7 @@ class TestCodeHandler(TestCaseWithFactory): |
807 | self.code_handler.process(mail, email_addr, None), |
808 | "Succeeded, but didn't return True", |
809 | ) |
810 | - # if the message has not been created, this raises SQLObjectNotFound |
811 | + # if the message has not been created, this raises NotFoundError. |
812 | MessageSet().get("<my-id>") |
813 | |
814 | def test_process_packagebranch(self): |
815 | diff --git a/lib/lp/code/model/cibuild.py b/lib/lp/code/model/cibuild.py |
816 | index 8c32605..f6faff6 100644 |
817 | --- a/lib/lp/code/model/cibuild.py |
818 | +++ b/lib/lp/code/model/cibuild.py |
819 | @@ -901,7 +901,7 @@ class CIBuildSet(SpecificBuildFarmJobSourceMixin): |
820 | |
821 | def preloadBuildsData(self, builds): |
822 | lfas = load_related(LibraryFileAlias, builds, ["log_id"]) |
823 | - load_related(LibraryFileContent, lfas, ["contentID"]) |
824 | + load_related(LibraryFileContent, lfas, ["content_id"]) |
825 | distroarchseries = load_related( |
826 | DistroArchSeries, builds, ["distro_arch_series_id"] |
827 | ) |
828 | diff --git a/lib/lp/code/model/sourcepackagerecipe.py b/lib/lp/code/model/sourcepackagerecipe.py |
829 | index 937024a..4cd3891 100644 |
830 | --- a/lib/lp/code/model/sourcepackagerecipe.py |
831 | +++ b/lib/lp/code/model/sourcepackagerecipe.py |
832 | @@ -15,6 +15,7 @@ from lazr.delegates import delegate_to |
833 | from storm.expr import And, LeftJoin |
834 | from storm.locals import ( |
835 | Bool, |
836 | + DateTime, |
837 | Desc, |
838 | Int, |
839 | Reference, |
840 | @@ -44,7 +45,6 @@ from lp.registry.interfaces.pocket import PackagePublishingPocket |
841 | from lp.registry.model.distroseries import DistroSeries |
842 | from lp.services.database.bulk import load_referencing |
843 | from lp.services.database.constants import DEFAULT, UTC_NOW |
844 | -from lp.services.database.datetimecol import UtcDateTimeCol |
845 | from lp.services.database.interfaces import IPrimaryStore, IStore |
846 | from lp.services.database.stormbase import StormBase |
847 | from lp.services.database.stormexpr import Greatest, NullsLast |
848 | @@ -95,8 +95,8 @@ class SourcePackageRecipe(StormBase): |
849 | daily_build_archive_id = Int(name="daily_build_archive", allow_none=True) |
850 | daily_build_archive = Reference(daily_build_archive_id, "Archive.id") |
851 | |
852 | - date_created = UtcDateTimeCol(notNull=True) |
853 | - date_last_modified = UtcDateTimeCol(notNull=True) |
854 | + date_created = DateTime(allow_none=False, tzinfo=timezone.utc) |
855 | + date_last_modified = DateTime(allow_none=False, tzinfo=timezone.utc) |
856 | |
857 | owner_id = Int(name="owner", allow_none=True) |
858 | owner = Reference(owner_id, "Person.id") |
859 | diff --git a/lib/lp/code/model/sourcepackagerecipebuild.py b/lib/lp/code/model/sourcepackagerecipebuild.py |
860 | index 1ec64c9..00d6ddb 100644 |
861 | --- a/lib/lp/code/model/sourcepackagerecipebuild.py |
862 | +++ b/lib/lp/code/model/sourcepackagerecipebuild.py |
863 | @@ -328,7 +328,7 @@ class SourcePackageRecipeBuild( |
864 | |
865 | load_related(LibraryFileAlias, builds, ["log_id"]) |
866 | archives = load_related(Archive, builds, ["archive_id"]) |
867 | - load_related(Person, archives, ["ownerID"]) |
868 | + load_related(Person, archives, ["owner_id"]) |
869 | distroseries = load_related(DistroSeries, builds, ["distroseries_id"]) |
870 | load_related(Distribution, distroseries, ["distribution_id"]) |
871 | sprs = load_related(SourcePackageRecipe, builds, ["recipe_id"]) |
872 | diff --git a/lib/lp/code/model/tests/test_codereviewkarma.py b/lib/lp/code/model/tests/test_codereviewkarma.py |
873 | index fc4cdce..4278a3c 100644 |
874 | --- a/lib/lp/code/model/tests/test_codereviewkarma.py |
875 | +++ b/lib/lp/code/model/tests/test_codereviewkarma.py |
876 | @@ -56,7 +56,7 @@ class TestCodeReviewKarma(TestCaseWithFactory): |
877 | # target as there would be other karma events for the branch |
878 | # creations. |
879 | self.karma_events = [] |
880 | - # The normal SQLObject events use the logged in person. |
881 | + # The normal Storm events use the logged in person. |
882 | login_person(registrant) |
883 | source_branch.addLandingTarget(registrant, target_branch) |
884 | self.assertOneKarmaEvent(registrant, "branchmergeproposed") |
885 | diff --git a/lib/lp/code/model/tests/test_revisionauthor.py b/lib/lp/code/model/tests/test_revisionauthor.py |
886 | index a256b90..37401f6 100644 |
887 | --- a/lib/lp/code/model/tests/test_revisionauthor.py |
888 | +++ b/lib/lp/code/model/tests/test_revisionauthor.py |
889 | @@ -100,8 +100,8 @@ class TestRevisionAuthorMatching(MakeHarryTestCase): |
890 | # Check a VALIDATED email address is used to link. |
891 | harry = self._makeHarry(EmailAddressStatus.VALIDATED) |
892 | author = self._createRevisionAuthor() |
893 | - # Reget harry as the SQLObject cache has been flushed on |
894 | - # transaction boundary. |
895 | + # Reget harry as the Storm cache has been flushed on transaction |
896 | + # boundary. |
897 | harry = getUtility(IPersonSet).getByName("harry") |
898 | self.assertEqual("harry@canonical.com", author.email) |
899 | self.assertEqual(harry, author.person) |
900 | @@ -110,8 +110,8 @@ class TestRevisionAuthorMatching(MakeHarryTestCase): |
901 | # Check a OLD email address is used to link. |
902 | harry = self._makeHarry(EmailAddressStatus.OLD) |
903 | author = self._createRevisionAuthor() |
904 | - # Reget harry as the SQLObject cache has been flushed on |
905 | - # transaction boundary. |
906 | + # Reget harry as the Storm cache has been flushed on transaction |
907 | + # boundary. |
908 | harry = getUtility(IPersonSet).getByName("harry") |
909 | self.assertEqual("harry@canonical.com", author.email) |
910 | self.assertEqual(harry, author.person) |
911 | @@ -120,8 +120,8 @@ class TestRevisionAuthorMatching(MakeHarryTestCase): |
912 | # Check a PREFERRED email address is used to link. |
913 | harry = self._makeHarry(EmailAddressStatus.PREFERRED) |
914 | author = self._createRevisionAuthor() |
915 | - # Reget harry as the SQLObject cache has been flushed on |
916 | - # transaction boundary. |
917 | + # Reget harry as the Storm cache has been flushed on transaction |
918 | + # boundary. |
919 | harry = getUtility(IPersonSet).getByName("harry") |
920 | self.assertEqual("harry@canonical.com", author.email) |
921 | self.assertEqual(harry, author.person) |
922 | diff --git a/lib/lp/code/xmlrpc/tests/test_git.py b/lib/lp/code/xmlrpc/tests/test_git.py |
923 | index 0467947..15e4f09 100644 |
924 | --- a/lib/lp/code/xmlrpc/tests/test_git.py |
925 | +++ b/lib/lp/code/xmlrpc/tests/test_git.py |
926 | @@ -1161,6 +1161,15 @@ class TestGitAPI(TestGitAPIMixin, TestCaseWithFactory): |
927 | |
928 | layer = LaunchpadFunctionalLayer |
929 | |
930 | + def _makeGitRepositoryWithRefs(self, **kwargs): |
931 | + """Helper method to create a git repository with a default branch""" |
932 | + repository = self.factory.makeGitRepository(**kwargs) |
933 | + self.factory.makeGitRefs( |
934 | + repository=repository, paths=["refs/heads/main"] |
935 | + ) |
936 | + removeSecurityProxy(repository).default_branch = "refs/heads/main" |
937 | + return repository |
938 | + |
939 | def test_confirm_git_repository_creation(self): |
940 | owner = self.factory.makePerson() |
941 | repo = removeSecurityProxy(self.factory.makeGitRepository(owner=owner)) |
942 | @@ -2821,11 +2830,7 @@ class TestGitAPI(TestGitAPIMixin, TestCaseWithFactory): |
943 | # pushed by a user that has their ordinary privileges on the |
944 | # corresponding repository. |
945 | requester_owner = self.factory.makePerson() |
946 | - repository = self.factory.makeGitRepository(owner=requester_owner) |
947 | - self.factory.makeGitRefs( |
948 | - repository=repository, paths=["refs/heads/master"] |
949 | - ) |
950 | - removeSecurityProxy(repository).default_branch = "refs/heads/master" |
951 | + repository = self._makeGitRepositoryWithRefs(owner=requester_owner) |
952 | pushed_branch = "branch1" |
953 | self.assertHasMergeProposalURL( |
954 | repository, pushed_branch, {"uid": requester_owner.id} |
955 | @@ -2857,12 +2862,8 @@ class TestGitAPI(TestGitAPIMixin, TestCaseWithFactory): |
956 | |
957 | self.pushConfig("codehosting", git_macaroon_secret_key="some-secret") |
958 | requester = self.factory.makePerson() |
959 | - repository = self.factory.makeGitRepository(owner=requester) |
960 | + repository = self._makeGitRepositoryWithRefs(owner=requester) |
961 | issuer = getUtility(IMacaroonIssuer, "git-repository") |
962 | - self.factory.makeGitRefs( |
963 | - repository=repository, paths=["refs/heads/master"] |
964 | - ) |
965 | - removeSecurityProxy(repository).default_branch = "refs/heads/master" |
966 | |
967 | pushed_branch = "branch1" |
968 | with person_logged_in(requester): |
969 | @@ -2890,11 +2891,7 @@ class TestGitAPI(TestGitAPIMixin, TestCaseWithFactory): |
970 | ) |
971 | requesters = [self.factory.makePerson() for _ in range(2)] |
972 | owner = self.factory.makeTeam(members=requesters) |
973 | - repository = self.factory.makeGitRepository(owner=owner) |
974 | - self.factory.makeGitRefs( |
975 | - repository=repository, paths=["refs/heads/master"] |
976 | - ) |
977 | - removeSecurityProxy(repository).default_branch = "refs/heads/master" |
978 | + repository = self._makeGitRepositoryWithRefs(owner=owner) |
979 | pushed_branch = "branch1" |
980 | macaroon = issuer.issueMacaroon(repository) |
981 | |
982 | @@ -2935,11 +2932,7 @@ class TestGitAPI(TestGitAPIMixin, TestCaseWithFactory): |
983 | # pushed by a user with a suitable access token that has their |
984 | # ordinary privileges on the corresponding repository. |
985 | requester = self.factory.makePerson() |
986 | - repository = self.factory.makeGitRepository(owner=requester) |
987 | - self.factory.makeGitRefs( |
988 | - repository=repository, paths=["refs/heads/main"] |
989 | - ) |
990 | - removeSecurityProxy(repository).default_branch = "refs/heads/main" |
991 | + repository = self._makeGitRepositoryWithRefs(owner=requester) |
992 | _, token = self.factory.makeAccessToken( |
993 | owner=requester, |
994 | target=repository, |
995 | @@ -2954,11 +2947,7 @@ class TestGitAPI(TestGitAPIMixin, TestCaseWithFactory): |
996 | # getMergeProposalURL refuses access tokens for a different |
997 | # repository. |
998 | requester = self.factory.makePerson() |
999 | - repository = self.factory.makeGitRepository(owner=requester) |
1000 | - self.factory.makeGitRefs( |
1001 | - repository=repository, paths=["refs/heads/main"] |
1002 | - ) |
1003 | - removeSecurityProxy(repository).default_branch = "refs/heads/main" |
1004 | + repository = self._makeGitRepositoryWithRefs(owner=requester) |
1005 | _, token = self.factory.makeAccessToken( |
1006 | owner=requester, scopes=[AccessTokenScope.REPOSITORY_PUSH] |
1007 | ) |
1008 | diff --git a/lib/lp/codehosting/tests/test_acceptance.py b/lib/lp/codehosting/tests/test_acceptance.py |
1009 | index ece2d62..a03af35 100644 |
1010 | --- a/lib/lp/codehosting/tests/test_acceptance.py |
1011 | +++ b/lib/lp/codehosting/tests/test_acceptance.py |
1012 | @@ -207,7 +207,7 @@ class SSHTestCase(TestCaseWithTransport, LoomTestMixin, TestCaseWithFactory): |
1013 | |
1014 | def getDatabaseBranch(self, personName, productName, branchName): |
1015 | """Look up and return the specified branch from the database.""" |
1016 | - owner = Person.byName(personName) |
1017 | + owner = IStore(Person).find(Person, name=personName).one() |
1018 | if productName is None: |
1019 | product = None |
1020 | else: |
1021 | @@ -335,7 +335,7 @@ class AcceptanceTests(WithScenarios, SSHTestCase): |
1022 | branch_type=BranchType.HOSTED, |
1023 | ): |
1024 | """Create a new branch in the database.""" |
1025 | - owner = Person.selectOneBy(name=owner_name) |
1026 | + owner = IStore(Person).find(Person, name=owner_name).one() |
1027 | if product_name == "+junk": |
1028 | product = None |
1029 | else: |
1030 | @@ -508,8 +508,10 @@ class AcceptanceTests(WithScenarios, SSHTestCase): |
1031 | # the branch doesn't exist. |
1032 | |
1033 | # 'salgado' is a member of landscape-developers. |
1034 | - salgado = Person.selectOneBy(name="salgado") |
1035 | - landscape_dev = Person.selectOneBy(name="landscape-developers") |
1036 | + salgado = IStore(Person).find(Person, name="salgado").one() |
1037 | + landscape_dev = ( |
1038 | + IStore(Person).find(Person, name="landscape-developers").one() |
1039 | + ) |
1040 | self.assertTrue( |
1041 | salgado.inTeam(landscape_dev), |
1042 | "salgado should be a member of landscape-developers, but isn't.", |
1043 | @@ -547,7 +549,7 @@ class AcceptanceTests(WithScenarios, SSHTestCase): |
1044 | # Hack 'firefox' so we have permission to do this. |
1045 | ZopelessAppServerLayer.txn.begin() |
1046 | firefox = IStore(Product).find(Product, name="firefox").one() |
1047 | - testuser = Person.selectOneBy(name="testuser") |
1048 | + testuser = IStore(Person).find(Person, name="testuser").one() |
1049 | firefox.development_focus.owner = testuser |
1050 | ZopelessAppServerLayer.txn.commit() |
1051 | remote_url = self.getTransportURL("+branch/firefox") |
1052 | diff --git a/lib/lp/oci/model/ocirecipebuild.py b/lib/lp/oci/model/ocirecipebuild.py |
1053 | index b1548a5..fbc5b61 100644 |
1054 | --- a/lib/lp/oci/model/ocirecipebuild.py |
1055 | +++ b/lib/lp/oci/model/ocirecipebuild.py |
1056 | @@ -282,7 +282,7 @@ class OCIRecipeBuild(PackageBuildMixin, StormBase): |
1057 | (OCIFile, LibraryFileAlias, LibraryFileContent), |
1058 | OCIFile.build == self.id, |
1059 | LibraryFileAlias.id == OCIFile.library_file_id, |
1060 | - LibraryFileContent.id == LibraryFileAlias.contentID, |
1061 | + LibraryFileContent.id == LibraryFileAlias.content_id, |
1062 | ) |
1063 | return result.order_by([LibraryFileAlias.filename, OCIFile.id]) |
1064 | |
1065 | @@ -421,7 +421,7 @@ class OCIRecipeBuild(PackageBuildMixin, StormBase): |
1066 | (OCIFile, LibraryFileAlias, LibraryFileContent), |
1067 | OCIFile.build == self.id, |
1068 | LibraryFileAlias.id == OCIFile.library_file_id, |
1069 | - LibraryFileContent.id == LibraryFileAlias.contentID, |
1070 | + LibraryFileContent.id == LibraryFileAlias.content_id, |
1071 | OCIFile.layer_file_digest == layer_file_digest, |
1072 | ) |
1073 | .one() |
1074 | @@ -593,7 +593,7 @@ class OCIRecipeBuildSet(SpecificBuildFarmJobSourceMixin): |
1075 | |
1076 | load_related(Person, builds, ["requester_id"]) |
1077 | lfas = load_related(LibraryFileAlias, builds, ["log_id"]) |
1078 | - load_related(LibraryFileContent, lfas, ["contentID"]) |
1079 | + load_related(LibraryFileContent, lfas, ["content_id"]) |
1080 | recipes = load_related(OCIRecipe, builds, ["recipe_id"]) |
1081 | getUtility(IOCIRecipeSet).preloadDataForOCIRecipes(recipes) |
1082 | # XXX twom 2019-12-05 This needs to be extended to include |
1083 | diff --git a/lib/lp/registry/browser/person.py b/lib/lp/registry/browser/person.py |
1084 | index 5ca39fe..c359df7 100644 |
1085 | --- a/lib/lp/registry/browser/person.py |
1086 | +++ b/lib/lp/registry/browser/person.py |
1087 | @@ -248,8 +248,8 @@ class RestrictedMembershipsPersonView(LaunchpadView): |
1088 | Private teams are filtered out if the user is not a member of them. |
1089 | """ |
1090 | # This method returns a list as opposed to the database object's |
1091 | - # getLatestApprovedMembershipsForPerson which returns a sqlobject |
1092 | - # result set. |
1093 | + # getLatestApprovedMembershipsForPerson which returns a Storm |
1094 | + # ResultSet. |
1095 | membership_list = self.context.getLatestApprovedMembershipsForPerson() |
1096 | return [ |
1097 | membership |
1098 | @@ -265,8 +265,7 @@ class RestrictedMembershipsPersonView(LaunchpadView): |
1099 | Private teams are filtered out if the user is not a member of them. |
1100 | """ |
1101 | # This method returns a list as opposed to the database object's |
1102 | - # teams_with_icons which returns a sqlobject |
1103 | - # result set. |
1104 | + # teams_with_icons which returns a Storm ResultSet. |
1105 | return [ |
1106 | team |
1107 | for team in self.context.teams_with_icons |
1108 | @@ -576,7 +575,7 @@ class PersonNavigation(BranchTraversalMixin, Navigation): |
1109 | if not archive_id.isdigit(): |
1110 | return None |
1111 | return traverse_archive_subscription_for_subscriber( |
1112 | - self.context, archive_id |
1113 | + self.context, int(archive_id) |
1114 | ) |
1115 | else: |
1116 | # Otherwise we return the normal view for a person's |
1117 | @@ -2099,7 +2098,7 @@ class PersonParticipationView(LaunchpadView): |
1118 | # The member is a direct member; use the membership data. |
1119 | datejoined = membership.datejoined |
1120 | dateexpires = membership.dateexpires |
1121 | - if membership.person_id == team.teamownerID: |
1122 | + if membership.person_id == team.teamowner_id: |
1123 | role = "Owner" |
1124 | elif membership.status == TeamMembershipStatus.ADMIN: |
1125 | role = "Admin" |
1126 | diff --git a/lib/lp/registry/browser/tests/test_person_webservice.py b/lib/lp/registry/browser/tests/test_person_webservice.py |
1127 | index e0dfcb9..0081dc8 100644 |
1128 | --- a/lib/lp/registry/browser/tests/test_person_webservice.py |
1129 | +++ b/lib/lp/registry/browser/tests/test_person_webservice.py |
1130 | @@ -179,6 +179,22 @@ class TestPersonExportedID(TestCaseWithFactory): |
1131 | ) |
1132 | self.assertEqual(person_id, body["id"]) |
1133 | |
1134 | + def test_commercial_admin_can_see_id(self): |
1135 | + # A member of ~commercial-admins can read the `id` field. |
1136 | + person = self.factory.makePerson() |
1137 | + person_id = person.id |
1138 | + person_url = api_url(person) |
1139 | + |
1140 | + body = ( |
1141 | + webservice_for_person( |
1142 | + self.factory.makeCommercialAdmin(), |
1143 | + permission=OAuthPermission.WRITE_PRIVATE, |
1144 | + ) |
1145 | + .get(person_url, api_version="devel") |
1146 | + .jsonBody() |
1147 | + ) |
1148 | + self.assertEqual(person_id, body["id"]) |
1149 | + |
1150 | |
1151 | class TestPersonRepresentation(TestCaseWithFactory): |
1152 | layer = DatabaseFunctionalLayer |
1153 | diff --git a/lib/lp/registry/doc/person-account.rst b/lib/lp/registry/doc/person-account.rst |
1154 | index 21bb628..6495c01 100644 |
1155 | --- a/lib/lp/registry/doc/person-account.rst |
1156 | +++ b/lib/lp/registry/doc/person-account.rst |
1157 | @@ -130,7 +130,7 @@ will cause this spec to be reassigned. |
1158 | >>> len(foobar_pillars) > 0 |
1159 | True |
1160 | |
1161 | - >>> foobar_teams = list(Person.selectBy(teamowner=foobar)) |
1162 | + >>> foobar_teams = list(IStore(Person).find(Person, teamowner=foobar)) |
1163 | >>> len(foobar_teams) > 0 |
1164 | True |
1165 | |
1166 | @@ -202,7 +202,7 @@ adds a '-deactivatedaccount' suffix to the person's name... |
1167 | |
1168 | ...no owned teams... |
1169 | |
1170 | - >>> Person.selectBy(teamowner=foobar).is_empty() |
1171 | + >>> IStore(Person).find(Person, teamowner=foobar).is_empty() |
1172 | True |
1173 | |
1174 | ...no owned or driven pillars... |
1175 | @@ -226,7 +226,9 @@ Bar are now owned/driven by the registry admins team. |
1176 | >>> registry_pillars.issuperset(foobar_pillars) |
1177 | True |
1178 | |
1179 | - >>> registry_teams = set(Person.selectBy(teamowner=registry_experts)) |
1180 | + >>> registry_teams = set( |
1181 | + ... IStore(Person).find(Person, teamowner=registry_experts) |
1182 | + ... ) |
1183 | >>> registry_teams.issuperset(foobar_teams) |
1184 | True |
1185 | |
1186 | diff --git a/lib/lp/registry/doc/person-merge.rst b/lib/lp/registry/doc/person-merge.rst |
1187 | index f50b89c..65d98e4 100644 |
1188 | --- a/lib/lp/registry/doc/person-merge.rst |
1189 | +++ b/lib/lp/registry/doc/person-merge.rst |
1190 | @@ -278,6 +278,7 @@ create, and then delete, the needed two people. |
1191 | |
1192 | >>> from lp.registry.model.person import PersonSet, Person |
1193 | >>> from lp.registry.interfaces.person import PersonCreationRationale |
1194 | + >>> from lp.services.database.interfaces import IStore |
1195 | >>> personset = PersonSet() |
1196 | |
1197 | >>> skip = [] |
1198 | @@ -312,11 +313,14 @@ create, and then delete, the needed two people. |
1199 | ... display_name="Merge Winner", |
1200 | ... creation_rationale=lp, |
1201 | ... ) |
1202 | + ... IStore(Person).add(winner) |
1203 | ... loser = Person( |
1204 | ... name=name + ".loser", |
1205 | ... display_name="Merge Loser", |
1206 | ... creation_rationale=lp, |
1207 | ... ) |
1208 | + ... IStore(Person).add(loser) |
1209 | + ... IStore(Person).flush() |
1210 | ... yield winner, loser |
1211 | ... |
1212 | >>> endless_supply_of_players = new_players() |
1213 | diff --git a/lib/lp/registry/doc/pillar.rst b/lib/lp/registry/doc/pillar.rst |
1214 | index e961f18..72aba17 100644 |
1215 | --- a/lib/lp/registry/doc/pillar.rst |
1216 | +++ b/lib/lp/registry/doc/pillar.rst |
1217 | @@ -269,13 +269,17 @@ by that pillar name |
1218 | >>> from lp.registry.interfaces.distribution import IDistributionSet |
1219 | >>> from lp.registry.interfaces.projectgroup import IProjectGroupSet |
1220 | >>> from lp.registry.model.pillar import PillarName |
1221 | + >>> from lp.services.database.interfaces import IStore |
1222 | |
1223 | >>> ubuntu = getUtility(IDistributionSet).getByName("ubuntu") |
1224 | >>> gnome = getUtility(IProjectGroupSet).getByName("gnome") |
1225 | - >>> ubuntu_pillarname = PillarName.selectOneBy(name="ubuntu") |
1226 | + >>> ubuntu_pillarname = ( |
1227 | + ... IStore(PillarName).find(PillarName, name="ubuntu").one() |
1228 | + ... ) |
1229 | >>> ubuntu_pillarname.pillar == ubuntu |
1230 | True |
1231 | - >>> gnome_pillarname = PillarName.selectOneBy(name="gnome") |
1232 | + >>> gnome_pillarname = ( |
1233 | + ... IStore(PillarName).find(PillarName, name="gnome").one() |
1234 | + ... ) |
1235 | >>> gnome_pillarname.pillar == gnome |
1236 | True |
1237 | - |
1238 | diff --git a/lib/lp/registry/doc/vocabularies.rst b/lib/lp/registry/doc/vocabularies.rst |
1239 | index ad6bf4b..ab7f5c5 100644 |
1240 | --- a/lib/lp/registry/doc/vocabularies.rst |
1241 | +++ b/lib/lp/registry/doc/vocabularies.rst |
1242 | @@ -611,7 +611,6 @@ Any person that's already merged is not part of this vocabulary: |
1243 | |
1244 | >>> naked_cprov = removeSecurityProxy(cprov) |
1245 | >>> naked_cprov.merged = 1 |
1246 | - >>> naked_cprov.syncUpdate() |
1247 | >>> cprov in vocab |
1248 | False |
1249 | |
1250 | diff --git a/lib/lp/registry/interfaces/person.py b/lib/lp/registry/interfaces/person.py |
1251 | index ec14854..4e66cfd 100644 |
1252 | --- a/lib/lp/registry/interfaces/person.py |
1253 | +++ b/lib/lp/registry/interfaces/person.py |
1254 | @@ -128,6 +128,7 @@ from lp.registry.interfaces.teammembership import ( |
1255 | TeamMembershipStatus, |
1256 | ) |
1257 | from lp.registry.interfaces.wikiname import IWikiName |
1258 | +from lp.services.database.interfaces import IStore |
1259 | from lp.services.database.sqlbase import block_implicit_flushes |
1260 | from lp.services.fields import ( |
1261 | BlocklistableContentNameField, |
1262 | @@ -172,7 +173,7 @@ def validate_person_common( |
1263 | # Importing here to avoid a cyclic import. |
1264 | from lp.registry.model.person import Person |
1265 | |
1266 | - person = Person.get(value) |
1267 | + person = IStore(Person).get(Person, value) |
1268 | if not validate_func(person): |
1269 | raise error_class( |
1270 | "Cannot link person (name=%s, visibility=%s) to %s (name=%s)" |
1271 | @@ -219,7 +220,7 @@ def validate_membership_policy(obj, attr, value): |
1272 | return None |
1273 | |
1274 | # If we are just creating a new team, it can have any membership policy. |
1275 | - if getattr(obj, "_SO_creating", True): |
1276 | + if getattr(obj, "_creating", True): |
1277 | return value |
1278 | |
1279 | team = obj |
1280 | @@ -791,7 +792,7 @@ class IPersonLimitedView(IHasIcon, IHasLogo): |
1281 | "in listings of bugs or on a person's membership table." |
1282 | ), |
1283 | ) |
1284 | - iconID = Int(title=_("Icon ID"), required=True, readonly=True) |
1285 | + icon_id = Int(title=_("Icon ID"), required=True, readonly=True) |
1286 | logo = exported( |
1287 | LogoImageUpload( |
1288 | title=_("Logo"), |
1289 | @@ -805,7 +806,7 @@ class IPersonLimitedView(IHasIcon, IHasLogo): |
1290 | ), |
1291 | ) |
1292 | ) |
1293 | - logoID = Int(title=_("Logo ID"), required=True, readonly=True) |
1294 | + logo_id = Int(title=_("Logo ID"), required=True, readonly=True) |
1295 | # title is required for the Launchpad Page Layout main template |
1296 | title = Attribute("Person Page Title") |
1297 | is_probationary = exported( |
1298 | @@ -889,7 +890,7 @@ class IPersonViewRestricted( |
1299 | ), |
1300 | ) |
1301 | ) |
1302 | - mugshotID = Int(title=_("Mugshot ID"), required=True, readonly=True) |
1303 | + mugshot_id = Int(title=_("Mugshot ID"), required=True, readonly=True) |
1304 | |
1305 | languages = exported( |
1306 | CollectionField( |
1307 | @@ -1110,7 +1111,7 @@ class IPersonViewRestricted( |
1308 | ), |
1309 | exported_as="team_owner", |
1310 | ) |
1311 | - teamownerID = Int( |
1312 | + teamowner_id = Int( |
1313 | title=_("The Team Owner's ID or None"), required=False, readonly=True |
1314 | ) |
1315 | preferredemail = exported( |
1316 | diff --git a/lib/lp/registry/model/distribution.py b/lib/lp/registry/model/distribution.py |
1317 | index 8f4bb03..924c672 100644 |
1318 | --- a/lib/lp/registry/model/distribution.py |
1319 | +++ b/lib/lp/registry/model/distribution.py |
1320 | @@ -158,7 +158,6 @@ from lp.services.database.constants import UTC_NOW |
1321 | from lp.services.database.decoratedresultset import DecoratedResultSet |
1322 | from lp.services.database.enumcol import DBEnum |
1323 | from lp.services.database.interfaces import IStore |
1324 | -from lp.services.database.sqlbase import sqlvalues |
1325 | from lp.services.database.stormbase import StormBase |
1326 | from lp.services.database.stormexpr import ( |
1327 | ArrayAgg, |
1328 | @@ -195,6 +194,7 @@ from lp.soyuz.model.publishing import ( |
1329 | SourcePackagePublishingHistory, |
1330 | get_current_source_releases, |
1331 | ) |
1332 | +from lp.soyuz.model.queue import PackageUpload |
1333 | from lp.translations.enums import TranslationPermission |
1334 | from lp.translations.model.hastranslationimports import ( |
1335 | HasTranslationImportsMixin, |
1336 | @@ -1877,20 +1877,17 @@ class Distribution( |
1337 | |
1338 | def getPendingAcceptancePPAs(self): |
1339 | """See `IDistribution`.""" |
1340 | - query = """ |
1341 | - Archive.purpose = %s AND |
1342 | - Archive.distribution = %s AND |
1343 | - PackageUpload.archive = Archive.id AND |
1344 | - PackageUpload.status = %s |
1345 | - """ % sqlvalues( |
1346 | - ArchivePurpose.PPA, self.id, PackageUploadStatus.ACCEPTED |
1347 | - ) |
1348 | - |
1349 | - return Archive.select( |
1350 | - query, |
1351 | - clauseTables=["PackageUpload"], |
1352 | - orderBy=["archive.id"], |
1353 | - distinct=True, |
1354 | + return ( |
1355 | + IStore(Archive) |
1356 | + .find( |
1357 | + Archive, |
1358 | + Archive.purpose == ArchivePurpose.PPA, |
1359 | + Archive.distribution == self, |
1360 | + PackageUpload.archive == Archive.id, |
1361 | + PackageUpload.status == PackageUploadStatus.ACCEPTED, |
1362 | + ) |
1363 | + .order_by(Archive.id) |
1364 | + .config(distinct=True) |
1365 | ) |
1366 | |
1367 | def getPendingPublicationPPAs(self): |
1368 | diff --git a/lib/lp/registry/model/distributionsourcepackage.py b/lib/lp/registry/model/distributionsourcepackage.py |
1369 | index 67312ee..bc80a4f 100644 |
1370 | --- a/lib/lp/registry/model/distributionsourcepackage.py |
1371 | +++ b/lib/lp/registry/model/distributionsourcepackage.py |
1372 | @@ -91,8 +91,8 @@ class DistributionSourcePackage( |
1373 | HasDriversMixin, |
1374 | WebhookTargetMixin, |
1375 | ): |
1376 | - """This is a "Magic Distribution Source Package". It is not an |
1377 | - SQLObject, but instead it represents a source package with a particular |
1378 | + """This is a "Magic Distribution Source Package". It is not a |
1379 | + Storm model, but instead it represents a source package with a particular |
1380 | name in a particular distribution. You can then ask it all sorts of |
1381 | things about the releases that are published under its name, the latest |
1382 | or current release, etc. |
1383 | diff --git a/lib/lp/registry/model/distroseries.py b/lib/lp/registry/model/distroseries.py |
1384 | index 22d2895..2e19dfb 100644 |
1385 | --- a/lib/lp/registry/model/distroseries.py |
1386 | +++ b/lib/lp/registry/model/distroseries.py |
1387 | @@ -1580,7 +1580,7 @@ class DistroSeries( |
1388 | POTemplate.distroseries == self, |
1389 | POTemplate.iscurrent == True, |
1390 | ) |
1391 | - contributors = contributors.order_by(*Person._storm_sortingColumns) |
1392 | + contributors = contributors.order_by(Person._separated_sortingColumns) |
1393 | contributors = contributors.config(distinct=True) |
1394 | return contributors |
1395 | |
1396 | diff --git a/lib/lp/registry/model/distroseriesdifference.py b/lib/lp/registry/model/distroseriesdifference.py |
1397 | index e82e975..e367a69 100644 |
1398 | --- a/lib/lp/registry/model/distroseriesdifference.py |
1399 | +++ b/lib/lp/registry/model/distroseriesdifference.py |
1400 | @@ -15,7 +15,7 @@ import apt_pkg |
1401 | from debian.changelog import Changelog, Version |
1402 | from lazr.enum import DBItem |
1403 | from storm.expr import And, Cast, Column, Desc, Or, Select, Table |
1404 | -from storm.locals import Int, Reference |
1405 | +from storm.locals import Int, Reference, Unicode |
1406 | from storm.zope.interfaces import IResultSet |
1407 | from zope.component import getUtility |
1408 | from zope.interface import implementer, provider |
1409 | @@ -48,7 +48,6 @@ from lp.services.database import bulk |
1410 | from lp.services.database.decoratedresultset import DecoratedResultSet |
1411 | from lp.services.database.enumcol import DBEnum |
1412 | from lp.services.database.interfaces import IPrimaryStore, IStore |
1413 | -from lp.services.database.sqlobject import StringCol |
1414 | from lp.services.database.stormbase import StormBase |
1415 | from lp.services.messages.model.message import Message, MessageChunk |
1416 | from lp.services.propertycache import ( |
1417 | @@ -389,11 +388,11 @@ class DistroSeriesDifference(StormBase): |
1418 | allow_none=False, |
1419 | enum=DistroSeriesDifferenceType, |
1420 | ) |
1421 | - source_version = StringCol(dbName="source_version", notNull=False) |
1422 | - parent_source_version = StringCol( |
1423 | - dbName="parent_source_version", notNull=False |
1424 | + source_version = Unicode(name="source_version", allow_none=True) |
1425 | + parent_source_version = Unicode( |
1426 | + name="parent_source_version", allow_none=True |
1427 | ) |
1428 | - base_version = StringCol(dbName="base_version", notNull=False) |
1429 | + base_version = Unicode(name="base_version", allow_none=True) |
1430 | |
1431 | @staticmethod |
1432 | def new(derived_series, source_package_name, parent_series): |
1433 | diff --git a/lib/lp/registry/model/mailinglist.py b/lib/lp/registry/model/mailinglist.py |
1434 | index fd26b09..f3fcaf5 100644 |
1435 | --- a/lib/lp/registry/model/mailinglist.py |
1436 | +++ b/lib/lp/registry/model/mailinglist.py |
1437 | @@ -336,8 +336,7 @@ class MailingList(StormBase): |
1438 | ), "Email already associated with another team." |
1439 | |
1440 | def _setAndNotifyDateActivated(self): |
1441 | - """Set the date_activated field and fire a |
1442 | - SQLObjectModified event. |
1443 | + """Set the date_activated field and fire an ObjectModifiedEvent. |
1444 | |
1445 | The date_activated field is only set once - repeated calls |
1446 | will not change the field's value. |
1447 | diff --git a/lib/lp/registry/model/person.py b/lib/lp/registry/model/person.py |
1448 | index 8aef249..788333b 100644 |
1449 | --- a/lib/lp/registry/model/person.py |
1450 | +++ b/lib/lp/registry/model/person.py |
1451 | @@ -62,7 +62,7 @@ from storm.expr import ( |
1452 | With, |
1453 | ) |
1454 | from storm.info import ClassAlias |
1455 | -from storm.locals import Int, Reference, ReferenceSet, Unicode |
1456 | +from storm.locals import Bool, DateTime, Int, Reference, ReferenceSet, Unicode |
1457 | from storm.store import EmptyResultSet, Store |
1458 | from twisted.conch.ssh.common import getNS |
1459 | from twisted.conch.ssh.keys import Key |
1460 | @@ -193,25 +193,16 @@ from lp.registry.model.teammembership import ( |
1461 | ) |
1462 | from lp.services.config import config |
1463 | from lp.services.database import bulk, postgresql |
1464 | -from lp.services.database.constants import UTC_NOW |
1465 | -from lp.services.database.datetimecol import UtcDateTimeCol |
1466 | +from lp.services.database.constants import DEFAULT, UTC_NOW |
1467 | from lp.services.database.decoratedresultset import DecoratedResultSet |
1468 | from lp.services.database.enumcol import DBEnum |
1469 | from lp.services.database.interfaces import IStore |
1470 | from lp.services.database.policy import PrimaryDatabasePolicy |
1471 | from lp.services.database.sqlbase import ( |
1472 | - SQLBase, |
1473 | convert_storm_clause_to_string, |
1474 | cursor, |
1475 | sqlvalues, |
1476 | ) |
1477 | -from lp.services.database.sqlobject import ( |
1478 | - BoolCol, |
1479 | - ForeignKey, |
1480 | - IntCol, |
1481 | - SQLObjectNotFound, |
1482 | - StringCol, |
1483 | -) |
1484 | from lp.services.database.stormbase import StormBase |
1485 | from lp.services.database.stormexpr import WithMaterialized, fti_search |
1486 | from lp.services.helpers import backslashreplace, shortlist |
1487 | @@ -234,7 +225,7 @@ from lp.services.identity.interfaces.emailaddress import ( |
1488 | ) |
1489 | from lp.services.identity.model.account import Account |
1490 | from lp.services.identity.model.emailaddress import EmailAddress, HasOwnerMixin |
1491 | -from lp.services.librarian.model import LibraryFileAlias |
1492 | +from lp.services.librarian.model import LibraryFileAlias, LibraryFileContent |
1493 | from lp.services.mail.helpers import ( |
1494 | get_contact_email_addresses, |
1495 | get_email_template, |
1496 | @@ -283,7 +274,7 @@ class TeamInvitationEvent: |
1497 | self.team = team |
1498 | |
1499 | |
1500 | -class ValidPersonCache(SQLBase): |
1501 | +class ValidPersonCache(StormBase): |
1502 | """Flags if a Person is active and usable in Launchpad. |
1503 | |
1504 | This is readonly, as this is a view in the database. |
1505 | @@ -295,6 +286,10 @@ class ValidPersonCache(SQLBase): |
1506 | corroborating information. |
1507 | """ |
1508 | |
1509 | + __storm_table__ = "ValidPersonCache" |
1510 | + |
1511 | + id = Int(primary=True) |
1512 | + |
1513 | |
1514 | def validate_person_visibility(person, attr, value): |
1515 | """Validate changes in visibility. |
1516 | @@ -356,14 +351,14 @@ class PersonSettings(StormBase): |
1517 | |
1518 | __storm_table__ = "PersonSettings" |
1519 | |
1520 | - personID = Int("person", default=None, primary=True) |
1521 | - person = Reference(personID, "Person.id") |
1522 | + person_id = Int("person", default=None, primary=True) |
1523 | + person = Reference(person_id, "Person.id") |
1524 | |
1525 | - selfgenerated_bugnotifications = BoolCol(notNull=True, default=False) |
1526 | + selfgenerated_bugnotifications = Bool(allow_none=False, default=False) |
1527 | |
1528 | - expanded_notification_footers = BoolCol(notNull=False, default=False) |
1529 | + expanded_notification_footers = Bool(allow_none=True, default=False) |
1530 | |
1531 | - require_strong_email_authentication = BoolCol(notNull=False, default=False) |
1532 | + require_strong_email_authentication = Bool(allow_none=True, default=False) |
1533 | |
1534 | |
1535 | def readonly_settings(message, interface): |
1536 | @@ -421,7 +416,7 @@ _readonly_person_settings = readonly_settings( |
1537 | @implementer(IPerson) |
1538 | @delegate_to(IPersonSettings, context="_person_settings") |
1539 | class Person( |
1540 | - SQLBase, |
1541 | + StormBase, |
1542 | HasBugsBase, |
1543 | HasSpecificationsMixin, |
1544 | HasTranslationImportsMixin, |
1545 | @@ -432,14 +427,54 @@ class Person( |
1546 | ): |
1547 | """A Person.""" |
1548 | |
1549 | - def __init__(self, *args, **kwargs): |
1550 | - super().__init__(*args, **kwargs) |
1551 | - # Initialize our PersonSettings object/record. |
1552 | + __storm_table__ = "Person" |
1553 | + |
1554 | + id = Int(primary=True) |
1555 | + |
1556 | + _creating = False |
1557 | + |
1558 | + def __init__( |
1559 | + self, |
1560 | + name, |
1561 | + display_name, |
1562 | + account=None, |
1563 | + teamowner=None, |
1564 | + description=None, |
1565 | + membership_policy=DEFAULT, |
1566 | + defaultrenewalperiod=None, |
1567 | + defaultmembershipperiod=None, |
1568 | + creation_rationale=None, |
1569 | + creation_comment=None, |
1570 | + registrant=None, |
1571 | + hide_email_addresses=False, |
1572 | + ): |
1573 | + super().__init__() |
1574 | + self._creating = True |
1575 | + self.name = name |
1576 | + self.display_name = display_name |
1577 | + self.account = account |
1578 | + self.teamowner = teamowner |
1579 | + self.description = description |
1580 | + self.membership_policy = membership_policy |
1581 | + self.defaultrenewalperiod = defaultrenewalperiod |
1582 | + self.defaultmembershipperiod = defaultmembershipperiod |
1583 | + self.creation_rationale = creation_rationale |
1584 | + self.creation_comment = creation_comment |
1585 | + self.registrant = registrant |
1586 | + self.hide_email_addresses = hide_email_addresses |
1587 | if not self.is_team: |
1588 | - # This is a Person, not a team. Teams may want a TeamSettings |
1589 | - # in the future. |
1590 | + # Initialize our PersonSettings object/record. This is a |
1591 | + # Person, not a team. Teams may want a TeamSettings in the |
1592 | + # future. |
1593 | settings = PersonSettings() |
1594 | settings.person = self |
1595 | + self.__storm_loaded__() |
1596 | + del self._creating |
1597 | + |
1598 | + def __storm_loaded__(self): |
1599 | + """Mark the person as a team when created or fetched from database.""" |
1600 | + if self.is_team: |
1601 | + alsoProvides(self, ITeam) |
1602 | |
1603 | @cachedproperty |
1604 | def _person_settings(self): |
1605 | @@ -463,13 +498,16 @@ class Person( |
1606 | return self.id |
1607 | |
1608 | sortingColumns = SQL("person_sort_key(Person.displayname, Person.name)") |
1609 | - # Redefine the default ordering into Storm syntax. |
1610 | - _storm_sortingColumns = ("Person.displayname", "Person.name") |
1611 | + # If we're using SELECT DISTINCT, then we can't use sortingColumns |
1612 | + # unless `person_sort_key(Person.displayname, Person.name)` is also in |
1613 | + # the select list, which usually isn't convenient. Provide a separated |
1614 | + # version instead. |
1615 | + _separated_sortingColumns = ("Person.displayname", "Person.name") |
1616 | # When doing any sort of set operations (union, intersect, except_) with |
1617 | - # SQLObject we can't use sortingColumns because the table name Person is |
1618 | - # not available in that context, so we use this one. |
1619 | + # Storm we can't use sortingColumns because the table name Person is not |
1620 | + # available in that context, so we use this one. |
1621 | _sortingColumnsForSetOperations = SQL("person_sort_key(displayname, name)") |
1622 | - _defaultOrder = sortingColumns |
1623 | + __storm_order__ = sortingColumns |
1624 | _visibility_warning_cache_key = None |
1625 | _visibility_warning_cache = None |
1626 | |
1627 | @@ -482,53 +520,44 @@ class Person( |
1628 | # mailing list. This is because renaming a mailing list is not |
1629 | # trivial in Mailman 2.1 (see Mailman FAQ item 4.70). We prohibit |
1630 | # such renames in the team edit details view, but just to be safe, we |
1631 | - # also assert that such an attempt is not being made here. To do |
1632 | - # this, we must override the SQLObject method for setting the 'name' |
1633 | - # database column. Watch out for when SQLObject is creating this row, |
1634 | - # because in that case self.name isn't yet available. |
1635 | + # also assert that such an attempt is not being made here. Watch |
1636 | + # out for when Storm is creating this row, because in that case |
1637 | + # self.name isn't yet available. |
1638 | if self.name is None: |
1639 | mailing_list = None |
1640 | else: |
1641 | mailing_list = getUtility(IMailingListSet).get(self.name) |
1642 | can_rename = ( |
1643 | - self._SO_creating |
1644 | + self._creating |
1645 | or not self.is_team |
1646 | or mailing_list is None |
1647 | or mailing_list.status == MailingListStatus.PURGED |
1648 | ) |
1649 | assert can_rename, "Cannot rename teams with mailing lists" |
1650 | - # Everything's okay, so let SQLObject do the normal thing. |
1651 | + # Everything's okay, so let Storm do the normal thing. |
1652 | return value |
1653 | |
1654 | - name = StringCol( |
1655 | - dbName="name", |
1656 | - alternateID=True, |
1657 | - notNull=True, |
1658 | - storm_validator=_validate_name, |
1659 | - ) |
1660 | + name = Unicode(name="name", allow_none=False, validator=_validate_name) |
1661 | |
1662 | def __repr__(self): |
1663 | displayname = backslashreplace(self.displayname) |
1664 | return "<Person %s (%s)>" % (self.name, displayname) |
1665 | |
1666 | - display_name = StringCol(dbName="displayname", notNull=True) |
1667 | + display_name = Unicode(name="displayname", allow_none=False) |
1668 | |
1669 | @property |
1670 | def displayname(self): |
1671 | return self.display_name |
1672 | |
1673 | - teamdescription = StringCol(dbName="teamdescription", default=None) |
1674 | - homepage_content = StringCol(default=None) |
1675 | - _description = StringCol(dbName="description", default=None) |
1676 | - icon = ForeignKey( |
1677 | - dbName="icon", foreignKey="LibraryFileAlias", default=None |
1678 | - ) |
1679 | - logo = ForeignKey( |
1680 | - dbName="logo", foreignKey="LibraryFileAlias", default=None |
1681 | - ) |
1682 | - mugshot = ForeignKey( |
1683 | - dbName="mugshot", foreignKey="LibraryFileAlias", default=None |
1684 | - ) |
1685 | + teamdescription = Unicode(name="teamdescription", default=None) |
1686 | + homepage_content = Unicode(default=None) |
1687 | + _description = Unicode(name="description", default=None) |
1688 | + icon_id = Int(name="icon", allow_none=True, default=None) |
1689 | + icon = Reference(icon_id, "LibraryFileAlias.id") |
1690 | + logo_id = Int(name="logo", allow_none=True, default=None) |
1691 | + logo = Reference(logo_id, "LibraryFileAlias.id") |
1692 | + mugshot_id = Int(name="mugshot", allow_none=True, default=None) |
1693 | + mugshot = Reference(mugshot_id, "LibraryFileAlias.id") |
1694 | |
1695 | @property |
1696 | def account_status(self): |
1697 | @@ -547,12 +576,13 @@ class Person( |
1698 | raise NoAccountError() |
1699 | self.account.setStatus(status, user, comment) |
1700 | |
1701 | - teamowner = ForeignKey( |
1702 | - dbName="teamowner", |
1703 | - foreignKey="Person", |
1704 | + teamowner_id = Int( |
1705 | + name="teamowner", |
1706 | + validator=validate_public_person, |
1707 | + allow_none=True, |
1708 | default=None, |
1709 | - storm_validator=validate_public_person, |
1710 | ) |
1711 | + teamowner = Reference(teamowner_id, "Person.id") |
1712 | |
1713 | sshkeys = ReferenceSet("id", "SSHKey.person_id") |
1714 | |
1715 | @@ -566,30 +596,32 @@ class Person( |
1716 | default=TeamMembershipPolicy.RESTRICTED, |
1717 | validator=validate_membership_policy, |
1718 | ) |
1719 | - defaultrenewalperiod = IntCol(dbName="defaultrenewalperiod", default=None) |
1720 | - defaultmembershipperiod = IntCol( |
1721 | - dbName="defaultmembershipperiod", default=None |
1722 | - ) |
1723 | + defaultrenewalperiod = Int(name="defaultrenewalperiod", default=None) |
1724 | + defaultmembershipperiod = Int(name="defaultmembershipperiod", default=None) |
1725 | mailing_list_auto_subscribe_policy = DBEnum( |
1726 | enum=MailingListAutoSubscribePolicy, |
1727 | default=MailingListAutoSubscribePolicy.ON_REGISTRATION, |
1728 | ) |
1729 | |
1730 | - merged = ForeignKey(dbName="merged", foreignKey="Person", default=None) |
1731 | + merged_id = Int(name="merged", allow_none=True, default=None) |
1732 | + merged = Reference(merged_id, "Person.id") |
1733 | |
1734 | - datecreated = UtcDateTimeCol(notNull=True, default=UTC_NOW) |
1735 | + datecreated = DateTime( |
1736 | + allow_none=False, default=UTC_NOW, tzinfo=timezone.utc |
1737 | + ) |
1738 | creation_rationale = DBEnum(enum=PersonCreationRationale, default=None) |
1739 | - creation_comment = StringCol(default=None) |
1740 | - registrant = ForeignKey( |
1741 | - dbName="registrant", |
1742 | - foreignKey="Person", |
1743 | + creation_comment = Unicode(default=None) |
1744 | + registrant_id = Int( |
1745 | + name="registrant", |
1746 | + validator=validate_public_person, |
1747 | + allow_none=True, |
1748 | default=None, |
1749 | - storm_validator=validate_public_person, |
1750 | ) |
1751 | - hide_email_addresses = BoolCol(notNull=True, default=False) |
1752 | - verbose_bugnotifications = BoolCol(notNull=True, default=True) |
1753 | + registrant = Reference(registrant_id, "Person.id") |
1754 | + hide_email_addresses = Bool(allow_none=False, default=False) |
1755 | + verbose_bugnotifications = Bool(allow_none=False, default=True) |
1756 | |
1757 | - signedcocs = ReferenceSet("<primary key>", "SignedCodeOfConduct.owner_id") |
1758 | + signedcocs = ReferenceSet("id", "SignedCodeOfConduct.owner_id") |
1759 | _ircnicknames = ReferenceSet("id", "IrcID.person_id") |
1760 | jabberids = ReferenceSet("id", "JabberID.person_id") |
1761 | |
1762 | @@ -605,7 +637,7 @@ class Person( |
1763 | allow_none=False, |
1764 | ) |
1765 | |
1766 | - personal_standing_reason = StringCol(default=None) |
1767 | + personal_standing_reason = Unicode(default=None) |
1768 | |
1769 | @property |
1770 | def description(self): |
1771 | @@ -704,12 +736,6 @@ class Person( |
1772 | person_language.delete() |
1773 | self.deleteLanguagesCache() |
1774 | |
1775 | - def _init(self, *args, **kw): |
1776 | - """Mark the person as a team when created or fetched from database.""" |
1777 | - SQLBase._init(self, *args, **kw) |
1778 | - if self.teamownerID is not None: |
1779 | - alsoProvides(self, ITeam) |
1780 | - |
1781 | def convertToTeam(self, team_owner): |
1782 | """See `IPerson`.""" |
1783 | if self.is_team: |
1784 | @@ -1010,7 +1036,7 @@ class Person( |
1785 | @property |
1786 | def is_team(self): |
1787 | """See `IPerson`.""" |
1788 | - return self.teamownerID is not None |
1789 | + return self.teamowner_id is not None |
1790 | |
1791 | @property |
1792 | def mailing_list(self): |
1793 | @@ -1118,7 +1144,7 @@ class Person( |
1794 | OR product.bug_supervisor = %(person)s |
1795 | ) |
1796 | """ % sqlvalues( |
1797 | - person=self |
1798 | + person=self.id |
1799 | ) |
1800 | |
1801 | return "%s AND (%s)" % ( |
1802 | @@ -1158,7 +1184,7 @@ class Person( |
1803 | ) _pillar |
1804 | ON PillarName.name = _pillar.name |
1805 | """ |
1806 | - % sqlvalues(person=self) |
1807 | + % sqlvalues(person=self.id) |
1808 | ) |
1809 | |
1810 | results = IStore(self).using(SQL(origin)).find(find_spec) |
1811 | @@ -1261,7 +1287,6 @@ class Person( |
1812 | CommercialSubscription, |
1813 | ) |
1814 | from lp.registry.model.distribution import Distribution |
1815 | - from lp.registry.model.person import Person |
1816 | from lp.registry.model.product import Product |
1817 | from lp.registry.model.teammembership import TeamParticipation |
1818 | |
1819 | @@ -1370,11 +1395,9 @@ class Person( |
1820 | # This is prepopulated by various queries in and out of person.py. |
1821 | if self.is_team: |
1822 | return False |
1823 | - try: |
1824 | - ValidPersonCache.get(self.id) |
1825 | - return True |
1826 | - except SQLObjectNotFound: |
1827 | - return False |
1828 | + return ( |
1829 | + IStore(ValidPersonCache).get(ValidPersonCache, self.id) is not None |
1830 | + ) |
1831 | |
1832 | @property |
1833 | def is_probationary(self): |
1834 | @@ -1602,7 +1625,6 @@ class Person( |
1835 | def getAssignedSpecificationWorkItemsDueBefore(self, date, user): |
1836 | """See `IPerson`.""" |
1837 | from lp.registry.model.distribution import Distribution |
1838 | - from lp.registry.model.person import Person |
1839 | from lp.registry.model.product import Product |
1840 | |
1841 | store = Store.of(self) |
1842 | @@ -1814,7 +1836,7 @@ class Person( |
1843 | And( |
1844 | TeamParticipation.team_id == self.id, |
1845 | TeamParticipation.person_id != self.id, |
1846 | - Person.teamownerID != None, |
1847 | + IsNot(Person.teamowner_id, None), |
1848 | ), |
1849 | need_api=True, |
1850 | ) |
1851 | @@ -2064,7 +2086,7 @@ class Person( |
1852 | Select( |
1853 | Person.id, |
1854 | tables=[Person], |
1855 | - where=Person.teamownerID.is_in(team_select), |
1856 | + where=Person.teamowner_id.is_in(team_select), |
1857 | ), |
1858 | Select( |
1859 | TeamMembership.team_id, |
1860 | @@ -2607,7 +2629,7 @@ class Person( |
1861 | spec.assignee = None |
1862 | |
1863 | registry_experts = getUtility(ILaunchpadCelebrities).registry_experts |
1864 | - for team in Person.selectBy(teamowner=self): |
1865 | + for team in IStore(Person).find(Person, teamowner=self): |
1866 | team.teamowner = registry_experts |
1867 | for pillar_name in self.getAffiliatedPillars(self): |
1868 | pillar = pillar_name.pillar |
1869 | @@ -2664,7 +2686,7 @@ class Person( |
1870 | """Return a unique name.""" |
1871 | new_name = base_new_name |
1872 | count = 1 |
1873 | - while Person.selectOneBy(name=new_name) is not None: |
1874 | + while not IStore(Person).find(Person, name=new_name).is_empty(): |
1875 | new_name = base_new_name + str(count) |
1876 | count += 1 |
1877 | return new_name |
1878 | @@ -2945,7 +2967,7 @@ class Person( |
1879 | Person, |
1880 | Person.id == TeamParticipation.team_id, |
1881 | TeamParticipation.person == self, |
1882 | - IsNot(Person.teamownerID, None), |
1883 | + IsNot(Person.teamowner_id, None), |
1884 | ) |
1885 | .order_by(Person.sortingColumns) |
1886 | ) |
1887 | @@ -2953,11 +2975,10 @@ class Person( |
1888 | @property |
1889 | def teams_indirectly_participated_in(self): |
1890 | """See `IPerson`.""" |
1891 | - Team = ClassAlias(Person, "Team") |
1892 | store = Store.of(self) |
1893 | origin = [ |
1894 | - Team, |
1895 | - Join(TeamParticipation, Team.id == TeamParticipation.team_id), |
1896 | + Person, |
1897 | + Join(TeamParticipation, Person.id == TeamParticipation.team_id), |
1898 | LeftJoin( |
1899 | TeamMembership, |
1900 | And( |
1901 | @@ -2972,9 +2993,8 @@ class Person( |
1902 | ), |
1903 | ), |
1904 | ] |
1905 | - find_objects = Team |
1906 | return store.using(*origin).find( |
1907 | - find_objects, |
1908 | + Person, |
1909 | And( |
1910 | TeamParticipation.person == self.id, |
1911 | TeamParticipation.person != TeamParticipation.team_id, |
1912 | @@ -2991,8 +3011,8 @@ class Person( |
1913 | Person, |
1914 | Person.id == TeamParticipation.team_id, |
1915 | TeamParticipation.person == self, |
1916 | - IsNot(Person.teamownerID, None), |
1917 | - IsNot(Person.iconID, None), |
1918 | + IsNot(Person.teamowner_id, None), |
1919 | + IsNot(Person.icon_id, None), |
1920 | TeamParticipation.team != self, |
1921 | ) |
1922 | .order_by(Person.sortingColumns) |
1923 | @@ -3576,8 +3596,10 @@ class Person( |
1924 | @property |
1925 | def ppas(self): |
1926 | """See `IPerson`.""" |
1927 | - return Archive.selectBy( |
1928 | - owner=self, purpose=ArchivePurpose.PPA, orderBy="name" |
1929 | + return ( |
1930 | + IStore(Archive) |
1931 | + .find(Archive, owner=self, purpose=ArchivePurpose.PPA) |
1932 | + .order_by(Archive.name) |
1933 | ) |
1934 | |
1935 | def getVisiblePPAs(self, user): |
1936 | @@ -3834,17 +3856,21 @@ class PersonSet: |
1937 | """See `IPersonSet`.""" |
1938 | # The odd ordering here is to ensure we hit the PostgreSQL |
1939 | # indexes. It will not make any real difference outside of tests. |
1940 | - query = ( |
1941 | - """ |
1942 | - id IN ( |
1943 | - SELECT person FROM KarmaTotalCache |
1944 | - ORDER BY karma_total DESC, person DESC |
1945 | - LIMIT %s |
1946 | - ) |
1947 | - """ |
1948 | - % limit |
1949 | + top_people = shortlist( |
1950 | + IStore(Person).find( |
1951 | + Person, |
1952 | + Person.id.is_in( |
1953 | + Select( |
1954 | + KarmaTotalCache.person_id, |
1955 | + order_by=( |
1956 | + Desc(KarmaTotalCache.karma_total), |
1957 | + Desc(KarmaTotalCache.person_id), |
1958 | + ), |
1959 | + limit=limit, |
1960 | + ) |
1961 | + ), |
1962 | + ) |
1963 | ) |
1964 | - top_people = shortlist(Person.select(query)) |
1965 | return sorted( |
1966 | top_people, |
1967 | key=lambda obj: (obj.karma, obj.displayname, obj.id), |
1968 | @@ -4152,6 +4178,9 @@ class PersonSet: |
1969 | defaultrenewalperiod=defaultrenewalperiod, |
1970 | membership_policy=membership_policy, |
1971 | ) |
1972 | + store = IStore(Person) |
1973 | + store.add(team) |
1974 | + store.flush() |
1975 | notify(ObjectCreatedEvent(team)) |
1976 | # Here we add the owner as a team admin manually because we know what |
1977 | # we're doing (so we don't need to do any sanity checks) and we don't |
1978 | @@ -4264,19 +4293,18 @@ class PersonSet: |
1979 | if not displayname: |
1980 | displayname = name.capitalize() |
1981 | |
1982 | - if account is None: |
1983 | - account_id = None |
1984 | - else: |
1985 | - account_id = account.id |
1986 | person = Person( |
1987 | name=name, |
1988 | display_name=displayname, |
1989 | - account_id=account_id, |
1990 | + account=account, |
1991 | creation_rationale=rationale, |
1992 | creation_comment=comment, |
1993 | hide_email_addresses=hide_email_addresses, |
1994 | registrant=registrant, |
1995 | ) |
1996 | + store = IStore(Person) |
1997 | + store.add(person) |
1998 | + store.flush() |
1999 | return person |
2000 | |
2001 | def ensurePerson( |
2002 | @@ -4304,10 +4332,10 @@ class PersonSet: |
2003 | |
2004 | def getByName(self, name, ignore_merged=True): |
2005 | """See `IPersonSet`.""" |
2006 | - query = Person.name == name |
2007 | + clauses = [Person.name == name] |
2008 | if ignore_merged: |
2009 | - query = And(query, Person.mergedID == None) |
2010 | - return Person.selectOne(query) |
2011 | + clauses.append(Is(Person.merged_id, None)) |
2012 | + return IStore(Person).find(Person, *clauses).one() |
2013 | |
2014 | def getByAccount(self, account): |
2015 | """See `IPersonSet`.""" |
2016 | @@ -4316,14 +4344,26 @@ class PersonSet: |
2017 | def updateStatistics(self): |
2018 | """See `IPersonSet`.""" |
2019 | stats = getUtility(ILaunchpadStatisticSet) |
2020 | - people_count = Person.select( |
2021 | - And(Person.teamownerID == None, Person.mergedID == None) |
2022 | - ).count() |
2023 | + people_count = ( |
2024 | + IStore(Person) |
2025 | + .find( |
2026 | + Person, |
2027 | + Is(Person.teamowner_id, None), |
2028 | + Is(Person.merged_id, None), |
2029 | + ) |
2030 | + .count() |
2031 | + ) |
2032 | stats.update("people_count", people_count) |
2033 | transaction.commit() |
2034 | - teams_count = Person.select( |
2035 | - And(Person.q.teamownerID != None, Person.q.mergedID == None) |
2036 | - ).count() |
2037 | + teams_count = ( |
2038 | + IStore(Person) |
2039 | + .find( |
2040 | + Person, |
2041 | + IsNot(Person.teamowner_id, None), |
2042 | + Is(Person.merged_id, None), |
2043 | + ) |
2044 | + .count() |
2045 | + ) |
2046 | stats.update("teams_count", teams_count) |
2047 | transaction.commit() |
2048 | |
2049 | @@ -4483,10 +4523,7 @@ class PersonSet: |
2050 | |
2051 | def get(self, personid): |
2052 | """See `IPersonSet`.""" |
2053 | - try: |
2054 | - return Person.get(personid) |
2055 | - except SQLObjectNotFound: |
2056 | - return None |
2057 | + return IStore(Person).get(Person, personid) |
2058 | |
2059 | def getByEmail(self, email, filter_status=True): |
2060 | """See `IPersonSet`.""" |
2061 | @@ -4543,8 +4580,8 @@ class PersonSet: |
2062 | # not hit the DB. |
2063 | valid_person_ids = { |
2064 | person_id.id |
2065 | - for person_id in ValidPersonCache.select( |
2066 | - "id IN %s" % sqlvalues(person_ids) |
2067 | + for person_id in IStore(ValidPersonCache).find( |
2068 | + ValidPersonCache, ValidPersonCache.id.is_in(person_ids) |
2069 | ) |
2070 | } |
2071 | return [person for person in persons if person.id in valid_person_ids] |
2072 | @@ -4589,23 +4626,24 @@ class PersonSet: |
2073 | """See `IPersonSet`.""" |
2074 | aliases = [] |
2075 | aliases.extend( |
2076 | - person.iconID for person in people if person.iconID is not None |
2077 | + person.icon_id for person in people if person.icon_id is not None |
2078 | ) |
2079 | aliases.extend( |
2080 | - person.logoID for person in people if person.logoID is not None |
2081 | + person.logo_id for person in people if person.logo_id is not None |
2082 | ) |
2083 | aliases.extend( |
2084 | - person.mugshotID |
2085 | + person.mugshot_id |
2086 | for person in people |
2087 | - if person.mugshotID is not None |
2088 | + if person.mugshot_id is not None |
2089 | ) |
2090 | if not aliases: |
2091 | return |
2092 | # Listify, since this is a pure cache. |
2093 | list( |
2094 | - LibraryFileAlias.select( |
2095 | - "LibraryFileAlias.id IN %s" % sqlvalues(aliases), |
2096 | - prejoins=["content"], |
2097 | + IStore(LibraryFileAlias).find( |
2098 | + (LibraryFileAlias, LibraryFileContent), |
2099 | + LibraryFileAlias.id.is_in(aliases), |
2100 | + LibraryFileAlias.content == LibraryFileContent.id, |
2101 | ) |
2102 | ) |
2103 | |
2104 | @@ -4792,7 +4830,7 @@ class PersonSet: |
2105 | |
2106 | def preload_for_people(rows): |
2107 | if need_teamowner or need_api: |
2108 | - bulk.load(Person, [row[0].teamownerID for row in rows]) |
2109 | + bulk.load(Person, [row[0].teamowner_id for row in rows]) |
2110 | |
2111 | def prepopulate_person(row): |
2112 | result = row[0] |
2113 | @@ -5546,7 +5584,7 @@ def _get_recipients_for_team(team): |
2114 | EmailAddress.person != None, |
2115 | Account.status == AccountStatus.ACTIVE, |
2116 | ), |
2117 | - Person.teamownerID != None, |
2118 | + IsNot(Person.teamowner_id, None), |
2119 | ), |
2120 | ).config(distinct=True) |
2121 | next_ids = [] |
2122 | diff --git a/lib/lp/registry/model/pillar.py b/lib/lp/registry/model/pillar.py |
2123 | index ad9af3a..14d4fc8 100644 |
2124 | --- a/lib/lp/registry/model/pillar.py |
2125 | +++ b/lib/lp/registry/model/pillar.py |
2126 | @@ -13,7 +13,7 @@ import six |
2127 | from storm.databases.postgres import Case |
2128 | from storm.expr import And, Coalesce, Desc, LeftJoin, Lower, Or |
2129 | from storm.info import ClassAlias |
2130 | -from storm.locals import Int, Reference |
2131 | +from storm.locals import Bool, Int, Reference, Unicode |
2132 | from storm.store import Store |
2133 | from zope.component import getUtility |
2134 | from zope.interface import implementer, provider |
2135 | @@ -33,8 +33,7 @@ from lp.services.config import config |
2136 | from lp.services.database.bulk import load_related |
2137 | from lp.services.database.decoratedresultset import DecoratedResultSet |
2138 | from lp.services.database.interfaces import IStore |
2139 | -from lp.services.database.sqlbase import SQLBase |
2140 | -from lp.services.database.sqlobject import BoolCol, ForeignKey, StringCol |
2141 | +from lp.services.database.stormbase import StormBase |
2142 | from lp.services.database.stormexpr import fti_search, rank_by_fti |
2143 | from lp.services.librarian.model import LibraryFileAlias |
2144 | |
2145 | @@ -101,7 +100,7 @@ class PillarNameSet: |
2146 | # We could attempt to do this in a single database query, but I |
2147 | # expect that doing two queries will be faster that OUTER JOINing |
2148 | # the Project, Product and Distribution tables (and this approach |
2149 | - # works better with SQLObject too. |
2150 | + # is easier with Storm too). |
2151 | |
2152 | # Retrieve information out of the PillarName table. |
2153 | query = """ |
2154 | @@ -326,23 +325,26 @@ class PillarNameSet: |
2155 | |
2156 | |
2157 | @implementer(IPillarName) |
2158 | -class PillarName(SQLBase): |
2159 | - _table = "PillarName" |
2160 | - _defaultOrder = "name" |
2161 | +class PillarName(StormBase): |
2162 | + __storm_table__ = "PillarName" |
2163 | + __storm_order__ = "name" |
2164 | |
2165 | - name = StringCol( |
2166 | - dbName="name", notNull=True, unique=True, alternateID=True |
2167 | - ) |
2168 | + id = Int(primary=True) |
2169 | + name = Unicode(name="name", allow_none=False) |
2170 | product_id = Int(name="product", allow_none=True) |
2171 | product = Reference(product_id, "Product.id") |
2172 | projectgroup_id = Int(name="project", allow_none=True) |
2173 | projectgroup = Reference(projectgroup_id, "ProjectGroup.id") |
2174 | distribution_id = Int(name="distribution", allow_none=True) |
2175 | distribution = Reference(distribution_id, "Distribution.id") |
2176 | - active = BoolCol(dbName="active", notNull=True, default=True) |
2177 | - alias_for = ForeignKey( |
2178 | - foreignKey="PillarName", dbName="alias_for", default=None |
2179 | - ) |
2180 | + active = Bool(name="active", allow_none=False, default=True) |
2181 | + alias_for_id = Int(name="alias_for", allow_none=True, default=None) |
2182 | + alias_for = Reference(alias_for_id, "PillarName.id") |
2183 | + |
2184 | + def __init__(self, name, alias_for=None): |
2185 | + super().__init__() |
2186 | + self.name = name |
2187 | + self.alias_for = alias_for |
2188 | |
2189 | @property |
2190 | def pillar(self): |
2191 | @@ -366,7 +368,10 @@ class HasAliasMixin: |
2192 | @property |
2193 | def aliases(self): |
2194 | """See `IHasAlias`.""" |
2195 | - aliases = PillarName.selectBy(alias_for=PillarName.byName(self.name)) |
2196 | + store = IStore(PillarName) |
2197 | + aliases = store.find( |
2198 | + PillarName, alias_for=store.find(PillarName, name=self.name).one() |
2199 | + ) |
2200 | return [alias.name for alias in aliases] |
2201 | |
2202 | def setAliases(self, names): |
2203 | diff --git a/lib/lp/registry/model/productrelease.py b/lib/lp/registry/model/productrelease.py |
2204 | index 8a87785..a2dd584 100644 |
2205 | --- a/lib/lp/registry/model/productrelease.py |
2206 | +++ b/lib/lp/registry/model/productrelease.py |
2207 | @@ -365,7 +365,7 @@ class ProductReleaseSet: |
2208 | ), |
2209 | LeftJoin( |
2210 | LibraryFileContent, |
2211 | - LibraryFileAlias.contentID == LibraryFileContent.id, |
2212 | + LibraryFileAlias.content == LibraryFileContent.id, |
2213 | ), |
2214 | Join( |
2215 | ProductRelease, |
2216 | diff --git a/lib/lp/registry/model/sharingjob.py b/lib/lp/registry/model/sharingjob.py |
2217 | index bf2bb78..3bb50f1 100644 |
2218 | --- a/lib/lp/registry/model/sharingjob.py |
2219 | +++ b/lib/lp/registry/model/sharingjob.py |
2220 | @@ -20,6 +20,7 @@ from zope.component import getUtility |
2221 | from zope.interface import implementer, provider |
2222 | |
2223 | from lp.app.enums import InformationType |
2224 | +from lp.app.errors import NotFoundError |
2225 | from lp.blueprints.interfaces.specification import ISpecification |
2226 | from lp.blueprints.model.specification import Specification |
2227 | from lp.blueprints.model.specificationsearch import ( |
2228 | @@ -61,7 +62,6 @@ from lp.registry.model.teammembership import TeamParticipation |
2229 | from lp.services.config import config |
2230 | from lp.services.database.enumcol import DBEnum |
2231 | from lp.services.database.interfaces import IStore |
2232 | -from lp.services.database.sqlobject import SQLObjectNotFound |
2233 | from lp.services.database.stormbase import StormBase |
2234 | from lp.services.job.model.job import EnumeratedSubclass, Job |
2235 | from lp.services.job.runner import BaseRunnableJob |
2236 | @@ -193,12 +193,12 @@ class SharingJobDerived(BaseRunnableJob, metaclass=EnumeratedSubclass): |
2237 | |
2238 | :return: the SharingJob with the specified id, as the |
2239 | current SharingJobDereived subclass. |
2240 | - :raises: SQLObjectNotFound if there is no job with the specified id, |
2241 | + :raises: NotFoundError if there is no job with the specified id, |
2242 | or its job_type does not match the desired subclass. |
2243 | """ |
2244 | job = SharingJob.get(job_id) |
2245 | if job.job_type != cls.class_job_type: |
2246 | - raise SQLObjectNotFound( |
2247 | + raise NotFoundError( |
2248 | "No object found with id %d and type %s" |
2249 | % (job_id, cls.class_job_type.title) |
2250 | ) |
2251 | diff --git a/lib/lp/registry/model/teammembership.py b/lib/lp/registry/model/teammembership.py |
2252 | index 0532e4a..0d5396b 100644 |
2253 | --- a/lib/lp/registry/model/teammembership.py |
2254 | +++ b/lib/lp/registry/model/teammembership.py |
2255 | @@ -499,12 +499,25 @@ def _cleanTeamParticipation(child, parent): |
2256 | * onto the parent.team, since we want the top and |
2257 | * bottom of the hierarchy to calculate the |
2258 | * TeamParticipation. The query above makes sure |
2259 | - * that we do this for all the ancestors. |
2260 | + * that we do this for all the ancestors. We exclude |
2261 | + * direct members that weren't already ancestors or |
2262 | + * descendants of the child from the TeamParticipation |
2263 | + * table, since they can't help us to establish entries |
2264 | + * that we need to keep. |
2265 | */ |
2266 | SELECT child.person, parent.team |
2267 | FROM TeamMembership child |
2268 | JOIN parent ON child.team = parent.person |
2269 | WHERE child.status IN %(active_states)s |
2270 | + AND child.person IN ( |
2271 | + SELECT team |
2272 | + FROM TeamParticipation |
2273 | + WHERE person = %(child)s |
2274 | + UNION |
2275 | + SELECT person |
2276 | + FROM TeamParticipation |
2277 | + WHERE team = %(child)s |
2278 | + ) |
2279 | ) |
2280 | SELECT person, team |
2281 | FROM parent |
2282 | diff --git a/lib/lp/registry/personmerge.py b/lib/lp/registry/personmerge.py |
2283 | index cec406c..2787bc2 100644 |
2284 | --- a/lib/lp/registry/personmerge.py |
2285 | +++ b/lib/lp/registry/personmerge.py |
2286 | @@ -1230,8 +1230,7 @@ def merge_people(from_person, to_person, reviewer, delete=False): |
2287 | cur.execute("SELECT id FROM Person WHERE name = %s" % sqlvalues(name)) |
2288 | i += 1 |
2289 | cur.execute( |
2290 | - "UPDATE Person SET name = %s WHERE id = %s" |
2291 | - % sqlvalues(name, from_person) |
2292 | + "UPDATE Person SET name = %s WHERE id = %s" % sqlvalues(name, from_id) |
2293 | ) |
2294 | |
2295 | # Since we've updated the database behind Storm's back, |
2296 | diff --git a/lib/lp/registry/scripts/closeaccount.py b/lib/lp/registry/scripts/closeaccount.py |
2297 | index c7d4c55..7df78d0 100644 |
2298 | --- a/lib/lp/registry/scripts/closeaccount.py |
2299 | +++ b/lib/lp/registry/scripts/closeaccount.py |
2300 | @@ -236,7 +236,7 @@ def close_account(username, log): |
2301 | # Keep the corresponding PersonSettings row, but reset everything to the |
2302 | # defaults. |
2303 | table_notification("PersonSettings") |
2304 | - store.find(PersonSettings, PersonSettings.personID == person.id).set( |
2305 | + store.find(PersonSettings, PersonSettings.person == person).set( |
2306 | selfgenerated_bugnotifications=DEFAULT, |
2307 | # XXX cjwatson 2018-11-29: These two columns have NULL defaults, but |
2308 | # perhaps shouldn't? |
2309 | diff --git a/lib/lp/registry/scripts/populate_distroseriesdiff.py b/lib/lp/registry/scripts/populate_distroseriesdiff.py |
2310 | index 7847ee6..97adebd 100644 |
2311 | --- a/lib/lp/registry/scripts/populate_distroseriesdiff.py |
2312 | +++ b/lib/lp/registry/scripts/populate_distroseriesdiff.py |
2313 | @@ -56,7 +56,7 @@ def compose_sql_find_latest_source_package_releases(distroseries): |
2314 | parameters = { |
2315 | "active_status": quote(active_publishing_status), |
2316 | "distroseries": quote(distroseries.id), |
2317 | - "main_archive": quote(distroseries.distribution.main_archive), |
2318 | + "main_archive": quote(distroseries.distribution.main_archive.id), |
2319 | "release_pocket": quote(PackagePublishingPocket.RELEASE), |
2320 | } |
2321 | return ( |
2322 | diff --git a/lib/lp/registry/security.py b/lib/lp/registry/security.py |
2323 | index 03692fe..259455c 100644 |
2324 | --- a/lib/lp/registry/security.py |
2325 | +++ b/lib/lp/registry/security.py |
2326 | @@ -126,10 +126,28 @@ class ModerateProjectGroupSet(ModerateByRegistryExpertsOrAdmins): |
2327 | usedfor = IProjectGroupSet |
2328 | |
2329 | |
2330 | -class ModeratePerson(ModerateByRegistryExpertsOrAdmins): |
2331 | +class ModeratePerson(AuthorizationBase): |
2332 | permission = "launchpad.Moderate" |
2333 | usedfor = IPerson |
2334 | |
2335 | + def checkAuthenticated(self, user): |
2336 | + """Allow admins, commercial admins, and registry experts. |
2337 | + |
2338 | + Allowing commercial admins here is a bit of a cheat, but it allows |
2339 | + IS automation to see Person.id |
2340 | + (https://portal.admin.canonical.com/C158967) without needing to use |
2341 | + an account that's a fully-fledged member of ~admins. The only extra |
2342 | + exposure here is that commercial admins gain the ability to set the |
2343 | + status of other people's accounts, which isn't completely ideal, but |
2344 | + in practice people in the commercial admins team are always |
2345 | + highly-privileged anyway. |
2346 | + """ |
2347 | + return ( |
2348 | + user.in_admin |
2349 | + or user.in_commercial_admin |
2350 | + or user.in_registry_experts |
2351 | + ) |
2352 | + |
2353 | |
2354 | class ViewPillar(AuthorizationBase): |
2355 | usedfor = IPillar |
2356 | diff --git a/lib/lp/registry/stories/distributionmirror/xx-reassign-distributionmirror.rst b/lib/lp/registry/stories/distributionmirror/xx-reassign-distributionmirror.rst |
2357 | index 9db9db6..89ff351 100644 |
2358 | --- a/lib/lp/registry/stories/distributionmirror/xx-reassign-distributionmirror.rst |
2359 | +++ b/lib/lp/registry/stories/distributionmirror/xx-reassign-distributionmirror.rst |
2360 | @@ -52,7 +52,10 @@ We also try to use the name of an unvalidated account, which can't be used as |
2361 | the owner of something. |
2362 | |
2363 | >>> from lp.registry.model.person import Person |
2364 | - >>> Person.byName("matsubara").is_valid_person_or_team |
2365 | + >>> from lp.services.database.interfaces import IStore |
2366 | + >>> IStore(Person).find( |
2367 | + ... Person, name="matsubara" |
2368 | + ... ).one().is_valid_person_or_team |
2369 | False |
2370 | >>> browser.getControl(name="field.owner").value = "matsubara" |
2371 | >>> browser.getControl("Change").click() |
2372 | @@ -80,7 +83,7 @@ Now we try to create a team using a name that is already taken. |
2373 | Okay, let's do it properly now and reassign it to an existing (and validated) |
2374 | account. |
2375 | |
2376 | - >>> salgado = Person.byName("salgado") |
2377 | + >>> salgado = IStore(Person).find(Person, name="salgado").one() |
2378 | >>> salgado.is_valid_person_or_team |
2379 | True |
2380 | |
2381 | diff --git a/lib/lp/registry/stories/person/xx-approve-members.rst b/lib/lp/registry/stories/person/xx-approve-members.rst |
2382 | index 6428641..bb6c369 100644 |
2383 | --- a/lib/lp/registry/stories/person/xx-approve-members.rst |
2384 | +++ b/lib/lp/registry/stories/person/xx-approve-members.rst |
2385 | @@ -66,8 +66,10 @@ as an inactive one. |
2386 | # listed anywhere. |
2387 | >>> from lp.registry.model.person import Person |
2388 | >>> from lp.registry.model.teammembership import TeamMembershipSet |
2389 | + >>> from lp.services.database.interfaces import IStore |
2390 | >>> membership = TeamMembershipSet().getByPersonAndTeam( |
2391 | - ... Person.byName("name12"), Person.byName("ubuntu-team") |
2392 | + ... IStore(Person).find(Person, name="name12").one(), |
2393 | + ... IStore(Person).find(Person, name="ubuntu-team").one(), |
2394 | ... ) |
2395 | >>> membership.status.title |
2396 | 'Declined' |
2397 | diff --git a/lib/lp/registry/stories/productrelease/xx-productrelease-basics.rst b/lib/lp/registry/stories/productrelease/xx-productrelease-basics.rst |
2398 | index f25171d..036b0e7 100644 |
2399 | --- a/lib/lp/registry/stories/productrelease/xx-productrelease-basics.rst |
2400 | +++ b/lib/lp/registry/stories/productrelease/xx-productrelease-basics.rst |
2401 | @@ -169,7 +169,7 @@ Celso is a member of ubuntu-team, so he can edit this release too: |
2402 | |
2403 | And if no-priv drives the series... |
2404 | |
2405 | - >>> no_priv = Person.selectOneBy(name="no-priv") |
2406 | + >>> no_priv = IStore(Person).find(Person, name="no-priv").one() |
2407 | >>> tomcat.getSeries("trunk").driver = no_priv |
2408 | |
2409 | ... they can edit existing releases as well, even if they are owned by |
2410 | diff --git a/lib/lp/registry/stories/productrelease/xx-productrelease-view.rst b/lib/lp/registry/stories/productrelease/xx-productrelease-view.rst |
2411 | index 867b8aa..b411b02 100644 |
2412 | --- a/lib/lp/registry/stories/productrelease/xx-productrelease-view.rst |
2413 | +++ b/lib/lp/registry/stories/productrelease/xx-productrelease-view.rst |
2414 | @@ -41,9 +41,12 @@ downloaded and the date of the last download on that table as well. |
2415 | # Manually update the download counter for that file above so that we can |
2416 | # test it. |
2417 | >>> from datetime import date, datetime, timezone |
2418 | + >>> from lp.services.database.interfaces import IStore |
2419 | >>> from lp.services.librarian.model import LibraryFileAlias |
2420 | - >>> lfa = LibraryFileAlias.selectOne( |
2421 | - ... LibraryFileAlias.q.filename == "firefox_0.9.2.orig.tar.gz" |
2422 | + >>> lfa = ( |
2423 | + ... IStore(LibraryFileAlias) |
2424 | + ... .find(LibraryFileAlias, filename="firefox_0.9.2.orig.tar.gz") |
2425 | + ... .one() |
2426 | ... ) |
2427 | >>> lfa.updateDownloadCount(date(2006, 5, 4), None, 1) |
2428 | |
2429 | diff --git a/lib/lp/registry/stories/teammembership/xx-add-member.rst b/lib/lp/registry/stories/teammembership/xx-add-member.rst |
2430 | index b80bb89..c8ea2b2 100644 |
2431 | --- a/lib/lp/registry/stories/teammembership/xx-add-member.rst |
2432 | +++ b/lib/lp/registry/stories/teammembership/xx-add-member.rst |
2433 | @@ -23,8 +23,10 @@ Let's make sure that 'cprov' is now an Approved member of |
2434 | >>> from lp.registry.model.person import Person |
2435 | >>> from lp.registry.model.teammembership import TeamMembership |
2436 | >>> from lp.services.database.interfaces import IStore |
2437 | - >>> cprov = Person.byName("cprov") |
2438 | - >>> landscape_team = Person.byName("landscape-developers") |
2439 | + >>> cprov = IStore(Person).find(Person, name="cprov").one() |
2440 | + >>> landscape_team = ( |
2441 | + ... IStore(Person).find(Person, name="landscape-developers").one() |
2442 | + ... ) |
2443 | >>> cprov_landscape_membership = ( |
2444 | ... IStore(TeamMembership) |
2445 | ... .find(TeamMembership, person=cprov, team=landscape_team) |
2446 | @@ -56,7 +58,7 @@ become a member. |
2447 | As we can see, the launchpad team will not be one of the team's active |
2448 | members. |
2449 | |
2450 | - >>> launchpad = Person.byName("launchpad") |
2451 | + >>> launchpad = IStore(Person).find(Person, name="launchpad").one() |
2452 | >>> launchpad in landscape_team.activemembers |
2453 | False |
2454 | >>> membership = ( |
2455 | diff --git a/lib/lp/registry/stories/teammembership/xx-teammembership.rst b/lib/lp/registry/stories/teammembership/xx-teammembership.rst |
2456 | index a7b5a3b..9d1e8a7 100644 |
2457 | --- a/lib/lp/registry/stories/teammembership/xx-teammembership.rst |
2458 | +++ b/lib/lp/registry/stories/teammembership/xx-teammembership.rst |
2459 | @@ -28,9 +28,11 @@ Regular users can create teams. |
2460 | The owner of a team is always added as an administrator of their team. |
2461 | |
2462 | >>> from lp.registry.model.person import Person |
2463 | - >>> for a in Person.byName("myemail").adminmembers: |
2464 | + >>> from lp.services.database.interfaces import IStore |
2465 | + >>> for a in ( |
2466 | + ... IStore(Person).find(Person, name="myemail").one().adminmembers |
2467 | + ... ): |
2468 | ... print(a.name) |
2469 | - ... |
2470 | name12 |
2471 | |
2472 | |
2473 | @@ -90,8 +92,7 @@ approved, though. |
2474 | |
2475 | >>> from storm.locals import Store |
2476 | >>> from lp.registry.interfaces.person import TeamMembershipPolicy |
2477 | - >>> from lp.registry.model.person import Person |
2478 | - >>> myemail = Person.selectOneBy(name="myemail") |
2479 | + >>> myemail = IStore(Person).find(Person, name="myemail").one() |
2480 | >>> myemail.membership_policy = TeamMembershipPolicy.MODERATED |
2481 | >>> Store.of(myemail).flush() |
2482 | |
2483 | diff --git a/lib/lp/registry/tests/test_person.py b/lib/lp/registry/tests/test_person.py |
2484 | index 131ceff..cb3a5a7 100644 |
2485 | --- a/lib/lp/registry/tests/test_person.py |
2486 | +++ b/lib/lp/registry/tests/test_person.py |
2487 | @@ -946,12 +946,12 @@ class TestPersonStates(TestCaseWithFactory): |
2488 | is already in use. If this happens, we'll simply append an integer to |
2489 | that name until we can find one that is free. |
2490 | """ |
2491 | - sample_person = Person.byName("name12") |
2492 | + sample_person = IStore(Person).find(Person, name="name12").one() |
2493 | login(sample_person.preferredemail.email) |
2494 | sample_person.deactivate(comment="blah!") |
2495 | self.assertEqual(sample_person.name, "name12-deactivatedaccount") |
2496 | # Now that name12 is free Foo Bar can use it. |
2497 | - foo_bar = Person.byName("name16") |
2498 | + foo_bar = IStore(Person).find(Person, name="name16").one() |
2499 | foo_bar.name = "name12" |
2500 | # If Foo Bar deactivates their account, though, we'll have to use a |
2501 | # name other than name12-deactivatedaccount because that is already |
2502 | @@ -980,9 +980,9 @@ class TestPersonStates(TestCaseWithFactory): |
2503 | self.assertIs(None, product.bug_supervisor) |
2504 | |
2505 | def test_getDirectMemberIParticipateIn(self): |
2506 | - sample_person = Person.byName("name12") |
2507 | - warty_team = Person.byName("name20") |
2508 | - ubuntu_team = Person.byName("ubuntu-team") |
2509 | + sample_person = IStore(Person).find(Person, name="name12").one() |
2510 | + warty_team = IStore(Person).find(Person, name="name20").one() |
2511 | + ubuntu_team = IStore(Person).find(Person, name="ubuntu-team").one() |
2512 | # Sample Person is an active member of Warty Security Team which in |
2513 | # turn is a proposed member of Ubuntu Team. That means |
2514 | # sample_person._getDirectMemberIParticipateIn(ubuntu_team) will fail |
2515 | @@ -1061,7 +1061,7 @@ class TestPersonStates(TestCaseWithFactory): |
2516 | def test_visibility_validator_team_ss_prod_pub_to_private(self): |
2517 | # A PUBLIC team with a structural subscription to a product can |
2518 | # convert to a PRIVATE team. |
2519 | - foo_bar = Person.byName("name16") |
2520 | + foo_bar = IStore(Person).find(Person, name="name16").one() |
2521 | self.bzr.addSubscription(self.otherteam, foo_bar) |
2522 | self.otherteam.visibility = PersonVisibility.PRIVATE |
2523 | |
2524 | diff --git a/lib/lp/registry/tests/test_teammembership.py b/lib/lp/registry/tests/test_teammembership.py |
2525 | index ec7e3c9..5a5fb5b 100644 |
2526 | --- a/lib/lp/registry/tests/test_teammembership.py |
2527 | +++ b/lib/lp/registry/tests/test_teammembership.py |
2528 | @@ -821,7 +821,7 @@ class TestTeamMembership(TestCaseWithFactory): |
2529 | TeamMembershipStatus.DEACTIVATED, |
2530 | getUtility(IPersonSet).getByName("name16"), |
2531 | ) |
2532 | - # Bypass SQLObject to make sure the update was really flushed to the |
2533 | + # Bypass Storm to make sure the update was really flushed to the |
2534 | # database. |
2535 | cur = cursor() |
2536 | cur.execute("SELECT status FROM teammembership WHERE id = %d" % tm.id) |
2537 | diff --git a/lib/lp/registry/vocabularies.py b/lib/lp/registry/vocabularies.py |
2538 | index 3ef19fe..79e938c 100644 |
2539 | --- a/lib/lp/registry/vocabularies.py |
2540 | +++ b/lib/lp/registry/vocabularies.py |
2541 | @@ -71,6 +71,8 @@ from storm.expr import ( |
2542 | And, |
2543 | Column, |
2544 | Desc, |
2545 | + Is, |
2546 | + IsNot, |
2547 | Join, |
2548 | LeftJoin, |
2549 | Not, |
2550 | @@ -176,7 +178,6 @@ from lp.services.webapp.vocabulary import ( |
2551 | IHugeVocabulary, |
2552 | NamedStormHugeVocabulary, |
2553 | NamedStormVocabulary, |
2554 | - SQLObjectVocabularyBase, |
2555 | StormVocabularyBase, |
2556 | VocabularyFilter, |
2557 | ) |
2558 | @@ -208,7 +209,6 @@ class BasePersonVocabulary: |
2559 | If the token contains an '@', treat it like an email. Otherwise, |
2560 | treat it like a name. |
2561 | """ |
2562 | - token = six.ensure_text(token) |
2563 | if "@" in token: |
2564 | # This looks like an email token, so let's do an object |
2565 | # lookup based on that. |
2566 | @@ -272,7 +272,7 @@ class ProductVocabulary(StormVocabularyBase): |
2567 | return self.toTerm(product) |
2568 | |
2569 | def search(self, query, vocab_filter=None): |
2570 | - """See `SQLObjectVocabularyBase`. |
2571 | + """See `StormVocabularyBase`. |
2572 | |
2573 | Returns products where the product name, displayname, title, |
2574 | summary, or description contain the given query. Returns an empty list |
2575 | @@ -338,7 +338,7 @@ class ProjectGroupVocabulary(StormVocabularyBase): |
2576 | return self.toTerm(project) |
2577 | |
2578 | def search(self, query, vocab_filter=None): |
2579 | - """See `SQLObjectVocabularyBase`. |
2580 | + """See `StormVocabularyBase`. |
2581 | |
2582 | Returns projects where the project name, displayname, title, |
2583 | summary, or description contain the given query. Returns an empty list |
2584 | @@ -369,11 +369,11 @@ def project_products_vocabulary_factory(context): |
2585 | ) |
2586 | |
2587 | |
2588 | -class UserTeamsParticipationVocabulary(SQLObjectVocabularyBase): |
2589 | +class UserTeamsParticipationVocabulary(StormVocabularyBase): |
2590 | """Describes the public teams in which the current user participates.""" |
2591 | |
2592 | _table = Person |
2593 | - _orderBy = "display_name" |
2594 | + _order_by = "display_name" |
2595 | |
2596 | INCLUDE_PRIVATE_TEAM = False |
2597 | |
2598 | @@ -401,7 +401,7 @@ class UserTeamsParticipationVocabulary(SQLObjectVocabularyBase): |
2599 | teams = list( |
2600 | IStore(Person) |
2601 | .find(Person, *clauses) |
2602 | - .order_by(Person._storm_sortingColumns) |
2603 | + .order_by(Person.sortingColumns) |
2604 | ) |
2605 | # Users can view all the teams they belong to. |
2606 | precache_permission_for_objects( |
2607 | @@ -428,7 +428,7 @@ class UserTeamsParticipationVocabulary(SQLObjectVocabularyBase): |
2608 | |
2609 | @implementer(IHugeVocabulary) |
2610 | class NonMergedPeopleAndTeamsVocabulary( |
2611 | - BasePersonVocabulary, SQLObjectVocabularyBase |
2612 | + BasePersonVocabulary, StormVocabularyBase |
2613 | ): |
2614 | """The set of all non-merged people and teams. |
2615 | |
2616 | @@ -437,7 +437,7 @@ class NonMergedPeopleAndTeamsVocabulary( |
2617 | a preferred email address, that is, unvalidated person profiles. |
2618 | """ |
2619 | |
2620 | - _orderBy = ["display_name"] |
2621 | + _order_by = ["display_name"] |
2622 | displayname = "Select a Person or Team" |
2623 | step_title = "Search" |
2624 | |
2625 | @@ -449,7 +449,7 @@ class NonMergedPeopleAndTeamsVocabulary( |
2626 | return getUtility(IPersonSet).find(text) |
2627 | |
2628 | def search(self, text, vocab_filter=None): |
2629 | - """See `SQLObjectVocabularyBase`. |
2630 | + """See `StormVocabularyBase`. |
2631 | |
2632 | Return people/teams whose fti or email address match :text. |
2633 | """ |
2634 | @@ -461,7 +461,7 @@ class NonMergedPeopleAndTeamsVocabulary( |
2635 | |
2636 | @implementer(IHugeVocabulary) |
2637 | class PersonAccountToMergeVocabulary( |
2638 | - BasePersonVocabulary, SQLObjectVocabularyBase |
2639 | + BasePersonVocabulary, StormVocabularyBase |
2640 | ): |
2641 | """The set of all non-merged people with at least one email address. |
2642 | |
2643 | @@ -469,7 +469,7 @@ class PersonAccountToMergeVocabulary( |
2644 | accounts to merge. You *don't* want to use it. |
2645 | """ |
2646 | |
2647 | - _orderBy = ["display_name"] |
2648 | + _order_by = ["display_name"] |
2649 | displayname = "Select a Person to Merge" |
2650 | step_title = "Search" |
2651 | must_have_email = True |
2652 | @@ -486,7 +486,7 @@ class PersonAccountToMergeVocabulary( |
2653 | ) |
2654 | |
2655 | def search(self, text, vocab_filter=None): |
2656 | - """See `SQLObjectVocabularyBase`. |
2657 | + """See `StormVocabularyBase`. |
2658 | |
2659 | Return people whose fti or email address match :text. |
2660 | """ |
2661 | @@ -516,7 +516,7 @@ class VocabularyFilterPerson(VocabularyFilter): |
2662 | |
2663 | @property |
2664 | def filter_terms(self): |
2665 | - return [Person.teamownerID == None] |
2666 | + return [Is(Person.teamowner_id, None)] |
2667 | |
2668 | |
2669 | class VocabularyFilterTeam(VocabularyFilter): |
2670 | @@ -529,13 +529,11 @@ class VocabularyFilterTeam(VocabularyFilter): |
2671 | |
2672 | @property |
2673 | def filter_terms(self): |
2674 | - return [Person.teamownerID != None] |
2675 | + return [IsNot(Person.teamowner_id, None)] |
2676 | |
2677 | |
2678 | @implementer(IHugeVocabulary) |
2679 | -class ValidPersonOrTeamVocabulary( |
2680 | - BasePersonVocabulary, SQLObjectVocabularyBase |
2681 | -): |
2682 | +class ValidPersonOrTeamVocabulary(BasePersonVocabulary, StormVocabularyBase): |
2683 | """The set of valid, viewable Persons/Teams in Launchpad. |
2684 | |
2685 | A Person is considered valid if they have a preferred email address, and |
2686 | @@ -1572,7 +1570,7 @@ class CommercialProjectsVocabulary(NamedStormVocabulary): |
2687 | raise LookupError(token) |
2688 | |
2689 | def searchForTerms(self, query=None, vocab_filter=None): |
2690 | - """See `SQLObjectVocabularyBase`.""" |
2691 | + """See `StormVocabularyBase`.""" |
2692 | results = self._doSearch(query) |
2693 | num = results.count() |
2694 | return CountableIterator(num, results, self.toTerm) |
2695 | @@ -1944,7 +1942,7 @@ class PillarVocabularyBase(NamedStormHugeVocabulary): |
2696 | def toTerm(self, obj): |
2697 | """See `IVocabulary`.""" |
2698 | if type(obj) == int: |
2699 | - return self.toTerm(PillarName.get(obj)) |
2700 | + return self.toTerm(IStore(PillarName).get(PillarName, obj)) |
2701 | if IPillarName.providedBy(obj): |
2702 | assert obj.active, "Inactive object %s %d" % ( |
2703 | obj.__class__.__name__, |
2704 | diff --git a/lib/lp/scripts/garbo.py b/lib/lp/scripts/garbo.py |
2705 | index eab11db..06da799 100644 |
2706 | --- a/lib/lp/scripts/garbo.py |
2707 | +++ b/lib/lp/scripts/garbo.py |
2708 | @@ -1994,7 +1994,7 @@ class ArchiveAuthTokenDeactivator(BulkPruner): |
2709 | ) |
2710 | ) |
2711 | affected_ppas = load_related(Archive, tokens, ["archive_id"]) |
2712 | - load_related(Person, affected_ppas, ["ownerID"]) |
2713 | + load_related(Person, affected_ppas, ["owner_id"]) |
2714 | getUtility(IPersonSet).getPrecachedPersonsFromIDs( |
2715 | [token.person_id for token in tokens], need_preferred_email=True |
2716 | ) |
2717 | diff --git a/lib/lp/scripts/harness.py b/lib/lp/scripts/harness.py |
2718 | index 1949e57..4d65d31 100644 |
2719 | --- a/lib/lp/scripts/harness.py |
2720 | +++ b/lib/lp/scripts/harness.py |
2721 | @@ -73,7 +73,7 @@ def _get_locals(): |
2722 | # Create a few variables "in case they come in handy." |
2723 | # Do we really use these? Are they worth carrying around? |
2724 | d = store.get(Distribution, 1) |
2725 | - p = Person.get(1) |
2726 | + p = store.get(Person, 1) |
2727 | ds = store.get(DistroSeries, 1) |
2728 | prod = store.get(Product, 1) |
2729 | proj = store.get(ProjectGroup, 1) |
2730 | diff --git a/lib/lp/services/apachelogparser/model/parsedapachelog.py b/lib/lp/services/apachelogparser/model/parsedapachelog.py |
2731 | index 66a4b35..f8541da 100644 |
2732 | --- a/lib/lp/services/apachelogparser/model/parsedapachelog.py |
2733 | +++ b/lib/lp/services/apachelogparser/model/parsedapachelog.py |
2734 | @@ -3,15 +3,16 @@ |
2735 | |
2736 | __all__ = ["ParsedApacheLog"] |
2737 | |
2738 | +from datetime import timezone |
2739 | + |
2740 | import six |
2741 | -from storm.locals import Int, Unicode |
2742 | +from storm.locals import DateTime, Int, Unicode |
2743 | from zope.interface import implementer |
2744 | |
2745 | from lp.services.apachelogparser.interfaces.parsedapachelog import ( |
2746 | IParsedApacheLog, |
2747 | ) |
2748 | from lp.services.database.constants import UTC_NOW |
2749 | -from lp.services.database.datetimecol import UtcDateTimeCol |
2750 | from lp.services.database.interfaces import IStore |
2751 | from lp.services.database.stormbase import StormBase |
2752 | |
2753 | @@ -25,7 +26,9 @@ class ParsedApacheLog(StormBase): |
2754 | id = Int(primary=True) |
2755 | first_line = Unicode(allow_none=False) |
2756 | bytes_read = Int(allow_none=False) |
2757 | - date_last_parsed = UtcDateTimeCol(notNull=True, default=UTC_NOW) |
2758 | + date_last_parsed = DateTime( |
2759 | + allow_none=False, default=UTC_NOW, tzinfo=timezone.utc |
2760 | + ) |
2761 | |
2762 | def __init__(self, first_line, bytes_read): |
2763 | super().__init__() |
2764 | diff --git a/lib/lp/services/auth/tests/test_model.py b/lib/lp/services/auth/tests/test_model.py |
2765 | index b33ccf7..ca33eec 100644 |
2766 | --- a/lib/lp/services/auth/tests/test_model.py |
2767 | +++ b/lib/lp/services/auth/tests/test_model.py |
2768 | @@ -46,31 +46,36 @@ from lp.testing.matchers import HasQueryCount |
2769 | from lp.testing.pages import webservice_for_person |
2770 | |
2771 | |
2772 | -class TestAccessToken(TestCaseWithFactory): |
2773 | +class TestAccessTokenBase: |
2774 | layer = DatabaseFunctionalLayer |
2775 | |
2776 | def test_owner_can_edit(self): |
2777 | owner = self.factory.makePerson() |
2778 | - _, token = self.factory.makeAccessToken(owner=owner) |
2779 | + _, token = self.factory.makeAccessToken( |
2780 | + owner=owner, target=self.makeTarget() |
2781 | + ) |
2782 | login_person(owner) |
2783 | self.assertTrue(check_permission("launchpad.Edit", token)) |
2784 | |
2785 | def test_target_owner_can_edit(self): |
2786 | target_owner = self.factory.makePerson() |
2787 | - repository = self.factory.makeGitRepository(owner=target_owner) |
2788 | - _, token = self.factory.makeAccessToken(target=repository) |
2789 | + _, token = self.factory.makeAccessToken( |
2790 | + target=self.makeTarget(target_owner) |
2791 | + ) |
2792 | login_person(target_owner) |
2793 | self.assertTrue(check_permission("launchpad.Edit", token)) |
2794 | |
2795 | def test_other_user_cannot_edit(self): |
2796 | - _, token = self.factory.makeAccessToken() |
2797 | + _, token = self.factory.makeAccessToken(target=self.makeTarget()) |
2798 | login_person(self.factory.makePerson()) |
2799 | self.assertFalse(check_permission("launchpad.Edit", token)) |
2800 | |
2801 | def test_updateLastUsed_never_used(self): |
2802 | # If the token has never been used, we update its last-used date. |
2803 | owner = self.factory.makePerson() |
2804 | - _, token = self.factory.makeAccessToken(owner=owner) |
2805 | + _, token = self.factory.makeAccessToken( |
2806 | + owner=owner, target=self.makeTarget() |
2807 | + ) |
2808 | login_person(owner) |
2809 | self.assertIsNone(token.date_last_used) |
2810 | transaction.commit() |
2811 | @@ -82,7 +87,9 @@ class TestAccessToken(TestCaseWithFactory): |
2812 | # If the token's last-used date was updated recently, we leave it |
2813 | # alone. |
2814 | owner = self.factory.makePerson() |
2815 | - _, token = self.factory.makeAccessToken(owner=owner) |
2816 | + _, token = self.factory.makeAccessToken( |
2817 | + owner=owner, target=self.makeTarget() |
2818 | + ) |
2819 | login_person(owner) |
2820 | recent = datetime.now(timezone.utc) - timedelta(minutes=1) |
2821 | removeSecurityProxy(token).date_last_used = recent |
2822 | @@ -94,7 +101,9 @@ class TestAccessToken(TestCaseWithFactory): |
2823 | # If the token's last-used date is outside our update resolution, we |
2824 | # update it. |
2825 | owner = self.factory.makePerson() |
2826 | - _, token = self.factory.makeAccessToken(owner=owner) |
2827 | + _, token = self.factory.makeAccessToken( |
2828 | + owner=owner, target=self.makeTarget() |
2829 | + ) |
2830 | login_person(owner) |
2831 | recent = datetime.now(timezone.utc) - timedelta(hours=1) |
2832 | removeSecurityProxy(token).date_last_used = recent |
2833 | @@ -107,7 +116,9 @@ class TestAccessToken(TestCaseWithFactory): |
2834 | # If the token is locked by another transaction, we leave it alone. |
2835 | owner = self.factory.makePerson() |
2836 | owner_email = removeSecurityProxy(owner.preferredemail).email |
2837 | - secret, token = self.factory.makeAccessToken(owner=owner) |
2838 | + secret, token = self.factory.makeAccessToken( |
2839 | + owner=owner, target=self.makeTarget() |
2840 | + ) |
2841 | login_person(owner) |
2842 | self.assertIsNone(token.date_last_used) |
2843 | transaction.commit() |
2844 | @@ -150,7 +161,9 @@ class TestAccessToken(TestCaseWithFactory): |
2845 | def test_is_expired(self): |
2846 | owner = self.factory.makePerson() |
2847 | login_person(owner) |
2848 | - _, current_token = self.factory.makeAccessToken(owner=owner) |
2849 | + _, current_token = self.factory.makeAccessToken( |
2850 | + owner=owner, target=self.makeTarget() |
2851 | + ) |
2852 | _, expired_token = self.factory.makeAccessToken( |
2853 | owner=owner, |
2854 | date_expires=datetime.now(timezone.utc) - timedelta(minutes=1), |
2855 | @@ -161,7 +174,9 @@ class TestAccessToken(TestCaseWithFactory): |
2856 | def test_revoke(self): |
2857 | owner = self.factory.makePerson() |
2858 | _, token = self.factory.makeAccessToken( |
2859 | - owner=owner, scopes=[AccessTokenScope.REPOSITORY_BUILD_STATUS] |
2860 | + owner=owner, |
2861 | + scopes=[AccessTokenScope.REPOSITORY_BUILD_STATUS], |
2862 | + target=self.makeTarget(), |
2863 | ) |
2864 | login_person(owner) |
2865 | self.assertThat( |
2866 | @@ -177,7 +192,12 @@ class TestAccessToken(TestCaseWithFactory): |
2867 | ) |
2868 | |
2869 | |
2870 | -class TestAccessTokenSet(TestCaseWithFactory): |
2871 | +class TestAccessTokenGitRepository(TestAccessTokenBase, TestCaseWithFactory): |
2872 | + def makeTarget(self, owner=None): |
2873 | + return self.factory.makeGitRepository(owner=owner) |
2874 | + |
2875 | + |
2876 | +class TestAccessTokenSetBase: |
2877 | layer = DatabaseFunctionalLayer |
2878 | |
2879 | def test_new(self): |
2880 | @@ -185,7 +205,7 @@ class TestAccessTokenSet(TestCaseWithFactory): |
2881 | self.assertEqual(64, len(secret)) |
2882 | owner = self.factory.makePerson() |
2883 | description = "Test token" |
2884 | - target = self.factory.makeGitRepository() |
2885 | + target = self.makeTarget() |
2886 | scopes = [AccessTokenScope.REPOSITORY_BUILD_STATUS] |
2887 | _, token = self.factory.makeAccessToken( |
2888 | secret=secret, |
2889 | @@ -206,13 +226,13 @@ class TestAccessTokenSet(TestCaseWithFactory): |
2890 | ) |
2891 | |
2892 | def test_getByID(self): |
2893 | - secret, token = self.factory.makeAccessToken() |
2894 | + secret, token = self.factory.makeAccessToken(target=self.makeTarget()) |
2895 | token_id = removeSecurityProxy(token).id |
2896 | self.assertEqual(token, getUtility(IAccessTokenSet).getByID(token_id)) |
2897 | self.assertIsNone(getUtility(IAccessTokenSet).getByID(token_id + 1)) |
2898 | |
2899 | def test_getBySecret(self): |
2900 | - secret, token = self.factory.makeAccessToken() |
2901 | + secret, token = self.factory.makeAccessToken(target=self.makeTarget()) |
2902 | self.assertEqual( |
2903 | token, getUtility(IAccessTokenSet).getBySecret(secret) |
2904 | ) |
2905 | @@ -225,9 +245,15 @@ class TestAccessTokenSet(TestCaseWithFactory): |
2906 | def test_findByOwner(self): |
2907 | owners = [self.factory.makePerson() for _ in range(3)] |
2908 | tokens = [ |
2909 | - self.factory.makeAccessToken(owner=owners[0])[1], |
2910 | - self.factory.makeAccessToken(owner=owners[0])[1], |
2911 | - self.factory.makeAccessToken(owner=owners[1])[1], |
2912 | + self.factory.makeAccessToken( |
2913 | + owner=owners[0], target=self.makeTarget() |
2914 | + )[1], |
2915 | + self.factory.makeAccessToken( |
2916 | + owner=owners[0], target=self.makeTarget() |
2917 | + )[1], |
2918 | + self.factory.makeAccessToken( |
2919 | + owner=owners[1], target=self.makeTarget() |
2920 | + )[1], |
2921 | ] |
2922 | self.assertContentEqual( |
2923 | tokens[:2], getUtility(IAccessTokenSet).findByOwner(owners[0]) |
2924 | @@ -240,7 +266,7 @@ class TestAccessTokenSet(TestCaseWithFactory): |
2925 | ) |
2926 | |
2927 | def test_findByTarget(self): |
2928 | - targets = [self.factory.makeGitRepository() for _ in range(3)] |
2929 | + targets = [self.makeTarget() for _ in range(3)] |
2930 | tokens = [ |
2931 | self.factory.makeAccessToken(target=targets[0])[1], |
2932 | self.factory.makeAccessToken(target=targets[0])[1], |
2933 | @@ -257,7 +283,7 @@ class TestAccessTokenSet(TestCaseWithFactory): |
2934 | ) |
2935 | |
2936 | def test_findByTarget_visible_by_user(self): |
2937 | - targets = [self.factory.makeGitRepository() for _ in range(3)] |
2938 | + targets = [self.makeTarget() for _ in range(3)] |
2939 | owners = [self.factory.makePerson() for _ in range(3)] |
2940 | tokens = [ |
2941 | self.factory.makeAccessToken( |
2942 | @@ -290,7 +316,7 @@ class TestAccessTokenSet(TestCaseWithFactory): |
2943 | ) |
2944 | |
2945 | def test_findByTarget_excludes_expired(self): |
2946 | - target = self.factory.makeGitRepository() |
2947 | + target = self.makeTarget() |
2948 | _, current_token = self.factory.makeAccessToken(target=target) |
2949 | _, expires_soon_token = self.factory.makeAccessToken( |
2950 | target=target, |
2951 | @@ -312,7 +338,7 @@ class TestAccessTokenSet(TestCaseWithFactory): |
2952 | ) |
2953 | |
2954 | def test_getByTargetAndID(self): |
2955 | - targets = [self.factory.makeGitRepository() for _ in range(3)] |
2956 | + targets = [self.makeTarget() for _ in range(3)] |
2957 | tokens = [ |
2958 | self.factory.makeAccessToken(target=targets[0])[1], |
2959 | self.factory.makeAccessToken(target=targets[0])[1], |
2960 | @@ -337,7 +363,7 @@ class TestAccessTokenSet(TestCaseWithFactory): |
2961 | ) |
2962 | |
2963 | def test_getByTargetAndID_visible_by_user(self): |
2964 | - targets = [self.factory.makeGitRepository() for _ in range(3)] |
2965 | + targets = [self.makeTarget() for _ in range(3)] |
2966 | owners = [self.factory.makePerson() for _ in range(3)] |
2967 | tokens = [ |
2968 | self.factory.makeAccessToken( |
2969 | @@ -374,7 +400,7 @@ class TestAccessTokenSet(TestCaseWithFactory): |
2970 | self.assertIsNone(fetched_token) |
2971 | |
2972 | def test_getByTargetAndID_excludes_expired(self): |
2973 | - target = self.factory.makeGitRepository() |
2974 | + target = self.makeTarget() |
2975 | _, current_token = self.factory.makeAccessToken(target=target) |
2976 | _, expires_soon_token = self.factory.makeAccessToken( |
2977 | target=target, |
2978 | @@ -403,6 +429,13 @@ class TestAccessTokenSet(TestCaseWithFactory): |
2979 | ) |
2980 | |
2981 | |
2982 | +class TestGitRepositoryAccessTokenSet( |
2983 | + TestAccessTokenSetBase, TestCaseWithFactory |
2984 | +): |
2985 | + def makeTarget(self): |
2986 | + return self.factory.makeGitRepository() |
2987 | + |
2988 | + |
2989 | class TestAccessTokenTargetBase: |
2990 | layer = DatabaseFunctionalLayer |
2991 | |
2992 | diff --git a/lib/lp/services/authserver/tests/test_authserver.py b/lib/lp/services/authserver/tests/test_authserver.py |
2993 | index 2f29fd1..3961957 100644 |
2994 | --- a/lib/lp/services/authserver/tests/test_authserver.py |
2995 | +++ b/lib/lp/services/authserver/tests/test_authserver.py |
2996 | @@ -6,12 +6,12 @@ |
2997 | import xmlrpc.client |
2998 | |
2999 | from pymacaroons import Macaroon |
3000 | -from storm.sqlobject import SQLObjectNotFound |
3001 | from testtools.matchers import Equals, Is, MatchesListwise, MatchesStructure |
3002 | from zope.component import getUtility |
3003 | from zope.interface import implementer |
3004 | from zope.publisher.xmlrpc import TestRequest |
3005 | |
3006 | +from lp.app.errors import NotFoundError |
3007 | from lp.services.authserver.interfaces import ( |
3008 | IAuthServer, |
3009 | IAuthServerApplication, |
3010 | @@ -266,7 +266,7 @@ class MacaroonTests(TestCaseWithFactory): |
3011 | # Pick a large ID that doesn't exist in sampledata. |
3012 | lfa_id = 1000000 |
3013 | self.assertRaises( |
3014 | - SQLObjectNotFound, |
3015 | + NotFoundError, |
3016 | getUtility(ILibraryFileAliasSet).__getitem__, |
3017 | lfa_id, |
3018 | ) |
3019 | diff --git a/lib/lp/services/authserver/xmlrpc.py b/lib/lp/services/authserver/xmlrpc.py |
3020 | index 54312d7..74758fe 100644 |
3021 | --- a/lib/lp/services/authserver/xmlrpc.py |
3022 | +++ b/lib/lp/services/authserver/xmlrpc.py |
3023 | @@ -9,12 +9,12 @@ __all__ = [ |
3024 | ] |
3025 | |
3026 | from pymacaroons import Macaroon |
3027 | -from storm.sqlobject import SQLObjectNotFound |
3028 | from zope.component import getUtility |
3029 | from zope.interface import implementer |
3030 | from zope.interface.interfaces import ComponentLookupError |
3031 | from zope.security.proxy import removeSecurityProxy |
3032 | |
3033 | +from lp.app.errors import NotFoundError |
3034 | from lp.code.interfaces.cibuild import ICIBuildSet |
3035 | from lp.oci.interfaces.ocirecipebuild import IOCIRecipeBuildSet |
3036 | from lp.registry.interfaces.person import IPersonSet |
3037 | @@ -69,7 +69,7 @@ class AuthServerAPIView(LaunchpadXMLRPCView): |
3038 | # The context is a `LibraryFileAlias` ID. |
3039 | try: |
3040 | return getUtility(ILibraryFileAliasSet)[context] |
3041 | - except SQLObjectNotFound: |
3042 | + except NotFoundError: |
3043 | return None |
3044 | elif context_type == "BinaryPackageBuild": |
3045 | # The context is a `BinaryPackageBuild` ID. |
3046 | diff --git a/lib/lp/services/database/datetimecol.py b/lib/lp/services/database/datetimecol.py |
3047 | deleted file mode 100644 |
3048 | index b23a381..0000000 |
3049 | --- a/lib/lp/services/database/datetimecol.py |
3050 | +++ /dev/null |
3051 | @@ -1,14 +0,0 @@ |
3052 | -# Copyright 2009 Canonical Ltd. This software is licensed under the |
3053 | -# GNU Affero General Public License version 3 (see the file LICENSE). |
3054 | - |
3055 | -"""UtcDateTimeCol for SQLObject""" |
3056 | - |
3057 | -__all__ = ["UtcDateTimeCol"] |
3058 | - |
3059 | -from datetime import timezone |
3060 | - |
3061 | -import storm.sqlobject |
3062 | - |
3063 | - |
3064 | -class UtcDateTimeCol(storm.sqlobject.UtcDateTimeCol): |
3065 | - _kwargs = {"tzinfo": timezone.utc} |
3066 | diff --git a/lib/lp/services/database/doc/security-proxies.rst b/lib/lp/services/database/doc/security-proxies.rst |
3067 | index 3675907..ac20f99 100644 |
3068 | --- a/lib/lp/services/database/doc/security-proxies.rst |
3069 | +++ b/lib/lp/services/database/doc/security-proxies.rst |
3070 | @@ -1,7 +1,7 @@ |
3071 | Security proxies |
3072 | ---------------- |
3073 | |
3074 | -SQLObjects that are security proxied should still behave normally, this |
3075 | +Storm objects that are security proxied should still behave normally, this |
3076 | includes being comparable with non-security proxied objects. |
3077 | |
3078 | First, some imports and set up:: |
3079 | @@ -9,11 +9,12 @@ First, some imports and set up:: |
3080 | >>> from zope.component import getUtility |
3081 | >>> from lp.registry.interfaces.person import IPersonSet |
3082 | >>> from lp.registry.model.person import Person |
3083 | + >>> from lp.services.database.interfaces import IStore |
3084 | |
3085 | Get a proxied and unproxied person object for the same person, and demonstrate |
3086 | working comparisons:: |
3087 | |
3088 | - >>> mark = Person.get(1) |
3089 | + >>> mark = IStore(Person).get(Person, 1) |
3090 | >>> mark_proxied = getUtility(IPersonSet).get(1) |
3091 | >>> mark is mark_proxied |
3092 | False |
3093 | @@ -26,8 +27,7 @@ working comparisons:: |
3094 | >>> mark_proxied == mark_proxied |
3095 | True |
3096 | |
3097 | -A dbschema Item can also be given to sqlobject's select() method, or any |
3098 | -of its variants. |
3099 | +A ``lazr.enum.DBItem`` can also be given to Storm's find() method. |
3100 | |
3101 | >>> proxied_policy = mark_proxied.membership_policy |
3102 | >>> type(proxied_policy) |
3103 | @@ -35,19 +35,21 @@ of its variants. |
3104 | |
3105 | # We don't want this test to fail when we add new person entries, so we |
3106 | # compare it against a base number. |
3107 | - >>> Person.select( |
3108 | - ... Person.q.membership_policy == proxied_policy |
3109 | + >>> IStore(Person).find( |
3110 | + ... Person, membership_policy=proxied_policy |
3111 | ... ).count() > 60 |
3112 | True |
3113 | - >>> person = Person.select(Person.q.membership_policy == proxied_policy)[ |
3114 | - ... 0 |
3115 | - ... ] |
3116 | + >>> person = ( |
3117 | + ... IStore(Person) |
3118 | + ... .find(Person, membership_policy=proxied_policy) |
3119 | + ... .first() |
3120 | + ... ) |
3121 | >>> person.membership_policy.name |
3122 | 'MODERATED' |
3123 | |
3124 | XXX: stevea: 20051018: Rewrite this test to use security proxies directly |
3125 | XXX: bug 3315 |
3126 | -DB schema objects should be comparable correctly when proxied... |
3127 | +``lazr.enum.DBItem`` objects are comparable correctly when proxied. |
3128 | |
3129 | >>> from lp.registry.interfaces.distroseries import IDistroSeriesSet |
3130 | >>> from lp.registry.interfaces.series import SeriesStatus |
3131 | diff --git a/lib/lp/services/database/doc/storm-security-proxies.rst b/lib/lp/services/database/doc/storm-security-proxies.rst |
3132 | index e95fb66..600dcc2 100644 |
3133 | --- a/lib/lp/services/database/doc/storm-security-proxies.rst |
3134 | +++ b/lib/lp/services/database/doc/storm-security-proxies.rst |
3135 | @@ -1,5 +1,5 @@ |
3136 | -Demonstrate that SQLObject works with security proxies |
3137 | ------------------------------------------------------- |
3138 | +Demonstrate that Storm works with security proxies |
3139 | +-------------------------------------------------- |
3140 | |
3141 | Do some imports. |
3142 | |
3143 | diff --git a/lib/lp/services/database/interfaces.py b/lib/lp/services/database/interfaces.py |
3144 | index 808e25b..f619539 100644 |
3145 | --- a/lib/lp/services/database/interfaces.py |
3146 | +++ b/lib/lp/services/database/interfaces.py |
3147 | @@ -9,7 +9,6 @@ __all__ = [ |
3148 | "IPrimaryObject", |
3149 | "IPrimaryStore", |
3150 | "IRequestExpired", |
3151 | - "ISQLBase", |
3152 | "IStandbyStore", |
3153 | "IStore", |
3154 | "IStoreSelector", |
3155 | @@ -21,7 +20,6 @@ __all__ = [ |
3156 | |
3157 | from zope.interface import Interface |
3158 | from zope.interface.common.interfaces import IRuntimeError |
3159 | -from zope.schema import Int |
3160 | |
3161 | |
3162 | class IRequestExpired(IRuntimeError): |
3163 | @@ -30,15 +28,6 @@ class IRequestExpired(IRuntimeError): |
3164 | """ |
3165 | |
3166 | |
3167 | -# XXX 2007-02-09 jamesh: |
3168 | -# This derived from sqlos.interfaces.ISQLObject before hand. I don't |
3169 | -# think it is ever used though ... |
3170 | -class ISQLBase(Interface): |
3171 | - """An extension of ISQLObject that provides an ID.""" |
3172 | - |
3173 | - id = Int(title="The integer ID for the instance") |
3174 | - |
3175 | - |
3176 | # |
3177 | # Database policies |
3178 | # |
3179 | diff --git a/lib/lp/services/database/multitablecopy.py b/lib/lp/services/database/multitablecopy.py |
3180 | index 1865206..b34e1dd 100644 |
3181 | --- a/lib/lp/services/database/multitablecopy.py |
3182 | +++ b/lib/lp/services/database/multitablecopy.py |
3183 | @@ -10,7 +10,7 @@ import time |
3184 | from zope.interface import implementer |
3185 | |
3186 | from lp.services.database import postgresql |
3187 | -from lp.services.database.sqlbase import cursor, quote, quoteIdentifier |
3188 | +from lp.services.database.sqlbase import cursor, quote, quote_identifier |
3189 | from lp.services.looptuner import DBLoopTuner, ITunableLoop |
3190 | |
3191 | |
3192 | @@ -295,7 +295,7 @@ class MultiTableCopy: |
3193 | Return value is properly quoted for use as an SQL identifier. |
3194 | """ |
3195 | raw_name = self.getRawHoldingTableName(tablename, suffix) |
3196 | - return quoteIdentifier(raw_name) |
3197 | + return quote_identifier(raw_name) |
3198 | |
3199 | def _pointsToTable(self, source_table, foreign_key): |
3200 | """Name of table that source_table.foreign_key refers to. |
3201 | @@ -353,9 +353,9 @@ class MultiTableCopy: |
3202 | extracted. The WHERE clause may refer to rows from table being |
3203 | extracted as "source." |
3204 | :param id_sequence: SQL sequence that should assign new identifiers |
3205 | - for the extracted rows. Defaults to `source_table` with "_seq_id" |
3206 | - appended, which by SQLObject/Launchpad convention is the sequence |
3207 | - that provides `source_table`'s primary key values. Used verbatim, |
3208 | + for the extracted rows. Defaults to `source_table` with "_id_seq" |
3209 | + appended, which by Launchpad convention is the sequence that |
3210 | + provides `source_table`'s primary key values. Used verbatim, |
3211 | without quoting. |
3212 | :param inert_where: Boolean SQL expression characterizing rows that |
3213 | are extracted, but should not poured back into `source_table` |
3214 | diff --git a/lib/lp/services/database/postgresql.py b/lib/lp/services/database/postgresql.py |
3215 | index 8ba5252..afbfcb0 100644 |
3216 | --- a/lib/lp/services/database/postgresql.py |
3217 | +++ b/lib/lp/services/database/postgresql.py |
3218 | @@ -8,7 +8,7 @@ and table manipulation |
3219 | |
3220 | import re |
3221 | |
3222 | -from lp.services.database.sqlbase import quote, quoteIdentifier, sqlvalues |
3223 | +from lp.services.database.sqlbase import quote, quote_identifier, sqlvalues |
3224 | |
3225 | |
3226 | def listReferences(cur, table, column, indirect=True, _state=None): |
3227 | @@ -308,8 +308,8 @@ def generateResetSequencesSQL(cur): |
3228 | if table is None or column is None: |
3229 | continue |
3230 | sql = "SELECT max(%s) FROM %s" % ( |
3231 | - quoteIdentifier(column), |
3232 | - quoteIdentifier(table), |
3233 | + quote_identifier(column), |
3234 | + quote_identifier(table), |
3235 | ) |
3236 | cur.execute(sql) |
3237 | last_value = cur.fetchone()[0] |
3238 | diff --git a/lib/lp/services/database/sqlbase.py b/lib/lp/services/database/sqlbase.py |
3239 | index ebbc1d9..f604f4c 100644 |
3240 | --- a/lib/lp/services/database/sqlbase.py |
3241 | +++ b/lib/lp/services/database/sqlbase.py |
3242 | @@ -16,20 +16,16 @@ __all__ = [ |
3243 | "ISOLATION_LEVEL_REPEATABLE_READ", |
3244 | "ISOLATION_LEVEL_SERIALIZABLE", |
3245 | "quote", |
3246 | - "quoteIdentifier", |
3247 | "quote_identifier", |
3248 | "reset_store", |
3249 | "session_store", |
3250 | - "SQLBase", |
3251 | "sqlvalues", |
3252 | "StupidCache", |
3253 | ] |
3254 | |
3255 | - |
3256 | from datetime import datetime, timezone |
3257 | |
3258 | import psycopg2 |
3259 | -import storm |
3260 | import transaction |
3261 | from psycopg2.extensions import ( |
3262 | ISOLATION_LEVEL_AUTOCOMMIT, |
3263 | @@ -42,27 +38,18 @@ from psycopg2.extensions import ( |
3264 | from storm.databases.postgres import compile as postgres_compile |
3265 | from storm.expr import State |
3266 | from storm.expr import compile as storm_compile |
3267 | -from storm.locals import Storm # noqa: B1 |
3268 | -from storm.locals import Store |
3269 | from storm.zope.interfaces import IZStorm |
3270 | from twisted.python.util import mergeFunctionMetadata |
3271 | from zope.component import getUtility |
3272 | -from zope.interface import implementer |
3273 | -from zope.security.proxy import removeSecurityProxy |
3274 | |
3275 | from lp.services.config import dbconfig |
3276 | from lp.services.database.interfaces import ( |
3277 | DEFAULT_FLAVOR, |
3278 | MAIN_STORE, |
3279 | DisallowedStore, |
3280 | - IPrimaryObject, |
3281 | - IPrimaryStore, |
3282 | - ISQLBase, |
3283 | - IStore, |
3284 | IStoreSelector, |
3285 | ) |
3286 | from lp.services.database.sqlobject import sqlrepr |
3287 | -from lp.services.propertycache import clear_property_cache |
3288 | |
3289 | # Default we want for scripts, and the PostgreSQL default. Note psycopg1 will |
3290 | # use SERIALIZABLE unless we override, but psycopg2 will not. |
3291 | @@ -84,9 +71,6 @@ class StupidCache: |
3292 | This class is basically equivalent to Storm's standard Cache class |
3293 | with a very large size but without the overhead of maintaining the |
3294 | LRU list. |
3295 | - |
3296 | - This provides caching behaviour equivalent to what we were using |
3297 | - under SQLObject. |
3298 | """ |
3299 | |
3300 | def __init__(self, size): |
3301 | @@ -112,168 +96,15 @@ class StupidCache: |
3302 | return self._cache.keys() |
3303 | |
3304 | |
3305 | -def _get_sqlobject_store(): |
3306 | - """Return the store used by the SQLObject compatibility layer.""" |
3307 | - return getUtility(IStoreSelector).get(MAIN_STORE, DEFAULT_FLAVOR) |
3308 | - |
3309 | - |
3310 | -class LaunchpadStyle(storm.sqlobject.SQLObjectStyle): |
3311 | - """A SQLObject style for launchpad. |
3312 | +def _get_main_default_store(): |
3313 | + """Return the main store using the default flavor. |
3314 | |
3315 | - Python attributes and database columns are lowercase. |
3316 | - Class names and database tables are MixedCase. Using this style should |
3317 | - simplify SQLBase class definitions since more defaults will be correct. |
3318 | + For web requests, the default flavor uses a primary or standby database |
3319 | + depending on the type of request (see |
3320 | + `lp.services.database.policy.LaunchpadDatabasePolicy`); in all other |
3321 | + situations, it uses the primary database. |
3322 | """ |
3323 | - |
3324 | - def pythonAttrToDBColumn(self, attr): |
3325 | - return attr |
3326 | - |
3327 | - def dbColumnToPythonAttr(self, col): |
3328 | - return col |
3329 | - |
3330 | - def pythonClassToDBTable(self, className): |
3331 | - return className |
3332 | - |
3333 | - def dbTableToPythonClass(self, table): |
3334 | - return table |
3335 | - |
3336 | - def idForTable(self, table): |
3337 | - return "id" |
3338 | - |
3339 | - def pythonClassToAttr(self, className): |
3340 | - return className.lower() |
3341 | - |
3342 | - # dsilvers: 20050322: If you take this method out; then RelativeJoin |
3343 | - # instances in our SQLObject classes cause the following error: |
3344 | - # AttributeError: 'LaunchpadStyle' object has no attribute |
3345 | - # 'tableReference' |
3346 | - def tableReference(self, table): |
3347 | - """Return the tablename mapped for use in RelativeJoin statements.""" |
3348 | - return table.__str__() |
3349 | - |
3350 | - |
3351 | -@implementer(ISQLBase) |
3352 | -class SQLBase(storm.sqlobject.SQLObjectBase): |
3353 | - """Base class emulating SQLObject for legacy database classes.""" |
3354 | - |
3355 | - _style = LaunchpadStyle() |
3356 | - |
3357 | - # Silence warnings in linter script, which complains about all |
3358 | - # SQLBase-derived objects missing an id. |
3359 | - id = None |
3360 | - |
3361 | - def __init__(self, *args, **kwargs): |
3362 | - """Extended version of the SQLObjectBase constructor. |
3363 | - |
3364 | - We force use of the primary Store. |
3365 | - |
3366 | - We refetch any parameters from different stores from the |
3367 | - correct primary Store. |
3368 | - """ |
3369 | - # Make it simple to write dumb-invalidators - initialized |
3370 | - # _cached_properties to a valid list rather than just-in-time |
3371 | - # creation. |
3372 | - self._cached_properties = [] |
3373 | - store = IPrimaryStore(self.__class__) |
3374 | - |
3375 | - # The constructor will fail if objects from a different Store |
3376 | - # are passed in. We need to refetch these objects from the correct |
3377 | - # primary Store if necessary so the foreign key references can be |
3378 | - # constructed. |
3379 | - # XXX StuartBishop 2009-03-02 bug=336867: We probably want to remove |
3380 | - # this code - there are enough other places developers have to be |
3381 | - # aware of the replication # set boundaries. Why should |
3382 | - # Person(..., account=an_account) work but |
3383 | - # some_person.account = an_account fail? |
3384 | - for key, argument in kwargs.items(): |
3385 | - argument = removeSecurityProxy(argument) |
3386 | - if not isinstance(argument, Storm): # noqa: B1 |
3387 | - continue |
3388 | - argument_store = Store.of(argument) |
3389 | - if argument_store is not store: |
3390 | - new_argument = store.find( |
3391 | - argument.__class__, id=argument.id |
3392 | - ).one() |
3393 | - assert ( |
3394 | - new_argument is not None |
3395 | - ), "%s not yet synced to this store" % repr(argument) |
3396 | - kwargs[key] = new_argument |
3397 | - |
3398 | - store.add(self) |
3399 | - try: |
3400 | - self._create(None, **kwargs) |
3401 | - except Exception: |
3402 | - store.remove(self) |
3403 | - raise |
3404 | - |
3405 | - @classmethod |
3406 | - def _get_store(cls): |
3407 | - return IStore(cls) |
3408 | - |
3409 | - def __repr__(self): |
3410 | - return "<%s object>" % (self.__class__.__name__) |
3411 | - |
3412 | - def destroySelf(self): |
3413 | - my_primary = IPrimaryObject(self) |
3414 | - if self is my_primary: |
3415 | - super().destroySelf() |
3416 | - else: |
3417 | - my_primary.destroySelf() |
3418 | - |
3419 | - def __eq__(self, other): |
3420 | - """Equality operator. |
3421 | - |
3422 | - Objects compare equal if they have the same class and id, and the id |
3423 | - is not None. |
3424 | - |
3425 | - This rule allows objects retrieved from different stores to compare |
3426 | - equal. Newly-created objects may not yet have an id; in such cases |
3427 | - we flush the store so that we can find out their id. |
3428 | - """ |
3429 | - naked_self = removeSecurityProxy(self) |
3430 | - naked_other = removeSecurityProxy(other) |
3431 | - if naked_self.__class__ != naked_other.__class__: |
3432 | - return False |
3433 | - try: |
3434 | - self_id = naked_self.id |
3435 | - except KeyError: |
3436 | - self.syncUpdate() |
3437 | - self_id = naked_self.id |
3438 | - if self_id is None: |
3439 | - return False |
3440 | - try: |
3441 | - other_id = naked_other.id |
3442 | - except KeyError: |
3443 | - other.syncUpdate() |
3444 | - other_id = naked_other.id |
3445 | - return self_id == other_id |
3446 | - |
3447 | - def __ne__(self, other): |
3448 | - """Inverse of __eq__.""" |
3449 | - return not (self == other) |
3450 | - |
3451 | - def __hash__(self): |
3452 | - """Hash operator. |
3453 | - |
3454 | - We must define __hash__ since we define __eq__ (Python 3 requires |
3455 | - this), but we need to take care to preserve the invariant that |
3456 | - objects that compare equal have the same hash value. Newly-created |
3457 | - objects may not yet have an id; in such cases we flush the store so |
3458 | - that we can find out their id. |
3459 | - """ |
3460 | - try: |
3461 | - id = self.id |
3462 | - except KeyError: |
3463 | - self.syncUpdate() |
3464 | - id = self.id |
3465 | - return hash((self.__class__, id)) |
3466 | - |
3467 | - def __storm_invalidated__(self): |
3468 | - """Flush cached properties.""" |
3469 | - # XXX: RobertCollins 2010-08-16 bug=622648: Note this is not directly |
3470 | - # tested, but the entire test suite blows up awesomely if it's broken. |
3471 | - # It's entirely unclear where tests for this should be. |
3472 | - clear_property_cache(self) |
3473 | + return getUtility(IStoreSelector).get(MAIN_STORE, DEFAULT_FLAVOR) |
3474 | |
3475 | |
3476 | def get_transaction_timestamp(store): |
3477 | @@ -319,21 +150,14 @@ def quote(x): |
3478 | >>> from datetime import datetime, date, time |
3479 | >>> quote(datetime(2003, 12, 4, 13, 45, 50)) |
3480 | "'2003-12-04 13:45:50'" |
3481 | + >>> quote(datetime(2003, 12, 4, 13, 45, 50, 123456)) |
3482 | + "'2003-12-04 13:45:50.123456'" |
3483 | >>> quote(date(2003, 12, 4)) |
3484 | "'2003-12-04'" |
3485 | >>> quote(time(13, 45, 50)) |
3486 | "'13:45:50'" |
3487 | |
3488 | - This function special cases datetime objects, due to a bug that has |
3489 | - since been fixed in SQLOS (it installed an SQLObject converter that |
3490 | - stripped the time component from the value). By itself, the sqlrepr |
3491 | - function has the following output: |
3492 | - |
3493 | - >>> sqlrepr(datetime(2003, 12, 4, 13, 45, 50), "postgres") |
3494 | - "'2003-12-04T13:45:50'" |
3495 | - |
3496 | - This function also special cases set objects, which SQLObject's |
3497 | - sqlrepr() doesn't know how to handle. |
3498 | + sqlrepr() also special-cases set objects. |
3499 | |
3500 | >>> quote(set([1, 2, 3])) |
3501 | '(1, 2, 3)' |
3502 | @@ -343,12 +167,6 @@ def quote(x): |
3503 | """ |
3504 | if isinstance(x, datetime): |
3505 | return "'%s'" % x |
3506 | - elif ISQLBase(x, None) is not None: |
3507 | - return str(x.id) |
3508 | - elif isinstance(x, (set, frozenset)): |
3509 | - # SQLObject can't cope with sets, so convert to a list, which it |
3510 | - # /does/ know how to handle. |
3511 | - x = list(x) |
3512 | return sqlrepr(x, "postgres") |
3513 | |
3514 | |
3515 | @@ -407,23 +225,20 @@ def quote_identifier(identifier): |
3516 | In SQL, identifiers are quoted using " rather than ' which is reserved |
3517 | for strings. |
3518 | |
3519 | - >>> print(quoteIdentifier("hello")) |
3520 | + >>> print(quote_identifier("hello")) |
3521 | "hello" |
3522 | - >>> print(quoteIdentifier("'")) |
3523 | + >>> print(quote_identifier("'")) |
3524 | "'" |
3525 | - >>> print(quoteIdentifier('"')) |
3526 | + >>> print(quote_identifier('"')) |
3527 | """" |
3528 | - >>> print(quoteIdentifier("\\")) |
3529 | + >>> print(quote_identifier("\\")) |
3530 | "\" |
3531 | - >>> print(quoteIdentifier('\\"')) |
3532 | + >>> print(quote_identifier('\\"')) |
3533 | "\""" |
3534 | ''' |
3535 | return '"%s"' % identifier.replace('"', '""') |
3536 | |
3537 | |
3538 | -quoteIdentifier = quote_identifier # Backwards compatibility for now. |
3539 | - |
3540 | - |
3541 | def convert_storm_clause_to_string(storm_clause): |
3542 | """Convert a Storm expression into a plain string. |
3543 | |
3544 | @@ -489,25 +304,28 @@ def convert_storm_clause_to_string(storm_clause): |
3545 | def flush_database_updates(): |
3546 | """Flushes all pending database updates. |
3547 | |
3548 | - When SQLObject's _lazyUpdate flag is set, then it's possible to have |
3549 | - changes written to objects that aren't flushed to the database, leading to |
3550 | - inconsistencies when doing e.g.:: |
3551 | + Storm normally flushes changes to objects before it needs to issue the |
3552 | + next query, but there are situations where it doesn't realize that it |
3553 | + needs to do so. One common case is when creating an object and |
3554 | + immediately fetching its ID, which is typically assigned by the database |
3555 | + based on a sequence when the row is inserted:: |
3556 | |
3557 | - # Assuming the Beer table already has a 'Victoria Bitter' row... |
3558 | - assert Beer.select("name LIKE 'Vic%'").count() == 1 # This will pass |
3559 | - beer = Beer.byName('Victoria Bitter') |
3560 | - beer.name = 'VB' |
3561 | - assert Beer.select("name LIKE 'Vic%'").count() == 0 # This will fail |
3562 | + store = IStore(Beer) |
3563 | + beer = Beer(name="Victoria Bitter") |
3564 | + store.add(beer) |
3565 | + assert beer.id is not None # This will fail |
3566 | |
3567 | To avoid this problem, use this function:: |
3568 | |
3569 | - # Assuming the Beer table already has a 'Victoria Bitter' row... |
3570 | - assert Beer.select("name LIKE 'Vic%'").count() == 1 # This will pass |
3571 | - beer = Beer.byName('Victoria Bitter') |
3572 | - beer.name = 'VB' |
3573 | + store = IStore(Beer) |
3574 | + beer = Beer(name="Victoria Bitter") |
3575 | + store.add(beer) |
3576 | flush_database_updates() |
3577 | - assert Beer.select("name LIKE 'Vic%'").count() == 0 # This will pass |
3578 | + assert beer.id is not None # This will pass |
3579 | |
3580 | + (You can also flush individual stores using `store.flush()`, which is |
3581 | + normally sufficient, but sometimes this function is a convenient |
3582 | + shorthand if you don't already have a store object handy.) |
3583 | """ |
3584 | zstorm = getUtility(IZStorm) |
3585 | for name, store in zstorm.iterstores(): |
3586 | @@ -517,14 +335,13 @@ def flush_database_updates(): |
3587 | def flush_database_caches(): |
3588 | """Flush all database caches. |
3589 | |
3590 | - SQLObject caches field values from the database in SQLObject |
3591 | - instances. If SQL statements are issued that change the state of |
3592 | - the database behind SQLObject's back, these cached values will be |
3593 | - invalid. |
3594 | + Storm caches field values from the database in Storm instances. If SQL |
3595 | + statements are issued that change the state of the database behind |
3596 | + Storm's back, these cached values will be invalid. |
3597 | |
3598 | - This function iterates through all the objects in the SQLObject |
3599 | - connection's cache, and synchronises them with the database. This |
3600 | - ensures that they all reflect the values in the database. |
3601 | + This function iterates through all the objects in the Storm connection's |
3602 | + cache, and synchronises them with the database. This ensures that they |
3603 | + all reflect the values in the database. |
3604 | """ |
3605 | zstorm = getUtility(IZStorm) |
3606 | for name, store in zstorm.iterstores(): |
3607 | @@ -537,7 +354,7 @@ def block_implicit_flushes(func): |
3608 | |
3609 | def block_implicit_flushes_decorator(*args, **kwargs): |
3610 | try: |
3611 | - store = _get_sqlobject_store() |
3612 | + store = _get_main_default_store() |
3613 | except DisallowedStore: |
3614 | return func(*args, **kwargs) |
3615 | store.block_implicit_flushes() |
3616 | @@ -556,7 +373,7 @@ def reset_store(func): |
3617 | try: |
3618 | return func(*args, **kwargs) |
3619 | finally: |
3620 | - _get_sqlobject_store().reset() |
3621 | + _get_main_default_store().reset() |
3622 | |
3623 | return mergeFunctionMetadata(func, reset_store_decorator) |
3624 | |
3625 | @@ -608,7 +425,7 @@ class cursor: |
3626 | """ |
3627 | |
3628 | def __init__(self): |
3629 | - self._connection = _get_sqlobject_store()._connection |
3630 | + self._connection = _get_main_default_store()._connection |
3631 | self._result = None |
3632 | |
3633 | def execute(self, query, params=None): |
3634 | diff --git a/lib/lp/services/database/sqlobject/__init__.py b/lib/lp/services/database/sqlobject/__init__.py |
3635 | index 4013d35..bd5e645 100644 |
3636 | --- a/lib/lp/services/database/sqlobject/__init__.py |
3637 | +++ b/lib/lp/services/database/sqlobject/__init__.py |
3638 | @@ -7,31 +7,6 @@ |
3639 | import datetime |
3640 | |
3641 | from storm.expr import SQL |
3642 | -from storm.sqlobject import ( # noqa: F401 |
3643 | - AND, |
3644 | - CONTAINSSTRING, |
3645 | - DESC, |
3646 | - IN, |
3647 | - LIKE, |
3648 | - NOT, |
3649 | - OR, |
3650 | - BoolCol, |
3651 | - DateCol, |
3652 | - FloatCol, |
3653 | - ForeignKey, |
3654 | - IntCol, |
3655 | - IntervalCol, |
3656 | - SingleJoin, |
3657 | - SQLConstant, |
3658 | - SQLMultipleJoin, |
3659 | - SQLObjectBase, |
3660 | - SQLObjectMoreThanOneResultError, |
3661 | - SQLObjectNotFound, |
3662 | - SQLObjectResultSet, |
3663 | - SQLRelatedJoin, |
3664 | - StringCol, |
3665 | - UtcDateTimeCol, |
3666 | -) |
3667 | |
3668 | _sqlStringReplace = [ |
3669 | ("\\", "\\\\"), |
3670 | @@ -70,7 +45,7 @@ def sqlrepr(value, dbname=None): |
3671 | return repr(value) |
3672 | elif value is None: |
3673 | return "NULL" |
3674 | - elif isinstance(value, (list, set, tuple)): |
3675 | + elif isinstance(value, (frozenset, list, set, tuple)): |
3676 | return "(%s)" % ", ".join(sqlrepr(v, dbname) for v in value) |
3677 | elif isinstance(value, datetime.datetime): |
3678 | return value.strftime("'%Y-%m-%dT%H:%M:%S'") |
3679 | diff --git a/lib/lp/services/database/tests/test_transaction_decorators.py b/lib/lp/services/database/tests/test_transaction_decorators.py |
3680 | index ef79e98..c5d4776 100644 |
3681 | --- a/lib/lp/services/database/tests/test_transaction_decorators.py |
3682 | +++ b/lib/lp/services/database/tests/test_transaction_decorators.py |
3683 | @@ -21,7 +21,9 @@ class TestTransactionDecorators(unittest.TestCase): |
3684 | def setUp(self): |
3685 | switch_dbuser("librarian") |
3686 | self.store = IStore(LibraryFileContent) |
3687 | - self.content_id = db.Library().add("deadbeef", 1234, "abababab", "ba") |
3688 | + self.content_id = ( |
3689 | + db.Library().add("deadbeef", 1234, "abababab", "ba").id |
3690 | + ) |
3691 | self.file_content = self._getTestFileContent() |
3692 | transaction.commit() |
3693 | |
3694 | diff --git a/lib/lp/services/features/model.py b/lib/lp/services/features/model.py |
3695 | index 29631b0..1d64a9a 100644 |
3696 | --- a/lib/lp/services/features/model.py |
3697 | +++ b/lib/lp/services/features/model.py |
3698 | @@ -13,7 +13,6 @@ import six |
3699 | from storm.locals import DateTime, Int, Reference, Unicode |
3700 | from zope.interface import implementer |
3701 | |
3702 | -from lp.services.database.datetimecol import UtcDateTimeCol |
3703 | from lp.services.database.interfaces import IStore |
3704 | from lp.services.database.stormbase import StormBase |
3705 | from lp.services.features.interfaces import IFeatureRules |
3706 | @@ -54,7 +53,7 @@ class FeatureFlagChangelogEntry(StormBase): |
3707 | __storm_table__ = "FeatureFlagChangelogEntry" |
3708 | |
3709 | id = Int(primary=True) |
3710 | - date_changed = UtcDateTimeCol(notNull=True) |
3711 | + date_changed = DateTime(allow_none=False, tzinfo=timezone.utc) |
3712 | diff = Unicode(allow_none=False) |
3713 | comment = Unicode(allow_none=False) |
3714 | person_id = Int(name="person", allow_none=False) |
3715 | diff --git a/lib/lp/services/librarian/client.py b/lib/lp/services/librarian/client.py |
3716 | index c8a803e..71912a9 100644 |
3717 | --- a/lib/lp/services/librarian/client.py |
3718 | +++ b/lib/lp/services/librarian/client.py |
3719 | @@ -24,13 +24,11 @@ from urllib.request import urlopen |
3720 | |
3721 | import six |
3722 | from lazr.restful.utils import get_current_browser_request |
3723 | -from storm.store import Store |
3724 | from zope.interface import implementer |
3725 | |
3726 | from lp.services.config import config, dbconfig |
3727 | -from lp.services.database.interfaces import IPrimaryStore |
3728 | +from lp.services.database.interfaces import IPrimaryStore, IStore |
3729 | from lp.services.database.postgresql import ConnectionString |
3730 | -from lp.services.database.sqlobject import SQLObjectNotFound |
3731 | from lp.services.librarian.interfaces.client import ( |
3732 | LIBRARIAN_SERVER_DEFAULT_TIMEOUT, |
3733 | DownloadFailed, |
3734 | @@ -250,6 +248,7 @@ class FileUploadClient: |
3735 | sha1=sha1_digester.hexdigest(), |
3736 | md5=md5_digester.hexdigest(), |
3737 | ) |
3738 | + store.add(content) |
3739 | LibraryFileAlias( |
3740 | id=aliasID, |
3741 | content=content, |
3742 | @@ -259,7 +258,7 @@ class FileUploadClient: |
3743 | restricted=self.restricted, |
3744 | ) |
3745 | |
3746 | - Store.of(content).flush() |
3747 | + store.flush() |
3748 | |
3749 | assert isinstance(aliasID, int), "aliasID %r not an integer" % ( |
3750 | aliasID, |
3751 | @@ -410,10 +409,7 @@ class FileDownloadClient: |
3752 | """ |
3753 | from lp.services.librarian.model import LibraryFileAlias |
3754 | |
3755 | - try: |
3756 | - lfa = LibraryFileAlias.get(aliasID) |
3757 | - except SQLObjectNotFound: |
3758 | - lfa = None |
3759 | + lfa = IStore(LibraryFileAlias).get(LibraryFileAlias, aliasID) |
3760 | |
3761 | if lfa is None: |
3762 | raise DownloadFailed("Alias %d not found" % aliasID) |
3763 | diff --git a/lib/lp/services/librarian/model.py b/lib/lp/services/librarian/model.py |
3764 | index 83ca952..25692c8 100644 |
3765 | --- a/lib/lp/services/librarian/model.py |
3766 | +++ b/lib/lp/services/librarian/model.py |
3767 | @@ -15,23 +15,25 @@ from datetime import datetime, timezone |
3768 | from urllib.parse import urlparse |
3769 | |
3770 | from lazr.delegates import delegate_to |
3771 | -from storm.locals import Date, Desc, Int, Reference, ReferenceSet, Store |
3772 | +from storm.locals import ( |
3773 | + Bool, |
3774 | + Date, |
3775 | + DateTime, |
3776 | + Desc, |
3777 | + Int, |
3778 | + Reference, |
3779 | + Store, |
3780 | + Unicode, |
3781 | +) |
3782 | from zope.component import adapter, getUtility |
3783 | from zope.interface import Interface, implementer |
3784 | |
3785 | +from lp.app.errors import NotFoundError |
3786 | from lp.registry.errors import InvalidFilename |
3787 | from lp.services.config import config |
3788 | from lp.services.database.constants import DEFAULT, UTC_NOW |
3789 | -from lp.services.database.datetimecol import UtcDateTimeCol |
3790 | from lp.services.database.interfaces import IPrimaryStore, IStore |
3791 | -from lp.services.database.sqlbase import SQLBase, session_store |
3792 | -from lp.services.database.sqlobject import ( |
3793 | - BoolCol, |
3794 | - ForeignKey, |
3795 | - IntCol, |
3796 | - SQLRelatedJoin, |
3797 | - StringCol, |
3798 | -) |
3799 | +from lp.services.database.sqlbase import session_store |
3800 | from lp.services.database.stormbase import StormBase |
3801 | from lp.services.librarian.interfaces import ( |
3802 | ILibraryFileAlias, |
3803 | @@ -51,48 +53,65 @@ from lp.services.tokens import create_token |
3804 | |
3805 | |
3806 | @implementer(ILibraryFileContent) |
3807 | -class LibraryFileContent(SQLBase): |
3808 | +class LibraryFileContent(StormBase): |
3809 | """A pointer to file content in the librarian.""" |
3810 | |
3811 | - _table = "LibraryFileContent" |
3812 | + __storm_table__ = "LibraryFileContent" |
3813 | + |
3814 | + id = Int(primary=True) |
3815 | + datecreated = DateTime( |
3816 | + allow_none=False, default=UTC_NOW, tzinfo=timezone.utc |
3817 | + ) |
3818 | + filesize = Int(allow_none=False) |
3819 | + sha256 = Unicode() |
3820 | + sha1 = Unicode(allow_none=False) |
3821 | + md5 = Unicode(allow_none=False) |
3822 | |
3823 | - datecreated = UtcDateTimeCol(notNull=True, default=UTC_NOW) |
3824 | - filesize = IntCol(notNull=True) |
3825 | - sha256 = StringCol() |
3826 | - sha1 = StringCol(notNull=True) |
3827 | - md5 = StringCol(notNull=True) |
3828 | + def __init__(self, filesize, md5, sha1, sha256, id=None): |
3829 | + super().__init__() |
3830 | + if id is not None: |
3831 | + self.id = id |
3832 | + self.filesize = filesize |
3833 | + self.md5 = md5 |
3834 | + self.sha1 = sha1 |
3835 | + self.sha256 = sha256 |
3836 | |
3837 | |
3838 | @implementer(ILibraryFileAlias) |
3839 | -class LibraryFileAlias(SQLBase): |
3840 | +class LibraryFileAlias(StormBase): |
3841 | """A filename and mimetype that we can serve some given content with.""" |
3842 | |
3843 | - _table = "LibraryFileAlias" |
3844 | - date_created = UtcDateTimeCol(notNull=False, default=DEFAULT) |
3845 | - content = ForeignKey( |
3846 | - foreignKey="LibraryFileContent", |
3847 | - dbName="content", |
3848 | - notNull=False, |
3849 | - ) |
3850 | - filename = StringCol(notNull=True) |
3851 | - mimetype = StringCol(notNull=True) |
3852 | - expires = UtcDateTimeCol(notNull=False, default=None) |
3853 | - restricted = BoolCol(notNull=True, default=False) |
3854 | - hits = IntCol(notNull=True, default=0) |
3855 | - |
3856 | - products = SQLRelatedJoin( |
3857 | - "ProductRelease", |
3858 | - joinColumn="libraryfile", |
3859 | - otherColumn="productrelease", |
3860 | - intermediateTable="ProductReleaseFile", |
3861 | - ) |
3862 | + __storm_table__ = "LibraryFileAlias" |
3863 | |
3864 | - sourcepackages = ReferenceSet( |
3865 | - "id", |
3866 | - "SourcePackageReleaseFile.libraryfile_id", |
3867 | - "SourcePackageReleaseFile.sourcepackagerelease_id", |
3868 | - "SourcePackageRelease.id", |
3869 | + id = Int(primary=True) |
3870 | + date_created = DateTime( |
3871 | + allow_none=True, default=DEFAULT, tzinfo=timezone.utc |
3872 | ) |
3873 | + content_id = Int(name="content", allow_none=True) |
3874 | + content = Reference(content_id, "LibraryFileContent.id") |
3875 | + filename = Unicode(allow_none=False) |
3876 | + mimetype = Unicode(allow_none=False) |
3877 | + expires = DateTime(allow_none=True, default=None, tzinfo=timezone.utc) |
3878 | + restricted = Bool(allow_none=False, default=False) |
3879 | + hits = Int(allow_none=False, default=0) |
3880 | + |
3881 | + def __init__( |
3882 | + self, |
3883 | + content, |
3884 | + filename, |
3885 | + mimetype, |
3886 | + id=None, |
3887 | + expires=None, |
3888 | + restricted=False, |
3889 | + ): |
3890 | + super().__init__() |
3891 | + if id is not None: |
3892 | + self.id = id |
3893 | + self.content = content |
3894 | + self.filename = filename |
3895 | + self.mimetype = mimetype |
3896 | + self.expires = expires |
3897 | + self.restricted = restricted |
3898 | |
3899 | @property |
3900 | def client(self): |
3901 | @@ -198,23 +217,9 @@ class LibraryFileAlias(SQLBase): |
3902 | entry.count += count |
3903 | self.hits += count |
3904 | |
3905 | - products = SQLRelatedJoin( |
3906 | - "ProductRelease", |
3907 | - joinColumn="libraryfile", |
3908 | - otherColumn="productrelease", |
3909 | - intermediateTable="ProductReleaseFile", |
3910 | - ) |
3911 | - |
3912 | - sourcepackages = ReferenceSet( |
3913 | - "id", |
3914 | - "SourcePackageReleaseFile.libraryfile_id", |
3915 | - "SourcePackageReleaseFile.sourcepackagerelease_id", |
3916 | - "SourcePackageRelease.id", |
3917 | - ) |
3918 | - |
3919 | @property |
3920 | def deleted(self): |
3921 | - return self.contentID is None |
3922 | + return self.content_id is None |
3923 | |
3924 | def __storm_invalidated__(self): |
3925 | """Make sure that the file is closed across transaction boundary.""" |
3926 | @@ -278,17 +283,17 @@ class LibraryFileAliasSet: |
3927 | |
3928 | def __getitem__(self, key): |
3929 | """See ILibraryFileAliasSet.__getitem__""" |
3930 | - return LibraryFileAlias.get(key) |
3931 | + lfa = IStore(LibraryFileAlias).get(LibraryFileAlias, key) |
3932 | + if lfa is None: |
3933 | + raise NotFoundError(key) |
3934 | + return lfa |
3935 | |
3936 | def findBySHA256(self, sha256): |
3937 | """See ILibraryFileAliasSet.""" |
3938 | - return LibraryFileAlias.select( |
3939 | - """ |
3940 | - content = LibraryFileContent.id |
3941 | - AND LibraryFileContent.sha256 = '%s' |
3942 | - """ |
3943 | - % sha256, |
3944 | - clauseTables=["LibraryFileContent"], |
3945 | + return IStore(LibraryFileAlias).find( |
3946 | + LibraryFileAlias, |
3947 | + LibraryFileAlias.content == LibraryFileContent.id, |
3948 | + LibraryFileContent.sha256 == sha256, |
3949 | ) |
3950 | |
3951 | def preloadLastDownloaded(self, lfas): |
3952 | @@ -326,7 +331,7 @@ class LibraryFileAliasSet: |
3953 | |
3954 | |
3955 | @implementer(ILibraryFileDownloadCount) |
3956 | -class LibraryFileDownloadCount(SQLBase): |
3957 | +class LibraryFileDownloadCount(StormBase): |
3958 | """See `ILibraryFileDownloadCount`""" |
3959 | |
3960 | __storm_table__ = "LibraryFileDownloadCount" |
3961 | @@ -339,16 +344,23 @@ class LibraryFileDownloadCount(SQLBase): |
3962 | country_id = Int(name="country", allow_none=True) |
3963 | country = Reference(country_id, "Country.id") |
3964 | |
3965 | + def __init__(self, libraryfilealias, day, count, country=None): |
3966 | + super().__init__() |
3967 | + self.libraryfilealias = libraryfilealias |
3968 | + self.day = day |
3969 | + self.count = count |
3970 | + self.country = country |
3971 | + |
3972 | |
3973 | class TimeLimitedToken(StormBase): |
3974 | """A time limited access token for accessing a private file.""" |
3975 | |
3976 | __storm_table__ = "TimeLimitedToken" |
3977 | |
3978 | - created = UtcDateTimeCol(notNull=True, default=UTC_NOW) |
3979 | - path = StringCol(notNull=True) |
3980 | + created = DateTime(allow_none=False, default=UTC_NOW, tzinfo=timezone.utc) |
3981 | + path = Unicode(allow_none=False) |
3982 | # The hex SHA-256 hash of the token. |
3983 | - token = StringCol(notNull=True) |
3984 | + token = Unicode(allow_none=False) |
3985 | |
3986 | __storm_primary__ = ("path", "token") |
3987 | |
3988 | diff --git a/lib/lp/services/librarian/tests/test_client.py b/lib/lp/services/librarian/tests/test_client.py |
3989 | index 39f6ee8..012d1e1 100644 |
3990 | --- a/lib/lp/services/librarian/tests/test_client.py |
3991 | +++ b/lib/lp/services/librarian/tests/test_client.py |
3992 | @@ -20,7 +20,7 @@ from testtools.testcase import ExpectedException |
3993 | |
3994 | from lp.services.config import config |
3995 | from lp.services.daemons.tachandler import TacTestSetup |
3996 | -from lp.services.database.interfaces import IStandbyStore |
3997 | +from lp.services.database.interfaces import IStandbyStore, IStore |
3998 | from lp.services.database.policy import StandbyDatabasePolicy |
3999 | from lp.services.database.sqlbase import block_implicit_flushes |
4000 | from lp.services.librarian import client as client_module |
4001 | @@ -387,8 +387,9 @@ class LibrarianClientTestCase(TestCase): |
4002 | sha256 = hashlib.sha256(data).hexdigest() |
4003 | |
4004 | client = LibrarianClient() |
4005 | - lfa = LibraryFileAlias.get( |
4006 | - client.addFile("file", len(data), io.BytesIO(data), "text/plain") |
4007 | + lfa = IStore(LibraryFileAlias).get( |
4008 | + LibraryFileAlias, |
4009 | + client.addFile("file", len(data), io.BytesIO(data), "text/plain"), |
4010 | ) |
4011 | |
4012 | self.assertEqual(md5, lfa.content.md5) |
4013 | @@ -427,7 +428,7 @@ class LibrarianClientTestCase(TestCase): |
4014 | "expected %s to start with %s" % (download_url, expected_host), |
4015 | ) |
4016 | # If the alias has been deleted, _getURLForDownload returns None. |
4017 | - lfa = LibraryFileAlias.get(alias_id) |
4018 | + lfa = IStore(LibraryFileAlias).get(LibraryFileAlias, alias_id) |
4019 | lfa.content = None |
4020 | call = block_implicit_flushes( # Prevent a ProgrammingError |
4021 | LibrarianClient._getURLForDownload |
4022 | @@ -469,7 +470,7 @@ class LibrarianClientTestCase(TestCase): |
4023 | "expected %s to start with %s" % (download_url, expected_host), |
4024 | ) |
4025 | # If the alias has been deleted, _getURLForDownload returns None. |
4026 | - lfa = LibraryFileAlias.get(alias_id) |
4027 | + lfa = IStore(LibraryFileAlias).get(LibraryFileAlias, alias_id) |
4028 | lfa.content = None |
4029 | call = block_implicit_flushes( # Prevent a ProgrammingError |
4030 | RestrictedLibrarianClient._getURLForDownload |
4031 | diff --git a/lib/lp/services/librarianserver/db.py b/lib/lp/services/librarianserver/db.py |
4032 | index b747c2c..21631b2 100644 |
4033 | --- a/lib/lp/services/librarianserver/db.py |
4034 | +++ b/lib/lp/services/librarianserver/db.py |
4035 | @@ -49,7 +49,12 @@ class Library: |
4036 | # The following methods are read-only queries. |
4037 | |
4038 | def lookupBySHA1(self, digest): |
4039 | - return [fc.id for fc in LibraryFileContent.selectBy(sha1=digest)] |
4040 | + return [ |
4041 | + fc.id |
4042 | + for fc in IStore(LibraryFileContent).find( |
4043 | + LibraryFileContent, sha1=digest |
4044 | + ) |
4045 | + ] |
4046 | |
4047 | @defer.inlineCallbacks |
4048 | def _verifyMacaroon(self, macaroon, aliasid): |
4049 | @@ -142,7 +147,7 @@ class Library: |
4050 | def getAliases(self, fileid): |
4051 | results = IStore(LibraryFileAlias).find( |
4052 | LibraryFileAlias, |
4053 | - LibraryFileAlias.contentID == LibraryFileContent.id, |
4054 | + LibraryFileAlias.content_id == LibraryFileContent.id, |
4055 | LibraryFileAlias.restricted == self.restricted, |
4056 | LibraryFileContent.id == fileid, |
4057 | ) |
4058 | @@ -151,20 +156,22 @@ class Library: |
4059 | # the following methods are used for adding to the library |
4060 | |
4061 | def add(self, digest, size, md5_digest, sha256_digest): |
4062 | + store = IStore(LibraryFileContent) |
4063 | lfc = LibraryFileContent( |
4064 | filesize=size, sha1=digest, md5=md5_digest, sha256=sha256_digest |
4065 | ) |
4066 | - return lfc.id |
4067 | - |
4068 | - def addAlias(self, fileid, filename, mimetype, expires=None): |
4069 | - """Add an alias, and return its ID. |
4070 | - |
4071 | - If a matching alias already exists, it will return that ID instead. |
4072 | - """ |
4073 | - return LibraryFileAlias( |
4074 | - contentID=fileid, |
4075 | + store.add(lfc) |
4076 | + store.flush() |
4077 | + return lfc |
4078 | + |
4079 | + def addAlias(self, content, filename, mimetype, expires=None): |
4080 | + """Add an alias and return it.""" |
4081 | + lfa = LibraryFileAlias( |
4082 | + content=content, |
4083 | filename=filename, |
4084 | mimetype=mimetype, |
4085 | expires=expires, |
4086 | restricted=self.restricted, |
4087 | - ).id |
4088 | + ) |
4089 | + IStore(LibraryFileAlias).flush() |
4090 | + return lfa |
4091 | diff --git a/lib/lp/services/librarianserver/librariangc.py b/lib/lp/services/librarianserver/librariangc.py |
4092 | index 14423be..aafd7cb 100644 |
4093 | --- a/lib/lp/services/librarianserver/librariangc.py |
4094 | +++ b/lib/lp/services/librarianserver/librariangc.py |
4095 | @@ -20,7 +20,7 @@ from lp.services.config import config |
4096 | from lp.services.database.postgresql import ( |
4097 | drop_tables, |
4098 | listReferences, |
4099 | - quoteIdentifier, |
4100 | + quote_identifier, |
4101 | ) |
4102 | from lp.services.features import getFeatureFlag |
4103 | from lp.services.librarianserver import swift |
4104 | @@ -436,8 +436,8 @@ class UnreferencedLibraryFileAliasPruner: |
4105 | WHERE LibraryFileAlias.id = %(table)s.%(column)s |
4106 | """ |
4107 | % { |
4108 | - "table": quoteIdentifier(table), |
4109 | - "column": quoteIdentifier(column), |
4110 | + "table": quote_identifier(table), |
4111 | + "column": quote_identifier(column), |
4112 | } |
4113 | ) |
4114 | log.debug( |
4115 | diff --git a/lib/lp/services/librarianserver/storage.py b/lib/lp/services/librarianserver/storage.py |
4116 | index 2c2437b..cabded8 100644 |
4117 | --- a/lib/lp/services/librarianserver/storage.py |
4118 | +++ b/lib/lp/services/librarianserver/storage.py |
4119 | @@ -270,15 +270,16 @@ class LibraryFileUpload: |
4120 | # If we haven't got a contentID, we need to create one and return |
4121 | # it to the client. |
4122 | if self.contentID is None: |
4123 | - contentID = self.storage.library.add( |
4124 | + content = self.storage.library.add( |
4125 | dstDigest, |
4126 | self.size, |
4127 | self.md5_digester.hexdigest(), |
4128 | self.sha256_digester.hexdigest(), |
4129 | ) |
4130 | + contentID = content.id |
4131 | aliasID = self.storage.library.addAlias( |
4132 | - contentID, self.filename, self.mimetype, self.expires |
4133 | - ) |
4134 | + content, self.filename, self.mimetype, self.expires |
4135 | + ).id |
4136 | self.debugLog.append( |
4137 | "created contentID: %r, aliasID: %r." |
4138 | % (contentID, aliasID) |
4139 | diff --git a/lib/lp/services/librarianserver/testing/fake.py b/lib/lp/services/librarianserver/testing/fake.py |
4140 | index e265e6a..9898dc2 100644 |
4141 | --- a/lib/lp/services/librarianserver/testing/fake.py |
4142 | +++ b/lib/lp/services/librarianserver/testing/fake.py |
4143 | @@ -25,6 +25,7 @@ from transaction.interfaces import ISynchronizer |
4144 | from zope.interface import implementer |
4145 | |
4146 | from lp.services.config import config |
4147 | +from lp.services.database.interfaces import IStore |
4148 | from lp.services.librarian.client import get_libraryfilealias_download_path |
4149 | from lp.services.librarian.interfaces import ILibraryFileAliasSet |
4150 | from lp.services.librarian.interfaces.client import ( |
4151 | @@ -102,7 +103,7 @@ class FakeLibrarian(Fixture): |
4152 | ) |
4153 | |
4154 | file_ref = self._makeLibraryFileContent(content) |
4155 | - alias = self._makeAlias(file_ref.id, name, content, contentType) |
4156 | + alias = self._makeAlias(file_ref, name, content, contentType) |
4157 | self.aliases[alias.id] = alias |
4158 | return alias |
4159 | |
4160 | @@ -142,12 +143,13 @@ class FakeLibrarian(Fixture): |
4161 | for alias in self.aliases.values(): |
4162 | alias.file_committed = True |
4163 | |
4164 | - def _makeAlias(self, file_id, name, content, content_type): |
4165 | + def _makeAlias(self, lfc, name, content, content_type): |
4166 | """Create a `LibraryFileAlias`.""" |
4167 | alias = InstrumentedLibraryFileAlias( |
4168 | - contentID=file_id, filename=name, mimetype=content_type |
4169 | + content=lfc, filename=name, mimetype=content_type |
4170 | ) |
4171 | alias.content_bytes = content |
4172 | + IStore(LibraryFileAlias).flush() |
4173 | return alias |
4174 | |
4175 | def _makeLibraryFileContent(self, content): |
4176 | @@ -160,6 +162,7 @@ class FakeLibrarian(Fixture): |
4177 | content_object = LibraryFileContent( |
4178 | filesize=size, md5=md5, sha1=sha1, sha256=sha256 |
4179 | ) |
4180 | + IStore(LibraryFileContent).add(content_object) |
4181 | return content_object |
4182 | |
4183 | def create( |
4184 | diff --git a/lib/lp/services/librarianserver/testing/server.py b/lib/lp/services/librarianserver/testing/server.py |
4185 | index c2ef75b..26a7914 100644 |
4186 | --- a/lib/lp/services/librarianserver/testing/server.py |
4187 | +++ b/lib/lp/services/librarianserver/testing/server.py |
4188 | @@ -19,6 +19,7 @@ from fixtures import Fixture, FunctionFixture |
4189 | |
4190 | from lp.services.config import config |
4191 | from lp.services.daemons.tachandler import TacException, TacTestSetup |
4192 | +from lp.services.database.interfaces import IStore |
4193 | from lp.services.librarian.model import LibraryFileContent |
4194 | from lp.services.librarianserver.storage import _relFileLocation |
4195 | from lp.services.osutils import get_pid_from_file |
4196 | @@ -255,7 +256,7 @@ class LibrarianServerFixture(TacTestSetup): |
4197 | def fillLibrarianFile(fileid, content=None): |
4198 | """Write contents on disk for a librarian sampledata.""" |
4199 | with dbuser("librariangc"): |
4200 | - lfc = LibraryFileContent.get(fileid) |
4201 | + lfc = IStore(LibraryFileContent).get(LibraryFileContent, fileid) |
4202 | if content is None: |
4203 | content = b"x" * lfc.filesize |
4204 | else: |
4205 | diff --git a/lib/lp/services/librarianserver/tests/test_db.py b/lib/lp/services/librarianserver/tests/test_db.py |
4206 | index f43d918..48d980b 100644 |
4207 | --- a/lib/lp/services/librarianserver/tests/test_db.py |
4208 | +++ b/lib/lp/services/librarianserver/tests/test_db.py |
4209 | @@ -42,21 +42,20 @@ class DBTestCase(TestCase): |
4210 | self.assertEqual([], library.lookupBySHA1("deadbeef")) |
4211 | |
4212 | # Add a file, check it is found by lookupBySHA1 |
4213 | - fileID = library.add("deadbeef", 1234, "abababab", "babababa") |
4214 | - self.assertEqual([fileID], library.lookupBySHA1("deadbeef")) |
4215 | + content = library.add("deadbeef", 1234, "abababab", "babababa") |
4216 | + self.assertEqual([content.id], library.lookupBySHA1("deadbeef")) |
4217 | |
4218 | # Add a new file with the same digest |
4219 | - newFileID = library.add("deadbeef", 1234, "abababab", "babababa") |
4220 | + new_content = library.add("deadbeef", 1234, "abababab", "babababa") |
4221 | # Check it gets a new ID anyway |
4222 | - self.assertNotEqual(fileID, newFileID) |
4223 | + self.assertNotEqual(content.id, new_content.id) |
4224 | # Check it is found by lookupBySHA1 |
4225 | self.assertEqual( |
4226 | - sorted([fileID, newFileID]), |
4227 | + sorted([content.id, new_content.id]), |
4228 | sorted(library.lookupBySHA1("deadbeef")), |
4229 | ) |
4230 | |
4231 | - aliasID = library.addAlias(fileID, "file1", "text/unknown") |
4232 | - alias = library.getAlias(aliasID, None, "/") |
4233 | + alias = library.addAlias(content, "file1", "text/unknown") |
4234 | self.assertEqual("file1", alias.filename) |
4235 | self.assertEqual("text/unknown", alias.mimetype) |
4236 | |
4237 | @@ -97,7 +96,9 @@ class TestLibrarianStuff(TestCase): |
4238 | super().setUp() |
4239 | switch_dbuser("librarian") |
4240 | self.store = IStore(LibraryFileContent) |
4241 | - self.content_id = db.Library().add("deadbeef", 1234, "abababab", "ba") |
4242 | + self.content_id = ( |
4243 | + db.Library().add("deadbeef", 1234, "abababab", "ba").id |
4244 | + ) |
4245 | self.file_content = self._getTestFileContent() |
4246 | transaction.commit() |
4247 | |
4248 | diff --git a/lib/lp/services/librarianserver/tests/test_gc.py b/lib/lp/services/librarianserver/tests/test_gc.py |
4249 | index 41087cc..90f2039 100644 |
4250 | --- a/lib/lp/services/librarianserver/tests/test_gc.py |
4251 | +++ b/lib/lp/services/librarianserver/tests/test_gc.py |
4252 | @@ -22,13 +22,12 @@ from swiftclient import client as swiftclient |
4253 | from testtools.matchers import AnyMatch, Equals, MatchesListwise, MatchesRegex |
4254 | |
4255 | from lp.services.config import config |
4256 | -from lp.services.database.interfaces import IPrimaryStore |
4257 | +from lp.services.database.interfaces import IStore |
4258 | from lp.services.database.sqlbase import ( |
4259 | ISOLATION_LEVEL_AUTOCOMMIT, |
4260 | connect, |
4261 | cursor, |
4262 | ) |
4263 | -from lp.services.database.sqlobject import SQLObjectNotFound |
4264 | from lp.services.features.testing import FeatureFixture |
4265 | from lp.services.librarian.client import LibrarianClient |
4266 | from lp.services.librarian.model import LibraryFileAlias, LibraryFileContent |
4267 | @@ -50,6 +49,7 @@ class TestLibrarianGarbageCollectionBase: |
4268 | |
4269 | def setUp(self): |
4270 | super().setUp() |
4271 | + self.store = IStore(LibraryFileContent) |
4272 | self.client = LibrarianClient() |
4273 | self.patch(librariangc, "log", BufferLogger()) |
4274 | |
4275 | @@ -74,8 +74,7 @@ class TestLibrarianGarbageCollectionBase: |
4276 | # Make sure that every file the database knows about exists on disk. |
4277 | # We manually remove them for tests that need to cope with missing |
4278 | # library items. |
4279 | - store = IPrimaryStore(LibraryFileContent) |
4280 | - for content in store.find(LibraryFileContent): |
4281 | + for content in self.store.find(LibraryFileContent): |
4282 | path = librariangc.get_file_path(content.id) |
4283 | if not os.path.exists(path): |
4284 | if not os.path.exists(os.path.dirname(path)): |
4285 | @@ -121,18 +120,18 @@ class TestLibrarianGarbageCollectionBase: |
4286 | io.BytesIO(content), |
4287 | "text/plain", |
4288 | ) |
4289 | - f1 = LibraryFileAlias.get(f1_id) |
4290 | + f1 = self.store.get(LibraryFileAlias, f1_id) |
4291 | f2_id = self.client.addFile( |
4292 | "foo.txt", |
4293 | len(content), |
4294 | io.BytesIO(content), |
4295 | "text/plain", |
4296 | ) |
4297 | - f2 = LibraryFileAlias.get(f2_id) |
4298 | + f2 = self.store.get(LibraryFileAlias, f2_id) |
4299 | |
4300 | # Make sure the duplicates really are distinct |
4301 | self.assertNotEqual(f1_id, f2_id) |
4302 | - self.assertNotEqual(f1.contentID, f2.contentID) |
4303 | + self.assertNotEqual(f1.content_id, f2.content_id) |
4304 | |
4305 | f1.date_created = self.ancient_past |
4306 | f2.date_created = self.ancient_past |
4307 | @@ -141,8 +140,8 @@ class TestLibrarianGarbageCollectionBase: |
4308 | |
4309 | # Set the time on disk to match the database timestamp. |
4310 | utime = calendar.timegm(self.ancient_past.utctimetuple()) |
4311 | - os.utime(librariangc.get_file_path(f1.contentID), (utime, utime)) |
4312 | - os.utime(librariangc.get_file_path(f2.contentID), (utime, utime)) |
4313 | + os.utime(librariangc.get_file_path(f1.content_id), (utime, utime)) |
4314 | + os.utime(librariangc.get_file_path(f2.content_id), (utime, utime)) |
4315 | |
4316 | del f1, f2 |
4317 | |
4318 | @@ -165,20 +164,20 @@ class TestLibrarianGarbageCollectionBase: |
4319 | |
4320 | # Confirm that the duplicates have been merged |
4321 | self.ztm.begin() |
4322 | - f1 = LibraryFileAlias.get(self.f1_id) |
4323 | - f2 = LibraryFileAlias.get(self.f2_id) |
4324 | - self.assertEqual(f1.contentID, f2.contentID) |
4325 | + f1 = self.store.get(LibraryFileAlias, self.f1_id) |
4326 | + f2 = self.store.get(LibraryFileAlias, self.f2_id) |
4327 | + self.assertEqual(f1.content_id, f2.content_id) |
4328 | |
4329 | def test_DeleteUnreferencedAliases(self): |
4330 | self.ztm.begin() |
4331 | |
4332 | # Confirm that our sample files are there. |
4333 | - f1 = LibraryFileAlias.get(self.f1_id) |
4334 | - f2 = LibraryFileAlias.get(self.f2_id) |
4335 | + f1 = self.store.get(LibraryFileAlias, self.f1_id) |
4336 | + f2 = self.store.get(LibraryFileAlias, self.f2_id) |
4337 | # Grab the content IDs related to these |
4338 | # unreferenced LibraryFileAliases |
4339 | - c1_id = f1.contentID |
4340 | - c2_id = f2.contentID |
4341 | + c1_id = f1.content_id |
4342 | + c2_id = f2.content_id |
4343 | del f1, f2 |
4344 | self.ztm.abort() |
4345 | |
4346 | @@ -188,13 +187,13 @@ class TestLibrarianGarbageCollectionBase: |
4347 | # This should have committed |
4348 | self.ztm.begin() |
4349 | |
4350 | - # Confirm that the LibaryFileContents are still there. |
4351 | - LibraryFileContent.get(c1_id) |
4352 | - LibraryFileContent.get(c2_id) |
4353 | + # Confirm that the LibraryFileContents are still there. |
4354 | + self.assertIsNotNone(self.store.get(LibraryFileContent, c1_id)) |
4355 | + self.assertIsNotNone(self.store.get(LibraryFileContent, c2_id)) |
4356 | |
4357 | # But the LibraryFileAliases should be gone |
4358 | - self.assertRaises(SQLObjectNotFound, LibraryFileAlias.get, self.f1_id) |
4359 | - self.assertRaises(SQLObjectNotFound, LibraryFileAlias.get, self.f2_id) |
4360 | + self.assertIsNone(self.store.get(LibraryFileAlias, self.f1_id)) |
4361 | + self.assertIsNone(self.store.get(LibraryFileAlias, self.f2_id)) |
4362 | |
4363 | def test_DeleteUnreferencedAliases2(self): |
4364 | # Don't delete LibraryFileAliases accessed recently |
4365 | @@ -205,8 +204,8 @@ class TestLibrarianGarbageCollectionBase: |
4366 | |
4367 | # We now have two aliases sharing the same content. |
4368 | self.ztm.begin() |
4369 | - f1 = LibraryFileAlias.get(self.f1_id) |
4370 | - f2 = LibraryFileAlias.get(self.f2_id) |
4371 | + f1 = self.store.get(LibraryFileAlias, self.f1_id) |
4372 | + f2 = self.store.get(LibraryFileAlias, self.f2_id) |
4373 | self.assertEqual(f1.content, f2.content) |
4374 | |
4375 | # Flag one of our LibraryFileAliases as being recently created |
4376 | @@ -222,8 +221,8 @@ class TestLibrarianGarbageCollectionBase: |
4377 | librariangc.delete_unreferenced_aliases(self.con) |
4378 | |
4379 | self.ztm.begin() |
4380 | - LibraryFileAlias.get(self.f1_id) |
4381 | - self.assertRaises(SQLObjectNotFound, LibraryFileAlias.get, self.f2_id) |
4382 | + self.assertIsNotNone(self.store.get(LibraryFileAlias, self.f1_id)) |
4383 | + self.assertIsNone(self.store.get(LibraryFileAlias, self.f2_id)) |
4384 | |
4385 | def test_DeleteUnreferencedAndWellExpiredAliases(self): |
4386 | # LibraryFileAliases can be removed after they have expired |
4387 | @@ -234,7 +233,7 @@ class TestLibrarianGarbageCollectionBase: |
4388 | |
4389 | # Flag one of our LibraryFileAliases with an expiry date in the past |
4390 | self.ztm.begin() |
4391 | - f1 = LibraryFileAlias.get(self.f1_id) |
4392 | + f1 = self.store.get(LibraryFileAlias, self.f1_id) |
4393 | f1.expires = self.ancient_past |
4394 | del f1 |
4395 | self.ztm.commit() |
4396 | @@ -246,8 +245,8 @@ class TestLibrarianGarbageCollectionBase: |
4397 | |
4398 | # Make sure both our example files are gone |
4399 | self.ztm.begin() |
4400 | - self.assertRaises(SQLObjectNotFound, LibraryFileAlias.get, self.f1_id) |
4401 | - self.assertRaises(SQLObjectNotFound, LibraryFileAlias.get, self.f2_id) |
4402 | + self.assertIsNone(self.store.get(LibraryFileAlias, self.f1_id)) |
4403 | + self.assertIsNone(self.store.get(LibraryFileAlias, self.f2_id)) |
4404 | |
4405 | def test_DoneDeleteUnreferencedButNotExpiredAliases(self): |
4406 | # LibraryFileAliases can be removed only after they have expired. |
4407 | @@ -261,7 +260,7 @@ class TestLibrarianGarbageCollectionBase: |
4408 | # Flag one of our LibraryFileAliases with an expiry date in the |
4409 | # recent past. |
4410 | self.ztm.begin() |
4411 | - f1 = LibraryFileAlias.get(self.f1_id) |
4412 | + f1 = self.store.get(LibraryFileAlias, self.f1_id) |
4413 | f1.expires = self.recent_past |
4414 | del f1 |
4415 | self.ztm.commit() |
4416 | @@ -274,7 +273,7 @@ class TestLibrarianGarbageCollectionBase: |
4417 | # Make sure both our example files are still there |
4418 | self.ztm.begin() |
4419 | # Our recently expired LibraryFileAlias is still available. |
4420 | - LibraryFileAlias.get(self.f1_id) |
4421 | + self.assertIsNotNone(self.store.get(LibraryFileAlias, self.f1_id)) |
4422 | |
4423 | def test_deleteWellExpiredAliases(self): |
4424 | # LibraryFileAlias records that are expired are unlinked from their |
4425 | @@ -282,7 +281,7 @@ class TestLibrarianGarbageCollectionBase: |
4426 | |
4427 | # Flag one of our LibraryFileAliases with an expiry date in the past |
4428 | self.ztm.begin() |
4429 | - f1 = LibraryFileAlias.get(self.f1_id) |
4430 | + f1 = self.store.get(LibraryFileAlias, self.f1_id) |
4431 | f1.expires = self.ancient_past |
4432 | del f1 |
4433 | self.ztm.commit() |
4434 | @@ -292,10 +291,10 @@ class TestLibrarianGarbageCollectionBase: |
4435 | |
4436 | self.ztm.begin() |
4437 | # Make sure the well expired f1 is still there, but has no content. |
4438 | - f1 = LibraryFileAlias.get(self.f1_id) |
4439 | + f1 = self.store.get(LibraryFileAlias, self.f1_id) |
4440 | self.assertIsNone(f1.content) |
4441 | # f2 should still have content, as it isn't flagged for expiry. |
4442 | - f2 = LibraryFileAlias.get(self.f2_id) |
4443 | + f2 = self.store.get(LibraryFileAlias, self.f2_id) |
4444 | self.assertIsNotNone(f2.content) |
4445 | |
4446 | def test_ignoreRecentlyExpiredAliases(self): |
4447 | @@ -305,7 +304,7 @@ class TestLibrarianGarbageCollectionBase: |
4448 | # Flag one of our LibraryFileAliases with an expiry date in the |
4449 | # recent past. |
4450 | self.ztm.begin() |
4451 | - f1 = LibraryFileAlias.get(self.f1_id) |
4452 | + f1 = self.store.get(LibraryFileAlias, self.f1_id) |
4453 | f1.expires = self.recent_past # Within stay of execution. |
4454 | del f1 |
4455 | self.ztm.commit() |
4456 | @@ -316,10 +315,10 @@ class TestLibrarianGarbageCollectionBase: |
4457 | self.ztm.begin() |
4458 | # Make sure f1 is still there and has content. This ensures that |
4459 | # our stay of execution is still working. |
4460 | - f1 = LibraryFileAlias.get(self.f1_id) |
4461 | + f1 = self.store.get(LibraryFileAlias, self.f1_id) |
4462 | self.assertIsNotNone(f1.content) |
4463 | # f2 should still have content, as it isn't flagged for expiry. |
4464 | - f2 = LibraryFileAlias.get(self.f2_id) |
4465 | + f2 = self.store.get(LibraryFileAlias, self.f2_id) |
4466 | self.assertIsNotNone(f2.content) |
4467 | |
4468 | def test_DeleteUnreferencedContent(self): |
4469 | @@ -583,11 +582,11 @@ class TestLibrarianGarbageCollectionBase: |
4470 | |
4471 | # Make sure that our example files have been garbage collected |
4472 | self.ztm.begin() |
4473 | - self.assertRaises(SQLObjectNotFound, LibraryFileAlias.get, self.f1_id) |
4474 | - self.assertRaises(SQLObjectNotFound, LibraryFileAlias.get, self.f2_id) |
4475 | + self.assertIsNone(self.store.get(LibraryFileAlias, self.f1_id)) |
4476 | + self.assertIsNone(self.store.get(LibraryFileAlias, self.f2_id)) |
4477 | |
4478 | # And make sure stuff that *is* referenced remains |
4479 | - LibraryFileAlias.get(2) |
4480 | + self.assertIsNotNone(self.store.get(LibraryFileAlias, 2)) |
4481 | cur = cursor() |
4482 | cur.execute("SELECT count(*) FROM LibraryFileAlias") |
4483 | count = cur.fetchone()[0] |
4484 | @@ -625,22 +624,24 @@ class TestDiskLibrarianGarbageCollection( |
4485 | # original file, ignoring the extension. |
4486 | switch_dbuser("testadmin") |
4487 | content = b"foo" |
4488 | - lfa = LibraryFileAlias.get( |
4489 | + lfa = self.store.get( |
4490 | + LibraryFileAlias, |
4491 | self.client.addFile( |
4492 | "foo.txt", len(content), io.BytesIO(content), "text/plain" |
4493 | - ) |
4494 | + ), |
4495 | ) |
4496 | - id_aborted = lfa.contentID |
4497 | + id_aborted = lfa.content_id |
4498 | # Roll back the database changes, leaving the file on disk. |
4499 | transaction.abort() |
4500 | |
4501 | - lfa = LibraryFileAlias.get( |
4502 | + lfa = self.store.get( |
4503 | + LibraryFileAlias, |
4504 | self.client.addFile( |
4505 | "bar.txt", len(content), io.BytesIO(content), "text/plain" |
4506 | - ) |
4507 | + ), |
4508 | ) |
4509 | transaction.commit() |
4510 | - id_committed = lfa.contentID |
4511 | + id_committed = lfa.content_id |
4512 | |
4513 | switch_dbuser(config.librarian_gc.dbuser) |
4514 | |
4515 | @@ -811,19 +812,21 @@ class TestSwiftLibrarianGarbageCollection( |
4516 | # by a manifest. GC treats the segments like the original file. |
4517 | switch_dbuser("testadmin") |
4518 | content = b"uploading to swift bigly" |
4519 | - big1_lfa = LibraryFileAlias.get( |
4520 | + big1_lfa = self.store.get( |
4521 | + LibraryFileAlias, |
4522 | self.client.addFile( |
4523 | "foo.txt", len(content), io.BytesIO(content), "text/plain" |
4524 | - ) |
4525 | + ), |
4526 | ) |
4527 | - big1_id = big1_lfa.contentID |
4528 | + big1_id = big1_lfa.content_id |
4529 | |
4530 | - big2_lfa = LibraryFileAlias.get( |
4531 | + big2_lfa = self.store.get( |
4532 | + LibraryFileAlias, |
4533 | self.client.addFile( |
4534 | "bar.txt", len(content), io.BytesIO(content), "text/plain" |
4535 | - ) |
4536 | + ), |
4537 | ) |
4538 | - big2_id = big2_lfa.contentID |
4539 | + big2_id = big2_lfa.content_id |
4540 | transaction.commit() |
4541 | |
4542 | for lfc_id in (big1_id, big2_id): |
4543 | @@ -872,19 +875,21 @@ class TestSwiftLibrarianGarbageCollection( |
4544 | # suggest that it might happen. |
4545 | switch_dbuser("testadmin") |
4546 | content = b"uploading to swift" |
4547 | - f1_lfa = LibraryFileAlias.get( |
4548 | + f1_lfa = self.store.get( |
4549 | + LibraryFileAlias, |
4550 | self.client.addFile( |
4551 | "foo.txt", len(content), io.BytesIO(content), "text/plain" |
4552 | - ) |
4553 | + ), |
4554 | ) |
4555 | - f1_id = f1_lfa.contentID |
4556 | + f1_id = f1_lfa.content_id |
4557 | |
4558 | - f2_lfa = LibraryFileAlias.get( |
4559 | + f2_lfa = self.store.get( |
4560 | + LibraryFileAlias, |
4561 | self.client.addFile( |
4562 | "bar.txt", len(content), io.BytesIO(content), "text/plain" |
4563 | - ) |
4564 | + ), |
4565 | ) |
4566 | - f2_id = f2_lfa.contentID |
4567 | + f2_id = f2_lfa.content_id |
4568 | transaction.commit() |
4569 | |
4570 | for lfc_id in (f1_id, f2_id): |
4571 | @@ -937,19 +942,21 @@ class TestSwiftLibrarianGarbageCollection( |
4572 | # to delete it. It's not clear why this happens in practice. |
4573 | switch_dbuser("testadmin") |
4574 | content = b"uploading to swift" |
4575 | - f1_lfa = LibraryFileAlias.get( |
4576 | + f1_lfa = self.store.get( |
4577 | + LibraryFileAlias, |
4578 | self.client.addFile( |
4579 | "foo.txt", len(content), io.BytesIO(content), "text/plain" |
4580 | - ) |
4581 | + ), |
4582 | ) |
4583 | - f1_id = f1_lfa.contentID |
4584 | + f1_id = f1_lfa.content_id |
4585 | |
4586 | - f2_lfa = LibraryFileAlias.get( |
4587 | + f2_lfa = self.store.get( |
4588 | + LibraryFileAlias, |
4589 | self.client.addFile( |
4590 | "bar.txt", len(content), io.BytesIO(content), "text/plain" |
4591 | - ) |
4592 | + ), |
4593 | ) |
4594 | - f2_id = f2_lfa.contentID |
4595 | + f2_id = f2_lfa.content_id |
4596 | transaction.commit() |
4597 | |
4598 | for lfc_id in (f1_id, f2_id): |
4599 | @@ -1017,14 +1024,15 @@ class TestTwoSwiftsLibrarianGarbageCollection( |
4600 | switch_dbuser("testadmin") |
4601 | content = b"foo" |
4602 | lfas = [ |
4603 | - LibraryFileAlias.get( |
4604 | + self.store.get( |
4605 | + LibraryFileAlias, |
4606 | self.client.addFile( |
4607 | "foo.txt", len(content), io.BytesIO(content), "text/plain" |
4608 | - ) |
4609 | + ), |
4610 | ) |
4611 | for _ in range(12) |
4612 | ] |
4613 | - lfc_ids = [lfa.contentID for lfa in lfas] |
4614 | + lfc_ids = [lfa.content_id for lfa in lfas] |
4615 | transaction.commit() |
4616 | |
4617 | # Simulate a migration in progress. Some files are only in the old |
4618 | @@ -1103,14 +1111,15 @@ class TestTwoSwiftsLibrarianGarbageCollection( |
4619 | switch_dbuser("testadmin") |
4620 | content = b"foo" |
4621 | lfas = [ |
4622 | - LibraryFileAlias.get( |
4623 | + self.store.get( |
4624 | + LibraryFileAlias, |
4625 | self.client.addFile( |
4626 | "foo.txt", len(content), io.BytesIO(content), "text/plain" |
4627 | - ) |
4628 | + ), |
4629 | ) |
4630 | for _ in range(12) |
4631 | ] |
4632 | - lfc_ids = [lfa.contentID for lfa in lfas] |
4633 | + lfc_ids = [lfa.content_id for lfa in lfas] |
4634 | transaction.commit() |
4635 | |
4636 | for lfc_id in lfc_ids: |
4637 | diff --git a/lib/lp/services/librarianserver/tests/test_storage.py b/lib/lp/services/librarianserver/tests/test_storage.py |
4638 | index c95d4f8..4f452f5 100644 |
4639 | --- a/lib/lp/services/librarianserver/tests/test_storage.py |
4640 | +++ b/lib/lp/services/librarianserver/tests/test_storage.py |
4641 | @@ -105,23 +105,28 @@ class LibrarianStorageTestCase(unittest.TestCase): |
4642 | newfile = self.storage.startAddFile("file", len(data)) |
4643 | newfile.append(data) |
4644 | lfc_id, lfa_id = newfile.store() |
4645 | - lfc = LibraryFileContent.get(lfc_id) |
4646 | + lfc = self.store.get(LibraryFileContent, lfc_id) |
4647 | self.assertEqual(md5, lfc.md5) |
4648 | self.assertEqual(sha1, lfc.sha1) |
4649 | self.assertEqual(sha256, lfc.sha256) |
4650 | |
4651 | |
4652 | +class StubLibraryFileContent: |
4653 | + def __init__(self, id): |
4654 | + self.id = id |
4655 | + |
4656 | + |
4657 | class StubLibrary: |
4658 | # Used by test_multipleFilesInOnePrefixedDirectory |
4659 | |
4660 | def lookupBySHA1(self, digest): |
4661 | return [] |
4662 | |
4663 | - def addAlias(self, fileid, filename, mimetype): |
4664 | + def addAlias(self, content, filename, mimetype): |
4665 | pass |
4666 | |
4667 | id = 0x11111110 |
4668 | |
4669 | def add(self, digest, size): |
4670 | self.id += 1 |
4671 | - return self.id |
4672 | + return StubLibraryFileContent(self.id) |
4673 | diff --git a/lib/lp/services/librarianserver/tests/test_storage_db.py b/lib/lp/services/librarianserver/tests/test_storage_db.py |
4674 | index b2d0b1b..05723f1 100644 |
4675 | --- a/lib/lp/services/librarianserver/tests/test_storage_db.py |
4676 | +++ b/lib/lp/services/librarianserver/tests/test_storage_db.py |
4677 | @@ -10,6 +10,7 @@ from testtools.testcase import ExpectedException |
4678 | from testtools.twistedsupport import AsynchronousDeferredRunTest |
4679 | from twisted.internet import defer |
4680 | |
4681 | +from lp.services.database.interfaces import IStore |
4682 | from lp.services.database.sqlbase import flush_database_updates |
4683 | from lp.services.features.testing import FeatureFixture |
4684 | from lp.services.librarian.model import LibraryFileContent |
4685 | @@ -129,11 +130,16 @@ class LibrarianStorageDBTests(TestCase): |
4686 | fileid2, aliasid2 = newfile2.store() |
4687 | |
4688 | # Create rows in the database for these files. |
4689 | - LibraryFileContent( |
4690 | - filesize=0, sha1="foo", md5="xx", sha256="xx", id=6661 |
4691 | + store = IStore(LibraryFileContent) |
4692 | + store.add( |
4693 | + LibraryFileContent( |
4694 | + filesize=0, sha1="foo", md5="xx", sha256="xx", id=6661 |
4695 | + ) |
4696 | ) |
4697 | - LibraryFileContent( |
4698 | - filesize=0, sha1="foo", md5="xx", sha256="xx", id=6662 |
4699 | + store.add( |
4700 | + LibraryFileContent( |
4701 | + filesize=0, sha1="foo", md5="xx", sha256="xx", id=6662 |
4702 | + ) |
4703 | ) |
4704 | |
4705 | flush_database_updates() |
4706 | diff --git a/lib/lp/services/librarianserver/tests/test_web.py b/lib/lp/services/librarianserver/tests/test_web.py |
4707 | index a84b74d..19ebe56 100644 |
4708 | --- a/lib/lp/services/librarianserver/tests/test_web.py |
4709 | +++ b/lib/lp/services/librarianserver/tests/test_web.py |
4710 | @@ -342,7 +342,7 @@ class LibrarianWebTestCase(LibrarianWebTestMixin, TestCaseWithFactory): |
4711 | |
4712 | # Delete the on-disk file. |
4713 | storage = LibrarianStorage(config.librarian_server.root, None) |
4714 | - os.remove(storage._fileLocation(file_alias.contentID)) |
4715 | + os.remove(storage._fileLocation(file_alias.content_id)) |
4716 | |
4717 | # The URL now 500s, since the DB says it should exist. |
4718 | response = requests.get(url) |
4719 | diff --git a/lib/lp/services/librarianserver/web.py b/lib/lp/services/librarianserver/web.py |
4720 | index ef9b6eb..efa7f55 100644 |
4721 | --- a/lib/lp/services/librarianserver/web.py |
4722 | +++ b/lib/lp/services/librarianserver/web.py |
4723 | @@ -130,7 +130,7 @@ class LibraryFileAliasResource(resource.Resource): |
4724 | try: |
4725 | alias = self.storage.getFileAlias(aliasID, token, path) |
4726 | return ( |
4727 | - alias.contentID, |
4728 | + alias.content_id, |
4729 | alias.filename, |
4730 | alias.mimetype, |
4731 | alias.date_created, |
4732 | diff --git a/lib/lp/services/messages/interfaces/message.py b/lib/lp/services/messages/interfaces/message.py |
4733 | index 354a84e..31525df 100644 |
4734 | --- a/lib/lp/services/messages/interfaces/message.py |
4735 | +++ b/lib/lp/services/messages/interfaces/message.py |
4736 | @@ -161,8 +161,14 @@ class IMessageView(IMessageCommon): |
4737 | title = TextLine( |
4738 | title=_("The message title, usually just the subject."), readonly=True |
4739 | ) |
4740 | - visible = Bool( |
4741 | - title="This message is visible or not.", required=False, default=True |
4742 | + visible = exported( |
4743 | + Bool( |
4744 | + title=_("Message visibility."), |
4745 | + description=_("Whether or not the message is visible."), |
4746 | + readonly=True, |
4747 | + default=True, |
4748 | + ), |
4749 | + as_of="devel", |
4750 | ) |
4751 | |
4752 | bugattachments = exported( |
4753 | diff --git a/lib/lp/services/messages/model/message.py b/lib/lp/services/messages/model/message.py |
4754 | index ade6a5d..9f84932 100644 |
4755 | --- a/lib/lp/services/messages/model/message.py |
4756 | +++ b/lib/lp/services/messages/model/message.py |
4757 | @@ -720,8 +720,7 @@ class UserToUserEmail(StormBase): |
4758 | # On the other hand, we really don't need a UserToUserEmailSet for any |
4759 | # other purpose. There isn't any other relationship that can be |
4760 | # inferred, so in this case I think it makes fine sense for the |
4761 | - # constructor to add self to the store. Also, this closely mimics |
4762 | - # what the SQLObject compatibility layer does. |
4763 | + # constructor to add self to the store. |
4764 | Store.of(sender).add(self) |
4765 | |
4766 | |
4767 | diff --git a/lib/lp/services/openid/model/openididentifier.py b/lib/lp/services/openid/model/openididentifier.py |
4768 | index 18ef165..923c7e5 100644 |
4769 | --- a/lib/lp/services/openid/model/openididentifier.py |
4770 | +++ b/lib/lp/services/openid/model/openididentifier.py |
4771 | @@ -5,10 +5,11 @@ |
4772 | |
4773 | __all__ = ["OpenIdIdentifier"] |
4774 | |
4775 | -from storm.locals import Int, Reference, Unicode |
4776 | +from datetime import timezone |
4777 | + |
4778 | +from storm.locals import DateTime, Int, Reference, Unicode |
4779 | |
4780 | from lp.services.database.constants import UTC_NOW |
4781 | -from lp.services.database.datetimecol import UtcDateTimeCol |
4782 | from lp.services.database.stormbase import StormBase |
4783 | |
4784 | |
4785 | @@ -19,4 +20,6 @@ class OpenIdIdentifier(StormBase): |
4786 | identifier = Unicode(primary=True) |
4787 | account_id = Int("account") |
4788 | account = Reference(account_id, "Account.id") |
4789 | - date_created = UtcDateTimeCol(notNull=True, default=UTC_NOW) |
4790 | + date_created = DateTime( |
4791 | + allow_none=False, default=UTC_NOW, tzinfo=timezone.utc |
4792 | + ) |
4793 | diff --git a/lib/lp/services/session/model.py b/lib/lp/services/session/model.py |
4794 | index b54d290..8ab900c 100644 |
4795 | --- a/lib/lp/services/session/model.py |
4796 | +++ b/lib/lp/services/session/model.py |
4797 | @@ -5,10 +5,11 @@ |
4798 | |
4799 | __all__ = ["SessionData", "SessionPkgData"] |
4800 | |
4801 | -from storm.locals import Pickle, Unicode |
4802 | +from datetime import timezone |
4803 | + |
4804 | +from storm.locals import DateTime, Pickle, Unicode |
4805 | from zope.interface import implementer, provider |
4806 | |
4807 | -from lp.services.database.datetimecol import UtcDateTimeCol |
4808 | from lp.services.database.stormbase import StormBase |
4809 | from lp.services.session.interfaces import IUseSessionStore |
4810 | |
4811 | @@ -20,8 +21,8 @@ class SessionData(StormBase): |
4812 | |
4813 | __storm_table__ = "SessionData" |
4814 | client_id = Unicode(primary=True) |
4815 | - created = UtcDateTimeCol() |
4816 | - last_accessed = UtcDateTimeCol() |
4817 | + created = DateTime(tzinfo=timezone.utc) |
4818 | + last_accessed = DateTime(tzinfo=timezone.utc) |
4819 | |
4820 | |
4821 | @implementer(IUseSessionStore) |
4822 | diff --git a/lib/lp/services/statistics/tests/test_update_stats.py b/lib/lp/services/statistics/tests/test_update_stats.py |
4823 | index 577ff58..2b25e58 100644 |
4824 | --- a/lib/lp/services/statistics/tests/test_update_stats.py |
4825 | +++ b/lib/lp/services/statistics/tests/test_update_stats.py |
4826 | @@ -281,7 +281,7 @@ class UpdateTranslationStatsTest(unittest.TestCase): |
4827 | |
4828 | flush_database_caches() |
4829 | |
4830 | - # The transaction changed, we need to refetch SQLObjects. |
4831 | + # The transaction changed, we need to refetch Storm instances. |
4832 | ubuntu = self.distribution["ubuntu"] |
4833 | hoary = self.distroseriesset.queryByName(ubuntu, "hoary") |
4834 | spanish = self.languageset["es"] |
4835 | diff --git a/lib/lp/services/tarfile_helpers.py b/lib/lp/services/tarfile_helpers.py |
4836 | index ef10c54..09dc721 100644 |
4837 | --- a/lib/lp/services/tarfile_helpers.py |
4838 | +++ b/lib/lp/services/tarfile_helpers.py |
4839 | @@ -13,16 +13,6 @@ import tarfile |
4840 | import tempfile |
4841 | import time |
4842 | |
4843 | -# A note about tarballs, BytesIO and unicode. SQLObject returns unicode |
4844 | -# values for columns which are declared as StringCol. We have to be careful |
4845 | -# not to pass unicode instances to the tarfile module, because when the |
4846 | -# tarfile's filehandle is a BytesIO object, the BytesIO object gets upset |
4847 | -# later when we ask it for its value and it tries to join together its |
4848 | -# buffers. This is why the tarball code is sprinkled with ".encode('ascii')". |
4849 | -# If we get separate StringCol and UnicodeCol column types, we won't need this |
4850 | -# any longer. |
4851 | -# -- Dafydd Harries, 2005-04-07. |
4852 | - |
4853 | |
4854 | class LaunchpadWriteTarFile: |
4855 | """Convenience wrapper around the tarfile module. |
4856 | diff --git a/lib/lp/services/verification/doc/logintoken.rst b/lib/lp/services/verification/doc/logintoken.rst |
4857 | index 2cd0377..95c25e3 100644 |
4858 | --- a/lib/lp/services/verification/doc/logintoken.rst |
4859 | +++ b/lib/lp/services/verification/doc/logintoken.rst |
4860 | @@ -19,17 +19,18 @@ follows: |
4861 | 4) The token is now marked as consumed, together with any other |
4862 | tokens of the same type and for the same email address. |
4863 | |
4864 | + >>> import transaction |
4865 | + >>> from lp.registry.model.person import Person |
4866 | + >>> from lp.services.database.interfaces import IStore |
4867 | + >>> from lp.services.database.sqlbase import flush_database_updates |
4868 | + >>> from lp.services.mail import stub |
4869 | >>> from lp.services.verification.interfaces.authtoken import ( |
4870 | ... LoginTokenType, |
4871 | ... ) |
4872 | - >>> from lp.registry.model.person import Person |
4873 | >>> from lp.services.verification.interfaces.logintoken import ( |
4874 | ... ILoginTokenSet, |
4875 | ... ) |
4876 | - >>> from lp.services.database.sqlbase import flush_database_updates |
4877 | - >>> from lp.services.mail import stub |
4878 | - >>> import transaction |
4879 | - >>> foobar = Person.byName("name16") |
4880 | + >>> foobar = IStore(Person).find(Person, name="name16").one() |
4881 | |
4882 | Let's create a new LoginToken to confirm an email address for foobar. |
4883 | |
4884 | diff --git a/lib/lp/services/webapp/configure.zcml b/lib/lp/services/webapp/configure.zcml |
4885 | index ad25c53..5b61d74 100644 |
4886 | --- a/lib/lp/services/webapp/configure.zcml |
4887 | +++ b/lib/lp/services/webapp/configure.zcml |
4888 | @@ -50,10 +50,6 @@ |
4889 | factory='.batching.FiniteSequenceAdapter' /> |
4890 | |
4891 | <adapter |
4892 | - factory='.batching.FiniteSequenceAdapter' |
4893 | - for='storm.zope.interfaces.ISQLObjectResultSet' /> |
4894 | - |
4895 | - <adapter |
4896 | factory='.batching.BoundReferenceSetAdapter' |
4897 | for='storm.references.BoundReferenceSet' /> |
4898 | |
4899 | @@ -246,10 +242,6 @@ |
4900 | |
4901 | <adapter |
4902 | factory="lp.services.webapp.snapshot.snapshot_sql_result" /> |
4903 | - <!-- It also works for the legacy SQLObject interface. --> |
4904 | - <adapter |
4905 | - factory="lp.services.webapp.snapshot.snapshot_sql_result" |
4906 | - for="storm.zope.interfaces.ISQLObjectResultSet" /> |
4907 | |
4908 | <class class="lp.services.webapp.publisher.RenamedView"> |
4909 | <allow interface="zope.publisher.interfaces.browser.IBrowserPublisher" |
4910 | diff --git a/lib/lp/services/webapp/database.zcml b/lib/lp/services/webapp/database.zcml |
4911 | index b7130a2..7a5ae12 100644 |
4912 | --- a/lib/lp/services/webapp/database.zcml |
4913 | +++ b/lib/lp/services/webapp/database.zcml |
4914 | @@ -60,9 +60,6 @@ |
4915 | <implements interface="lp.services.database.interfaces.IStore" /> |
4916 | <allow attributes="get" /> |
4917 | </class> |
4918 | - <class class="lp.services.database.sqlbase.SQLBase"> |
4919 | - <implements interface="lp.services.database.interfaces.IDBObject" /> |
4920 | - </class> |
4921 | <class class="lp.services.database.stormbase.StormBase"> |
4922 | <implements interface="lp.services.database.interfaces.IDBObject" /> |
4923 | </class> |
4924 | diff --git a/lib/lp/services/webapp/marshallers.py b/lib/lp/services/webapp/marshallers.py |
4925 | index 7f13a36..48ab7ea 100644 |
4926 | --- a/lib/lp/services/webapp/marshallers.py |
4927 | +++ b/lib/lp/services/webapp/marshallers.py |
4928 | @@ -6,14 +6,13 @@ __all__ = ["choiceMarshallerError"] |
4929 | |
4930 | def choiceMarshallerError(field, request, vocabulary=None): |
4931 | # We don't support marshalling a normal Choice field with a |
4932 | - # SQLObjectVocabularyBase/StormVocabularyBase-based vocabulary. |
4933 | + # StormVocabularyBase-based vocabulary. |
4934 | # Normally for this kind of use case, one returns None and |
4935 | # lets the Zope machinery alert the user that the lookup has gone wrong. |
4936 | # However, we want to be more helpful, so we make an assertion, |
4937 | # with a comment on how to make things better. |
4938 | raise AssertionError( |
4939 | - "You exported %s as an IChoice based on an " |
4940 | - "SQLObjectVocabularyBase/StormVocabularyBase; you " |
4941 | + "You exported %s as an IChoice based on a StormVocabularyBase; you " |
4942 | "should use lazr.restful.fields.ReferenceChoice " |
4943 | "instead." % field.__name__ |
4944 | ) |
4945 | diff --git a/lib/lp/services/webapp/snapshot.py b/lib/lp/services/webapp/snapshot.py |
4946 | index 2d46805..fd6aaa9 100644 |
4947 | --- a/lib/lp/services/webapp/snapshot.py |
4948 | +++ b/lib/lp/services/webapp/snapshot.py |
4949 | @@ -19,13 +19,12 @@ HARD_LIMIT_FOR_SNAPSHOT = 1000 |
4950 | |
4951 | |
4952 | @implementer(ISnapshotValueFactory) |
4953 | -@adapter(IResultSet) # And ISQLObjectResultSet. |
4954 | +@adapter(IResultSet) |
4955 | def snapshot_sql_result(value): |
4956 | """Snapshot adapter for the Storm result set.""" |
4957 | - # SQLMultipleJoin and SQLRelatedJoin return |
4958 | - # SelectResults, which doesn't really help the Snapshot |
4959 | - # object. We therefore list()ify the values; this isn't |
4960 | - # perfect but allows deltas to be generated reliably. |
4961 | + # ReferenceSet returns ResultSets, which doesn't really help the |
4962 | + # Snapshot object. We therefore list()ify the values; this isn't perfect |
4963 | + # but allows deltas to be generated reliably. |
4964 | return shortlist( |
4965 | value, longest_expected=100, hardlimit=HARD_LIMIT_FOR_SNAPSHOT |
4966 | ) |
4967 | diff --git a/lib/lp/services/webapp/tests/test_servers.py b/lib/lp/services/webapp/tests/test_servers.py |
4968 | index f865502..e777ac2 100644 |
4969 | --- a/lib/lp/services/webapp/tests/test_servers.py |
4970 | +++ b/lib/lp/services/webapp/tests/test_servers.py |
4971 | @@ -778,7 +778,7 @@ class LoggingTransaction: |
4972 | self.log.append("ABORT") |
4973 | |
4974 | |
4975 | -class TestWebServiceAccessTokens(TestCaseWithFactory): |
4976 | +class TestWebServiceAccessTokensBase: |
4977 | """Test personal access tokens for the webservice. |
4978 | |
4979 | These are bearer tokens with an owner, a context, and some scopes. We |
4980 | @@ -791,7 +791,9 @@ class TestWebServiceAccessTokens(TestCaseWithFactory): |
4981 | def test_valid(self): |
4982 | owner = self.factory.makePerson() |
4983 | secret, token = self.factory.makeAccessToken( |
4984 | - owner=owner, scopes=[AccessTokenScope.REPOSITORY_BUILD_STATUS] |
4985 | + owner=owner, |
4986 | + target=self.makeTarget(owner=owner), |
4987 | + scopes=[AccessTokenScope.REPOSITORY_BUILD_STATUS], |
4988 | ) |
4989 | self.assertIsNone(removeSecurityProxy(token).date_last_used) |
4990 | transaction.commit() |
4991 | @@ -828,6 +830,7 @@ class TestWebServiceAccessTokens(TestCaseWithFactory): |
4992 | owner = self.factory.makePerson() |
4993 | secret, token = self.factory.makeAccessToken( |
4994 | owner=owner, |
4995 | + target=self.makeTarget(owner=owner), |
4996 | date_expires=datetime.now(timezone.utc) - timedelta(days=1), |
4997 | ) |
4998 | transaction.commit() |
4999 | @@ -859,7 +862,9 @@ class TestWebServiceAccessTokens(TestCaseWithFactory): |
5000 |
The diff has been truncated for viewing.
LGTM!