Merge lp:~michael.nelson/launchpad/567922-fix-ec2-failures-after-db-devel-merge into lp:launchpad/db-devel
- 567922-fix-ec2-failures-after-db-devel-merge
- Merge into db-devel
Proposed by
Michael Nelson
Status: | Merged | ||||
---|---|---|---|---|---|
Approved by: | Michael Nelson | ||||
Approved revision: | no longer in the source branch. | ||||
Merged at revision: | 9405 | ||||
Proposed branch: | lp:~michael.nelson/launchpad/567922-fix-ec2-failures-after-db-devel-merge | ||||
Merge into: | lp:launchpad/db-devel | ||||
Prerequisite: | lp:~michael.nelson/launchpad/567922-build-gen-checkpoint | ||||
Diff against target: |
1064 lines (+168/-141) 20 files modified
lib/lp/buildmaster/configure.zcml (+1/-1) lib/lp/buildmaster/interfaces/packagebuild.py (+1/-1) lib/lp/buildmaster/model/buildbase.py (+8/-7) lib/lp/buildmaster/model/buildfarmjob.py (+1/-1) lib/lp/buildmaster/model/buildfarmjobbehavior.py (+1/-2) lib/lp/buildmaster/model/packagebuild.py (+3/-4) lib/lp/buildmaster/tests/test_buildbase.py (+1/-1) lib/lp/buildmaster/tests/test_buildqueue.py (+22/-12) lib/lp/buildmaster/tests/test_manager.py (+4/-4) lib/lp/soyuz/browser/tests/archive-views.txt (+6/-6) lib/lp/soyuz/doc/build-estimated-dispatch-time.txt (+4/-4) lib/lp/soyuz/doc/buildd-scoring.txt (+4/-2) lib/lp/soyuz/doc/buildd-slavescanner.txt (+62/-62) lib/lp/soyuz/model/binarypackagebuild.py (+16/-11) lib/lp/soyuz/model/binarypackagebuildbehavior.py (+2/-1) lib/lp/soyuz/scripts/buildd.py (+4/-4) lib/lp/soyuz/scripts/packagecopier.py (+4/-4) lib/lp/soyuz/scripts/tests/test_buildd_cronscripts.py (+13/-3) lib/lp/soyuz/scripts/tests/test_copypackage.py (+10/-10) lib/lp/soyuz/scripts/tests/test_populatearchive.py (+1/-1) |
||||
To merge this branch: | bzr merge lp:~michael.nelson/launchpad/567922-fix-ec2-failures-after-db-devel-merge | ||||
Related bugs: |
|
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
Abel Deuring (community) | code | Approve | |
Review via email: mp+25759@code.launchpad.net |
Commit message
Description of the change
This branch is part of a pipeline for
https:/
https:/
Overview
========
After getting most of the unit-tests running, I re-merged a fresh db-devel into my pipeline, resolved the conflicts, fixed the obvious errors and sent it off to ec2test. This branch starts to fix the new failures.
To test
=======
make schema
bin/test -vv -t test_buildfarmjob -t test_buildqueue -t test_manager -t test_buildd_
To post a comment you must log in.
Revision history for this message
Abel Deuring (adeuring) : | # |
review:
Approve
(code)
Preview Diff
[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1 | === modified file 'lib/lp/buildmaster/configure.zcml' | |||
2 | --- lib/lp/buildmaster/configure.zcml 2010-05-27 13:26:43 +0000 | |||
3 | +++ lib/lp/buildmaster/configure.zcml 2010-05-27 13:26:53 +0000 | |||
4 | @@ -87,7 +87,7 @@ | |||
5 | 87 | <!-- Grant permissions to write on BuildQueue and Build. --> | 87 | <!-- Grant permissions to write on BuildQueue and Build. --> |
6 | 88 | <require | 88 | <require |
7 | 89 | permission="zope.Public" | 89 | permission="zope.Public" |
9 | 90 | set_attributes="lastscore builder logtail date_started"/> | 90 | set_attributes="lastscore estimated_duration builder logtail date_started"/> |
10 | 91 | </class> | 91 | </class> |
11 | 92 | 92 | ||
12 | 93 | 93 | ||
13 | 94 | 94 | ||
14 | === modified file 'lib/lp/buildmaster/interfaces/packagebuild.py' | |||
15 | --- lib/lp/buildmaster/interfaces/packagebuild.py 2010-05-27 13:26:43 +0000 | |||
16 | +++ lib/lp/buildmaster/interfaces/packagebuild.py 2010-05-27 13:26:53 +0000 | |||
17 | @@ -82,7 +82,7 @@ | |||
18 | 82 | title=_("Distribution series"), required=True, | 82 | title=_("Distribution series"), required=True, |
19 | 83 | description=_("Shortcut for its distribution series."))) | 83 | description=_("Shortcut for its distribution series."))) |
20 | 84 | 84 | ||
22 | 85 | def getUploaderCommand(distro_series, upload_leaf, uploader_logfilename): | 85 | def getUploaderCommand(package_build, upload_leaf, uploader_logfilename): |
23 | 86 | """Get the command to run as the uploader. | 86 | """Get the command to run as the uploader. |
24 | 87 | 87 | ||
25 | 88 | :return: A list of command line arguments, beginning with the | 88 | :return: A list of command line arguments, beginning with the |
26 | 89 | 89 | ||
27 | === modified file 'lib/lp/buildmaster/model/buildbase.py' | |||
28 | --- lib/lp/buildmaster/model/buildbase.py 2010-05-27 13:26:43 +0000 | |||
29 | +++ lib/lp/buildmaster/model/buildbase.py 2010-05-27 13:26:53 +0000 | |||
30 | @@ -63,7 +63,8 @@ | |||
31 | 63 | """Return the directory that things will be stored in.""" | 63 | """Return the directory that things will be stored in.""" |
32 | 64 | return os.path.join(config.builddmaster.root, 'incoming', upload_leaf) | 64 | return os.path.join(config.builddmaster.root, 'incoming', upload_leaf) |
33 | 65 | 65 | ||
35 | 66 | def getUploaderCommand(self, upload_leaf, uploader_logfilename): | 66 | @staticmethod |
36 | 67 | def getUploaderCommand(build, upload_leaf, uploader_logfilename): | ||
37 | 67 | """See `IBuildBase`.""" | 68 | """See `IBuildBase`.""" |
38 | 68 | root = os.path.abspath(config.builddmaster.root) | 69 | root = os.path.abspath(config.builddmaster.root) |
39 | 69 | uploader_command = list(config.builddmaster.uploader.split()) | 70 | uploader_command = list(config.builddmaster.uploader.split()) |
40 | @@ -71,12 +72,12 @@ | |||
41 | 71 | # add extra arguments for processing a binary upload | 72 | # add extra arguments for processing a binary upload |
42 | 72 | extra_args = [ | 73 | extra_args = [ |
43 | 73 | "--log-file", "%s" % uploader_logfilename, | 74 | "--log-file", "%s" % uploader_logfilename, |
48 | 74 | "-d", "%s" % self.distribution.name, | 75 | "-d", "%s" % build.distribution.name, |
49 | 75 | "-s", "%s" % (self.distro_series.name + | 76 | "-s", "%s" % (build.distro_series.name + |
50 | 76 | pocketsuffix[self.pocket]), | 77 | pocketsuffix[build.pocket]), |
51 | 77 | "-b", "%s" % self.id, | 78 | "-b", "%s" % build.id, |
52 | 78 | "-J", "%s" % upload_leaf, | 79 | "-J", "%s" % upload_leaf, |
54 | 79 | '--context=%s' % self.policy_name, | 80 | '--context=%s' % build.policy_name, |
55 | 80 | "%s" % root, | 81 | "%s" % root, |
56 | 81 | ] | 82 | ] |
57 | 82 | 83 | ||
58 | @@ -204,7 +205,7 @@ | |||
59 | 204 | uploader_logfilename = os.path.join( | 205 | uploader_logfilename = os.path.join( |
60 | 205 | upload_dir, UPLOAD_LOG_FILENAME) | 206 | upload_dir, UPLOAD_LOG_FILENAME) |
61 | 206 | uploader_command = build.getUploaderCommand( | 207 | uploader_command = build.getUploaderCommand( |
63 | 207 | upload_leaf, uploader_logfilename) | 208 | build, upload_leaf, uploader_logfilename) |
64 | 208 | logger.debug("Saving uploader log at '%s'" % uploader_logfilename) | 209 | logger.debug("Saving uploader log at '%s'" % uploader_logfilename) |
65 | 209 | 210 | ||
66 | 210 | logger.info("Invoking uploader on %s" % root) | 211 | logger.info("Invoking uploader on %s" % root) |
67 | 211 | 212 | ||
68 | === modified file 'lib/lp/buildmaster/model/buildfarmjob.py' | |||
69 | --- lib/lp/buildmaster/model/buildfarmjob.py 2010-05-27 13:26:43 +0000 | |||
70 | +++ lib/lp/buildmaster/model/buildfarmjob.py 2010-05-27 13:26:53 +0000 | |||
71 | @@ -62,7 +62,7 @@ | |||
72 | 62 | """See `IBuildFarmJobOld`.""" | 62 | """See `IBuildFarmJobOld`.""" |
73 | 63 | raise NotImplementedError | 63 | raise NotImplementedError |
74 | 64 | 64 | ||
76 | 65 | def getByJob(self): | 65 | def getByJob(self, job): |
77 | 66 | """See `IBuildFarmJobOld`.""" | 66 | """See `IBuildFarmJobOld`.""" |
78 | 67 | raise NotImplementedError | 67 | raise NotImplementedError |
79 | 68 | 68 | ||
80 | 69 | 69 | ||
81 | === modified file 'lib/lp/buildmaster/model/buildfarmjobbehavior.py' | |||
82 | --- lib/lp/buildmaster/model/buildfarmjobbehavior.py 2010-05-27 13:26:43 +0000 | |||
83 | +++ lib/lp/buildmaster/model/buildfarmjobbehavior.py 2010-05-27 13:26:53 +0000 | |||
84 | @@ -184,8 +184,7 @@ | |||
85 | 184 | # XXX: dsilvers 2005-03-02: Confirm the builder has the right build? | 184 | # XXX: dsilvers 2005-03-02: Confirm the builder has the right build? |
86 | 185 | 185 | ||
87 | 186 | build = queueItem.specific_job.build | 186 | build = queueItem.specific_job.build |
90 | 187 | build.handleStatus( | 187 | build.handleStatus(build_status, librarian, slave_status) |
89 | 188 | build, build_status, librarian, slave_status) | ||
91 | 189 | 188 | ||
92 | 190 | 189 | ||
93 | 191 | class IdleBuildBehavior(BuildFarmJobBehaviorBase): | 190 | class IdleBuildBehavior(BuildFarmJobBehaviorBase): |
94 | 192 | 191 | ||
95 | === modified file 'lib/lp/buildmaster/model/packagebuild.py' | |||
96 | --- lib/lp/buildmaster/model/packagebuild.py 2010-05-27 13:26:43 +0000 | |||
97 | +++ lib/lp/buildmaster/model/packagebuild.py 2010-05-27 13:26:53 +0000 | |||
98 | @@ -122,11 +122,10 @@ | |||
99 | 122 | return BuildBase.getUploadDir(upload_leaf) | 122 | return BuildBase.getUploadDir(upload_leaf) |
100 | 123 | 123 | ||
101 | 124 | @staticmethod | 124 | @staticmethod |
104 | 125 | def getUploaderCommand(package_build, distro_series, upload_leaf, | 125 | def getUploaderCommand(package_build, upload_leaf, upload_logfilename): |
103 | 126 | upload_logfilename): | ||
105 | 127 | """See `IPackageBuild`.""" | 126 | """See `IPackageBuild`.""" |
106 | 128 | return BuildBase.getUploaderCommand( | 127 | return BuildBase.getUploaderCommand( |
108 | 129 | package_build, distro_series, upload_leaf, upload_logfilename) | 128 | package_build, upload_leaf, upload_logfilename) |
109 | 130 | 129 | ||
110 | 131 | @staticmethod | 130 | @staticmethod |
111 | 132 | def getLogFromSlave(package_build): | 131 | def getLogFromSlave(package_build): |
112 | @@ -182,7 +181,7 @@ | |||
113 | 182 | 181 | ||
114 | 183 | def queueBuild(self, suspended=False): | 182 | def queueBuild(self, suspended=False): |
115 | 184 | """See `IPackageBuild`.""" | 183 | """See `IPackageBuild`.""" |
117 | 185 | return BuildBase.queueBuild(self, suspended=False) | 184 | return BuildBase.queueBuild(self, suspended=suspended) |
118 | 186 | 185 | ||
119 | 187 | def handleStatus(self, status, librarian, slave_status): | 186 | def handleStatus(self, status, librarian, slave_status): |
120 | 188 | """See `IPackageBuild`.""" | 187 | """See `IPackageBuild`.""" |
121 | 189 | 188 | ||
122 | === modified file 'lib/lp/buildmaster/tests/test_buildbase.py' | |||
123 | --- lib/lp/buildmaster/tests/test_buildbase.py 2010-05-27 13:26:43 +0000 | |||
124 | +++ lib/lp/buildmaster/tests/test_buildbase.py 2010-05-27 13:26:53 +0000 | |||
125 | @@ -115,7 +115,7 @@ | |||
126 | 115 | os.path.abspath(config.builddmaster.root), | 115 | os.path.abspath(config.builddmaster.root), |
127 | 116 | ]) | 116 | ]) |
128 | 117 | uploader_command = build_base.getUploaderCommand( | 117 | uploader_command = build_base.getUploaderCommand( |
130 | 118 | upload_leaf, log_file) | 118 | build_base, upload_leaf, log_file) |
131 | 119 | self.assertEqual(config_args, uploader_command) | 119 | self.assertEqual(config_args, uploader_command) |
132 | 120 | 120 | ||
133 | 121 | 121 | ||
134 | 122 | 122 | ||
135 | === modified file 'lib/lp/buildmaster/tests/test_buildqueue.py' | |||
136 | --- lib/lp/buildmaster/tests/test_buildqueue.py 2010-04-21 11:51:26 +0000 | |||
137 | +++ lib/lp/buildmaster/tests/test_buildqueue.py 2010-05-27 13:26:53 +0000 | |||
138 | @@ -11,6 +11,7 @@ | |||
139 | 11 | from zope import component | 11 | from zope import component |
140 | 12 | from zope.component import getGlobalSiteManager, getUtility | 12 | from zope.component import getGlobalSiteManager, getUtility |
141 | 13 | from zope.interface.verify import verifyObject | 13 | from zope.interface.verify import verifyObject |
142 | 14 | from zope.security.proxy import removeSecurityProxy | ||
143 | 14 | 15 | ||
144 | 15 | from canonical.launchpad.webapp.interfaces import ( | 16 | from canonical.launchpad.webapp.interfaces import ( |
145 | 16 | IStoreSelector, MAIN_STORE, DEFAULT_FLAVOR) | 17 | IStoreSelector, MAIN_STORE, DEFAULT_FLAVOR) |
146 | @@ -45,7 +46,7 @@ | |||
147 | 45 | for build in test.builds: | 46 | for build in test.builds: |
148 | 46 | bq = build.buildqueue_record | 47 | bq = build.buildqueue_record |
149 | 47 | source = None | 48 | source = None |
151 | 48 | for attr in ('sourcepackagerelease', 'sourcepackagename'): | 49 | for attr in ('source_package_release', 'sourcepackagename'): |
152 | 49 | source = getattr(build, attr, None) | 50 | source = getattr(build, attr, None) |
153 | 50 | if source is not None: | 51 | if source is not None: |
154 | 51 | break | 52 | break |
155 | @@ -105,7 +106,7 @@ | |||
156 | 105 | # Monkey-patch BuildQueueSet._now() so it returns a constant time stamp | 106 | # Monkey-patch BuildQueueSet._now() so it returns a constant time stamp |
157 | 106 | # that's not too far in the future. This avoids spurious test failures. | 107 | # that's not too far in the future. This avoids spurious test failures. |
158 | 107 | monkey_patch_the_now_property(bq) | 108 | monkey_patch_the_now_property(bq) |
160 | 108 | delay = bq._estimateTimeToNextBuilder() | 109 | delay = removeSecurityProxy(bq)._estimateTimeToNextBuilder() |
161 | 109 | test.assertTrue( | 110 | test.assertTrue( |
162 | 110 | delay <= min_time, | 111 | delay <= min_time, |
163 | 111 | "Wrong min time to next available builder (%s > %s)" | 112 | "Wrong min time to next available builder (%s > %s)" |
164 | @@ -122,7 +123,8 @@ | |||
165 | 122 | def check_delay_for_job(test, the_job, delay): | 123 | def check_delay_for_job(test, the_job, delay): |
166 | 123 | # Obtain the builder statistics pertaining to this job. | 124 | # Obtain the builder statistics pertaining to this job. |
167 | 124 | builder_data = get_builder_data() | 125 | builder_data = get_builder_data() |
169 | 125 | estimated_delay = the_job._estimateJobDelay(builder_data) | 126 | estimated_delay = removeSecurityProxy(the_job)._estimateJobDelay( |
170 | 127 | builder_data) | ||
171 | 126 | test.assertEqual(delay, estimated_delay) | 128 | test.assertEqual(delay, estimated_delay) |
172 | 127 | 129 | ||
173 | 128 | 130 | ||
174 | @@ -144,12 +146,13 @@ | |||
175 | 144 | This avoids spurious test failures. | 146 | This avoids spurious test failures. |
176 | 145 | """ | 147 | """ |
177 | 146 | # Use the date/time the job started if available. | 148 | # Use the date/time the job started if available. |
178 | 149 | naked_buildqueue = removeSecurityProxy(buildqueue) | ||
179 | 147 | if buildqueue.job.date_started: | 150 | if buildqueue.job.date_started: |
180 | 148 | time_stamp = buildqueue.job.date_started | 151 | time_stamp = buildqueue.job.date_started |
181 | 149 | else: | 152 | else: |
183 | 150 | time_stamp = buildqueue._now() | 153 | time_stamp = naked_buildqueue._now() |
184 | 151 | 154 | ||
186 | 152 | buildqueue._now = FakeMethod(result=time_stamp) | 155 | naked_buildqueue._now = FakeMethod(result=time_stamp) |
187 | 153 | return time_stamp | 156 | return time_stamp |
188 | 154 | 157 | ||
189 | 155 | 158 | ||
190 | @@ -475,7 +478,10 @@ | |||
191 | 475 | # The build in question is an x86/native one. | 478 | # The build in question is an x86/native one. |
192 | 476 | self.assertEqual(self.x86_proc.id, build.processor.id) | 479 | self.assertEqual(self.x86_proc.id, build.processor.id) |
193 | 477 | self.assertEqual(False, build.is_virtualized) | 480 | self.assertEqual(False, build.is_virtualized) |
195 | 478 | bq = build.buildqueue_record | 481 | |
196 | 482 | # To test this non-interface method, we need to remove the | ||
197 | 483 | # security proxy. | ||
198 | 484 | bq = removeSecurityProxy(build.buildqueue_record) | ||
199 | 479 | builder_stats = get_builder_data() | 485 | builder_stats = get_builder_data() |
200 | 480 | # We have 4 x86 native builders. | 486 | # We have 4 x86 native builders. |
201 | 481 | self.assertEqual( | 487 | self.assertEqual( |
202 | @@ -511,7 +517,7 @@ | |||
203 | 511 | # will be free again. | 517 | # will be free again. |
204 | 512 | build, bq = find_job(self, 'flex') | 518 | build, bq = find_job(self, 'flex') |
205 | 513 | bq.reset() | 519 | bq.reset() |
207 | 514 | free_count = bq._getFreeBuildersCount( | 520 | free_count = removeSecurityProxy(bq)._getFreeBuildersCount( |
208 | 515 | build.processor, build.is_virtualized) | 521 | build.processor, build.is_virtualized) |
209 | 516 | self.assertEqual(1, free_count) | 522 | self.assertEqual(1, free_count) |
210 | 517 | 523 | ||
211 | @@ -1095,7 +1101,7 @@ | |||
212 | 1095 | for build in self.builds: | 1101 | for build in self.builds: |
213 | 1096 | bq = build.buildqueue_record | 1102 | bq = build.buildqueue_record |
214 | 1097 | if bq.processor == self.hppa_proc: | 1103 | if bq.processor == self.hppa_proc: |
216 | 1098 | bq.virtualized = True | 1104 | removeSecurityProxy(bq).virtualized = True |
217 | 1099 | job = self.factory.makeSourcePackageRecipeBuildJob( | 1105 | job = self.factory.makeSourcePackageRecipeBuildJob( |
218 | 1100 | virtualized=True, estimated_duration=332, | 1106 | virtualized=True, estimated_duration=332, |
219 | 1101 | sourcename='xxr-openssh-client', score=1050) | 1107 | sourcename='xxr-openssh-client', score=1050) |
220 | @@ -1111,7 +1117,8 @@ | |||
221 | 1111 | 1117 | ||
222 | 1112 | flex_build, flex_job = find_job(self, 'flex', 'hppa') | 1118 | flex_build, flex_job = find_job(self, 'flex', 'hppa') |
223 | 1113 | # The head job platform is the one of job #21 (xxr-openssh-client). | 1119 | # The head job platform is the one of job #21 (xxr-openssh-client). |
225 | 1114 | self.assertEquals((None, True), flex_job._getHeadJobPlatform()) | 1120 | self.assertEquals( |
226 | 1121 | (None, True), removeSecurityProxy(flex_job)._getHeadJobPlatform()) | ||
227 | 1115 | # The delay will be 900 (= 15*60) + 332 seconds | 1122 | # The delay will be 900 (= 15*60) + 332 seconds |
228 | 1116 | check_delay_for_job(self, flex_job, 1232) | 1123 | check_delay_for_job(self, flex_job, 1232) |
229 | 1117 | 1124 | ||
230 | @@ -1133,7 +1140,8 @@ | |||
231 | 1133 | # 20, xx-recipe-zsh, p: None, v:False e:0:03:42 *** s: 1053 | 1140 | # 20, xx-recipe-zsh, p: None, v:False e:0:03:42 *** s: 1053 |
232 | 1134 | 1141 | ||
233 | 1135 | # The newly added 'xxr-gwibber' job is the new head job now. | 1142 | # The newly added 'xxr-gwibber' job is the new head job now. |
235 | 1136 | self.assertEquals((None, None), flex_job._getHeadJobPlatform()) | 1143 | self.assertEquals( |
236 | 1144 | (None, None), removeSecurityProxy(flex_job)._getHeadJobPlatform()) | ||
237 | 1137 | # The newly added 'xxr-gwibber' job now weighs in as well and the | 1145 | # The newly added 'xxr-gwibber' job now weighs in as well and the |
238 | 1138 | # delay is 900 (= 15*60) + (332+111)/2 seconds | 1146 | # delay is 900 (= 15*60) + (332+111)/2 seconds |
239 | 1139 | check_delay_for_job(self, flex_job, 1121) | 1147 | check_delay_for_job(self, flex_job, 1121) |
240 | @@ -1142,7 +1150,9 @@ | |||
241 | 1142 | # 'xxr-openssh-client' jobs since the 'virtualized' values do not | 1150 | # 'xxr-openssh-client' jobs since the 'virtualized' values do not |
242 | 1143 | # match. | 1151 | # match. |
243 | 1144 | flex_build, flex_job = find_job(self, 'flex', '386') | 1152 | flex_build, flex_job = find_job(self, 'flex', '386') |
245 | 1145 | self.assertEquals((None, False), flex_job._getHeadJobPlatform()) | 1153 | self.assertEquals( |
246 | 1154 | (None, False), | ||
247 | 1155 | removeSecurityProxy(flex_job)._getHeadJobPlatform()) | ||
248 | 1146 | # delay is 960 (= 16*60) + 222 seconds | 1156 | # delay is 960 (= 16*60) + 222 seconds |
249 | 1147 | check_delay_for_job(self, flex_job, 1182) | 1157 | check_delay_for_job(self, flex_job, 1182) |
250 | 1148 | 1158 | ||
251 | @@ -1222,7 +1232,7 @@ | |||
252 | 1222 | for build in self.builds: | 1232 | for build in self.builds: |
253 | 1223 | bq = build.buildqueue_record | 1233 | bq = build.buildqueue_record |
254 | 1224 | if bq.processor == self.x86_proc: | 1234 | if bq.processor == self.x86_proc: |
256 | 1225 | bq.virtualized = True | 1235 | removeSecurityProxy(bq).virtualized = True |
257 | 1226 | 1236 | ||
258 | 1227 | def test_pending_jobs_only(self): | 1237 | def test_pending_jobs_only(self): |
259 | 1228 | # Let's see the assertion fail for a job that's not pending any more. | 1238 | # Let's see the assertion fail for a job that's not pending any more. |
260 | 1229 | 1239 | ||
261 | === modified file 'lib/lp/buildmaster/tests/test_manager.py' | |||
262 | --- lib/lp/buildmaster/tests/test_manager.py 2010-04-23 13:42:50 +0000 | |||
263 | +++ lib/lp/buildmaster/tests/test_manager.py 2010-05-27 13:26:53 +0000 | |||
264 | @@ -586,7 +586,7 @@ | |||
265 | 586 | self.assertTrue(job.date_started is not None) | 586 | self.assertTrue(job.date_started is not None) |
266 | 587 | self.assertEqual(job.job.status, JobStatus.RUNNING) | 587 | self.assertEqual(job.job.status, JobStatus.RUNNING) |
267 | 588 | build = getUtility(IBinaryPackageBuildSet).getByQueueEntry(job) | 588 | build = getUtility(IBinaryPackageBuildSet).getByQueueEntry(job) |
269 | 589 | self.assertEqual(build.buildstate, BuildStatus.BUILDING) | 589 | self.assertEqual(build.status, BuildStatus.BUILDING) |
270 | 590 | self.assertEqual(job.logtail, logtail) | 590 | self.assertEqual(job.logtail, logtail) |
271 | 591 | 591 | ||
272 | 592 | def _getManager(self): | 592 | def _getManager(self): |
273 | @@ -710,7 +710,7 @@ | |||
274 | 710 | self.assertTrue(job.builder is None) | 710 | self.assertTrue(job.builder is None) |
275 | 711 | self.assertTrue(job.date_started is None) | 711 | self.assertTrue(job.date_started is None) |
276 | 712 | build = getUtility(IBinaryPackageBuildSet).getByQueueEntry(job) | 712 | build = getUtility(IBinaryPackageBuildSet).getByQueueEntry(job) |
278 | 713 | self.assertEqual(build.buildstate, BuildStatus.NEEDSBUILD) | 713 | self.assertEqual(build.status, BuildStatus.NEEDSBUILD) |
279 | 714 | 714 | ||
280 | 715 | def testScanRescuesJobFromBrokenBuilder(self): | 715 | def testScanRescuesJobFromBrokenBuilder(self): |
281 | 716 | # The job assigned to a broken builder is rescued. | 716 | # The job assigned to a broken builder is rescued. |
282 | @@ -798,7 +798,7 @@ | |||
283 | 798 | 'i386 build of mozilla-firefox 0.9 in ubuntu hoary RELEASE', | 798 | 'i386 build of mozilla-firefox 0.9 in ubuntu hoary RELEASE', |
284 | 799 | build.title) | 799 | build.title) |
285 | 800 | 800 | ||
287 | 801 | self.assertEqual('BUILDING', build.buildstate.name) | 801 | self.assertEqual('BUILDING', build.status.name) |
288 | 802 | self.assertNotEqual(None, job.builder) | 802 | self.assertNotEqual(None, job.builder) |
289 | 803 | self.assertNotEqual(None, job.date_started) | 803 | self.assertNotEqual(None, job.date_started) |
290 | 804 | self.assertNotEqual(None, job.logtail) | 804 | self.assertNotEqual(None, job.logtail) |
291 | @@ -811,7 +811,7 @@ | |||
292 | 811 | """Re-fetch the `IBuildQueue` record and check if it's clean.""" | 811 | """Re-fetch the `IBuildQueue` record and check if it's clean.""" |
293 | 812 | job = getUtility(IBuildQueueSet).get(job_id) | 812 | job = getUtility(IBuildQueueSet).get(job_id) |
294 | 813 | build = getUtility(IBinaryPackageBuildSet).getByQueueEntry(job) | 813 | build = getUtility(IBinaryPackageBuildSet).getByQueueEntry(job) |
296 | 814 | self.assertEqual('NEEDSBUILD', build.buildstate.name) | 814 | self.assertEqual('NEEDSBUILD', build.status.name) |
297 | 815 | self.assertEqual(None, job.builder) | 815 | self.assertEqual(None, job.builder) |
298 | 816 | self.assertEqual(None, job.date_started) | 816 | self.assertEqual(None, job.date_started) |
299 | 817 | self.assertEqual(None, job.logtail) | 817 | self.assertEqual(None, job.logtail) |
300 | 818 | 818 | ||
301 | === modified file 'lib/lp/soyuz/browser/tests/archive-views.txt' | |||
302 | --- lib/lp/soyuz/browser/tests/archive-views.txt 2010-04-29 09:53:56 +0000 | |||
303 | +++ lib/lp/soyuz/browser/tests/archive-views.txt 2010-05-27 13:26:53 +0000 | |||
304 | @@ -361,7 +361,7 @@ | |||
305 | 361 | >>> warty_hppa = getUtility(IDistributionSet)['ubuntu']['warty']['hppa'] | 361 | >>> warty_hppa = getUtility(IDistributionSet)['ubuntu']['warty']['hppa'] |
306 | 362 | >>> source = view.filtered_sources[0] | 362 | >>> source = view.filtered_sources[0] |
307 | 363 | >>> ignore = source.sourcepackagerelease.createBuild( | 363 | >>> ignore = source.sourcepackagerelease.createBuild( |
309 | 364 | ... distroarchseries=warty_hppa, archive=view.context, | 364 | ... distro_arch_series=warty_hppa, archive=view.context, |
310 | 365 | ... pocket=source.pocket) | 365 | ... pocket=source.pocket) |
311 | 366 | >>> builds = getUtility(IBinaryPackageBuildSet).getBuildsForArchive( | 366 | >>> builds = getUtility(IBinaryPackageBuildSet).getBuildsForArchive( |
312 | 367 | ... view.context) | 367 | ... view.context) |
313 | @@ -374,9 +374,9 @@ | |||
314 | 374 | i386 build of cdrkit 1.0 in ubuntu breezy-autotest RELEASE | 374 | i386 build of cdrkit 1.0 in ubuntu breezy-autotest RELEASE |
315 | 375 | 375 | ||
316 | 376 | >>> login('foo.bar@canonical.com') | 376 | >>> login('foo.bar@canonical.com') |
320 | 377 | >>> builds[0].buildstate = BuildStatus.NEEDSBUILD | 377 | >>> builds[0].status = BuildStatus.NEEDSBUILD |
321 | 378 | >>> builds[1].buildstate = BuildStatus.BUILDING | 378 | >>> builds[1].status = BuildStatus.BUILDING |
322 | 379 | >>> builds[2].buildstate = BuildStatus.BUILDING | 379 | >>> builds[2].status = BuildStatus.BUILDING |
323 | 380 | >>> login(ANONYMOUS) | 380 | >>> login(ANONYMOUS) |
324 | 381 | 381 | ||
325 | 382 | >>> view.num_pkgs_building | 382 | >>> view.num_pkgs_building |
326 | @@ -386,7 +386,7 @@ | |||
327 | 386 | of packages that are currently building. | 386 | of packages that are currently building. |
328 | 387 | 387 | ||
329 | 388 | >>> login('foo.bar@canonical.com') | 388 | >>> login('foo.bar@canonical.com') |
331 | 389 | >>> builds[4].buildstate = BuildStatus.NEEDSBUILD | 389 | >>> builds[4].status = BuildStatus.NEEDSBUILD |
332 | 390 | >>> login(ANONYMOUS) | 390 | >>> login(ANONYMOUS) |
333 | 391 | >>> view.num_pkgs_building | 391 | >>> view.num_pkgs_building |
334 | 392 | {'building': 2, 'waiting': 1, 'total': 3} | 392 | {'building': 2, 'waiting': 1, 'total': 3} |
335 | @@ -395,7 +395,7 @@ | |||
336 | 395 | to be building: | 395 | to be building: |
337 | 396 | 396 | ||
338 | 397 | >>> login('foo.bar@canonical.com') | 397 | >>> login('foo.bar@canonical.com') |
340 | 398 | >>> builds[4].buildstate = BuildStatus.BUILDING | 398 | >>> builds[4].status = BuildStatus.BUILDING |
341 | 399 | >>> login(ANONYMOUS) | 399 | >>> login(ANONYMOUS) |
342 | 400 | >>> view.num_pkgs_building | 400 | >>> view.num_pkgs_building |
343 | 401 | {'building': 3, 'waiting': 0, 'total': 3} | 401 | {'building': 3, 'waiting': 0, 'total': 3} |
344 | 402 | 402 | ||
345 | === modified file 'lib/lp/soyuz/doc/build-estimated-dispatch-time.txt' | |||
346 | --- lib/lp/soyuz/doc/build-estimated-dispatch-time.txt 2010-04-09 15:46:09 +0000 | |||
347 | +++ lib/lp/soyuz/doc/build-estimated-dispatch-time.txt 2010-05-27 13:26:53 +0000 | |||
348 | @@ -50,7 +50,7 @@ | |||
349 | 50 | ... hoary.main_archive) | 50 | ... hoary.main_archive) |
350 | 51 | >>> alsa_bqueue = alsa_build.queueBuild() | 51 | >>> alsa_bqueue = alsa_build.queueBuild() |
351 | 52 | >>> alsa_bqueue.lastscore = 500 | 52 | >>> alsa_bqueue.lastscore = 500 |
353 | 53 | >>> alsa_build.buildstate = BuildStatus.NEEDSBUILD | 53 | >>> alsa_build.status = BuildStatus.NEEDSBUILD |
354 | 54 | 54 | ||
355 | 55 | Access the currently building job via the builder. | 55 | Access the currently building job via the builder. |
356 | 56 | 56 | ||
357 | @@ -64,7 +64,7 @@ | |||
358 | 64 | 64 | ||
359 | 65 | Make sure the job at hand is currently being built. | 65 | Make sure the job at hand is currently being built. |
360 | 66 | 66 | ||
362 | 67 | >>> cur_build.buildstate == BuildStatus.BUILDING | 67 | >>> cur_build.status == BuildStatus.BUILDING |
363 | 68 | True | 68 | True |
364 | 69 | 69 | ||
365 | 70 | The start time estimation mechanism for a pending job N depends on | 70 | The start time estimation mechanism for a pending job N depends on |
366 | @@ -124,7 +124,7 @@ | |||
367 | 124 | >>> pmount_bqueue.lastscore = 66 | 124 | >>> pmount_bqueue.lastscore = 66 |
368 | 125 | >>> removeSecurityProxy(pmount_bqueue).estimated_duration = ( | 125 | >>> removeSecurityProxy(pmount_bqueue).estimated_duration = ( |
369 | 126 | ... timedelta(minutes=12)) | 126 | ... timedelta(minutes=12)) |
371 | 127 | >>> pmount_build.buildstate = BuildStatus.NEEDSBUILD | 127 | >>> pmount_build.status = BuildStatus.NEEDSBUILD |
372 | 128 | 128 | ||
373 | 129 | Followed by another build for the 'iceweasel' source package that is added | 129 | Followed by another build for the 'iceweasel' source package that is added |
374 | 130 | to mark's PPA. | 130 | to mark's PPA. |
375 | @@ -142,7 +142,7 @@ | |||
376 | 142 | >>> removeSecurityProxy(iceweasel_bqueue).estimated_duration = ( | 142 | >>> removeSecurityProxy(iceweasel_bqueue).estimated_duration = ( |
377 | 143 | ... timedelta(minutes=48)) | 143 | ... timedelta(minutes=48)) |
378 | 144 | >>> iceweasel_bqueue.lastscore = 666 | 144 | >>> iceweasel_bqueue.lastscore = 666 |
380 | 145 | >>> iceweasel_build.buildstate = BuildStatus.NEEDSBUILD | 145 | >>> iceweasel_build.status = BuildStatus.NEEDSBUILD |
381 | 146 | 146 | ||
382 | 147 | Since the 'iceweasel' build has a higher score (666) than the 'pmount' | 147 | Since the 'iceweasel' build has a higher score (666) than the 'pmount' |
383 | 148 | build (66) its estimated dispatch time is essentially "now". | 148 | build (66) its estimated dispatch time is essentially "now". |
384 | 149 | 149 | ||
385 | === modified file 'lib/lp/soyuz/doc/buildd-scoring.txt' | |||
386 | --- lib/lp/soyuz/doc/buildd-scoring.txt 2010-04-09 15:46:09 +0000 | |||
387 | +++ lib/lp/soyuz/doc/buildd-scoring.txt 2010-05-27 13:26:53 +0000 | |||
388 | @@ -65,8 +65,10 @@ | |||
389 | 65 | ... hoary386, pub.pocket, pub.archive) | 65 | ... hoary386, pub.pocket, pub.archive) |
390 | 66 | ... | 66 | ... |
391 | 67 | ... build_queue = build.queueBuild() | 67 | ... build_queue = build.queueBuild() |
394 | 68 | ... build_queue.job.date_created = date_created | 68 | ... from zope.security.proxy import removeSecurityProxy |
395 | 69 | ... build_queue.manual = manual | 69 | ... naked_build_queue = removeSecurityProxy(build_queue) |
396 | 70 | ... naked_build_queue.job.date_created = date_created | ||
397 | 71 | ... naked_build_queue.manual = manual | ||
398 | 70 | ... | 72 | ... |
399 | 71 | ... return build_queue | 73 | ... return build_queue |
400 | 72 | 74 | ||
401 | 73 | 75 | ||
402 | === modified file 'lib/lp/soyuz/doc/buildd-slavescanner.txt' | |||
403 | --- lib/lp/soyuz/doc/buildd-slavescanner.txt 2010-05-27 13:26:43 +0000 | |||
404 | +++ lib/lp/soyuz/doc/buildd-slavescanner.txt 2010-05-27 13:26:53 +0000 | |||
405 | @@ -90,8 +90,7 @@ | |||
406 | 90 | 90 | ||
407 | 91 | >>> default_start = datetime.datetime(2005, 1, 1, 8, 0, 0, tzinfo=UTC) | 91 | >>> default_start = datetime.datetime(2005, 1, 1, 8, 0, 0, tzinfo=UTC) |
408 | 92 | >>> def setupBuildQueue(build_queue, builder): | 92 | >>> def setupBuildQueue(build_queue, builder): |
411 | 93 | ... build_queue.builder = builder | 93 | ... build_queue.markAsBuilding(builder) |
410 | 94 | ... build_queue.setDateStarted(default_start) | ||
412 | 95 | 94 | ||
413 | 96 | Remove any previous buildmaster ROOT directory, to avoid any garbage | 95 | Remove any previous buildmaster ROOT directory, to avoid any garbage |
414 | 97 | lock conflict (it would be recreated automatically if necessary) | 96 | lock conflict (it would be recreated automatically if necessary) |
415 | @@ -105,7 +104,7 @@ | |||
416 | 105 | Let's check the procedures to verify/collect running build process: | 104 | Let's check the procedures to verify/collect running build process: |
417 | 106 | 105 | ||
418 | 107 | WAITING - PACKAGEFAIL -> Package has failed to build, notice from | 106 | WAITING - PACKAGEFAIL -> Package has failed to build, notice from |
420 | 108 | builder is stored, but Build.buildstate is marked as 'Failed to Build': | 107 | builder is stored, but Build.status is marked as 'Failed to Build': |
421 | 109 | 108 | ||
422 | 110 | Get a builder from the sample data: | 109 | Get a builder from the sample data: |
423 | 111 | 110 | ||
424 | @@ -139,19 +138,19 @@ | |||
425 | 139 | >>> a_builder.updateBuild(bqItem3) | 138 | >>> a_builder.updateBuild(bqItem3) |
426 | 140 | >>> build.builder is not None | 139 | >>> build.builder is not None |
427 | 141 | True | 140 | True |
433 | 142 | >>> build.datebuilt is not None | 141 | >>> build.date_finished is not None |
434 | 143 | True | 142 | True |
435 | 144 | >>> build.buildduration is not None | 143 | >>> build.duration is not None |
436 | 145 | True | 144 | True |
437 | 146 | >>> build.buildlog is not None | 145 | >>> build.log is not None |
438 | 147 | True | 146 | True |
439 | 148 | >>> check_mail_sent(last_stub_mail_count) | 147 | >>> check_mail_sent(last_stub_mail_count) |
440 | 149 | True | 148 | True |
442 | 150 | >>> build.buildstate.title | 149 | >>> build.status.title |
443 | 151 | 'Failed to build' | 150 | 'Failed to build' |
444 | 152 | 151 | ||
445 | 153 | WAITING - DEPWAIT -> a required dependency is missing, again notice | 152 | WAITING - DEPWAIT -> a required dependency is missing, again notice |
447 | 154 | from builder, but Build.buildstate has the right state: | 153 | from builder, but Build.status has the right state: |
448 | 155 | 154 | ||
449 | 156 | >>> bqItem4 = a_build.queueBuild() | 155 | >>> bqItem4 = a_build.queueBuild() |
450 | 157 | >>> setupBuildQueue(bqItem4, a_builder) | 156 | >>> setupBuildQueue(bqItem4, a_builder) |
451 | @@ -170,17 +169,17 @@ | |||
452 | 170 | CRITICAL:slave-scanner:***** bob is MANUALDEPWAIT ***** | 169 | CRITICAL:slave-scanner:***** bob is MANUALDEPWAIT ***** |
453 | 171 | >>> build.builder is not None | 170 | >>> build.builder is not None |
454 | 172 | True | 171 | True |
460 | 173 | >>> build.datebuilt is not None | 172 | >>> build.date_finished is not None |
461 | 174 | True | 173 | True |
462 | 175 | >>> build.buildduration is not None | 174 | >>> build.duration is not None |
463 | 176 | True | 175 | True |
464 | 177 | >>> build.buildlog is not None | 176 | >>> build.log is not None |
465 | 178 | True | 177 | True |
466 | 179 | >>> check_mail_sent(last_stub_mail_count) | 178 | >>> check_mail_sent(last_stub_mail_count) |
467 | 180 | False | 179 | False |
468 | 181 | >>> build.dependencies | 180 | >>> build.dependencies |
469 | 182 | u'baz (>= 1.0.1)' | 181 | u'baz (>= 1.0.1)' |
471 | 183 | >>> build.buildstate.title | 182 | >>> build.status.title |
472 | 184 | 'Dependency wait' | 183 | 'Dependency wait' |
473 | 185 | 184 | ||
474 | 186 | WAITING - CHROOTFAIL -> the Chroot for this distroseries is damaged, nor | 185 | WAITING - CHROOTFAIL -> the Chroot for this distroseries is damaged, nor |
475 | @@ -199,15 +198,15 @@ | |||
476 | 199 | CRITICAL:slave-scanner:***** bob is CHROOTWAIT ***** | 198 | CRITICAL:slave-scanner:***** bob is CHROOTWAIT ***** |
477 | 200 | >>> build.builder is not None | 199 | >>> build.builder is not None |
478 | 201 | True | 200 | True |
484 | 202 | >>> build.datebuilt is not None | 201 | >>> build.date_finished is not None |
485 | 203 | True | 202 | True |
486 | 204 | >>> build.buildduration is not None | 203 | >>> build.duration is not None |
487 | 205 | True | 204 | True |
488 | 206 | >>> build.buildlog is not None | 205 | >>> build.log is not None |
489 | 207 | True | 206 | True |
490 | 208 | >>> check_mail_sent(last_stub_mail_count) | 207 | >>> check_mail_sent(last_stub_mail_count) |
491 | 209 | True | 208 | True |
493 | 210 | >>> build.buildstate.title | 209 | >>> build.status.title |
494 | 211 | 'Chroot problem' | 210 | 'Chroot problem' |
495 | 212 | 211 | ||
496 | 213 | WAITING - BUILDERFAIL -> builder has failed by internal error, job is available for next build round: | 212 | WAITING - BUILDERFAIL -> builder has failed by internal error, job is available for next build round: |
497 | @@ -234,7 +233,7 @@ | |||
498 | 234 | >>> check_mail_sent(last_stub_mail_count) | 233 | >>> check_mail_sent(last_stub_mail_count) |
499 | 235 | False | 234 | False |
500 | 236 | >>> build = getUtility(IBinaryPackageBuildSet).getByQueueEntry(bqItem6) | 235 | >>> build = getUtility(IBinaryPackageBuildSet).getByQueueEntry(bqItem6) |
502 | 237 | >>> print build.buildstate.title | 236 | >>> print build.status.title |
503 | 238 | Needs building | 237 | Needs building |
504 | 239 | >>> job = bqItem6.specific_job.job | 238 | >>> job = bqItem6.specific_job.job |
505 | 240 | >>> print job.status.title | 239 | >>> print job.status.title |
506 | @@ -286,7 +285,7 @@ | |||
507 | 286 | >>> a_builder.updateBuild(bqItem8) | 285 | >>> a_builder.updateBuild(bqItem8) |
508 | 287 | >>> bqItem8.builder is None | 286 | >>> bqItem8.builder is None |
509 | 288 | True | 287 | True |
511 | 289 | >>> print bqItem8.specific_job.build.buildstate.name | 288 | >>> print bqItem8.specific_job.build.status.name |
512 | 290 | NEEDSBUILD | 289 | NEEDSBUILD |
513 | 291 | 290 | ||
514 | 292 | Cleanup in preparation for the next test: | 291 | Cleanup in preparation for the next test: |
515 | @@ -347,15 +346,15 @@ | |||
516 | 347 | WARNING:slave-scanner:Build ... upload failed. | 346 | WARNING:slave-scanner:Build ... upload failed. |
517 | 348 | >>> build.builder is not None | 347 | >>> build.builder is not None |
518 | 349 | True | 348 | True |
524 | 350 | >>> build.datebuilt is not None | 349 | >>> build.date_finished is not None |
525 | 351 | True | 350 | True |
526 | 352 | >>> build.buildduration is not None | 351 | >>> build.duration is not None |
527 | 353 | True | 352 | True |
528 | 354 | >>> build.buildlog is not None | 353 | >>> build.log is not None |
529 | 355 | True | 354 | True |
530 | 356 | >>> check_mail_sent(last_stub_mail_count) | 355 | >>> check_mail_sent(last_stub_mail_count) |
531 | 357 | True | 356 | True |
533 | 358 | >>> build.buildstate.title | 357 | >>> build.status.title |
534 | 359 | 'Failed to upload' | 358 | 'Failed to upload' |
535 | 360 | 359 | ||
536 | 361 | Let's check the emails generated by this 'failure' | 360 | Let's check the emails generated by this 'failure' |
537 | @@ -380,7 +379,7 @@ | |||
538 | 380 | ... | 379 | ... |
539 | 381 | X-Launchpad-Build-State: FAILEDTOUPLOAD | 380 | X-Launchpad-Build-State: FAILEDTOUPLOAD |
540 | 382 | ... | 381 | ... |
542 | 383 | * Build Log: http://.../...i386.mozilla-firefox_0.9_NEEDSBUILD.txt.gz | 382 | * Build Log: http://.../...i386.mozilla-firefox_0.9_BUILDING.txt.gz |
543 | 384 | ... | 383 | ... |
544 | 385 | Upload log: | 384 | Upload log: |
545 | 386 | INFO Creating lockfile:... | 385 | INFO Creating lockfile:... |
546 | @@ -396,7 +395,7 @@ | |||
547 | 396 | >>> build.upload_log is not None | 395 | >>> build.upload_log is not None |
548 | 397 | True | 396 | True |
549 | 398 | 397 | ||
551 | 399 | What we can clearly notice is that the buildlog still contains | 398 | What we can clearly notice is that the log still contains |
552 | 400 | the old build state (BUILDING) in its name. This is a minor problem | 399 | the old build state (BUILDING) in its name. This is a minor problem |
553 | 401 | that can be sorted by modifying the execution order of procedures | 400 | that can be sorted by modifying the execution order of procedures |
554 | 402 | inside Buildergroup.buildStatus_OK method. | 401 | inside Buildergroup.buildStatus_OK method. |
555 | @@ -431,7 +430,7 @@ | |||
556 | 431 | the build record to FULLYBUILT, as the process-upload would do: | 430 | the build record to FULLYBUILT, as the process-upload would do: |
557 | 432 | 431 | ||
558 | 433 | >>> from lp.buildmaster.interfaces.buildbase import BuildStatus | 432 | >>> from lp.buildmaster.interfaces.buildbase import BuildStatus |
560 | 434 | >>> build.buildstate = BuildStatus.FULLYBUILT | 433 | >>> build.status = BuildStatus.FULLYBUILT |
561 | 435 | 434 | ||
562 | 436 | Now the updateBuild should recognize this build record as a | 435 | Now the updateBuild should recognize this build record as a |
563 | 437 | Successfully built and uploaded procedure, not sending any | 436 | Successfully built and uploaded procedure, not sending any |
564 | @@ -440,13 +439,13 @@ | |||
565 | 440 | >>> a_builder.updateBuild(bqItem10) | 439 | >>> a_builder.updateBuild(bqItem10) |
566 | 441 | >>> build.builder is not None | 440 | >>> build.builder is not None |
567 | 442 | True | 441 | True |
575 | 443 | >>> build.datebuilt is not None | 442 | >>> build.date_finished is not None |
576 | 444 | True | 443 | True |
577 | 445 | >>> build.buildduration is not None | 444 | >>> build.duration is not None |
578 | 446 | True | 445 | True |
579 | 447 | >>> build.buildlog is not None | 446 | >>> build.log is not None |
580 | 448 | True | 447 | True |
581 | 449 | >>> build.buildstate.title | 448 | >>> build.status.title |
582 | 450 | 'Successfully built' | 449 | 'Successfully built' |
583 | 451 | >>> check_mail_sent(last_stub_mail_count) | 450 | >>> check_mail_sent(last_stub_mail_count) |
584 | 452 | False | 451 | False |
585 | @@ -484,7 +483,7 @@ | |||
586 | 484 | >>> check_mail_sent(last_stub_mail_count) | 483 | >>> check_mail_sent(last_stub_mail_count) |
587 | 485 | False | 484 | False |
588 | 486 | >>> build = getUtility(IBinaryPackageBuildSet).getByQueueEntry(bqItem11) | 485 | >>> build = getUtility(IBinaryPackageBuildSet).getByQueueEntry(bqItem11) |
590 | 487 | >>> print build.buildstate.title | 486 | >>> print build.status.title |
591 | 488 | Needs building | 487 | Needs building |
592 | 489 | >>> job = bqItem11.specific_job.job | 488 | >>> job = bqItem11.specific_job.job |
593 | 490 | >>> print job.status.title | 489 | >>> print job.status.title |
594 | @@ -517,23 +516,24 @@ | |||
595 | 517 | 516 | ||
596 | 518 | >>> bqItem12.builder = None | 517 | >>> bqItem12.builder = None |
597 | 519 | 518 | ||
599 | 520 | The buildlog is collected and compressed locally using gzip algorithm, | 519 | The log is collected and compressed locally using gzip algorithm, |
600 | 521 | let's see how this method works: | 520 | let's see how this method works: |
601 | 522 | 521 | ||
602 | 523 | >>> bqItem10 = getUtility(IBinaryPackageBuildSet).getByBuildID( | 522 | >>> bqItem10 = getUtility(IBinaryPackageBuildSet).getByBuildID( |
603 | 524 | ... 6).queueBuild() | 523 | ... 6).queueBuild() |
604 | 525 | >>> setupBuildQueue(bqItem10, a_builder) | 524 | >>> setupBuildQueue(bqItem10, a_builder) |
605 | 525 | >>> build = bqItem10.specific_job.build | ||
606 | 526 | >>> build.status = BuildStatus.FULLYBUILT | ||
607 | 526 | >>> bqItem10.builder.setSlaveForTesting(WaitingSlave('BuildStatus.OK')) | 527 | >>> bqItem10.builder.setSlaveForTesting(WaitingSlave('BuildStatus.OK')) |
608 | 527 | 528 | ||
610 | 528 | Before collecting and processing the buildlog we will store the files | 529 | Before collecting and processing the log we will store the files |
611 | 529 | already created in /tmp so we can verify later that this mechanism is | 530 | already created in /tmp so we can verify later that this mechanism is |
612 | 530 | not leaving any temporary file behind. See bug #172798. | 531 | not leaving any temporary file behind. See bug #172798. |
613 | 531 | 532 | ||
614 | 532 | >>> old_tmps = os.listdir('/tmp') | 533 | >>> old_tmps = os.listdir('/tmp') |
615 | 533 | 534 | ||
617 | 534 | Collect and process the buildlog. | 535 | Collect and process the log. |
618 | 535 | 536 | ||
619 | 536 | >>> build = bqItem10.specific_job.build | ||
620 | 537 | >>> logfile_alias = build.getLogFromSlave(build) | 537 | >>> logfile_alias = build.getLogFromSlave(build) |
621 | 538 | 538 | ||
622 | 539 | Audit the /tmp for lost temporary files, there should not be any new | 539 | Audit the /tmp for lost temporary files, there should not be any new |
623 | @@ -543,7 +543,7 @@ | |||
624 | 543 | >>> sorted(os.listdir('/tmp')) == sorted(old_tmps) | 543 | >>> sorted(os.listdir('/tmp')) == sorted(old_tmps) |
625 | 544 | True | 544 | True |
626 | 545 | 545 | ||
628 | 546 | The buildlog was compressed and directly transferred to Librarian. | 546 | The log was compressed and directly transferred to Librarian. |
629 | 547 | 547 | ||
630 | 548 | >>> from canonical.launchpad.interfaces import ILibraryFileAliasSet | 548 | >>> from canonical.launchpad.interfaces import ILibraryFileAliasSet |
631 | 549 | >>> logfile = getUtility(ILibraryFileAliasSet)[logfile_alias] | 549 | >>> logfile = getUtility(ILibraryFileAliasSet)[logfile_alias] |
632 | @@ -558,7 +558,7 @@ | |||
633 | 558 | 558 | ||
634 | 559 | >>> commit() | 559 | >>> commit() |
635 | 560 | 560 | ||
637 | 561 | Check if the buildlog content is correct and accessible via the | 561 | Check if the log content is correct and accessible via the |
638 | 562 | library file directly and via Librarian http front-end. | 562 | library file directly and via Librarian http front-end. |
639 | 563 | 563 | ||
640 | 564 | Since LibrarianFileAlias does not implement required attributes for | 564 | Since LibrarianFileAlias does not implement required attributes for |
641 | @@ -592,7 +592,7 @@ | |||
642 | 592 | 592 | ||
643 | 593 | >>> os.remove(fname) | 593 | >>> os.remove(fname) |
644 | 594 | 594 | ||
646 | 595 | The Librarian serves buildlog files with 'gzip' content-encoding and | 595 | The Librarian serves log files with 'gzip' content-encoding and |
647 | 596 | 'text/plain' content-type. This combination instructs the browser to | 596 | 'text/plain' content-type. This combination instructs the browser to |
648 | 597 | decompress the file and display it inline, which makes it easier for | 597 | decompress the file and display it inline, which makes it easier for |
649 | 598 | users to view it. | 598 | users to view it. |
650 | @@ -700,7 +700,7 @@ | |||
651 | 700 | >>> current_job = a_builder.currentjob | 700 | >>> current_job = a_builder.currentjob |
652 | 701 | >>> resurrect_build = getUtility(IBinaryPackageBuildSet).getByQueueEntry( | 701 | >>> resurrect_build = getUtility(IBinaryPackageBuildSet).getByQueueEntry( |
653 | 702 | ... current_job) | 702 | ... current_job) |
655 | 703 | >>> resurrect_build.buildstate = BuildStatus.NEEDSBUILD | 703 | >>> resurrect_build.status = BuildStatus.NEEDSBUILD |
656 | 704 | >>> syncUpdate(resurrect_build) | 704 | >>> syncUpdate(resurrect_build) |
657 | 705 | >>> current_job.builder = None | 705 | >>> current_job.builder = None |
658 | 706 | >>> current_job.setDateStarted(None) | 706 | >>> current_job.setDateStarted(None) |
659 | @@ -713,7 +713,7 @@ | |||
660 | 713 | >>> old_candidate = removeSecurityProxy(a_builder)._findBuildCandidate() | 713 | >>> old_candidate = removeSecurityProxy(a_builder)._findBuildCandidate() |
661 | 714 | >>> build = getUtility(IBinaryPackageBuildSet).getByQueueEntry( | 714 | >>> build = getUtility(IBinaryPackageBuildSet).getByQueueEntry( |
662 | 715 | ... old_candidate) | 715 | ... old_candidate) |
664 | 716 | >>> print build.buildstate.name | 716 | >>> print build.status.name |
665 | 717 | NEEDSBUILD | 717 | NEEDSBUILD |
666 | 718 | 718 | ||
667 | 719 | The 'candidate' is constant until we dispatch it. | 719 | The 'candidate' is constant until we dispatch it. |
668 | @@ -747,7 +747,7 @@ | |||
669 | 747 | >>> from canonical.launchpad.interfaces import PackagePublishingStatus | 747 | >>> from canonical.launchpad.interfaces import PackagePublishingStatus |
670 | 748 | >>> from canonical.testing.layers import LaunchpadZopelessLayer | 748 | >>> from canonical.testing.layers import LaunchpadZopelessLayer |
671 | 749 | 749 | ||
673 | 750 | >>> spr = build.sourcepackagerelease | 750 | >>> spr = build.source_package_release |
674 | 751 | >>> pub = removeSecurityProxy(build).current_source_publication | 751 | >>> pub = removeSecurityProxy(build).current_source_publication |
675 | 752 | >>> commit() | 752 | >>> commit() |
676 | 753 | >>> LaunchpadZopelessLayer.switchDbUser('launchpad') | 753 | >>> LaunchpadZopelessLayer.switchDbUser('launchpad') |
677 | @@ -764,7 +764,7 @@ | |||
678 | 764 | Because the 'previous' candidate was marked as superseded, so it's not | 764 | Because the 'previous' candidate was marked as superseded, so it's not |
679 | 765 | part of the candidates list anymore. | 765 | part of the candidates list anymore. |
680 | 766 | 766 | ||
682 | 767 | >>> print build.buildstate.name | 767 | >>> print build.status.name |
683 | 768 | SUPERSEDED | 768 | SUPERSEDED |
684 | 769 | 769 | ||
685 | 770 | If the candidate is for a private build whose source has not been | 770 | If the candidate is for a private build whose source has not been |
686 | @@ -825,7 +825,7 @@ | |||
687 | 825 | 825 | ||
688 | 826 | >>> build = getUtility(IBinaryPackageBuildSet).getByQueueEntry( | 826 | >>> build = getUtility(IBinaryPackageBuildSet).getByQueueEntry( |
689 | 827 | ... current_job) | 827 | ... current_job) |
691 | 828 | >>> print build.buildstate.name | 828 | >>> print build.status.name |
692 | 829 | NEEDSBUILD | 829 | NEEDSBUILD |
693 | 830 | 830 | ||
694 | 831 | >>> another_candidate = removeSecurityProxy( | 831 | >>> another_candidate = removeSecurityProxy( |
695 | @@ -833,7 +833,7 @@ | |||
696 | 833 | >>> print another_candidate | 833 | >>> print another_candidate |
697 | 834 | None | 834 | None |
698 | 835 | 835 | ||
700 | 836 | >>> print build.buildstate.name | 836 | >>> print build.status.name |
701 | 837 | SUPERSEDED | 837 | SUPERSEDED |
702 | 838 | 838 | ||
703 | 839 | We'll reset the archive back to non-private for further tests: | 839 | We'll reset the archive back to non-private for further tests: |
704 | @@ -1060,7 +1060,7 @@ | |||
705 | 1060 | >>> login('foo.bar@canonical.com') | 1060 | >>> login('foo.bar@canonical.com') |
706 | 1061 | >>> build = getUtility(IBinaryPackageBuildSet).getByQueueEntry( | 1061 | >>> build = getUtility(IBinaryPackageBuildSet).getByQueueEntry( |
707 | 1062 | ... candidate) | 1062 | ... candidate) |
709 | 1063 | >>> for build_file in build.sourcepackagerelease.files: | 1063 | >>> for build_file in build.source_package_release.files: |
710 | 1064 | ... removeSecurityProxy(build_file).libraryfile.restricted = True | 1064 | ... removeSecurityProxy(build_file).libraryfile.restricted = True |
711 | 1065 | >>> private_ppa = factory.makeArchive( | 1065 | >>> private_ppa = factory.makeArchive( |
712 | 1066 | ... owner=cprov_archive.owner, name='pppa', private=True, | 1066 | ... owner=cprov_archive.owner, name='pppa', private=True, |
713 | @@ -1121,7 +1121,7 @@ | |||
714 | 1121 | We will create an ancestry in the primary archive target to the 'main' | 1121 | We will create an ancestry in the primary archive target to the 'main' |
715 | 1122 | component and this time the dispatching will follow that component. | 1122 | component and this time the dispatching will follow that component. |
716 | 1123 | 1123 | ||
718 | 1124 | >>> sourcename = build.sourcepackagerelease.name | 1124 | >>> sourcename = build.source_package_release.name |
719 | 1125 | 1125 | ||
720 | 1126 | >>> LaunchpadZopelessLayer.switchDbUser('launchpad') | 1126 | >>> LaunchpadZopelessLayer.switchDbUser('launchpad') |
721 | 1127 | >>> login('foo.bar@canonical.com') | 1127 | >>> login('foo.bar@canonical.com') |
722 | @@ -1147,7 +1147,7 @@ | |||
723 | 1147 | 1147 | ||
724 | 1148 | >>> candidate.destroySelf() | 1148 | >>> candidate.destroySelf() |
725 | 1149 | 1149 | ||
727 | 1150 | Since this is a build in a private archive, the buildlog was uploaded to | 1150 | Since this is a build in a private archive, the log was uploaded to |
728 | 1151 | the restricted librarian. | 1151 | the restricted librarian. |
729 | 1152 | 1152 | ||
730 | 1153 | >>> candidate = a_build.queueBuild() | 1153 | >>> candidate = a_build.queueBuild() |
731 | @@ -1161,7 +1161,7 @@ | |||
732 | 1161 | >>> build.archive.private | 1161 | >>> build.archive.private |
733 | 1162 | True | 1162 | True |
734 | 1163 | 1163 | ||
736 | 1164 | >>> lfa = build.buildlog | 1164 | >>> lfa = build.log |
737 | 1165 | >>> lfa.restricted | 1165 | >>> lfa.restricted |
738 | 1166 | True | 1166 | True |
739 | 1167 | >>> print lfa.filename | 1167 | >>> print lfa.filename |
740 | @@ -1177,7 +1177,7 @@ | |||
741 | 1177 | ... | 1177 | ... |
742 | 1178 | DownloadFailed: Alias ... cannot be downloaded from this client. | 1178 | DownloadFailed: Alias ... cannot be downloaded from this client. |
743 | 1179 | 1179 | ||
745 | 1180 | Accessing the buildlog via the restricted librarian will work as expected. | 1180 | Accessing the log via the restricted librarian will work as expected. |
746 | 1181 | 1181 | ||
747 | 1182 | >>> import urlparse | 1182 | >>> import urlparse |
748 | 1183 | >>> from canonical.librarian.interfaces import IRestrictedLibrarianClient | 1183 | >>> from canonical.librarian.interfaces import IRestrictedLibrarianClient |
749 | @@ -1199,7 +1199,7 @@ | |||
750 | 1199 | 1199 | ||
751 | 1200 | >>> removeSecurityProxy(build).archive = cprov_archive | 1200 | >>> removeSecurityProxy(build).archive = cprov_archive |
752 | 1201 | >>> cprov_archive.require_virtualized = True | 1201 | >>> cprov_archive.require_virtualized = True |
754 | 1202 | >>> for build_file in a_build.sourcepackagerelease.files: | 1202 | >>> for build_file in a_build.source_package_release.files: |
755 | 1203 | ... removeSecurityProxy(build_file).libraryfile.restricted = False | 1203 | ... removeSecurityProxy(build_file).libraryfile.restricted = False |
756 | 1204 | >>> mark_archive = getUtility(IPersonSet).getByName('mark').archive | 1204 | >>> mark_archive = getUtility(IPersonSet).getByName('mark').archive |
757 | 1205 | 1205 | ||
758 | @@ -1289,7 +1289,7 @@ | |||
759 | 1289 | >>> hoary_evo = hoary.getSourcePackage( | 1289 | >>> hoary_evo = hoary.getSourcePackage( |
760 | 1290 | ... 'evolution').currentrelease.sourcepackagerelease | 1290 | ... 'evolution').currentrelease.sourcepackagerelease |
761 | 1291 | >>> updates_build = hoary_evo.createBuild( | 1291 | >>> updates_build = hoary_evo.createBuild( |
763 | 1292 | ... distroarchseries=hoary_i386, | 1292 | ... distro_arch_series=hoary_i386, |
764 | 1293 | ... pocket=PackagePublishingPocket.UPDATES, | 1293 | ... pocket=PackagePublishingPocket.UPDATES, |
765 | 1294 | ... processor=hoary_i386.default_processor, | 1294 | ... processor=hoary_i386.default_processor, |
766 | 1295 | ... archive=hoary_i386.main_archive) | 1295 | ... archive=hoary_i386.main_archive) |
767 | @@ -1387,7 +1387,7 @@ | |||
768 | 1387 | >>> a_builder.currentjob.destroySelf() | 1387 | >>> a_builder.currentjob.destroySelf() |
769 | 1388 | 1388 | ||
770 | 1389 | >>> bqItem3 = a_build.queueBuild() | 1389 | >>> bqItem3 = a_build.queueBuild() |
772 | 1390 | >>> removeSecurityProxy(build).buildstate = ( | 1390 | >>> removeSecurityProxy(build).status = ( |
773 | 1391 | ... BuildStatus.NEEDSBUILD) | 1391 | ... BuildStatus.NEEDSBUILD) |
774 | 1392 | >>> removeSecurityProxy(build).pocket = ( | 1392 | >>> removeSecurityProxy(build).pocket = ( |
775 | 1393 | ... PackagePublishingPocket.SECURITY) | 1393 | ... PackagePublishingPocket.SECURITY) |
776 | 1394 | 1394 | ||
777 | === modified file 'lib/lp/soyuz/model/binarypackagebuild.py' | |||
778 | --- lib/lp/soyuz/model/binarypackagebuild.py 2010-05-27 13:26:43 +0000 | |||
779 | +++ lib/lp/soyuz/model/binarypackagebuild.py 2010-05-27 13:26:53 +0000 | |||
780 | @@ -20,7 +20,6 @@ | |||
781 | 20 | Desc, In, Join, LeftJoin) | 20 | Desc, In, Join, LeftJoin) |
782 | 21 | from storm.store import Store | 21 | from storm.store import Store |
783 | 22 | from sqlobject import SQLObjectNotFound | 22 | from sqlobject import SQLObjectNotFound |
784 | 23 | from sqlobject.sqlbuilder import AND, IN | ||
785 | 24 | 23 | ||
786 | 25 | from canonical.config import config | 24 | from canonical.config import config |
787 | 26 | from canonical.database.sqlbase import quote_like, SQLBase, sqlvalues | 25 | from canonical.database.sqlbase import quote_like, SQLBase, sqlvalues |
788 | @@ -629,7 +628,7 @@ | |||
789 | 629 | else: | 628 | else: |
790 | 630 | # completed states (success and failure) | 629 | # completed states (success and failure) |
791 | 631 | buildduration = DurationFormatterAPI( | 630 | buildduration = DurationFormatterAPI( |
793 | 632 | self.date_finished - self.date_started).approximateduration() | 631 | self.duration).approximateduration() |
794 | 633 | buildlog_url = self.log_url | 632 | buildlog_url = self.log_url |
795 | 634 | builder_url = canonical_url(self.builder) | 633 | builder_url = canonical_url(self.builder) |
796 | 635 | 634 | ||
797 | @@ -739,11 +738,13 @@ | |||
798 | 739 | return None | 738 | return None |
799 | 740 | 739 | ||
800 | 741 | archseries_ids = [d.id for d in archseries] | 740 | archseries_ids = [d.id for d in archseries] |
806 | 742 | 741 | store = getUtility(IStoreSelector).get(MAIN_STORE, DEFAULT_FLAVOR) | |
807 | 743 | return BinaryPackageBuild.select( | 742 | return store.find( |
808 | 744 | AND(BinaryPackageBuild.q.buildstate==BuildStatus.NEEDSBUILD, | 743 | BinaryPackageBuild, |
809 | 745 | IN(BinaryPackageBuild.q.distroarchseriesID, archseries_ids)) | 744 | In(BinaryPackageBuild.distro_arch_series_id, archseries_ids), |
810 | 746 | ) | 745 | BinaryPackageBuild.package_build == PackageBuild.id, |
811 | 746 | PackageBuild.build_farm_job == BuildFarmJob.id, | ||
812 | 747 | BuildFarmJob.status == BuildStatus.NEEDSBUILD) | ||
813 | 747 | 748 | ||
814 | 748 | def handleOptionalParamsForBuildQueries( | 749 | def handleOptionalParamsForBuildQueries( |
815 | 749 | self, queries, tables, status=None, name=None, pocket=None, | 750 | self, queries, tables, status=None, name=None, pocket=None, |
816 | @@ -1140,17 +1141,21 @@ | |||
817 | 1140 | arch_ids = [d.id for d in archseries] | 1141 | arch_ids = [d.id for d in archseries] |
818 | 1141 | 1142 | ||
819 | 1142 | query = """ | 1143 | query = """ |
822 | 1143 | Build.distroarchseries IN %s AND | 1144 | BinaryPackageBuild.distro_arch_series IN %s AND |
823 | 1144 | Build.buildstate = %s AND | 1145 | BinaryPackageBuild.package_build = PackageBuild.id AND |
824 | 1146 | PackageBuild.build_farm_job = BuildFarmJob.id AND | ||
825 | 1147 | BuildFarmJob.status = %s AND | ||
826 | 1145 | BuildQueue.job_type = %s AND | 1148 | BuildQueue.job_type = %s AND |
827 | 1146 | BuildQueue.job = BuildPackageJob.job AND | 1149 | BuildQueue.job = BuildPackageJob.job AND |
829 | 1147 | BuildPackageJob.build = build.id AND | 1150 | BuildPackageJob.build = BinaryPackageBuild.id AND |
830 | 1148 | BuildQueue.builder IS NULL | 1151 | BuildQueue.builder IS NULL |
831 | 1149 | """ % sqlvalues( | 1152 | """ % sqlvalues( |
832 | 1150 | arch_ids, BuildStatus.NEEDSBUILD, BuildFarmJobType.PACKAGEBUILD) | 1153 | arch_ids, BuildStatus.NEEDSBUILD, BuildFarmJobType.PACKAGEBUILD) |
833 | 1151 | 1154 | ||
834 | 1152 | candidates = BuildQueue.select( | 1155 | candidates = BuildQueue.select( |
836 | 1153 | query, clauseTables=['Build', 'BuildPackageJob'], | 1156 | query, clauseTables=[ |
837 | 1157 | 'BinaryPackageBuild', 'PackageBuild', 'BuildFarmJob', | ||
838 | 1158 | 'BuildPackageJob'], | ||
839 | 1154 | orderBy=['-BuildQueue.lastscore']) | 1159 | orderBy=['-BuildQueue.lastscore']) |
840 | 1155 | 1160 | ||
841 | 1156 | return candidates | 1161 | return candidates |
842 | 1157 | 1162 | ||
843 | === modified file 'lib/lp/soyuz/model/binarypackagebuildbehavior.py' | |||
844 | --- lib/lp/soyuz/model/binarypackagebuildbehavior.py 2010-05-27 13:26:43 +0000 | |||
845 | +++ lib/lp/soyuz/model/binarypackagebuildbehavior.py 2010-05-27 13:26:53 +0000 | |||
846 | @@ -122,7 +122,8 @@ | |||
847 | 122 | 122 | ||
848 | 123 | # This should already have been checked earlier, but just check again | 123 | # This should already have been checked earlier, but just check again |
849 | 124 | # here in case of programmer errors. | 124 | # here in case of programmer errors. |
851 | 125 | reason = build.archive.checkUploadToPocket(build.distroseries, | 125 | reason = build.archive.checkUploadToPocket( |
852 | 126 | build.distro_series, | ||
853 | 126 | build.pocket) | 127 | build.pocket) |
854 | 127 | assert reason is None, ( | 128 | assert reason is None, ( |
855 | 128 | "%s (%s) can not be built for pocket %s: invalid pocket due " | 129 | "%s (%s) can not be built for pocket %s: invalid pocket due " |
856 | 129 | 130 | ||
857 | === modified file 'lib/lp/soyuz/scripts/buildd.py' | |||
858 | --- lib/lp/soyuz/scripts/buildd.py 2010-04-09 15:46:09 +0000 | |||
859 | +++ lib/lp/soyuz/scripts/buildd.py 2010-05-27 13:26:53 +0000 | |||
860 | @@ -169,9 +169,9 @@ | |||
861 | 169 | 169 | ||
862 | 170 | for build in builds: | 170 | for build in builds: |
863 | 171 | if not build.buildqueue_record: | 171 | if not build.buildqueue_record: |
867 | 172 | name = build.sourcepackagerelease.name | 172 | name = build.source_package_release.name |
868 | 173 | version = build.sourcepackagerelease.version | 173 | version = build.source_package_release.version |
869 | 174 | tag = build.distroarchseries.architecturetag | 174 | tag = build.distro_arch_series.architecturetag |
870 | 175 | self.logger.debug( | 175 | self.logger.debug( |
871 | 176 | "Creating buildqueue record for %s (%s) on %s" | 176 | "Creating buildqueue record for %s (%s) on %s" |
872 | 177 | % (name, version, tag)) | 177 | % (name, version, tag)) |
873 | @@ -195,7 +195,7 @@ | |||
874 | 195 | for job in candidates: | 195 | for job in candidates: |
875 | 196 | uptodate_build = getUtility( | 196 | uptodate_build = getUtility( |
876 | 197 | IBinaryPackageBuildSet).getByQueueEntry(job) | 197 | IBinaryPackageBuildSet).getByQueueEntry(job) |
878 | 198 | if uptodate_build.buildstate != BuildStatus.NEEDSBUILD: | 198 | if uptodate_build.status != BuildStatus.NEEDSBUILD: |
879 | 199 | continue | 199 | continue |
880 | 200 | job.score() | 200 | job.score() |
881 | 201 | 201 | ||
882 | 202 | 202 | ||
883 | === modified file 'lib/lp/soyuz/scripts/packagecopier.py' | |||
884 | --- lib/lp/soyuz/scripts/packagecopier.py 2010-05-15 17:43:59 +0000 | |||
885 | +++ lib/lp/soyuz/scripts/packagecopier.py 2010-05-27 13:26:53 +0000 | |||
886 | @@ -110,7 +110,7 @@ | |||
887 | 110 | package_upload = build.package_upload | 110 | package_upload = build.package_upload |
888 | 111 | package_files.append((package_upload, 'changesfile')) | 111 | package_files.append((package_upload, 'changesfile')) |
889 | 112 | # Re-upload the buildlog file as necessary. | 112 | # Re-upload the buildlog file as necessary. |
891 | 113 | package_files.append((build, 'buildlog')) | 113 | package_files.append((build, 'log')) |
892 | 114 | elif IPackageUploadCustom.providedBy(pub_record): | 114 | elif IPackageUploadCustom.providedBy(pub_record): |
893 | 115 | # Re-upload the custom files included | 115 | # Re-upload the custom files included |
894 | 116 | package_files.append((pub_record, 'libraryfilealias')) | 116 | package_files.append((pub_record, 'libraryfilealias')) |
895 | @@ -341,7 +341,7 @@ | |||
896 | 341 | if not copied_binaries.issuperset(published_binaries): | 341 | if not copied_binaries.issuperset(published_binaries): |
897 | 342 | raise CannotCopy( | 342 | raise CannotCopy( |
898 | 343 | "binaries conflicting with the existing ones") | 343 | "binaries conflicting with the existing ones") |
900 | 344 | self._checkConflictingFiles(source) | 344 | self._checkConflictingFiles(source) |
901 | 345 | 345 | ||
902 | 346 | def _checkConflictingFiles(self, source): | 346 | def _checkConflictingFiles(self, source): |
903 | 347 | # If both the source and destination archive are the same, we don't | 347 | # If both the source and destination archive are the same, we don't |
904 | @@ -350,7 +350,7 @@ | |||
905 | 350 | if source.archive.id == self.archive.id: | 350 | if source.archive.id == self.archive.id: |
906 | 351 | return None | 351 | return None |
907 | 352 | source_files = [ | 352 | source_files = [ |
909 | 353 | sprf.libraryfile.filename for sprf in | 353 | sprf.libraryfile.filename for sprf in |
910 | 354 | source.sourcepackagerelease.files] | 354 | source.sourcepackagerelease.files] |
911 | 355 | destination_sha1s = self.archive.getFilesAndSha1s(source_files) | 355 | destination_sha1s = self.archive.getFilesAndSha1s(source_files) |
912 | 356 | for lf in source.sourcepackagerelease.files: | 356 | for lf in source.sourcepackagerelease.files: |
913 | @@ -624,7 +624,7 @@ | |||
914 | 624 | # If binaries are included in the copy we include binary custom files. | 624 | # If binaries are included in the copy we include binary custom files. |
915 | 625 | if include_binaries: | 625 | if include_binaries: |
916 | 626 | for build in source.getBuilds(): | 626 | for build in source.getBuilds(): |
918 | 627 | if build.buildstate != BuildStatus.FULLYBUILT: | 627 | if build.status != BuildStatus.FULLYBUILT: |
919 | 628 | continue | 628 | continue |
920 | 629 | delayed_copy.addBuild(build) | 629 | delayed_copy.addBuild(build) |
921 | 630 | original_build_upload = build.package_upload | 630 | original_build_upload = build.package_upload |
922 | 631 | 631 | ||
923 | === modified file 'lib/lp/soyuz/scripts/tests/test_buildd_cronscripts.py' | |||
924 | --- lib/lp/soyuz/scripts/tests/test_buildd_cronscripts.py 2010-04-12 08:29:02 +0000 | |||
925 | +++ lib/lp/soyuz/scripts/tests/test_buildd_cronscripts.py 2010-05-27 13:26:53 +0000 | |||
926 | @@ -15,9 +15,13 @@ | |||
927 | 15 | 15 | ||
928 | 16 | from canonical.config import config | 16 | from canonical.config import config |
929 | 17 | from canonical.launchpad.scripts.logger import QuietFakeLogger | 17 | from canonical.launchpad.scripts.logger import QuietFakeLogger |
930 | 18 | from canonical.launchpad.webapp.interfaces import ( | ||
931 | 19 | IStoreSelector, MAIN_STORE, DEFAULT_FLAVOR) | ||
932 | 18 | from canonical.testing import ( | 20 | from canonical.testing import ( |
933 | 19 | DatabaseLayer, LaunchpadLayer, LaunchpadZopelessLayer) | 21 | DatabaseLayer, LaunchpadLayer, LaunchpadZopelessLayer) |
934 | 20 | from lp.buildmaster.interfaces.buildbase import BuildStatus | 22 | from lp.buildmaster.interfaces.buildbase import BuildStatus |
935 | 23 | from lp.buildmaster.model.buildfarmjob import BuildFarmJob | ||
936 | 24 | from lp.buildmaster.model.packagebuild import PackageBuild | ||
937 | 21 | from lp.registry.interfaces.distribution import IDistributionSet | 25 | from lp.registry.interfaces.distribution import IDistributionSet |
938 | 22 | from lp.services.scripts.base import LaunchpadScriptFailure | 26 | from lp.services.scripts.base import LaunchpadScriptFailure |
939 | 23 | from lp.soyuz.interfaces.component import IComponentSet | 27 | from lp.soyuz.interfaces.component import IComponentSet |
940 | @@ -228,7 +232,13 @@ | |||
941 | 228 | self.number_of_pending_builds = self.getPendingBuilds().count() | 232 | self.number_of_pending_builds = self.getPendingBuilds().count() |
942 | 229 | 233 | ||
943 | 230 | def getPendingBuilds(self): | 234 | def getPendingBuilds(self): |
945 | 231 | return BinaryPackageBuild.selectBy(buildstate=BuildStatus.NEEDSBUILD) | 235 | store = getUtility(IStoreSelector).get(MAIN_STORE, DEFAULT_FLAVOR) |
946 | 236 | pending_builds = store.find( | ||
947 | 237 | BinaryPackageBuild, | ||
948 | 238 | BinaryPackageBuild.package_build == PackageBuild.id, | ||
949 | 239 | PackageBuild.build_farm_job == BuildFarmJob.id, | ||
950 | 240 | BuildFarmJob.status == BuildStatus.NEEDSBUILD) | ||
951 | 241 | return pending_builds | ||
952 | 232 | 242 | ||
953 | 233 | def getRetryDepwait(self, distribution=None): | 243 | def getRetryDepwait(self, distribution=None): |
954 | 234 | test_args = ['-n'] | 244 | test_args = ['-n'] |
955 | @@ -278,7 +288,7 @@ | |||
956 | 278 | 288 | ||
957 | 279 | # Make it dependend on the only binary that can be satisfied in | 289 | # Make it dependend on the only binary that can be satisfied in |
958 | 280 | # the sampledata. | 290 | # the sampledata. |
960 | 281 | depwait_build.dependencies = 'pmount' | 291 | depwait_build.dependencies = u'pmount' |
961 | 282 | 292 | ||
962 | 283 | self.layer.commit() | 293 | self.layer.commit() |
963 | 284 | 294 | ||
964 | @@ -291,7 +301,7 @@ | |||
965 | 291 | self.assertEqual( | 301 | self.assertEqual( |
966 | 292 | self.number_of_pending_builds + 1, | 302 | self.number_of_pending_builds + 1, |
967 | 293 | self.getPendingBuilds().count()) | 303 | self.getPendingBuilds().count()) |
969 | 294 | self.assertEqual(depwait_build.buildstate.name, 'NEEDSBUILD') | 304 | self.assertEqual(depwait_build.status.name, 'NEEDSBUILD') |
970 | 295 | self.assertEqual(depwait_build.buildqueue_record.lastscore, 1755) | 305 | self.assertEqual(depwait_build.buildqueue_record.lastscore, 1755) |
971 | 296 | 306 | ||
972 | 297 | 307 | ||
973 | 298 | 308 | ||
974 | === modified file 'lib/lp/soyuz/scripts/tests/test_copypackage.py' | |||
975 | --- lib/lp/soyuz/scripts/tests/test_copypackage.py 2010-05-15 17:43:59 +0000 | |||
976 | +++ lib/lp/soyuz/scripts/tests/test_copypackage.py 2010-05-27 13:26:53 +0000 | |||
977 | @@ -305,8 +305,8 @@ | |||
978 | 305 | 'Privacy mismatch on %s' % build.upload_changesfile.filename) | 305 | 'Privacy mismatch on %s' % build.upload_changesfile.filename) |
979 | 306 | n_files += 1 | 306 | n_files += 1 |
980 | 307 | self.assertEquals( | 307 | self.assertEquals( |
983 | 308 | build.buildlog.restricted, restricted, | 308 | build.log.restricted, restricted, |
984 | 309 | 'Privacy mismatch on %s' % build.buildlog.filename) | 309 | 'Privacy mismatch on %s' % build.log.filename) |
985 | 310 | n_files += 1 | 310 | n_files += 1 |
986 | 311 | self.assertEquals( | 311 | self.assertEquals( |
987 | 312 | n_files, expected_n_files, | 312 | n_files, expected_n_files, |
988 | @@ -322,7 +322,7 @@ | |||
989 | 322 | # update_files_privacy() called on a private binary | 322 | # update_files_privacy() called on a private binary |
990 | 323 | # publication that was copied to a public location correctly | 323 | # publication that was copied to a public location correctly |
991 | 324 | # makes all its related files (deb file, upload changesfile | 324 | # makes all its related files (deb file, upload changesfile |
993 | 325 | # and buildlog) public. | 325 | # and log) public. |
994 | 326 | 326 | ||
995 | 327 | # Create a new private PPA and a private source publication. | 327 | # Create a new private PPA and a private source publication. |
996 | 328 | private_source = self.makeSource(private=True) | 328 | private_source = self.makeSource(private=True) |
997 | @@ -491,7 +491,7 @@ | |||
998 | 491 | 491 | ||
999 | 492 | def test_cannot_copy_binaries_from_FTBFS(self): | 492 | def test_cannot_copy_binaries_from_FTBFS(self): |
1000 | 493 | [build] = self.source.createMissingBuilds() | 493 | [build] = self.source.createMissingBuilds() |
1002 | 494 | build.buildstate = BuildStatus.FAILEDTOBUILD | 494 | build.status = BuildStatus.FAILEDTOBUILD |
1003 | 495 | self.assertCannotCopyBinaries( | 495 | self.assertCannotCopyBinaries( |
1004 | 496 | 'source has no binaries to be copied') | 496 | 'source has no binaries to be copied') |
1005 | 497 | 497 | ||
1006 | @@ -501,7 +501,7 @@ | |||
1007 | 501 | # retried anytime, but they will fail-to-upload if a copy | 501 | # retried anytime, but they will fail-to-upload if a copy |
1008 | 502 | # has built successfully. | 502 | # has built successfully. |
1009 | 503 | [build] = self.source.createMissingBuilds() | 503 | [build] = self.source.createMissingBuilds() |
1011 | 504 | build.buildstate = BuildStatus.FAILEDTOBUILD | 504 | build.status = BuildStatus.FAILEDTOBUILD |
1012 | 505 | self.assertCanCopySourceOnly() | 505 | self.assertCanCopySourceOnly() |
1013 | 506 | 506 | ||
1014 | 507 | def test_cannot_copy_binaries_from_binaries_pending_publication(self): | 507 | def test_cannot_copy_binaries_from_binaries_pending_publication(self): |
1015 | @@ -1156,7 +1156,7 @@ | |||
1016 | 1156 | changes_file_name = '%s_%s_%s.changes' % ( | 1156 | changes_file_name = '%s_%s_%s.changes' % ( |
1017 | 1157 | lazy_bin.name, lazy_bin.version, build_i386.arch_tag) | 1157 | lazy_bin.name, lazy_bin.version, build_i386.arch_tag) |
1018 | 1158 | package_upload = self.test_publisher.addPackageUpload( | 1158 | package_upload = self.test_publisher.addPackageUpload( |
1020 | 1159 | ppa, build_i386.distroarchseries.distroseries, | 1159 | ppa, build_i386.distro_arch_series.distroseries, |
1021 | 1160 | build_i386.pocket, changes_file_content='anything', | 1160 | build_i386.pocket, changes_file_content='anything', |
1022 | 1161 | changes_file_name=changes_file_name) | 1161 | changes_file_name=changes_file_name) |
1023 | 1162 | package_upload.addBuild(build_i386) | 1162 | package_upload.addBuild(build_i386) |
1024 | @@ -1862,8 +1862,8 @@ | |||
1025 | 1862 | status=PackagePublishingStatus.PUBLISHED) | 1862 | status=PackagePublishingStatus.PUBLISHED) |
1026 | 1863 | 1863 | ||
1027 | 1864 | # The i386 build is completed and the hppa one pending. | 1864 | # The i386 build is completed and the hppa one pending. |
1030 | 1865 | self.assertEqual(build_hppa.buildstate, BuildStatus.NEEDSBUILD) | 1865 | self.assertEqual(build_hppa.status, BuildStatus.NEEDSBUILD) |
1031 | 1866 | self.assertEqual(build_i386.buildstate, BuildStatus.FULLYBUILT) | 1866 | self.assertEqual(build_i386.status, BuildStatus.FULLYBUILT) |
1032 | 1867 | 1867 | ||
1033 | 1868 | # Commit to ensure librarian files are written. | 1868 | # Commit to ensure librarian files are written. |
1034 | 1869 | self.layer.txn.commit() | 1869 | self.layer.txn.commit() |
1035 | @@ -2249,7 +2249,7 @@ | |||
1036 | 2249 | 'foo_source.buildlog', restricted=True) | 2249 | 'foo_source.buildlog', restricted=True) |
1037 | 2250 | 2250 | ||
1038 | 2251 | for build in ppa_source.getBuilds(): | 2251 | for build in ppa_source.getBuilds(): |
1040 | 2252 | build.buildlog = fake_buildlog | 2252 | build.log = fake_buildlog |
1041 | 2253 | 2253 | ||
1042 | 2254 | # Create ancestry environment in the primary archive, so we can | 2254 | # Create ancestry environment in the primary archive, so we can |
1043 | 2255 | # test unembargoed overrides. | 2255 | # test unembargoed overrides. |
1044 | @@ -2312,7 +2312,7 @@ | |||
1045 | 2312 | # Check build's upload changesfile | 2312 | # Check build's upload changesfile |
1046 | 2313 | self.assertFalse(build.upload_changesfile.restricted) | 2313 | self.assertFalse(build.upload_changesfile.restricted) |
1047 | 2314 | # Check build's buildlog. | 2314 | # Check build's buildlog. |
1049 | 2315 | self.assertFalse(build.buildlog.restricted) | 2315 | self.assertFalse(build.log.restricted) |
1050 | 2316 | # Check that the pocket is -security as specified in the | 2316 | # Check that the pocket is -security as specified in the |
1051 | 2317 | # script parameters. | 2317 | # script parameters. |
1052 | 2318 | self.assertEqual( | 2318 | self.assertEqual( |
1053 | 2319 | 2319 | ||
1054 | === modified file 'lib/lp/soyuz/scripts/tests/test_populatearchive.py' | |||
1055 | --- lib/lp/soyuz/scripts/tests/test_populatearchive.py 2010-04-09 15:46:09 +0000 | |||
1056 | +++ lib/lp/soyuz/scripts/tests/test_populatearchive.py 2010-05-27 13:26:53 +0000 | |||
1057 | @@ -655,7 +655,7 @@ | |||
1058 | 655 | def build_in_wrong_state(build): | 655 | def build_in_wrong_state(build): |
1059 | 656 | """True if the given build is not (pending and suspended).""" | 656 | """True if the given build is not (pending and suspended).""" |
1060 | 657 | return not ( | 657 | return not ( |
1062 | 658 | build.buildstate == BuildStatus.NEEDSBUILD and | 658 | build.status == BuildStatus.NEEDSBUILD and |
1063 | 659 | build.buildqueue_record.job.status == JobStatus.SUSPENDED) | 659 | build.buildqueue_record.job.status == JobStatus.SUSPENDED) |
1064 | 660 | hoary = getUtility(IDistributionSet)['ubuntu']['hoary'] | 660 | hoary = getUtility(IDistributionSet)['ubuntu']['hoary'] |
1065 | 661 | 661 |