Merge lp:~wgrant/launchpad/replace-archiveuploader-doctests-0 into lp:launchpad
- replace-archiveuploader-doctests-0
- Merge into devel
Proposed by
William Grant
Status: | Merged |
---|---|
Approved by: | Robert Collins |
Approved revision: | no longer in the source branch. |
Merged at revision: | 11269 |
Proposed branch: | lp:~wgrant/launchpad/replace-archiveuploader-doctests-0 |
Merge into: | lp:launchpad |
Diff against target: |
765 lines (+219/-350) 8 files modified
lib/lp/archiveuploader/nascentupload.py (+4/-22) lib/lp/archiveuploader/tests/nascentupload-announcements.txt (+3/-6) lib/lp/archiveuploader/tests/nascentupload-security-uploads.txt (+3/-0) lib/lp/archiveuploader/tests/nascentupload.txt (+22/-22) lib/lp/archiveuploader/tests/nascentuploadfile.txt (+4/-264) lib/lp/archiveuploader/tests/test_dscfile.py (+137/-2) lib/lp/archiveuploader/tests/test_utils.py (+46/-29) lib/lp/soyuz/tests/test_doc.py (+0/-5) |
To merge this branch: | bzr merge lp:~wgrant/launchpad/replace-archiveuploader-doctests-0 |
Related bugs: |
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
Robert Collins (community) | Approve | ||
Review via email: mp+30850@code.launchpad.net |
Commit message
Replaced some archiveuploader doctests with unit tests, and moved some remaining archiveuploader doctests from Soyuz to archiveuploader.
Description of the change
Three things:
- Replaces some archiveuploader doctest sections with unit tests.
- Removes a couple of attributes from NascentUpload that were used by only one test -- that test now calculates them itself.
- Moves some missed archiveuploader doctests from lp.soyuz to lp.archiveuploader.
To post a comment you must log in.
Preview Diff
[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1 | === modified file 'lib/lp/archiveuploader/nascentupload.py' | |||
2 | --- lib/lp/archiveuploader/nascentupload.py 2010-07-15 09:42:28 +0000 | |||
3 | +++ lib/lp/archiveuploader/nascentupload.py 2010-07-24 09:41:13 +0000 | |||
4 | @@ -82,10 +82,6 @@ | |||
5 | 82 | archindep = False | 82 | archindep = False |
6 | 83 | archdep = False | 83 | archdep = False |
7 | 84 | 84 | ||
8 | 85 | # Defined in check_sourceful_consistency() | ||
9 | 86 | native = False | ||
10 | 87 | hasorig = False | ||
11 | 88 | |||
12 | 89 | # Defined if we successfully do_accept() and storeObjectsInDatabase() | 85 | # Defined if we successfully do_accept() and storeObjectsInDatabase() |
13 | 90 | queue_root = None | 86 | queue_root = None |
14 | 91 | 87 | ||
15 | @@ -308,31 +304,17 @@ | |||
16 | 308 | assert self.sourceful, ( | 304 | assert self.sourceful, ( |
17 | 309 | "Source consistency check called for a non-source upload") | 305 | "Source consistency check called for a non-source upload") |
18 | 310 | 306 | ||
33 | 311 | dsc = 0 | 307 | dsc = len([ |
34 | 312 | native_tarball = 0 | 308 | file for file in self.changes.files |
35 | 313 | orig_tarball = 0 | 309 | if determine_source_file_type(file.filename) == |
36 | 314 | 310 | SourcePackageFileType.DSC]) | |
23 | 315 | for uploaded_file in self.changes.files: | ||
24 | 316 | filetype = determine_source_file_type(uploaded_file.filename) | ||
25 | 317 | if filetype == SourcePackageFileType.DSC: | ||
26 | 318 | dsc += 1 | ||
27 | 319 | elif (filetype == SourcePackageFileType.NATIVE_TARBALL | ||
28 | 320 | and not isinstance(uploaded_file, CustomUploadFile)): | ||
29 | 321 | native_tarball += 1 | ||
30 | 322 | elif filetype == SourcePackageFileType.ORIG_TARBALL: | ||
31 | 323 | orig_tarball += 1 | ||
32 | 324 | |||
37 | 325 | 311 | ||
38 | 326 | # It is never sane to upload more than one source at a time. | 312 | # It is never sane to upload more than one source at a time. |
39 | 327 | if dsc > 1: | 313 | if dsc > 1: |
40 | 328 | self.reject("Changes file lists more than one .dsc") | 314 | self.reject("Changes file lists more than one .dsc") |
41 | 329 | |||
42 | 330 | if dsc == 0: | 315 | if dsc == 0: |
43 | 331 | self.reject("Sourceful upload without a .dsc") | 316 | self.reject("Sourceful upload without a .dsc") |
44 | 332 | 317 | ||
45 | 333 | self.native = bool(native_tarball) | ||
46 | 334 | self.hasorig = bool(orig_tarball) | ||
47 | 335 | |||
48 | 336 | def _check_binaryful_consistency(self): | 318 | def _check_binaryful_consistency(self): |
49 | 337 | """Heuristic checks on a binaryful upload. | 319 | """Heuristic checks on a binaryful upload. |
50 | 338 | 320 | ||
51 | 339 | 321 | ||
52 | === renamed file 'lib/lp/soyuz/doc/nascentupload-announcements.txt' => 'lib/lp/archiveuploader/tests/nascentupload-announcements.txt' | |||
53 | --- lib/lp/soyuz/doc/nascentupload-announcements.txt 2010-05-27 09:08:10 +0000 | |||
54 | +++ lib/lp/archiveuploader/tests/nascentupload-announcements.txt 2010-07-24 09:41:13 +0000 | |||
55 | @@ -39,6 +39,8 @@ | |||
56 | 39 | 39 | ||
57 | 40 | We need to be logged into the security model in order to get any further | 40 | We need to be logged into the security model in order to get any further |
58 | 41 | 41 | ||
59 | 42 | >>> from canonical.testing.layers import LaunchpadZopelessLayer | ||
60 | 43 | >>> LaunchpadZopelessLayer.switchDbUser('launchpad') | ||
61 | 42 | >>> login('foo.bar@canonical.com') | 44 | >>> login('foo.bar@canonical.com') |
62 | 43 | 45 | ||
63 | 44 | Helper functions to examine emails that were sent: | 46 | Helper functions to examine emails that were sent: |
64 | @@ -58,17 +60,12 @@ | |||
65 | 58 | address and allow uploads to universe: | 60 | address and allow uploads to universe: |
66 | 59 | 61 | ||
67 | 60 | >>> from canonical.launchpad.interfaces import ( | 62 | >>> from canonical.launchpad.interfaces import ( |
70 | 61 | ... SeriesStatus, IComponentSet, IDistributionSet, | 63 | ... SeriesStatus, IDistributionSet, ILibraryFileAliasSet) |
69 | 62 | ... ILibraryFileAliasSet) | ||
71 | 63 | >>> ubuntu = getUtility(IDistributionSet)['ubuntu'] | 64 | >>> ubuntu = getUtility(IDistributionSet)['ubuntu'] |
72 | 64 | >>> hoary = ubuntu['hoary'] | 65 | >>> hoary = ubuntu['hoary'] |
73 | 65 | >>> hoary.status = SeriesStatus.DEVELOPMENT | 66 | >>> hoary.status = SeriesStatus.DEVELOPMENT |
74 | 66 | >>> hoary.changeslist = "hoary-announce@lists.ubuntu.com" | 67 | >>> hoary.changeslist = "hoary-announce@lists.ubuntu.com" |
75 | 67 | >>> from canonical.launchpad.database import ComponentSelection | ||
76 | 68 | >>> universe = getUtility(IComponentSet)['universe'] | ||
77 | 69 | >>> trash = ComponentSelection(distroseries=hoary, component=universe) | ||
78 | 70 | >>> fake_chroot = getUtility(ILibraryFileAliasSet)[1] | 68 | >>> fake_chroot = getUtility(ILibraryFileAliasSet)[1] |
79 | 71 | >>> trash = hoary['i386'].addOrUpdateChroot(fake_chroot) | ||
80 | 72 | >>> trash = hoary['hppa'].addOrUpdateChroot(fake_chroot) | 69 | >>> trash = hoary['hppa'].addOrUpdateChroot(fake_chroot) |
81 | 73 | 70 | ||
82 | 74 | NEW source upload to RELEASE pocket via 'sync' policy (it presents | 71 | NEW source upload to RELEASE pocket via 'sync' policy (it presents |
83 | 75 | 72 | ||
84 | === renamed file 'lib/lp/soyuz/doc/nascentupload-security-uploads.txt' => 'lib/lp/archiveuploader/tests/nascentupload-security-uploads.txt' | |||
85 | --- lib/lp/soyuz/doc/nascentupload-security-uploads.txt 2009-05-13 14:05:27 +0000 | |||
86 | +++ lib/lp/archiveuploader/tests/nascentupload-security-uploads.txt 2010-07-24 09:41:13 +0000 | |||
87 | @@ -31,6 +31,8 @@ | |||
88 | 31 | respective processorfamily and processor. Let's create them | 31 | respective processorfamily and processor. Let's create them |
89 | 32 | on-the-fly: | 32 | on-the-fly: |
90 | 33 | 33 | ||
91 | 34 | >>> from canonical.testing.layers import LaunchpadZopelessLayer | ||
92 | 35 | >>> LaunchpadZopelessLayer.switchDbUser('launchpad') | ||
93 | 34 | >>> from canonical.launchpad.interfaces import IDistributionSet | 36 | >>> from canonical.launchpad.interfaces import IDistributionSet |
94 | 35 | >>> from canonical.launchpad.database import ( | 37 | >>> from canonical.launchpad.database import ( |
95 | 36 | ... Processor, ProcessorFamily) | 38 | ... Processor, ProcessorFamily) |
96 | @@ -45,6 +47,7 @@ | |||
97 | 45 | 47 | ||
98 | 46 | >>> import transaction | 48 | >>> import transaction |
99 | 47 | >>> transaction.commit() | 49 | >>> transaction.commit() |
100 | 50 | >>> LaunchpadZopelessLayer.switchDbUser('uploader') | ||
101 | 48 | 51 | ||
102 | 49 | 52 | ||
103 | 50 | == Mixed Security Upload == | 53 | == Mixed Security Upload == |
104 | 51 | 54 | ||
105 | === renamed file 'lib/lp/soyuz/doc/nascentupload.txt' => 'lib/lp/archiveuploader/tests/nascentupload.txt' | |||
106 | --- lib/lp/soyuz/doc/nascentupload.txt 2010-05-21 12:12:58 +0000 | |||
107 | +++ lib/lp/archiveuploader/tests/nascentupload.txt 2010-07-24 09:41:13 +0000 | |||
108 | @@ -10,27 +10,13 @@ | |||
109 | 10 | >>> login('foo.bar@canonical.com') | 10 | >>> login('foo.bar@canonical.com') |
110 | 11 | 11 | ||
111 | 12 | For the purpose of this test, hoary needs to be an open (development) | 12 | For the purpose of this test, hoary needs to be an open (development) |
118 | 13 | distroseries so that we can upload to it. It also needs to allow uploads | 13 | distroseries so that we can upload to it. |
113 | 14 | to the universe component. | ||
114 | 15 | |||
115 | 16 | This test normally runs as the "uploader" db user but it does not have | ||
116 | 17 | permission to add a new ComponentSelection, so we temporarily switch to | ||
117 | 18 | "launchpad" to handle this. | ||
119 | 19 | 14 | ||
120 | 20 | >>> from canonical.launchpad.interfaces import ( | 15 | >>> from canonical.launchpad.interfaces import ( |
123 | 21 | ... SeriesStatus, IComponentSet, IDistributionSet) | 16 | ... SeriesStatus, IDistributionSet) |
122 | 22 | >>> from canonical.launchpad.database import ComponentSelection | ||
124 | 23 | >>> ubuntu = getUtility(IDistributionSet)['ubuntu'] | 17 | >>> ubuntu = getUtility(IDistributionSet)['ubuntu'] |
125 | 24 | >>> hoary = ubuntu['hoary'] | 18 | >>> hoary = ubuntu['hoary'] |
126 | 25 | >>> hoary.status = SeriesStatus.DEVELOPMENT | 19 | >>> hoary.status = SeriesStatus.DEVELOPMENT |
127 | 26 | >>> universe = getUtility(IComponentSet)['universe'] | ||
128 | 27 | >>> from canonical.testing import LaunchpadZopelessLayer | ||
129 | 28 | >>> from canonical.database.sqlbase import commit | ||
130 | 29 | >>> commit() | ||
131 | 30 | >>> LaunchpadZopelessLayer.switchDbUser('launchpad') | ||
132 | 31 | >>> trash = ComponentSelection(distroseries=hoary, component=universe) | ||
133 | 32 | >>> commit() | ||
134 | 33 | >>> LaunchpadZopelessLayer.switchDbUser('uploader') | ||
135 | 34 | 20 | ||
136 | 35 | A NascentUpload is a collection of files in a directory. They | 21 | A NascentUpload is a collection of files in a directory. They |
137 | 36 | represent what may turn out to be an acceptable upload to a launchpad | 22 | represent what may turn out to be an acceptable upload to a launchpad |
138 | @@ -152,9 +138,21 @@ | |||
139 | 152 | 138 | ||
140 | 153 | ed_source uses ORIG + DIFF form: | 139 | ed_source uses ORIG + DIFF form: |
141 | 154 | 140 | ||
143 | 155 | >>> ed_source_upload.native | 141 | >>> from lp.archiveuploader.utils import determine_source_file_type |
144 | 142 | >>> from lp.registry.interfaces.sourcepackage import SourcePackageFileType | ||
145 | 143 | >>> def determine_file_types(upload): | ||
146 | 144 | ... return [determine_source_file_type(uf.filename) | ||
147 | 145 | ... for uf in upload.changes.files] | ||
148 | 146 | >>> def has_orig(upload): | ||
149 | 147 | ... return (SourcePackageFileType.ORIG_TARBALL | ||
150 | 148 | ... in determine_file_types(upload)) | ||
151 | 149 | >>> def has_native(upload): | ||
152 | 150 | ... return (SourcePackageFileType.NATIVE_TARBALL | ||
153 | 151 | ... in determine_file_types(upload)) | ||
154 | 152 | |||
155 | 153 | >>> has_native(ed_source_upload) | ||
156 | 156 | False | 154 | False |
158 | 157 | >>> ed_source_upload.hasorig | 155 | >>> has_orig(ed_source_upload) |
159 | 158 | True | 156 | True |
160 | 159 | 157 | ||
161 | 160 | For *sourceful* uploads 'archdep' and 'archindep' are always False: | 158 | For *sourceful* uploads 'archdep' and 'archindep' are always False: |
162 | @@ -197,9 +195,9 @@ | |||
163 | 197 | As expected 'native' and 'hasorig' don't make any sense for binary | 195 | As expected 'native' and 'hasorig' don't make any sense for binary |
164 | 198 | uploads, so they are always False: | 196 | uploads, so they are always False: |
165 | 199 | 197 | ||
167 | 200 | >>> ed_binary_upload.native | 198 | >>> has_native(ed_binary_upload) |
168 | 201 | False | 199 | False |
170 | 202 | >>> ed_binary_upload.hasorig | 200 | >>> has_orig(ed_binary_upload) |
171 | 203 | False | 201 | False |
172 | 204 | 202 | ||
173 | 205 | Since the binary policy lets things through unsigned, we don't try and | 203 | Since the binary policy lets things through unsigned, we don't try and |
174 | @@ -287,9 +285,9 @@ | |||
175 | 287 | ancestries (it saves a lot of bandwidth). So, the upload is not | 285 | ancestries (it saves a lot of bandwidth). So, the upload is not |
176 | 288 | 'native', neither 'hasorig': | 286 | 'native', neither 'hasorig': |
177 | 289 | 287 | ||
179 | 290 | >>> ed_mixed_upload.native | 288 | >>> has_native(ed_mixed_upload) |
180 | 291 | False | 289 | False |
182 | 292 | >>> ed_mixed_upload.hasorig | 290 | >>> has_orig(ed_mixed_upload) |
183 | 293 | False | 291 | False |
184 | 294 | 292 | ||
185 | 295 | But if we check the DSC we will find the reference to the already | 293 | But if we check the DSC we will find the reference to the already |
186 | @@ -716,7 +714,9 @@ | |||
187 | 716 | to set hoary to CURRENT in order to do this because we're not allowed | 714 | to set hoary to CURRENT in order to do this because we're not allowed |
188 | 717 | to upload to -UPDATES in a DEVELOPMENT series. | 715 | to upload to -UPDATES in a DEVELOPMENT series. |
189 | 718 | 716 | ||
190 | 717 | >>> from canonical.testing import LaunchpadZopelessLayer | ||
191 | 719 | >>> LaunchpadZopelessLayer.switchDbUser('launchpad') | 718 | >>> LaunchpadZopelessLayer.switchDbUser('launchpad') |
192 | 719 | >>> from canonical.database.sqlbase import commit | ||
193 | 720 | >>> hoary.status = SeriesStatus.CURRENT | 720 | >>> hoary.status = SeriesStatus.CURRENT |
194 | 721 | >>> commit() | 721 | >>> commit() |
195 | 722 | >>> LaunchpadZopelessLayer.switchDbUser('uploader') | 722 | >>> LaunchpadZopelessLayer.switchDbUser('uploader') |
196 | 723 | 723 | ||
197 | === modified file 'lib/lp/archiveuploader/tests/nascentuploadfile.txt' | |||
198 | --- lib/lp/archiveuploader/tests/nascentuploadfile.txt 2010-06-16 09:08:16 +0000 | |||
199 | +++ lib/lp/archiveuploader/tests/nascentuploadfile.txt 2010-07-24 09:41:13 +0000 | |||
200 | @@ -155,161 +155,10 @@ | |||
201 | 155 | 155 | ||
202 | 156 | === CustomUploadFile identification === | 156 | === CustomUploadFile identification === |
203 | 157 | 157 | ||
359 | 158 | Source and Binary files are easily recognized by a regexp on the | 158 | A custom upload is essentially a tarball, so it matches the is_source |
360 | 159 | filenames: | 159 | regexp, even though it isn't actually a source file: |
361 | 160 | 160 | ||
362 | 161 | >>> from lp.archiveuploader.utils import ( | 161 | >>> from lp.archiveuploader.utils import re_issource |
208 | 162 | ... re_isadeb, re_issource) | ||
209 | 163 | |||
210 | 164 | The binary regexp matches 'deb', 'ddeb' and 'udeb' filenames. | ||
211 | 165 | |||
212 | 166 | >>> deb_match = re_isadeb.match('foo-bar_1.0_i386.deb') | ||
213 | 167 | >>> deb_match.group(0) | ||
214 | 168 | 'foo-bar_1.0_i386.deb' | ||
215 | 169 | >>> deb_match.group(1) | ||
216 | 170 | 'foo-bar' | ||
217 | 171 | >>> deb_match.group(2) | ||
218 | 172 | '1.0' | ||
219 | 173 | >>> deb_match.group(3) | ||
220 | 174 | 'i386' | ||
221 | 175 | |||
222 | 176 | >>> ddeb_match = re_isadeb.match('foo-bar_1.0_i386.ddeb') | ||
223 | 177 | >>> ddeb_match.group(0) | ||
224 | 178 | 'foo-bar_1.0_i386.ddeb' | ||
225 | 179 | >>> ddeb_match.group(1) | ||
226 | 180 | 'foo-bar' | ||
227 | 181 | >>> ddeb_match.group(2) | ||
228 | 182 | '1.0' | ||
229 | 183 | >>> ddeb_match.group(3) | ||
230 | 184 | 'i386' | ||
231 | 185 | |||
232 | 186 | >>> udeb_match = re_isadeb.match('foo-bar_1.0_i386.udeb') | ||
233 | 187 | >>> udeb_match.group(0) | ||
234 | 188 | 'foo-bar_1.0_i386.udeb' | ||
235 | 189 | >>> udeb_match.group(1) | ||
236 | 190 | 'foo-bar' | ||
237 | 191 | >>> udeb_match.group(2) | ||
238 | 192 | '1.0' | ||
239 | 193 | >>> udeb_match.group(3) | ||
240 | 194 | 'i386' | ||
241 | 195 | |||
242 | 196 | The source regexp matches 'orig.tar.gz', 'tar.gz', 'diff.gz' and 'dsc' | ||
243 | 197 | filenames. | ||
244 | 198 | |||
245 | 199 | >>> src_match = re_issource.match('foo_1.0.orig.tar.gz') | ||
246 | 200 | >>> src_match.group(0) | ||
247 | 201 | 'foo_1.0.orig.tar.gz' | ||
248 | 202 | >>> src_match.group(1) | ||
249 | 203 | 'foo' | ||
250 | 204 | >>> src_match.group(2) | ||
251 | 205 | '1.0' | ||
252 | 206 | >>> src_match.group(3) | ||
253 | 207 | 'orig.tar.gz' | ||
254 | 208 | |||
255 | 209 | >>> src_match = re_issource.match('foo_1.0.tar.gz') | ||
256 | 210 | >>> src_match.group(0) | ||
257 | 211 | 'foo_1.0.tar.gz' | ||
258 | 212 | >>> src_match.group(1) | ||
259 | 213 | 'foo' | ||
260 | 214 | >>> src_match.group(2) | ||
261 | 215 | '1.0' | ||
262 | 216 | >>> src_match.group(3) | ||
263 | 217 | 'tar.gz' | ||
264 | 218 | |||
265 | 219 | >>> src_match = re_issource.match('foo_1.0.tar.bz2') | ||
266 | 220 | >>> src_match.group(0) | ||
267 | 221 | 'foo_1.0.tar.bz2' | ||
268 | 222 | >>> src_match.group(1) | ||
269 | 223 | 'foo' | ||
270 | 224 | >>> src_match.group(2) | ||
271 | 225 | '1.0' | ||
272 | 226 | >>> src_match.group(3) | ||
273 | 227 | 'tar.bz2' | ||
274 | 228 | |||
275 | 229 | >>> src_match = re_issource.match('foo_1.0.diff.gz') | ||
276 | 230 | >>> src_match.group(0) | ||
277 | 231 | 'foo_1.0.diff.gz' | ||
278 | 232 | >>> src_match.group(1) | ||
279 | 233 | 'foo' | ||
280 | 234 | >>> src_match.group(2) | ||
281 | 235 | '1.0' | ||
282 | 236 | >>> src_match.group(3) | ||
283 | 237 | 'diff.gz' | ||
284 | 238 | |||
285 | 239 | >>> src_match = re_issource.match('foo_1.0.dsc') | ||
286 | 240 | >>> src_match.group(0) | ||
287 | 241 | 'foo_1.0.dsc' | ||
288 | 242 | >>> src_match.group(1) | ||
289 | 243 | 'foo' | ||
290 | 244 | >>> src_match.group(2) | ||
291 | 245 | '1.0' | ||
292 | 246 | >>> src_match.group(3) | ||
293 | 247 | 'dsc' | ||
294 | 248 | |||
295 | 249 | >>> src_match = re_issource.match('foo_1.0.debian.tar.gz') | ||
296 | 250 | >>> src_match.group(0) | ||
297 | 251 | 'foo_1.0.debian.tar.gz' | ||
298 | 252 | >>> src_match.group(1) | ||
299 | 253 | 'foo' | ||
300 | 254 | >>> src_match.group(2) | ||
301 | 255 | '1.0' | ||
302 | 256 | >>> src_match.group(3) | ||
303 | 257 | 'debian.tar.gz' | ||
304 | 258 | |||
305 | 259 | >>> src_match = re_issource.match('foo_1.0.debian.tar.bz2') | ||
306 | 260 | >>> src_match.group(0) | ||
307 | 261 | 'foo_1.0.debian.tar.bz2' | ||
308 | 262 | >>> src_match.group(1) | ||
309 | 263 | 'foo' | ||
310 | 264 | >>> src_match.group(2) | ||
311 | 265 | '1.0' | ||
312 | 266 | >>> src_match.group(3) | ||
313 | 267 | 'debian.tar.bz2' | ||
314 | 268 | |||
315 | 269 | >>> src_match = re_issource.match('foo_1.0.orig-foo.tar.gz') | ||
316 | 270 | >>> src_match.group(0) | ||
317 | 271 | 'foo_1.0.orig-foo.tar.gz' | ||
318 | 272 | >>> src_match.group(1) | ||
319 | 273 | 'foo' | ||
320 | 274 | >>> src_match.group(2) | ||
321 | 275 | '1.0' | ||
322 | 276 | >>> src_match.group(3) | ||
323 | 277 | 'orig-foo.tar.gz' | ||
324 | 278 | |||
325 | 279 | >>> src_match = re_issource.match('foo_1.0.orig-bar.tar.bz2') | ||
326 | 280 | >>> src_match.group(0) | ||
327 | 281 | 'foo_1.0.orig-bar.tar.bz2' | ||
328 | 282 | >>> src_match.group(1) | ||
329 | 283 | 'foo' | ||
330 | 284 | >>> src_match.group(2) | ||
331 | 285 | '1.0' | ||
332 | 286 | >>> src_match.group(3) | ||
333 | 287 | 'orig-bar.tar.bz2' | ||
334 | 288 | |||
335 | 289 | >>> src_match = re_issource.match('foo_1.0.porig-bar.tar.bz2') | ||
336 | 290 | >>> src_match.group(0) | ||
337 | 291 | 'foo_1.0.porig-bar.tar.bz2' | ||
338 | 292 | >>> src_match.group(1) | ||
339 | 293 | 'foo' | ||
340 | 294 | >>> src_match.group(2) | ||
341 | 295 | '1.0.porig-bar' | ||
342 | 296 | >>> src_match.group(3) | ||
343 | 297 | 'tar.bz2' | ||
344 | 298 | |||
345 | 299 | And finally some failures: | ||
346 | 300 | |||
347 | 301 | >>> re_isadeb.match('foo-bar_1.0_i386.bed') is None | ||
348 | 302 | True | ||
349 | 303 | |||
350 | 304 | >>> re_issource.match('foo_1.0.c') is None | ||
351 | 305 | True | ||
352 | 306 | |||
353 | 307 | >>> re_issource.match('foo_1.0.diff.bz2') is None | ||
354 | 308 | True | ||
355 | 309 | |||
356 | 310 | However a custom upload is essentially a tarball, so it also matches | ||
357 | 311 | the is_source regexp: | ||
358 | 312 | |||
363 | 313 | >>> src_match = re_issource.match('dist-upgrader_1.0.tar.gz') | 162 | >>> src_match = re_issource.match('dist-upgrader_1.0.tar.gz') |
364 | 314 | >>> src_match.group(0) | 163 | >>> src_match.group(0) |
365 | 315 | 'dist-upgrader_1.0.tar.gz' | 164 | 'dist-upgrader_1.0.tar.gz' |
366 | @@ -600,115 +449,6 @@ | |||
367 | 600 | ['File ed_0.2-20.dsc mentioned in the changes has a size mismatch. 578 != 500'] | 449 | ['File ed_0.2-20.dsc mentioned in the changes has a size mismatch. 578 != 500'] |
368 | 601 | 450 | ||
369 | 602 | 451 | ||
370 | 603 | === Format file type verification === | ||
371 | 604 | |||
372 | 605 | DSCFile performs additional verification on the types of the referenced | ||
373 | 606 | files, confirming that they are suitable for the source package's | ||
374 | 607 | format. There is an error generator to verify each format. | ||
375 | 608 | |||
376 | 609 | >>> from lp.archiveuploader.dscfile import (check_format_1_0_files, | ||
377 | 610 | ... check_format_3_0_native_files, check_format_3_0_quilt_files) | ||
378 | 611 | >>> from lp.registry.interfaces.sourcepackage import SourcePackageFileType | ||
379 | 612 | |||
380 | 613 | ==== 1.0 ==== | ||
381 | 614 | |||
382 | 615 | A 1.0 source can contain either a tar.gz or an orig.tar.gz and diff.gz. | ||
383 | 616 | |||
384 | 617 | >>> list(check_format_1_0_files('foo_1.dsc', { | ||
385 | 618 | ... SourcePackageFileType.ORIG_TARBALL: 1, | ||
386 | 619 | ... SourcePackageFileType.DIFF: 1, | ||
387 | 620 | ... SourcePackageFileType.DEBIAN_TARBALL: 0, | ||
388 | 621 | ... SourcePackageFileType.NATIVE_TARBALL: 0, | ||
389 | 622 | ... }, {}, 0)) | ||
390 | 623 | [] | ||
391 | 624 | |||
392 | 625 | >>> list(check_format_1_0_files('foo_1.dsc', { | ||
393 | 626 | ... SourcePackageFileType.NATIVE_TARBALL: 1, | ||
394 | 627 | ... SourcePackageFileType.ORIG_TARBALL: 0, | ||
395 | 628 | ... SourcePackageFileType.DIFF: 0, | ||
396 | 629 | ... SourcePackageFileType.DEBIAN_TARBALL: 0, | ||
397 | 630 | ... }, {}, 0)) | ||
398 | 631 | [] | ||
399 | 632 | |||
400 | 633 | But if we have some other combination, or bzip2 compression, errors | ||
401 | 634 | will be generated. | ||
402 | 635 | |||
403 | 636 | >>> list(check_format_1_0_files('foo_1.dsc', { | ||
404 | 637 | ... SourcePackageFileType.NATIVE_TARBALL: 1, | ||
405 | 638 | ... SourcePackageFileType.ORIG_TARBALL: 1, | ||
406 | 639 | ... SourcePackageFileType.DIFF: 1, | ||
407 | 640 | ... SourcePackageFileType.DEBIAN_TARBALL: 0, | ||
408 | 641 | ... }, {}, 1)) | ||
409 | 642 | [UploadError('foo_1.dsc: is format 1.0 but uses bzip2 compression.',), UploadError('foo_1.dsc: must have exactly one tar.gz, or an orig.tar.gz and diff.gz',)] | ||
410 | 643 | |||
411 | 644 | The files are also bad if there are any components: | ||
412 | 645 | |||
413 | 646 | >>> list(check_format_1_0_files('foo_1.dsc', { | ||
414 | 647 | ... SourcePackageFileType.ORIG_TARBALL: 1, | ||
415 | 648 | ... SourcePackageFileType.DIFF: 1, | ||
416 | 649 | ... SourcePackageFileType.DEBIAN_TARBALL: 0, | ||
417 | 650 | ... SourcePackageFileType.NATIVE_TARBALL: 0, | ||
418 | 651 | ... }, {'foo': 1}, 0)) | ||
419 | 652 | [UploadError('foo_1.dsc: must have exactly one tar.gz, or an orig.tar.gz and diff.gz',)] | ||
420 | 653 | |||
421 | 654 | ==== 3.0 (native) ==== | ||
422 | 655 | |||
423 | 656 | A 3.0 (native) source must contain just a tar.(gz|bz2). | ||
424 | 657 | |||
425 | 658 | >>> list(check_format_3_0_native_files('foo_1.dsc', { | ||
426 | 659 | ... SourcePackageFileType.NATIVE_TARBALL: 1, | ||
427 | 660 | ... SourcePackageFileType.ORIG_TARBALL: 0, | ||
428 | 661 | ... SourcePackageFileType.DIFF: 0, | ||
429 | 662 | ... SourcePackageFileType.DEBIAN_TARBALL: 0, | ||
430 | 663 | ... }, {}, 1)) | ||
431 | 664 | [] | ||
432 | 665 | |||
433 | 666 | >>> list(check_format_3_0_native_files('foo_1.dsc', { | ||
434 | 667 | ... SourcePackageFileType.NATIVE_TARBALL: 1, | ||
435 | 668 | ... SourcePackageFileType.ORIG_TARBALL: 1, | ||
436 | 669 | ... SourcePackageFileType.DIFF: 0, | ||
437 | 670 | ... SourcePackageFileType.DEBIAN_TARBALL: 0, | ||
438 | 671 | ... }, {}, 1)) | ||
439 | 672 | [UploadError('foo_1.dsc: must have only a tar.*.',)] | ||
440 | 673 | |||
441 | 674 | >>> list(check_format_3_0_native_files('foo_1.dsc', { | ||
442 | 675 | ... SourcePackageFileType.NATIVE_TARBALL: 1, | ||
443 | 676 | ... SourcePackageFileType.ORIG_TARBALL: 0, | ||
444 | 677 | ... SourcePackageFileType.DIFF: 0, | ||
445 | 678 | ... SourcePackageFileType.DEBIAN_TARBALL: 0, | ||
446 | 679 | ... }, {'foo': 1}, 0)) | ||
447 | 680 | [UploadError('foo_1.dsc: must have only a tar.*.',)] | ||
448 | 681 | |||
449 | 682 | ==== 3.0 (quilt) ==== | ||
450 | 683 | |||
451 | 684 | A 3.0 (quilt) source must have an orig.tar.*, a debian.tar.*, and at | ||
452 | 685 | most one orig-COMPONENT.tar.* for each COMPONENT. | ||
453 | 686 | |||
454 | 687 | >>> list(check_format_3_0_quilt_files('foo_1.dsc', { | ||
455 | 688 | ... SourcePackageFileType.ORIG_TARBALL: 1, | ||
456 | 689 | ... SourcePackageFileType.DEBIAN_TARBALL: 1, | ||
457 | 690 | ... SourcePackageFileType.NATIVE_TARBALL: 0, | ||
458 | 691 | ... SourcePackageFileType.DIFF: 0, | ||
459 | 692 | ... }, {'foo': 1}, 1)) | ||
460 | 693 | [] | ||
461 | 694 | |||
462 | 695 | >>> list(check_format_3_0_quilt_files('foo_1.dsc', { | ||
463 | 696 | ... SourcePackageFileType.NATIVE_TARBALL: 1, | ||
464 | 697 | ... SourcePackageFileType.ORIG_TARBALL: 1, | ||
465 | 698 | ... SourcePackageFileType.DIFF: 0, | ||
466 | 699 | ... SourcePackageFileType.DEBIAN_TARBALL: 1, | ||
467 | 700 | ... }, {}, 1)) | ||
468 | 701 | [UploadError('foo_1.dsc: must have only an orig.tar.*, a debian.tar.*, and optionally orig-*.tar.*',)] | ||
469 | 702 | |||
470 | 703 | >>> list(check_format_3_0_quilt_files('foo_1.dsc', { | ||
471 | 704 | ... SourcePackageFileType.ORIG_TARBALL: 1, | ||
472 | 705 | ... SourcePackageFileType.DEBIAN_TARBALL: 1, | ||
473 | 706 | ... SourcePackageFileType.NATIVE_TARBALL: 0, | ||
474 | 707 | ... SourcePackageFileType.DIFF: 0, | ||
475 | 708 | ... }, {'foo': 2}, 0)) | ||
476 | 709 | [UploadError('foo_1.dsc: has more than one orig-foo.tar.*.',)] | ||
477 | 710 | |||
478 | 711 | |||
479 | 712 | === Sub-DSC files or DSCUploadedFiles === | 452 | === Sub-DSC files or DSCUploadedFiles === |
480 | 713 | 453 | ||
481 | 714 | Sub-DSCFiles are DSCUploadedFile objects. | 454 | Sub-DSCFiles are DSCUploadedFile objects. |
482 | 715 | 455 | ||
483 | === renamed file 'lib/lp/soyuz/doc/safe_fix_maintainer.txt' => 'lib/lp/archiveuploader/tests/safe_fix_maintainer.txt' | |||
484 | === modified file 'lib/lp/archiveuploader/tests/test_dscfile.py' | |||
485 | --- lib/lp/archiveuploader/tests/test_dscfile.py 2010-07-20 15:25:30 +0000 | |||
486 | +++ lib/lp/archiveuploader/tests/test_dscfile.py 2010-07-24 09:41:13 +0000 | |||
487 | @@ -7,16 +7,22 @@ | |||
488 | 7 | 7 | ||
489 | 8 | import os | 8 | import os |
490 | 9 | 9 | ||
491 | 10 | from canonical.config import config | ||
492 | 11 | from canonical.launchpad.scripts.logger import QuietFakeLogger | 10 | from canonical.launchpad.scripts.logger import QuietFakeLogger |
493 | 12 | from canonical.testing.layers import LaunchpadZopelessLayer | 11 | from canonical.testing.layers import LaunchpadZopelessLayer |
494 | 13 | from lp.archiveuploader.dscfile import ( | 12 | from lp.archiveuploader.dscfile import ( |
496 | 14 | DSCFile, findChangelog, findCopyright) | 13 | DSCFile, findChangelog, findCopyright, format_to_file_checker_map) |
497 | 15 | from lp.archiveuploader.nascentuploadfile import UploadError | 14 | from lp.archiveuploader.nascentuploadfile import UploadError |
498 | 16 | from lp.archiveuploader.tests import datadir, mock_logger_quiet | 15 | from lp.archiveuploader.tests import datadir, mock_logger_quiet |
499 | 17 | from lp.archiveuploader.uploadpolicy import BuildDaemonUploadPolicy | 16 | from lp.archiveuploader.uploadpolicy import BuildDaemonUploadPolicy |
500 | 17 | from lp.registry.interfaces.sourcepackage import SourcePackageFileType | ||
501 | 18 | from lp.soyuz.interfaces.sourcepackageformat import SourcePackageFormat | ||
502 | 18 | from lp.testing import TestCase, TestCaseWithFactory | 19 | from lp.testing import TestCase, TestCaseWithFactory |
503 | 19 | 20 | ||
504 | 21 | ORIG_TARBALL = SourcePackageFileType.ORIG_TARBALL | ||
505 | 22 | DEBIAN_TARBALL = SourcePackageFileType.DEBIAN_TARBALL | ||
506 | 23 | NATIVE_TARBALL = SourcePackageFileType.NATIVE_TARBALL | ||
507 | 24 | DIFF = SourcePackageFileType.DIFF | ||
508 | 25 | |||
509 | 20 | 26 | ||
510 | 21 | class TestDscFile(TestCase): | 27 | class TestDscFile(TestCase): |
511 | 22 | 28 | ||
512 | @@ -146,3 +152,132 @@ | |||
513 | 146 | dsc_file.cleanUp() | 152 | dsc_file.cleanUp() |
514 | 147 | finally: | 153 | finally: |
515 | 148 | os.chmod(tempdir, 0755) | 154 | os.chmod(tempdir, 0755) |
516 | 155 | |||
517 | 156 | |||
518 | 157 | class BaseTestSourceFileVerification(TestCase): | ||
519 | 158 | |||
520 | 159 | def assertErrorsForFiles(self, expected, files, components={}, | ||
521 | 160 | bzip2_count=0): | ||
522 | 161 | """Check problems with the given set of files for the given format. | ||
523 | 162 | |||
524 | 163 | :param expected: a list of expected errors, as strings. | ||
525 | 164 | :param format: the `SourcePackageFormat` to check against. | ||
526 | 165 | :param files: a dict mapping `SourcePackageFileType`s to counts. | ||
527 | 166 | :param components: a dict mapping orig component tarball components | ||
528 | 167 | to counts. | ||
529 | 168 | :param bzip2_count: number of files using bzip2 compression. | ||
530 | 169 | """ | ||
531 | 170 | full_files = { | ||
532 | 171 | NATIVE_TARBALL: 0, | ||
533 | 172 | ORIG_TARBALL: 0, | ||
534 | 173 | DIFF: 0, | ||
535 | 174 | DEBIAN_TARBALL: 0, | ||
536 | 175 | } | ||
537 | 176 | full_files.update(files) | ||
538 | 177 | self.assertEquals( | ||
539 | 178 | expected, | ||
540 | 179 | [str(e) for e in format_to_file_checker_map[self.format]( | ||
541 | 180 | 'foo_1.dsc', full_files, components, bzip2_count)]) | ||
542 | 181 | |||
543 | 182 | def assertFilesOK(self, files, components={}, bzip2_count=0): | ||
544 | 183 | """Check that the given set of files is OK for the given format. | ||
545 | 184 | |||
546 | 185 | :param format: the `SourcePackageFormat` to check against. | ||
547 | 186 | :param files: a dict mapping `SourcePackageFileType`s to counts. | ||
548 | 187 | :param components: a dict mapping orig component tarball components | ||
549 | 188 | to counts. | ||
550 | 189 | :param bzip2_count: number of files using bzip2 compression. | ||
551 | 190 | """ | ||
552 | 191 | self.assertErrorsForFiles([], files, components, bzip2_count) | ||
553 | 192 | |||
554 | 193 | |||
class Test10SourceFormatVerification(BaseTestSourceFileVerification):
    """Checks on the file sets permitted for format 1.0 sources."""

    format = SourcePackageFormat.FORMAT_1_0

    wrong_files_error = ('foo_1.dsc: must have exactly one tar.gz, or an '
                         'orig.tar.gz and diff.gz')
    bzip2_error = 'foo_1.dsc: is format 1.0 but uses bzip2 compression.'

    def testFormat10Debian(self):
        # An orig tarball plus a Debian diff is a valid 1.0 source.
        self.assertFilesOK({ORIG_TARBALL: 1, DIFF: 1})

    def testFormat10Native(self):
        # A lone native tarball is also a valid 1.0 source.
        self.assertFilesOK({NATIVE_TARBALL: 1})

    def testFormat10CannotHaveWrongFiles(self):
        # Mixing native and non-native files, or supplying only one of
        # the two non-native files, is rejected.
        bad_file_sets = [
            {DIFF: 1},
            {ORIG_TARBALL: 1},
            {ORIG_TARBALL: 1, DIFF: 1, NATIVE_TARBALL: 1},
        ]
        for file_set in bad_file_sets:
            self.assertErrorsForFiles([self.wrong_files_error], file_set)

        # Component tarballs are never valid in a 1.0 source.
        self.assertErrorsForFiles(
            [self.wrong_files_error], {ORIG_TARBALL: 1, DIFF: 1}, {'foo': 1})

    def testFormat10CannotUseBzip2(self):
        # bzip2 compression is rejected for 1.0 sources.
        self.assertErrorsForFiles(
            [self.bzip2_error], {NATIVE_TARBALL: 1}, {}, 1)
588 | 227 | |||
589 | 228 | |||
class Test30QuiltSourceFormatVerification(BaseTestSourceFileVerification):
    """Checks on the file sets permitted for 3.0 (quilt) sources."""

    format = SourcePackageFormat.FORMAT_3_0_QUILT

    wrong_files_error = ('foo_1.dsc: must have only an orig.tar.*, a '
                         'debian.tar.* and optionally orig-*.tar.*')
    comp_conflict_error = 'foo_1.dsc: has more than one orig-bar.tar.*.'

    def testFormat30Quilt(self):
        # An orig tarball plus a debian tarball is required; any number
        # of distinct component tarballs may accompany them, and either
        # gzip or bzip2 compression is acceptable.
        component_sets = ({}, {'foo': 1}, {'foo': 1, 'bar': 1})
        for component_set in component_sets:
            for bz2 in (0, 1):
                self.assertFilesOK(
                    {ORIG_TARBALL: 1, DEBIAN_TARBALL: 1}, component_set,
                    bz2)

    def testFormat30QuiltCannotHaveConflictingComponentTarballs(self):
        # Two tarballs claiming the same component conflict, and are
        # rejected.
        self.assertErrorsForFiles(
            [self.comp_conflict_error],
            {ORIG_TARBALL: 1, DEBIAN_TARBALL: 1}, {'foo': 1, 'bar': 2})

    def testFormat30QuiltCannotHaveWrongFiles(self):
        # Neither a diff nor a native tarball belongs in a 3.0 (quilt)
        # source.
        for forbidden in (DIFF, NATIVE_TARBALL):
            self.assertErrorsForFiles(
                [self.wrong_files_error],
                {ORIG_TARBALL: 1, DEBIAN_TARBALL: 1, forbidden: 1})
621 | 260 | |||
622 | 261 | |||
class Test30NativeSourceFormatVerification(BaseTestSourceFileVerification):
    """Checks on the file sets permitted for 3.0 (native) sources.

    Renamed from Test30QuiltSourceFormatVerification: the original name
    duplicated the quilt test class above, shadowing it so the quilt
    tests were never collected or run.
    """

    format = SourcePackageFormat.FORMAT_3_0_NATIVE

    wrong_files_error = 'foo_1.dsc: must have only a tar.*.'

    def testFormat30Native(self):
        # 3.0 (native) sources must contain just a native tarball. They
        # may use gzip or bzip2 compression.
        for bzip2_count in (0, 1):
            self.assertFilesOK({NATIVE_TARBALL: 1}, {},
                               bzip2_count)

    def testFormat30NativeCannotHaveWrongFiles(self):
        # 3.0 (native) sources may not have a diff, Debian tarball, orig
        # tarball, or any component tarballs.
        for filetype in (DIFF, DEBIAN_TARBALL, ORIG_TARBALL):
            self.assertErrorsForFiles(
                [self.wrong_files_error], {NATIVE_TARBALL: 1, filetype: 1})
        # A 3.0 (native) source with component tarballs is invalid.
        self.assertErrorsForFiles(
            [self.wrong_files_error], {NATIVE_TARBALL: 1}, {'foo': 1})
645 | 149 | 284 | ||
646 | === modified file 'lib/lp/archiveuploader/tests/test_utils.py' | |||
647 | --- lib/lp/archiveuploader/tests/test_utils.py 2010-07-18 00:26:33 +0000 | |||
648 | +++ lib/lp/archiveuploader/tests/test_utils.py 2010-07-24 09:41:13 +0000 | |||
649 | @@ -5,23 +5,19 @@ | |||
650 | 5 | 5 | ||
651 | 6 | # arch-tag: 90e6eb79-83a2-47e8-9f8b-3c687079c923 | 6 | # arch-tag: 90e6eb79-83a2-47e8-9f8b-3c687079c923 |
652 | 7 | 7 | ||
655 | 8 | import unittest | 8 | from testtools import TestCase |
654 | 9 | import sys | ||
656 | 10 | 9 | ||
657 | 11 | from lp.registry.interfaces.sourcepackage import SourcePackageFileType | 10 | from lp.registry.interfaces.sourcepackage import SourcePackageFileType |
658 | 12 | from lp.soyuz.interfaces.binarypackagerelease import BinaryPackageFileType | 11 | from lp.soyuz.interfaces.binarypackagerelease import BinaryPackageFileType |
659 | 13 | from lp.archiveuploader.tests import datadir | 12 | from lp.archiveuploader.tests import datadir |
667 | 14 | 13 | from lp.archiveuploader.utils import (determine_binary_file_type, | |
668 | 15 | 14 | determine_source_file_type, re_isadeb, re_issource) | |
669 | 16 | class TestUtilities(unittest.TestCase): | 15 | |
670 | 17 | 16 | ||
671 | 18 | def testImport(self): | 17 | class TestUtilities(TestCase): |
665 | 19 | """lp.archiveuploader.utils should be importable""" | ||
666 | 20 | import lp.archiveuploader.utils | ||
672 | 21 | 18 | ||
673 | 22 | def test_determine_source_file_type(self): | 19 | def test_determine_source_file_type(self): |
674 | 23 | """lp.archiveuploader.utils.determine_source_file_type should work.""" | 20 | """lp.archiveuploader.utils.determine_source_file_type should work.""" |
675 | 24 | from lp.archiveuploader.utils import determine_source_file_type | ||
676 | 25 | 21 | ||
677 | 26 | # .dsc -> DSC | 22 | # .dsc -> DSC |
678 | 27 | self.assertEquals( | 23 | self.assertEquals( |
679 | @@ -74,8 +70,6 @@ | |||
680 | 74 | 70 | ||
681 | 75 | def test_determine_binary_file_type(self): | 71 | def test_determine_binary_file_type(self): |
682 | 76 | """lp.archiveuploader.utils.determine_binary_file_type should work.""" | 72 | """lp.archiveuploader.utils.determine_binary_file_type should work.""" |
683 | 77 | from lp.archiveuploader.utils import determine_binary_file_type | ||
684 | 78 | |||
685 | 79 | # .deb -> DEB | 73 | # .deb -> DEB |
686 | 80 | self.assertEquals( | 74 | self.assertEquals( |
687 | 81 | determine_binary_file_type('foo_1.0-1_all.deb'), | 75 | determine_binary_file_type('foo_1.0-1_all.deb'), |
688 | @@ -222,20 +216,43 @@ | |||
689 | 222 | pass | 216 | pass |
690 | 223 | 217 | ||
691 | 224 | 218 | ||
class TestFilenameRegularExpressions(TestCase):
    """Checks on the upload filename regexps re_isadeb and re_issource."""

    def test_re_isadeb(self):
        # Each of the three binary package extensions is recognised, and
        # the name, version, architecture and extension are captured.
        for ext in ('deb', 'ddeb', 'udeb'):
            match = re_isadeb.match('foo-bar_1.0_i386.%s' % ext)
            self.assertEquals(
                ('foo-bar', '1.0', 'i386', ext), match.groups())

        # An unknown extension is not recognised.
        self.assertIs(None, re_isadeb.match('foo-bar_1.0_i386.notdeb'))

        # Nor is a filename lacking an architecture.
        self.assertIs(None, re_isadeb.match('foo-bar_1.0.deb'))

    def test_re_issource(self):
        # Each valid source extension is recognised, capturing the name,
        # version and extension.
        valid_extensions = (
            'dsc', 'tar.gz', 'tar.bz2', 'diff.gz', 'orig.tar.gz',
            'orig.tar.bz2', 'orig-bar.tar.gz', 'orig-bar.tar.bz2',
            'debian.tar.gz', 'debian.tar.bz2')
        for ext in valid_extensions:
            match = re_issource.match('foo-bar_1.0.%s' % ext)
            self.assertEquals(('foo-bar', '1.0', ext), match.groups())

        # While orig-*.tar.gz is all interpreted as extension, *orig-*.tar.gz
        # is taken to have an extension of just 'tar.gz'.
        match = re_issource.match('foo-bar_1.0.porig-bar.tar.gz')
        self.assertEquals(
            ('foo-bar', '1.0.porig-bar', 'tar.gz'), match.groups())

        # An unknown extension is not recognised.
        self.assertIs(None, re_issource.match('foo-bar_1.0.notdsc'))

        # Nor is a badly formatted name.
        self.assertIs(None, re_issource.match('foo-bar.dsc'))

        # bzip2 compression for files which must be gzipped is invalid.
        self.assertIs(None, re_issource.match('foo-bar_1.0.diff.bz2'))
750 | === renamed file 'lib/lp/soyuz/doc/uploadpolicy.txt' => 'lib/lp/archiveuploader/tests/uploadpolicy.txt' | |||
751 | === modified file 'lib/lp/soyuz/tests/test_doc.py' | |||
752 | --- lib/lp/soyuz/tests/test_doc.py 2010-04-30 09:49:59 +0000 | |||
753 | +++ lib/lp/soyuz/tests/test_doc.py 2010-07-24 09:41:13 +0000 | |||
754 | @@ -123,11 +123,6 @@ | |||
755 | 123 | 123 | ||
756 | 124 | 124 | ||
757 | 125 | special = { | 125 | special = { |
758 | 126 | 'nascentupload.txt': LayeredDocFileSuite( | ||
759 | 127 | '../doc/nascentupload.txt', | ||
760 | 128 | setUp=uploaderSetUp, tearDown=uploaderTearDown, | ||
761 | 129 | layer=LaunchpadZopelessLayer, | ||
762 | 130 | ), | ||
763 | 131 | 'build-notification.txt': LayeredDocFileSuite( | 126 | 'build-notification.txt': LayeredDocFileSuite( |
764 | 132 | '../doc/build-notification.txt', | 127 | '../doc/build-notification.txt', |
765 | 133 | setUp=builddmasterSetUp, | 128 | setUp=builddmasterSetUp, |
Thanks. You might like to start using testscenarios for the permutations rather than looping — it's easier to debug failures that way.