Merge lp:~spiv/bzr/command-cleanup into lp:bzr

Proposed by Andrew Bennetts
Status: Merged
Merged at revision: not available
Proposed branch: lp:~spiv/bzr/command-cleanup
Merge into: lp:bzr
Diff against target: 2070 lines (+764/-813)
15 files modified
NEWS (+5/-0)
bzrlib/builtins.py (+704/-793)
bzrlib/cleanup.py (+11/-2)
bzrlib/commands.py (+28/-2)
bzrlib/tests/commands/test_branch.py (+3/-3)
bzrlib/tests/commands/test_cat.py (+1/-1)
bzrlib/tests/commands/test_checkout.py (+2/-2)
bzrlib/tests/commands/test_commit.py (+1/-1)
bzrlib/tests/commands/test_init.py (+1/-1)
bzrlib/tests/commands/test_init_repository.py (+1/-1)
bzrlib/tests/commands/test_merge.py (+1/-1)
bzrlib/tests/commands/test_missing.py (+1/-1)
bzrlib/tests/commands/test_pull.py (+2/-2)
bzrlib/tests/commands/test_push.py (+2/-2)
bzrlib/tests/commands/test_update.py (+1/-1)
To merge this branch: bzr merge lp:~spiv/bzr/command-cleanup
Reviewer Review Type Date Requested Status
Martin Pool Approve
Review via email: mp+16943@code.launchpad.net
To post a comment you must log in.
Revision history for this message
Andrew Bennetts (spiv) wrote :

This branch is mostly mechanical: it replaces lots of try/finally blocks in bzrlib/builtins.py with calls to a new self.add_cleanup method, implemented using the robust logic in the bzrlib.cleanup module. This fixes issues of the sort reported in bug 496590 (ObjectNotLocked annotating file from other branch): try/finally tends to unconditionally attempt cleanups that might fail when there's a prior error, thus failing a second time and masking the original error.

There are some small infrastructural improvements to support this: Command classes now have add_cleanup and cleanup_now methods (the latter because sometimes commands have multiple stages, e.g. 'access locked repository' followed by 'format and print output'). The OperationWithCleanups class now has cleanup_now and run_simple.

The diff is large but as already mentioned mostly mechanical. It overall shrinks the line count of builtins.py *and* reduces the indentation level of most of the touched lines... so the code is hopefully clearer as well as more robust.

Revision history for this message
Martin Pool (mbp) wrote :

I'm not sure if 'run_simple' is a great name but I can't think of a better one.

Is this in the developer guide? Maybe it should be.

I'm going to trust you (and the test suite ;-) that these changes actually are mechanical.

It seems a bit strange that the operation should be registered when the Command is constructed, rather than when it's run, but I suppose the command is only to be used once, and its lifetime is already not very clear.

review: Approve

Preview Diff

[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1=== modified file 'NEWS'
2--- NEWS 2010-01-08 09:27:39 +0000
3+++ NEWS 2010-01-11 12:40:31 +0000
4@@ -87,6 +87,11 @@
5 Improvements
6 ************
7
8+* Added ``add_cleanup`` and ``cleanup_now`` to ``bzrlib.commands.Command``.
9+ All the builtin commands now use ``add_cleanup`` rather than
10+ ``try``/``finally`` blocks where applicable as it is simpler and more
11+ robust. (Andrew Bennetts)
12+
13 * Push will now inform the user when they are trying to push to a foreign
14 VCS for which roundtripping is not supported, and will suggest that they
15 use dpush. (Jelmer Vernooij)
16
17=== modified file 'bzrlib/builtins.py'
18--- bzrlib/builtins.py 2009-12-23 05:42:33 +0000
19+++ bzrlib/builtins.py 2010-01-11 12:40:30 +0000
20@@ -502,23 +502,19 @@
21 wt.lock_read()
22 except (errors.NoWorkingTree, errors.NotLocalUrl):
23 raise errors.NoWorkingTree(location)
24+ self.add_cleanup(wt.unlock)
25+ revid = wt.last_revision()
26 try:
27- revid = wt.last_revision()
28- try:
29- revno_t = wt.branch.revision_id_to_dotted_revno(revid)
30- except errors.NoSuchRevision:
31- revno_t = ('???',)
32- revno = ".".join(str(n) for n in revno_t)
33- finally:
34- wt.unlock()
35+ revno_t = wt.branch.revision_id_to_dotted_revno(revid)
36+ except errors.NoSuchRevision:
37+ revno_t = ('???',)
38+ revno = ".".join(str(n) for n in revno_t)
39 else:
40 b = Branch.open_containing(location)[0]
41 b.lock_read()
42- try:
43- revno = b.revno()
44- finally:
45- b.unlock()
46-
47+ self.add_cleanup(b.unlock)
48+ revno = b.revno()
49+ self.cleanup_now()
50 self.outf.write(str(revno) + '\n')
51
52
53@@ -546,43 +542,40 @@
54 wt = WorkingTree.open_containing(directory)[0]
55 b = wt.branch
56 wt.lock_read()
57+ self.add_cleanup(wt.unlock)
58 except (errors.NoWorkingTree, errors.NotLocalUrl):
59 wt = None
60 b = Branch.open_containing(directory)[0]
61 b.lock_read()
62- try:
63- revision_ids = []
64- if revision is not None:
65- revision_ids.extend(rev.as_revision_id(b) for rev in revision)
66- if revision_info_list is not None:
67- for rev_str in revision_info_list:
68- rev_spec = RevisionSpec.from_string(rev_str)
69- revision_ids.append(rev_spec.as_revision_id(b))
70- # No arguments supplied, default to the last revision
71- if len(revision_ids) == 0:
72- if tree:
73- if wt is None:
74- raise errors.NoWorkingTree(directory)
75- revision_ids.append(wt.last_revision())
76- else:
77- revision_ids.append(b.last_revision())
78-
79- revinfos = []
80- maxlen = 0
81- for revision_id in revision_ids:
82- try:
83- dotted_revno = b.revision_id_to_dotted_revno(revision_id)
84- revno = '.'.join(str(i) for i in dotted_revno)
85- except errors.NoSuchRevision:
86- revno = '???'
87- maxlen = max(maxlen, len(revno))
88- revinfos.append([revno, revision_id])
89- finally:
90- if wt is None:
91- b.unlock()
92+ self.add_cleanup(b.unlock)
93+ revision_ids = []
94+ if revision is not None:
95+ revision_ids.extend(rev.as_revision_id(b) for rev in revision)
96+ if revision_info_list is not None:
97+ for rev_str in revision_info_list:
98+ rev_spec = RevisionSpec.from_string(rev_str)
99+ revision_ids.append(rev_spec.as_revision_id(b))
100+ # No arguments supplied, default to the last revision
101+ if len(revision_ids) == 0:
102+ if tree:
103+ if wt is None:
104+ raise errors.NoWorkingTree(directory)
105+ revision_ids.append(wt.last_revision())
106 else:
107- wt.unlock()
108-
109+ revision_ids.append(b.last_revision())
110+
111+ revinfos = []
112+ maxlen = 0
113+ for revision_id in revision_ids:
114+ try:
115+ dotted_revno = b.revision_id_to_dotted_revno(revision_id)
116+ revno = '.'.join(str(i) for i in dotted_revno)
117+ except errors.NoSuchRevision:
118+ revno = '???'
119+ maxlen = max(maxlen, len(revno))
120+ revinfos.append([revno, revision_id])
121+
122+ self.cleanup_now()
123 for ri in revinfos:
124 self.outf.write('%*s %s\n' % (maxlen, ri[0], ri[1]))
125
126@@ -660,13 +653,11 @@
127
128 if base_tree:
129 base_tree.lock_read()
130- try:
131- tree, file_list = tree_files_for_add(file_list)
132- added, ignored = tree.smart_add(file_list, not
133- no_recurse, action=action, save=not dry_run)
134- finally:
135- if base_tree is not None:
136- base_tree.unlock()
137+ self.add_cleanup(base_tree.unlock)
138+ tree, file_list = tree_files_for_add(file_list)
139+ added, ignored = tree.smart_add(file_list, not
140+ no_recurse, action=action, save=not dry_run)
141+ self.cleanup_now()
142 if len(ignored) > 0:
143 if verbose:
144 for glob in sorted(ignored.keys()):
145@@ -736,30 +727,28 @@
146 revision = _get_one_revision('inventory', revision)
147 work_tree, file_list = tree_files(file_list)
148 work_tree.lock_read()
149- try:
150- if revision is not None:
151- tree = revision.as_tree(work_tree.branch)
152-
153- extra_trees = [work_tree]
154- tree.lock_read()
155- else:
156- tree = work_tree
157- extra_trees = []
158-
159- if file_list is not None:
160- file_ids = tree.paths2ids(file_list, trees=extra_trees,
161- require_versioned=True)
162- # find_ids_across_trees may include some paths that don't
163- # exist in 'tree'.
164- entries = sorted((tree.id2path(file_id), tree.inventory[file_id])
165- for file_id in file_ids if file_id in tree)
166- else:
167- entries = tree.inventory.entries()
168- finally:
169- tree.unlock()
170- if tree is not work_tree:
171- work_tree.unlock()
172-
173+ self.add_cleanup(work_tree.unlock)
174+ if revision is not None:
175+ tree = revision.as_tree(work_tree.branch)
176+
177+ extra_trees = [work_tree]
178+ tree.lock_read()
179+ self.add_cleanup(tree.unlock)
180+ else:
181+ tree = work_tree
182+ extra_trees = []
183+
184+ if file_list is not None:
185+ file_ids = tree.paths2ids(file_list, trees=extra_trees,
186+ require_versioned=True)
187+ # find_ids_across_trees may include some paths that don't
188+ # exist in 'tree'.
189+ entries = sorted((tree.id2path(file_id), tree.inventory[file_id])
190+ for file_id in file_ids if file_id in tree)
191+ else:
192+ entries = tree.inventory.entries()
193+
194+ self.cleanup_now()
195 for path, entry in entries:
196 if kind and kind != entry.kind:
197 continue
198@@ -811,10 +800,8 @@
199 raise errors.BzrCommandError("missing file argument")
200 tree, rel_names = tree_files(names_list, canonicalize=False)
201 tree.lock_tree_write()
202- try:
203- self._run(tree, names_list, rel_names, after)
204- finally:
205- tree.unlock()
206+ self.add_cleanup(tree.unlock)
207+ self._run(tree, names_list, rel_names, after)
208
209 def run_auto(self, names_list, after, dry_run):
210 if names_list is not None and len(names_list) > 1:
211@@ -825,10 +812,8 @@
212 ' --auto.')
213 work_tree, file_list = tree_files(names_list, default_branch='.')
214 work_tree.lock_tree_write()
215- try:
216- rename_map.RenameMap.guess_renames(work_tree, dry_run)
217- finally:
218- work_tree.unlock()
219+ self.add_cleanup(work_tree.unlock)
220+ rename_map.RenameMap.guess_renames(work_tree, dry_run)
221
222 def _run(self, tree, names_list, rel_names, after):
223 into_existing = osutils.isdir(names_list[-1])
224@@ -1012,34 +997,29 @@
225
226 if branch_from is not branch_to:
227 branch_from.lock_read()
228- try:
229- if revision is not None:
230- revision_id = revision.as_revision_id(branch_from)
231-
232- branch_to.lock_write()
233- try:
234- if tree_to is not None:
235- view_info = _get_view_info_for_change_reporter(tree_to)
236- change_reporter = delta._ChangeReporter(
237- unversioned_filter=tree_to.is_ignored,
238- view_info=view_info)
239- result = tree_to.pull(
240- branch_from, overwrite, revision_id, change_reporter,
241- possible_transports=possible_transports, local=local)
242- else:
243- result = branch_to.pull(
244- branch_from, overwrite, revision_id, local=local)
245-
246- result.report(self.outf)
247- if verbose and result.old_revid != result.new_revid:
248- log.show_branch_change(
249- branch_to, self.outf, result.old_revno,
250- result.old_revid)
251- finally:
252- branch_to.unlock()
253- finally:
254- if branch_from is not branch_to:
255- branch_from.unlock()
256+ self.add_cleanup(branch_from.unlock)
257+ if revision is not None:
258+ revision_id = revision.as_revision_id(branch_from)
259+
260+ branch_to.lock_write()
261+ self.add_cleanup(branch_to.unlock)
262+ if tree_to is not None:
263+ view_info = _get_view_info_for_change_reporter(tree_to)
264+ change_reporter = delta._ChangeReporter(
265+ unversioned_filter=tree_to.is_ignored,
266+ view_info=view_info)
267+ result = tree_to.pull(
268+ branch_from, overwrite, revision_id, change_reporter,
269+ possible_transports=possible_transports, local=local)
270+ else:
271+ result = branch_to.pull(
272+ branch_from, overwrite, revision_id, local=local)
273+
274+ result.report(self.outf)
275+ if verbose and result.old_revid != result.new_revid:
276+ log.show_branch_change(
277+ branch_to, self.outf, result.old_revno,
278+ result.old_revid)
279
280
281 class cmd_push(Command):
282@@ -1212,66 +1192,64 @@
283 from_location)
284 revision = _get_one_revision('branch', revision)
285 br_from.lock_read()
286+ self.add_cleanup(br_from.unlock)
287+ if revision is not None:
288+ revision_id = revision.as_revision_id(br_from)
289+ else:
290+ # FIXME - wt.last_revision, fallback to branch, fall back to
291+ # None or perhaps NULL_REVISION to mean copy nothing
292+ # RBC 20060209
293+ revision_id = br_from.last_revision()
294+ if to_location is None:
295+ to_location = urlutils.derive_to_location(from_location)
296+ to_transport = transport.get_transport(to_location)
297 try:
298- if revision is not None:
299- revision_id = revision.as_revision_id(br_from)
300+ to_transport.mkdir('.')
301+ except errors.FileExists:
302+ if not use_existing_dir:
303+ raise errors.BzrCommandError('Target directory "%s" '
304+ 'already exists.' % to_location)
305 else:
306- # FIXME - wt.last_revision, fallback to branch, fall back to
307- # None or perhaps NULL_REVISION to mean copy nothing
308- # RBC 20060209
309- revision_id = br_from.last_revision()
310- if to_location is None:
311- to_location = urlutils.derive_to_location(from_location)
312- to_transport = transport.get_transport(to_location)
313- try:
314- to_transport.mkdir('.')
315- except errors.FileExists:
316- if not use_existing_dir:
317- raise errors.BzrCommandError('Target directory "%s" '
318- 'already exists.' % to_location)
319+ try:
320+ bzrdir.BzrDir.open_from_transport(to_transport)
321+ except errors.NotBranchError:
322+ pass
323 else:
324- try:
325- bzrdir.BzrDir.open_from_transport(to_transport)
326- except errors.NotBranchError:
327- pass
328- else:
329- raise errors.AlreadyBranchError(to_location)
330- except errors.NoSuchFile:
331- raise errors.BzrCommandError('Parent of "%s" does not exist.'
332- % to_location)
333- try:
334- # preserve whatever source format we have.
335- dir = br_from.bzrdir.sprout(to_transport.base, revision_id,
336- possible_transports=[to_transport],
337- accelerator_tree=accelerator_tree,
338- hardlink=hardlink, stacked=stacked,
339- force_new_repo=standalone,
340- create_tree_if_local=not no_tree,
341- source_branch=br_from)
342- branch = dir.open_branch()
343- except errors.NoSuchRevision:
344- to_transport.delete_tree('.')
345- msg = "The branch %s has no revision %s." % (from_location,
346- revision)
347- raise errors.BzrCommandError(msg)
348- _merge_tags_if_possible(br_from, branch)
349- # If the source branch is stacked, the new branch may
350- # be stacked whether we asked for that explicitly or not.
351- # We therefore need a try/except here and not just 'if stacked:'
352- try:
353- note('Created new stacked branch referring to %s.' %
354- branch.get_stacked_on_url())
355- except (errors.NotStacked, errors.UnstackableBranchFormat,
356- errors.UnstackableRepositoryFormat), e:
357- note('Branched %d revision(s).' % branch.revno())
358- if switch:
359- # Switch to the new branch
360- wt, _ = WorkingTree.open_containing('.')
361- _mod_switch.switch(wt.bzrdir, branch)
362- note('Switched to branch: %s',
363- urlutils.unescape_for_display(branch.base, 'utf-8'))
364- finally:
365- br_from.unlock()
366+ raise errors.AlreadyBranchError(to_location)
367+ except errors.NoSuchFile:
368+ raise errors.BzrCommandError('Parent of "%s" does not exist.'
369+ % to_location)
370+ try:
371+ # preserve whatever source format we have.
372+ dir = br_from.bzrdir.sprout(to_transport.base, revision_id,
373+ possible_transports=[to_transport],
374+ accelerator_tree=accelerator_tree,
375+ hardlink=hardlink, stacked=stacked,
376+ force_new_repo=standalone,
377+ create_tree_if_local=not no_tree,
378+ source_branch=br_from)
379+ branch = dir.open_branch()
380+ except errors.NoSuchRevision:
381+ to_transport.delete_tree('.')
382+ msg = "The branch %s has no revision %s." % (from_location,
383+ revision)
384+ raise errors.BzrCommandError(msg)
385+ _merge_tags_if_possible(br_from, branch)
386+ # If the source branch is stacked, the new branch may
387+ # be stacked whether we asked for that explicitly or not.
388+ # We therefore need a try/except here and not just 'if stacked:'
389+ try:
390+ note('Created new stacked branch referring to %s.' %
391+ branch.get_stacked_on_url())
392+ except (errors.NotStacked, errors.UnstackableBranchFormat,
393+ errors.UnstackableRepositoryFormat), e:
394+ note('Branched %d revision(s).' % branch.revno())
395+ if switch:
396+ # Switch to the new branch
397+ wt, _ = WorkingTree.open_containing('.')
398+ _mod_switch.switch(wt.bzrdir, branch)
399+ note('Switched to branch: %s',
400+ urlutils.unescape_for_display(branch.base, 'utf-8'))
401
402
403 class cmd_checkout(Command):
404@@ -1356,27 +1334,23 @@
405 def run(self, dir=u'.'):
406 tree = WorkingTree.open_containing(dir)[0]
407 tree.lock_read()
408- try:
409- new_inv = tree.inventory
410- old_tree = tree.basis_tree()
411- old_tree.lock_read()
412- try:
413- old_inv = old_tree.inventory
414- renames = []
415- iterator = tree.iter_changes(old_tree, include_unchanged=True)
416- for f, paths, c, v, p, n, k, e in iterator:
417- if paths[0] == paths[1]:
418- continue
419- if None in (paths):
420- continue
421- renames.append(paths)
422- renames.sort()
423- for old_name, new_name in renames:
424- self.outf.write("%s => %s\n" % (old_name, new_name))
425- finally:
426- old_tree.unlock()
427- finally:
428- tree.unlock()
429+ self.add_cleanup(tree.unlock)
430+ new_inv = tree.inventory
431+ old_tree = tree.basis_tree()
432+ old_tree.lock_read()
433+ self.add_cleanup(old_tree.unlock)
434+ old_inv = old_tree.inventory
435+ renames = []
436+ iterator = tree.iter_changes(old_tree, include_unchanged=True)
437+ for f, paths, c, v, p, n, k, e in iterator:
438+ if paths[0] == paths[1]:
439+ continue
440+ if None in (paths):
441+ continue
442+ renames.append(paths)
443+ renames.sort()
444+ for old_name, new_name in renames:
445+ self.outf.write("%s => %s\n" % (old_name, new_name))
446
447
448 class cmd_update(Command):
449@@ -1413,56 +1387,54 @@
450 else:
451 tree.lock_tree_write()
452 branch_location = tree.branch.base
453+ self.add_cleanup(tree.unlock)
454 # get rid of the final '/' and be ready for display
455 branch_location = urlutils.unescape_for_display(branch_location[:-1],
456 self.outf.encoding)
457+ existing_pending_merges = tree.get_parent_ids()[1:]
458+ if master is None:
459+ old_tip = None
460+ else:
461+ # may need to fetch data into a heavyweight checkout
462+ # XXX: this may take some time, maybe we should display a
463+ # message
464+ old_tip = branch.update(possible_transports)
465+ if revision is not None:
466+ revision_id = revision[0].as_revision_id(branch)
467+ else:
468+ revision_id = branch.last_revision()
469+ if revision_id == _mod_revision.ensure_null(tree.last_revision()):
470+ revno = branch.revision_id_to_revno(revision_id)
471+ note("Tree is up to date at revision %d of branch %s" %
472+ (revno, branch_location))
473+ return 0
474+ view_info = _get_view_info_for_change_reporter(tree)
475+ change_reporter = delta._ChangeReporter(
476+ unversioned_filter=tree.is_ignored,
477+ view_info=view_info)
478 try:
479- existing_pending_merges = tree.get_parent_ids()[1:]
480- if master is None:
481- old_tip = None
482- else:
483- # may need to fetch data into a heavyweight checkout
484- # XXX: this may take some time, maybe we should display a
485- # message
486- old_tip = branch.update(possible_transports)
487- if revision is not None:
488- revision_id = revision[0].as_revision_id(branch)
489- else:
490- revision_id = branch.last_revision()
491- if revision_id == _mod_revision.ensure_null(tree.last_revision()):
492- revno = branch.revision_id_to_revno(revision_id)
493- note("Tree is up to date at revision %d of branch %s" %
494- (revno, branch_location))
495- return 0
496- view_info = _get_view_info_for_change_reporter(tree)
497- change_reporter = delta._ChangeReporter(
498- unversioned_filter=tree.is_ignored,
499- view_info=view_info)
500- try:
501- conflicts = tree.update(
502- change_reporter,
503- possible_transports=possible_transports,
504- revision=revision_id,
505- old_tip=old_tip)
506- except errors.NoSuchRevision, e:
507- raise errors.BzrCommandError(
508- "branch has no revision %s\n"
509- "bzr update --revision only works"
510- " for a revision in the branch history"
511- % (e.revision))
512- revno = tree.branch.revision_id_to_revno(
513- _mod_revision.ensure_null(tree.last_revision()))
514- note('Updated to revision %d of branch %s' %
515- (revno, branch_location))
516- if tree.get_parent_ids()[1:] != existing_pending_merges:
517- note('Your local commits will now show as pending merges with '
518- "'bzr status', and can be committed with 'bzr commit'.")
519- if conflicts != 0:
520- return 1
521- else:
522- return 0
523- finally:
524- tree.unlock()
525+ conflicts = tree.update(
526+ change_reporter,
527+ possible_transports=possible_transports,
528+ revision=revision_id,
529+ old_tip=old_tip)
530+ except errors.NoSuchRevision, e:
531+ raise errors.BzrCommandError(
532+ "branch has no revision %s\n"
533+ "bzr update --revision only works"
534+ " for a revision in the branch history"
535+ % (e.revision))
536+ revno = tree.branch.revision_id_to_revno(
537+ _mod_revision.ensure_null(tree.last_revision()))
538+ note('Updated to revision %d of branch %s' %
539+ (revno, branch_location))
540+ if tree.get_parent_ids()[1:] != existing_pending_merges:
541+ note('Your local commits will now show as pending merges with '
542+ "'bzr status', and can be committed with 'bzr commit'.")
543+ if conflicts != 0:
544+ return 1
545+ else:
546+ return 0
547
548
549 class cmd_info(Command):
550@@ -1539,30 +1511,28 @@
551 file_list = [f for f in file_list]
552
553 tree.lock_write()
554- try:
555- # Heuristics should probably all move into tree.remove_smart or
556- # some such?
557- if new:
558- added = tree.changes_from(tree.basis_tree(),
559- specific_files=file_list).added
560- file_list = sorted([f[0] for f in added], reverse=True)
561- if len(file_list) == 0:
562- raise errors.BzrCommandError('No matching files.')
563- elif file_list is None:
564- # missing files show up in iter_changes(basis) as
565- # versioned-with-no-kind.
566- missing = []
567- for change in tree.iter_changes(tree.basis_tree()):
568- # Find paths in the working tree that have no kind:
569- if change[1][1] is not None and change[6][1] is None:
570- missing.append(change[1][1])
571- file_list = sorted(missing, reverse=True)
572- file_deletion_strategy = 'keep'
573- tree.remove(file_list, verbose=verbose, to_file=self.outf,
574- keep_files=file_deletion_strategy=='keep',
575- force=file_deletion_strategy=='force')
576- finally:
577- tree.unlock()
578+ self.add_cleanup(tree.unlock)
579+ # Heuristics should probably all move into tree.remove_smart or
580+ # some such?
581+ if new:
582+ added = tree.changes_from(tree.basis_tree(),
583+ specific_files=file_list).added
584+ file_list = sorted([f[0] for f in added], reverse=True)
585+ if len(file_list) == 0:
586+ raise errors.BzrCommandError('No matching files.')
587+ elif file_list is None:
588+ # missing files show up in iter_changes(basis) as
589+ # versioned-with-no-kind.
590+ missing = []
591+ for change in tree.iter_changes(tree.basis_tree()):
592+ # Find paths in the working tree that have no kind:
593+ if change[1][1] is not None and change[6][1] is None:
594+ missing.append(change[1][1])
595+ file_list = sorted(missing, reverse=True)
596+ file_deletion_strategy = 'keep'
597+ tree.remove(file_list, verbose=verbose, to_file=self.outf,
598+ keep_files=file_deletion_strategy=='keep',
599+ force=file_deletion_strategy=='force')
600
601
602 class cmd_file_id(Command):
603@@ -1994,21 +1964,17 @@
604 def run(self, show_ids=False):
605 tree = WorkingTree.open_containing(u'.')[0]
606 tree.lock_read()
607- try:
608- old = tree.basis_tree()
609- old.lock_read()
610- try:
611- for path, ie in old.inventory.iter_entries():
612- if not tree.has_id(ie.file_id):
613- self.outf.write(path)
614- if show_ids:
615- self.outf.write(' ')
616- self.outf.write(ie.file_id)
617- self.outf.write('\n')
618- finally:
619- old.unlock()
620- finally:
621- tree.unlock()
622+ self.add_cleanup(tree.unlock)
623+ old = tree.basis_tree()
624+ old.lock_read()
625+ self.add_cleanup(old.unlock)
626+ for path, ie in old.inventory.iter_entries():
627+ if not tree.has_id(ie.file_id):
628+ self.outf.write(path)
629+ if show_ids:
630+ self.outf.write(' ')
631+ self.outf.write(ie.file_id)
632+ self.outf.write('\n')
633
634
635 class cmd_modified(Command):
636@@ -2050,28 +2016,24 @@
637 def run(self, null=False):
638 wt = WorkingTree.open_containing(u'.')[0]
639 wt.lock_read()
640- try:
641- basis = wt.basis_tree()
642- basis.lock_read()
643- try:
644- basis_inv = basis.inventory
645- inv = wt.inventory
646- for file_id in inv:
647- if file_id in basis_inv:
648- continue
649- if inv.is_root(file_id) and len(basis_inv) == 0:
650- continue
651- path = inv.id2path(file_id)
652- if not os.access(osutils.abspath(path), os.F_OK):
653- continue
654- if null:
655- self.outf.write(path + '\0')
656- else:
657- self.outf.write(osutils.quotefn(path) + '\n')
658- finally:
659- basis.unlock()
660- finally:
661- wt.unlock()
662+ self.add_cleanup(wt.unlock)
663+ basis = wt.basis_tree()
664+ basis.lock_read()
665+ self.add_cleanup(basis.unlock)
666+ basis_inv = basis.inventory
667+ inv = wt.inventory
668+ for file_id in inv:
669+ if file_id in basis_inv:
670+ continue
671+ if inv.is_root(file_id) and len(basis_inv) == 0:
672+ continue
673+ path = inv.id2path(file_id)
674+ if not os.access(osutils.abspath(path), os.F_OK):
675+ continue
676+ if null:
677+ self.outf.write(path + '\0')
678+ else:
679+ self.outf.write(osutils.quotefn(path) + '\n')
680
681
682 class cmd_root(Command):
683@@ -2335,91 +2297,88 @@
684
685 file_ids = []
686 filter_by_dir = False
687- b = None
688- try:
689- if file_list:
690- # find the file ids to log and check for directory filtering
691- b, file_info_list, rev1, rev2 = _get_info_for_log_files(
692- revision, file_list)
693- for relpath, file_id, kind in file_info_list:
694- if file_id is None:
695- raise errors.BzrCommandError(
696- "Path unknown at end or start of revision range: %s" %
697- relpath)
698- # If the relpath is the top of the tree, we log everything
699- if relpath == '':
700- file_ids = []
701- break
702- else:
703- file_ids.append(file_id)
704- filter_by_dir = filter_by_dir or (
705- kind in ['directory', 'tree-reference'])
706- else:
707- # log everything
708- # FIXME ? log the current subdir only RBC 20060203
709- if revision is not None \
710- and len(revision) > 0 and revision[0].get_branch():
711- location = revision[0].get_branch()
712+ if file_list:
713+ # find the file ids to log and check for directory filtering
714+ b, file_info_list, rev1, rev2 = _get_info_for_log_files(
715+ revision, file_list)
716+ self.add_cleanup(b.unlock)
717+ for relpath, file_id, kind in file_info_list:
718+ if file_id is None:
719+ raise errors.BzrCommandError(
720+ "Path unknown at end or start of revision range: %s" %
721+ relpath)
722+ # If the relpath is the top of the tree, we log everything
723+ if relpath == '':
724+ file_ids = []
725+ break
726 else:
727- location = '.'
728- dir, relpath = bzrdir.BzrDir.open_containing(location)
729- b = dir.open_branch()
730- b.lock_read()
731- rev1, rev2 = _get_revision_range(revision, b, self.name())
732-
733- # Decide on the type of delta & diff filtering to use
734- # TODO: add an --all-files option to make this configurable & consistent
735- if not verbose:
736- delta_type = None
737- else:
738- delta_type = 'full'
739- if not show_diff:
740- diff_type = None
741- elif file_ids:
742- diff_type = 'partial'
743- else:
744- diff_type = 'full'
745-
746- # Build the log formatter
747- if log_format is None:
748- log_format = log.log_formatter_registry.get_default(b)
749- # Make a non-encoding output to include the diffs - bug 328007
750- unencoded_output = ui.ui_factory.make_output_stream(encoding_type='exact')
751- lf = log_format(show_ids=show_ids, to_file=self.outf,
752- to_exact_file=unencoded_output,
753- show_timezone=timezone,
754- delta_format=get_verbosity_level(),
755- levels=levels,
756- show_advice=levels is None)
757-
758- # Choose the algorithm for doing the logging. It's annoying
759- # having multiple code paths like this but necessary until
760- # the underlying repository format is faster at generating
761- # deltas or can provide everything we need from the indices.
762- # The default algorithm - match-using-deltas - works for
763- # multiple files and directories and is faster for small
764- # amounts of history (200 revisions say). However, it's too
765- # slow for logging a single file in a repository with deep
766- # history, i.e. > 10K revisions. In the spirit of "do no
767- # evil when adding features", we continue to use the
768- # original algorithm - per-file-graph - for the "single
769- # file that isn't a directory without showing a delta" case.
770- partial_history = revision and b.repository._format.supports_chks
771- match_using_deltas = (len(file_ids) != 1 or filter_by_dir
772- or delta_type or partial_history)
773-
774- # Build the LogRequest and execute it
775- if len(file_ids) == 0:
776- file_ids = None
777- rqst = make_log_request_dict(
778- direction=direction, specific_fileids=file_ids,
779- start_revision=rev1, end_revision=rev2, limit=limit,
780- message_search=message, delta_type=delta_type,
781- diff_type=diff_type, _match_using_deltas=match_using_deltas)
782- Logger(b, rqst).show(lf)
783- finally:
784- if b is not None:
785- b.unlock()
786+ file_ids.append(file_id)
787+ filter_by_dir = filter_by_dir or (
788+ kind in ['directory', 'tree-reference'])
789+ else:
790+ # log everything
791+ # FIXME ? log the current subdir only RBC 20060203
792+ if revision is not None \
793+ and len(revision) > 0 and revision[0].get_branch():
794+ location = revision[0].get_branch()
795+ else:
796+ location = '.'
797+ dir, relpath = bzrdir.BzrDir.open_containing(location)
798+ b = dir.open_branch()
799+ b.lock_read()
800+ self.add_cleanup(b.unlock)
801+ rev1, rev2 = _get_revision_range(revision, b, self.name())
802+
803+ # Decide on the type of delta & diff filtering to use
804+ # TODO: add an --all-files option to make this configurable & consistent
805+ if not verbose:
806+ delta_type = None
807+ else:
808+ delta_type = 'full'
809+ if not show_diff:
810+ diff_type = None
811+ elif file_ids:
812+ diff_type = 'partial'
813+ else:
814+ diff_type = 'full'
815+
816+ # Build the log formatter
817+ if log_format is None:
818+ log_format = log.log_formatter_registry.get_default(b)
819+ # Make a non-encoding output to include the diffs - bug 328007
820+ unencoded_output = ui.ui_factory.make_output_stream(encoding_type='exact')
821+ lf = log_format(show_ids=show_ids, to_file=self.outf,
822+ to_exact_file=unencoded_output,
823+ show_timezone=timezone,
824+ delta_format=get_verbosity_level(),
825+ levels=levels,
826+ show_advice=levels is None)
827+
828+ # Choose the algorithm for doing the logging. It's annoying
829+ # having multiple code paths like this but necessary until
830+ # the underlying repository format is faster at generating
831+ # deltas or can provide everything we need from the indices.
832+ # The default algorithm - match-using-deltas - works for
833+ # multiple files and directories and is faster for small
834+ # amounts of history (200 revisions say). However, it's too
835+ # slow for logging a single file in a repository with deep
836+ # history, i.e. > 10K revisions. In the spirit of "do no
837+ # evil when adding features", we continue to use the
838+ # original algorithm - per-file-graph - for the "single
839+ # file that isn't a directory without showing a delta" case.
840+ partial_history = revision and b.repository._format.supports_chks
841+ match_using_deltas = (len(file_ids) != 1 or filter_by_dir
842+ or delta_type or partial_history)
843+
844+ # Build the LogRequest and execute it
845+ if len(file_ids) == 0:
846+ file_ids = None
847+ rqst = make_log_request_dict(
848+ direction=direction, specific_fileids=file_ids,
849+ start_revision=rev1, end_revision=rev2, limit=limit,
850+ message_search=message, delta_type=delta_type,
851+ diff_type=diff_type, _match_using_deltas=match_using_deltas)
852+ Logger(b, rqst).show(lf)
853
854
855 def _get_revision_range(revisionspec_list, branch, command_name):
856@@ -2492,12 +2451,10 @@
857 file_id = tree.path2id(relpath)
858 b = tree.branch
859 b.lock_read()
860- try:
861- touching_revs = log.find_touching_revisions(b, file_id)
862- for revno, revision_id, what in touching_revs:
863- self.outf.write("%6d %s\n" % (revno, what))
864- finally:
865- b.unlock()
866+ self.add_cleanup(b.unlock)
867+ touching_revs = log.find_touching_revisions(b, file_id)
868+ for revno, revision_id, what in touching_revs:
869+ self.outf.write("%6d %s\n" % (revno, what))
870
871
872 class cmd_ls(Command):
873@@ -2570,53 +2527,51 @@
874 note("Ignoring files outside view. View is %s" % view_str)
875
876 tree.lock_read()
877- try:
878- for fp, fc, fkind, fid, entry in tree.list_files(include_root=False,
879- from_dir=relpath, recursive=recursive):
880- # Apply additional masking
881- if not all and not selection[fc]:
882- continue
883- if kind is not None and fkind != kind:
884- continue
885- if apply_view:
886- try:
887- if relpath:
888- fullpath = osutils.pathjoin(relpath, fp)
889- else:
890- fullpath = fp
891- views.check_path_in_view(tree, fullpath)
892- except errors.FileOutsideView:
893- continue
894+ self.add_cleanup(tree.unlock)
895+ for fp, fc, fkind, fid, entry in tree.list_files(include_root=False,
896+ from_dir=relpath, recursive=recursive):
897+ # Apply additional masking
898+ if not all and not selection[fc]:
899+ continue
900+ if kind is not None and fkind != kind:
901+ continue
902+ if apply_view:
903+ try:
904+ if relpath:
905+ fullpath = osutils.pathjoin(relpath, fp)
906+ else:
907+ fullpath = fp
908+ views.check_path_in_view(tree, fullpath)
909+ except errors.FileOutsideView:
910+ continue
911
912- # Output the entry
913- if prefix:
914- fp = osutils.pathjoin(prefix, fp)
915- kindch = entry.kind_character()
916- outstring = fp + kindch
917- ui.ui_factory.clear_term()
918- if verbose:
919- outstring = '%-8s %s' % (fc, outstring)
920- if show_ids and fid is not None:
921- outstring = "%-50s %s" % (outstring, fid)
922+ # Output the entry
923+ if prefix:
924+ fp = osutils.pathjoin(prefix, fp)
925+ kindch = entry.kind_character()
926+ outstring = fp + kindch
927+ ui.ui_factory.clear_term()
928+ if verbose:
929+ outstring = '%-8s %s' % (fc, outstring)
930+ if show_ids and fid is not None:
931+ outstring = "%-50s %s" % (outstring, fid)
932+ self.outf.write(outstring + '\n')
933+ elif null:
934+ self.outf.write(fp + '\0')
935+ if show_ids:
936+ if fid is not None:
937+ self.outf.write(fid)
938+ self.outf.write('\0')
939+ self.outf.flush()
940+ else:
941+ if show_ids:
942+ if fid is not None:
943+ my_id = fid
944+ else:
945+ my_id = ''
946+ self.outf.write('%-50s %s\n' % (outstring, my_id))
947+ else:
948 self.outf.write(outstring + '\n')
949- elif null:
950- self.outf.write(fp + '\0')
951- if show_ids:
952- if fid is not None:
953- self.outf.write(fid)
954- self.outf.write('\0')
955- self.outf.flush()
956- else:
957- if show_ids:
958- if fid is not None:
959- my_id = fid
960- else:
961- my_id = ''
962- self.outf.write('%-50s %s\n' % (outstring, my_id))
963- else:
964- self.outf.write(outstring + '\n')
965- finally:
966- tree.unlock()
967
968
969 class cmd_unknowns(Command):
970@@ -2734,15 +2689,13 @@
971 def run(self):
972 tree = WorkingTree.open_containing(u'.')[0]
973 tree.lock_read()
974- try:
975- for path, file_class, kind, file_id, entry in tree.list_files():
976- if file_class != 'I':
977- continue
978- ## XXX: Slightly inefficient since this was already calculated
979- pat = tree.is_ignored(path)
980- self.outf.write('%-50s %s\n' % (path, pat))
981- finally:
982- tree.unlock()
983+ self.add_cleanup(tree.unlock)
984+ for path, file_class, kind, file_id, entry in tree.list_files():
985+ if file_class != 'I':
986+ continue
987+ ## XXX: Slightly inefficient since this was already calculated
988+ pat = tree.is_ignored(path)
989+ self.outf.write('%-50s %s\n' % (path, pat))
990
991
992 class cmd_lookup_revision(Command):
993@@ -2851,11 +2804,9 @@
994 tree, branch, relpath = \
995 bzrdir.BzrDir.open_containing_tree_or_branch(filename)
996 branch.lock_read()
997- try:
998- return self._run(tree, branch, relpath, filename, revision,
999- name_from_revision, filters)
1000- finally:
1001- branch.unlock()
1002+ self.add_cleanup(branch.unlock)
1003+ return self._run(tree, branch, relpath, filename, revision,
1004+ name_from_revision, filters)
1005
1006 def _run(self, tree, b, relpath, filename, revision, name_from_revision,
1007 filtered):
1008@@ -3570,32 +3521,29 @@
1009 verbose = not is_quiet()
1010 # TODO: should possibly lock the history file...
1011 benchfile = open(".perf_history", "at", buffering=1)
1012+ self.add_cleanup(benchfile.close)
1013 else:
1014 test_suite_factory = None
1015 benchfile = None
1016- try:
1017- selftest_kwargs = {"verbose": verbose,
1018- "pattern": pattern,
1019- "stop_on_failure": one,
1020- "transport": transport,
1021- "test_suite_factory": test_suite_factory,
1022- "lsprof_timed": lsprof_timed,
1023- "lsprof_tests": lsprof_tests,
1024- "bench_history": benchfile,
1025- "matching_tests_first": first,
1026- "list_only": list_only,
1027- "random_seed": randomize,
1028- "exclude_pattern": exclude,
1029- "strict": strict,
1030- "load_list": load_list,
1031- "debug_flags": debugflag,
1032- "starting_with": starting_with
1033- }
1034- selftest_kwargs.update(self.additional_selftest_args)
1035- result = selftest(**selftest_kwargs)
1036- finally:
1037- if benchfile is not None:
1038- benchfile.close()
1039+ selftest_kwargs = {"verbose": verbose,
1040+ "pattern": pattern,
1041+ "stop_on_failure": one,
1042+ "transport": transport,
1043+ "test_suite_factory": test_suite_factory,
1044+ "lsprof_timed": lsprof_timed,
1045+ "lsprof_tests": lsprof_tests,
1046+ "bench_history": benchfile,
1047+ "matching_tests_first": first,
1048+ "list_only": list_only,
1049+ "random_seed": randomize,
1050+ "exclude_pattern": exclude,
1051+ "strict": strict,
1052+ "load_list": load_list,
1053+ "debug_flags": debugflag,
1054+ "starting_with": starting_with
1055+ }
1056+ selftest_kwargs.update(self.additional_selftest_args)
1057+ result = selftest(**selftest_kwargs)
1058 return int(not result)
1059
1060
1061@@ -3640,20 +3588,16 @@
1062 branch1 = Branch.open_containing(branch)[0]
1063 branch2 = Branch.open_containing(other)[0]
1064 branch1.lock_read()
1065- try:
1066- branch2.lock_read()
1067- try:
1068- last1 = ensure_null(branch1.last_revision())
1069- last2 = ensure_null(branch2.last_revision())
1070-
1071- graph = branch1.repository.get_graph(branch2.repository)
1072- base_rev_id = graph.find_unique_lca(last1, last2)
1073-
1074- print 'merge base is revision %s' % base_rev_id
1075- finally:
1076- branch2.unlock()
1077- finally:
1078- branch1.unlock()
1079+ self.add_cleanup(branch1.unlock)
1080+ branch2.lock_read()
1081+ self.add_cleanup(branch2.unlock)
1082+ last1 = ensure_null(branch1.last_revision())
1083+ last2 = ensure_null(branch2.last_revision())
1084+
1085+ graph = branch1.repository.get_graph(branch2.repository)
1086+ base_rev_id = graph.find_unique_lca(last1, last2)
1087+
1088+ print 'merge base is revision %s' % base_rev_id
1089
1090
1091 class cmd_merge(Command):
1092@@ -3776,82 +3720,76 @@
1093 view_info = _get_view_info_for_change_reporter(tree)
1094 change_reporter = delta._ChangeReporter(
1095 unversioned_filter=tree.is_ignored, view_info=view_info)
1096- cleanups = []
1097- try:
1098- pb = ui.ui_factory.nested_progress_bar()
1099- cleanups.append(pb.finished)
1100- tree.lock_write()
1101- cleanups.append(tree.unlock)
1102- if location is not None:
1103- try:
1104- mergeable = bundle.read_mergeable_from_url(location,
1105- possible_transports=possible_transports)
1106- except errors.NotABundle:
1107- mergeable = None
1108- else:
1109- if uncommitted:
1110- raise errors.BzrCommandError('Cannot use --uncommitted'
1111- ' with bundles or merge directives.')
1112-
1113- if revision is not None:
1114- raise errors.BzrCommandError(
1115- 'Cannot use -r with merge directives or bundles')
1116- merger, verified = _mod_merge.Merger.from_mergeable(tree,
1117- mergeable, pb)
1118-
1119- if merger is None and uncommitted:
1120- if revision is not None and len(revision) > 0:
1121- raise errors.BzrCommandError('Cannot use --uncommitted and'
1122- ' --revision at the same time.')
1123- merger = self.get_merger_from_uncommitted(tree, location, pb,
1124- cleanups)
1125- allow_pending = False
1126-
1127- if merger is None:
1128- merger, allow_pending = self._get_merger_from_branch(tree,
1129- location, revision, remember, possible_transports, pb)
1130-
1131- merger.merge_type = merge_type
1132- merger.reprocess = reprocess
1133- merger.show_base = show_base
1134- self.sanity_check_merger(merger)
1135- if (merger.base_rev_id == merger.other_rev_id and
1136- merger.other_rev_id is not None):
1137- note('Nothing to do.')
1138+ pb = ui.ui_factory.nested_progress_bar()
1139+ self.add_cleanup(pb.finished)
1140+ tree.lock_write()
1141+ self.add_cleanup(tree.unlock)
1142+ if location is not None:
1143+ try:
1144+ mergeable = bundle.read_mergeable_from_url(location,
1145+ possible_transports=possible_transports)
1146+ except errors.NotABundle:
1147+ mergeable = None
1148+ else:
1149+ if uncommitted:
1150+ raise errors.BzrCommandError('Cannot use --uncommitted'
1151+ ' with bundles or merge directives.')
1152+
1153+ if revision is not None:
1154+ raise errors.BzrCommandError(
1155+ 'Cannot use -r with merge directives or bundles')
1156+ merger, verified = _mod_merge.Merger.from_mergeable(tree,
1157+ mergeable, pb)
1158+
1159+ if merger is None and uncommitted:
1160+ if revision is not None and len(revision) > 0:
1161+ raise errors.BzrCommandError('Cannot use --uncommitted and'
1162+ ' --revision at the same time.')
1163+ merger = self.get_merger_from_uncommitted(tree, location, pb)
1164+ allow_pending = False
1165+
1166+ if merger is None:
1167+ merger, allow_pending = self._get_merger_from_branch(tree,
1168+ location, revision, remember, possible_transports, pb)
1169+
1170+ merger.merge_type = merge_type
1171+ merger.reprocess = reprocess
1172+ merger.show_base = show_base
1173+ self.sanity_check_merger(merger)
1174+ if (merger.base_rev_id == merger.other_rev_id and
1175+ merger.other_rev_id is not None):
1176+ note('Nothing to do.')
1177+ return 0
1178+ if pull:
1179+ if merger.interesting_files is not None:
1180+ raise errors.BzrCommandError('Cannot pull individual files')
1181+ if (merger.base_rev_id == tree.last_revision()):
1182+ result = tree.pull(merger.other_branch, False,
1183+ merger.other_rev_id)
1184+ result.report(self.outf)
1185 return 0
1186- if pull:
1187- if merger.interesting_files is not None:
1188- raise errors.BzrCommandError('Cannot pull individual files')
1189- if (merger.base_rev_id == tree.last_revision()):
1190- result = tree.pull(merger.other_branch, False,
1191- merger.other_rev_id)
1192- result.report(self.outf)
1193- return 0
1194- if merger.this_basis is None:
1195- raise errors.BzrCommandError(
1196- "This branch has no commits."
1197- " (perhaps you would prefer 'bzr pull')")
1198- if preview:
1199- return self._do_preview(merger, cleanups)
1200- elif interactive:
1201- return self._do_interactive(merger, cleanups)
1202- else:
1203- return self._do_merge(merger, change_reporter, allow_pending,
1204- verified)
1205- finally:
1206- for cleanup in reversed(cleanups):
1207- cleanup()
1208+ if merger.this_basis is None:
1209+ raise errors.BzrCommandError(
1210+ "This branch has no commits."
1211+ " (perhaps you would prefer 'bzr pull')")
1212+ if preview:
1213+ return self._do_preview(merger)
1214+ elif interactive:
1215+ return self._do_interactive(merger)
1216+ else:
1217+ return self._do_merge(merger, change_reporter, allow_pending,
1218+ verified)
1219
1220- def _get_preview(self, merger, cleanups):
1221+ def _get_preview(self, merger):
1222 tree_merger = merger.make_merger()
1223 tt = tree_merger.make_preview_transform()
1224- cleanups.append(tt.finalize)
1225+ self.add_cleanup(tt.finalize)
1226 result_tree = tt.get_preview_tree()
1227 return result_tree
1228
1229- def _do_preview(self, merger, cleanups):
1230+ def _do_preview(self, merger):
1231 from bzrlib.diff import show_diff_trees
1232- result_tree = self._get_preview(merger, cleanups)
1233+ result_tree = self._get_preview(merger)
1234 show_diff_trees(merger.this_tree, result_tree, self.outf,
1235 old_label='', new_label='')
1236
1237@@ -3867,7 +3805,7 @@
1238 else:
1239 return 0
1240
1241- def _do_interactive(self, merger, cleanups):
1242+ def _do_interactive(self, merger):
1243 """Perform an interactive merge.
1244
1245 This works by generating a preview tree of the merge, then using
1246@@ -3875,7 +3813,7 @@
1247 and the preview tree.
1248 """
1249 from bzrlib import shelf_ui
1250- result_tree = self._get_preview(merger, cleanups)
1251+ result_tree = self._get_preview(merger)
1252 writer = bzrlib.option.diff_writer_registry.get()
1253 shelver = shelf_ui.Shelver(merger.this_tree, result_tree, destroy=True,
1254 reporter=shelf_ui.ApplyReporter(),
1255@@ -3949,14 +3887,12 @@
1256 allow_pending = True
1257 return merger, allow_pending
1258
1259- def get_merger_from_uncommitted(self, tree, location, pb, cleanups):
1260+ def get_merger_from_uncommitted(self, tree, location, pb):
1261 """Get a merger for uncommitted changes.
1262
1263 :param tree: The tree the merger should apply to.
1264 :param location: The location containing uncommitted changes.
1265 :param pb: The progress bar to use for showing progress.
1266- :param cleanups: A list of operations to perform to clean up the
1267- temporary directories, unfinalized objects, etc.
1268 """
1269 location = self._select_branch_location(tree, location)[0]
1270 other_tree, other_path = WorkingTree.open_containing(location)
1271@@ -4049,63 +3985,61 @@
1272 merge_type = _mod_merge.Merge3Merger
1273 tree, file_list = tree_files(file_list)
1274 tree.lock_write()
1275- try:
1276- parents = tree.get_parent_ids()
1277- if len(parents) != 2:
1278- raise errors.BzrCommandError("Sorry, remerge only works after normal"
1279- " merges. Not cherrypicking or"
1280- " multi-merges.")
1281- repository = tree.branch.repository
1282- interesting_ids = None
1283- new_conflicts = []
1284- conflicts = tree.conflicts()
1285- if file_list is not None:
1286- interesting_ids = set()
1287- for filename in file_list:
1288- file_id = tree.path2id(filename)
1289- if file_id is None:
1290- raise errors.NotVersionedError(filename)
1291- interesting_ids.add(file_id)
1292- if tree.kind(file_id) != "directory":
1293- continue
1294+ self.add_cleanup(tree.unlock)
1295+ parents = tree.get_parent_ids()
1296+ if len(parents) != 2:
1297+ raise errors.BzrCommandError("Sorry, remerge only works after normal"
1298+ " merges. Not cherrypicking or"
1299+ " multi-merges.")
1300+ repository = tree.branch.repository
1301+ interesting_ids = None
1302+ new_conflicts = []
1303+ conflicts = tree.conflicts()
1304+ if file_list is not None:
1305+ interesting_ids = set()
1306+ for filename in file_list:
1307+ file_id = tree.path2id(filename)
1308+ if file_id is None:
1309+ raise errors.NotVersionedError(filename)
1310+ interesting_ids.add(file_id)
1311+ if tree.kind(file_id) != "directory":
1312+ continue
1313
1314- for name, ie in tree.inventory.iter_entries(file_id):
1315- interesting_ids.add(ie.file_id)
1316- new_conflicts = conflicts.select_conflicts(tree, file_list)[0]
1317- else:
1318- # Remerge only supports resolving contents conflicts
1319- allowed_conflicts = ('text conflict', 'contents conflict')
1320- restore_files = [c.path for c in conflicts
1321- if c.typestring in allowed_conflicts]
1322- _mod_merge.transform_tree(tree, tree.basis_tree(), interesting_ids)
1323- tree.set_conflicts(ConflictList(new_conflicts))
1324- if file_list is not None:
1325- restore_files = file_list
1326- for filename in restore_files:
1327- try:
1328- restore(tree.abspath(filename))
1329- except errors.NotConflicted:
1330- pass
1331- # Disable pending merges, because the file texts we are remerging
1332- # have not had those merges performed. If we use the wrong parents
1333- # list, we imply that the working tree text has seen and rejected
1334- # all the changes from the other tree, when in fact those changes
1335- # have not yet been seen.
1336- pb = ui.ui_factory.nested_progress_bar()
1337- tree.set_parent_ids(parents[:1])
1338+ for name, ie in tree.inventory.iter_entries(file_id):
1339+ interesting_ids.add(ie.file_id)
1340+ new_conflicts = conflicts.select_conflicts(tree, file_list)[0]
1341+ else:
1342+ # Remerge only supports resolving contents conflicts
1343+ allowed_conflicts = ('text conflict', 'contents conflict')
1344+ restore_files = [c.path for c in conflicts
1345+ if c.typestring in allowed_conflicts]
1346+ _mod_merge.transform_tree(tree, tree.basis_tree(), interesting_ids)
1347+ tree.set_conflicts(ConflictList(new_conflicts))
1348+ if file_list is not None:
1349+ restore_files = file_list
1350+ for filename in restore_files:
1351 try:
1352- merger = _mod_merge.Merger.from_revision_ids(pb,
1353- tree, parents[1])
1354- merger.interesting_ids = interesting_ids
1355- merger.merge_type = merge_type
1356- merger.show_base = show_base
1357- merger.reprocess = reprocess
1358- conflicts = merger.do_merge()
1359- finally:
1360- tree.set_parent_ids(parents)
1361- pb.finished()
1362+ restore(tree.abspath(filename))
1363+ except errors.NotConflicted:
1364+ pass
1365+ # Disable pending merges, because the file texts we are remerging
1366+ # have not had those merges performed. If we use the wrong parents
1367+ # list, we imply that the working tree text has seen and rejected
1368+ # all the changes from the other tree, when in fact those changes
1369+ # have not yet been seen.
1370+ pb = ui.ui_factory.nested_progress_bar()
1371+ tree.set_parent_ids(parents[:1])
1372+ try:
1373+ merger = _mod_merge.Merger.from_revision_ids(pb,
1374+ tree, parents[1])
1375+ merger.interesting_ids = interesting_ids
1376+ merger.merge_type = merge_type
1377+ merger.show_base = show_base
1378+ merger.reprocess = reprocess
1379+ conflicts = merger.do_merge()
1380 finally:
1381- tree.unlock()
1382+ tree.set_parent_ids(parents)
1383+ pb.finished()
1384 if conflicts > 0:
1385 return 1
1386 else:
1387@@ -4170,13 +4104,11 @@
1388 forget_merges=None):
1389 tree, file_list = tree_files(file_list)
1390 tree.lock_write()
1391- try:
1392- if forget_merges:
1393- tree.set_parent_ids(tree.get_parent_ids()[:1])
1394- else:
1395- self._revert_tree_to_revision(tree, revision, file_list, no_backup)
1396- finally:
1397- tree.unlock()
1398+ self.add_cleanup(tree.unlock)
1399+ if forget_merges:
1400+ tree.set_parent_ids(tree.get_parent_ids()[:1])
1401+ else:
1402+ self._revert_tree_to_revision(tree, revision, file_list, no_backup)
1403
1404 @staticmethod
1405 def _revert_tree_to_revision(tree, revision, file_list, no_backup):
1406@@ -4333,79 +4265,74 @@
1407 if remote_branch.base == local_branch.base:
1408 remote_branch = local_branch
1409
1410+ local_branch.lock_read()
1411+ self.add_cleanup(local_branch.unlock)
1412 local_revid_range = _revision_range_to_revid_range(
1413 _get_revision_range(my_revision, local_branch,
1414 self.name()))
1415
1416+ remote_branch.lock_read()
1417+ self.add_cleanup(remote_branch.unlock)
1418 remote_revid_range = _revision_range_to_revid_range(
1419 _get_revision_range(revision,
1420 remote_branch, self.name()))
1421
1422- local_branch.lock_read()
1423- try:
1424- remote_branch.lock_read()
1425- try:
1426- local_extra, remote_extra = find_unmerged(
1427- local_branch, remote_branch, restrict,
1428- backward=not reverse,
1429- include_merges=include_merges,
1430- local_revid_range=local_revid_range,
1431- remote_revid_range=remote_revid_range)
1432-
1433- if log_format is None:
1434- registry = log.log_formatter_registry
1435- log_format = registry.get_default(local_branch)
1436- lf = log_format(to_file=self.outf,
1437- show_ids=show_ids,
1438- show_timezone='original')
1439-
1440- status_code = 0
1441- if local_extra and not theirs_only:
1442- message("You have %d extra revision(s):\n" %
1443- len(local_extra))
1444- for revision in iter_log_revisions(local_extra,
1445- local_branch.repository,
1446- verbose):
1447- lf.log_revision(revision)
1448- printed_local = True
1449- status_code = 1
1450- else:
1451- printed_local = False
1452-
1453- if remote_extra and not mine_only:
1454- if printed_local is True:
1455- message("\n\n\n")
1456- message("You are missing %d revision(s):\n" %
1457- len(remote_extra))
1458- for revision in iter_log_revisions(remote_extra,
1459- remote_branch.repository,
1460- verbose):
1461- lf.log_revision(revision)
1462- status_code = 1
1463-
1464- if mine_only and not local_extra:
1465- # We checked local, and found nothing extra
1466- message('This branch is up to date.\n')
1467- elif theirs_only and not remote_extra:
1468- # We checked remote, and found nothing extra
1469- message('Other branch is up to date.\n')
1470- elif not (mine_only or theirs_only or local_extra or
1471- remote_extra):
1472- # We checked both branches, and neither one had extra
1473- # revisions
1474- message("Branches are up to date.\n")
1475- finally:
1476- remote_branch.unlock()
1477- finally:
1478- local_branch.unlock()
1479+ local_extra, remote_extra = find_unmerged(
1480+ local_branch, remote_branch, restrict,
1481+ backward=not reverse,
1482+ include_merges=include_merges,
1483+ local_revid_range=local_revid_range,
1484+ remote_revid_range=remote_revid_range)
1485+
1486+ if log_format is None:
1487+ registry = log.log_formatter_registry
1488+ log_format = registry.get_default(local_branch)
1489+ lf = log_format(to_file=self.outf,
1490+ show_ids=show_ids,
1491+ show_timezone='original')
1492+
1493+ status_code = 0
1494+ if local_extra and not theirs_only:
1495+ message("You have %d extra revision(s):\n" %
1496+ len(local_extra))
1497+ for revision in iter_log_revisions(local_extra,
1498+ local_branch.repository,
1499+ verbose):
1500+ lf.log_revision(revision)
1501+ printed_local = True
1502+ status_code = 1
1503+ else:
1504+ printed_local = False
1505+
1506+ if remote_extra and not mine_only:
1507+ if printed_local is True:
1508+ message("\n\n\n")
1509+ message("You are missing %d revision(s):\n" %
1510+ len(remote_extra))
1511+ for revision in iter_log_revisions(remote_extra,
1512+ remote_branch.repository,
1513+ verbose):
1514+ lf.log_revision(revision)
1515+ status_code = 1
1516+
1517+ if mine_only and not local_extra:
1518+ # We checked local, and found nothing extra
1519+ message('This branch is up to date.\n')
1520+ elif theirs_only and not remote_extra:
1521+ # We checked remote, and found nothing extra
1522+ message('Other branch is up to date.\n')
1523+ elif not (mine_only or theirs_only or local_extra or
1524+ remote_extra):
1525+ # We checked both branches, and neither one had extra
1526+ # revisions
1527+ message("Branches are up to date.\n")
1528+ self.cleanup_now()
1529 if not status_code and parent is None and other_branch is not None:
1530 local_branch.lock_write()
1531- try:
1532- # handle race conditions - a parent might be set while we run.
1533- if local_branch.get_parent() is None:
1534- local_branch.set_parent(remote_branch.base)
1535- finally:
1536- local_branch.unlock()
1537+ self.add_cleanup(local_branch.unlock)
1538+ # handle race conditions - a parent might be set while we run.
1539+ if local_branch.get_parent() is None:
1540+ local_branch.set_parent(remote_branch.base)
1541 return status_code
1542
1543
1544@@ -4490,18 +4417,16 @@
1545 else:
1546 b = Branch.open(branch)
1547 b.lock_read()
1548- try:
1549- if revision is None:
1550- rev_id = b.last_revision()
1551- else:
1552- rev_id = revision[0].as_revision_id(b)
1553- t = testament_class.from_revision(b.repository, rev_id)
1554- if long:
1555- sys.stdout.writelines(t.as_text_lines())
1556- else:
1557- sys.stdout.write(t.as_short_text())
1558- finally:
1559- b.unlock()
1560+ self.add_cleanup(b.unlock)
1561+ if revision is None:
1562+ rev_id = b.last_revision()
1563+ else:
1564+ rev_id = revision[0].as_revision_id(b)
1565+ t = testament_class.from_revision(b.repository, rev_id)
1566+ if long:
1567+ sys.stdout.writelines(t.as_text_lines())
1568+ else:
1569+ sys.stdout.write(t.as_short_text())
1570
1571
1572 class cmd_annotate(Command):
1573@@ -4533,30 +4458,28 @@
1574 bzrdir.BzrDir.open_containing_tree_or_branch(filename)
1575 if wt is not None:
1576 wt.lock_read()
1577+ self.add_cleanup(wt.unlock)
1578 else:
1579 branch.lock_read()
1580- try:
1581- tree = _get_one_revision_tree('annotate', revision, branch=branch)
1582- if wt is not None:
1583- file_id = wt.path2id(relpath)
1584- else:
1585- file_id = tree.path2id(relpath)
1586- if file_id is None:
1587- raise errors.NotVersionedError(filename)
1588- file_version = tree.inventory[file_id].revision
1589- if wt is not None and revision is None:
1590- # If there is a tree and we're not annotating historical
1591- # versions, annotate the working tree's content.
1592- annotate_file_tree(wt, file_id, self.outf, long, all,
1593- show_ids=show_ids)
1594- else:
1595- annotate_file(branch, file_version, file_id, long, all, self.outf,
1596- show_ids=show_ids)
1597- finally:
1598- if wt is not None:
1599- wt.unlock()
1600- else:
1601- branch.unlock()
1602+ self.add_cleanup(branch.unlock)
1603+ tree = _get_one_revision_tree('annotate', revision, branch=branch)
1604+ tree.lock_read()
1605+ self.add_cleanup(tree.unlock)
1606+ if wt is not None:
1607+ file_id = wt.path2id(relpath)
1608+ else:
1609+ file_id = tree.path2id(relpath)
1610+ if file_id is None:
1611+ raise errors.NotVersionedError(filename)
1612+ file_version = tree.inventory[file_id].revision
1613+ if wt is not None and revision is None:
1614+ # If there is a tree and we're not annotating historical
1615+ # versions, annotate the working tree's content.
1616+ annotate_file_tree(wt, file_id, self.outf, long, all,
1617+ show_ids=show_ids)
1618+ else:
1619+ annotate_file(branch, file_version, file_id, long, all, self.outf,
1620+ show_ids=show_ids)
1621
1622
1623 class cmd_re_sign(Command):
1624@@ -4574,10 +4497,8 @@
1625 raise errors.BzrCommandError('You must supply either --revision or a revision_id')
1626 b = WorkingTree.open_containing(u'.')[0].branch
1627 b.lock_write()
1628- try:
1629- return self._run(b, revision_id_list, revision)
1630- finally:
1631- b.unlock()
1632+ self.add_cleanup(b.unlock)
1633+ return self._run(b, revision_id_list, revision)
1634
1635 def _run(self, b, revision_id_list, revision):
1636 import bzrlib.gpg as gpg
1637@@ -4729,16 +4650,11 @@
1638
1639 if tree is not None:
1640 tree.lock_write()
1641+ self.add_cleanup(tree.unlock)
1642 else:
1643 b.lock_write()
1644- try:
1645- return self._run(b, tree, dry_run, verbose, revision, force,
1646- local=local)
1647- finally:
1648- if tree is not None:
1649- tree.unlock()
1650- else:
1651- b.unlock()
1652+ self.add_cleanup(b.unlock)
1653+ return self._run(b, tree, dry_run, verbose, revision, force, local=local)
1654
1655 def _run(self, b, tree, dry_run, verbose, revision, force, local=False):
1656 from bzrlib.log import log_formatter, show_log
1657@@ -5283,25 +5199,23 @@
1658 ):
1659 branch, relpath = Branch.open_containing(directory)
1660 branch.lock_write()
1661- try:
1662- if delete:
1663- branch.tags.delete_tag(tag_name)
1664- self.outf.write('Deleted tag %s.\n' % tag_name)
1665+ self.add_cleanup(branch.unlock)
1666+ if delete:
1667+ branch.tags.delete_tag(tag_name)
1668+ self.outf.write('Deleted tag %s.\n' % tag_name)
1669+ else:
1670+ if revision:
1671+ if len(revision) != 1:
1672+ raise errors.BzrCommandError(
1673+ "Tags can only be placed on a single revision, "
1674+ "not on a range")
1675+ revision_id = revision[0].as_revision_id(branch)
1676 else:
1677- if revision:
1678- if len(revision) != 1:
1679- raise errors.BzrCommandError(
1680- "Tags can only be placed on a single revision, "
1681- "not on a range")
1682- revision_id = revision[0].as_revision_id(branch)
1683- else:
1684- revision_id = branch.last_revision()
1685- if (not force) and branch.tags.has_tag(tag_name):
1686- raise errors.TagAlreadyExists(tag_name)
1687- branch.tags.set_tag(tag_name, revision_id)
1688- self.outf.write('Created tag %s.\n' % tag_name)
1689- finally:
1690- branch.unlock()
1691+ revision_id = branch.last_revision()
1692+ if (not force) and branch.tags.has_tag(tag_name):
1693+ raise errors.TagAlreadyExists(tag_name)
1694+ branch.tags.set_tag(tag_name, revision_id)
1695+ self.outf.write('Created tag %s.\n' % tag_name)
1696
1697
1698 class cmd_tags(Command):
1699@@ -5340,41 +5254,40 @@
1700 return
1701
1702 branch.lock_read()
1703- try:
1704- if revision:
1705- graph = branch.repository.get_graph()
1706- rev1, rev2 = _get_revision_range(revision, branch, self.name())
1707- revid1, revid2 = rev1.rev_id, rev2.rev_id
1708- # only show revisions between revid1 and revid2 (inclusive)
1709- tags = [(tag, revid) for tag, revid in tags if
1710- graph.is_between(revid, revid1, revid2)]
1711- if sort == 'alpha':
1712- tags.sort()
1713- elif sort == 'time':
1714- timestamps = {}
1715- for tag, revid in tags:
1716- try:
1717- revobj = branch.repository.get_revision(revid)
1718- except errors.NoSuchRevision:
1719- timestamp = sys.maxint # place them at the end
1720- else:
1721- timestamp = revobj.timestamp
1722- timestamps[revid] = timestamp
1723- tags.sort(key=lambda x: timestamps[x[1]])
1724- if not show_ids:
1725- # [ (tag, revid), ... ] -> [ (tag, dotted_revno), ... ]
1726- for index, (tag, revid) in enumerate(tags):
1727- try:
1728- revno = branch.revision_id_to_dotted_revno(revid)
1729- if isinstance(revno, tuple):
1730- revno = '.'.join(map(str, revno))
1731- except errors.NoSuchRevision:
1732- # Bad tag data/merges can lead to tagged revisions
1733- # which are not in this branch. Fail gracefully ...
1734- revno = '?'
1735- tags[index] = (tag, revno)
1736- finally:
1737- branch.unlock()
1738+ self.add_cleanup(branch.unlock)
1739+ if revision:
1740+ graph = branch.repository.get_graph()
1741+ rev1, rev2 = _get_revision_range(revision, branch, self.name())
1742+ revid1, revid2 = rev1.rev_id, rev2.rev_id
1743+ # only show revisions between revid1 and revid2 (inclusive)
1744+ tags = [(tag, revid) for tag, revid in tags if
1745+ graph.is_between(revid, revid1, revid2)]
1746+ if sort == 'alpha':
1747+ tags.sort()
1748+ elif sort == 'time':
1749+ timestamps = {}
1750+ for tag, revid in tags:
1751+ try:
1752+ revobj = branch.repository.get_revision(revid)
1753+ except errors.NoSuchRevision:
1754+ timestamp = sys.maxint # place them at the end
1755+ else:
1756+ timestamp = revobj.timestamp
1757+ timestamps[revid] = timestamp
1758+ tags.sort(key=lambda x: timestamps[x[1]])
1759+ if not show_ids:
1760+ # [ (tag, revid), ... ] -> [ (tag, dotted_revno), ... ]
1761+ for index, (tag, revid) in enumerate(tags):
1762+ try:
1763+ revno = branch.revision_id_to_dotted_revno(revid)
1764+ if isinstance(revno, tuple):
1765+ revno = '.'.join(map(str, revno))
1766+ except errors.NoSuchRevision:
1767+ # Bad tag data/merges can lead to tagged revisions
1768+ # which are not in this branch. Fail gracefully ...
1769+ revno = '?'
1770+ tags[index] = (tag, revno)
1771+ self.cleanup_now()
1772 for tag, revspec in tags:
1773 self.outf.write('%-20s %s\n' % (tag, revspec))
1774
1775@@ -5798,20 +5711,18 @@
1776 def run_for_list(self):
1777 tree = WorkingTree.open_containing('.')[0]
1778 tree.lock_read()
1779- try:
1780- manager = tree.get_shelf_manager()
1781- shelves = manager.active_shelves()
1782- if len(shelves) == 0:
1783- note('No shelved changes.')
1784- return 0
1785- for shelf_id in reversed(shelves):
1786- message = manager.get_metadata(shelf_id).get('message')
1787- if message is None:
1788- message = '<no message>'
1789- self.outf.write('%3d: %s\n' % (shelf_id, message))
1790- return 1
1791- finally:
1792- tree.unlock()
1793+ self.add_cleanup(tree.unlock)
1794+ manager = tree.get_shelf_manager()
1795+ shelves = manager.active_shelves()
1796+ if len(shelves) == 0:
1797+ note('No shelved changes.')
1798+ return 0
1799+ for shelf_id in reversed(shelves):
1800+ message = manager.get_metadata(shelf_id).get('message')
1801+ if message is None:
1802+ message = '<no message>'
1803+ self.outf.write('%3d: %s\n' % (shelf_id, message))
1804+ return 1
1805
1806
1807 class cmd_unshelve(Command):
1808
1809=== modified file 'bzrlib/cleanup.py'
1810--- bzrlib/cleanup.py 2009-10-26 06:23:14 +0000
1811+++ bzrlib/cleanup.py 2010-01-11 12:40:30 +0000
1812@@ -91,13 +91,14 @@
1813
1814 where `some_func` is::
1815
1816- def some_func(operation, args, ...)
1817+ def some_func(operation, args, ...):
1818 do_something()
1819 operation.add_cleanup(something)
1820 # etc
1821
1822 Note that the first argument passed to `some_func` will be the
1823- OperationWithCleanups object.
1824+ OperationWithCleanups object. To invoke `some_func` without that, use
1825+ `run_simple` instead of `run`.
1826 """
1827
1828 def __init__(self, func):
1829@@ -116,6 +117,14 @@
1830 return _do_with_cleanups(
1831 self.cleanups, self.func, self, *args, **kwargs)
1832
1833+ def run_simple(self, *args, **kwargs):
1834+ return _do_with_cleanups(
1835+ self.cleanups, self.func, *args, **kwargs)
1836+
1837+ def cleanup_now(self):
1838+ _run_cleanups(self.cleanups)
1839+ self.cleanups.clear()
1840+
1841
1842 def _do_with_cleanups(cleanup_funcs, func, *args, **kwargs):
1843 """Run `func`, then call all the cleanup_funcs.
1844
1845=== modified file 'bzrlib/commands.py'
1846--- bzrlib/commands.py 2010-01-08 07:37:25 +0000
1847+++ bzrlib/commands.py 2010-01-11 12:40:31 +0000
1848@@ -40,6 +40,7 @@
1849
1850 import bzrlib
1851 from bzrlib import (
1852+ cleanup,
1853 debug,
1854 errors,
1855 option,
1856@@ -384,7 +385,28 @@
1857 warn("No help message set for %r" % self)
1858 # List of standard options directly supported
1859 self.supported_std_options = []
1860-
1861+ self._operation = cleanup.OperationWithCleanups(self.run)
1862+
1863+ def add_cleanup(self, cleanup_func, *args, **kwargs):
1864+ """Register a function to call after self.run returns or raises.
1865+
1866+ Functions will be called in LIFO order.
1867+ """
1868+ self._operation.add_cleanup(cleanup_func, *args, **kwargs)
1869+
1870+ def cleanup_now(self):
1871+ """Execute and empty pending cleanup functions immediately.
1872+
1873+ After cleanup_now all registered cleanups are forgotten. add_cleanup
1874+ may be called again after cleanup_now; these cleanups will be called
1875+ after self.run returns or raises (or when cleanup_now is next called).
1876+
1877+ This is useful for releasing expensive or contentious resources (such
1878+ as write locks) before doing further work that does not require those
1879+ resources (such as writing results to self.outf).
1880+ """
1881+ self._operation.cleanup_now()
1882+
1883 @deprecated_method(deprecated_in((2, 1, 0)))
1884 def _maybe_expand_globs(self, file_list):
1885 """Glob expand file_list if the platform does not do that itself.
1886@@ -636,7 +658,11 @@
1887
1888 self._setup_outf()
1889
1890- return self.run(**all_cmd_args)
1891+ return self.run_direct(**all_cmd_args)
1892+
1893+ def run_direct(self, *args, **kwargs):
1894+ """Call run directly with objects (without parsing an argv list)."""
1895+ return self._operation.run_simple(*args, **kwargs)
1896
1897 def run(self):
1898 """Actually run the command.
1899
1900=== modified file 'bzrlib/tests/commands/test_branch.py'
1901--- bzrlib/tests/commands/test_branch.py 2009-03-23 14:59:43 +0000
1902+++ bzrlib/tests/commands/test_branch.py 2010-01-11 12:40:31 +0000
1903@@ -28,16 +28,16 @@
1904
1905 def test_branch_remote_local(self):
1906 cmd = cmd_branch()
1907- cmd.run(self.get_url('branch'), 'local')
1908+ cmd.run_direct(self.get_url('branch'), 'local')
1909 self.assertEquals(1, len(self.connections))
1910
1911 def test_branch_local_remote(self):
1912 cmd = cmd_branch()
1913- cmd.run('branch', self.get_url('remote'))
1914+ cmd.run_direct('branch', self.get_url('remote'))
1915 self.assertEquals(1, len(self.connections))
1916
1917 def test_branch_remote_remote(self):
1918 cmd = cmd_branch()
1919- cmd.run(self.get_url('branch'), self.get_url('remote'))
1920+ cmd.run_direct(self.get_url('branch'), self.get_url('remote'))
1921 self.assertEquals(2, len(self.connections))
1922
1923
1924=== modified file 'bzrlib/tests/commands/test_cat.py'
1925--- bzrlib/tests/commands/test_cat.py 2009-03-23 14:59:43 +0000
1926+++ bzrlib/tests/commands/test_cat.py 2010-01-11 12:40:30 +0000
1927@@ -50,7 +50,7 @@
1928 self.start_logging_connections()
1929
1930 cmd = cmd_cat()
1931- cmd.run(self.get_url('branch/foo'))
1932+ cmd.run_direct(self.get_url('branch/foo'))
1933 self.assertEquals(1, len(self.connections))
1934 self.assertEquals('foo', self.outf.getvalue())
1935
1936
1937=== modified file 'bzrlib/tests/commands/test_checkout.py'
1938--- bzrlib/tests/commands/test_checkout.py 2009-03-23 14:59:43 +0000
1939+++ bzrlib/tests/commands/test_checkout.py 2010-01-11 12:40:30 +0000
1940@@ -27,7 +27,7 @@
1941 self.start_logging_connections()
1942
1943 cmd = cmd_checkout()
1944- cmd.run(self.get_url('branch1'), 'local')
1945+ cmd.run_direct(self.get_url('branch1'), 'local')
1946 self.assertEquals(1, len(self.connections))
1947
1948 def test_checkout_lightweight(self):
1949@@ -36,6 +36,6 @@
1950 self.start_logging_connections()
1951
1952 cmd = cmd_checkout()
1953- cmd.run(self.get_url('branch1'), 'local', lightweight=True)
1954+ cmd.run_direct(self.get_url('branch1'), 'local', lightweight=True)
1955 self.assertEquals(1, len(self.connections))
1956
1957
1958=== modified file 'bzrlib/tests/commands/test_commit.py'
1959--- bzrlib/tests/commands/test_commit.py 2009-03-23 14:59:43 +0000
1960+++ bzrlib/tests/commands/test_commit.py 2010-01-11 12:40:30 +0000
1961@@ -42,7 +42,7 @@
1962 # commit do not provide a directory parameter, we have to change dir
1963 # manually
1964 os.chdir('local')
1965- commit.run(message=u'empty commit', unchanged=True)
1966+ commit.run_direct(message=u'empty commit', unchanged=True)
1967 self.assertEquals(1, len(self.connections))
1968
1969 def test_commit_both_modified(self):
1970
1971=== modified file 'bzrlib/tests/commands/test_init.py'
1972--- bzrlib/tests/commands/test_init.py 2009-03-23 14:59:43 +0000
1973+++ bzrlib/tests/commands/test_init.py 2010-01-11 12:40:30 +0000
1974@@ -30,6 +30,6 @@
1975 cmd = cmd_init()
1976 # We don't care about the ouput but 'outf' should be defined
1977 cmd.outf = tests.StringIOWrapper()
1978- cmd.run(self.get_url())
1979+ cmd.run_direct(self.get_url())
1980 self.assertEquals(1, len(self.connections))
1981
1982
1983=== modified file 'bzrlib/tests/commands/test_init_repository.py'
1984--- bzrlib/tests/commands/test_init_repository.py 2009-03-23 14:59:43 +0000
1985+++ bzrlib/tests/commands/test_init_repository.py 2010-01-11 12:40:31 +0000
1986@@ -30,6 +30,6 @@
1987 cmd = cmd_init_repository()
1988 # We don't care about the ouput but 'outf' should be defined
1989 cmd.outf = tests.StringIOWrapper()
1990- cmd.run(self.get_url())
1991+ cmd.run_direct(self.get_url())
1992 self.assertEquals(1, len(self.connections))
1993
1994
1995=== modified file 'bzrlib/tests/commands/test_merge.py'
1996--- bzrlib/tests/commands/test_merge.py 2009-03-23 14:59:43 +0000
1997+++ bzrlib/tests/commands/test_merge.py 2010-01-11 12:40:30 +0000
1998@@ -34,6 +34,6 @@
1999 cmd = cmd_merge()
2000 # We don't care about the ouput but 'outf' should be defined
2001 cmd.outf = StringIOWrapper()
2002- cmd.run(self.get_url('branch1'), directory='branch2')
2003+ cmd.run_direct(self.get_url('branch1'), directory='branch2')
2004 self.assertEquals(1, len(self.connections))
2005
2006
2007=== modified file 'bzrlib/tests/commands/test_missing.py'
2008--- bzrlib/tests/commands/test_missing.py 2009-03-23 14:59:43 +0000
2009+++ bzrlib/tests/commands/test_missing.py 2010-01-11 12:40:30 +0000
2010@@ -33,6 +33,6 @@
2011 cmd = cmd_missing()
2012 # We don't care about the ouput but 'outf' should be defined
2013 cmd.outf = self.make_utf8_encoded_stringio()
2014- cmd.run(self.get_url('branch2'))
2015+ cmd.run_direct(self.get_url('branch2'))
2016 self.assertEquals(1, len(self.connections))
2017
2018
2019=== modified file 'bzrlib/tests/commands/test_pull.py'
2020--- bzrlib/tests/commands/test_pull.py 2009-03-23 14:59:43 +0000
2021+++ bzrlib/tests/commands/test_pull.py 2010-01-11 12:40:31 +0000
2022@@ -35,7 +35,7 @@
2023 cmd = builtins.cmd_pull()
2024 # We don't care about the ouput but 'outf' should be defined
2025 cmd.outf = tests.StringIOWrapper()
2026- cmd.run(self.get_url('branch1'), directory='branch2')
2027+ cmd.run_direct(self.get_url('branch1'), directory='branch2')
2028 self.assertEquals(1, len(self.connections))
2029
2030 def test_pull_with_bound_branch(self):
2031@@ -53,6 +53,6 @@
2032 pull = builtins.cmd_pull()
2033 # We don't care about the ouput but 'outf' should be defined
2034 pull.outf = tests.StringIOWrapper()
2035- pull.run(self.get_url('remote'), directory='local')
2036+ pull.run_direct(self.get_url('remote'), directory='local')
2037 self.assertEquals(1, len(self.connections))
2038
2039
2040=== modified file 'bzrlib/tests/commands/test_push.py'
2041--- bzrlib/tests/commands/test_push.py 2009-03-23 14:59:43 +0000
2042+++ bzrlib/tests/commands/test_push.py 2010-01-11 12:40:31 +0000
2043@@ -30,7 +30,7 @@
2044 cmd = cmd_push()
2045 # We don't care about the ouput but 'outf' should be defined
2046 cmd.outf = tests.StringIOWrapper()
2047- cmd.run(self.get_url('remote'), directory='branch')
2048+ cmd.run_direct(self.get_url('remote'), directory='branch')
2049 self.assertEquals(1, len(self.connections))
2050
2051 def test_push_onto_stacked(self):
2052@@ -41,6 +41,6 @@
2053
2054 cmd = cmd_push()
2055 cmd.outf = tests.StringIOWrapper()
2056- cmd.run(self.get_url('remote'), directory='source',
2057+ cmd.run_direct(self.get_url('remote'), directory='source',
2058 stacked_on=self.get_url('base'))
2059 self.assertEqual(1, len(self.connections))
2060
2061=== modified file 'bzrlib/tests/commands/test_update.py'
2062--- bzrlib/tests/commands/test_update.py 2009-12-14 15:51:36 +0000
2063+++ bzrlib/tests/commands/test_update.py 2010-01-11 12:40:30 +0000
2064@@ -40,6 +40,6 @@
2065 # update needs the encoding from outf to print URLs
2066 update.outf = tests.StringIOWrapper()
2067 # update calls it 'dir' where other commands calls it 'directory'
2068- update.run(dir='local')
2069+ update.run_direct(dir='local')
2070 self.assertEquals(1, len(self.connections))
2071