' --revision or a revision_id')
b = WorkingTree.open_containing(u'.')[0].branch
revisions = b.repository.revisions
if revisions is None:
raise errors.BzrCommandError('Repository %r does not support '
'access to raw revision texts')
# TODO: jam 20060112 should cat-revision always output utf-8?
if revision_id is not None:
revision_id = osutils.safe_revision_id(revision_id, warn=False)
self.outf.write(b.repository.get_revision_xml(revision_id).decode('utf-8'))
except errors.NoSuchRevision:
msg = "The repository %s contains no revision %s." % (b.repository.base,
raise errors.BzrCommandError(msg)
elif revision is not None:
raise errors.BzrCommandError('You cannot specify a NULL'
rev_id = rev.as_revision_id(b)
self.outf.write(b.repository.get_revision_xml(rev_id).decode('utf-8'))
b.repository.lock_read()
# TODO: jam 20060112 should cat-revision always output utf-8?
if revision_id is not None:
revision_id = osutils.safe_revision_id(revision_id, warn=False)
self.print_revision(revisions, revision_id)
except errors.NoSuchRevision:
msg = "The repository %s contains no revision %s." % (
b.repository.base, revision_id)
raise errors.BzrCommandError(msg)
elif revision is not None:
raise errors.BzrCommandError(
'You cannot specify a NULL revision.')
rev_id = rev.as_revision_id(b)
self.print_revision(revisions, rev_id)
b.repository.unlock()
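# Illustrative sketch (not part of the diff): the hunk above replaces the
# explicit lock_read()/unlock() pairing with the command's add_cleanup()
# helper. Schematically the two idioms compare as follows; do_work and
# cleanups are hypothetical names used only for this illustration.

def with_try_finally(repository, do_work):
    # Old style: pair the lock with an explicit finally block.
    repository.lock_read()
    try:
        return do_work(repository)
    finally:
        repository.unlock()

def with_cleanup_list(repository, do_work, cleanups):
    # New style: register the unlock once and let the command run the
    # accumulated cleanups when it finishes.
    repository.lock_read()
    cleanups.append(repository.unlock)
    return do_work(repository)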
class cmd_dump_btree(Command):
__doc__ = """Dump the contents of a btree index file to stdout.
"""Dump the contents of a btree index file to stdout.
PATH is a btree index file, it can be any URL. This includes things like
.bzr/repository/pack-names, or .bzr/repository/indices/a34b3a...ca4a4.iix
To re-create the working tree, use "bzr checkout".
_see_also = ['checkout', 'working-trees']
takes_args = ['location*']
takes_args = ['location?']
takes_options = [
help='Remove the working tree even if it has '
'uncommitted changes.'),
def run(self, location_list, force=False):
if not location_list:
for location in location_list:
d = bzrdir.BzrDir.open(location)
working = d.open_workingtree()
except errors.NoWorkingTree:
raise errors.BzrCommandError("No working tree to remove")
except errors.NotLocalUrl:
raise errors.BzrCommandError("You cannot remove the working tree"
if (working.has_changes()):
raise errors.UncommittedChanges(working)
if working.user_url != working.branch.user_url:
raise errors.BzrCommandError("You cannot remove the working tree"
" from a lightweight checkout")
d.destroy_workingtree()
def run(self, location='.', force=False):
d = bzrdir.BzrDir.open(location)
working = d.open_workingtree()
except errors.NoWorkingTree:
raise errors.BzrCommandError("No working tree to remove")
except errors.NotLocalUrl:
raise errors.BzrCommandError("You cannot remove the working tree"
if (working.has_changes()):
raise errors.UncommittedChanges(working)
working_path = working.bzrdir.root_transport.base
branch_path = working.branch.bzrdir.root_transport.base
if working_path != branch_path:
raise errors.BzrCommandError("You cannot remove the working tree"
" from a lightweight checkout")
d.destroy_workingtree()
class cmd_revno(Command):
__doc__ = """Show current revision number.
"""Show current revision number.
This is equal to the number of revisions on this branch.
569
546
wt = WorkingTree.open_containing(directory)[0]
571
self.add_cleanup(wt.lock_read().unlock)
572
549
except (errors.NoWorkingTree, errors.NotLocalUrl):
574
551
b = Branch.open_containing(directory)[0]
575
self.add_cleanup(b.lock_read().unlock)
577
if revision is not None:
578
revision_ids.extend(rev.as_revision_id(b) for rev in revision)
579
if revision_info_list is not None:
580
for rev_str in revision_info_list:
581
rev_spec = RevisionSpec.from_string(rev_str)
582
revision_ids.append(rev_spec.as_revision_id(b))
583
# No arguments supplied, default to the last revision
584
if len(revision_ids) == 0:
587
raise errors.NoWorkingTree(directory)
588
revision_ids.append(wt.last_revision())
555
if revision is not None:
556
revision_ids.extend(rev.as_revision_id(b) for rev in revision)
557
if revision_info_list is not None:
558
for rev_str in revision_info_list:
559
rev_spec = RevisionSpec.from_string(rev_str)
560
revision_ids.append(rev_spec.as_revision_id(b))
561
# No arguments supplied, default to the last revision
562
if len(revision_ids) == 0:
565
raise errors.NoWorkingTree(directory)
566
revision_ids.append(wt.last_revision())
568
revision_ids.append(b.last_revision())
572
for revision_id in revision_ids:
574
dotted_revno = b.revision_id_to_dotted_revno(revision_id)
575
revno = '.'.join(str(i) for i in dotted_revno)
576
except errors.NoSuchRevision:
578
maxlen = max(maxlen, len(revno))
579
revinfos.append([revno, revision_id])
590
revision_ids.append(b.last_revision())
594
for revision_id in revision_ids:
596
dotted_revno = b.revision_id_to_dotted_revno(revision_id)
597
revno = '.'.join(str(i) for i in dotted_revno)
598
except errors.NoSuchRevision:
600
maxlen = max(maxlen, len(revno))
601
revinfos.append([revno, revision_id])
604
586
for ri in revinfos:
605
587
self.outf.write('%*s %s\n' % (maxlen, ri[0], ri[1]))
class cmd_add(Command):
__doc__ = """Add specified files or directories.
"""Add specified files or directories.
In non-recursive mode, all the named items are added, regardless
of whether they were previously ignored. A warning is given if
756
736
revision = _get_one_revision('inventory', revision)
757
737
work_tree, file_list = tree_files(file_list)
758
self.add_cleanup(work_tree.lock_read().unlock)
759
if revision is not None:
760
tree = revision.as_tree(work_tree.branch)
762
extra_trees = [work_tree]
763
self.add_cleanup(tree.lock_read().unlock)
768
if file_list is not None:
769
file_ids = tree.paths2ids(file_list, trees=extra_trees,
770
require_versioned=True)
771
# find_ids_across_trees may include some paths that don't
773
entries = sorted((tree.id2path(file_id), tree.inventory[file_id])
774
for file_id in file_ids if file_id in tree)
776
entries = tree.inventory.entries()
738
work_tree.lock_read()
740
if revision is not None:
741
tree = revision.as_tree(work_tree.branch)
743
extra_trees = [work_tree]
749
if file_list is not None:
750
file_ids = tree.paths2ids(file_list, trees=extra_trees,
751
require_versioned=True)
752
# find_ids_across_trees may include some paths that don't
754
entries = sorted((tree.id2path(file_id), tree.inventory[file_id])
755
for file_id in file_ids if file_id in tree)
757
entries = tree.inventory.entries()
760
if tree is not work_tree:
779
763
for path, entry in entries:
780
764
if kind and kind != entry.kind:
1019
1007
branch_from = Branch.open(location,
1020
1008
possible_transports=possible_transports)
1021
self.add_cleanup(branch_from.lock_read().unlock)
1023
1010
if branch_to.get_parent() is None or remember:
1024
1011
branch_to.set_parent(branch_from.base)
1026
if revision is not None:
1027
revision_id = revision.as_revision_id(branch_from)
1029
if tree_to is not None:
1030
view_info = _get_view_info_for_change_reporter(tree_to)
1031
change_reporter = delta._ChangeReporter(
1032
unversioned_filter=tree_to.is_ignored,
1033
view_info=view_info)
1034
result = tree_to.pull(
1035
branch_from, overwrite, revision_id, change_reporter,
1036
possible_transports=possible_transports, local=local)
1038
result = branch_to.pull(
1039
branch_from, overwrite, revision_id, local=local)
1041
result.report(self.outf)
1042
if verbose and result.old_revid != result.new_revid:
1043
log.show_branch_change(
1044
branch_to, self.outf, result.old_revno,
1013
if branch_from is not branch_to:
1014
branch_from.lock_read()
1016
if revision is not None:
1017
revision_id = revision.as_revision_id(branch_from)
1019
branch_to.lock_write()
1021
if tree_to is not None:
1022
view_info = _get_view_info_for_change_reporter(tree_to)
1023
change_reporter = delta._ChangeReporter(
1024
unversioned_filter=tree_to.is_ignored,
1025
view_info=view_info)
1026
result = tree_to.pull(
1027
branch_from, overwrite, revision_id, change_reporter,
1028
possible_transports=possible_transports, local=local)
1030
result = branch_to.pull(
1031
branch_from, overwrite, revision_id, local=local)
1033
result.report(self.outf)
1034
if verbose and result.old_revid != result.new_revid:
1035
log.show_branch_change(
1036
branch_to, self.outf, result.old_revno,
1041
if branch_from is not branch_to:
1042
branch_from.unlock()
class cmd_push(Command):
__doc__ = """Update a mirror of this branch.
"""Update a mirror of this branch.
The target branch will not have its working tree populated because this
is both expensive, and is not supported on remote file systems.
# Get the source branch
(tree, br_from,
_unused) = bzrdir.BzrDir.open_containing_tree_or_branch(directory)
strict = br_from.get_config().get_user_option_as_bool('push_strict')
if strict is None: strict = True # default value
# Get the tip's revision_id
revision = _get_one_revision('push', revision)
if revision is not None:
revision_id = revision.in_history(br_from).rev_id
revision_id = None
if tree is not None and revision_id is None:
tree.check_changed_or_out_of_date(
strict, 'push_strict',
more_error='Use --no-strict to force the push.',
more_warning='Uncommitted changes will not be pushed.')
if strict and tree is not None and revision_id is None:
if (tree.has_changes()):
raise errors.UncommittedChanges(
tree, more='Use --no-strict to force the push.')
if tree.last_revision() != tree.branch.last_revision():
# The tree has lost sync with its branch, there is little
# chance that the user is aware of it but he can still force
# the push with --no-strict
raise errors.OutOfDateTree(
tree, more='Use --no-strict to force the push.')
# Get the stacked_on branch, if any
if stacked_on is not None:
stacked_on = urlutils.normalize_url(stacked_on)
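# Illustrative sketch (not part of the diff): the old code above performs the
# --strict preflight by hand, while the new code delegates it to
# tree.check_changed_or_out_of_date(). A standalone rendering of the manual
# check, with UncommittedChanges and OutOfDateTree standing in for the
# bzrlib exceptions used above:

class UncommittedChanges(Exception):
    pass

class OutOfDateTree(Exception):
    pass

def check_strict_push(tree, strict, revision_id):
    """Refuse a strict push from a dirty or out-of-date working tree."""
    if not strict or tree is None or revision_id is not None:
        return
    if tree.has_changes():
        raise UncommittedChanges('Use --no-strict to force the push.')
    if tree.last_revision() != tree.branch.last_revision():
        # The tree has lost sync with its branch; the user can still
        # force the push with --no-strict.
        raise OutOfDateTree('Use --no-strict to force the push.')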
1194
1200
' directory exists, but does not already'
1195
1201
' have a control directory. This flag will'
1196
1202
' allow branch to proceed.'),
1198
help="Bind new branch to from location."),
1200
1204
aliases = ['get', 'clone']
1202
1206
def run(self, from_location, to_location=None, revision=None,
1203
1207
hardlink=False, stacked=False, standalone=False, no_tree=False,
1204
use_existing_dir=False, switch=False, bind=False):
1208
use_existing_dir=False, switch=False):
1205
1209
from bzrlib import switch as _mod_switch
1206
1210
from bzrlib.tag import _merge_tags_if_possible
1207
1211
accelerator_tree, br_from = bzrdir.BzrDir.open_tree_or_branch(
1209
1213
revision = _get_one_revision('branch', revision)
1210
self.add_cleanup(br_from.lock_read().unlock)
1211
if revision is not None:
1212
revision_id = revision.as_revision_id(br_from)
1214
# FIXME - wt.last_revision, fallback to branch, fall back to
1215
# None or perhaps NULL_REVISION to mean copy nothing
1217
revision_id = br_from.last_revision()
1218
if to_location is None:
1219
to_location = urlutils.derive_to_location(from_location)
1220
to_transport = transport.get_transport(to_location)
1222
to_transport.mkdir('.')
1223
except errors.FileExists:
1224
if not use_existing_dir:
1225
raise errors.BzrCommandError('Target directory "%s" '
1226
'already exists.' % to_location)
1216
if revision is not None:
1217
revision_id = revision.as_revision_id(br_from)
1229
bzrdir.BzrDir.open_from_transport(to_transport)
1230
except errors.NotBranchError:
1219
# FIXME - wt.last_revision, fallback to branch, fall back to
1220
# None or perhaps NULL_REVISION to mean copy nothing
1222
revision_id = br_from.last_revision()
1223
if to_location is None:
1224
to_location = urlutils.derive_to_location(from_location)
1225
to_transport = transport.get_transport(to_location)
1227
to_transport.mkdir('.')
1228
except errors.FileExists:
1229
if not use_existing_dir:
1230
raise errors.BzrCommandError('Target directory "%s" '
1231
'already exists.' % to_location)
1233
raise errors.AlreadyBranchError(to_location)
1234
except errors.NoSuchFile:
1235
raise errors.BzrCommandError('Parent of "%s" does not exist.'
1238
# preserve whatever source format we have.
1239
dir = br_from.bzrdir.sprout(to_transport.base, revision_id,
1240
possible_transports=[to_transport],
1241
accelerator_tree=accelerator_tree,
1242
hardlink=hardlink, stacked=stacked,
1243
force_new_repo=standalone,
1244
create_tree_if_local=not no_tree,
1245
source_branch=br_from)
1246
branch = dir.open_branch()
1247
except errors.NoSuchRevision:
1248
to_transport.delete_tree('.')
1249
msg = "The branch %s has no revision %s." % (from_location,
1251
raise errors.BzrCommandError(msg)
1252
_merge_tags_if_possible(br_from, branch)
1253
# If the source branch is stacked, the new branch may
1254
# be stacked whether we asked for that explicitly or not.
1255
# We therefore need a try/except here and not just 'if stacked:'
1257
note('Created new stacked branch referring to %s.' %
1258
branch.get_stacked_on_url())
1259
except (errors.NotStacked, errors.UnstackableBranchFormat,
1260
errors.UnstackableRepositoryFormat), e:
1261
note('Branched %d revision(s).' % branch.revno())
1263
# Bind to the parent
1264
parent_branch = Branch.open(from_location)
1265
branch.bind(parent_branch)
1266
note('New branch bound to %s' % from_location)
1268
# Switch to the new branch
1269
wt, _ = WorkingTree.open_containing('.')
1270
_mod_switch.switch(wt.bzrdir, branch)
1271
note('Switched to branch: %s',
1272
urlutils.unescape_for_display(branch.base, 'utf-8'))
1234
bzrdir.BzrDir.open_from_transport(to_transport)
1235
except errors.NotBranchError:
1238
raise errors.AlreadyBranchError(to_location)
1239
except errors.NoSuchFile:
1240
raise errors.BzrCommandError('Parent of "%s" does not exist.'
1243
# preserve whatever source format we have.
1244
dir = br_from.bzrdir.sprout(to_transport.base, revision_id,
1245
possible_transports=[to_transport],
1246
accelerator_tree=accelerator_tree,
1247
hardlink=hardlink, stacked=stacked,
1248
force_new_repo=standalone,
1249
create_tree_if_local=not no_tree,
1250
source_branch=br_from)
1251
branch = dir.open_branch()
1252
except errors.NoSuchRevision:
1253
to_transport.delete_tree('.')
1254
msg = "The branch %s has no revision %s." % (from_location,
1256
raise errors.BzrCommandError(msg)
1257
_merge_tags_if_possible(br_from, branch)
1258
# If the source branch is stacked, the new branch may
1259
# be stacked whether we asked for that explicitly or not.
1260
# We therefore need a try/except here and not just 'if stacked:'
1262
note('Created new stacked branch referring to %s.' %
1263
branch.get_stacked_on_url())
1264
except (errors.NotStacked, errors.UnstackableBranchFormat,
1265
errors.UnstackableRepositoryFormat), e:
1266
note('Branched %d revision(s).' % branch.revno())
1268
# Switch to the new branch
1269
wt, _ = WorkingTree.open_containing('.')
1270
_mod_switch.switch(wt.bzrdir, branch)
1271
note('Switched to branch: %s',
1272
urlutils.unescape_for_display(branch.base, 'utf-8'))
class cmd_checkout(Command):
__doc__ = """Create a new checkout of an existing branch.
"""Create a new checkout of an existing branch.
If BRANCH_LOCATION is omitted, checkout will reconstitute a working tree for
the branch found in '.'. This is useful if you have removed the working tree
1353
1355
@display_command
1354
1356
def run(self, dir=u'.'):
1355
1357
tree = WorkingTree.open_containing(dir)[0]
1356
self.add_cleanup(tree.lock_read().unlock)
1357
new_inv = tree.inventory
1358
old_tree = tree.basis_tree()
1359
self.add_cleanup(old_tree.lock_read().unlock)
1360
old_inv = old_tree.inventory
1362
iterator = tree.iter_changes(old_tree, include_unchanged=True)
1363
for f, paths, c, v, p, n, k, e in iterator:
1364
if paths[0] == paths[1]:
1368
renames.append(paths)
1370
for old_name, new_name in renames:
1371
self.outf.write("%s => %s\n" % (old_name, new_name))
1360
new_inv = tree.inventory
1361
old_tree = tree.basis_tree()
1362
old_tree.lock_read()
1364
old_inv = old_tree.inventory
1366
iterator = tree.iter_changes(old_tree, include_unchanged=True)
1367
for f, paths, c, v, p, n, k, e in iterator:
1368
if paths[0] == paths[1]:
1372
renames.append(paths)
1374
for old_name, new_name in renames:
1375
self.outf.write("%s => %s\n" % (old_name, new_name))
class cmd_update(Command):
__doc__ = """Update a tree to have the latest code committed to its branch.
"""Update a tree to have the latest code committed to its branch.
This will perform a merge into the working tree, and may generate
conflicts. If you have any local changes, you will still
1400
1408
master = branch.get_master_branch(
1401
1409
possible_transports=possible_transports)
1402
1410
if master is not None:
1403
1412
branch_location = master.base
1414
tree.lock_tree_write()
1406
1415
branch_location = tree.branch.base
1407
tree.lock_tree_write()
1408
self.add_cleanup(tree.unlock)
1409
1416
# get rid of the final '/' and be ready for display
1410
branch_location = urlutils.unescape_for_display(
1411
branch_location.rstrip('/'),
1413
existing_pending_merges = tree.get_parent_ids()[1:]
1417
# may need to fetch data into a heavyweight checkout
1418
# XXX: this may take some time, maybe we should display a
1420
old_tip = branch.update(possible_transports)
1421
if revision is not None:
1422
revision_id = revision[0].as_revision_id(branch)
1424
revision_id = branch.last_revision()
1425
if revision_id == _mod_revision.ensure_null(tree.last_revision()):
1426
revno = branch.revision_id_to_dotted_revno(revision_id)
1427
note("Tree is up to date at revision %s of branch %s" %
1428
('.'.join(map(str, revno)), branch_location))
1430
view_info = _get_view_info_for_change_reporter(tree)
1431
change_reporter = delta._ChangeReporter(
1432
unversioned_filter=tree.is_ignored,
1433
view_info=view_info)
1417
branch_location = urlutils.unescape_for_display(branch_location[:-1],
1435
conflicts = tree.update(
1437
possible_transports=possible_transports,
1438
revision=revision_id,
1440
except errors.NoSuchRevision, e:
1441
raise errors.BzrCommandError(
1442
"branch has no revision %s\n"
1443
"bzr update --revision only works"
1444
" for a revision in the branch history"
1446
revno = tree.branch.revision_id_to_dotted_revno(
1447
_mod_revision.ensure_null(tree.last_revision()))
1448
note('Updated to revision %s of branch %s' %
1449
('.'.join(map(str, revno)), branch_location))
1450
parent_ids = tree.get_parent_ids()
1451
if parent_ids[1:] and parent_ids[1:] != existing_pending_merges:
1452
note('Your local commits will now show as pending merges with '
1453
"'bzr status', and can be committed with 'bzr commit'.")
1420
existing_pending_merges = tree.get_parent_ids()[1:]
1424
# may need to fetch data into a heavyweight checkout
1425
# XXX: this may take some time, maybe we should display a
1427
old_tip = branch.update(possible_transports)
1428
if revision is not None:
1429
revision_id = revision[0].as_revision_id(branch)
1431
revision_id = branch.last_revision()
1432
if revision_id == _mod_revision.ensure_null(tree.last_revision()):
1433
revno = branch.revision_id_to_revno(revision_id)
1434
note("Tree is up to date at revision %d of branch %s" %
1435
(revno, branch_location))
1437
view_info = _get_view_info_for_change_reporter(tree)
1438
change_reporter = delta._ChangeReporter(
1439
unversioned_filter=tree.is_ignored,
1440
view_info=view_info)
1442
conflicts = tree.update(
1444
possible_transports=possible_transports,
1445
revision=revision_id,
1447
except errors.NoSuchRevision, e:
1448
raise errors.BzrCommandError(
1449
"branch has no revision %s\n"
1450
"bzr update --revision only works"
1451
" for a revision in the branch history"
1453
revno = tree.branch.revision_id_to_revno(
1454
_mod_revision.ensure_null(tree.last_revision()))
1455
note('Updated to revision %d of branch %s' %
1456
(revno, branch_location))
1457
if tree.get_parent_ids()[1:] != existing_pending_merges:
1458
note('Your local commits will now show as pending merges with '
1459
"'bzr status', and can be committed with 'bzr commit'.")
class cmd_info(Command):
__doc__ = """Show information about a working tree, branch or repository.
"""Show information about a working tree, branch or repository.
This command will show all known locations and formats associated to the
tree, branch or repository.
1530
1538
if file_list is not None:
1531
1539
file_list = [f for f in file_list]
1533
self.add_cleanup(tree.lock_write().unlock)
1534
# Heuristics should probably all move into tree.remove_smart or
1537
added = tree.changes_from(tree.basis_tree(),
1538
specific_files=file_list).added
1539
file_list = sorted([f[0] for f in added], reverse=True)
1540
if len(file_list) == 0:
1541
raise errors.BzrCommandError('No matching files.')
1542
elif file_list is None:
1543
# missing files show up in iter_changes(basis) as
1544
# versioned-with-no-kind.
1546
for change in tree.iter_changes(tree.basis_tree()):
1547
# Find paths in the working tree that have no kind:
1548
if change[1][1] is not None and change[6][1] is None:
1549
missing.append(change[1][1])
1550
file_list = sorted(missing, reverse=True)
1551
file_deletion_strategy = 'keep'
1552
tree.remove(file_list, verbose=verbose, to_file=self.outf,
1553
keep_files=file_deletion_strategy=='keep',
1554
force=file_deletion_strategy=='force')
1543
# Heuristics should probably all move into tree.remove_smart or
1546
added = tree.changes_from(tree.basis_tree(),
1547
specific_files=file_list).added
1548
file_list = sorted([f[0] for f in added], reverse=True)
1549
if len(file_list) == 0:
1550
raise errors.BzrCommandError('No matching files.')
1551
elif file_list is None:
1552
# missing files show up in iter_changes(basis) as
1553
# versioned-with-no-kind.
1555
for change in tree.iter_changes(tree.basis_tree()):
1556
# Find paths in the working tree that have no kind:
1557
if change[1][1] is not None and change[6][1] is None:
1558
missing.append(change[1][1])
1559
file_list = sorted(missing, reverse=True)
1560
file_deletion_strategy = 'keep'
1561
tree.remove(file_list, verbose=verbose, to_file=self.outf,
1562
keep_files=file_deletion_strategy=='keep',
1563
force=file_deletion_strategy=='force')
class cmd_file_id(Command):
__doc__ = """Print file_id of a particular file or directory.
"""Print file_id of a particular file or directory.
The file_id is assigned when the file is first added and remains the
same through all revisions where the file exists, even when it is
raise errors.BzrCommandError('bzr diff --revision takes exactly'
' one or two revision specifiers')
if using is not None and format is not None:
raise errors.BzrCommandError('--using and --format are mutually '
(old_tree, new_tree,
old_branch, new_branch,
specific_files, extra_trees) = get_trees_and_branches_to_diff_locked(
file_list, revision, old, new, self.add_cleanup, apply_view=True)
specific_files, extra_trees) = get_trees_and_branches_to_diff(
file_list, revision, old, new, apply_view=True)
return show_diff_trees(old_tree, new_tree, sys.stdout,
specific_files=specific_files,
external_diff_options=diff_options,
old_label=old_label, new_label=new_label,
extra_trees=extra_trees, using=using,
extra_trees=extra_trees, using=using)
class cmd_deleted(Command):
__doc__ = """List files deleted in the working tree.
"""List files deleted in the working tree.
# TODO: Show files deleted since a previous revision, or
# between two revisions.
2331
2337
filter_by_dir = False
2333
# find the file ids to log and check for directory filtering
2334
b, file_info_list, rev1, rev2 = _get_info_for_log_files(
2335
revision, file_list, self.add_cleanup)
2336
for relpath, file_id, kind in file_info_list:
2338
raise errors.BzrCommandError(
2339
"Path unknown at end or start of revision range: %s" %
2341
# If the relpath is the top of the tree, we log everything
2341
# find the file ids to log and check for directory filtering
2342
b, file_info_list, rev1, rev2 = _get_info_for_log_files(
2343
revision, file_list)
2344
for relpath, file_id, kind in file_info_list:
2346
raise errors.BzrCommandError(
2347
"Path unknown at end or start of revision range: %s" %
2349
# If the relpath is the top of the tree, we log everything
2354
file_ids.append(file_id)
2355
filter_by_dir = filter_by_dir or (
2356
kind in ['directory', 'tree-reference'])
2359
# FIXME ? log the current subdir only RBC 20060203
2360
if revision is not None \
2361
and len(revision) > 0 and revision[0].get_branch():
2362
location = revision[0].get_branch()
2346
file_ids.append(file_id)
2347
filter_by_dir = filter_by_dir or (
2348
kind in ['directory', 'tree-reference'])
2351
# FIXME ? log the current subdir only RBC 20060203
2352
if revision is not None \
2353
and len(revision) > 0 and revision[0].get_branch():
2354
location = revision[0].get_branch()
2357
dir, relpath = bzrdir.BzrDir.open_containing(location)
2358
b = dir.open_branch()
2359
self.add_cleanup(b.lock_read().unlock)
2360
rev1, rev2 = _get_revision_range(revision, b, self.name())
2362
# Decide on the type of delta & diff filtering to use
2363
# TODO: add an --all-files option to make this configurable & consistent
2371
diff_type = 'partial'
2375
# Build the log formatter
2376
if log_format is None:
2377
log_format = log.log_formatter_registry.get_default(b)
2378
# Make a non-encoding output to include the diffs - bug 328007
2379
unencoded_output = ui.ui_factory.make_output_stream(encoding_type='exact')
2380
lf = log_format(show_ids=show_ids, to_file=self.outf,
2381
to_exact_file=unencoded_output,
2382
show_timezone=timezone,
2383
delta_format=get_verbosity_level(),
2385
show_advice=levels is None)
2387
# Choose the algorithm for doing the logging. It's annoying
2388
# having multiple code paths like this but necessary until
2389
# the underlying repository format is faster at generating
2390
# deltas or can provide everything we need from the indices.
2391
# The default algorithm - match-using-deltas - works for
2392
# multiple files and directories and is faster for small
2393
# amounts of history (200 revisions say). However, it's too
2394
# slow for logging a single file in a repository with deep
2395
# history, i.e. > 10K revisions. In the spirit of "do no
2396
# evil when adding features", we continue to use the
2397
# original algorithm - per-file-graph - for the "single
2398
# file that isn't a directory without showing a delta" case.
2399
partial_history = revision and b.repository._format.supports_chks
2400
match_using_deltas = (len(file_ids) != 1 or filter_by_dir
2401
or delta_type or partial_history)
2403
# Build the LogRequest and execute it
2404
if len(file_ids) == 0:
2406
rqst = make_log_request_dict(
2407
direction=direction, specific_fileids=file_ids,
2408
start_revision=rev1, end_revision=rev2, limit=limit,
2409
message_search=message, delta_type=delta_type,
2410
diff_type=diff_type, _match_using_deltas=match_using_deltas,
2411
exclude_common_ancestry=exclude_common_ancestry,
2413
Logger(b, rqst).show(lf)
2365
dir, relpath = bzrdir.BzrDir.open_containing(location)
2366
b = dir.open_branch()
2368
rev1, rev2 = _get_revision_range(revision, b, self.name())
2370
# Decide on the type of delta & diff filtering to use
2371
# TODO: add an --all-files option to make this configurable & consistent
2379
diff_type = 'partial'
2383
# Build the log formatter
2384
if log_format is None:
2385
log_format = log.log_formatter_registry.get_default(b)
2386
# Make a non-encoding output to include the diffs - bug 328007
2387
unencoded_output = ui.ui_factory.make_output_stream(encoding_type='exact')
2388
lf = log_format(show_ids=show_ids, to_file=self.outf,
2389
to_exact_file=unencoded_output,
2390
show_timezone=timezone,
2391
delta_format=get_verbosity_level(),
2393
show_advice=levels is None)
2395
# Choose the algorithm for doing the logging. It's annoying
2396
# having multiple code paths like this but necessary until
2397
# the underlying repository format is faster at generating
2398
# deltas or can provide everything we need from the indices.
2399
# The default algorithm - match-using-deltas - works for
2400
# multiple files and directories and is faster for small
2401
# amounts of history (200 revisions say). However, it's too
2402
# slow for logging a single file in a repository with deep
2403
# history, i.e. > 10K revisions. In the spirit of "do no
2404
# evil when adding features", we continue to use the
2405
# original algorithm - per-file-graph - for the "single
2406
# file that isn't a directory without showing a delta" case.
2407
partial_history = revision and b.repository._format.supports_chks
2408
match_using_deltas = (len(file_ids) != 1 or filter_by_dir
2409
or delta_type or partial_history)
2411
# Build the LogRequest and execute it
2412
if len(file_ids) == 0:
2414
rqst = make_log_request_dict(
2415
direction=direction, specific_fileids=file_ids,
2416
start_revision=rev1, end_revision=rev2, limit=limit,
2417
message_search=message, delta_type=delta_type,
2418
diff_type=diff_type, _match_using_deltas=match_using_deltas)
2419
Logger(b, rqst).show(lf)
2416
2425
def _get_revision_range(revisionspec_list, branch, command_name):
2561
2569
view_str = views.view_display_str(view_files)
2562
2570
note("Ignoring files outside view. View is %s" % view_str)
2564
self.add_cleanup(tree.lock_read().unlock)
2565
for fp, fc, fkind, fid, entry in tree.list_files(include_root=False,
2566
from_dir=relpath, recursive=recursive):
2567
# Apply additional masking
2568
if not all and not selection[fc]:
2570
if kind is not None and fkind != kind:
2575
fullpath = osutils.pathjoin(relpath, fp)
2578
views.check_path_in_view(tree, fullpath)
2579
except errors.FileOutsideView:
2574
for fp, fc, fkind, fid, entry in tree.list_files(include_root=False,
2575
from_dir=relpath, recursive=recursive):
2576
# Apply additional masking
2577
if not all and not selection[fc]:
2579
if kind is not None and fkind != kind:
2584
fullpath = osutils.pathjoin(relpath, fp)
2587
views.check_path_in_view(tree, fullpath)
2588
except errors.FileOutsideView:
2584
fp = osutils.pathjoin(prefix, fp)
2585
kindch = entry.kind_character()
2586
outstring = fp + kindch
2587
ui.ui_factory.clear_term()
2589
outstring = '%-8s %s' % (fc, outstring)
2590
if show_ids and fid is not None:
2591
outstring = "%-50s %s" % (outstring, fid)
2592
self.outf.write(outstring + '\n')
2594
self.outf.write(fp + '\0')
2597
self.outf.write(fid)
2598
self.outf.write('\0')
2606
self.outf.write('%-50s %s\n' % (outstring, my_id))
2593
fp = osutils.pathjoin(prefix, fp)
2594
kindch = entry.kind_character()
2595
outstring = fp + kindch
2596
ui.ui_factory.clear_term()
2598
outstring = '%-8s %s' % (fc, outstring)
2599
if show_ids and fid is not None:
2600
outstring = "%-50s %s" % (outstring, fid)
2608
2601
self.outf.write(outstring + '\n')
2603
self.outf.write(fp + '\0')
2606
self.outf.write(fid)
2607
self.outf.write('\0')
2615
self.outf.write('%-50s %s\n' % (outstring, my_id))
2617
self.outf.write(outstring + '\n')
class cmd_unknowns(Command):
__doc__ = """List unknown files.
"""List unknown files.
Ignore everything but the "debian" toplevel directory::
bzr ignore "RE:(?!debian/).*"
Ignore everything except the "local" toplevel directory,
but always ignore "*~" autosave files, even under local/::
bzr ignore "!./local"
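# Illustrative sketch (not part of the diff): the "RE:" prefix above hands the
# rest of the pattern to Python's regular expression engine, so
# "RE:(?!debian/).*" matches (and therefore ignores) every path that does not
# start with "debian/", via a negative lookahead. Pure standard-library
# demonstration of that regex behaviour:

import re

pattern = re.compile(r'(?!debian/).*')
assert pattern.match('src/main.c')             # matched, so ignored
assert pattern.match('debian/rules') is None   # not matched, so kept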
_see_also = ['status', 'ignored', 'patterns']
takes_args = ['name_pattern*']
takes_options = [
Option('default-rules',
help='Display the default ignore rules that bzr uses.')
Option('old-default-rules',
help='Write out the ignore rules bzr < 0.9 always used.')
def run(self, name_pattern_list=None, default_rules=None):
def run(self, name_pattern_list=None, old_default_rules=None):
from bzrlib import ignores
if default_rules is not None:
# dump the default rules and exit
for pattern in ignores.USER_DEFAULTS:
self.outf.write("%s\n" % pattern)
if old_default_rules is not None:
# dump the rules and exit
for pattern in ignores.OLD_DEFAULTS:
if not name_pattern_list:
raise errors.BzrCommandError("ignore requires at least one "
"NAME_PATTERN or --default-rules.")
"NAME_PATTERN or --old-default-rules")
name_pattern_list = [globbing.normalize_pattern(p)
for p in name_pattern_list]
for name_pattern in name_pattern_list:
'(use --file "%(f)s" to take commit message from that file)'
% { 'f': message })
ui.ui_factory.show_warning(warning_msg)
message = message.replace('\r\n', '\n')
message = message.replace('\r', '\n')
raise errors.BzrCommandError(
"please specify either --message or --file")
def get_message(commit_obj):
"""Callback to get commit message"""
my_message = codecs.open(
file, 'rt', osutils.get_user_encoding()).read()
elif message is not None:
my_message = message
# No message supplied: make one up.
# text is the status of the tree
text = make_commit_message_template_encoded(tree,
my_message = message
if my_message is not None and '\r' in my_message:
my_message = my_message.replace('\r\n', '\n')
my_message = my_message.replace('\r', '\n')
if my_message is None and not file:
t = make_commit_message_template_encoded(tree,
selected_list, diff=show_diff,
output_encoding=osutils.get_user_encoding())
# start_message is the template generated from hooks
# XXX: Warning - looks like hooks return unicode,
# make_commit_message_template_encoded returns user encoding.
# We probably want to be using edit_commit_message instead to
start_message = generate_commit_message_template(commit_obj)
my_message = edit_commit_message_encoded(text,
my_message = edit_commit_message_encoded(t,
start_message=start_message)
if my_message is None:
raise errors.BzrCommandError("please specify a commit"
" message with either --message or --file")
elif my_message and file:
raise errors.BzrCommandError(
"please specify either --message or --file")
my_message = codecs.open(file, 'rt',
osutils.get_user_encoding()).read()
if my_message == "":
raise errors.BzrCommandError("empty commit message specified")
return my_message
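# Illustrative sketch (not part of the diff): both versions of get_message()
# above resolve the commit message from the same three sources, in order: an
# explicit --file, an explicit --message, then an editor session seeded with
# the status template. (The real code also rejects --message combined with
# --file.) A condensed rendering of that precedence; read_file and
# edit_template are hypothetical stand-ins for the bzrlib helpers used above.

def resolve_commit_message(message, file, read_file, edit_template):
    if file:
        my_message = read_file(file)
    elif message is not None:
        my_message = message
    else:
        # No message supplied: fall back to the editor template.
        my_message = edit_template()
    if my_message == "":
        raise ValueError("empty commit message specified")
    return my_message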
3597
3570
verbose = not is_quiet()
3598
3571
# TODO: should possibly lock the history file...
3599
3572
benchfile = open(".perf_history", "at", buffering=1)
3600
self.add_cleanup(benchfile.close)
3602
3574
test_suite_factory = None
3603
3575
benchfile = None
3604
selftest_kwargs = {"verbose": verbose,
3606
"stop_on_failure": one,
3607
"transport": transport,
3608
"test_suite_factory": test_suite_factory,
3609
"lsprof_timed": lsprof_timed,
3610
"lsprof_tests": lsprof_tests,
3611
"bench_history": benchfile,
3612
"matching_tests_first": first,
3613
"list_only": list_only,
3614
"random_seed": randomize,
3615
"exclude_pattern": exclude,
3617
"load_list": load_list,
3618
"debug_flags": debugflag,
3619
"starting_with": starting_with
3621
selftest_kwargs.update(self.additional_selftest_args)
3622
result = selftest(**selftest_kwargs)
3577
selftest_kwargs = {"verbose": verbose,
3579
"stop_on_failure": one,
3580
"transport": transport,
3581
"test_suite_factory": test_suite_factory,
3582
"lsprof_timed": lsprof_timed,
3583
"lsprof_tests": lsprof_tests,
3584
"bench_history": benchfile,
3585
"matching_tests_first": first,
3586
"list_only": list_only,
3587
"random_seed": randomize,
3588
"exclude_pattern": exclude,
3590
"load_list": load_list,
3591
"debug_flags": debugflag,
3592
"starting_with": starting_with
3594
selftest_kwargs.update(self.additional_selftest_args)
3595
result = selftest(**selftest_kwargs)
3597
if benchfile is not None:
3623
3599
return int(not result)
class cmd_version(Command):
__doc__ = """Show version of bzr."""
"""Show version of bzr."""
encoding_type = 'replace'
takes_options = [
3806
3776
view_info = _get_view_info_for_change_reporter(tree)
3807
3777
change_reporter = delta._ChangeReporter(
3808
3778
unversioned_filter=tree.is_ignored, view_info=view_info)
3809
pb = ui.ui_factory.nested_progress_bar()
3810
self.add_cleanup(pb.finished)
3811
self.add_cleanup(tree.lock_write().unlock)
3812
if location is not None:
3814
mergeable = bundle.read_mergeable_from_url(location,
3815
possible_transports=possible_transports)
3816
except errors.NotABundle:
3781
pb = ui.ui_factory.nested_progress_bar()
3782
cleanups.append(pb.finished)
3784
cleanups.append(tree.unlock)
3785
if location is not None:
3787
mergeable = bundle.read_mergeable_from_url(location,
3788
possible_transports=possible_transports)
3789
except errors.NotABundle:
3793
raise errors.BzrCommandError('Cannot use --uncommitted'
3794
' with bundles or merge directives.')
3796
if revision is not None:
3797
raise errors.BzrCommandError(
3798
'Cannot use -r with merge directives or bundles')
3799
merger, verified = _mod_merge.Merger.from_mergeable(tree,
3802
if merger is None and uncommitted:
3803
if revision is not None and len(revision) > 0:
3804
raise errors.BzrCommandError('Cannot use --uncommitted and'
3805
' --revision at the same time.')
3806
merger = self.get_merger_from_uncommitted(tree, location, pb,
3808
allow_pending = False
3811
merger, allow_pending = self._get_merger_from_branch(tree,
3812
location, revision, remember, possible_transports, pb)
3814
merger.merge_type = merge_type
3815
merger.reprocess = reprocess
3816
merger.show_base = show_base
3817
self.sanity_check_merger(merger)
3818
if (merger.base_rev_id == merger.other_rev_id and
3819
merger.other_rev_id is not None):
3820
note('Nothing to do.')
3823
if merger.interesting_files is not None:
3824
raise errors.BzrCommandError('Cannot pull individual files')
3825
if (merger.base_rev_id == tree.last_revision()):
3826
result = tree.pull(merger.other_branch, False,
3827
merger.other_rev_id)
3828
result.report(self.outf)
3830
if merger.this_basis is None:
3831
raise errors.BzrCommandError(
3832
"This branch has no commits."
3833
" (perhaps you would prefer 'bzr pull')")
3835
return self._do_preview(merger, cleanups)
3837
return self._do_interactive(merger, cleanups)
3820
raise errors.BzrCommandError('Cannot use --uncommitted'
3821
' with bundles or merge directives.')
3823
if revision is not None:
3824
raise errors.BzrCommandError(
3825
'Cannot use -r with merge directives or bundles')
3826
merger, verified = _mod_merge.Merger.from_mergeable(tree,
3829
if merger is None and uncommitted:
3830
if revision is not None and len(revision) > 0:
3831
raise errors.BzrCommandError('Cannot use --uncommitted and'
3832
' --revision at the same time.')
3833
merger = self.get_merger_from_uncommitted(tree, location, None)
3834
allow_pending = False
3837
merger, allow_pending = self._get_merger_from_branch(tree,
3838
location, revision, remember, possible_transports, None)
3840
merger.merge_type = merge_type
3841
merger.reprocess = reprocess
3842
merger.show_base = show_base
3843
self.sanity_check_merger(merger)
3844
if (merger.base_rev_id == merger.other_rev_id and
3845
merger.other_rev_id is not None):
3846
note('Nothing to do.')
3849
if merger.interesting_files is not None:
3850
raise errors.BzrCommandError('Cannot pull individual files')
3851
if (merger.base_rev_id == tree.last_revision()):
3852
result = tree.pull(merger.other_branch, False,
3853
merger.other_rev_id)
3854
result.report(self.outf)
3856
if merger.this_basis is None:
3857
raise errors.BzrCommandError(
3858
"This branch has no commits."
3859
" (perhaps you would prefer 'bzr pull')")
3861
return self._do_preview(merger)
3863
return self._do_interactive(merger)
3865
return self._do_merge(merger, change_reporter, allow_pending,
3868
def _get_preview(self, merger):
3839
return self._do_merge(merger, change_reporter, allow_pending,
3842
for cleanup in reversed(cleanups):
3845
def _get_preview(self, merger, cleanups):
3869
3846
tree_merger = merger.make_merger()
3870
3847
tt = tree_merger.make_preview_transform()
3871
self.add_cleanup(tt.finalize)
3848
cleanups.append(tt.finalize)
3872
3849
result_tree = tt.get_preview_tree()
3873
3850
return result_tree
3875
def _do_preview(self, merger):
3852
def _do_preview(self, merger, cleanups):
3876
3853
from bzrlib.diff import show_diff_trees
3877
result_tree = self._get_preview(merger)
3854
result_tree = self._get_preview(merger, cleanups)
3878
3855
show_diff_trees(merger.this_tree, result_tree, self.outf,
3879
3856
old_label='', new_label='')
4067
4046
def run(self, file_list=None, merge_type=None, show_base=False,
4068
4047
reprocess=False):
4069
from bzrlib.conflicts import restore
4070
4048
if merge_type is None:
4071
4049
merge_type = _mod_merge.Merge3Merger
4072
4050
tree, file_list = tree_files(file_list)
4073
self.add_cleanup(tree.lock_write().unlock)
4074
parents = tree.get_parent_ids()
4075
if len(parents) != 2:
4076
raise errors.BzrCommandError("Sorry, remerge only works after normal"
4077
" merges. Not cherrypicking or"
4079
repository = tree.branch.repository
4080
interesting_ids = None
4082
conflicts = tree.conflicts()
4083
if file_list is not None:
4084
interesting_ids = set()
4085
for filename in file_list:
4086
file_id = tree.path2id(filename)
4088
raise errors.NotVersionedError(filename)
4089
interesting_ids.add(file_id)
4090
if tree.kind(file_id) != "directory":
4053
parents = tree.get_parent_ids()
4054
if len(parents) != 2:
4055
raise errors.BzrCommandError("Sorry, remerge only works after normal"
4056
" merges. Not cherrypicking or"
4058
repository = tree.branch.repository
4059
interesting_ids = None
4061
conflicts = tree.conflicts()
4062
if file_list is not None:
4063
interesting_ids = set()
4064
for filename in file_list:
4065
file_id = tree.path2id(filename)
4067
raise errors.NotVersionedError(filename)
4068
interesting_ids.add(file_id)
4069
if tree.kind(file_id) != "directory":
4093
for name, ie in tree.inventory.iter_entries(file_id):
4094
interesting_ids.add(ie.file_id)
4095
new_conflicts = conflicts.select_conflicts(tree, file_list)[0]
4097
# Remerge only supports resolving contents conflicts
4098
allowed_conflicts = ('text conflict', 'contents conflict')
4099
restore_files = [c.path for c in conflicts
4100
if c.typestring in allowed_conflicts]
4101
_mod_merge.transform_tree(tree, tree.basis_tree(), interesting_ids)
4102
tree.set_conflicts(ConflictList(new_conflicts))
4103
if file_list is not None:
4104
restore_files = file_list
4105
for filename in restore_files:
4072
for name, ie in tree.inventory.iter_entries(file_id):
4073
interesting_ids.add(ie.file_id)
4074
new_conflicts = conflicts.select_conflicts(tree, file_list)[0]
4076
# Remerge only supports resolving contents conflicts
4077
allowed_conflicts = ('text conflict', 'contents conflict')
4078
restore_files = [c.path for c in conflicts
4079
if c.typestring in allowed_conflicts]
4080
_mod_merge.transform_tree(tree, tree.basis_tree(), interesting_ids)
4081
tree.set_conflicts(ConflictList(new_conflicts))
4082
if file_list is not None:
4083
restore_files = file_list
4084
for filename in restore_files:
4086
restore(tree.abspath(filename))
4087
except errors.NotConflicted:
4089
# Disable pending merges, because the file texts we are remerging
4090
# have not had those merges performed. If we use the wrong parents
4091
# list, we imply that the working tree text has seen and rejected
4092
# all the changes from the other tree, when in fact those changes
4093
# have not yet been seen.
4094
pb = ui.ui_factory.nested_progress_bar()
4095
tree.set_parent_ids(parents[:1])
4107
restore(tree.abspath(filename))
4108
except errors.NotConflicted:
4110
# Disable pending merges, because the file texts we are remerging
4111
# have not had those merges performed. If we use the wrong parents
4112
# list, we imply that the working tree text has seen and rejected
4113
# all the changes from the other tree, when in fact those changes
4114
# have not yet been seen.
4115
tree.set_parent_ids(parents[:1])
4117
merger = _mod_merge.Merger.from_revision_ids(None, tree, parents[1])
4118
merger.interesting_ids = interesting_ids
4119
merger.merge_type = merge_type
4120
merger.show_base = show_base
4121
merger.reprocess = reprocess
4122
conflicts = merger.do_merge()
4097
merger = _mod_merge.Merger.from_revision_ids(pb,
4099
merger.interesting_ids = interesting_ids
4100
merger.merge_type = merge_type
4101
merger.show_base = show_base
4102
merger.reprocess = reprocess
4103
conflicts = merger.do_merge()
4105
tree.set_parent_ids(parents)
4124
tree.set_parent_ids(parents)
4125
4109
if conflicts > 0:
created as above. Directories containing unknown files will not be
The working tree contains a list of revisions that have been merged but
not yet committed. These revisions will be included as additional parents
of the next commit. Normally, using revert clears that list as well as
reverting the files. If any files are specified, revert leaves the list
of uncommitted merges alone and reverts only the files. Use ``bzr revert
.`` in the tree root to revert all files but keep the recorded merges,
and ``bzr revert --forget-merges`` to clear the pending merge list without
The working tree contains a list of pending merged revisions, which will
be included as parents in the next commit. Normally, revert clears that
list as well as reverting the files. If any files are specified, revert
leaves the pending merge list alone and reverts only the files. Use "bzr
revert ." in the tree root to revert all files but keep the merge record,
and "bzr revert --forget-merges" to clear the pending merge list without
reverting any files.
Using "bzr revert --forget-merges", it is possible to apply all of the
changes from a branch in a single revision. To do this, perform the merge
as desired. Then doing revert with the "--forget-merges" option will keep
the content of the tree as it was, but it will clear the list of pending
merges. The next commit will then contain all of the changes that are
present in the other branch, but without any other parent revisions.
Because this technique forgets where these changes originated, it may
cause additional conflicts on later merges involving the same source and
Using "bzr revert --forget-merges", it is possible to apply the changes
from an arbitrary merge as a single revision. To do this, perform the
merge as desired. Then doing revert with the "--forget-merges" option will
keep the content of the tree as it was, but it will clear the list of
pending merges. The next commit will then contain all of the changes that
would have been in the merge, but without any mention of the other parent
revisions. Because this technique forgets where these changes originated,
it may cause additional conflicts on later merges involving the source and
target branches.
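# Illustrative sketch (not part of the diff): as the run() method below shows,
# --forget-merges simply truncates the working tree's parent list to its
# first entry, dropping the recorded pending merges without touching file
# content. Schematically:

def forget_pending_merges(tree):
    """Keep the tree's primary parent, discard recorded merge parents."""
    tree.set_parent_ids(tree.get_parent_ids()[:1])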
def run(self, revision=None, no_backup=False, file_list=None,
forget_merges=None):
tree, file_list = tree_files(file_list)
self.add_cleanup(tree.lock_tree_write().unlock)
tree.set_parent_ids(tree.get_parent_ids()[:1])
self._revert_tree_to_revision(tree, revision, file_list, no_backup)
tree.set_parent_ids(tree.get_parent_ids()[:1])
self._revert_tree_to_revision(tree, revision, file_list, no_backup)
def _revert_tree_to_revision(tree, revision, file_list, no_backup):
rev_tree = _get_one_revision_tree('revert', revision, tree=tree)
tree.revert(file_list, rev_tree, not no_backup, None,
report_changes=True)
pb = ui.ui_factory.nested_progress_bar()
tree.revert(file_list, rev_tree, not no_backup, pb,
report_changes=True)
class cmd_assert_fail(Command):
__doc__ = """Test reporting of assertion failures"""
"""Test reporting of assertion failures"""
# intended just for use in testing
4355
4341
_get_revision_range(revision,
4356
4342
remote_branch, self.name()))
4358
local_extra, remote_extra = find_unmerged(
4359
local_branch, remote_branch, restrict,
4360
backward=not reverse,
4361
include_merges=include_merges,
4362
local_revid_range=local_revid_range,
4363
remote_revid_range=remote_revid_range)
4365
if log_format is None:
4366
registry = log.log_formatter_registry
4367
log_format = registry.get_default(local_branch)
4368
lf = log_format(to_file=self.outf,
4370
show_timezone='original')
4373
if local_extra and not theirs_only:
4374
message("You have %d extra revision(s):\n" %
4376
for revision in iter_log_revisions(local_extra,
4377
local_branch.repository,
4379
lf.log_revision(revision)
4380
printed_local = True
4383
printed_local = False
4385
if remote_extra and not mine_only:
4386
if printed_local is True:
4388
message("You are missing %d revision(s):\n" %
4390
for revision in iter_log_revisions(remote_extra,
4391
remote_branch.repository,
4393
lf.log_revision(revision)
4396
if mine_only and not local_extra:
4397
# We checked local, and found nothing extra
4398
message('This branch is up to date.\n')
4399
elif theirs_only and not remote_extra:
4400
# We checked remote, and found nothing extra
4401
message('Other branch is up to date.\n')
4402
elif not (mine_only or theirs_only or local_extra or
4404
# We checked both branches, and neither one had extra
4406
message("Branches are up to date.\n")
4344
local_branch.lock_read()
4346
remote_branch.lock_read()
4348
local_extra, remote_extra = find_unmerged(
4349
local_branch, remote_branch, restrict,
4350
backward=not reverse,
4351
include_merges=include_merges,
4352
local_revid_range=local_revid_range,
4353
remote_revid_range=remote_revid_range)
4355
if log_format is None:
4356
registry = log.log_formatter_registry
4357
log_format = registry.get_default(local_branch)
4358
lf = log_format(to_file=self.outf,
4360
show_timezone='original')
4363
if local_extra and not theirs_only:
4364
message("You have %d extra revision(s):\n" %
4366
for revision in iter_log_revisions(local_extra,
4367
local_branch.repository,
4369
lf.log_revision(revision)
4370
printed_local = True
4373
printed_local = False
4375
if remote_extra and not mine_only:
4376
if printed_local is True:
4378
message("You are missing %d revision(s):\n" %
4380
for revision in iter_log_revisions(remote_extra,
4381
remote_branch.repository,
4383
lf.log_revision(revision)
4386
if mine_only and not local_extra:
4387
# We checked local, and found nothing extra
4388
message('This branch is up to date.\n')
4389
elif theirs_only and not remote_extra:
4390
# We checked remote, and found nothing extra
4391
message('Other branch is up to date.\n')
4392
elif not (mine_only or theirs_only or local_extra or
4394
# We checked both branches, and neither one had extra
4396
message("Branches are up to date.\n")
4398
remote_branch.unlock()
4400
local_branch.unlock()
4408
4401
if not status_code and parent is None and other_branch is not None:
4409
self.add_cleanup(local_branch.lock_write().unlock)
4410
# handle race conditions - a parent might be set while we run.
4411
if local_branch.get_parent() is None:
4412
local_branch.set_parent(remote_branch.base)
4402
local_branch.lock_write()
4404
# handle race conditions - a parent might be set while we run.
4405
if local_branch.get_parent() is None:
4406
local_branch.set_parent(remote_branch.base)
4408
local_branch.unlock()
4413
4409
return status_code
class cmd_pack(Command):
__doc__ = """Compress the data within a repository.
This operation compresses the data within a bazaar repository. As
bazaar supports automatic packing of repository, this operation is
normally not required to be done manually.
During the pack operation, bazaar takes a backup of existing repository
data, i.e. pack files. This backup is eventually removed by bazaar
automatically when it is safe to do so. To save disk space by removing
the backed up pack files, the --clean-obsolete-packs option may be
Warning: If you use --clean-obsolete-packs and your machine crashes
during or immediately after repacking, you may be left with a state
where the deletion has been written to disk but the new packs have not
been. In this case the repository may be unusable.
"""Compress the data within a repository."""
_see_also = ['repositories']
takes_args = ['branch_or_repo?']
Option('clean-obsolete-packs', 'Delete obsolete packs to save disk space.'),
def run(self, branch_or_repo='.', clean_obsolete_packs=False):
def run(self, branch_or_repo='.'):
dir = bzrdir.BzrDir.open_containing(branch_or_repo)[0]
branch = dir.open_branch()
repository = branch.repository
except errors.NotBranchError:
repository = dir.open_repository()
repository.pack(clean_obsolete_packs=clean_obsolete_packs)
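# Illustrative sketch (not part of the diff): with the new option, passing
# clean_obsolete_packs=True asks the repository to delete the backed-up pack
# files once repacking succeeds, as described in the docstring above. A
# minimal caller, assuming an already-opened bzrlib repository object:

def pack_and_clean(repository):
    # Roughly what `bzr pack --clean-obsolete-packs` does for that repository.
    repository.pack(clean_obsolete_packs=True)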
class cmd_plugins(Command):
__doc__ = """List the installed plugins.
"""List the installed plugins.
This command displays the list of installed plugins including
version of plugin and a short description of each.
4552
4532
wt, branch, relpath = \
4553
4533
bzrdir.BzrDir.open_containing_tree_or_branch(filename)
4554
4534
if wt is not None:
4555
self.add_cleanup(wt.lock_read().unlock)
4557
self.add_cleanup(branch.lock_read().unlock)
4558
tree = _get_one_revision_tree('annotate', revision, branch=branch)
4559
self.add_cleanup(tree.lock_read().unlock)
4561
file_id = wt.path2id(relpath)
4563
file_id = tree.path2id(relpath)
4565
raise errors.NotVersionedError(filename)
4566
file_version = tree.inventory[file_id].revision
4567
if wt is not None and revision is None:
4568
# If there is a tree and we're not annotating historical
4569
# versions, annotate the working tree's content.
4570
annotate_file_tree(wt, file_id, self.outf, long, all,
4573
annotate_file(branch, file_version, file_id, long, all, self.outf,
4539
tree = _get_one_revision_tree('annotate', revision, branch=branch)
4541
file_id = wt.path2id(relpath)
4543
file_id = tree.path2id(relpath)
4545
raise errors.NotVersionedError(filename)
4546
file_version = tree.inventory[file_id].revision
4547
if wt is not None and revision is None:
4548
# If there is a tree and we're not annotating historical
4549
# versions, annotate the working tree's content.
4550
annotate_file_tree(wt, file_id, self.outf, long, all,
4553
annotate_file(branch, file_version, file_id, long, all, self.outf,
class cmd_re_sign(Command):
__doc__ = """Create a digital signature for an existing revision."""
"""Create a digital signature for an existing revision."""
# TODO be able to replace existing ones.
hidden = True # is this right ?
end_revision=last_revno)
self.outf.write('Dry-run, pretending to remove'
' the above revisions.\n')
print 'Dry-run, pretending to remove the above revisions.'
val = raw_input('Press <enter> to continue')
self.outf.write('The above revision(s) will be removed.\n')
if not ui.ui_factory.get_boolean('Are you sure'):
self.outf.write('Canceled')
print 'The above revision(s) will be removed.'
val = raw_input('Are you sure [y/N]? ')
if val.lower() not in ('y', 'yes'):
mutter('Uncommitting from {%s} to {%s}',
last_rev_id, rev_id)
uncommit(b, tree=tree, dry_run=dry_run, verbose=verbose,
revno=revno, local=local)
self.outf.write('You can restore the old tip by running:\n'
' bzr pull . -r revid:%s\n' % last_rev_id)
note('You can restore the old tip by running:\n'
' bzr pull . -r revid:%s', last_rev_id)
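# Illustrative sketch (not part of the diff): the hunk above swaps the
# raw_input()-based confirmation for ui.ui_factory.get_boolean(), which
# returns True/False directly. The two prompts compare roughly as follows;
# confirm_old mirrors the removed code, confirm_new the added code.

def confirm_old():
    val = raw_input('Are you sure [y/N]? ')
    return val.lower() in ('y', 'yes')

def confirm_new(ui_factory):
    return ui_factory.get_boolean('Are you sure')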


class cmd_break_lock(Command):
    __doc__ = """Break a dead lock on a repository, branch or working directory.

    CAUTION: Locks should only be broken when you are sure that the process
    holding the lock has been stopped.

    You can get information on what locks are open via the 'bzr info
    [location]' command.

    :Examples:
        bzr break-lock bzr+ssh://example.com/bzr/foo
    """
    takes_args = ['location?']
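
    # A minimal sketch of what breaking the lock amounts to, assuming
    # BzrDir.break_lock() exists and raises NotImplementedError for formats
    # that do not support it (a hedged illustration, not necessarily the
    # command's exact implementation):
    #
    #   control, relpath = bzrdir.BzrDir.open_containing(location or u'.')
    #   try:
    #       control.break_lock()
    #   except NotImplementedError:
    #       pass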

    directly from the merge directive, without retrieving data from a
    branch.

    `bzr send` creates a compact data set that, when applied using bzr
    merge, has the same effect as merging from the source branch.

    By default the merge directive is self-contained and can be applied to any
    branch containing submit_branch in its ancestry without needing access to
    the source branch.

    If --no-bundle is specified, then Bazaar doesn't send the contents of the
    revisions, but only a structured request to merge from the
    public_location. In that case the public_branch is needed and it must be
    up-to-date and accessible to the recipient. The public_branch is always
    included if known, so that people can check it later.

    The submit branch defaults to the parent of the source branch, but can be
    overridden. Both submit branch and public branch will be remembered in
    branch.conf the first time they are used for a particular branch. The
    source branch defaults to that containing the working directory, but can
    be changed using --from.

    In order to calculate those changes, bzr must analyse the submit branch.
    Therefore it is most efficient for the submit branch to be a local mirror.
    If a public location is known for the submit_branch, that location is used
    in the merge directive.

    The default behaviour is to send the merge directive by mail, unless -o is
    given, in which case it is sent to a file.

    Mail is sent using your preferred mail program. This should be transparent
    on Windows (it uses MAPI). On Linux, it requires the xdg-email utility.
    """
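
    # Illustrative invocations, built only from options mentioned in the
    # docstring above (paths are hypothetical):
    #
    #   bzr send -o ../feature.patch    # write the merge directive to a file
    #   bzr send --no-bundle            # structured request only; requires an
    #                                   # up-to-date, reachable public_branch
    #   bzr send --from ../feature      # choose a source branch other than
    #                                   # the current working directory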

    def run(self, tag_name=None,
            delete=None,
            directory='.',
            force=None,
            revision=None,
            ):
        branch, relpath = Branch.open_containing(directory)
        self.add_cleanup(branch.lock_write().unlock)
        if delete:
            if tag_name is None:
                raise errors.BzrCommandError("No tag specified to delete.")
            branch.tags.delete_tag(tag_name)
            self.outf.write('Deleted tag %s.\n' % tag_name)
        else:
            if revision:
                if len(revision) != 1:
                    raise errors.BzrCommandError(
                        "Tags can only be placed on a single revision, "
                        "not on a range")
                revision_id = revision[0].as_revision_id(branch)
            else:
                revision_id = branch.last_revision()
            if tag_name is None:
                tag_name = branch.automatic_tag_name(revision_id)
                if tag_name is None:
                    raise errors.BzrCommandError(
                        "Please specify a tag name.")
            if (not force) and branch.tags.has_tag(tag_name):
                raise errors.TagAlreadyExists(tag_name)
            branch.tags.set_tag(tag_name, revision_id)
            self.outf.write('Created tag %s.\n' % tag_name)
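
    # Behaviour implied by the branches above (illustrative command lines;
    # option names inferred from the run() parameters):
    #
    #   bzr tag foo -r 42     # tag revision 42 as 'foo'
    #   bzr tag --force foo   # move an existing tag
    #   bzr tag --delete foo  # remove the tag
    #   bzr tag               # with no name, fall back to automatic_tag_name()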


class cmd_tags(Command):
    __doc__ = """List tags.

    This command shows a table of tag names and the revisions they reference.
    """
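
    # Example of the table this command prints (tag names hypothetical; each
    # row uses the '%-20s %s' format seen in the code below):
    #
    #   release-1.0          4211
    #   release-1.1          4350.2.7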

        self.add_cleanup(branch.lock_read().unlock)
        if revision:
            graph = branch.repository.get_graph()
            rev1, rev2 = _get_revision_range(revision, branch, self.name())
            revid1, revid2 = rev1.rev_id, rev2.rev_id
            # only show revisions between revid1 and revid2 (inclusive)
            tags = [(tag, revid) for tag, revid in tags if
                graph.is_between(revid, revid1, revid2)]
        if sort == 'alpha':
            tags.sort()
        elif sort == 'time':
            timestamps = {}
            for tag, revid in tags:
                try:
                    revobj = branch.repository.get_revision(revid)
                except errors.NoSuchRevision:
                    timestamp = sys.maxint # place them at the end
                else:
                    timestamp = revobj.timestamp
                timestamps[revid] = timestamp
            tags.sort(key=lambda x: timestamps[x[1]])
        # [ (tag, revid), ... ] -> [ (tag, dotted_revno), ... ]
        for index, (tag, revid) in enumerate(tags):
            try:
                revno = branch.revision_id_to_dotted_revno(revid)
                if isinstance(revno, tuple):
                    revno = '.'.join(map(str, revno))
            except errors.NoSuchRevision:
                # Bad tag data/merges can lead to tagged revisions
                # which are not in this branch. Fail gracefully ...
                revno = '?'
            tags[index] = (tag, revno)
        for tag, revspec in tags:
            self.outf.write('%-20s %s\n' % (tag, revspec))


class cmd_reconfigure(Command):
    __doc__ = """Reconfigure the type of a bzr directory.

    A target configuration must be specified.
    """
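
    # Illustrative target configurations (hedged: option names as in mainline
    # bzr; treat them as assumptions if this tree differs):
    #
    #   bzr reconfigure --branch     # turn a checkout into a plain branch
    #   bzr reconfigure --tree       # add a working tree to a branch
    #   bzr reconfigure --checkout   # bind this branch to its master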

        self.outf.write('%s %s\n' % (path, location))


def _register_lazy_builtins():
    # register lazy builtins from other modules; called at startup and should
    # be only called once.
    for (name, aliases, module_name) in [
        ('cmd_bundle_info', [], 'bzrlib.bundle.commands'),
        ('cmd_dpush', [], 'bzrlib.foreign'),
        ('cmd_version_info', [], 'bzrlib.cmd_version_info'),
        ('cmd_resolve', ['resolved'], 'bzrlib.conflicts'),
        ('cmd_conflicts', [], 'bzrlib.conflicts'),
        ('cmd_sign_my_commits', [], 'bzrlib.sign_my_commits'),
        ]:
        builtin_command_registry.register_lazy(name, aliases, module_name)


# these get imported and then picked up by the scan for cmd_*
# TODO: Some more consistent way to split command definitions across files;
# we do need to load at least some information about them to know of
# aliases. ideally we would avoid loading the implementation until the
# details were needed.
from bzrlib.cmd_version_info import cmd_version_info
from bzrlib.conflicts import cmd_resolve, cmd_conflicts, restore
from bzrlib.bundle.commands import (
    cmd_bundle_info,
    )
from bzrlib.foreign import cmd_dpush
from bzrlib.sign_my_commits import cmd_sign_my_commits
from bzrlib.weave_commands import cmd_versionedfile_list, \
        cmd_weave_plan_merge, cmd_weave_merge_text
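
# The same lazy-registration call used in _register_lazy_builtins() above can
# defer loading any command module until the command is first looked up.  A
# hedged illustration with hypothetical names (a real plugin would normally
# register through its own registry rather than the builtin one):
#
#   builtin_command_registry.register_lazy(
#       'cmd_frobnicate', ['frob'], 'myplugin.commands')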