# Copyright (C) 2005-2010 Canonical Ltd
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
"""Code to show logs of changes.

Various flavors of log can be produced:

* for one file, or the whole tree, and (not done yet) for
  files in a given directory

* in "verbose" mode with a description of what changed from one
  version to the next

* with file-ids and revision-ids shown

Logs are actually written out through an abstract LogFormatter
interface, which allows for different preferred formats.  Plugins can
register formats too.

Logs can be produced in either forward (oldest->newest) or reverse
(newest->oldest) order.

Logs can be filtered to show only revisions matching a particular
search string, or within a particular range of revisions.  The range
can be given as date/times, which are reduced to revisions before
calling in.

In verbose mode we show a summary of what changed in each particular
revision.  Note that this is the delta for changes in that revision
relative to its left-most parent, not the delta relative to the last
logged revision.  So for example if you ask for a verbose log of
changes touching hello.c you will get a list of those revisions also
listing other things that were changed in the same revision, but not
all the changes since the previous revision that touched hello.c.
"""

from cStringIO import StringIO
from itertools import (
    chain,
    izip,
    )
import re
from warnings import (
    warn,
    )

from bzrlib.lazy_import import lazy_import
lazy_import(globals(), """
from bzrlib import (
    config,
    diff,
    errors,
    foreign,
    repository as _mod_repository,
    revision as _mod_revision,
    revisionspec,
    tsort,
    )
""")

from bzrlib import (
    osutils,
    registry,
    )
from bzrlib.osutils import (
    format_date,
    format_date_with_offset_in_original_timezone,
    get_terminal_encoding,
    terminal_width,
    )
from bzrlib.symbol_versioning import (
    deprecated_function,
    deprecated_in,
    )


def find_touching_revisions(branch, file_id):
    """Yield a description of revisions which affect the file_id.

    Each returned element is (revno, revision_id, description)

    This is the list of revisions where the file is either added,
    modified, renamed or deleted.

    TODO: Perhaps some way to limit this to only particular revisions,
    or to traverse a non-mainline set of revisions?
    """
    last_ie = None
    last_path = None
    revno = 1
    for revision_id in branch.revision_history():
        this_inv = branch.repository.get_inventory(revision_id)
        if file_id in this_inv:
            this_ie = this_inv[file_id]
            this_path = this_inv.id2path(file_id)
        else:
            this_ie = this_path = None

        # now we know how it was last time, and how it is in this revision.
        # are those two states effectively the same or not?

        if not this_ie and not last_ie:
            # not present in either
            pass
        elif this_ie and not last_ie:
            yield revno, revision_id, "added " + this_path
        elif not this_ie and last_ie:
            yield revno, revision_id, "deleted " + last_path
        elif this_path != last_path:
            yield revno, revision_id, ("renamed %s => %s" % (last_path, this_path))
        elif (this_ie.text_size != last_ie.text_size
              or this_ie.text_sha1 != last_ie.text_sha1):
            yield revno, revision_id, "modified " + this_path

        last_ie = this_ie
        last_path = this_path
        revno += 1


def _enumerate_history(branch):
    rh = []
    revno = 1
    for rev_id in branch.revision_history():
        rh.append((revno, rev_id))
        revno += 1
    return rh


def show_log(branch, lf, specific_fileid=None, verbose=False,
             direction='reverse', start_revision=None, end_revision=None,
             search=None, limit=None, show_diff=False):
    """Write out human-readable log of commits to this branch.

    This function is being retained for backwards compatibility but
    should not be extended with new parameters. Use the new Logger class
    instead, eg. Logger(branch, rqst).show(lf), adding parameters to the
    make_log_request_dict function.

    :param lf: The LogFormatter object showing the output.
    :param specific_fileid: If not None, list only the commits affecting the
        specified file, rather than all commits.
    :param verbose: If True show added/changed/deleted/renamed files.
    :param direction: 'reverse' (default) is latest to earliest; 'forward' is
        earliest to latest.
    :param start_revision: If not None, only show revisions >= start_revision
    :param end_revision: If not None, only show revisions <= end_revision
    :param search: If not None, only show revisions with matching commit
        messages
    :param limit: If set, shows only 'limit' revisions, all revisions are shown
        if None or 0.
    :param show_diff: If True, output a diff after each revision.
    """
    # Convert old-style parameters to new-style parameters
    if specific_fileid is not None:
        file_ids = [specific_fileid]
    else:
        file_ids = None
    if verbose:
        if file_ids:
            delta_type = 'partial'
        else:
            delta_type = 'full'
    else:
        delta_type = None
    if show_diff:
        if file_ids:
            diff_type = 'partial'
        else:
            diff_type = 'full'
    else:
        diff_type = None

    # Build the request and execute it
    rqst = make_log_request_dict(direction=direction, specific_fileids=file_ids,
        start_revision=start_revision, end_revision=end_revision,
        limit=limit, message_search=search,
        delta_type=delta_type, diff_type=diff_type)
    Logger(branch, rqst).show(lf)
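

# Editorial sketch (not part of the original module): the new-style
# equivalent of the compatibility wrapper above. `branch` is assumed to be a
# bzrlib Branch and `to_file` a writable stream; 'long' is one of the
# formatter names registered near the bottom of this module.
def _example_show_last_ten_revisions(branch, to_file):
    """Sketch: log the last ten mainline revisions via the new-style API."""
    lf = log_formatter('long', to_file=to_file, levels=1)
    rqst = make_log_request_dict(direction='reverse', limit=10,
                                 delta_type='full')
    Logger(branch, rqst).show(lf)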


# Note: This needs to be kept in sync with the defaults in
# make_log_request_dict() below
_DEFAULT_REQUEST_PARAMS = {
    'direction': 'reverse',
    'levels': 1,
    'generate_tags': True,
    'exclude_common_ancestry': False,
    '_match_using_deltas': True,
    }


def make_log_request_dict(direction='reverse', specific_fileids=None,
                          start_revision=None, end_revision=None, limit=None,
                          message_search=None, levels=1, generate_tags=True,
                          delta_type=None,
                          diff_type=None, _match_using_deltas=True,
                          exclude_common_ancestry=False,
                          ):
    """Convenience function for making a logging request dictionary.

    Using this function may make code slightly safer by ensuring
    parameters have the correct names. It also provides a reference
    point for documenting the supported parameters.

    :param direction: 'reverse' (default) is latest to earliest;
      'forward' is earliest to latest.

    :param specific_fileids: If not None, only include revisions
      affecting the specified files, rather than all revisions.

    :param start_revision: If not None, only generate
      revisions >= start_revision

    :param end_revision: If not None, only generate
      revisions <= end_revision

    :param limit: If set, generate only 'limit' revisions, all revisions
      are shown if None or 0.

    :param message_search: If not None, only include revisions with
      matching commit messages

    :param levels: the number of levels of revisions to
      generate; 1 for just the mainline; 0 for all levels.

    :param generate_tags: If True, include tags for matched revisions.

    :param delta_type: Either 'full', 'partial' or None.
      'full' means generate the complete delta - adds/deletes/modifies/etc;
      'partial' means filter the delta using specific_fileids;
      None means do not generate any delta.

    :param diff_type: Either 'full', 'partial' or None.
      'full' means generate the complete diff - adds/deletes/modifies/etc;
      'partial' means filter the diff using specific_fileids;
      None means do not generate any diff.

    :param _match_using_deltas: a private parameter controlling the
      algorithm used for matching specific_fileids. This parameter
      may be removed in the future so bzrlib client code should NOT
      rely on it.

    :param exclude_common_ancestry: Whether -rX..Y should be interpreted as a
      range operator or as a graph difference.
    """
    return {
        'direction': direction,
        'specific_fileids': specific_fileids,
        'start_revision': start_revision,
        'end_revision': end_revision,
        'limit': limit,
        'message_search': message_search,
        'levels': levels,
        'generate_tags': generate_tags,
        'delta_type': delta_type,
        'diff_type': diff_type,
        'exclude_common_ancestry': exclude_common_ancestry,
        # Add 'private' attributes for features that may be deprecated
        '_match_using_deltas': _match_using_deltas,
        }


def _apply_log_request_defaults(rqst):
    """Apply default values to a request dictionary."""
    result = _DEFAULT_REQUEST_PARAMS.copy()
    if rqst:
        result.update(rqst)
    return result


class LogGenerator(object):
    """A generator of log revisions."""

    def iter_log_revisions(self):
        """Iterate over LogRevision objects.

        :return: An iterator yielding LogRevision objects.
        """
        raise NotImplementedError(self.iter_log_revisions)
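

# Editorial sketch: a minimal concrete LogGenerator. It simply wraps an
# already-computed list of (rev, revno_string, merge_depth) tuples; the real
# implementation used by Logger is _DefaultLogGenerator below.
class _ExamplePrecomputedLogGenerator(LogGenerator):
    """Sketch: yield LogRevision objects for a precomputed revision list."""

    def __init__(self, revisions):
        # revisions: iterable of (rev, revno_string, merge_depth) tuples
        self.revisions = revisions

    def iter_log_revisions(self):
        for rev, revno, merge_depth in self.revisions:
            yield LogRevision(rev, revno, merge_depth)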


class Logger(object):
    """An object that generates, formats and displays a log."""

    def __init__(self, branch, rqst):
        """Make a Logger.

        :param branch: the branch to log
        :param rqst: A dictionary specifying the query parameters.
          See make_log_request_dict() for supported values.
        """
        self.branch = branch
        self.rqst = _apply_log_request_defaults(rqst)

    def show(self, lf):
        """Display the log.

        :param lf: The LogFormatter object to send the output to.
        """
        if not isinstance(lf, LogFormatter):
            warn("not a LogFormatter instance: %r" % lf)

        self.branch.lock_read()
        try:
            if getattr(lf, 'begin_log', None):
                lf.begin_log()
            self._show_body(lf)
            if getattr(lf, 'end_log', None):
                lf.end_log()
        finally:
            self.branch.unlock()

    def _show_body(self, lf):
        """Show the main log output.

        Subclasses may wish to override this.
        """
        # Tweak the LogRequest based on what the LogFormatter can handle.
        # (There's no point generating stuff if the formatter can't display it.)
        rqst = self.rqst
        rqst['levels'] = lf.get_levels()
        if not getattr(lf, 'supports_tags', False):
            rqst['generate_tags'] = False
        if not getattr(lf, 'supports_delta', False):
            rqst['delta_type'] = None
        if not getattr(lf, 'supports_diff', False):
            rqst['diff_type'] = None

        # Find and print the interesting revisions
        generator = self._generator_factory(self.branch, rqst)
        for lr in generator.iter_log_revisions():
            lf.log_revision(lr)
        lf.show_advice()

    def _generator_factory(self, branch, rqst):
        """Make the LogGenerator object to use.

        Subclasses may wish to override this.
        """
        return _DefaultLogGenerator(branch, rqst)
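

# Editorial sketch: _generator_factory() is the intended extension point for
# plugging in a different traversal strategy. A subclass only needs to return
# its own LogGenerator; `MyLogGenerator` below is a placeholder name.
#
#   class MyLogger(Logger):
#       def _generator_factory(self, branch, rqst):
#           return MyLogGenerator(branch, rqst)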


class _StartNotLinearAncestor(Exception):
    """Raised when a start revision is not found walking left-hand history."""


class _DefaultLogGenerator(LogGenerator):
    """The default generator of log revisions."""

    def __init__(self, branch, rqst):
        self.branch = branch
        self.rqst = rqst
        if rqst.get('generate_tags') and branch.supports_tags():
            self.rev_tag_dict = branch.tags.get_reverse_tag_dict()
        else:
            self.rev_tag_dict = {}

    def iter_log_revisions(self):
        """Iterate over LogRevision objects.

        :return: An iterator yielding LogRevision objects.
        """
        rqst = self.rqst
        levels = rqst.get('levels')
        limit = rqst.get('limit')
        diff_type = rqst.get('diff_type')
        log_count = 0
        revision_iterator = self._create_log_revision_iterator()
        for revs in revision_iterator:
            for (rev_id, revno, merge_depth), rev, delta in revs:
                # 0 levels means show everything; merge_depth counts from 0
                if levels != 0 and merge_depth >= levels:
                    continue
                if diff_type is None:
                    diff = None
                else:
                    diff = self._format_diff(rev, rev_id, diff_type)
                yield LogRevision(rev, revno, merge_depth, delta,
                    self.rev_tag_dict.get(rev_id), diff)
                if limit:
                    log_count += 1
                    if log_count >= limit:
                        return

    def _format_diff(self, rev, rev_id, diff_type):
        repo = self.branch.repository
        if len(rev.parent_ids) == 0:
            ancestor_id = _mod_revision.NULL_REVISION
        else:
            ancestor_id = rev.parent_ids[0]
        tree_1 = repo.revision_tree(ancestor_id)
        tree_2 = repo.revision_tree(rev_id)
        file_ids = self.rqst.get('specific_fileids')
        if diff_type == 'partial' and file_ids is not None:
            specific_files = [tree_2.id2path(id) for id in file_ids]
        else:
            specific_files = None
        s = StringIO()
        path_encoding = osutils.get_diff_header_encoding()
        diff.show_diff_trees(tree_1, tree_2, s, specific_files, old_label='',
            new_label='', path_encoding=path_encoding)
        return s.getvalue()

    def _create_log_revision_iterator(self):
        """Create a revision iterator for log.

        :return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
            delta).
        """
        self.start_rev_id, self.end_rev_id = _get_revision_limits(
            self.branch, self.rqst.get('start_revision'),
            self.rqst.get('end_revision'))
        if self.rqst.get('_match_using_deltas'):
            return self._log_revision_iterator_using_delta_matching()
        else:
            # We're using the per-file-graph algorithm. This scales really
            # well but only makes sense if there is a single file and it's
            # not a directory
            file_count = len(self.rqst.get('specific_fileids'))
            if file_count != 1:
                raise BzrError("illegal LogRequest: must match-using-deltas "
                    "when logging %d files" % file_count)
            return self._log_revision_iterator_using_per_file_graph()

    def _log_revision_iterator_using_delta_matching(self):
        # Get the base revisions, filtering by the revision range
        rqst = self.rqst
        generate_merge_revisions = rqst.get('levels') != 1
        delayed_graph_generation = not rqst.get('specific_fileids') and (
                rqst.get('limit') or self.start_rev_id or self.end_rev_id)
        view_revisions = _calc_view_revisions(
            self.branch, self.start_rev_id, self.end_rev_id,
            rqst.get('direction'),
            generate_merge_revisions=generate_merge_revisions,
            delayed_graph_generation=delayed_graph_generation,
            exclude_common_ancestry=rqst.get('exclude_common_ancestry'))

        # Apply the other filters
        return make_log_rev_iterator(self.branch, view_revisions,
            rqst.get('delta_type'), rqst.get('message_search'),
            file_ids=rqst.get('specific_fileids'),
            direction=rqst.get('direction'))

    def _log_revision_iterator_using_per_file_graph(self):
        # Get the base revisions, filtering by the revision range.
        # Note that we always generate the merge revisions because
        # filter_revisions_touching_file_id() requires them ...
        rqst = self.rqst
        view_revisions = _calc_view_revisions(
            self.branch, self.start_rev_id, self.end_rev_id,
            rqst.get('direction'), generate_merge_revisions=True,
            exclude_common_ancestry=rqst.get('exclude_common_ancestry'))
        if not isinstance(view_revisions, list):
            view_revisions = list(view_revisions)
        view_revisions = _filter_revisions_touching_file_id(self.branch,
            rqst.get('specific_fileids')[0], view_revisions,
            include_merges=rqst.get('levels') != 1)
        return make_log_rev_iterator(self.branch, view_revisions,
            rqst.get('delta_type'), rqst.get('message_search'))


def _calc_view_revisions(branch, start_rev_id, end_rev_id, direction,
                         generate_merge_revisions,
                         delayed_graph_generation=False,
                         exclude_common_ancestry=False,
                         ):
    """Calculate the revisions to view.

    :return: An iterator of (revision_id, dotted_revno, merge_depth) tuples OR
             a list of the same tuples.
    """
    if (exclude_common_ancestry and start_rev_id == end_rev_id):
        raise errors.BzrCommandError(
            '--exclude-common-ancestry requires two different revisions')
    if direction not in ('reverse', 'forward'):
        raise ValueError('invalid direction %r' % direction)
    br_revno, br_rev_id = branch.last_revision_info()
    if (end_rev_id and start_rev_id == end_rev_id
        and (not generate_merge_revisions
             or not _has_merges(branch, end_rev_id))):
        # If a single revision is requested, check we can handle it
        iter_revs = _generate_one_revision(branch, end_rev_id, br_rev_id,
                                           br_revno)
    elif not generate_merge_revisions:
        # If we only want to see linear revisions, we can iterate ...
        iter_revs = _generate_flat_revisions(branch, start_rev_id, end_rev_id,
                                             direction, exclude_common_ancestry)
        if direction == 'forward':
            iter_revs = reversed(iter_revs)
    else:
        iter_revs = _generate_all_revisions(branch, start_rev_id, end_rev_id,
                                            direction, delayed_graph_generation,
                                            exclude_common_ancestry)
        if direction == 'forward':
            iter_revs = _rebase_merge_depth(reverse_by_depth(list(iter_revs)))
    return iter_revs


def _generate_one_revision(branch, rev_id, br_rev_id, br_revno):
    if rev_id == br_rev_id:
        # It's the tip
        return [(br_rev_id, br_revno, 0)]
    else:
        revno = branch.revision_id_to_dotted_revno(rev_id)
        revno_str = '.'.join(str(n) for n in revno)
        return [(rev_id, revno_str, 0)]
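
# Note on the function above: revision_id_to_dotted_revno() returns a tuple
# of ints, e.g. (3,) for mainline revision 3 or (2, 1, 4) for a merged
# revision, which the join renders as '3' and '2.1.4' respectively.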


def _generate_flat_revisions(branch, start_rev_id, end_rev_id, direction,
                             exclude_common_ancestry=False):
    result = _linear_view_revisions(
        branch, start_rev_id, end_rev_id,
        exclude_common_ancestry=exclude_common_ancestry)
    # If a start limit was given and it's not obviously an
    # ancestor of the end limit, check it before outputting anything
    if direction == 'forward' or (start_rev_id
        and not _is_obvious_ancestor(branch, start_rev_id, end_rev_id)):
        try:
            result = list(result)
        except _StartNotLinearAncestor:
            raise errors.BzrCommandError('Start revision not found in'
                ' left-hand history of end revision.')
    return result


def _generate_all_revisions(branch, start_rev_id, end_rev_id, direction,
                            delayed_graph_generation,
                            exclude_common_ancestry=False):
    # On large trees, generating the merge graph can take 30-60 seconds
    # so we delay doing it until a merge is detected, incrementally
    # returning initial (non-merge) revisions while we can.

    # The above is only true for old formats (<= 0.92), for newer formats, a
    # couple of seconds only should be needed to load the whole graph and the
    # other graph operations needed are even faster than that -- vila 100201
    initial_revisions = []
    if delayed_graph_generation:
        try:
            for rev_id, revno, depth in _linear_view_revisions(
                branch, start_rev_id, end_rev_id, exclude_common_ancestry):
                if _has_merges(branch, rev_id):
                    # The end_rev_id can be nested down somewhere. We need an
                    # explicit ancestry check. There is an ambiguity here as we
                    # may not raise _StartNotLinearAncestor for a revision that
                    # is an ancestor but not a *linear* one. But since we have
                    # loaded the graph to do the check (or calculate a dotted
                    # revno), we may as well accept to show the log... We need
                    # the check only if start_rev_id is not None as all
                    # revisions have _mod_revision.NULL_REVISION as an ancestor
                    graph = branch.repository.get_graph()
                    if (start_rev_id is not None
                        and not graph.is_ancestor(start_rev_id, end_rev_id)):
                        raise _StartNotLinearAncestor()
                    # Since we collected the revisions so far, we need to
                    # adjust end_rev_id.
                    end_rev_id = rev_id
                    break
                else:
                    initial_revisions.append((rev_id, revno, depth))
            else:
                # No merged revisions found
                return initial_revisions
        except _StartNotLinearAncestor:
            # A merge was never detected so the lower revision limit can't
            # be nested down somewhere
            raise errors.BzrCommandError('Start revision not found in'
                ' history of end revision.')

    # We exit the loop above because we encounter a revision with merges, from
    # this revision, we need to switch to _graph_view_revisions.

    # A log including nested merges is required. If the direction is reverse,
    # we rebase the initial merge depths so that the development line is
    # shown naturally, i.e. just like it is for linear logging. We can easily
    # make forward the exact opposite display, but showing the merge revisions
    # indented at the end seems slightly nicer in that case.
    view_revisions = chain(iter(initial_revisions),
        _graph_view_revisions(branch, start_rev_id, end_rev_id,
                              rebase_initial_depths=(direction == 'reverse'),
                              exclude_common_ancestry=exclude_common_ancestry))
    return view_revisions


def _has_merges(branch, rev_id):
    """Does a revision have multiple parents or not?"""
    parents = branch.repository.get_parent_map([rev_id]).get(rev_id, [])
    return len(parents) > 1


def _is_obvious_ancestor(branch, start_rev_id, end_rev_id):
    """Is start_rev_id an obvious ancestor of end_rev_id?"""
    if start_rev_id and end_rev_id:
        start_dotted = branch.revision_id_to_dotted_revno(start_rev_id)
        end_dotted = branch.revision_id_to_dotted_revno(end_rev_id)
        if len(start_dotted) == 1 and len(end_dotted) == 1:
            # both on mainline
            return start_dotted[0] <= end_dotted[0]
        elif (len(start_dotted) == 3 and len(end_dotted) == 3 and
              start_dotted[0:1] == end_dotted[0:1]):
            # both on same development line
            return start_dotted[2] <= end_dotted[2]
        else:
            # not obvious
            return False
    # if either start or end is not specified then we use either the first or
    # the last revision and *they* are obvious ancestors.
    return True


def _linear_view_revisions(branch, start_rev_id, end_rev_id,
                           exclude_common_ancestry=False):
    """Calculate a sequence of revisions to view, newest to oldest.

    :param start_rev_id: the lower revision-id
    :param end_rev_id: the upper revision-id
    :param exclude_common_ancestry: Whether the start_rev_id should be part of
        the iterated revisions.
    :return: An iterator of (revision_id, dotted_revno, merge_depth) tuples.
    :raises _StartNotLinearAncestor: if a start_rev_id is specified but
        is not found walking the left-hand history
    """
    br_revno, br_rev_id = branch.last_revision_info()
    repo = branch.repository
    if start_rev_id is None and end_rev_id is None:
        cur_revno = br_revno
        for revision_id in repo.iter_reverse_revision_history(br_rev_id):
            yield revision_id, str(cur_revno), 0
            cur_revno -= 1
    else:
        if end_rev_id is None:
            end_rev_id = br_rev_id
        found_start = start_rev_id is None
        for revision_id in repo.iter_reverse_revision_history(end_rev_id):
            revno = branch.revision_id_to_dotted_revno(revision_id)
            revno_str = '.'.join(str(n) for n in revno)
            if not found_start and revision_id == start_rev_id:
                if not exclude_common_ancestry:
                    yield revision_id, revno_str, 0
                found_start = True
                break
            else:
                yield revision_id, revno_str, 0
        else:
            if not found_start:
                raise _StartNotLinearAncestor()


def _graph_view_revisions(branch, start_rev_id, end_rev_id,
                          rebase_initial_depths=True,
                          exclude_common_ancestry=False):
    """Calculate revisions to view including merges, newest to oldest.

    :param branch: the branch
    :param start_rev_id: the lower revision-id
    :param end_rev_id: the upper revision-id
    :param rebase_initial_depths: should depths be rebased until a mainline
        revision is found?
    :return: An iterator of (revision_id, dotted_revno, merge_depth) tuples.
    """
    if exclude_common_ancestry:
        stop_rule = 'with-merges-without-common-ancestry'
    else:
        stop_rule = 'with-merges'
    view_revisions = branch.iter_merge_sorted_revisions(
        start_revision_id=end_rev_id, stop_revision_id=start_rev_id,
        stop_rule=stop_rule)
    if not rebase_initial_depths:
        for (rev_id, merge_depth, revno, end_of_merge
             ) in view_revisions:
            yield rev_id, '.'.join(map(str, revno)), merge_depth
    else:
        # We're following a development line starting at a merged revision.
        # We need to adjust depths down by the initial depth until we find
        # a depth less than it. Then we use that depth as the adjustment.
        # If and when we reach the mainline, depth adjustment ends.
        depth_adjustment = None
        for (rev_id, merge_depth, revno, end_of_merge
             ) in view_revisions:
            if depth_adjustment is None:
                depth_adjustment = merge_depth
            if depth_adjustment:
                if merge_depth < depth_adjustment:
                    # From now on we reduce the depth adjustment, this can be
                    # surprising for users. The alternative requires two passes
                    # which breaks the fast display of the first revision
                    # anyway.
                    depth_adjustment = merge_depth
                merge_depth -= depth_adjustment
            yield rev_id, '.'.join(map(str, revno)), merge_depth


@deprecated_function(deprecated_in((2, 2, 0)))
def calculate_view_revisions(branch, start_revision, end_revision, direction,
                             specific_fileid, generate_merge_revisions):
    """Calculate the revisions to view.

    :return: An iterator of (revision_id, dotted_revno, merge_depth) tuples OR
             a list of the same tuples.
    """
    start_rev_id, end_rev_id = _get_revision_limits(branch, start_revision,
                                                    end_revision)
    view_revisions = list(_calc_view_revisions(branch, start_rev_id, end_rev_id,
        direction, generate_merge_revisions or specific_fileid))
    if specific_fileid:
        view_revisions = _filter_revisions_touching_file_id(branch,
            specific_fileid, view_revisions,
            include_merges=generate_merge_revisions)
    return _rebase_merge_depth(view_revisions)


def _rebase_merge_depth(view_revisions):
    """Adjust depths upwards so the top level is 0."""
    # If either the first or last revision has a merge_depth of 0, we're done
    if view_revisions and view_revisions[0][2] and view_revisions[-1][2]:
        min_depth = min([d for r,n,d in view_revisions])
        if min_depth != 0:
            view_revisions = [(r,n,d-min_depth) for r,n,d in view_revisions]
    return view_revisions
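
# Note on _rebase_merge_depth(): a view that starts inside a merge, e.g. with
# merge depths [2, 3, 2], is shifted up to [0, 1, 0] so that the outermost
# revisions shown are not indented.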


def make_log_rev_iterator(branch, view_revisions, generate_delta, search,
                          file_ids=None, direction='reverse'):
    """Create a revision iterator for log.

    :param branch: The branch being logged.
    :param view_revisions: The revisions being viewed.
    :param generate_delta: Whether to generate a delta for each revision.
      Permitted values are None, 'full' and 'partial'.
    :param search: A user text search string.
    :param file_ids: If non-empty, only revisions matching one or more of
      the file-ids are to be kept.
    :param direction: the direction in which view_revisions is sorted
    :return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
        delta).
    """
    # Convert view_revisions into (view, None, None) groups to fit with
    # the standard interface here.
    if type(view_revisions) == list:
        # A single batch conversion is faster than many incremental ones.
        # As we have all the data, do a batch conversion.
        nones = [None] * len(view_revisions)
        log_rev_iterator = iter([zip(view_revisions, nones, nones)])
    else:
        def _convert():
            for view in view_revisions:
                yield (view, None, None)
        log_rev_iterator = iter([_convert()])
    for adapter in log_adapters:
        # It would be nicer if log adapters were first class objects
        # with custom parameters. This will do for now. IGC 20090127
        if adapter == _make_delta_filter:
            log_rev_iterator = adapter(branch, generate_delta,
                search, log_rev_iterator, file_ids, direction)
        else:
            log_rev_iterator = adapter(branch, generate_delta,
                search, log_rev_iterator)
    return log_rev_iterator
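

# Editorial sketch: every entry in the module-level log_adapters list used
# above has the same shape as _make_search_filter below - it takes the
# (branch, generate_delta, search, log_rev_iterator) pipeline arguments and
# returns a new iterator over the same batched
# ((rev_id, revno, merge_depth), rev, delta) tuples. A hypothetical extra
# adapter that keeps only mainline revisions could look like this:
def _example_mainline_only_filter(branch, generate_delta, search,
                                  log_rev_iterator):
    """Sketch: drop every revision whose merge_depth is not 0."""
    for revs in log_rev_iterator:
        yield [item for item in revs if item[0][2] == 0]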


def _make_search_filter(branch, generate_delta, search, log_rev_iterator):
    """Create a filtered iterator of log_rev_iterator matching on a regex.

    :param branch: The branch being logged.
    :param generate_delta: Whether to generate a delta for each revision.
    :param search: A user text search string.
    :param log_rev_iterator: An input iterator containing all revisions that
        could be displayed, in lists.
    :return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
        delta).
    """
    if search is None:
        return log_rev_iterator
    searchRE = re_compile_checked(search, re.IGNORECASE,
            'log message filter')
    return _filter_message_re(searchRE, log_rev_iterator)


def _filter_message_re(searchRE, log_rev_iterator):
    for revs in log_rev_iterator:
        new_revs = []
        for (rev_id, revno, merge_depth), rev, delta in revs:
            if searchRE.search(rev.message):
                new_revs.append(((rev_id, revno, merge_depth), rev, delta))
        yield new_revs


def _make_delta_filter(branch, generate_delta, search, log_rev_iterator,
                       fileids=None, direction='reverse'):
    """Add revision deltas to a log iterator if needed.

    :param branch: The branch being logged.
    :param generate_delta: Whether to generate a delta for each revision.
      Permitted values are None, 'full' and 'partial'.
    :param search: A user text search string.
    :param log_rev_iterator: An input iterator containing all revisions that
        could be displayed, in lists.
    :param fileids: If non-empty, only revisions matching one or more of
      the file-ids are to be kept.
    :param direction: the direction in which view_revisions is sorted
    :return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
        delta).
    """
    if not generate_delta and not fileids:
        return log_rev_iterator
    return _generate_deltas(branch.repository, log_rev_iterator,
        generate_delta, fileids, direction)


def _generate_deltas(repository, log_rev_iterator, delta_type, fileids,
                     direction):
    """Create deltas for each batch of revisions in log_rev_iterator.

    If we're only generating deltas for the sake of filtering against
    file-ids, we stop generating deltas once all file-ids reach the
    appropriate life-cycle point. If we're receiving data newest to
    oldest, then that life-cycle point is 'add', otherwise it's 'remove'.
    """
    check_fileids = fileids is not None and len(fileids) > 0
    if check_fileids:
        fileid_set = set(fileids)
        if direction == 'reverse':
            stop_on = 'add'
        else:
            stop_on = 'remove'
    else:
        fileid_set = None
    for revs in log_rev_iterator:
        # If we were matching against fileids and we've run out,
        # there's nothing left to do
        if check_fileids and not fileid_set:
            return
        revisions = [rev[1] for rev in revs]
        new_revs = []
        if delta_type == 'full' and not check_fileids:
            deltas = repository.get_deltas_for_revisions(revisions)
            for rev, delta in izip(revs, deltas):
                new_revs.append((rev[0], rev[1], delta))
        else:
            deltas = repository.get_deltas_for_revisions(revisions, fileid_set)
            for rev, delta in izip(revs, deltas):
                if check_fileids:
                    if delta is None or not delta.has_changed():
                        continue
                    else:
                        _update_fileids(delta, fileid_set, stop_on)
                        if delta_type is None:
                            delta = None
                        elif delta_type == 'full':
                            # If the file matches all the time, rebuilding
                            # a full delta like this in addition to a partial
                            # one could be slow. However, it's likely that
                            # most revisions won't get this far, making it
                            # faster to filter on the partial deltas and
                            # build the occasional full delta than always
                            # building full deltas and filtering those.
                            rev_id = rev[0][0]
                            delta = repository.get_revision_delta(rev_id)
                new_revs.append((rev[0], rev[1], delta))
        yield new_revs


def _update_fileids(delta, fileids, stop_on):
    """Update the set of file-ids to search based on file lifecycle events.

    :param fileids: a set of fileids to update
    :param stop_on: either 'add' or 'remove' - take file-ids out of the
      fileids set once their add or remove entry is detected respectively
    """
    if stop_on == 'add':
        for item in delta.added:
            if item[1] in fileids:
                fileids.remove(item[1])
    elif stop_on == 'delete':
        for item in delta.removed:
            if item[1] in fileids:
                fileids.remove(item[1])


def _make_revision_objects(branch, generate_delta, search, log_rev_iterator):
    """Extract revision objects from the repository.

    :param branch: The branch being logged.
    :param generate_delta: Whether to generate a delta for each revision.
    :param search: A user text search string.
    :param log_rev_iterator: An input iterator containing all revisions that
        could be displayed, in lists.
    :return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
        delta).
    """
    repository = branch.repository
    for revs in log_rev_iterator:
        # r = revision_id, n = revno, d = merge depth
        revision_ids = [view[0] for view, _, _ in revs]
        revisions = repository.get_revisions(revision_ids)
        revs = [(rev[0], revision, rev[2]) for rev, revision in
            izip(revs, revisions)]
        yield revs


def _make_batch_filter(branch, generate_delta, search, log_rev_iterator):
    """Group up a single large batch into smaller ones.

    :param branch: The branch being logged.
    :param generate_delta: Whether to generate a delta for each revision.
    :param search: A user text search string.
    :param log_rev_iterator: An input iterator containing all revisions that
        could be displayed, in lists.
    :return: An iterator over lists of ((rev_id, revno, merge_depth), rev,
        delta).
    """
    repository = branch.repository
    num = 9
    for batch in log_rev_iterator:
        batch = iter(batch)
        while True:
            step = [detail for _, detail in zip(range(num), batch)]
            if len(step) == 0:
                break
            yield step
            num = min(int(num * 1.5), 200)


def _get_revision_limits(branch, start_revision, end_revision):
    """Get and check revision limits.

    :param branch: The branch containing the revisions.

    :param start_revision: The first revision to be logged.
        For backwards compatibility this may be a mainline integer revno,
        but for merge revision support a RevisionInfo is expected.

    :param end_revision: The last revision to be logged.
        For backwards compatibility this may be a mainline integer revno,
        but for merge revision support a RevisionInfo is expected.

    :return: (start_rev_id, end_rev_id) tuple.
    """
    branch_revno, branch_rev_id = branch.last_revision_info()
    start_rev_id = None
    if start_revision is None:
        start_revno = 1
    else:
        if isinstance(start_revision, revisionspec.RevisionInfo):
            start_rev_id = start_revision.rev_id
            start_revno = start_revision.revno or 1
        else:
            branch.check_real_revno(start_revision)
            start_revno = start_revision
            start_rev_id = branch.get_rev_id(start_revno)

    end_rev_id = None
    if end_revision is None:
        end_revno = branch_revno
    else:
        if isinstance(end_revision, revisionspec.RevisionInfo):
            end_rev_id = end_revision.rev_id
            end_revno = end_revision.revno or branch_revno
        else:
            branch.check_real_revno(end_revision)
            end_revno = end_revision
            end_rev_id = branch.get_rev_id(end_revno)

    if branch_revno != 0:
        if (start_rev_id == _mod_revision.NULL_REVISION
            or end_rev_id == _mod_revision.NULL_REVISION):
            raise errors.BzrCommandError('Logging revision 0 is invalid.')
        if start_revno > end_revno:
            raise errors.BzrCommandError("Start revision must be older than "
                                         "the end revision.")
    return (start_rev_id, end_rev_id)


def _get_mainline_revs(branch, start_revision, end_revision):
    """Get the mainline revisions from the branch.

    Generates the list of mainline revisions for the branch.

    :param branch: The branch containing the revisions.

    :param start_revision: The first revision to be logged.
        For backwards compatibility this may be a mainline integer revno,
        but for merge revision support a RevisionInfo is expected.

    :param end_revision: The last revision to be logged.
        For backwards compatibility this may be a mainline integer revno,
        but for merge revision support a RevisionInfo is expected.

    :return: A (mainline_revs, rev_nos, start_rev_id, end_rev_id) tuple.
    """
    branch_revno, branch_last_revision = branch.last_revision_info()
    if branch_revno == 0:
        return None, None, None, None

    # For mainline generation, map start_revision and end_revision to
    # mainline revnos. If the revision is not on the mainline choose the
    # appropriate extreme of the mainline instead - the extra will be
    # filtered later.
    # Also map the revisions to rev_ids, to be used in the later filtering
    # stage.
    start_rev_id = None
    if start_revision is None:
        start_revno = 1
    else:
        if isinstance(start_revision, revisionspec.RevisionInfo):
            start_rev_id = start_revision.rev_id
            start_revno = start_revision.revno or 1
        else:
            branch.check_real_revno(start_revision)
            start_revno = start_revision

    end_rev_id = None
    if end_revision is None:
        end_revno = branch_revno
    else:
        if isinstance(end_revision, revisionspec.RevisionInfo):
            end_rev_id = end_revision.rev_id
            end_revno = end_revision.revno or branch_revno
        else:
            branch.check_real_revno(end_revision)
            end_revno = end_revision

    if ((start_rev_id == _mod_revision.NULL_REVISION)
        or (end_rev_id == _mod_revision.NULL_REVISION)):
        raise errors.BzrCommandError('Logging revision 0 is invalid.')
    if start_revno > end_revno:
        raise errors.BzrCommandError("Start revision must be older than "
                                     "the end revision.")

    if end_revno < start_revno:
        return None, None, None, None
    cur_revno = branch_revno
    rev_nos = {}
    mainline_revs = []
    for revision_id in branch.repository.iter_reverse_revision_history(
                        branch_last_revision):
        if cur_revno < start_revno:
            # We have gone far enough, but we always add 1 more revision
            rev_nos[revision_id] = cur_revno
            mainline_revs.append(revision_id)
            break
        if cur_revno <= end_revno:
            rev_nos[revision_id] = cur_revno
            mainline_revs.append(revision_id)
        cur_revno -= 1
    else:
        # We walked off the edge of all revisions, so we add a 'None' marker
        mainline_revs.append(None)

    mainline_revs.reverse()

    # override the mainline to look like the revision history.
    return mainline_revs, rev_nos, start_rev_id, end_rev_id


@deprecated_function(deprecated_in((2, 2, 0)))
def _filter_revision_range(view_revisions, start_rev_id, end_rev_id):
    """Filter view_revisions based on revision ranges.

    :param view_revisions: A list of (revision_id, dotted_revno, merge_depth)
        tuples to be filtered.

    :param start_rev_id: If not NONE specifies the first revision to be logged.
        If NONE then all revisions up to the end_rev_id are logged.

    :param end_rev_id: If not NONE specifies the last revision to be logged.
        If NONE then all revisions up to the end of the log are logged.

    :return: The filtered view_revisions.
    """
    if start_rev_id or end_rev_id:
        revision_ids = [r for r, n, d in view_revisions]
        if start_rev_id:
            start_index = revision_ids.index(start_rev_id)
        else:
            start_index = 0
        if start_rev_id == end_rev_id:
            end_index = start_index
        else:
            if end_rev_id:
                end_index = revision_ids.index(end_rev_id)
            else:
                end_index = len(view_revisions) - 1
        # To include the revisions merged into the last revision,
        # extend end_rev_id down to, but not including, the next rev
        # with the same or lesser merge_depth
        end_merge_depth = view_revisions[end_index][2]
        try:
            for index in xrange(end_index+1, len(view_revisions)+1):
                if view_revisions[index][2] <= end_merge_depth:
                    end_index = index - 1
                    break
        except IndexError:
            # if the search falls off the end then log to the end as well
            end_index = len(view_revisions) - 1
        view_revisions = view_revisions[start_index:end_index+1]
    return view_revisions


def _filter_revisions_touching_file_id(branch, file_id, view_revisions,
                                       include_merges=True):
    r"""Return the list of revision ids which touch a given file id.

    The function filters view_revisions and returns a subset.
    This includes the revisions which directly change the file id,
    and the revisions which merge these changes. So if the

    And 'C' changes a file, then both C and D will be returned. F will not be
    returned even though it brings the changes to C into the branch starting
    with E. (Note that if we were using F as the tip instead of G, then we

    This will also be restricted based on a subset of the mainline.

    :param branch: The branch where we can get text revision information.

    :param file_id: Filter out revisions that do not touch file_id.

    :param view_revisions: A list of (revision_id, dotted_revno, merge_depth)
        tuples. This is the list of revisions which will be filtered. It is
        assumed that view_revisions is in merge_sort order (i.e. newest
        revision first).

    :param include_merges: include merge revisions in the result or not

    :return: A list of (revision_id, dotted_revno, merge_depth) tuples.
    """
    # Lookup all possible text keys to determine which ones actually modified
    # the file.
    text_keys = [(file_id, rev_id) for rev_id, revno, depth in view_revisions]
    # Looking up keys in batches of 1000 can cut the time in half, as well as
    # memory consumption. GraphIndex *does* like to look for a few keys in
    # parallel, it just doesn't like looking for *lots* of keys in parallel.
    # TODO: This code needs to be re-evaluated periodically as we tune the
    #       indexing layer. We might consider passing in hints as to the known
    #       access pattern (sparse/clustered, high success rate/low success
    #       rate). This particular access is clustered with a low success rate.
    get_parent_map = branch.repository.texts.get_parent_map
    modified_text_revisions = set()
    chunk_size = 1000
    for start in xrange(0, len(text_keys), chunk_size):
        next_keys = text_keys[start:start + chunk_size]
        # Only keep the revision_id portion of the key
        modified_text_revisions.update(
            [k[1] for k in get_parent_map(next_keys)])
    del text_keys, next_keys

    result = []
    # Track what revisions will merge the current revision, replace entries
    # with 'None' when they have been added to result
    current_merge_stack = [None]
    for info in view_revisions:
        rev_id, revno, depth = info
        if depth == len(current_merge_stack):
            current_merge_stack.append(info)
        else:
            del current_merge_stack[depth + 1:]
            current_merge_stack[-1] = info

        if rev_id in modified_text_revisions:
            # This needs to be logged, along with the extra revisions
            for idx in xrange(len(current_merge_stack)):
                node = current_merge_stack[idx]
                if node is not None:
                    if include_merges or node[2] == 0:
                        result.append(node)
                        current_merge_stack[idx] = None
    return result


@deprecated_function(deprecated_in((2, 2, 0)))
def get_view_revisions(mainline_revs, rev_nos, branch, direction,
                       include_merges=True):
    """Produce an iterator of revisions to show
    :return: an iterator of (revision_id, revno, merge_depth)
    (if there is no revno for a revision, None is supplied)
    """
    if not include_merges:
        revision_ids = mainline_revs[1:]
        if direction == 'reverse':
            revision_ids.reverse()
        for revision_id in revision_ids:
            yield revision_id, str(rev_nos[revision_id]), 0
        return
    graph = branch.repository.get_graph()
    # This asks for all mainline revisions, which means we only have to spider
    # sideways, rather than depth history. That said, it's still size-of-history
    # and should be addressed.
    # mainline_revisions always includes an extra revision at the beginning, so
    # don't request it.
    parent_map = dict(((key, value) for key, value in
        graph.iter_ancestry(mainline_revs[1:]) if value is not None))
    # filter out ghosts; merge_sort errors on ghosts.
    rev_graph = _mod_repository._strip_NULL_ghosts(parent_map)
    merge_sorted_revisions = tsort.merge_sort(
        rev_graph,
        mainline_revs[-1],
        mainline_revs,
        generate_revno=True)

    if direction == 'forward':
        # forward means oldest first.
        merge_sorted_revisions = reverse_by_depth(merge_sorted_revisions)
    elif direction != 'reverse':
        raise ValueError('invalid direction %r' % direction)

    for (sequence, rev_id, merge_depth, revno, end_of_merge
         ) in merge_sorted_revisions:
        yield rev_id, '.'.join(map(str, revno)), merge_depth


def reverse_by_depth(merge_sorted_revisions, _depth=0):
    """Reverse revisions by depth.

    Revisions with a different depth are sorted as a group with the previous
    revision of that depth.  There may be no topological justification for this,
    but it looks much nicer.
    """
    # Add a fake revision at start so that we can always attach sub revisions
    merge_sorted_revisions = [(None, None, _depth)] + merge_sorted_revisions
    zd_revisions = []
    for val in merge_sorted_revisions:
        if val[2] == _depth:
            # Each revision at the current depth becomes a chunk grouping all
            # higher depth revisions.
            zd_revisions.append([val])
        else:
            zd_revisions[-1].append(val)
    for revisions in zd_revisions:
        if len(revisions) > 1:
            # We have higher depth revisions, let us reverse them locally
            revisions[1:] = reverse_by_depth(revisions[1:], _depth + 1)
    zd_revisions.reverse()
    result = []
    for chunk in zd_revisions:
        result.extend(chunk)
    if not _depth:
        # Top level call, get rid of the fake revisions that have been added
        result = [r for r in result if r[0] is not None and r[1] is not None]
    return result
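
# Worked example for reverse_by_depth(): the merge-sorted view
# D(0), C(1), B(1), A(0) (newest first) becomes A(0), D(0), B(1), C(1) -
# the two depth-0 revisions swap order, D keeps its merged revisions attached
# after it, and those merged revisions are reversed among themselves as well.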


class LogRevision(object):
    """A revision to be logged (by LogFormatter.log_revision).

    A simple wrapper for the attributes of a revision to be logged.
    The attributes may or may not be populated, as determined by the
    logging options and the log formatter capabilities.
    """

    def __init__(self, rev=None, revno=None, merge_depth=0, delta=None,
                 tags=None, diff=None):
        self.rev = rev
        self.revno = str(revno)
        self.merge_depth = merge_depth
        self.delta = delta
        self.tags = tags
        self.diff = diff


class LogFormatter(object):
    """Abstract class to display log messages.

    At a minimum, a derived class must implement the log_revision method.

    If the LogFormatter needs to be informed of the beginning or end of
    a log it should implement the begin_log and/or end_log hook methods.

    A LogFormatter should define the following supports_XXX flags
    to indicate which LogRevision attributes it supports:

    - supports_delta must be True if this log formatter supports delta.
      Otherwise the delta attribute may not be populated.  The 'delta_format'
      attribute describes whether the 'short_status' format (1) or the long
      one (2) should be used.

    - supports_merge_revisions must be True if this log formatter supports
      merge revisions.  If not, then only mainline revisions will be passed
      to the formatter.

    - preferred_levels is the number of levels this formatter defaults to.
      The default value is zero meaning display all levels.
      This value is only relevant if supports_merge_revisions is True.

    - supports_tags must be True if this log formatter supports tags.
      Otherwise the tags attribute may not be populated.

    - supports_diff must be True if this log formatter supports diffs.
      Otherwise the diff attribute may not be populated.

    Plugins can register functions to show custom revision properties using
    the properties_handler_registry. The registered function
    must respect the following interface description:
        def my_show_properties(properties_dict):
            # code that returns a dict {'name':'value'} of the properties
            # to be shown
    """
    preferred_levels = 0

    def __init__(self, to_file, show_ids=False, show_timezone='original',
                 delta_format=None, levels=None, show_advice=False,
                 to_exact_file=None, author_list_handler=None):
        """Create a LogFormatter.

        :param to_file: the file to output to
        :param to_exact_file: if set, gives an output stream to which
             non-Unicode diffs are written.
        :param show_ids: if True, revision-ids are to be displayed
        :param show_timezone: the timezone to use
        :param delta_format: the level of delta information to display
          or None to leave it to the formatter to decide
        :param levels: the number of levels to display; None or -1 to
          let the log formatter decide.
        :param show_advice: whether to show advice at the end of the
          log or not.
        :param author_list_handler: callable generating a list of
          authors to display for a given revision
        """
        self.to_file = to_file
        # 'exact' stream used to show diff, it should print content 'as is'
        # and should not try to decode/encode it to unicode to avoid bug #328007
        if to_exact_file is not None:
            self.to_exact_file = to_exact_file
        else:
            # XXX: somewhat hacky; this assumes it's a codec writer; it's better
            # for code that expects to get diffs to pass in the exact file
            # stream
            self.to_exact_file = getattr(to_file, 'stream', to_file)
        self.show_ids = show_ids
        self.show_timezone = show_timezone
        if delta_format is None:
            # Ensures backward compatibility
            delta_format = 2 # long format
        self.delta_format = delta_format
        self.levels = levels
        self._show_advice = show_advice
        self._merge_count = 0
        self._author_list_handler = author_list_handler

    def get_levels(self):
        """Get the number of levels to display or 0 for all."""
        if getattr(self, 'supports_merge_revisions', False):
            if self.levels is None or self.levels == -1:
                self.levels = self.preferred_levels
        else:
            self.levels = 1
        return self.levels

    def log_revision(self, revision):
        """Log a revision.

        :param revision: The LogRevision to be logged.
        """
        raise NotImplementedError('not implemented in abstract base')

    def show_advice(self):
        """Output user advice, if any, when the log is completed."""
        if self._show_advice and self.levels == 1 and self._merge_count > 0:
            advice_sep = self.get_advice_separator()
            if advice_sep:
                self.to_file.write(advice_sep)
            self.to_file.write(
                "Use --include-merges or -n0 to see merged revisions.\n")

    def get_advice_separator(self):
        """Get the text separating the log from the closing advice."""
        return ''

    def short_committer(self, rev):
        name, address = config.parse_username(rev.committer)
        if name:
            return name
        return address

    def short_author(self, rev):
        return self.authors(rev, 'first', short=True, sep=', ')

    def authors(self, rev, who, short=False, sep=None):
        """Generate list of authors, taking --authors option into account.

        The caller has to specify the name of an author list handler,
        as provided by the author list registry, using the ``who``
        argument.  That name only sets a default, though: when the
        user selected a different author list generation using the
        ``--authors`` command line switch, as represented by the
        ``author_list_handler`` constructor argument, that value takes
        precedence.

        :param rev: The revision for which to generate the list of authors.
        :param who: Name of the default handler.
        :param short: Whether to shorten names to either name or address.
        :param sep: What separator to use for automatic concatenation.
        """
        if self._author_list_handler is not None:
            # The user did specify --authors, which overrides the default
            author_list_handler = self._author_list_handler
        else:
            # The user didn't specify --authors, so we use the caller's default
            author_list_handler = author_list_registry.get(who)
        names = author_list_handler(rev)
        if short:
            for i in range(len(names)):
                name, address = config.parse_username(names[i])
                if name:
                    names[i] = name
                else:
                    names[i] = address
        if sep is not None:
            names = sep.join(names)
        return names

    def merge_marker(self, revision):
        """Get the merge marker to include in the output or '' if none."""
        if len(revision.rev.parent_ids) > 1:
            self._merge_count += 1
            return ' [merge]'
        else:
            return ''

    def show_properties(self, revision, indent):
        """Displays the custom properties returned by each registered handler.

        If a registered handler raises an error it is propagated.
        """
        for line in self.custom_properties(revision):
            self.to_file.write("%s%s\n" % (indent, line))

    def custom_properties(self, revision):
        """Format the custom properties returned by each registered handler.

        If a registered handler raises an error it is propagated.

        :return: a list of formatted lines (excluding trailing newlines)
        """
        lines = self._foreign_info_properties(revision)
        for key, handler in properties_handler_registry.iteritems():
            lines.extend(self._format_properties(handler(revision)))
        return lines

    def _foreign_info_properties(self, rev):
        """Custom log displayer for foreign revision identifiers.

        :param rev: Revision object.
        """
        # Revision comes directly from a foreign repository
        if isinstance(rev, foreign.ForeignRevision):
            return self._format_properties(
                rev.mapping.vcs.show_foreign_revid(rev.foreign_revid))

        # Imported foreign revision ids always contain :
        if not ":" in rev.revision_id:
            return []

        # Revision was once imported from a foreign repository
        try:
            foreign_revid, mapping = \
                foreign.foreign_vcs_registry.parse_revision_id(rev.revision_id)
        except errors.InvalidRevisionId:
            return []

        return self._format_properties(
            mapping.vcs.show_foreign_revid(foreign_revid))

    def _format_properties(self, properties):
        lines = []
        for key, value in properties.items():
            lines.append(key + ': ' + value)
        return lines

    def show_diff(self, to_file, diff, indent):
        for l in diff.rstrip().split('\n'):
            to_file.write(indent + '%s\n' % (l,))


# Separator between revisions in long format
_LONG_SEP = '-' * 60


class LongLogFormatter(LogFormatter):

    supports_merge_revisions = True
    preferred_levels = 1
    supports_delta = True
    supports_tags = True
    supports_diff = True

    def __init__(self, *args, **kwargs):
        super(LongLogFormatter, self).__init__(*args, **kwargs)
        if self.show_timezone == 'original':
            self.date_string = self._date_string_original_timezone
        else:
            self.date_string = self._date_string_with_timezone

    def _date_string_with_timezone(self, rev):
        return format_date(rev.timestamp, rev.timezone or 0,
                           self.show_timezone)

    def _date_string_original_timezone(self, rev):
        return format_date_with_offset_in_original_timezone(rev.timestamp,
            rev.timezone or 0)

    def log_revision(self, revision):
        """Log a revision, either merged or not."""
        indent = '    ' * revision.merge_depth
        lines = [_LONG_SEP]
        if revision.revno is not None:
            lines.append('revno: %s%s' % (revision.revno,
                self.merge_marker(revision)))
        if revision.tags:
            lines.append('tags: %s' % (', '.join(revision.tags)))
        if self.show_ids or revision.revno is None:
            lines.append('revision-id: %s' % (revision.rev.revision_id,))
        if self.show_ids:
            for parent_id in revision.rev.parent_ids:
                lines.append('parent: %s' % (parent_id,))
        lines.extend(self.custom_properties(revision.rev))

        committer = revision.rev.committer
        authors = self.authors(revision.rev, 'all')
        if authors != [committer]:
            lines.append('author: %s' % (", ".join(authors),))
        lines.append('committer: %s' % (committer,))

        branch_nick = revision.rev.properties.get('branch-nick', None)
        if branch_nick is not None:
            lines.append('branch nick: %s' % (branch_nick,))

        lines.append('timestamp: %s' % (self.date_string(revision.rev),))

        lines.append('message:')
        if not revision.rev.message:
            lines.append('  (no message)')
        else:
            message = revision.rev.message.rstrip('\r\n')
            for l in message.split('\n'):
                lines.append('  %s' % (l,))

        # Dump the output, appending the delta and diff if requested
        to_file = self.to_file
        to_file.write("%s%s\n" % (indent, ('\n' + indent).join(lines)))
        if revision.delta is not None:
            # Use the standard status output to display changes
            from bzrlib.delta import report_delta
            report_delta(to_file, revision.delta, short_status=False,
                         show_ids=self.show_ids, indent=indent)
        if revision.diff is not None:
            to_file.write(indent + 'diff:\n')
            to_file.flush()
            # Note: we explicitly don't indent the diff (relative to the
            # revision information) so that the output can be fed to patch -p0
            self.show_diff(self.to_exact_file, revision.diff, indent)
            self.to_exact_file.flush()

    def get_advice_separator(self):
        """Get the text separating the log from the closing advice."""
        return '-' * 60 + '\n'


class ShortLogFormatter(LogFormatter):

    supports_merge_revisions = True
    preferred_levels = 1
    supports_delta = True
    supports_tags = True
    supports_diff = True

    def __init__(self, *args, **kwargs):
        super(ShortLogFormatter, self).__init__(*args, **kwargs)
        self.revno_width_by_depth = {}

    def log_revision(self, revision):
        # We need two indents: one per depth and one for the information
        # relative to that indent. Most mainline revnos are 5 chars or
        # less while dotted revnos are typically 11 chars or less. Once
        # calculated, we need to remember the offset for a given depth
        # as we might be starting from a dotted revno in the first column
        # and we want subsequent mainline revisions to line up.
        depth = revision.merge_depth
        indent = '    ' * depth
        revno_width = self.revno_width_by_depth.get(depth)
        if revno_width is None:
            if revision.revno.find('.') == -1:
                # mainline revno, e.g. 12345
                revno_width = 5
            else:
                # dotted revno, e.g. 12345.10.55
                revno_width = 11
            self.revno_width_by_depth[depth] = revno_width
        offset = ' ' * (revno_width + 1)

        to_file = self.to_file
        tags = ''
        if revision.tags:
            tags = ' {%s}' % (', '.join(revision.tags))
        to_file.write(indent + "%*s %s\t%s%s%s\n" % (revno_width,
                revision.revno, self.short_author(revision.rev),
                format_date(revision.rev.timestamp,
                            revision.rev.timezone or 0,
                            self.show_timezone, date_fmt="%Y-%m-%d",
                            show_offset=False),
                tags, self.merge_marker(revision)))
        self.show_properties(revision.rev, indent+offset)
        if self.show_ids:
            to_file.write(indent + offset + 'revision-id:%s\n'
                          % (revision.rev.revision_id,))
        if not revision.rev.message:
            to_file.write(indent + offset + '(no message)\n')
        else:
            message = revision.rev.message.rstrip('\r\n')
            for l in message.split('\n'):
                to_file.write(indent + offset + '%s\n' % (l,))

        if revision.delta is not None:
            # Use the standard status output to display changes
            from bzrlib.delta import report_delta
            report_delta(to_file, revision.delta,
                         short_status=self.delta_format==1,
                         show_ids=self.show_ids, indent=indent + offset)
        if revision.diff is not None:
            self.show_diff(self.to_exact_file, revision.diff, '      ')
        to_file.write('\n')


class LineLogFormatter(LogFormatter):

    supports_merge_revisions = True
    preferred_levels = 1
    supports_tags = True

    def __init__(self, *args, **kwargs):
        super(LineLogFormatter, self).__init__(*args, **kwargs)
        width = terminal_width()
        if width is not None:
            # we need one extra space for terminals that wrap on last char
            width = width - 1
        self._max_chars = width

    def truncate(self, str, max_len):
        if max_len is None or len(str) <= max_len:
            return str
        return str[:max_len-3] + '...'

    def date_string(self, rev):
        return format_date(rev.timestamp, rev.timezone or 0,
                           self.show_timezone, date_fmt="%Y-%m-%d",
                           show_offset=False)

    def message(self, rev):
        if not rev.message:
            return '(no message)'
        else:
            return rev.message

    def log_revision(self, revision):
        indent = '  ' * revision.merge_depth
        self.to_file.write(self.log_string(revision.revno, revision.rev,
            self._max_chars, revision.tags, indent))
        self.to_file.write('\n')

    def log_string(self, revno, rev, max_chars, tags=None, prefix=''):
        """Format log info into one string. Truncate tail of string

        :param revno: revision number or None.
            Revision numbers count from 1.
        :param rev: revision object
        :param max_chars: maximum length of resulting string
        :param tags: list of tags or None
        :param prefix: string to prefix each line
        :return: formatted truncated string
        """
        out = []
        if revno:
            # show revno only when it is not None
            out.append("%s:" % revno)
        out.append(self.truncate(self.short_author(rev), 20))
        out.append(self.date_string(rev))
        if len(rev.parent_ids) > 1:
            out.append('[merge]')
        if tags:
            tag_str = '{%s}' % (', '.join(tags))
            out.append(tag_str)
        out.append(rev.get_summary())
        return self.truncate(prefix + " ".join(out).rstrip('\n'), max_chars)
1735
class GnuChangelogLogFormatter(LogFormatter):

    supports_merge_revisions = True
    supports_delta = True

    def log_revision(self, revision):
        """Log a revision, either merged or not."""
        to_file = self.to_file

        date_str = format_date(revision.rev.timestamp,
                               revision.rev.timezone or 0,
                               self.show_timezone,
                               date_fmt='%Y-%m-%d',
                               show_offset=False)
        committer_str = self.authors(revision.rev, 'first', sep=', ')
        committer_str = committer_str.replace(' <', '  <')
        to_file.write('%s  %s\n\n' % (date_str, committer_str))

        if revision.delta is not None and revision.delta.has_changed():
            for c in revision.delta.added + revision.delta.removed + revision.delta.modified:
                path, = c[:1]
                to_file.write('\t* %s:\n' % (path,))
            for c in revision.delta.renamed:
                oldpath, newpath = c[:2]
                # For renamed files, show both the old and the new path
                to_file.write('\t* %s:\n\t* %s:\n' % (oldpath, newpath))
            to_file.write('\n')

        if not revision.rev.message:
            to_file.write('\tNo commit message\n')
        else:
            message = revision.rev.message.rstrip('\r\n')
            for l in message.split('\n'):
                to_file.write('\t%s\n' % (l.lstrip(),))
            to_file.write('\n')


def line_log(rev, max_chars):
    lf = LineLogFormatter(None)
    return lf.log_string(None, rev, max_chars)


class LogFormatterRegistry(registry.Registry):
    """Registry for log formatters."""

    def make_formatter(self, name, *args, **kwargs):
        """Construct a formatter from arguments.

        :param name: Name of the formatter to construct. 'short', 'long' and
            'line' are built-in.
        """
        return self.get(name)(*args, **kwargs)

    def get_default(self, branch):
        return self.get(branch.get_config().log_format())


log_formatter_registry = LogFormatterRegistry()

log_formatter_registry.register('short', ShortLogFormatter,
                                'Moderately short log format')
log_formatter_registry.register('long', LongLogFormatter,
                                'Detailed log format')
log_formatter_registry.register('line', LineLogFormatter,
                                'Log format with one line per revision')
log_formatter_registry.register('gnu-changelog', GnuChangelogLogFormatter,
                                'Format used by GNU ChangeLog files')


def register_formatter(name, formatter):
    log_formatter_registry.register(name, formatter)


def log_formatter(name, *args, **kwargs):
    """Construct a formatter from arguments.

    name -- Name of the formatter to construct; currently 'long', 'short' and
        'line' are supported.
    """
    try:
        return log_formatter_registry.make_formatter(name, *args, **kwargs)
    except KeyError:
        raise errors.BzrCommandError("unknown log formatter: %r" % name)
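
# Example sketch: a plugin can provide its own output style by subclassing
# LogFormatter, registering it, and then constructing it by name. The name
# 'plugin-style' and the class below are hypothetical, not part of bzrlib.
#
#     class PluginLogFormatter(LogFormatter):
#
#         supports_merge_revisions = True
#
#         def log_revision(self, revision):
#             self.to_file.write('%s: %s\n' % (revision.revno,
#                 revision.rev.get_summary()))
#
#     register_formatter('plugin-style', PluginLogFormatter)
#     lf = log_formatter('plugin-style', to_file=sys.stdout)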


def author_list_all(rev):
    return rev.get_apparent_authors()[:]


def author_list_first(rev):
    lst = rev.get_apparent_authors()
    try:
        return [lst[0]]
    except IndexError:
        return []


def author_list_committer(rev):
    return [rev.committer]


author_list_registry = registry.Registry()

author_list_registry.register('all', author_list_all,
                              'All authors')

author_list_registry.register('first', author_list_first,
                              'The first author')

author_list_registry.register('committer', author_list_committer,
                              'The committer')
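
# Example sketch: a caller can resolve one of the policies registered above
# by name and apply it to a revision to decide which names to print. The
# variable names below are illustrative only.
#
#     author_list = author_list_registry.get('first')
#     names = author_list(rev)    # e.g. ['Jane Doe <jane@example.com>']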


def show_one_log(revno, rev, delta, verbose, to_file, show_timezone):
    # deprecated; for compatibility
    lf = LongLogFormatter(to_file=to_file, show_timezone=show_timezone)
    lf.show(revno, rev, delta)


def show_changed_revisions(branch, old_rh, new_rh, to_file=None,
                           log_format='long'):
    """Show the change in revision history comparing the old revision history
    to the new one.

    :param branch: The branch where the revisions exist
    :param old_rh: The old revision history
    :param new_rh: The new revision history
    :param to_file: A file to write the results to. If None, stdout will be
        used.
    """
    if to_file is None:
        to_file = codecs.getwriter(get_terminal_encoding())(sys.stdout,
            errors='replace')
    lf = log_formatter(log_format,
                       show_ids=False,
                       to_file=to_file,
                       show_timezone='original')

    # This is the first index which is different between
    # old and new
    base_idx = None
    for i in xrange(max(len(new_rh), len(old_rh))):
        if (len(new_rh) <= i
            or len(old_rh) <= i
            or new_rh[i] != old_rh[i]):
            base_idx = i
            break

    if base_idx is None:
        to_file.write('Nothing seems to have changed\n')
        return
    ## TODO: It might be nice to do something like show_log
    ##       and show the merged entries. But since this is the
    ##       removed revisions, it shouldn't be as important
    if base_idx < len(old_rh):
        to_file.write('*' * 60)
        to_file.write('\nRemoved Revisions:\n')
        for i in range(base_idx, len(old_rh)):
            rev = branch.repository.get_revision(old_rh[i])
            lr = LogRevision(rev, i + 1, 0, None)
            lf.log_revision(lr)
        to_file.write('*' * 60)
        to_file.write('\n\n')
    if base_idx < len(new_rh):
        to_file.write('Added Revisions:\n')
        show_log(branch,
                 lf,
                 None,
                 verbose=False,
                 direction='forward',
                 start_revision=base_idx + 1,
                 end_revision=len(new_rh),
                 search=None)


def get_history_change(old_revision_id, new_revision_id, repository):
    """Calculate the uncommon lefthand history between two revisions.

    :param old_revision_id: The original revision id.
    :param new_revision_id: The new revision id.
    :param repository: The repository to use for the calculation.

    :return: old_history, new_history
    """
    old_history = []
    old_revisions = set()
    new_history = []
    new_revisions = set()
    new_iter = repository.iter_reverse_revision_history(new_revision_id)
    old_iter = repository.iter_reverse_revision_history(old_revision_id)
    stop_revision = None
    do_old = True
    do_new = True
    while do_new or do_old:
        if do_new:
            try:
                new_revision = new_iter.next()
            except StopIteration:
                do_new = False
            else:
                new_history.append(new_revision)
                new_revisions.add(new_revision)
                if new_revision in old_revisions:
                    stop_revision = new_revision
                    break
        if do_old:
            try:
                old_revision = old_iter.next()
            except StopIteration:
                do_old = False
            else:
                old_history.append(old_revision)
                old_revisions.add(old_revision)
                if old_revision in new_revisions:
                    stop_revision = old_revision
                    break
    new_history.reverse()
    old_history.reverse()
    if stop_revision is not None:
        new_history = new_history[new_history.index(stop_revision) + 1:]
        old_history = old_history[old_history.index(stop_revision) + 1:]
    return old_history, new_history
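
# Example sketch: if a branch tip moves from revision A to a descendant C
# (with B in between), the revision ids and branch below being hypothetical,
# the uncommon lefthand histories would be expected to come back as:
#
#     old_history, new_history = get_history_change(
#         'rev-A-id', 'rev-C-id', branch.repository)
#     # old_history == []                        (A is an ancestor of C)
#     # new_history == ['rev-B-id', 'rev-C-id']  (oldest first)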


def show_branch_change(branch, output, old_revno, old_revision_id):
    """Show the changes made to a branch.

    :param branch: The branch to show changes about.
    :param output: A file-like object to write changes to.
    :param old_revno: The revno of the old tip.
    :param old_revision_id: The revision_id of the old tip.
    """
    new_revno, new_revision_id = branch.last_revision_info()
    old_history, new_history = get_history_change(old_revision_id,
                                                  new_revision_id,
                                                  branch.repository)
    if old_history == [] and new_history == []:
        output.write('Nothing seems to have changed\n')
        return

    log_format = log_formatter_registry.get_default(branch)
    lf = log_format(show_ids=False, to_file=output, show_timezone='original')
    if old_history != []:
        output.write('*' * 60)
        output.write('\nRemoved Revisions:\n')
        show_flat_log(branch.repository, old_history, old_revno, lf)
        output.write('*' * 60)
        output.write('\n\n')
    if new_history != []:
        output.write('Added Revisions:\n')
        start_revno = new_revno - len(new_history) + 1
        show_log(branch, lf, None, verbose=False, direction='forward',
                 start_revision=start_revno,)


def show_flat_log(repository, history, last_revno, lf):
    """Show a simple log of the specified history.

    :param repository: The repository to retrieve revisions from.
    :param history: A list of revision_ids indicating the lefthand history.
    :param last_revno: The revno of the last revision_id in the history.
    :param lf: The log formatter to use.
    """
    start_revno = last_revno - len(history) + 1
    revisions = repository.get_revisions(history)
    for i, rev in enumerate(revisions):
        lr = LogRevision(rev, i + last_revno, 0, None)
        lf.log_revision(lr)


def _get_info_for_log_files(revisionspec_list, file_list, add_cleanup):
    """Find file-ids and kinds given a list of files and a revision range.

    We search for files at the end of the range. If not found there,
    we try the start of the range.

    :param revisionspec_list: revision range as parsed on the command line
    :param file_list: the list of paths given on the command line;
      the first of these can be a branch location or a file path,
      the remainder must be file paths
    :param add_cleanup: When the branch returned is read locked,
      an unlock call will be queued to the cleanup.
    :return: (branch, info_list, start_rev_info, end_rev_info) where
      info_list is a list of (relative_path, file_id, kind) tuples where
      kind is one of the values 'directory', 'file', 'symlink', 'tree-reference'.
      branch will be read-locked.
    """
    from builtins import _get_revision_range, safe_relpath_files
    tree, b, path = bzrdir.BzrDir.open_containing_tree_or_branch(file_list[0])
    add_cleanup(b.lock_read().unlock)
    # XXX: It's damn messy converting a list of paths to relative paths when
    # those paths might be deleted ones, they might be on a case-insensitive
    # filesystem and/or they might be in silly locations (like another branch).
    # For example, what should "log bzr://branch/dir/file1 file2" do? (Is
    # file2 implicitly in the same dir as file1 or should its directory be
    # taken from the current tree somehow?) For now, this solves the common
    # case of running log in a nested directory, assuming paths beyond the
    # first one haven't been deleted ...
    if tree:
        relpaths = [path] + safe_relpath_files(tree, file_list[1:])
    else:
        relpaths = [path] + file_list[1:]
    info_list = []
    start_rev_info, end_rev_info = _get_revision_range(revisionspec_list, b,
        "log")
    if relpaths in ([], [u'']):
        return b, [], start_rev_info, end_rev_info
    if start_rev_info is None and end_rev_info is None:
        if tree is None:
            tree = b.basis_tree()
        tree1 = None
        for fp in relpaths:
            file_id = tree.path2id(fp)
            kind = _get_kind_for_file_id(tree, file_id)
            if file_id is None:
                # go back to when time began
                if tree1 is None:
                    try:
                        rev1 = b.get_rev_id(1)
                    except errors.NoSuchRevision:
                        # No history at all
                        file_id = None
                        kind = None
                    else:
                        tree1 = b.repository.revision_tree(rev1)
                if tree1:
                    file_id = tree1.path2id(fp)
                    kind = _get_kind_for_file_id(tree1, file_id)
            info_list.append((fp, file_id, kind))

    elif start_rev_info == end_rev_info:
        # One revision given - file must exist in it
        tree = b.repository.revision_tree(end_rev_info.rev_id)
        for fp in relpaths:
            file_id = tree.path2id(fp)
            kind = _get_kind_for_file_id(tree, file_id)
            info_list.append((fp, file_id, kind))

    else:
        # Revision range given. Get the file-id from the end tree.
        # If that fails, try the start tree.
        rev_id = end_rev_info.rev_id
        if rev_id is None:
            tree = b.basis_tree()
        else:
            tree = b.repository.revision_tree(rev_id)
        tree1 = None
        for fp in relpaths:
            file_id = tree.path2id(fp)
            kind = _get_kind_for_file_id(tree, file_id)
            if file_id is None:
                if tree1 is None:
                    rev_id = start_rev_info.rev_id
                    if rev_id is None:
                        rev1 = b.get_rev_id(1)
                        tree1 = b.repository.revision_tree(rev1)
                    else:
                        tree1 = b.repository.revision_tree(rev_id)
                file_id = tree1.path2id(fp)
                kind = _get_kind_for_file_id(tree1, file_id)
            info_list.append((fp, file_id, kind))
    return b, info_list, start_rev_info, end_rev_info
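
# Example sketch: for a log of a single file with no revision range, the
# path and cleanup list below are hypothetical; info_list is expected to
# hold one (relative_path, file_id, kind) tuple per requested path:
#
#     cleanups = []
#     b, info_list, start_rev, end_rev = _get_info_for_log_files(
#         None, ['doc/index.txt'], cleanups.append)
#     # info_list == [('doc/index.txt', <file-id>, 'file')]
#     # b is read-locked; its unlock callable has been queued in cleanups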


def _get_kind_for_file_id(tree, file_id):
    """Return the kind of a file-id or None if it doesn't exist."""
    if file_id is not None:
        return tree.kind(file_id)
    else:
        return None


properties_handler_registry = registry.Registry()

# Use the properties handlers to print out bug information if available
def _bugs_properties_handler(revision):
    if 'bugs' in revision.properties:
        bug_lines = revision.properties['bugs'].split('\n')
        bug_rows = [line.split(' ', 1) for line in bug_lines]
        fixed_bug_urls = [row[0] for row in bug_rows if
                          len(row) > 1 and row[1] == 'fixed']

        if fixed_bug_urls:
            return {'fixes bug(s)': ' '.join(fixed_bug_urls)}
    return {}

properties_handler_registry.register('bugs_properties_handler',
                                     _bugs_properties_handler)
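
# Example sketch: plugins can surface additional revision properties the same
# way. The handler below is hypothetical; a handler returns a dict of extra
# fields for the formatter to display (here keyed off the standard
# 'branch-nick' revision property).
#
#     def _nick_properties_handler(revision):
#         nick = revision.properties.get('branch-nick')
#         if nick:
#             return {'nickname': nick}
#         return {}
#
#     properties_handler_registry.register('nick_properties_handler',
#                                          _nick_properties_handler)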


# Adapters through which the revision ids to log are filtered. When log is
# called, the log_rev_iterator is adapted through each of these factory
# methods. Plugins are welcome to mutate this list in any way they like - as
# long as the overall behaviour is preserved. At this point there is no
# extensible mechanism for getting parameters to each factory method, and
# until there is this won't be considered a stable api.
log_adapters = [
    # core log logic
    _make_batch_filter,
    # read revision objects
    _make_revision_objects,
    # filter on log messages
    _make_search_filter,
    # generate deltas for things we will show